Diffstat (limited to 'vendor/github.com/pelletier/go-toml')
-rw-r--r--  vendor/github.com/pelletier/go-toml/LICENSE  21
-rw-r--r--  vendor/github.com/pelletier/go-toml/cmd/test_program.go  91
-rw-r--r--  vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go  72
-rw-r--r--  vendor/github.com/pelletier/go-toml/cmd/tomll/main.go  66
-rw-r--r--  vendor/github.com/pelletier/go-toml/doc.go  23
-rw-r--r--  vendor/github.com/pelletier/go-toml/fuzz.go  31
-rw-r--r--  vendor/github.com/pelletier/go-toml/keysparsing.go  85
-rw-r--r--  vendor/github.com/pelletier/go-toml/lexer.go  750
-rw-r--r--  vendor/github.com/pelletier/go-toml/marshal.go  600
-rw-r--r--  vendor/github.com/pelletier/go-toml/parser.go  430
-rw-r--r--  vendor/github.com/pelletier/go-toml/position.go  29
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/doc.go  175
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/lexer.go  357
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/match.go  232
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/parser.go  275
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/query.go  158
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/tokens.go  106
-rw-r--r--  vendor/github.com/pelletier/go-toml/token.go  144
-rw-r--r--  vendor/github.com/pelletier/go-toml/toml.go  309
-rw-r--r--  vendor/github.com/pelletier/go-toml/tomltree_create.go  142
-rw-r--r--  vendor/github.com/pelletier/go-toml/tomltree_write.go  287
21 files changed, 4383 insertions, 0 deletions
diff --git a/vendor/github.com/pelletier/go-toml/LICENSE b/vendor/github.com/pelletier/go-toml/LICENSE
new file mode 100644
index 00000000..583bdae6
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/pelletier/go-toml/cmd/test_program.go b/vendor/github.com/pelletier/go-toml/cmd/test_program.go
new file mode 100644
index 00000000..73077f61
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/cmd/test_program.go
@@ -0,0 +1,91 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "time"
+
+ "github.com/pelletier/go-toml"
+)
+
+func main() {
+ bytes, err := ioutil.ReadAll(os.Stdin)
+ if err != nil {
+ log.Fatalf("Error during TOML read: %s", err)
+ os.Exit(2)
+ }
+ tree, err := toml.Load(string(bytes))
+ if err != nil {
+ log.Fatalf("Error during TOML load: %s", err)
+ os.Exit(1)
+ }
+
+ typedTree := translate(*tree)
+
+ if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil {
+ log.Fatalf("Error encoding JSON: %s", err)
+ os.Exit(3)
+ }
+
+ os.Exit(0)
+}
+
+func translate(tomlData interface{}) interface{} {
+ switch orig := tomlData.(type) {
+ case map[string]interface{}:
+ typed := make(map[string]interface{}, len(orig))
+ for k, v := range orig {
+ typed[k] = translate(v)
+ }
+ return typed
+ case *toml.Tree:
+ return translate(*orig)
+ case toml.Tree:
+ keys := orig.Keys()
+ typed := make(map[string]interface{}, len(keys))
+ for _, k := range keys {
+ typed[k] = translate(orig.GetPath([]string{k}))
+ }
+ return typed
+ case []*toml.Tree:
+ typed := make([]map[string]interface{}, len(orig))
+ for i, v := range orig {
+ typed[i] = translate(v).(map[string]interface{})
+ }
+ return typed
+ case []map[string]interface{}:
+ typed := make([]map[string]interface{}, len(orig))
+ for i, v := range orig {
+ typed[i] = translate(v).(map[string]interface{})
+ }
+ return typed
+ case []interface{}:
+ typed := make([]interface{}, len(orig))
+ for i, v := range orig {
+ typed[i] = translate(v)
+ }
+ return tag("array", typed)
+ case time.Time:
+ return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
+ case bool:
+ return tag("bool", fmt.Sprintf("%v", orig))
+ case int64:
+ return tag("integer", fmt.Sprintf("%d", orig))
+ case float64:
+ return tag("float", fmt.Sprintf("%v", orig))
+ case string:
+ return tag("string", orig)
+ }
+
+ panic(fmt.Sprintf("Unknown type: %T", tomlData))
+}
+
+func tag(typeName string, data interface{}) map[string]interface{} {
+ return map[string]interface{}{
+ "type": typeName,
+ "value": data,
+ }
+}
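For orientation, a hedged sketch of the interchange format this test program emits (assuming the binary is built as test_program, a name invented for the example): every TOML value is wrapped in a {"type", "value"} object by the tag helper above.

    $ echo 'a = 1' | ./test_program
    {"a":{"type":"integer","value":"1"}}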
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go
new file mode 100644
index 00000000..b2d6fc67
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go
@@ -0,0 +1,72 @@
+// Tomljson reads TOML and converts to JSON.
+//
+// Usage:
+// cat file.toml | tomljson > file.json
+// tomljson file1.toml > file.json
+package main
+
+import (
+ "encoding/json"
+ "flag"
+ "fmt"
+ "io"
+ "os"
+
+ "github.com/pelletier/go-toml"
+)
+
+func main() {
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, `tomljson can be used in two ways:
+Writing to STDIN and reading from STDOUT:
+ cat file.toml | tomljson > file.json
+
+Reading from a file name:
+ tomljson file.toml
+`)
+ }
+ flag.Parse()
+ os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
+}
+
+func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
+ // read from stdin and print to stdout
+ inputReader := defaultInput
+
+ if len(files) > 0 {
+ var err error
+ inputReader, err = os.Open(files[0])
+ if err != nil {
+ printError(err, errorOutput)
+ return -1
+ }
+ }
+ s, err := reader(inputReader)
+ if err != nil {
+ printError(err, errorOutput)
+ return -1
+ }
+ io.WriteString(output, s+"\n")
+ return 0
+}
+
+func printError(err error, output io.Writer) {
+ io.WriteString(output, err.Error()+"\n")
+}
+
+func reader(r io.Reader) (string, error) {
+ tree, err := toml.LoadReader(r)
+ if err != nil {
+ return "", err
+ }
+ return mapToJSON(tree)
+}
+
+func mapToJSON(tree *toml.Tree) (string, error) {
+ treeMap := tree.ToMap()
+ bytes, err := json.MarshalIndent(treeMap, "", " ")
+ if err != nil {
+ return "", err
+ }
+ return string(bytes[:]), nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go
new file mode 100644
index 00000000..36c7e375
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go
@@ -0,0 +1,66 @@
+// Tomll is a linter for TOML
+//
+// Usage:
+// cat file.toml | tomll > file_linted.toml
+// tomll file1.toml file2.toml # lint the two files in place
+package main
+
+import (
+ "flag"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+
+ "github.com/pelletier/go-toml"
+)
+
+func main() {
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, `tomll can be used in two ways:
+Writing to STDIN and reading from STDOUT:
+ cat file.toml | tomll > file.toml
+
+Reading and updating a list of files:
+ tomll a.toml b.toml c.toml
+
+When given a list of files, tomll will modify all files in place without asking.
+`)
+ }
+ flag.Parse()
+ // read from stdin and print to stdout
+ if flag.NArg() == 0 {
+ s, err := lintReader(os.Stdin)
+ if err != nil {
+ io.WriteString(os.Stderr, err.Error())
+ os.Exit(-1)
+ }
+ io.WriteString(os.Stdout, s)
+ } else {
+ // otherwise modify a list of files
+ for _, filename := range flag.Args() {
+ s, err := lintFile(filename)
+ if err != nil {
+ io.WriteString(os.Stderr, err.Error())
+ os.Exit(-1)
+ }
+ ioutil.WriteFile(filename, []byte(s), 0644)
+ }
+ }
+}
+
+func lintFile(filename string) (string, error) {
+ tree, err := toml.LoadFile(filename)
+ if err != nil {
+ return "", err
+ }
+ return tree.String(), nil
+}
+
+func lintReader(r io.Reader) (string, error) {
+ tree, err := toml.LoadReader(r)
+ if err != nil {
+ return "", err
+ }
+ return tree.String(), nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/doc.go b/vendor/github.com/pelletier/go-toml/doc.go
new file mode 100644
index 00000000..d5fd98c0
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/doc.go
@@ -0,0 +1,23 @@
+// Package toml is a TOML parser and manipulation library.
+//
+// This version supports the specification as described in
+// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md
+//
+// Marshaling
+//
+// Go-toml can marshal and unmarshal TOML documents from and to data
+// structures.
+//
+// TOML document as a tree
+//
+// Go-toml can operate on a TOML document as a tree. Use one of the Load*
+// functions to parse TOML data and obtain a Tree instance, then one of its
+// methods to manipulate the tree.
+//
+// JSONPath-like queries
+//
+// The package github.com/pelletier/go-toml/query implements a system
+// similar to JSONPath to quickly retrieve elements of a TOML document using a
+// single expression. See the package documentation for more information.
+//
+package toml
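As a hedged illustration of the tree API that doc.go describes (a minimal sketch; the document literal and key names are invented for the example):

    package main

    import (
        "fmt"
        "log"

        "github.com/pelletier/go-toml"
    )

    func main() {
        // Load parses a TOML document into a *toml.Tree.
        tree, err := toml.Load("[server]\nhost = \"127.0.0.1\"\nport = 8080")
        if err != nil {
            log.Fatal(err)
        }

        // Get takes a dotted key; GetPath takes the key parts as a slice.
        host := tree.Get("server.host").(string)
        port := tree.GetPath([]string{"server", "port"}).(int64)
        fmt.Println(host, port) // 127.0.0.1 8080
    }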
diff --git a/vendor/github.com/pelletier/go-toml/fuzz.go b/vendor/github.com/pelletier/go-toml/fuzz.go
new file mode 100644
index 00000000..14570c8d
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/fuzz.go
@@ -0,0 +1,31 @@
+// +build gofuzz
+
+package toml
+
+func Fuzz(data []byte) int {
+ tree, err := LoadBytes(data)
+ if err != nil {
+ if tree != nil {
+ panic("tree must be nil if there is an error")
+ }
+ return 0
+ }
+
+ str, err := tree.ToTomlString()
+ if err != nil {
+ if str != "" {
+ panic(`str must be "" if there is an error`)
+ }
+ panic(err)
+ }
+
+ tree, err = Load(str)
+ if err != nil {
+ if tree != nil {
+ panic("tree must be nil if there is an error")
+ }
+ return 0
+ }
+
+ return 1
+}
diff --git a/vendor/github.com/pelletier/go-toml/keysparsing.go b/vendor/github.com/pelletier/go-toml/keysparsing.go
new file mode 100644
index 00000000..284db646
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/keysparsing.go
@@ -0,0 +1,85 @@
+// Key parsing, handling both bare and quoted keys.
+
+package toml
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "unicode"
+)
+
+// Convert the key group string into its component parts.
+// Double-quoted parts may contain "." inside the key name, but escape
+// sequences are not supported; the lexer must unescape them beforehand.
+func parseKey(key string) ([]string, error) {
+ groups := []string{}
+ var buffer bytes.Buffer
+ inQuotes := false
+ wasInQuotes := false
+ ignoreSpace := true
+ expectDot := false
+
+ for _, char := range key {
+ if ignoreSpace {
+ if char == ' ' {
+ continue
+ }
+ ignoreSpace = false
+ }
+ switch char {
+ case '"':
+ if inQuotes {
+ groups = append(groups, buffer.String())
+ buffer.Reset()
+ wasInQuotes = true
+ }
+ inQuotes = !inQuotes
+ expectDot = false
+ case '.':
+ if inQuotes {
+ buffer.WriteRune(char)
+ } else {
+ if !wasInQuotes {
+ if buffer.Len() == 0 {
+ return nil, errors.New("empty table key")
+ }
+ groups = append(groups, buffer.String())
+ buffer.Reset()
+ }
+ ignoreSpace = true
+ expectDot = false
+ wasInQuotes = false
+ }
+ case ' ':
+ if inQuotes {
+ buffer.WriteRune(char)
+ } else {
+ expectDot = true
+ }
+ default:
+ if !inQuotes && !isValidBareChar(char) {
+ return nil, fmt.Errorf("invalid bare character: %c", char)
+ }
+ if !inQuotes && expectDot {
+ return nil, errors.New("invalid key: expected '.' after space-separated key part")
+ }
+ buffer.WriteRune(char)
+ expectDot = false
+ }
+ }
+ if inQuotes {
+ return nil, errors.New("mismatched quotes")
+ }
+ if buffer.Len() > 0 {
+ groups = append(groups, buffer.String())
+ }
+ if len(groups) == 0 {
+ return nil, errors.New("empty key")
+ }
+ return groups, nil
+}
+
+func isValidBareChar(r rune) bool {
+ return isAlphanumeric(r) || r == '-' || unicode.IsNumber(r)
+}
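A hypothetical test-style sketch of what parseKey is expected to produce (it is unexported, so this only runs from inside the package; the input string is invented for the example):

    package toml

    import (
        "reflect"
        "testing"
    )

    // TestParseKeyQuoted is a hypothetical sketch: dots split bare parts,
    // while a double-quoted part keeps its inner dot.
    func TestParseKeyQuoted(t *testing.T) {
        got, err := parseKey(`a."b.c".d`)
        if err != nil {
            t.Fatal(err)
        }
        want := []string{"a", "b.c", "d"}
        if !reflect.DeepEqual(got, want) {
            t.Fatalf("got %v, want %v", got, want)
        }
    }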
diff --git a/vendor/github.com/pelletier/go-toml/lexer.go b/vendor/github.com/pelletier/go-toml/lexer.go
new file mode 100644
index 00000000..d11de428
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/lexer.go
@@ -0,0 +1,750 @@
+// TOML lexer.
+//
+// Written using the principles developed by Rob Pike in
+// http://www.youtube.com/watch?v=HxaD_trXwRE
+
+package toml
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+var dateRegexp *regexp.Regexp
+
+// Define state functions
+type tomlLexStateFn func() tomlLexStateFn
+
+// Define lexer
+type tomlLexer struct {
+ inputIdx int
+ input []rune // Textual source
+ currentTokenStart int
+ currentTokenStop int
+ tokens []token
+ depth int
+ line int
+ col int
+ endbufferLine int
+ endbufferCol int
+}
+
+// Basic read operations on input
+
+func (l *tomlLexer) read() rune {
+ r := l.peek()
+ if r == '\n' {
+ l.endbufferLine++
+ l.endbufferCol = 1
+ } else {
+ l.endbufferCol++
+ }
+ l.inputIdx++
+ return r
+}
+
+func (l *tomlLexer) next() rune {
+ r := l.read()
+
+ if r != eof {
+ l.currentTokenStop++
+ }
+ return r
+}
+
+func (l *tomlLexer) ignore() {
+ l.currentTokenStart = l.currentTokenStop
+ l.line = l.endbufferLine
+ l.col = l.endbufferCol
+}
+
+func (l *tomlLexer) skip() {
+ l.next()
+ l.ignore()
+}
+
+func (l *tomlLexer) fastForward(n int) {
+ for i := 0; i < n; i++ {
+ l.next()
+ }
+}
+
+func (l *tomlLexer) emitWithValue(t tokenType, value string) {
+ l.tokens = append(l.tokens, token{
+ Position: Position{l.line, l.col},
+ typ: t,
+ val: value,
+ })
+ l.ignore()
+}
+
+func (l *tomlLexer) emit(t tokenType) {
+ l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop]))
+}
+
+func (l *tomlLexer) peek() rune {
+ if l.inputIdx >= len(l.input) {
+ return eof
+ }
+ return l.input[l.inputIdx]
+}
+
+func (l *tomlLexer) peekString(size int) string {
+ maxIdx := len(l.input)
+ upperIdx := l.inputIdx + size // FIXME: potential overflow
+ if upperIdx > maxIdx {
+ upperIdx = maxIdx
+ }
+ return string(l.input[l.inputIdx:upperIdx])
+}
+
+func (l *tomlLexer) follow(next string) bool {
+ return next == l.peekString(len(next))
+}
+
+// Error management
+
+func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
+ l.tokens = append(l.tokens, token{
+ Position: Position{l.line, l.col},
+ typ: tokenError,
+ val: fmt.Sprintf(format, args...),
+ })
+ return nil
+}
+
+// State functions
+
+func (l *tomlLexer) lexVoid() tomlLexStateFn {
+ for {
+ next := l.peek()
+ switch next {
+ case '[':
+ return l.lexTableKey
+ case '#':
+ return l.lexComment(l.lexVoid)
+ case '=':
+ return l.lexEqual
+ case '\r':
+ fallthrough
+ case '\n':
+ l.skip()
+ continue
+ }
+
+ if isSpace(next) {
+ l.skip()
+ }
+
+ if l.depth > 0 {
+ return l.lexRvalue
+ }
+
+ if isKeyStartChar(next) {
+ return l.lexKey
+ }
+
+ if next == eof {
+ l.next()
+ break
+ }
+ }
+
+ l.emit(tokenEOF)
+ return nil
+}
+
+func (l *tomlLexer) lexRvalue() tomlLexStateFn {
+ for {
+ next := l.peek()
+ switch next {
+ case '.':
+ return l.errorf("cannot start float with a dot")
+ case '=':
+ return l.lexEqual
+ case '[':
+ l.depth++
+ return l.lexLeftBracket
+ case ']':
+ l.depth--
+ return l.lexRightBracket
+ case '{':
+ return l.lexLeftCurlyBrace
+ case '}':
+ return l.lexRightCurlyBrace
+ case '#':
+ return l.lexComment(l.lexRvalue)
+ case '"':
+ return l.lexString
+ case '\'':
+ return l.lexLiteralString
+ case ',':
+ return l.lexComma
+ case '\r':
+ fallthrough
+ case '\n':
+ l.skip()
+ if l.depth == 0 {
+ return l.lexVoid
+ }
+ return l.lexRvalue
+ case '_':
+ return l.errorf("cannot start number with underscore")
+ }
+
+ if l.follow("true") {
+ return l.lexTrue
+ }
+
+ if l.follow("false") {
+ return l.lexFalse
+ }
+
+ if l.follow("inf") {
+ return l.lexInf
+ }
+
+ if l.follow("nan") {
+ return l.lexNan
+ }
+
+ if isSpace(next) {
+ l.skip()
+ continue
+ }
+
+ if next == eof {
+ l.next()
+ break
+ }
+
+ possibleDate := l.peekString(35)
+ dateMatch := dateRegexp.FindString(possibleDate)
+ if dateMatch != "" {
+ l.fastForward(len(dateMatch))
+ return l.lexDate
+ }
+
+ if next == '+' || next == '-' || isDigit(next) {
+ return l.lexNumber
+ }
+
+ if isAlphanumeric(next) {
+ return l.lexKey
+ }
+
+ return l.errorf("no value can start with %c", next)
+ }
+
+ l.emit(tokenEOF)
+ return nil
+}
+
+func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn {
+ l.next()
+ l.emit(tokenLeftCurlyBrace)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn {
+ l.next()
+ l.emit(tokenRightCurlyBrace)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexDate() tomlLexStateFn {
+ l.emit(tokenDate)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexTrue() tomlLexStateFn {
+ l.fastForward(4)
+ l.emit(tokenTrue)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexFalse() tomlLexStateFn {
+ l.fastForward(5)
+ l.emit(tokenFalse)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexInf() tomlLexStateFn {
+ l.fastForward(3)
+ l.emit(tokenInf)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexNan() tomlLexStateFn {
+ l.fastForward(3)
+ l.emit(tokenNan)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexEqual() tomlLexStateFn {
+ l.next()
+ l.emit(tokenEqual)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexComma() tomlLexStateFn {
+ l.next()
+ l.emit(tokenComma)
+ return l.lexRvalue
+}
+
+// Parses the key and emits its value without escape sequences.
+// Bare keys, basic string keys and literal string keys are supported.
+func (l *tomlLexer) lexKey() tomlLexStateFn {
+ growingString := ""
+
+ for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() {
+ if r == '"' {
+ l.next()
+ str, err := l.lexStringAsString(`"`, false, true)
+ if err != nil {
+ return l.errorf(err.Error())
+ }
+ growingString += str
+ l.next()
+ continue
+ } else if r == '\'' {
+ l.next()
+ str, err := l.lexLiteralStringAsString(`'`, false)
+ if err != nil {
+ return l.errorf(err.Error())
+ }
+ growingString += str
+ l.next()
+ continue
+ } else if r == '\n' {
+ return l.errorf("keys cannot contain new lines")
+ } else if isSpace(r) {
+ break
+ } else if !isValidBareChar(r) {
+ return l.errorf("keys cannot contain %c character", r)
+ }
+ growingString += string(r)
+ l.next()
+ }
+ l.emitWithValue(tokenKey, growingString)
+ return l.lexVoid
+}
+
+func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn {
+ return func() tomlLexStateFn {
+ for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
+ if next == '\r' && l.follow("\r\n") {
+ break
+ }
+ l.next()
+ }
+ l.ignore()
+ return previousState
+ }
+}
+
+func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
+ l.next()
+ l.emit(tokenLeftBracket)
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) {
+ growingString := ""
+
+ if discardLeadingNewLine {
+ if l.follow("\r\n") {
+ l.skip()
+ l.skip()
+ } else if l.peek() == '\n' {
+ l.skip()
+ }
+ }
+
+ // find end of string
+ for {
+ if l.follow(terminator) {
+ return growingString, nil
+ }
+
+ next := l.peek()
+ if next == eof {
+ break
+ }
+ growingString += string(l.next())
+ }
+
+ return "", errors.New("unclosed string")
+}
+
+func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
+ l.skip()
+
+ // handle special case for triple-quote
+ terminator := "'"
+ discardLeadingNewLine := false
+ if l.follow("''") {
+ l.skip()
+ l.skip()
+ terminator = "'''"
+ discardLeadingNewLine = true
+ }
+
+ str, err := l.lexLiteralStringAsString(terminator, discardLeadingNewLine)
+ if err != nil {
+ return l.errorf(err.Error())
+ }
+
+ l.emitWithValue(tokenString, str)
+ l.fastForward(len(terminator))
+ l.ignore()
+ return l.lexRvalue
+}
+
+// Lex a string and return the results as a string.
+// Terminator is the substring indicating the end of the token.
+// The resulting string does not include the terminator.
+func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) {
+ growingString := ""
+
+ if discardLeadingNewLine {
+ if l.follow("\r\n") {
+ l.skip()
+ l.skip()
+ } else if l.peek() == '\n' {
+ l.skip()
+ }
+ }
+
+ for {
+ if l.follow(terminator) {
+ return growingString, nil
+ }
+
+ if l.follow("\\") {
+ l.next()
+ switch l.peek() {
+ case '\r':
+ fallthrough
+ case '\n':
+ fallthrough
+ case '\t':
+ fallthrough
+ case ' ':
+ // skip all whitespace chars following backslash
+ for strings.ContainsRune("\r\n\t ", l.peek()) {
+ l.next()
+ }
+ case '"':
+ growingString += "\""
+ l.next()
+ case 'n':
+ growingString += "\n"
+ l.next()
+ case 'b':
+ growingString += "\b"
+ l.next()
+ case 'f':
+ growingString += "\f"
+ l.next()
+ case '/':
+ growingString += "/"
+ l.next()
+ case 't':
+ growingString += "\t"
+ l.next()
+ case 'r':
+ growingString += "\r"
+ l.next()
+ case '\\':
+ growingString += "\\"
+ l.next()
+ case 'u':
+ l.next()
+ code := ""
+ for i := 0; i < 4; i++ {
+ c := l.peek()
+ if !isHexDigit(c) {
+ return "", errors.New("unfinished unicode escape")
+ }
+ l.next()
+ code = code + string(c)
+ }
+ intcode, err := strconv.ParseInt(code, 16, 32)
+ if err != nil {
+ return "", errors.New("invalid unicode escape: \\u" + code)
+ }
+ growingString += string(rune(intcode))
+ case 'U':
+ l.next()
+ code := ""
+ for i := 0; i < 8; i++ {
+ c := l.peek()
+ if !isHexDigit(c) {
+ return "", errors.New("unfinished unicode escape")
+ }
+ l.next()
+ code = code + string(c)
+ }
+ intcode, err := strconv.ParseInt(code, 16, 64)
+ if err != nil {
+ return "", errors.New("invalid unicode escape: \\U" + code)
+ }
+ growingString += string(rune(intcode))
+ default:
+ return "", errors.New("invalid escape sequence: \\" + string(l.peek()))
+ }
+ } else {
+ r := l.peek()
+
+ if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) {
+ return "", fmt.Errorf("unescaped control character %U", r)
+ }
+ l.next()
+ growingString += string(r)
+ }
+
+ if l.peek() == eof {
+ break
+ }
+ }
+
+ return "", errors.New("unclosed string")
+}
+
+func (l *tomlLexer) lexString() tomlLexStateFn {
+ l.skip()
+
+ // handle special case for triple-quote
+ terminator := `"`
+ discardLeadingNewLine := false
+ acceptNewLines := false
+ if l.follow(`""`) {
+ l.skip()
+ l.skip()
+ terminator = `"""`
+ discardLeadingNewLine = true
+ acceptNewLines = true
+ }
+
+ str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
+
+ if err != nil {
+ return l.errorf(err.Error())
+ }
+
+ l.emitWithValue(tokenString, str)
+ l.fastForward(len(terminator))
+ l.ignore()
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) lexTableKey() tomlLexStateFn {
+ l.next()
+
+ if l.peek() == '[' {
+ // token '[[' signifies an array of tables
+ l.next()
+ l.emit(tokenDoubleLeftBracket)
+ return l.lexInsideTableArrayKey
+ }
+ // vanilla table key
+ l.emit(tokenLeftBracket)
+ return l.lexInsideTableKey
+}
+
+// Parse the key till "]]", but only bare keys are supported
+func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
+ for r := l.peek(); r != eof; r = l.peek() {
+ switch r {
+ case ']':
+ if l.currentTokenStop > l.currentTokenStart {
+ l.emit(tokenKeyGroupArray)
+ }
+ l.next()
+ if l.peek() != ']' {
+ break
+ }
+ l.next()
+ l.emit(tokenDoubleRightBracket)
+ return l.lexVoid
+ case '[':
+ return l.errorf("table array key cannot contain '['")
+ default:
+ l.next()
+ }
+ }
+ return l.errorf("unclosed table array key")
+}
+
+// Parse the key till "]" but only bare keys are supported
+func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
+ for r := l.peek(); r != eof; r = l.peek() {
+ switch r {
+ case ']':
+ if l.currentTokenStop > l.currentTokenStart {
+ l.emit(tokenKeyGroup)
+ }
+ l.next()
+ l.emit(tokenRightBracket)
+ return l.lexVoid
+ case '[':
+ return l.errorf("table key cannot contain '['")
+ default:
+ l.next()
+ }
+ }
+ return l.errorf("unclosed table key")
+}
+
+func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
+ l.next()
+ l.emit(tokenRightBracket)
+ return l.lexRvalue
+}
+
+type validRuneFn func(r rune) bool
+
+func isValidHexRune(r rune) bool {
+ return r >= 'a' && r <= 'f' ||
+ r >= 'A' && r <= 'F' ||
+ r >= '0' && r <= '9' ||
+ r == '_'
+}
+
+func isValidOctalRune(r rune) bool {
+ return r >= '0' && r <= '7' || r == '_'
+}
+
+func isValidBinaryRune(r rune) bool {
+ return r == '0' || r == '1' || r == '_'
+}
+
+func (l *tomlLexer) lexNumber() tomlLexStateFn {
+ r := l.peek()
+
+ if r == '0' {
+ follow := l.peekString(2)
+ if len(follow) == 2 {
+ var isValidRune validRuneFn
+ switch follow[1] {
+ case 'x':
+ isValidRune = isValidHexRune
+ case 'o':
+ isValidRune = isValidOctalRune
+ case 'b':
+ isValidRune = isValidBinaryRune
+ default:
+ if follow[1] >= 'a' && follow[1] <= 'z' || follow[1] >= 'A' && follow[1] <= 'Z' {
+ return l.errorf("unknown number base: %s. possible options are x (hex) o (octal) b (binary)", string(follow[1]))
+ }
+ }
+
+ if isValidRune != nil {
+ l.next()
+ l.next()
+ digitSeen := false
+ for {
+ next := l.peek()
+ if !isValidRune(next) {
+ break
+ }
+ digitSeen = true
+ l.next()
+ }
+
+ if !digitSeen {
+ return l.errorf("number needs at least one digit")
+ }
+
+ l.emit(tokenInteger)
+
+ return l.lexRvalue
+ }
+ }
+ }
+
+ if r == '+' || r == '-' {
+ l.next()
+ if l.follow("inf") {
+ return l.lexInf
+ }
+ if l.follow("nan") {
+ return l.lexNan
+ }
+ }
+
+ pointSeen := false
+ expSeen := false
+ digitSeen := false
+ for {
+ next := l.peek()
+ if next == '.' {
+ if pointSeen {
+ return l.errorf("cannot have two dots in one float")
+ }
+ l.next()
+ if !isDigit(l.peek()) {
+ return l.errorf("float cannot end with a dot")
+ }
+ pointSeen = true
+ } else if next == 'e' || next == 'E' {
+ expSeen = true
+ l.next()
+ r := l.peek()
+ if r == '+' || r == '-' {
+ l.next()
+ }
+ } else if isDigit(next) {
+ digitSeen = true
+ l.next()
+ } else if next == '_' {
+ l.next()
+ } else {
+ break
+ }
+ if pointSeen && !digitSeen {
+ return l.errorf("cannot start float with a dot")
+ }
+ }
+
+ if !digitSeen {
+ return l.errorf("no digit in that number")
+ }
+ if pointSeen || expSeen {
+ l.emit(tokenFloat)
+ } else {
+ l.emit(tokenInteger)
+ }
+ return l.lexRvalue
+}
+
+func (l *tomlLexer) run() {
+ for state := l.lexVoid; state != nil; {
+ state = state()
+ }
+}
+
+func init() {
+ dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`)
+}
+
+// Entry point
+func lexToml(inputBytes []byte) []token {
+ runes := bytes.Runes(inputBytes)
+ l := &tomlLexer{
+ input: runes,
+ tokens: make([]token, 0, 256),
+ line: 1,
+ col: 1,
+ endbufferLine: 1,
+ endbufferCol: 1,
+ }
+ l.run()
+ return l.tokens
+}
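A minimal, package-internal sketch of the lexer in isolation (printTokens is a hypothetical helper, not part of the library): lexToml turns raw bytes into the flat token stream that the parser later consumes.

    package toml

    import "fmt"

    // printTokens is a hypothetical helper that dumps the token stream for a
    // one-line document; expect roughly: a key token "answer", an equals
    // token, an integer token "42", and a trailing EOF token.
    func printTokens() {
        for _, tok := range lexToml([]byte("answer = 42\n")) {
            fmt.Printf("%v %q at %s\n", tok.typ, tok.val, tok.Position)
        }
    }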
diff --git a/vendor/github.com/pelletier/go-toml/marshal.go b/vendor/github.com/pelletier/go-toml/marshal.go
new file mode 100644
index 00000000..f584ba4e
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/marshal.go
@@ -0,0 +1,600 @@
+package toml
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+)
+
+type tomlOpts struct {
+ name string
+ comment string
+ commented bool
+ include bool
+ omitempty bool
+}
+
+type encOpts struct {
+ quoteMapKeys bool
+ arraysOneElementPerLine bool
+}
+
+var encOptsDefaults = encOpts{
+ quoteMapKeys: false,
+}
+
+var timeType = reflect.TypeOf(time.Time{})
+var marshalerType = reflect.TypeOf(new(Marshaler)).Elem()
+
+// Check if the given marshal type maps to a Tree primitive
+func isPrimitive(mtype reflect.Type) bool {
+ switch mtype.Kind() {
+ case reflect.Ptr:
+ return isPrimitive(mtype.Elem())
+ case reflect.Bool:
+ return true
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return true
+ case reflect.Float32, reflect.Float64:
+ return true
+ case reflect.String:
+ return true
+ case reflect.Struct:
+ return mtype == timeType || isCustomMarshaler(mtype)
+ default:
+ return false
+ }
+}
+
+// Check if the given marshal type maps to a Tree slice
+func isTreeSlice(mtype reflect.Type) bool {
+ switch mtype.Kind() {
+ case reflect.Slice:
+ return !isOtherSlice(mtype)
+ default:
+ return false
+ }
+}
+
+// Check if the given marshal type maps to a non-Tree slice
+func isOtherSlice(mtype reflect.Type) bool {
+ switch mtype.Kind() {
+ case reflect.Ptr:
+ return isOtherSlice(mtype.Elem())
+ case reflect.Slice:
+ return isPrimitive(mtype.Elem()) || isOtherSlice(mtype.Elem())
+ default:
+ return false
+ }
+}
+
+// Check if the given marshal type maps to a Tree
+func isTree(mtype reflect.Type) bool {
+ switch mtype.Kind() {
+ case reflect.Map:
+ return true
+ case reflect.Struct:
+ return !isPrimitive(mtype)
+ default:
+ return false
+ }
+}
+
+func isCustomMarshaler(mtype reflect.Type) bool {
+ return mtype.Implements(marshalerType)
+}
+
+func callCustomMarshaler(mval reflect.Value) ([]byte, error) {
+ return mval.Interface().(Marshaler).MarshalTOML()
+}
+
+// Marshaler is the interface implemented by types that
+// can marshal themselves into valid TOML.
+type Marshaler interface {
+ MarshalTOML() ([]byte, error)
+}
+
+/*
+Marshal returns the TOML encoding of v. Behavior is similar to the Go json
+encoder, except that there is no concept of a Marshaler interface or MarshalTOML
+function for sub-structs, and currently only definite types can be marshaled
+(i.e. no `interface{}`).
+
+The following struct annotations are supported:
+
+ toml:"Field" Overrides the field's name to output.
+ omitempty When set, empty values and groups are not emitted.
+ comment:"comment" Emits a # comment on the same line. This supports new lines.
+ commented:"true" Emits the value as commented.
+
+Note that pointers are automatically assigned the "omitempty" option, as TOML
+explicitly does not handle null values (saying instead the label should be
+dropped).
+
+Tree structural types and corresponding marshal types:
+
+ *Tree (*)struct, (*)map[string]interface{}
+ []*Tree (*)[](*)struct, (*)[](*)map[string]interface{}
+ []interface{} (as interface{}) (*)[]primitive, (*)[]([]interface{})
+ interface{} (*)primitive
+
+Tree primitive types and corresponding marshal types:
+
+ uint64 uint, uint8-uint64, pointers to same
+ int64 int, int8-int64, pointers to same
+ float64 float32, float64, pointers to same
+ string string, pointers to same
+ bool bool, pointers to same
+ time.Time time.Time{}, pointers to same
+*/
+func Marshal(v interface{}) ([]byte, error) {
+ return NewEncoder(nil).marshal(v)
+}
+
+// Encoder writes TOML values to an output stream.
+type Encoder struct {
+ w io.Writer
+ encOpts
+}
+
+// NewEncoder returns a new encoder that writes to w.
+func NewEncoder(w io.Writer) *Encoder {
+ return &Encoder{
+ w: w,
+ encOpts: encOptsDefaults,
+ }
+}
+
+// Encode writes the TOML encoding of v to the stream.
+//
+// See the documentation for Marshal for details.
+func (e *Encoder) Encode(v interface{}) error {
+ b, err := e.marshal(v)
+ if err != nil {
+ return err
+ }
+ if _, err := e.w.Write(b); err != nil {
+ return err
+ }
+ return nil
+}
+
+// QuoteMapKeys sets up the encoder to encode
+// maps with string type keys with quoted TOML keys.
+//
+// This relieves the character limitations on map keys.
+func (e *Encoder) QuoteMapKeys(v bool) *Encoder {
+ e.quoteMapKeys = v
+ return e
+}
+
+// ArraysWithOneElementPerLine sets up the encoder to encode arrays
+// with more than one element on multiple lines instead of one.
+//
+// For example:
+//
+// A = [1,2,3]
+//
+// Becomes
+//
+// A = [
+// 1,
+// 2,
+// 3,
+// ]
+func (e *Encoder) ArraysWithOneElementPerLine(v bool) *Encoder {
+ e.arraysOneElementPerLine = v
+ return e
+}
+
+func (e *Encoder) marshal(v interface{}) ([]byte, error) {
+ mtype := reflect.TypeOf(v)
+ if mtype.Kind() != reflect.Struct {
+ return []byte{}, errors.New("Only a struct can be marshaled to TOML")
+ }
+ sval := reflect.ValueOf(v)
+ if isCustomMarshaler(mtype) {
+ return callCustomMarshaler(sval)
+ }
+ t, err := e.valueToTree(mtype, sval)
+ if err != nil {
+ return []byte{}, err
+ }
+
+ var buf bytes.Buffer
+ _, err = t.writeTo(&buf, "", "", 0, e.arraysOneElementPerLine)
+
+ return buf.Bytes(), err
+}
+
+// Convert given marshal struct or map value to toml tree
+func (e *Encoder) valueToTree(mtype reflect.Type, mval reflect.Value) (*Tree, error) {
+ if mtype.Kind() == reflect.Ptr {
+ return e.valueToTree(mtype.Elem(), mval.Elem())
+ }
+ tval := newTree()
+ switch mtype.Kind() {
+ case reflect.Struct:
+ for i := 0; i < mtype.NumField(); i++ {
+ mtypef, mvalf := mtype.Field(i), mval.Field(i)
+ opts := tomlOptions(mtypef)
+ if opts.include && (!opts.omitempty || !isZero(mvalf)) {
+ val, err := e.valueToToml(mtypef.Type, mvalf)
+ if err != nil {
+ return nil, err
+ }
+ tval.SetWithComment(opts.name, opts.comment, opts.commented, val)
+ }
+ }
+ case reflect.Map:
+ for _, key := range mval.MapKeys() {
+ mvalf := mval.MapIndex(key)
+ val, err := e.valueToToml(mtype.Elem(), mvalf)
+ if err != nil {
+ return nil, err
+ }
+ if e.quoteMapKeys {
+ keyStr, err := tomlValueStringRepresentation(key.String(), "", e.arraysOneElementPerLine)
+ if err != nil {
+ return nil, err
+ }
+ tval.SetPath([]string{keyStr}, val)
+ } else {
+ tval.Set(key.String(), val)
+ }
+ }
+ }
+ return tval, nil
+}
+
+// Convert given marshal slice to slice of Toml trees
+func (e *Encoder) valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) {
+ tval := make([]*Tree, mval.Len(), mval.Len())
+ for i := 0; i < mval.Len(); i++ {
+ val, err := e.valueToTree(mtype.Elem(), mval.Index(i))
+ if err != nil {
+ return nil, err
+ }
+ tval[i] = val
+ }
+ return tval, nil
+}
+
+// Convert given marshal slice to slice of toml values
+func (e *Encoder) valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, error) {
+ tval := make([]interface{}, mval.Len(), mval.Len())
+ for i := 0; i < mval.Len(); i++ {
+ val, err := e.valueToToml(mtype.Elem(), mval.Index(i))
+ if err != nil {
+ return nil, err
+ }
+ tval[i] = val
+ }
+ return tval, nil
+}
+
+// Convert given marshal value to toml value
+func (e *Encoder) valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) {
+ if mtype.Kind() == reflect.Ptr {
+ return e.valueToToml(mtype.Elem(), mval.Elem())
+ }
+ switch {
+ case isCustomMarshaler(mtype):
+ return callCustomMarshaler(mval)
+ case isTree(mtype):
+ return e.valueToTree(mtype, mval)
+ case isTreeSlice(mtype):
+ return e.valueToTreeSlice(mtype, mval)
+ case isOtherSlice(mtype):
+ return e.valueToOtherSlice(mtype, mval)
+ default:
+ switch mtype.Kind() {
+ case reflect.Bool:
+ return mval.Bool(), nil
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return mval.Int(), nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return mval.Uint(), nil
+ case reflect.Float32, reflect.Float64:
+ return mval.Float(), nil
+ case reflect.String:
+ return mval.String(), nil
+ case reflect.Struct:
+ return mval.Interface().(time.Time), nil
+ default:
+ return nil, fmt.Errorf("Marshal can't handle %v(%v)", mtype, mtype.Kind())
+ }
+ }
+}
+
+// Unmarshal attempts to unmarshal the Tree into a Go struct pointed by v.
+// Neither Unmarshaler interfaces nor UnmarshalTOML functions are supported for
+// sub-structs, and only definite types can be unmarshaled.
+func (t *Tree) Unmarshal(v interface{}) error {
+ d := Decoder{tval: t}
+ return d.unmarshal(v)
+}
+
+// Marshal returns the TOML encoding of Tree.
+// See Marshal() documentation for types mapping table.
+func (t *Tree) Marshal() ([]byte, error) {
+ var buf bytes.Buffer
+ err := NewEncoder(&buf).Encode(t)
+ return buf.Bytes(), err
+}
+
+// Unmarshal parses the TOML-encoded data and stores the result in the value
+// pointed to by v. Behavior is similar to the Go json encoder, except that there
+// is no concept of an Unmarshaler interface or UnmarshalTOML function for
+// sub-structs, and currently only definite types can be unmarshaled to (i.e. no
+// `interface{}`).
+//
+// The following struct annotations are supported:
+//
+// toml:"Field" Overrides the field's name to map to.
+//
+// See Marshal() documentation for types mapping table.
+func Unmarshal(data []byte, v interface{}) error {
+ t, err := LoadReader(bytes.NewReader(data))
+ if err != nil {
+ return err
+ }
+ return t.Unmarshal(v)
+}
+
+// Decoder reads and decodes TOML values from an input stream.
+type Decoder struct {
+ r io.Reader
+ tval *Tree
+ encOpts
+}
+
+// NewDecoder returns a new decoder that reads from r.
+func NewDecoder(r io.Reader) *Decoder {
+ return &Decoder{
+ r: r,
+ encOpts: encOptsDefaults,
+ }
+}
+
+// Decode reads a TOML-encoded value from its input
+// and unmarshals it in the value pointed at by v.
+//
+// See the documentation for Marshal for details.
+func (d *Decoder) Decode(v interface{}) error {
+ var err error
+ d.tval, err = LoadReader(d.r)
+ if err != nil {
+ return err
+ }
+ return d.unmarshal(v)
+}
+
+func (d *Decoder) unmarshal(v interface{}) error {
+ mtype := reflect.TypeOf(v)
+ if mtype.Kind() != reflect.Ptr || mtype.Elem().Kind() != reflect.Struct {
+ return errors.New("Only a pointer to struct can be unmarshaled from TOML")
+ }
+
+ sval, err := d.valueFromTree(mtype.Elem(), d.tval)
+ if err != nil {
+ return err
+ }
+ reflect.ValueOf(v).Elem().Set(sval)
+ return nil
+}
+
+// Convert toml tree to marshal struct or map, using marshal type
+func (d *Decoder) valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) {
+ if mtype.Kind() == reflect.Ptr {
+ return d.unwrapPointer(mtype, tval)
+ }
+ var mval reflect.Value
+ switch mtype.Kind() {
+ case reflect.Struct:
+ mval = reflect.New(mtype).Elem()
+ for i := 0; i < mtype.NumField(); i++ {
+ mtypef := mtype.Field(i)
+ opts := tomlOptions(mtypef)
+ if opts.include {
+ baseKey := opts.name
+ keysToTry := []string{baseKey, strings.ToLower(baseKey), strings.ToTitle(baseKey)}
+ for _, key := range keysToTry {
+ exists := tval.Has(key)
+ if !exists {
+ continue
+ }
+ val := tval.Get(key)
+ mvalf, err := d.valueFromToml(mtypef.Type, val)
+ if err != nil {
+ return mval, formatError(err, tval.GetPosition(key))
+ }
+ mval.Field(i).Set(mvalf)
+ break
+ }
+ }
+ }
+ case reflect.Map:
+ mval = reflect.MakeMap(mtype)
+ for _, key := range tval.Keys() {
+ // TODO: path splits key
+ val := tval.GetPath([]string{key})
+ mvalf, err := d.valueFromToml(mtype.Elem(), val)
+ if err != nil {
+ return mval, formatError(err, tval.GetPosition(key))
+ }
+ mval.SetMapIndex(reflect.ValueOf(key), mvalf)
+ }
+ }
+ return mval, nil
+}
+
+// Convert toml value to marshal struct/map slice, using marshal type
+func (d *Decoder) valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) {
+ mval := reflect.MakeSlice(mtype, len(tval), len(tval))
+ for i := 0; i < len(tval); i++ {
+ val, err := d.valueFromTree(mtype.Elem(), tval[i])
+ if err != nil {
+ return mval, err
+ }
+ mval.Index(i).Set(val)
+ }
+ return mval, nil
+}
+
+// Convert toml value to marshal primitive slice, using marshal type
+func (d *Decoder) valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, error) {
+ mval := reflect.MakeSlice(mtype, len(tval), len(tval))
+ for i := 0; i < len(tval); i++ {
+ val, err := d.valueFromToml(mtype.Elem(), tval[i])
+ if err != nil {
+ return mval, err
+ }
+ mval.Index(i).Set(val)
+ }
+ return mval, nil
+}
+
+// Convert toml value to marshal value, using marshal type
+func (d *Decoder) valueFromToml(mtype reflect.Type, tval interface{}) (reflect.Value, error) {
+ if mtype.Kind() == reflect.Ptr {
+ return d.unwrapPointer(mtype, tval)
+ }
+
+ switch tval.(type) {
+ case *Tree:
+ if isTree(mtype) {
+ return d.valueFromTree(mtype, tval.(*Tree))
+ }
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a tree", tval, tval)
+ case []*Tree:
+ if isTreeSlice(mtype) {
+ return d.valueFromTreeSlice(mtype, tval.([]*Tree))
+ }
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to trees", tval, tval)
+ case []interface{}:
+ if isOtherSlice(mtype) {
+ return d.valueFromOtherSlice(mtype, tval.([]interface{}))
+ }
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a slice", tval, tval)
+ default:
+ switch mtype.Kind() {
+ case reflect.Bool, reflect.Struct:
+ val := reflect.ValueOf(tval)
+ // when mtype is reflect.Struct, this passes only if tval is a time.Time
+ if !val.Type().ConvertibleTo(mtype) {
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
+ }
+
+ return val.Convert(mtype), nil
+ case reflect.String:
+ val := reflect.ValueOf(tval)
+ // stupidly, int64 is convertible to string. So special case this.
+ if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Int64 {
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
+ }
+
+ return val.Convert(mtype), nil
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ val := reflect.ValueOf(tval)
+ if !val.Type().ConvertibleTo(mtype) {
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
+ }
+ if reflect.Indirect(reflect.New(mtype)).OverflowInt(val.Int()) {
+ return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String())
+ }
+
+ return val.Convert(mtype), nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ val := reflect.ValueOf(tval)
+ if !val.Type().ConvertibleTo(mtype) {
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
+ }
+ if val.Int() < 0 {
+ return reflect.ValueOf(nil), fmt.Errorf("%v(%T) is negative so does not fit in %v", tval, tval, mtype.String())
+ }
+ if reflect.Indirect(reflect.New(mtype)).OverflowUint(uint64(val.Int())) {
+ return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String())
+ }
+
+ return val.Convert(mtype), nil
+ case reflect.Float32, reflect.Float64:
+ val := reflect.ValueOf(tval)
+ if !val.Type().ConvertibleTo(mtype) {
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String())
+ }
+ if reflect.Indirect(reflect.New(mtype)).OverflowFloat(val.Float()) {
+ return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String())
+ }
+
+ return val.Convert(mtype), nil
+ default:
+ return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v(%v)", tval, tval, mtype, mtype.Kind())
+ }
+ }
+}
+
+func (d *Decoder) unwrapPointer(mtype reflect.Type, tval interface{}) (reflect.Value, error) {
+ val, err := d.valueFromToml(mtype.Elem(), tval)
+ if err != nil {
+ return reflect.ValueOf(nil), err
+ }
+ mval := reflect.New(mtype.Elem())
+ mval.Elem().Set(val)
+ return mval, nil
+}
+
+func tomlOptions(vf reflect.StructField) tomlOpts {
+ tag := vf.Tag.Get("toml")
+ parse := strings.Split(tag, ",")
+ var comment string
+ if c := vf.Tag.Get("comment"); c != "" {
+ comment = c
+ }
+ commented, _ := strconv.ParseBool(vf.Tag.Get("commented"))
+ result := tomlOpts{name: vf.Name, comment: comment, commented: commented, include: true, omitempty: false}
+ if parse[0] != "" {
+ if parse[0] == "-" && len(parse) == 1 {
+ result.include = false
+ } else {
+ result.name = strings.Trim(parse[0], " ")
+ }
+ }
+ if vf.PkgPath != "" {
+ result.include = false
+ }
+ if len(parse) > 1 && strings.Trim(parse[1], " ") == "omitempty" {
+ result.omitempty = true
+ }
+ if vf.Type.Kind() == reflect.Ptr {
+ result.omitempty = true
+ }
+ return result
+}
+
+func isZero(val reflect.Value) bool {
+ switch val.Type().Kind() {
+ case reflect.Map:
+ fallthrough
+ case reflect.Array:
+ fallthrough
+ case reflect.Slice:
+ return val.Len() == 0
+ default:
+ return reflect.DeepEqual(val.Interface(), reflect.Zero(val.Type()).Interface())
+ }
+}
+
+func formatError(err error, pos Position) error {
+ if err.Error()[0] == '(' { // Error already contains position information
+ return err
+ }
+ return fmt.Errorf("%s: %s", pos, err)
+}
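A hedged end-to-end sketch of the Marshal/Unmarshal API documented above (the Config struct and its tags are invented for the example; note that in this version Marshal takes a struct value while Unmarshal needs a pointer to a struct):

    package main

    import (
        "fmt"
        "log"

        "github.com/pelletier/go-toml"
    )

    // Config is an example struct using the annotations documented on Marshal.
    type Config struct {
        Name    string `toml:"name" comment:"service name"`
        Port    int    `toml:"port"`
        Verbose bool   `toml:"verbose,omitempty"`
    }

    func main() {
        // Marshal accepts a struct value, not a pointer.
        out, err := toml.Marshal(Config{Name: "api", Port: 8080})
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%s", out)

        // Unmarshal fills a struct through a pointer.
        var cfg Config
        if err := toml.Unmarshal(out, &cfg); err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%+v\n", cfg)
    }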
diff --git a/vendor/github.com/pelletier/go-toml/parser.go b/vendor/github.com/pelletier/go-toml/parser.go
new file mode 100644
index 00000000..2d27599a
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/parser.go
@@ -0,0 +1,430 @@
+// TOML Parser.
+
+package toml
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+)
+
+type tomlParser struct {
+ flowIdx int
+ flow []token
+ tree *Tree
+ currentTable []string
+ seenTableKeys []string
+}
+
+type tomlParserStateFn func() tomlParserStateFn
+
+// Formats an error message based on a token and panics with it
+func (p *tomlParser) raiseError(tok *token, msg string, args ...interface{}) {
+ panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...))
+}
+
+func (p *tomlParser) run() {
+ for state := p.parseStart; state != nil; {
+ state = state()
+ }
+}
+
+func (p *tomlParser) peek() *token {
+ if p.flowIdx >= len(p.flow) {
+ return nil
+ }
+ return &p.flow[p.flowIdx]
+}
+
+func (p *tomlParser) assume(typ tokenType) {
+ tok := p.getToken()
+ if tok == nil {
+ p.raiseError(tok, "was expecting token %s, but token stream is empty", tok)
+ }
+ if tok.typ != typ {
+ p.raiseError(tok, "was expecting token %s, but got %s instead", typ, tok)
+ }
+}
+
+func (p *tomlParser) getToken() *token {
+ tok := p.peek()
+ if tok == nil {
+ return nil
+ }
+ p.flowIdx++
+ return tok
+}
+
+func (p *tomlParser) parseStart() tomlParserStateFn {
+ tok := p.peek()
+
+ // end of stream, parsing is finished
+ if tok == nil {
+ return nil
+ }
+
+ switch tok.typ {
+ case tokenDoubleLeftBracket:
+ return p.parseGroupArray
+ case tokenLeftBracket:
+ return p.parseGroup
+ case tokenKey:
+ return p.parseAssign
+ case tokenEOF:
+ return nil
+ default:
+ p.raiseError(tok, "unexpected token")
+ }
+ return nil
+}
+
+func (p *tomlParser) parseGroupArray() tomlParserStateFn {
+ startToken := p.getToken() // discard the [[
+ key := p.getToken()
+ if key.typ != tokenKeyGroupArray {
+ p.raiseError(key, "unexpected token %s, was expecting a table array key", key)
+ }
+
+ // get or create table array element at the indicated part in the path
+ keys, err := parseKey(key.val)
+ if err != nil {
+ p.raiseError(key, "invalid table array key: %s", err)
+ }
+ p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
+ destTree := p.tree.GetPath(keys)
+ var array []*Tree
+ if destTree == nil {
+ array = make([]*Tree, 0)
+ } else if target, ok := destTree.([]*Tree); ok && target != nil {
+ array = destTree.([]*Tree)
+ } else {
+ p.raiseError(key, "key %s is already assigned and not of type table array", key)
+ }
+ p.currentTable = keys
+
+ // add a new tree to the end of the table array
+ newTree := newTree()
+ newTree.position = startToken.Position
+ array = append(array, newTree)
+ p.tree.SetPath(p.currentTable, array)
+
+ // remove all keys that were children of this table array
+ prefix := key.val + "."
+ found := false
+ for ii := 0; ii < len(p.seenTableKeys); {
+ tableKey := p.seenTableKeys[ii]
+ if strings.HasPrefix(tableKey, prefix) {
+ p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...)
+ } else {
+ found = (tableKey == key.val)
+ ii++
+ }
+ }
+
+ // keep this key name from use by other kinds of assignments
+ if !found {
+ p.seenTableKeys = append(p.seenTableKeys, key.val)
+ }
+
+ // move to next parser state
+ p.assume(tokenDoubleRightBracket)
+ return p.parseStart
+}
+
+func (p *tomlParser) parseGroup() tomlParserStateFn {
+ startToken := p.getToken() // discard the [
+ key := p.getToken()
+ if key.typ != tokenKeyGroup {
+ p.raiseError(key, "unexpected token %s, was expecting a table key", key)
+ }
+ for _, item := range p.seenTableKeys {
+ if item == key.val {
+ p.raiseError(key, "duplicated tables")
+ }
+ }
+
+ p.seenTableKeys = append(p.seenTableKeys, key.val)
+ keys, err := parseKey(key.val)
+ if err != nil {
+ p.raiseError(key, "invalid table array key: %s", err)
+ }
+ if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
+ p.raiseError(key, "%s", err)
+ }
+ p.assume(tokenRightBracket)
+ p.currentTable = keys
+ return p.parseStart
+}
+
+func (p *tomlParser) parseAssign() tomlParserStateFn {
+ key := p.getToken()
+ p.assume(tokenEqual)
+
+ value := p.parseRvalue()
+ var tableKey []string
+ if len(p.currentTable) > 0 {
+ tableKey = p.currentTable
+ } else {
+ tableKey = []string{}
+ }
+
+ // find the table to assign, looking out for arrays of tables
+ var targetNode *Tree
+ switch node := p.tree.GetPath(tableKey).(type) {
+ case []*Tree:
+ targetNode = node[len(node)-1]
+ case *Tree:
+ targetNode = node
+ default:
+ p.raiseError(key, "Unknown table type for path: %s",
+ strings.Join(tableKey, "."))
+ }
+
+ // assign value to the found table
+ keyVals := []string{key.val}
+ if len(keyVals) != 1 {
+ p.raiseError(key, "Invalid key")
+ }
+ keyVal := keyVals[0]
+ localKey := []string{keyVal}
+ finalKey := append(tableKey, keyVal)
+ if targetNode.GetPath(localKey) != nil {
+ p.raiseError(key, "The following key was defined twice: %s",
+ strings.Join(finalKey, "."))
+ }
+ var toInsert interface{}
+
+ switch value.(type) {
+ case *Tree, []*Tree:
+ toInsert = value
+ default:
+ toInsert = &tomlValue{value: value, position: key.Position}
+ }
+ targetNode.values[keyVal] = toInsert
+ return p.parseStart
+}
+
+var numberUnderscoreInvalidRegexp *regexp.Regexp
+var hexNumberUnderscoreInvalidRegexp *regexp.Regexp
+
+func numberContainsInvalidUnderscore(value string) error {
+ if numberUnderscoreInvalidRegexp.MatchString(value) {
+ return errors.New("invalid use of _ in number")
+ }
+ return nil
+}
+
+func hexNumberContainsInvalidUnderscore(value string) error {
+ if hexNumberUnderscoreInvalidRegexp.MatchString(value) {
+ return errors.New("invalid use of _ in hex number")
+ }
+ return nil
+}
+
+func cleanupNumberToken(value string) string {
+ cleanedVal := strings.Replace(value, "_", "", -1)
+ return cleanedVal
+}
+
+func (p *tomlParser) parseRvalue() interface{} {
+ tok := p.getToken()
+ if tok == nil || tok.typ == tokenEOF {
+ p.raiseError(tok, "expecting a value")
+ }
+
+ switch tok.typ {
+ case tokenString:
+ return tok.val
+ case tokenTrue:
+ return true
+ case tokenFalse:
+ return false
+ case tokenInf:
+ if tok.val[0] == '-' {
+ return math.Inf(-1)
+ }
+ return math.Inf(1)
+ case tokenNan:
+ return math.NaN()
+ case tokenInteger:
+ cleanedVal := cleanupNumberToken(tok.val)
+ var err error
+ var val int64
+ if len(cleanedVal) >= 3 && cleanedVal[0] == '0' {
+ switch cleanedVal[1] {
+ case 'x':
+ err = hexNumberContainsInvalidUnderscore(tok.val)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ val, err = strconv.ParseInt(cleanedVal[2:], 16, 64)
+ case 'o':
+ err = numberContainsInvalidUnderscore(tok.val)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ val, err = strconv.ParseInt(cleanedVal[2:], 8, 64)
+ case 'b':
+ err = numberContainsInvalidUnderscore(tok.val)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ val, err = strconv.ParseInt(cleanedVal[2:], 2, 64)
+ default:
+ panic("invalid base") // the lexer should catch this first
+ }
+ } else {
+ err = numberContainsInvalidUnderscore(tok.val)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ val, err = strconv.ParseInt(cleanedVal, 10, 64)
+ }
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ return val
+ case tokenFloat:
+ err := numberContainsInvalidUnderscore(tok.val)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ cleanedVal := cleanupNumberToken(tok.val)
+ val, err := strconv.ParseFloat(cleanedVal, 64)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ return val
+ case tokenDate:
+ val, err := time.ParseInLocation(time.RFC3339Nano, tok.val, time.UTC)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ return val
+ case tokenLeftBracket:
+ return p.parseArray()
+ case tokenLeftCurlyBrace:
+ return p.parseInlineTable()
+ case tokenEqual:
+ p.raiseError(tok, "cannot have multiple equals for the same key")
+ case tokenError:
+ p.raiseError(tok, "%s", tok)
+ }
+
+ p.raiseError(tok, "never reached")
+
+ return nil
+}
+
+func tokenIsComma(t *token) bool {
+ return t != nil && t.typ == tokenComma
+}
+
+func (p *tomlParser) parseInlineTable() *Tree {
+ tree := newTree()
+ var previous *token
+Loop:
+ for {
+ follow := p.peek()
+ if follow == nil || follow.typ == tokenEOF {
+ p.raiseError(follow, "unterminated inline table")
+ }
+ switch follow.typ {
+ case tokenRightCurlyBrace:
+ p.getToken()
+ break Loop
+ case tokenKey:
+ if !tokenIsComma(previous) && previous != nil {
+ p.raiseError(follow, "comma expected between fields in inline table")
+ }
+ key := p.getToken()
+ p.assume(tokenEqual)
+ value := p.parseRvalue()
+ tree.Set(key.val, value)
+ case tokenComma:
+ if previous == nil {
+ p.raiseError(follow, "inline table cannot start with a comma")
+ }
+ if tokenIsComma(previous) {
+ p.raiseError(follow, "need field between two commas in inline table")
+ }
+ p.getToken()
+ default:
+ p.raiseError(follow, "unexpected token type in inline table: %s", follow.String())
+ }
+ previous = follow
+ }
+ if tokenIsComma(previous) {
+ p.raiseError(previous, "trailing comma at the end of inline table")
+ }
+ return tree
+}
+
+func (p *tomlParser) parseArray() interface{} {
+ var array []interface{}
+ arrayType := reflect.TypeOf(nil)
+ for {
+ follow := p.peek()
+ if follow == nil || follow.typ == tokenEOF {
+ p.raiseError(follow, "unterminated array")
+ }
+ if follow.typ == tokenRightBracket {
+ p.getToken()
+ break
+ }
+ val := p.parseRvalue()
+ if arrayType == nil {
+ arrayType = reflect.TypeOf(val)
+ }
+ if reflect.TypeOf(val) != arrayType {
+ p.raiseError(follow, "mixed types in array")
+ }
+ array = append(array, val)
+ follow = p.peek()
+ if follow == nil || follow.typ == tokenEOF {
+ p.raiseError(follow, "unterminated array")
+ }
+ if follow.typ != tokenRightBracket && follow.typ != tokenComma {
+ p.raiseError(follow, "missing comma")
+ }
+ if follow.typ == tokenComma {
+ p.getToken()
+ }
+ }
+ // An array of Trees is actually an array of inline
+ // tables, which is a shorthand for a table array. If the
+ // array was not converted from []interface{} to []*Tree,
+ // the two notations would not be equivalent.
+ if arrayType == reflect.TypeOf(newTree()) {
+ tomlArray := make([]*Tree, len(array))
+ for i, v := range array {
+ tomlArray[i] = v.(*Tree)
+ }
+ return tomlArray
+ }
+ return array
+}
+
+func parseToml(flow []token) *Tree {
+ result := newTree()
+ result.position = Position{1, 1}
+ parser := &tomlParser{
+ flowIdx: 0,
+ flow: flow,
+ tree: result,
+ currentTable: make([]string, 0),
+ seenTableKeys: make([]string, 0),
+ }
+ parser.run()
+ return result
+}
+
+func init() {
+ numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d])|_$|^_`)
+ hexNumberUnderscoreInvalidRegexp = regexp.MustCompile(`(^0x_)|([^\da-f]_|_[^\da-f])|_$|^_`)
+}
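For orientation, a package-internal sketch of how the lexer and parser fit together (loadSketch is hypothetical; the exported Load* functions in toml.go do essentially this, plus a recover that turns the parser's panics into errors):

    package toml

    // loadSketch is a hypothetical illustration of the pipeline: the lexer
    // produces a flat token stream and parseToml folds it into a Tree.
    // Note that raiseError panics on malformed input, so callers are expected
    // to recover, as the exported Load* functions do.
    func loadSketch(data []byte) *Tree {
        return parseToml(lexToml(data))
    }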
diff --git a/vendor/github.com/pelletier/go-toml/position.go b/vendor/github.com/pelletier/go-toml/position.go
new file mode 100644
index 00000000..c17bff87
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/position.go
@@ -0,0 +1,29 @@
+// Position support for go-toml
+
+package toml
+
+import (
+ "fmt"
+)
+
+// Position of a document element within a TOML document.
+//
+// Line and Col are both 1-indexed positions for the element's line number and
+// column number, respectively. Values of zero or less will cause Invalid()
+// to return true.
+type Position struct {
+ Line int // line within the document
+ Col int // column within the line
+}
+
+// String representation of the position.
+// Displays 1-indexed line and column numbers.
+func (p Position) String() string {
+ return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
+}
+
+// Invalid reports whether the position is invalid (i.e. Line or Col is zero
+// or negative).
+func (p Position) Invalid() bool {
+ return p.Line <= 0 || p.Col <= 0
+}
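A short hedged example of how positions surface through the public API (Tree.GetPosition lives in toml.go, which is not part of this hunk):

    package main

    import (
        "fmt"
        "log"

        "github.com/pelletier/go-toml"
    )

    func main() {
        tree, err := toml.Load("title = \"example\"\n")
        if err != nil {
            log.Fatal(err)
        }
        // GetPosition reports where a key was defined; a missing key yields a
        // zero Position, for which Invalid() returns true.
        fmt.Println(tree.GetPosition("title"))              // (1, 1)
        fmt.Println(tree.GetPosition("missing").Invalid())  // true
    }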
diff --git a/vendor/github.com/pelletier/go-toml/query/doc.go b/vendor/github.com/pelletier/go-toml/query/doc.go
new file mode 100644
index 00000000..ed63c110
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query/doc.go
@@ -0,0 +1,175 @@
+// Package query performs JSONPath-like queries on a TOML document.
+//
+// The query path implementation is based loosely on the JSONPath specification:
+// http://goessner.net/articles/JsonPath/.
+//
+// The idea behind a query path is to allow quick access to any element, or set
+// of elements within a TOML document, with a single expression.
+//
+// result, err := query.CompileAndExecute("$.foo.bar.baz", tree)
+//
+// This is roughly equivalent to:
+//
+// next := tree.Get("foo")
+// if next != nil {
+// next = next.Get("bar")
+// if next != nil {
+// next = next.Get("baz")
+// }
+// }
+// result := next
+//
+// err is non-nil if the query string fails to parse.
+//
+// If no node in the tree matches the query, result will simply contain an empty list of
+// items.
+//
+// As illustrated above, the query path is much more concise, especially when
+// the structure of the TOML file can vary. Rather than making assumptions about
+// a document's structure, a query allows the programmer to make structured
+// requests into the document, and get zero or more values as a result.
+//
+// Query syntax
+//
+// The syntax of a query begins with a root token, followed by any number of
+// sub-expressions:
+//
+// $
+// Root of the TOML tree. This must always come first.
+// .name
+// Selects child of this node, where 'name' is a TOML key
+// name.
+// ['name']
+// Selects child of this node, where 'name' is a string
+// containing a TOML key name.
+// [index]
+// Selects child array element at 'index'.
+// ..expr
+// Recursively selects all children, filtered by a union,
+// index, or slice expression.
+// ..*
+// Recursive selection of all nodes at this point in the
+// tree.
+// .*
+// Selects all children of the current node.
+// [expr,expr]
+// Union operator - a logical 'or' grouping of two or more
+// sub-expressions: index, key name, or filter.
+// [start:end:step]
+// Slice operator - selects array elements from start to
+// end-1, at the given step. All three arguments are
+// optional.
+// [?(filter)]
+// Named filter expression - the function 'filter' is
+// used to filter children at this node.
+//
+// Query Indexes And Slices
+//
+// Index expressions perform no bounds checking, and will contribute no
+// values to the result set if the provided index or index range is invalid.
+// Negative indexes represent values from the end of the array, counting backwards.
+//
+// // select the last index of the array named 'foo'
+// query.CompileAndExecute("$.foo[-1]", tree)
+//
+// Slice expressions are supported, by using ':' to separate a start/end index pair.
+//
+// // select up to the first five elements in the array
+// query.CompileAndExecute("$.foo[0:5]", tree)
+//
+// Slice expressions also allow negative indexes for the start and stop
+// arguments.
+//
+// // select all array elements.
+// query.CompileAndExecute("$.foo[0:-1]", tree)
+//
+// Slice expressions may have an optional stride/step parameter:
+//
+// // select every other element
+// query.CompileAndExecute("$.foo[0:-1:2]", tree)
+//
+// Slice start and end parameters are also optional:
+//
+// // these are all equivalent and select all the values in the array
+// query.CompileAndExecute("$.foo[:]", tree)
+// query.CompileAndExecute("$.foo[0:]", tree)
+// query.CompileAndExecute("$.foo[:-1]", tree)
+// query.CompileAndExecute("$.foo[0:-1:]", tree)
+// query.CompileAndExecute("$.foo[::1]", tree)
+// query.CompileAndExecute("$.foo[0::1]", tree)
+// query.CompileAndExecute("$.foo[:-1:1]", tree)
+// query.CompileAndExecute("$.foo[0:-1:1]", tree)
+//
+// Query Filters
+//
+// Query filters are used within a Union [,] or single Filter [] expression.
+// A filter only allows nodes that qualify through to the next expression,
+// and/or into the result set.
+//
+// // returns children of foo that are permitted by the 'bar' filter.
+// query.CompileAndExecute("$.foo[?(bar)]", tree)
+//
+// There are several filters provided with the library:
+//
+// tree
+// Allows nodes of type Tree.
+// int
+// Allows nodes of type int64.
+// float
+// Allows nodes of type float64.
+// string
+// Allows nodes of type string.
+// time
+// Allows nodes of type time.Time.
+// bool
+// Allows nodes of type bool.
+//
+// Query Results
+//
+// An executed query returns a Result object. This contains the nodes
+// in the TOML tree that qualify the query expression. Position information
+// is also available for each value in the set.
+//
+// // display the results of a query
+// results, _ := query.CompileAndExecute("$.foo.bar.baz", tree)
+// for idx, value := range results.Values() {
+// fmt.Printf("%v: %v\n", results.Positions()[idx], value)
+// }
+//
+// Compiled Queries
+//
+// Queries may be executed directly on a Tree object, or compiled ahead
+// of time and executed discretely. The former is more convenient, but has the
+// penalty of having to recompile the query expression each time.
+//
+// // basic query
+// results, _ := query.CompileAndExecute("$.foo.bar.baz", tree)
+//
+// // compiled query
+// query, err := query.Compile("$.foo.bar.baz")
+// results := query.Execute(tree)
+//
+// // run the compiled query again on a different tree
+// moreResults := query.Execute(anotherTree)
+//
+// User Defined Query Filters
+//
+// Filter expressions may also be user defined by using the SetFilter()
+// function on the Query object. The function must return true/false, which
+// signifies if the passed node is kept or discarded, respectively.
+//
+// // create a query that references a user-defined filter
+// query, _ := query.Compile("$[?(bazOnly)]")
+//
+// // define the filter, and assign it to the query
+// query.SetFilter("bazOnly", func(node interface{}) bool{
+// if tree, ok := node.(*toml.Tree); ok {
+// return tree.Has("baz")
+// }
+// return false // reject all other node types
+// })
+//
+// // run the query
+// query.Execute(tree)
+//
+package query
diff --git a/vendor/github.com/pelletier/go-toml/query/lexer.go b/vendor/github.com/pelletier/go-toml/query/lexer.go
new file mode 100644
index 00000000..2dc31940
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query/lexer.go
@@ -0,0 +1,357 @@
+// TOML JSONPath lexer.
+//
+// Written using the principles developed by Rob Pike in
+// http://www.youtube.com/watch?v=HxaD_trXwRE
+
+package query
+
+import (
+ "fmt"
+ "github.com/pelletier/go-toml"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// Lexer state function
+type queryLexStateFn func() queryLexStateFn
+
+// Lexer definition
+type queryLexer struct {
+ input string
+ start int
+ pos int
+ width int
+ tokens chan token
+ depth int
+ line int
+ col int
+ stringTerm string
+}
+
+func (l *queryLexer) run() {
+ for state := l.lexVoid; state != nil; {
+ state = state()
+ }
+ close(l.tokens)
+}
+
+func (l *queryLexer) nextStart() {
+ // iterate by runes (utf8 characters)
+ // search for newlines and advance line/col counts
+ for i := l.start; i < l.pos; {
+ r, width := utf8.DecodeRuneInString(l.input[i:])
+ if r == '\n' {
+ l.line++
+ l.col = 1
+ } else {
+ l.col++
+ }
+ i += width
+ }
+ // advance start position to next token
+ l.start = l.pos
+}
+
+func (l *queryLexer) emit(t tokenType) {
+ l.tokens <- token{
+ Position: toml.Position{Line: l.line, Col: l.col},
+ typ: t,
+ val: l.input[l.start:l.pos],
+ }
+ l.nextStart()
+}
+
+func (l *queryLexer) emitWithValue(t tokenType, value string) {
+ l.tokens <- token{
+ Position: toml.Position{Line: l.line, Col: l.col},
+ typ: t,
+ val: value,
+ }
+ l.nextStart()
+}
+
+func (l *queryLexer) next() rune {
+ if l.pos >= len(l.input) {
+ l.width = 0
+ return eof
+ }
+ var r rune
+ r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
+ l.pos += l.width
+ return r
+}
+
+func (l *queryLexer) ignore() {
+ l.nextStart()
+}
+
+func (l *queryLexer) backup() {
+ l.pos -= l.width
+}
+
+func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
+ l.tokens <- token{
+ Position: toml.Position{Line: l.line, Col: l.col},
+ typ: tokenError,
+ val: fmt.Sprintf(format, args...),
+ }
+ return nil
+}
+
+func (l *queryLexer) peek() rune {
+ r := l.next()
+ l.backup()
+ return r
+}
+
+func (l *queryLexer) accept(valid string) bool {
+ if strings.ContainsRune(valid, l.next()) {
+ return true
+ }
+ l.backup()
+ return false
+}
+
+func (l *queryLexer) follow(next string) bool {
+ return strings.HasPrefix(l.input[l.pos:], next)
+}
+
+func (l *queryLexer) lexVoid() queryLexStateFn {
+ for {
+ next := l.peek()
+ switch next {
+ case '$':
+ l.pos++
+ l.emit(tokenDollar)
+ continue
+ case '.':
+ if l.follow("..") {
+ l.pos += 2
+ l.emit(tokenDotDot)
+ } else {
+ l.pos++
+ l.emit(tokenDot)
+ }
+ continue
+ case '[':
+ l.pos++
+ l.emit(tokenLeftBracket)
+ continue
+ case ']':
+ l.pos++
+ l.emit(tokenRightBracket)
+ continue
+ case ',':
+ l.pos++
+ l.emit(tokenComma)
+ continue
+ case '*':
+ l.pos++
+ l.emit(tokenStar)
+ continue
+ case '(':
+ l.pos++
+ l.emit(tokenLeftParen)
+ continue
+ case ')':
+ l.pos++
+ l.emit(tokenRightParen)
+ continue
+ case '?':
+ l.pos++
+ l.emit(tokenQuestion)
+ continue
+ case ':':
+ l.pos++
+ l.emit(tokenColon)
+ continue
+ case '\'':
+ l.ignore()
+ l.stringTerm = string(next)
+ return l.lexString
+ case '"':
+ l.ignore()
+ l.stringTerm = string(next)
+ return l.lexString
+ }
+
+ if isSpace(next) {
+ l.next()
+ l.ignore()
+ continue
+ }
+
+ if isAlphanumeric(next) {
+ return l.lexKey
+ }
+
+ if next == '+' || next == '-' || isDigit(next) {
+ return l.lexNumber
+ }
+
+ if l.next() == eof {
+ break
+ }
+
+ return l.errorf("unexpected char: '%v'", next)
+ }
+ l.emit(tokenEOF)
+ return nil
+}
+
+func (l *queryLexer) lexKey() queryLexStateFn {
+ for {
+ next := l.peek()
+ if !isAlphanumeric(next) {
+ l.emit(tokenKey)
+ return l.lexVoid
+ }
+
+ if l.next() == eof {
+ break
+ }
+ }
+ l.emit(tokenEOF)
+ return nil
+}
+
+func (l *queryLexer) lexString() queryLexStateFn {
+ l.pos++
+ l.ignore()
+ growingString := ""
+
+ for {
+ if l.follow(l.stringTerm) {
+ l.emitWithValue(tokenString, growingString)
+ l.pos++
+ l.ignore()
+ return l.lexVoid
+ }
+
+ if l.follow("\\\"") {
+ l.pos++
+ growingString += "\""
+ } else if l.follow("\\'") {
+ l.pos++
+ growingString += "'"
+ } else if l.follow("\\n") {
+ l.pos++
+ growingString += "\n"
+ } else if l.follow("\\b") {
+ l.pos++
+ growingString += "\b"
+ } else if l.follow("\\f") {
+ l.pos++
+ growingString += "\f"
+ } else if l.follow("\\/") {
+ l.pos++
+ growingString += "/"
+ } else if l.follow("\\t") {
+ l.pos++
+ growingString += "\t"
+ } else if l.follow("\\r") {
+ l.pos++
+ growingString += "\r"
+ } else if l.follow("\\\\") {
+ l.pos++
+ growingString += "\\"
+ } else if l.follow("\\u") {
+ l.pos += 2
+ code := ""
+ for i := 0; i < 4; i++ {
+ c := l.peek()
+ l.pos++
+ if !isHexDigit(c) {
+ return l.errorf("unfinished unicode escape")
+ }
+ code = code + string(c)
+ }
+ l.pos--
+ intcode, err := strconv.ParseInt(code, 16, 32)
+ if err != nil {
+ return l.errorf("invalid unicode escape: \\u" + code)
+ }
+ growingString += string(rune(intcode))
+ } else if l.follow("\\U") {
+ l.pos += 2
+ code := ""
+ for i := 0; i < 8; i++ {
+ c := l.peek()
+ l.pos++
+ if !isHexDigit(c) {
+ return l.errorf("unfinished unicode escape")
+ }
+ code = code + string(c)
+ }
+ l.pos--
+ intcode, err := strconv.ParseInt(code, 16, 32)
+ if err != nil {
+ return l.errorf("invalid unicode escape: \\U" + code)
+ }
+ growingString += string(rune(intcode))
+ } else if l.follow("\\") {
+ l.pos++
+ return l.errorf("invalid escape sequence: \\" + string(l.peek()))
+ } else {
+ growingString += string(l.peek())
+ }
+
+ if l.next() == eof {
+ break
+ }
+ }
+
+ return l.errorf("unclosed string")
+}
+
+func (l *queryLexer) lexNumber() queryLexStateFn {
+ l.ignore()
+ if !l.accept("+") {
+ l.accept("-")
+ }
+ pointSeen := false
+ digitSeen := false
+ for {
+ next := l.next()
+ if next == '.' {
+ if pointSeen {
+ return l.errorf("cannot have two dots in one float")
+ }
+ if !isDigit(l.peek()) {
+ return l.errorf("float cannot end with a dot")
+ }
+ pointSeen = true
+ } else if isDigit(next) {
+ digitSeen = true
+ } else {
+ l.backup()
+ break
+ }
+ if pointSeen && !digitSeen {
+ return l.errorf("cannot start float with a dot")
+ }
+ }
+
+ if !digitSeen {
+ return l.errorf("no digit in that number")
+ }
+ if pointSeen {
+ l.emit(tokenFloat)
+ } else {
+ l.emit(tokenInteger)
+ }
+ return l.lexVoid
+}
+
+// Entry point
+func lexQuery(input string) chan token {
+ l := &queryLexer{
+ input: input,
+ tokens: make(chan token),
+ line: 1,
+ col: 1,
+ }
+ go l.run()
+ return l.tokens
+}
diff --git a/vendor/github.com/pelletier/go-toml/query/match.go b/vendor/github.com/pelletier/go-toml/query/match.go
new file mode 100644
index 00000000..d7bb15a4
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query/match.go
@@ -0,0 +1,232 @@
+package query
+
+import (
+ "fmt"
+ "github.com/pelletier/go-toml"
+)
+
+// base match
+type matchBase struct {
+ next pathFn
+}
+
+func (f *matchBase) setNext(next pathFn) {
+ f.next = next
+}
+
+// terminating functor - gathers results
+type terminatingFn struct {
+ // empty
+}
+
+func newTerminatingFn() *terminatingFn {
+ return &terminatingFn{}
+}
+
+func (f *terminatingFn) setNext(next pathFn) {
+ // do nothing
+}
+
+func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
+ ctx.result.appendResult(node, ctx.lastPosition)
+}
+
+// match single key
+type matchKeyFn struct {
+ matchBase
+ Name string
+}
+
+func newMatchKeyFn(name string) *matchKeyFn {
+ return &matchKeyFn{Name: name}
+}
+
+func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
+ if array, ok := node.([]*toml.Tree); ok {
+ for _, tree := range array {
+ item := tree.Get(f.Name)
+ if item != nil {
+ ctx.lastPosition = tree.GetPosition(f.Name)
+ f.next.call(item, ctx)
+ }
+ }
+ } else if tree, ok := node.(*toml.Tree); ok {
+ item := tree.Get(f.Name)
+ if item != nil {
+ ctx.lastPosition = tree.GetPosition(f.Name)
+ f.next.call(item, ctx)
+ }
+ }
+}
+
+// match single index
+type matchIndexFn struct {
+ matchBase
+ Idx int
+}
+
+func newMatchIndexFn(idx int) *matchIndexFn {
+ return &matchIndexFn{Idx: idx}
+}
+
+func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
+ if arr, ok := node.([]interface{}); ok {
+ if f.Idx < len(arr) && f.Idx >= 0 {
+ if treesArray, ok := node.([]*toml.Tree); ok {
+ if len(treesArray) > 0 {
+ ctx.lastPosition = treesArray[0].Position()
+ }
+ }
+ f.next.call(arr[f.Idx], ctx)
+ }
+ }
+}
+
+// filter by slicing
+type matchSliceFn struct {
+ matchBase
+ Start, End, Step int
+}
+
+func newMatchSliceFn(start, end, step int) *matchSliceFn {
+ return &matchSliceFn{Start: start, End: end, Step: step}
+}
+
+func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
+ if arr, ok := node.([]interface{}); ok {
+ // adjust indexes for negative values, reverse ordering
+ realStart, realEnd := f.Start, f.End
+ if realStart < 0 {
+ realStart = len(arr) + realStart
+ }
+ if realEnd < 0 {
+ realEnd = len(arr) + realEnd
+ }
+ if realEnd < realStart {
+ realEnd, realStart = realStart, realEnd // swap
+ }
+ // loop and gather
+ for idx := realStart; idx < realEnd; idx += f.Step {
+ if treesArray, ok := node.([]*toml.Tree); ok {
+ if len(treesArray) > 0 {
+ ctx.lastPosition = treesArray[0].Position()
+ }
+ }
+ f.next.call(arr[idx], ctx)
+ }
+ }
+}
+
+// match anything
+type matchAnyFn struct {
+ matchBase
+}
+
+func newMatchAnyFn() *matchAnyFn {
+ return &matchAnyFn{}
+}
+
+func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
+ if tree, ok := node.(*toml.Tree); ok {
+ for _, k := range tree.Keys() {
+ v := tree.Get(k)
+ ctx.lastPosition = tree.GetPosition(k)
+ f.next.call(v, ctx)
+ }
+ }
+}
+
+// filter through union
+type matchUnionFn struct {
+ Union []pathFn
+}
+
+func (f *matchUnionFn) setNext(next pathFn) {
+ for _, fn := range f.Union {
+ fn.setNext(next)
+ }
+}
+
+func (f *matchUnionFn) call(node interface{}, ctx *queryContext) {
+ for _, fn := range f.Union {
+ fn.call(node, ctx)
+ }
+}
+
+// match every single last node in the tree
+type matchRecursiveFn struct {
+ matchBase
+}
+
+func newMatchRecursiveFn() *matchRecursiveFn {
+ return &matchRecursiveFn{}
+}
+
+func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
+ originalPosition := ctx.lastPosition
+ if tree, ok := node.(*toml.Tree); ok {
+ var visit func(tree *toml.Tree)
+ visit = func(tree *toml.Tree) {
+ for _, k := range tree.Keys() {
+ v := tree.Get(k)
+ ctx.lastPosition = tree.GetPosition(k)
+ f.next.call(v, ctx)
+ switch node := v.(type) {
+ case *toml.Tree:
+ visit(node)
+ case []*toml.Tree:
+ for _, subtree := range node {
+ visit(subtree)
+ }
+ }
+ }
+ }
+ ctx.lastPosition = originalPosition
+ f.next.call(tree, ctx)
+ visit(tree)
+ }
+}
+
+// match based on an externally provided functional filter
+type matchFilterFn struct {
+ matchBase
+ Pos toml.Position
+ Name string
+}
+
+func newMatchFilterFn(name string, pos toml.Position) *matchFilterFn {
+ return &matchFilterFn{Name: name, Pos: pos}
+}
+
+func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
+ fn, ok := (*ctx.filters)[f.Name]
+ if !ok {
+ panic(fmt.Sprintf("%s: query context does not have filter '%s'",
+ f.Pos.String(), f.Name))
+ }
+ switch castNode := node.(type) {
+ case *toml.Tree:
+ for _, k := range castNode.Keys() {
+ v := castNode.Get(k)
+ if fn(v) {
+ ctx.lastPosition = castNode.GetPosition(k)
+ f.next.call(v, ctx)
+ }
+ }
+ case []*toml.Tree:
+ for _, v := range castNode {
+ if fn(v) {
+ if len(castNode) > 0 {
+ ctx.lastPosition = castNode[0].Position()
+ }
+ f.next.call(v, ctx)
+ }
+ }
+ case []interface{}:
+ for _, v := range castNode {
+ if fn(v) {
+ f.next.call(v, ctx)
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/query/parser.go b/vendor/github.com/pelletier/go-toml/query/parser.go
new file mode 100644
index 00000000..5f69b70d
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query/parser.go
@@ -0,0 +1,275 @@
+/*
+ Based on the "jsonpath" spec/concept.
+
+ http://goessner.net/articles/JsonPath/
+ https://code.google.com/p/json-path/
+*/
+
+package query
+
+import (
+ "fmt"
+)
+
+const maxInt = int(^uint(0) >> 1)
+
+type queryParser struct {
+ flow chan token
+ tokensBuffer []token
+ query *Query
+ union []pathFn
+ err error
+}
+
+type queryParserStateFn func() queryParserStateFn
+
+// Formats an error message based on a token, records it, and stops the parse
+func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn {
+ p.err = fmt.Errorf(tok.Position.String()+": "+msg, args...)
+ return nil // trigger parse to end
+}
+
+func (p *queryParser) run() {
+ for state := p.parseStart; state != nil; {
+ state = state()
+ }
+}
+
+func (p *queryParser) backup(tok *token) {
+ p.tokensBuffer = append(p.tokensBuffer, *tok)
+}
+
+func (p *queryParser) peek() *token {
+ if len(p.tokensBuffer) != 0 {
+ return &(p.tokensBuffer[0])
+ }
+
+ tok, ok := <-p.flow
+ if !ok {
+ return nil
+ }
+ p.backup(&tok)
+ return &tok
+}
+
+func (p *queryParser) lookahead(types ...tokenType) bool {
+ result := true
+ buffer := []token{}
+
+ for _, typ := range types {
+ tok := p.getToken()
+ if tok == nil {
+ result = false
+ break
+ }
+ buffer = append(buffer, *tok)
+ if tok.typ != typ {
+ result = false
+ break
+ }
+ }
+ // add the tokens back to the buffer, and return
+ p.tokensBuffer = append(p.tokensBuffer, buffer...)
+ return result
+}
+
+func (p *queryParser) getToken() *token {
+ if len(p.tokensBuffer) != 0 {
+ tok := p.tokensBuffer[0]
+ p.tokensBuffer = p.tokensBuffer[1:]
+ return &tok
+ }
+ tok, ok := <-p.flow
+ if !ok {
+ return nil
+ }
+ return &tok
+}
+
+func (p *queryParser) parseStart() queryParserStateFn {
+ tok := p.getToken()
+
+ if tok == nil || tok.typ == tokenEOF {
+ return nil
+ }
+
+ if tok.typ != tokenDollar {
+ return p.parseError(tok, "Expected '$' at start of expression")
+ }
+
+ return p.parseMatchExpr
+}
+
+// handle '.' prefix, '[]', and '..'
+func (p *queryParser) parseMatchExpr() queryParserStateFn {
+ tok := p.getToken()
+ switch tok.typ {
+ case tokenDotDot:
+ p.query.appendPath(&matchRecursiveFn{})
+ // nested parse for '..'
+ tok := p.getToken()
+ switch tok.typ {
+ case tokenKey:
+ p.query.appendPath(newMatchKeyFn(tok.val))
+ return p.parseMatchExpr
+ case tokenLeftBracket:
+ return p.parseBracketExpr
+ case tokenStar:
+ // do nothing - the recursive predicate is enough
+ return p.parseMatchExpr
+ }
+
+ case tokenDot:
+ // nested parse for '.'
+ tok := p.getToken()
+ switch tok.typ {
+ case tokenKey:
+ p.query.appendPath(newMatchKeyFn(tok.val))
+ return p.parseMatchExpr
+ case tokenStar:
+ p.query.appendPath(&matchAnyFn{})
+ return p.parseMatchExpr
+ }
+
+ case tokenLeftBracket:
+ return p.parseBracketExpr
+
+ case tokenEOF:
+ return nil // allow EOF at this stage
+ }
+ return p.parseError(tok, "expected match expression")
+}
+
+func (p *queryParser) parseBracketExpr() queryParserStateFn {
+ if p.lookahead(tokenInteger, tokenColon) {
+ return p.parseSliceExpr
+ }
+ if p.peek().typ == tokenColon {
+ return p.parseSliceExpr
+ }
+ return p.parseUnionExpr
+}
+
+func (p *queryParser) parseUnionExpr() queryParserStateFn {
+ var tok *token
+
+ // this state can be traversed after some sub-expressions
+ // so be careful when setting up state in the parser
+ if p.union == nil {
+ p.union = []pathFn{}
+ }
+
+loop: // labeled loop for easy breaking
+ for {
+ if len(p.union) > 0 {
+ // parse delimiter or terminator
+ tok = p.getToken()
+ switch tok.typ {
+ case tokenComma:
+ // do nothing
+ case tokenRightBracket:
+ break loop
+ default:
+ return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val)
+ }
+ }
+
+ // parse sub expression
+ tok = p.getToken()
+ switch tok.typ {
+ case tokenInteger:
+ p.union = append(p.union, newMatchIndexFn(tok.Int()))
+ case tokenKey:
+ p.union = append(p.union, newMatchKeyFn(tok.val))
+ case tokenString:
+ p.union = append(p.union, newMatchKeyFn(tok.val))
+ case tokenQuestion:
+ return p.parseFilterExpr
+ default:
+ return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union))
+ }
+ }
+
+ // if there is only one sub-expression, use that instead
+ if len(p.union) == 1 {
+ p.query.appendPath(p.union[0])
+ } else {
+ p.query.appendPath(&matchUnionFn{p.union})
+ }
+
+ p.union = nil // clear out state
+ return p.parseMatchExpr
+}
+
+func (p *queryParser) parseSliceExpr() queryParserStateFn {
+ // init slice to grab all elements
+ start, end, step := 0, maxInt, 1
+
+ // parse optional start
+ tok := p.getToken()
+ if tok.typ == tokenInteger {
+ start = tok.Int()
+ tok = p.getToken()
+ }
+ if tok.typ != tokenColon {
+ return p.parseError(tok, "expected ':'")
+ }
+
+ // parse optional end
+ tok = p.getToken()
+ if tok.typ == tokenInteger {
+ end = tok.Int()
+ tok = p.getToken()
+ }
+ if tok.typ == tokenRightBracket {
+ p.query.appendPath(newMatchSliceFn(start, end, step))
+ return p.parseMatchExpr
+ }
+ if tok.typ != tokenColon {
+ return p.parseError(tok, "expected ']' or ':'")
+ }
+
+ // parse optional step
+ tok = p.getToken()
+ if tok.typ == tokenInteger {
+ step = tok.Int()
+ if step < 0 {
+ return p.parseError(tok, "step must be a positive value")
+ }
+ tok = p.getToken()
+ }
+ if tok.typ != tokenRightBracket {
+ return p.parseError(tok, "expected ']'")
+ }
+
+ p.query.appendPath(newMatchSliceFn(start, end, step))
+ return p.parseMatchExpr
+}
+
+func (p *queryParser) parseFilterExpr() queryParserStateFn {
+ tok := p.getToken()
+ if tok.typ != tokenLeftParen {
+ return p.parseError(tok, "expected left-parenthesis for filter expression")
+ }
+ tok = p.getToken()
+ if tok.typ != tokenKey && tok.typ != tokenString {
+ return p.parseError(tok, "expected key or string for filter function name")
+ }
+ name := tok.val
+ tok = p.getToken()
+ if tok.typ != tokenRightParen {
+ return p.parseError(tok, "expected right-parenthesis for filter expression")
+ }
+ p.union = append(p.union, newMatchFilterFn(name, tok.Position))
+ return p.parseUnionExpr
+}
+
+func parseQuery(flow chan token) (*Query, error) {
+ parser := &queryParser{
+ flow: flow,
+ tokensBuffer: []token{},
+ query: newQuery(),
+ }
+ parser.run()
+ return parser.query, parser.err
+}
diff --git a/vendor/github.com/pelletier/go-toml/query/query.go b/vendor/github.com/pelletier/go-toml/query/query.go
new file mode 100644
index 00000000..1c6cd801
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query/query.go
@@ -0,0 +1,158 @@
+package query
+
+import (
+ "time"
+
+ "github.com/pelletier/go-toml"
+)
+
+// NodeFilterFn represents a user-defined filter function, for use with
+// Query.SetFilter().
+//
+// The return value of the function must indicate if 'node' is to be included
+// at this stage of the TOML path. Returning true will include the node, and
+// returning false will exclude it.
+//
+// NOTE: Care should be taken to write filter callbacks such that they are safe
+// to use from multiple goroutines.
+type NodeFilterFn func(node interface{}) bool
+
+// Result is the result of Executing a Query.
+type Result struct {
+ items []interface{}
+ positions []toml.Position
+}
+
+// appends a value/position pair to the result set.
+func (r *Result) appendResult(node interface{}, pos toml.Position) {
+ r.items = append(r.items, node)
+ r.positions = append(r.positions, pos)
+}
+
+// Values is the set of values within a Result. Values are not guaranteed to
+// be in document order, and their order may differ each time a query is
+// executed.
+func (r Result) Values() []interface{} {
+ return r.items
+}
+
+// Positions is a set of positions for values within a Result. Each index
+// in Positions() corresponds to the entry in Values() of the same index.
+func (r Result) Positions() []toml.Position {
+ return r.positions
+}
+
+// runtime context for executing query paths
+type queryContext struct {
+ result *Result
+ filters *map[string]NodeFilterFn
+ lastPosition toml.Position
+}
+
+// generic path functor interface
+type pathFn interface {
+ setNext(next pathFn)
+ // it is the caller's responsibility to set the ctx.lastPosition before invoking call()
+ // node can be one of: *toml.Tree, []*toml.Tree, or a scalar
+ call(node interface{}, ctx *queryContext)
+}
+
+// A Query is the representation of a compiled TOML path. A Query is safe
+// for concurrent use by multiple goroutines.
+type Query struct {
+ root pathFn
+ tail pathFn
+ filters *map[string]NodeFilterFn
+}
+
+func newQuery() *Query {
+ return &Query{
+ root: nil,
+ tail: nil,
+ filters: &defaultFilterFunctions,
+ }
+}
+
+func (q *Query) appendPath(next pathFn) {
+ if q.root == nil {
+ q.root = next
+ } else {
+ q.tail.setNext(next)
+ }
+ q.tail = next
+ next.setNext(newTerminatingFn()) // init the next functor
+}
+
+// Compile compiles a TOML path expression. The returned Query can be used
+// to match elements within a Tree and its descendants. See Execute.
+func Compile(path string) (*Query, error) {
+ return parseQuery(lexQuery(path))
+}
+
+// Execute executes a query against a Tree, and returns the result of the query.
+func (q *Query) Execute(tree *toml.Tree) *Result {
+ result := &Result{
+ items: []interface{}{},
+ positions: []toml.Position{},
+ }
+ if q.root == nil {
+ result.appendResult(tree, tree.GetPosition(""))
+ } else {
+ ctx := &queryContext{
+ result: result,
+ filters: q.filters,
+ }
+ ctx.lastPosition = tree.Position()
+ q.root.call(tree, ctx)
+ }
+ return result
+}
+
+// CompileAndExecute is a shorthand for Compile(path) followed by Execute(tree).
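+//
+// A minimal usage sketch (document contents assumed for illustration):
+//
+//   tree, _ := toml.Load("[foo]\nbar = 42")
+//   results, err := query.CompileAndExecute("$.foo.bar", tree)
+//   if err == nil {
+//     fmt.Println(results.Values()) // [42]
+//   }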
+func CompileAndExecute(path string, tree *toml.Tree) (*Result, error) {
+ query, err := Compile(path)
+ if err != nil {
+ return nil, err
+ }
+ return query.Execute(tree), nil
+}
+
+// SetFilter sets a user-defined filter function. These may be used inside
+// "?(..)" query expressions to filter TOML document elements within a query.
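+//
+// A condensed sketch (the filter name is chosen for illustration):
+//
+//   q, _ := query.Compile("$[?(bazOnly)]")
+//   q.SetFilter("bazOnly", func(node interface{}) bool {
+//     t, ok := node.(*toml.Tree)
+//     return ok && t.Has("baz")
+//   })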
+func (q *Query) SetFilter(name string, fn NodeFilterFn) {
+ if q.filters == &defaultFilterFunctions {
+ // clone the static table
+ q.filters = &map[string]NodeFilterFn{}
+ for k, v := range defaultFilterFunctions {
+ (*q.filters)[k] = v
+ }
+ }
+ (*q.filters)[name] = fn
+}
+
+var defaultFilterFunctions = map[string]NodeFilterFn{
+ "tree": func(node interface{}) bool {
+ _, ok := node.(*toml.Tree)
+ return ok
+ },
+ "int": func(node interface{}) bool {
+ _, ok := node.(int64)
+ return ok
+ },
+ "float": func(node interface{}) bool {
+ _, ok := node.(float64)
+ return ok
+ },
+ "string": func(node interface{}) bool {
+ _, ok := node.(string)
+ return ok
+ },
+ "time": func(node interface{}) bool {
+ _, ok := node.(time.Time)
+ return ok
+ },
+ "bool": func(node interface{}) bool {
+ _, ok := node.(bool)
+ return ok
+ },
+}
diff --git a/vendor/github.com/pelletier/go-toml/query/tokens.go b/vendor/github.com/pelletier/go-toml/query/tokens.go
new file mode 100644
index 00000000..9ae579de
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query/tokens.go
@@ -0,0 +1,106 @@
+package query
+
+import (
+ "fmt"
+ "github.com/pelletier/go-toml"
+ "strconv"
+ "unicode"
+)
+
+// Define tokens
+type tokenType int
+
+const (
+ eof = -(iota + 1)
+)
+
+const (
+ tokenError tokenType = iota
+ tokenEOF
+ tokenKey
+ tokenString
+ tokenInteger
+ tokenFloat
+ tokenLeftBracket
+ tokenRightBracket
+ tokenLeftParen
+ tokenRightParen
+ tokenComma
+ tokenColon
+ tokenDollar
+ tokenStar
+ tokenQuestion
+ tokenDot
+ tokenDotDot
+)
+
+var tokenTypeNames = []string{
+ "Error",
+ "EOF",
+ "Key",
+ "String",
+ "Integer",
+ "Float",
+ "[",
+ "]",
+ "(",
+ ")",
+ ",",
+ ":",
+ "$",
+ "*",
+ "?",
+ ".",
+ "..",
+}
+
+type token struct {
+ toml.Position
+ typ tokenType
+ val string
+}
+
+func (tt tokenType) String() string {
+ idx := int(tt)
+ if idx < len(tokenTypeNames) {
+ return tokenTypeNames[idx]
+ }
+ return "Unknown"
+}
+
+func (t token) Int() int {
+ if result, err := strconv.Atoi(t.val); err != nil {
+ panic(err)
+ } else {
+ return result
+ }
+}
+
+func (t token) String() string {
+ switch t.typ {
+ case tokenEOF:
+ return "EOF"
+ case tokenError:
+ return t.val
+ }
+
+ return fmt.Sprintf("%q", t.val)
+}
+
+func isSpace(r rune) bool {
+ return r == ' ' || r == '\t'
+}
+
+func isAlphanumeric(r rune) bool {
+ return unicode.IsLetter(r) || r == '_'
+}
+
+func isDigit(r rune) bool {
+ return unicode.IsNumber(r)
+}
+
+func isHexDigit(r rune) bool {
+ return isDigit(r) ||
+ (r >= 'a' && r <= 'f') ||
+ (r >= 'A' && r <= 'F')
+}
diff --git a/vendor/github.com/pelletier/go-toml/token.go b/vendor/github.com/pelletier/go-toml/token.go
new file mode 100644
index 00000000..1a908134
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/token.go
@@ -0,0 +1,144 @@
+package toml
+
+import (
+ "fmt"
+ "strconv"
+ "unicode"
+)
+
+// Define tokens
+type tokenType int
+
+const (
+ eof = -(iota + 1)
+)
+
+const (
+ tokenError tokenType = iota
+ tokenEOF
+ tokenComment
+ tokenKey
+ tokenString
+ tokenInteger
+ tokenTrue
+ tokenFalse
+ tokenFloat
+ tokenInf
+ tokenNan
+ tokenEqual
+ tokenLeftBracket
+ tokenRightBracket
+ tokenLeftCurlyBrace
+ tokenRightCurlyBrace
+ tokenLeftParen
+ tokenRightParen
+ tokenDoubleLeftBracket
+ tokenDoubleRightBracket
+ tokenDate
+ tokenKeyGroup
+ tokenKeyGroupArray
+ tokenComma
+ tokenColon
+ tokenDollar
+ tokenStar
+ tokenQuestion
+ tokenDot
+ tokenDotDot
+ tokenEOL
+)
+
+var tokenTypeNames = []string{
+ "Error",
+ "EOF",
+ "Comment",
+ "Key",
+ "String",
+ "Integer",
+ "True",
+ "False",
+ "Float",
+ "Inf",
+ "NaN",
+ "=",
+ "[",
+ "]",
+ "{",
+ "}",
+ "(",
+ ")",
+ "]]",
+ "[[",
+ "Date",
+ "KeyGroup",
+ "KeyGroupArray",
+ ",",
+ ":",
+ "$",
+ "*",
+ "?",
+ ".",
+ "..",
+ "EOL",
+}
+
+type token struct {
+ Position
+ typ tokenType
+ val string
+}
+
+func (tt tokenType) String() string {
+ idx := int(tt)
+ if idx < len(tokenTypeNames) {
+ return tokenTypeNames[idx]
+ }
+ return "Unknown"
+}
+
+func (t token) Int() int {
+ if result, err := strconv.Atoi(t.val); err != nil {
+ panic(err)
+ } else {
+ return result
+ }
+}
+
+func (t token) String() string {
+ switch t.typ {
+ case tokenEOF:
+ return "EOF"
+ case tokenError:
+ return t.val
+ }
+
+ return fmt.Sprintf("%q", t.val)
+}
+
+func isSpace(r rune) bool {
+ return r == ' ' || r == '\t'
+}
+
+func isAlphanumeric(r rune) bool {
+ return unicode.IsLetter(r) || r == '_'
+}
+
+func isKeyChar(r rune) bool {
+ // Keys start with the first character that isn't whitespace or [ and end
+ // with the last non-whitespace character before the equals sign. Keys
+ // cannot contain a # character.
+ return !(r == '\r' || r == '\n' || r == eof || r == '=')
+}
+
+func isKeyStartChar(r rune) bool {
+ return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '[')
+}
+
+func isDigit(r rune) bool {
+ return unicode.IsNumber(r)
+}
+
+func isHexDigit(r rune) bool {
+ return isDigit(r) ||
+ (r >= 'a' && r <= 'f') ||
+ (r >= 'A' && r <= 'F')
+}
diff --git a/vendor/github.com/pelletier/go-toml/toml.go b/vendor/github.com/pelletier/go-toml/toml.go
new file mode 100644
index 00000000..05493a44
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/toml.go
@@ -0,0 +1,309 @@
+package toml
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "runtime"
+ "strings"
+)
+
+type tomlValue struct {
+ value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list
+ comment string
+ commented bool
+ position Position
+}
+
+// Tree is the result of the parsing of a TOML file.
+type Tree struct {
+ values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree
+ comment string
+ commented bool
+ position Position
+}
+
+func newTree() *Tree {
+ return &Tree{
+ values: make(map[string]interface{}),
+ position: Position{},
+ }
+}
+
+// TreeFromMap initializes a new Tree object using the given map.
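+//
+// Sketch (map contents chosen for illustration):
+//
+//   t, err := toml.TreeFromMap(map[string]interface{}{"answer": int64(42)})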
+func TreeFromMap(m map[string]interface{}) (*Tree, error) {
+ result, err := toTree(m)
+ if err != nil {
+ return nil, err
+ }
+ return result.(*Tree), nil
+}
+
+// Position returns the position of the tree.
+func (t *Tree) Position() Position {
+ return t.position
+}
+
+// Has returns a boolean indicating if the given key exists.
+func (t *Tree) Has(key string) bool {
+ if key == "" {
+ return false
+ }
+ return t.HasPath(strings.Split(key, "."))
+}
+
+// HasPath returns true if the given path of keys exists, false otherwise.
+func (t *Tree) HasPath(keys []string) bool {
+ return t.GetPath(keys) != nil
+}
+
+// Keys returns the keys of the toplevel tree (does not recurse).
+func (t *Tree) Keys() []string {
+ keys := make([]string, len(t.values))
+ i := 0
+ for k := range t.values {
+ keys[i] = k
+ i++
+ }
+ return keys
+}
+
+// Get the value at key in the Tree.
+// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings.
+// If you need to retrieve non-bare keys, use GetPath.
+// Returns nil if the path does not exist in the tree.
+// If key is empty, the current tree is returned.
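+//
+// Sketch (document contents assumed for illustration):
+//
+//   tree, _ := toml.Load("[a.b]\nc = 1")
+//   v := tree.Get("a.b.c") // int64(1)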
+func (t *Tree) Get(key string) interface{} {
+ if key == "" {
+ return t
+ }
+ return t.GetPath(strings.Split(key, "."))
+}
+
+// GetPath returns the element in the tree indicated by 'keys'.
+// If keys is of length zero, the current tree is returned.
+func (t *Tree) GetPath(keys []string) interface{} {
+ if len(keys) == 0 {
+ return t
+ }
+ subtree := t
+ for _, intermediateKey := range keys[:len(keys)-1] {
+ value, exists := subtree.values[intermediateKey]
+ if !exists {
+ return nil
+ }
+ switch node := value.(type) {
+ case *Tree:
+ subtree = node
+ case []*Tree:
+ // go to most recent element
+ if len(node) == 0 {
+ return nil
+ }
+ subtree = node[len(node)-1]
+ default:
+ return nil // cannot navigate through other node types
+ }
+ }
+ // branch based on final node type
+ switch node := subtree.values[keys[len(keys)-1]].(type) {
+ case *tomlValue:
+ return node.value
+ default:
+ return node
+ }
+}
+
+// GetPosition returns the position of the given key.
+func (t *Tree) GetPosition(key string) Position {
+ if key == "" {
+ return t.position
+ }
+ return t.GetPositionPath(strings.Split(key, "."))
+}
+
+// GetPositionPath returns the position of the element in the tree indicated
+// by 'keys'. If keys is of length zero, the position of the current tree is
+// returned.
+func (t *Tree) GetPositionPath(keys []string) Position {
+ if len(keys) == 0 {
+ return t.position
+ }
+ subtree := t
+ for _, intermediateKey := range keys[:len(keys)-1] {
+ value, exists := subtree.values[intermediateKey]
+ if !exists {
+ return Position{0, 0}
+ }
+ switch node := value.(type) {
+ case *Tree:
+ subtree = node
+ case []*Tree:
+ // go to most recent element
+ if len(node) == 0 {
+ return Position{0, 0}
+ }
+ subtree = node[len(node)-1]
+ default:
+ return Position{0, 0}
+ }
+ }
+ // branch based on final node type
+ switch node := subtree.values[keys[len(keys)-1]].(type) {
+ case *tomlValue:
+ return node.position
+ case *Tree:
+ return node.position
+ case []*Tree:
+ // go to most recent element
+ if len(node) == 0 {
+ return Position{0, 0}
+ }
+ return node[len(node)-1].position
+ default:
+ return Position{0, 0}
+ }
+}
+
+// GetDefault works like Get but with a default value
+func (t *Tree) GetDefault(key string, def interface{}) interface{} {
+ val := t.Get(key)
+ if val == nil {
+ return def
+ }
+ return val
+}
+
+// Set an element in the tree.
+// Key is a dot-separated path (e.g. a.b.c).
+// Creates all necessary intermediate trees, if needed.
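+//
+// Sketch (keys chosen for illustration):
+//
+//   t, _ := toml.TreeFromMap(map[string]interface{}{})
+//   t.Set("server.port", int64(8080))
+//   t.Set("server.host", "localhost")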
+func (t *Tree) Set(key string, value interface{}) {
+ t.SetWithComment(key, "", false, value)
+}
+
+// SetWithComment is the same as Set, but allows you to provide comment
+// information for the key, which will be reused by Marshal().
+func (t *Tree) SetWithComment(key string, comment string, commented bool, value interface{}) {
+ t.SetPathWithComment(strings.Split(key, "."), comment, commented, value)
+}
+
+// SetPath sets an element in the tree.
+// Keys is an array of path elements (e.g. {"a","b","c"}).
+// Creates all necessary intermediate trees, if needed.
+func (t *Tree) SetPath(keys []string, value interface{}) {
+ t.SetPathWithComment(keys, "", false, value)
+}
+
+// SetPathWithComment is the same as SetPath, but allows you to provide comment
+// information for the key, which will be reused by Marshal().
+func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool, value interface{}) {
+ subtree := t
+ for _, intermediateKey := range keys[:len(keys)-1] {
+ nextTree, exists := subtree.values[intermediateKey]
+ if !exists {
+ nextTree = newTree()
+ subtree.values[intermediateKey] = nextTree // add new element here
+ }
+ switch node := nextTree.(type) {
+ case *Tree:
+ subtree = node
+ case []*Tree:
+ // go to most recent element
+ if len(node) == 0 {
+ // create element if it does not exist
+ subtree.values[intermediateKey] = append(node, newTree())
+ }
+ subtree = node[len(node)-1]
+ }
+ }
+
+ var toInsert interface{}
+
+ switch value.(type) {
+ case *Tree:
+ tt := value.(*Tree)
+ tt.comment = comment
+ toInsert = value
+ case []*Tree:
+ toInsert = value
+ case *tomlValue:
+ tt := value.(*tomlValue)
+ tt.comment = comment
+ toInsert = tt
+ default:
+ toInsert = &tomlValue{value: value, comment: comment, commented: commented}
+ }
+
+ subtree.values[keys[len(keys)-1]] = toInsert
+}
+
+// createSubTree takes a tree and a key and creates the necessary intermediate
+// subtrees to create a subtree at that point. In-place.
+//
+// e.g. passing a.b.c will create (assuming tree is empty) tree[a], tree[a][b]
+// and tree[a][b][c]
+//
+// Returns nil on success, error object on failure
+func (t *Tree) createSubTree(keys []string, pos Position) error {
+ subtree := t
+ for _, intermediateKey := range keys {
+ nextTree, exists := subtree.values[intermediateKey]
+ if !exists {
+ tree := newTree()
+ tree.position = pos
+ subtree.values[intermediateKey] = tree
+ nextTree = tree
+ }
+
+ switch node := nextTree.(type) {
+ case []*Tree:
+ subtree = node[len(node)-1]
+ case *Tree:
+ subtree = node
+ default:
+ return fmt.Errorf("unknown type for path %s (%s): %T (%#v)",
+ strings.Join(keys, "."), intermediateKey, nextTree, nextTree)
+ }
+ }
+ return nil
+}
+
+// LoadBytes creates a Tree from a []byte.
+func LoadBytes(b []byte) (tree *Tree, err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ if _, ok := r.(runtime.Error); ok {
+ panic(r)
+ }
+ err = errors.New(r.(string))
+ }
+ }()
+ tree = parseToml(lexToml(b))
+ return
+}
+
+// LoadReader creates a Tree from any io.Reader.
+func LoadReader(reader io.Reader) (tree *Tree, err error) {
+ inputBytes, err := ioutil.ReadAll(reader)
+ if err != nil {
+ return
+ }
+ tree, err = LoadBytes(inputBytes)
+ return
+}
+
+// Load creates a Tree from a string.
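+//
+// Sketch:
+//
+//   tree, err := toml.Load(`name = "example"`)
+//   if err == nil {
+//     fmt.Println(tree.Get("name")) // example
+//   }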
+func Load(content string) (tree *Tree, err error) {
+ return LoadBytes([]byte(content))
+}
+
+// LoadFile creates a Tree from a file.
+func LoadFile(path string) (tree *Tree, err error) {
+ file, err := os.Open(path)
+ if err != nil {
+ return nil, err
+ }
+ defer file.Close()
+ return LoadReader(file)
+}
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_create.go b/vendor/github.com/pelletier/go-toml/tomltree_create.go
new file mode 100644
index 00000000..79610e9b
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/tomltree_create.go
@@ -0,0 +1,142 @@
+package toml
+
+import (
+ "fmt"
+ "reflect"
+ "time"
+)
+
+var kindToType = [reflect.String + 1]reflect.Type{
+ reflect.Bool: reflect.TypeOf(true),
+ reflect.String: reflect.TypeOf(""),
+ reflect.Float32: reflect.TypeOf(float64(1)),
+ reflect.Float64: reflect.TypeOf(float64(1)),
+ reflect.Int: reflect.TypeOf(int64(1)),
+ reflect.Int8: reflect.TypeOf(int64(1)),
+ reflect.Int16: reflect.TypeOf(int64(1)),
+ reflect.Int32: reflect.TypeOf(int64(1)),
+ reflect.Int64: reflect.TypeOf(int64(1)),
+ reflect.Uint: reflect.TypeOf(uint64(1)),
+ reflect.Uint8: reflect.TypeOf(uint64(1)),
+ reflect.Uint16: reflect.TypeOf(uint64(1)),
+ reflect.Uint32: reflect.TypeOf(uint64(1)),
+ reflect.Uint64: reflect.TypeOf(uint64(1)),
+}
+
+// typeFor returns a reflect.Type for a reflect.Kind, or nil if none is found.
+// supported values:
+// string, bool, int64, uint64, float64, time.Time, int, int8, int16, int32, uint, uint8, uint16, uint32, float32
+func typeFor(k reflect.Kind) reflect.Type {
+ if k > 0 && int(k) < len(kindToType) {
+ return kindToType[k]
+ }
+ return nil
+}
+
+func simpleValueCoercion(object interface{}) (interface{}, error) {
+ switch original := object.(type) {
+ case string, bool, int64, uint64, float64, time.Time:
+ return original, nil
+ case int:
+ return int64(original), nil
+ case int8:
+ return int64(original), nil
+ case int16:
+ return int64(original), nil
+ case int32:
+ return int64(original), nil
+ case uint:
+ return uint64(original), nil
+ case uint8:
+ return uint64(original), nil
+ case uint16:
+ return uint64(original), nil
+ case uint32:
+ return uint64(original), nil
+ case float32:
+ return float64(original), nil
+ case fmt.Stringer:
+ return original.String(), nil
+ default:
+ return nil, fmt.Errorf("cannot convert type %T to Tree", object)
+ }
+}
+
+func sliceToTree(object interface{}) (interface{}, error) {
+ // arrays are a bit tricky, since they can represent either a
+ // collection of simple values, which is represented by one
+ // *tomlValue, or an array of tables, which is represented by an
+ // array of *Tree.
+
+ // holding the assumption that this function is called from toTree only when value.Kind() is Array or Slice
+ value := reflect.ValueOf(object)
+ insideType := value.Type().Elem()
+ length := value.Len()
+ if length > 0 {
+ insideType = reflect.ValueOf(value.Index(0).Interface()).Type()
+ }
+ if insideType.Kind() == reflect.Map {
+ // this is considered as an array of tables
+ tablesArray := make([]*Tree, 0, length)
+ for i := 0; i < length; i++ {
+ table := value.Index(i)
+ tree, err := toTree(table.Interface())
+ if err != nil {
+ return nil, err
+ }
+ tablesArray = append(tablesArray, tree.(*Tree))
+ }
+ return tablesArray, nil
+ }
+
+ sliceType := typeFor(insideType.Kind())
+ if sliceType == nil {
+ sliceType = insideType
+ }
+
+ arrayValue := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, length)
+
+ for i := 0; i < length; i++ {
+ val := value.Index(i).Interface()
+ simpleValue, err := simpleValueCoercion(val)
+ if err != nil {
+ return nil, err
+ }
+ arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue))
+ }
+ return &tomlValue{value: arrayValue.Interface(), position: Position{}}, nil
+}
+
+func toTree(object interface{}) (interface{}, error) {
+ value := reflect.ValueOf(object)
+
+ if value.Kind() == reflect.Map {
+ values := map[string]interface{}{}
+ keys := value.MapKeys()
+ for _, key := range keys {
+ if key.Kind() != reflect.String {
+ if _, ok := key.Interface().(string); !ok {
+ return nil, fmt.Errorf("map key needs to be a string, not %T (%v)", key.Interface(), key.Kind())
+ }
+ }
+
+ v := value.MapIndex(key)
+ newValue, err := toTree(v.Interface())
+ if err != nil {
+ return nil, err
+ }
+ values[key.String()] = newValue
+ }
+ return &Tree{values: values, position: Position{}}, nil
+ }
+
+ if value.Kind() == reflect.Array || value.Kind() == reflect.Slice {
+ return sliceToTree(object)
+ }
+
+ simpleValue, err := simpleValueCoercion(object)
+ if err != nil {
+ return nil, err
+ }
+ return &tomlValue{value: simpleValue, position: Position{}}, nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_write.go b/vendor/github.com/pelletier/go-toml/tomltree_write.go
new file mode 100644
index 00000000..b5600a58
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/tomltree_write.go
@@ -0,0 +1,287 @@
+package toml
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "math"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// encodes a string to a TOML-compliant string value
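+// e.g. (a sketch) encodeTomlString("say \"hi\"\n") yields `say \"hi\"\n`.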
+func encodeTomlString(value string) string {
+ var b bytes.Buffer
+
+ for _, rr := range value {
+ switch rr {
+ case '\b':
+ b.WriteString(`\b`)
+ case '\t':
+ b.WriteString(`\t`)
+ case '\n':
+ b.WriteString(`\n`)
+ case '\f':
+ b.WriteString(`\f`)
+ case '\r':
+ b.WriteString(`\r`)
+ case '"':
+ b.WriteString(`\"`)
+ case '\\':
+ b.WriteString(`\\`)
+ default:
+ intRr := uint16(rr)
+ if intRr < 0x001F {
+ b.WriteString(fmt.Sprintf("\\u%0.4X", intRr))
+ } else {
+ b.WriteRune(rr)
+ }
+ }
+ }
+ return b.String()
+}
+
+func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElementPerLine bool) (string, error) {
+ switch value := v.(type) {
+ case uint64:
+ return strconv.FormatUint(value, 10), nil
+ case int64:
+ return strconv.FormatInt(value, 10), nil
+ case float64:
+ // Ensure a round float always contains a decimal point. Otherwise, feeding
+ // the output back to the parser would convert it to an integer.
+ if math.Trunc(value) == value {
+ return strings.ToLower(strconv.FormatFloat(value, 'f', 1, 32)), nil
+ }
+ return strings.ToLower(strconv.FormatFloat(value, 'f', -1, 32)), nil
+ case string:
+ return "\"" + encodeTomlString(value) + "\"", nil
+ case []byte:
+ b, _ := v.([]byte)
+ return tomlValueStringRepresentation(string(b), indent, arraysOneElementPerLine)
+ case bool:
+ if value {
+ return "true", nil
+ }
+ return "false", nil
+ case time.Time:
+ return value.Format(time.RFC3339), nil
+ case nil:
+ return "", nil
+ }
+
+ rv := reflect.ValueOf(v)
+
+ if rv.Kind() == reflect.Slice {
+ var values []string
+ for i := 0; i < rv.Len(); i++ {
+ item := rv.Index(i).Interface()
+ itemRepr, err := tomlValueStringRepresentation(item, indent, arraysOneElementPerLine)
+ if err != nil {
+ return "", err
+ }
+ values = append(values, itemRepr)
+ }
+ if arraysOneElementPerLine && len(values) > 1 {
+ stringBuffer := bytes.Buffer{}
+ valueIndent := indent + ` ` // TODO: move that to a shared encoder state
+
+ stringBuffer.WriteString("[\n")
+
+ for _, value := range values {
+ stringBuffer.WriteString(valueIndent)
+ stringBuffer.WriteString(value)
+ stringBuffer.WriteString(`,`)
+ stringBuffer.WriteString("\n")
+ }
+
+ stringBuffer.WriteString(indent + "]")
+
+ return stringBuffer.String(), nil
+ }
+ return "[" + strings.Join(values, ",") + "]", nil
+ }
+ return "", fmt.Errorf("unsupported value type %T: %v", v, v)
+}
+
+func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) {
+ simpleValuesKeys := make([]string, 0)
+ complexValuesKeys := make([]string, 0)
+
+ for k := range t.values {
+ v := t.values[k]
+ switch v.(type) {
+ case *Tree, []*Tree:
+ complexValuesKeys = append(complexValuesKeys, k)
+ default:
+ simpleValuesKeys = append(simpleValuesKeys, k)
+ }
+ }
+
+ sort.Strings(simpleValuesKeys)
+ sort.Strings(complexValuesKeys)
+
+ for _, k := range simpleValuesKeys {
+ v, ok := t.values[k].(*tomlValue)
+ if !ok {
+ return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
+ }
+
+ repr, err := tomlValueStringRepresentation(v.value, indent, arraysOneElementPerLine)
+ if err != nil {
+ return bytesCount, err
+ }
+
+ if v.comment != "" {
+ comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1)
+ start := "# "
+ if strings.HasPrefix(comment, "#") {
+ start = ""
+ }
+ writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment, "\n")
+ bytesCount += int64(writtenBytesCountComment)
+ if errc != nil {
+ return bytesCount, errc
+ }
+ }
+
+ var commented string
+ if v.commented {
+ commented = "# "
+ }
+ writtenBytesCount, err := writeStrings(w, indent, commented, k, " = ", repr, "\n")
+ bytesCount += int64(writtenBytesCount)
+ if err != nil {
+ return bytesCount, err
+ }
+ }
+
+ for _, k := range complexValuesKeys {
+ v := t.values[k]
+
+ combinedKey := k
+ if keyspace != "" {
+ combinedKey = keyspace + "." + combinedKey
+ }
+ var commented string
+ if t.commented {
+ commented = "# "
+ }
+
+ switch node := v.(type) {
+ // node has to be one of those two types given how keys are sorted above
+ case *Tree:
+ tv, ok := t.values[k].(*Tree)
+ if !ok {
+ return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
+ }
+ if tv.comment != "" {
+ comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1)
+ start := "# "
+ if strings.HasPrefix(comment, "#") {
+ start = ""
+ }
+ writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment)
+ bytesCount += int64(writtenBytesCountComment)
+ if errc != nil {
+ return bytesCount, errc
+ }
+ }
+ writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n")
+ bytesCount += int64(writtenBytesCount)
+ if err != nil {
+ return bytesCount, err
+ }
+ bytesCount, err = node.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine)
+ if err != nil {
+ return bytesCount, err
+ }
+ case []*Tree:
+ for _, subTree := range node {
+ writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n")
+ bytesCount += int64(writtenBytesCount)
+ if err != nil {
+ return bytesCount, err
+ }
+
+ bytesCount, err = subTree.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine)
+ if err != nil {
+ return bytesCount, err
+ }
+ }
+ }
+ }
+
+ return bytesCount, nil
+}
+
+func writeStrings(w io.Writer, s ...string) (int, error) {
+ var n int
+ for i := range s {
+ b, err := io.WriteString(w, s[i])
+ n += b
+ if err != nil {
+ return n, err
+ }
+ }
+ return n, nil
+}
+
+// WriteTo encodes the Tree as TOML and writes it to the writer w.
+// Returns the number of bytes written in case of success, or an error if anything went wrong.
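+//
+// Sketch:
+//
+//   var buf bytes.Buffer
+//   n, err := tree.WriteTo(&buf)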
+func (t *Tree) WriteTo(w io.Writer) (int64, error) {
+ return t.writeTo(w, "", "", 0, false)
+}
+
+// ToTomlString generates a human-readable representation of the current tree.
+// Output spans multiple lines, and is suitable for ingestion by a TOML parser.
+// If the conversion cannot be performed, ToTomlString returns a non-nil error.
+func (t *Tree) ToTomlString() (string, error) {
+ var buf bytes.Buffer
+ _, err := t.WriteTo(&buf)
+ if err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+// String generates a human-readable representation of the current tree.
+// Alias of ToTomlString. Present to implement the fmt.Stringer interface.
+func (t *Tree) String() string {
+ result, _ := t.ToTomlString()
+ return result
+}
+
+// ToMap recursively generates a representation of the tree using Go built-in structures.
+// The following types are used:
+//
+// * bool
+// * float64
+// * int64
+// * string
+// * uint64
+// * time.Time
+// * map[string]interface{} (where interface{} is any of this list)
+// * []interface{} (where interface{} is any of this list)
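+//
+// Sketch: a document such as `a = 1` maps to
+// map[string]interface{}{"a": int64(1)}.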
+func (t *Tree) ToMap() map[string]interface{} {
+ result := map[string]interface{}{}
+
+ for k, v := range t.values {
+ switch node := v.(type) {
+ case []*Tree:
+ var array []interface{}
+ for _, item := range node {
+ array = append(array, item.ToMap())
+ }
+ result[k] = array
+ case *Tree:
+ result[k] = node.ToMap()
+ case *tomlValue:
+ result[k] = node.value
+ }
+ }
+ return result
+}