| author | Wim <wim@42.be> | 2018-08-06 21:47:05 +0200 |
|---|---|---|
| committer | Wim <wim@42.be> | 2018-08-06 21:47:05 +0200 |
| commit | 51062863a5c34d81e296cf15c61140911037cf3b (patch) | |
| tree | 9b5e044672486326c7a0ca8fb26430f37bf4d83c /vendor/github.com/pelletier/go-toml | |
| parent | 4fb4b7aa6c02a54db8ad8dd98e4d321396926c0d (diff) | |
Use mod vendor for vendored directory (backwards compatible)
Diffstat (limited to 'vendor/github.com/pelletier/go-toml')
17 files changed, 578 insertions, 1532 deletions
diff --git a/vendor/github.com/pelletier/go-toml/.gitignore b/vendor/github.com/pelletier/go-toml/.gitignore new file mode 100644 index 00000000..99e38bbc --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/.gitignore @@ -0,0 +1,2 @@ +test_program/test_program_bin +fuzz/ diff --git a/vendor/github.com/pelletier/go-toml/.travis.yml b/vendor/github.com/pelletier/go-toml/.travis.yml new file mode 100644 index 00000000..ab2775d7 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/.travis.yml @@ -0,0 +1,22 @@ +sudo: false +language: go +go: + - 1.8.5 + - 1.9.2 + - tip +matrix: + allow_failures: + - go: tip + fast_finish: true +script: + - if [ -n "$(go fmt ./...)" ]; then exit 1; fi + - ./test.sh + - ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git +before_install: + - go get github.com/axw/gocov/gocov + - go get github.com/mattn/goveralls + - if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi +branches: + only: [master] +after_success: + - $HOME/gopath/bin/goveralls -service=travis-ci -coverprofile=coverage.out -repotoken $COVERALLS_TOKEN diff --git a/vendor/github.com/pelletier/go-toml/README.md b/vendor/github.com/pelletier/go-toml/README.md new file mode 100644 index 00000000..0d357acf --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/README.md @@ -0,0 +1,131 @@ +# go-toml + +Go library for the [TOML](https://github.com/mojombo/toml) format. + +This library supports TOML version +[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md) + +[![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml) +[![license](https://img.shields.io/github/license/pelletier/go-toml.svg)](https://github.com/pelletier/go-toml/blob/master/LICENSE) +[![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml) +[![Coverage Status](https://coveralls.io/repos/github/pelletier/go-toml/badge.svg?branch=master)](https://coveralls.io/github/pelletier/go-toml?branch=master) +[![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml) + +## Features + +Go-toml provides the following features for using data parsed from TOML documents: + +* Load TOML documents from files and string data +* Easily navigate TOML structure using Tree +* Mashaling and unmarshaling to and from data structures +* Line & column position data for all parsed elements +* [Query support similar to JSON-Path](query/) +* Syntax errors contain line and column numbers + +## Import + +```go +import "github.com/pelletier/go-toml" +``` + +## Usage example + +Read a TOML document: + +```go +config, _ := toml.Load(` +[postgres] +user = "pelletier" +password = "mypassword"`) +// retrieve data directly +user := config.Get("postgres.user").(string) + +// or using an intermediate object +postgresConfig := config.Get("postgres").(*toml.Tree) +password := postgresConfig.Get("password").(string) +``` + +Or use Unmarshal: + +```go +type Postgres struct { + User string + Password string +} +type Config struct { + Postgres Postgres +} + +doc := []byte(` +[Postgres] +User = "pelletier" +Password = "mypassword"`) + +config := Config{} +toml.Unmarshal(doc, &config) +fmt.Println("user=", config.Postgres.User) +``` + +Or use a query: + +```go +// use a query to gather elements without walking the tree +q, _ := query.Compile("$..[user,password]") +results := q.Execute(config) 
+for ii, item := range results.Values() { + fmt.Println("Query result %d: %v", ii, item) +} +``` + +## Documentation + +The documentation and additional examples are available at +[godoc.org](http://godoc.org/github.com/pelletier/go-toml). + +## Tools + +Go-toml provides two handy command line tools: + +* `tomll`: Reads TOML files and lint them. + + ``` + go install github.com/pelletier/go-toml/cmd/tomll + tomll --help + ``` +* `tomljson`: Reads a TOML file and outputs its JSON representation. + + ``` + go install github.com/pelletier/go-toml/cmd/tomljson + tomljson --help + ``` + +## Contribute + +Feel free to report bugs and patches using GitHub's pull requests system on +[pelletier/go-toml](https://github.com/pelletier/go-toml). Any feedback would be +much appreciated! + +### Run tests + +You have to make sure two kind of tests run: + +1. The Go unit tests +2. The TOML examples base + +You can run both of them using `./test.sh`. + +### Fuzzing + +The script `./fuzz.sh` is available to +run [go-fuzz](https://github.com/dvyukov/go-fuzz) on go-toml. + +## Versioning + +Go-toml follows [Semantic Versioning](http://semver.org/). The supported version +of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of +this document. The last two major versions of Go are supported +(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)). + +## License + +The MIT License (MIT). Read [LICENSE](LICENSE). diff --git a/vendor/github.com/pelletier/go-toml/benchmark.json b/vendor/github.com/pelletier/go-toml/benchmark.json new file mode 100644 index 00000000..86f99c6a --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/benchmark.json @@ -0,0 +1,164 @@ +{ + "array": { + "key1": [ + 1, + 2, + 3 + ], + "key2": [ + "red", + "yellow", + "green" + ], + "key3": [ + [ + 1, + 2 + ], + [ + 3, + 4, + 5 + ] + ], + "key4": [ + [ + 1, + 2 + ], + [ + "a", + "b", + "c" + ] + ], + "key5": [ + 1, + 2, + 3 + ], + "key6": [ + 1, + 2 + ] + }, + "boolean": { + "False": false, + "True": true + }, + "datetime": { + "key1": "1979-05-27T07:32:00Z", + "key2": "1979-05-27T00:32:00-07:00", + "key3": "1979-05-27T00:32:00.999999-07:00" + }, + "float": { + "both": { + "key": 6.626e-34 + }, + "exponent": { + "key1": 5e+22, + "key2": 1000000, + "key3": -0.02 + }, + "fractional": { + "key1": 1, + "key2": 3.1415, + "key3": -0.01 + }, + "underscores": { + "key1": 9224617.445991227, + "key2": 1e+100 + } + }, + "fruit": [{ + "name": "apple", + "physical": { + "color": "red", + "shape": "round" + }, + "variety": [{ + "name": "red delicious" + }, + { + "name": "granny smith" + } + ] + }, + { + "name": "banana", + "variety": [{ + "name": "plantain" + }] + } + ], + "integer": { + "key1": 99, + "key2": 42, + "key3": 0, + "key4": -17, + "underscores": { + "key1": 1000, + "key2": 5349221, + "key3": 12345 + } + }, + "products": [{ + "name": "Hammer", + "sku": 738594937 + }, + {}, + { + "color": "gray", + "name": "Nail", + "sku": 284758393 + } + ], + "string": { + "basic": { + "basic": "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF." 
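
An aside to the README portion of the diff above: the upstream Unmarshal example compiles but discards the error returned by `toml.Unmarshal`, and the query loop passes format verbs to `fmt.Println`. A minimal, error-checked variant of the struct-decoding example — an illustrative sketch that uses only the API vendored in this commit — could look like this:

```go
package main

import (
	"fmt"
	"log"

	"github.com/pelletier/go-toml"
)

type postgres struct {
	User     string
	Password string
}

type config struct {
	Postgres postgres
}

func main() {
	doc := []byte(`
[Postgres]
User = "pelletier"
Password = "mypassword"`)

	// Decode directly into the struct and check the error instead of discarding it.
	var cfg config
	if err := toml.Unmarshal(doc, &cfg); err != nil {
		log.Fatalf("unmarshal failed: %s", err)
	}
	fmt.Println("user =", cfg.Postgres.User)
}
```
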
+ }, + "literal": { + "multiline": { + "lines": "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n", + "regex2": "I [dw]on't need \\d{2} apples" + }, + "quoted": "Tom \"Dubs\" Preston-Werner", + "regex": "\u003c\\i\\c*\\s*\u003e", + "winpath": "C:\\Users\\nodejs\\templates", + "winpath2": "\\\\ServerX\\admin$\\system32\\" + }, + "multiline": { + "continued": { + "key1": "The quick brown fox jumps over the lazy dog.", + "key2": "The quick brown fox jumps over the lazy dog.", + "key3": "The quick brown fox jumps over the lazy dog." + }, + "key1": "One\nTwo", + "key2": "One\nTwo", + "key3": "One\nTwo" + } + }, + "table": { + "inline": { + "name": { + "first": "Tom", + "last": "Preston-Werner" + }, + "point": { + "x": 1, + "y": 2 + } + }, + "key": "value", + "subtable": { + "key": "another value" + } + }, + "x": { + "y": { + "z": { + "w": {} + } + } + } +} diff --git a/vendor/github.com/pelletier/go-toml/benchmark.sh b/vendor/github.com/pelletier/go-toml/benchmark.sh new file mode 100644 index 00000000..8b8bb528 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/benchmark.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +set -e + +reference_ref=${1:-master} +reference_git=${2:-.} + +if ! `hash benchstat 2>/dev/null`; then + echo "Installing benchstat" + go get golang.org/x/perf/cmd/benchstat + go install golang.org/x/perf/cmd/benchstat +fi + +tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX` +ref_tempdir="${tempdir}/ref" +ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt" +local_benchmark="`pwd`/benchmark-local.txt" + +echo "=== ${reference_ref} (${ref_tempdir})" +git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null +pushd ${ref_tempdir} >/dev/null +git checkout ${reference_ref} >/dev/null 2>/dev/null +go test -bench=. -benchmem | tee ${ref_benchmark} +popd >/dev/null + +echo "" +echo "=== local" +go test -bench=. -benchmem | tee ${local_benchmark} + +echo "" +echo "=== diff" +benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}
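
The `benchmark.sh` script above clones a reference ref, runs `go test -bench=. -benchmem` in both trees, and compares the two outputs with `benchstat`. For context, a benchmark function that such a run would pick up looks roughly like the following sketch; it is hypothetical (the document and function name are made up), not the repository's actual benchmark file:

```go
package toml_test

import (
	"testing"

	"github.com/pelletier/go-toml"
)

// A small document to parse repeatedly; any representative TOML would do.
const benchDoc = `
[postgres]
user = "pelletier"
password = "mypassword"`

// BenchmarkLoad measures parsing the document into a *toml.Tree.
func BenchmarkLoad(b *testing.B) {
	for i := 0; i < b.N; i++ {
		if _, err := toml.Load(benchDoc); err != nil {
			b.Fatal(err)
		}
	}
}
```
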
\ No newline at end of file diff --git a/vendor/github.com/pelletier/go-toml/benchmark.yml b/vendor/github.com/pelletier/go-toml/benchmark.yml new file mode 100644 index 00000000..0bd19f08 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/benchmark.yml @@ -0,0 +1,121 @@ +--- +array: + key1: + - 1 + - 2 + - 3 + key2: + - red + - yellow + - green + key3: + - - 1 + - 2 + - - 3 + - 4 + - 5 + key4: + - - 1 + - 2 + - - a + - b + - c + key5: + - 1 + - 2 + - 3 + key6: + - 1 + - 2 +boolean: + 'False': false + 'True': true +datetime: + key1: '1979-05-27T07:32:00Z' + key2: '1979-05-27T00:32:00-07:00' + key3: '1979-05-27T00:32:00.999999-07:00' +float: + both: + key: 6.626e-34 + exponent: + key1: 5.0e+22 + key2: 1000000 + key3: -0.02 + fractional: + key1: 1 + key2: 3.1415 + key3: -0.01 + underscores: + key1: 9224617.445991227 + key2: 1.0e+100 +fruit: +- name: apple + physical: + color: red + shape: round + variety: + - name: red delicious + - name: granny smith +- name: banana + variety: + - name: plantain +integer: + key1: 99 + key2: 42 + key3: 0 + key4: -17 + underscores: + key1: 1000 + key2: 5349221 + key3: 12345 +products: +- name: Hammer + sku: 738594937 +- {} +- color: gray + name: Nail + sku: 284758393 +string: + basic: + basic: "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF." + literal: + multiline: + lines: | + The first newline is + trimmed in raw strings. + All other whitespace + is preserved. + regex2: I [dw]on't need \d{2} apples + quoted: Tom "Dubs" Preston-Werner + regex: "<\\i\\c*\\s*>" + winpath: C:\Users\nodejs\templates + winpath2: "\\\\ServerX\\admin$\\system32\\" + multiline: + continued: + key1: The quick brown fox jumps over the lazy dog. + key2: The quick brown fox jumps over the lazy dog. + key3: The quick brown fox jumps over the lazy dog. 
+ key1: |- + One + Two + key2: |- + One + Two + key3: |- + One + Two +table: + inline: + name: + first: Tom + last: Preston-Werner + point: + x: 1 + y: 2 + key: value + subtable: + key: another value +x: + y: + z: + w: {} diff --git a/vendor/github.com/pelletier/go-toml/cmd/test_program.go b/vendor/github.com/pelletier/go-toml/cmd/test_program.go deleted file mode 100644 index 73077f61..00000000 --- a/vendor/github.com/pelletier/go-toml/cmd/test_program.go +++ /dev/null @@ -1,91 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "io/ioutil" - "log" - "os" - "time" - - "github.com/pelletier/go-toml" -) - -func main() { - bytes, err := ioutil.ReadAll(os.Stdin) - if err != nil { - log.Fatalf("Error during TOML read: %s", err) - os.Exit(2) - } - tree, err := toml.Load(string(bytes)) - if err != nil { - log.Fatalf("Error during TOML load: %s", err) - os.Exit(1) - } - - typedTree := translate(*tree) - - if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil { - log.Fatalf("Error encoding JSON: %s", err) - os.Exit(3) - } - - os.Exit(0) -} - -func translate(tomlData interface{}) interface{} { - switch orig := tomlData.(type) { - case map[string]interface{}: - typed := make(map[string]interface{}, len(orig)) - for k, v := range orig { - typed[k] = translate(v) - } - return typed - case *toml.Tree: - return translate(*orig) - case toml.Tree: - keys := orig.Keys() - typed := make(map[string]interface{}, len(keys)) - for _, k := range keys { - typed[k] = translate(orig.GetPath([]string{k})) - } - return typed - case []*toml.Tree: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []map[string]interface{}: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []interface{}: - typed := make([]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v) - } - return tag("array", typed) - case time.Time: - return tag("datetime", orig.Format("2006-01-02T15:04:05Z")) - case bool: - return tag("bool", fmt.Sprintf("%v", orig)) - case int64: - return tag("integer", fmt.Sprintf("%d", orig)) - case float64: - return tag("float", fmt.Sprintf("%v", orig)) - case string: - return tag("string", orig) - } - - panic(fmt.Sprintf("Unknown type: %T", tomlData)) -} - -func tag(typeName string, data interface{}) map[string]interface{} { - return map[string]interface{}{ - "type": typeName, - "value": data, - } -} diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go deleted file mode 100644 index b2d6fc67..00000000 --- a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go +++ /dev/null @@ -1,72 +0,0 @@ -// Tomljson reads TOML and converts to JSON. 
-// -// Usage: -// cat file.toml | tomljson > file.json -// tomljson file1.toml > file.json -package main - -import ( - "encoding/json" - "flag" - "fmt" - "io" - "os" - - "github.com/pelletier/go-toml" -) - -func main() { - flag.Usage = func() { - fmt.Fprintln(os.Stderr, `tomljson can be used in two ways: -Writing to STDIN and reading from STDOUT: - cat file.toml | tomljson > file.json - -Reading from a file name: - tomljson file.toml -`) - } - flag.Parse() - os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr)) -} - -func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int { - // read from stdin and print to stdout - inputReader := defaultInput - - if len(files) > 0 { - var err error - inputReader, err = os.Open(files[0]) - if err != nil { - printError(err, errorOutput) - return -1 - } - } - s, err := reader(inputReader) - if err != nil { - printError(err, errorOutput) - return -1 - } - io.WriteString(output, s+"\n") - return 0 -} - -func printError(err error, output io.Writer) { - io.WriteString(output, err.Error()+"\n") -} - -func reader(r io.Reader) (string, error) { - tree, err := toml.LoadReader(r) - if err != nil { - return "", err - } - return mapToJSON(tree) -} - -func mapToJSON(tree *toml.Tree) (string, error) { - treeMap := tree.ToMap() - bytes, err := json.MarshalIndent(treeMap, "", " ") - if err != nil { - return "", err - } - return string(bytes[:]), nil -} diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go deleted file mode 100644 index 36c7e375..00000000 --- a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go +++ /dev/null @@ -1,66 +0,0 @@ -// Tomll is a linter for TOML -// -// Usage: -// cat file.toml | tomll > file_linted.toml -// tomll file1.toml file2.toml # lint the two files in place -package main - -import ( - "flag" - "fmt" - "io" - "io/ioutil" - "os" - - "github.com/pelletier/go-toml" -) - -func main() { - flag.Usage = func() { - fmt.Fprintln(os.Stderr, `tomll can be used in two ways: -Writing to STDIN and reading from STDOUT: - cat file.toml | tomll > file.toml - -Reading and updating a list of files: - tomll a.toml b.toml c.toml - -When given a list of files, tomll will modify all files in place without asking. -`) - } - flag.Parse() - // read from stdin and print to stdout - if flag.NArg() == 0 { - s, err := lintReader(os.Stdin) - if err != nil { - io.WriteString(os.Stderr, err.Error()) - os.Exit(-1) - } - io.WriteString(os.Stdout, s) - } else { - // otherwise modify a list of files - for _, filename := range flag.Args() { - s, err := lintFile(filename) - if err != nil { - io.WriteString(os.Stderr, err.Error()) - os.Exit(-1) - } - ioutil.WriteFile(filename, []byte(s), 0644) - } - } -} - -func lintFile(filename string) (string, error) { - tree, err := toml.LoadFile(filename) - if err != nil { - return "", err - } - return tree.String(), nil -} - -func lintReader(r io.Reader) (string, error) { - tree, err := toml.LoadReader(r) - if err != nil { - return "", err - } - return tree.String(), nil -} diff --git a/vendor/github.com/pelletier/go-toml/fuzz.sh b/vendor/github.com/pelletier/go-toml/fuzz.sh new file mode 100644 index 00000000..3204b4c4 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/fuzz.sh @@ -0,0 +1,15 @@ +#! /bin/sh +set -eu + +go get github.com/dvyukov/go-fuzz/go-fuzz +go get github.com/dvyukov/go-fuzz/go-fuzz-build + +if [ ! 
-e toml-fuzz.zip ]; then + go-fuzz-build github.com/pelletier/go-toml +fi + +rm -fr fuzz +mkdir -p fuzz/corpus +cp *.toml fuzz/corpus + +go-fuzz -bin=toml-fuzz.zip -workdir=fuzz diff --git a/vendor/github.com/pelletier/go-toml/query/doc.go b/vendor/github.com/pelletier/go-toml/query/doc.go deleted file mode 100644 index ed63c110..00000000 --- a/vendor/github.com/pelletier/go-toml/query/doc.go +++ /dev/null @@ -1,175 +0,0 @@ -// Package query performs JSONPath-like queries on a TOML document. -// -// The query path implementation is based loosely on the JSONPath specification: -// http://goessner.net/articles/JsonPath/. -// -// The idea behind a query path is to allow quick access to any element, or set -// of elements within TOML document, with a single expression. -// -// result, err := query.CompileAndExecute("$.foo.bar.baz", tree) -// -// This is roughly equivalent to: -// -// next := tree.Get("foo") -// if next != nil { -// next = next.Get("bar") -// if next != nil { -// next = next.Get("baz") -// } -// } -// result := next -// -// err is nil if any parsing exception occurs. -// -// If no node in the tree matches the query, result will simply contain an empty list of -// items. -// -// As illustrated above, the query path is much more efficient, especially since -// the structure of the TOML file can vary. Rather than making assumptions about -// a document's structure, a query allows the programmer to make structured -// requests into the document, and get zero or more values as a result. -// -// Query syntax -// -// The syntax of a query begins with a root token, followed by any number -// sub-expressions: -// -// $ -// Root of the TOML tree. This must always come first. -// .name -// Selects child of this node, where 'name' is a TOML key -// name. -// ['name'] -// Selects child of this node, where 'name' is a string -// containing a TOML key name. -// [index] -// Selcts child array element at 'index'. -// ..expr -// Recursively selects all children, filtered by an a union, -// index, or slice expression. -// ..* -// Recursive selection of all nodes at this point in the -// tree. -// .* -// Selects all children of the current node. -// [expr,expr] -// Union operator - a logical 'or' grouping of two or more -// sub-expressions: index, key name, or filter. -// [start:end:step] -// Slice operator - selects array elements from start to -// end-1, at the given step. All three arguments are -// optional. -// [?(filter)] -// Named filter expression - the function 'filter' is -// used to filter children at this node. -// -// Query Indexes And Slices -// -// Index expressions perform no bounds checking, and will contribute no -// values to the result set if the provided index or index range is invalid. -// Negative indexes represent values from the end of the array, counting backwards. -// -// // select the last index of the array named 'foo' -// query.CompileAndExecute("$.foo[-1]", tree) -// -// Slice expressions are supported, by using ':' to separate a start/end index pair. -// -// // select up to the first five elements in the array -// query.CompileAndExecute("$.foo[0:5]", tree) -// -// Slice expressions also allow negative indexes for the start and stop -// arguments. -// -// // select all array elements. 
-// query.CompileAndExecute("$.foo[0:-1]", tree) -// -// Slice expressions may have an optional stride/step parameter: -// -// // select every other element -// query.CompileAndExecute("$.foo[0:-1:2]", tree) -// -// Slice start and end parameters are also optional: -// -// // these are all equivalent and select all the values in the array -// query.CompileAndExecute("$.foo[:]", tree) -// query.CompileAndExecute("$.foo[0:]", tree) -// query.CompileAndExecute("$.foo[:-1]", tree) -// query.CompileAndExecute("$.foo[0:-1:]", tree) -// query.CompileAndExecute("$.foo[::1]", tree) -// query.CompileAndExecute("$.foo[0::1]", tree) -// query.CompileAndExecute("$.foo[:-1:1]", tree) -// query.CompileAndExecute("$.foo[0:-1:1]", tree) -// -// Query Filters -// -// Query filters are used within a Union [,] or single Filter [] expression. -// A filter only allows nodes that qualify through to the next expression, -// and/or into the result set. -// -// // returns children of foo that are permitted by the 'bar' filter. -// query.CompileAndExecute("$.foo[?(bar)]", tree) -// -// There are several filters provided with the library: -// -// tree -// Allows nodes of type Tree. -// int -// Allows nodes of type int64. -// float -// Allows nodes of type float64. -// string -// Allows nodes of type string. -// time -// Allows nodes of type time.Time. -// bool -// Allows nodes of type bool. -// -// Query Results -// -// An executed query returns a Result object. This contains the nodes -// in the TOML tree that qualify the query expression. Position information -// is also available for each value in the set. -// -// // display the results of a query -// results := query.CompileAndExecute("$.foo.bar.baz", tree) -// for idx, value := results.Values() { -// fmt.Println("%v: %v", results.Positions()[idx], value) -// } -// -// Compiled Queries -// -// Queries may be executed directly on a Tree object, or compiled ahead -// of time and executed discretely. The former is more convenient, but has the -// penalty of having to recompile the query expression each time. -// -// // basic query -// results := query.CompileAndExecute("$.foo.bar.baz", tree) -// -// // compiled query -// query, err := toml.Compile("$.foo.bar.baz") -// results := query.Execute(tree) -// -// // run the compiled query again on a different tree -// moreResults := query.Execute(anotherTree) -// -// User Defined Query Filters -// -// Filter expressions may also be user defined by using the SetFilter() -// function on the Query object. The function must return true/false, which -// signifies if the passed node is kept or discarded, respectively. -// -// // create a query that references a user-defined filter -// query, _ := query.Compile("$[?(bazOnly)]") -// -// // define the filter, and assign it to the query -// query.SetFilter("bazOnly", func(node interface{}) bool{ -// if tree, ok := node.(*Tree); ok { -// return tree.Has("baz") -// } -// return false // reject all other node types -// }) -// -// // run the query -// query.Execute(tree) -// -package query diff --git a/vendor/github.com/pelletier/go-toml/query/lexer.go b/vendor/github.com/pelletier/go-toml/query/lexer.go deleted file mode 100644 index 2dc31940..00000000 --- a/vendor/github.com/pelletier/go-toml/query/lexer.go +++ /dev/null @@ -1,357 +0,0 @@ -// TOML JSONPath lexer. 
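
The `query/doc.go` comments above describe the JSONPath-like path syntax, compiled queries, and user-defined filters. The sketch below ties those pieces together; the document contents and the `bazOnly` filter name are illustrative, while the calls themselves (`query.Compile`, `Query.SetFilter`, `Query.Execute`, `Result.Values`) are the ones shown in this diff:

```go
package main

import (
	"fmt"
	"log"

	"github.com/pelletier/go-toml"
	"github.com/pelletier/go-toml/query"
)

func main() {
	tree, err := toml.Load(`
[foo]
baz = 1
[bar]
other = 2`)
	if err != nil {
		log.Fatal(err)
	}

	// Compile once; the compiled Query can be reused across trees.
	q, err := query.Compile("$[?(bazOnly)]")
	if err != nil {
		log.Fatal(err)
	}

	// User-defined filter: keep only sub-trees that contain a "baz" key.
	q.SetFilter("bazOnly", func(node interface{}) bool {
		if t, ok := node.(*toml.Tree); ok {
			return t.Has("baz")
		}
		return false // reject all other node types
	})

	for i, v := range q.Execute(tree).Values() {
		fmt.Printf("result %d: %v\n", i, v)
	}
}
```

Compiling ahead of time avoids re-parsing the path expression on every lookup, and per the package documentation a compiled `Query` is safe for concurrent use.
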
-// -// Written using the principles developed by Rob Pike in -// http://www.youtube.com/watch?v=HxaD_trXwRE - -package query - -import ( - "fmt" - "github.com/pelletier/go-toml" - "strconv" - "strings" - "unicode/utf8" -) - -// Lexer state function -type queryLexStateFn func() queryLexStateFn - -// Lexer definition -type queryLexer struct { - input string - start int - pos int - width int - tokens chan token - depth int - line int - col int - stringTerm string -} - -func (l *queryLexer) run() { - for state := l.lexVoid; state != nil; { - state = state() - } - close(l.tokens) -} - -func (l *queryLexer) nextStart() { - // iterate by runes (utf8 characters) - // search for newlines and advance line/col counts - for i := l.start; i < l.pos; { - r, width := utf8.DecodeRuneInString(l.input[i:]) - if r == '\n' { - l.line++ - l.col = 1 - } else { - l.col++ - } - i += width - } - // advance start position to next token - l.start = l.pos -} - -func (l *queryLexer) emit(t tokenType) { - l.tokens <- token{ - Position: toml.Position{Line: l.line, Col: l.col}, - typ: t, - val: l.input[l.start:l.pos], - } - l.nextStart() -} - -func (l *queryLexer) emitWithValue(t tokenType, value string) { - l.tokens <- token{ - Position: toml.Position{Line: l.line, Col: l.col}, - typ: t, - val: value, - } - l.nextStart() -} - -func (l *queryLexer) next() rune { - if l.pos >= len(l.input) { - l.width = 0 - return eof - } - var r rune - r, l.width = utf8.DecodeRuneInString(l.input[l.pos:]) - l.pos += l.width - return r -} - -func (l *queryLexer) ignore() { - l.nextStart() -} - -func (l *queryLexer) backup() { - l.pos -= l.width -} - -func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn { - l.tokens <- token{ - Position: toml.Position{Line: l.line, Col: l.col}, - typ: tokenError, - val: fmt.Sprintf(format, args...), - } - return nil -} - -func (l *queryLexer) peek() rune { - r := l.next() - l.backup() - return r -} - -func (l *queryLexer) accept(valid string) bool { - if strings.ContainsRune(valid, l.next()) { - return true - } - l.backup() - return false -} - -func (l *queryLexer) follow(next string) bool { - return strings.HasPrefix(l.input[l.pos:], next) -} - -func (l *queryLexer) lexVoid() queryLexStateFn { - for { - next := l.peek() - switch next { - case '$': - l.pos++ - l.emit(tokenDollar) - continue - case '.': - if l.follow("..") { - l.pos += 2 - l.emit(tokenDotDot) - } else { - l.pos++ - l.emit(tokenDot) - } - continue - case '[': - l.pos++ - l.emit(tokenLeftBracket) - continue - case ']': - l.pos++ - l.emit(tokenRightBracket) - continue - case ',': - l.pos++ - l.emit(tokenComma) - continue - case '*': - l.pos++ - l.emit(tokenStar) - continue - case '(': - l.pos++ - l.emit(tokenLeftParen) - continue - case ')': - l.pos++ - l.emit(tokenRightParen) - continue - case '?': - l.pos++ - l.emit(tokenQuestion) - continue - case ':': - l.pos++ - l.emit(tokenColon) - continue - case '\'': - l.ignore() - l.stringTerm = string(next) - return l.lexString - case '"': - l.ignore() - l.stringTerm = string(next) - return l.lexString - } - - if isSpace(next) { - l.next() - l.ignore() - continue - } - - if isAlphanumeric(next) { - return l.lexKey - } - - if next == '+' || next == '-' || isDigit(next) { - return l.lexNumber - } - - if l.next() == eof { - break - } - - return l.errorf("unexpected char: '%v'", next) - } - l.emit(tokenEOF) - return nil -} - -func (l *queryLexer) lexKey() queryLexStateFn { - for { - next := l.peek() - if !isAlphanumeric(next) { - l.emit(tokenKey) - return l.lexVoid - } - - if 
l.next() == eof { - break - } - } - l.emit(tokenEOF) - return nil -} - -func (l *queryLexer) lexString() queryLexStateFn { - l.pos++ - l.ignore() - growingString := "" - - for { - if l.follow(l.stringTerm) { - l.emitWithValue(tokenString, growingString) - l.pos++ - l.ignore() - return l.lexVoid - } - - if l.follow("\\\"") { - l.pos++ - growingString += "\"" - } else if l.follow("\\'") { - l.pos++ - growingString += "'" - } else if l.follow("\\n") { - l.pos++ - growingString += "\n" - } else if l.follow("\\b") { - l.pos++ - growingString += "\b" - } else if l.follow("\\f") { - l.pos++ - growingString += "\f" - } else if l.follow("\\/") { - l.pos++ - growingString += "/" - } else if l.follow("\\t") { - l.pos++ - growingString += "\t" - } else if l.follow("\\r") { - l.pos++ - growingString += "\r" - } else if l.follow("\\\\") { - l.pos++ - growingString += "\\" - } else if l.follow("\\u") { - l.pos += 2 - code := "" - for i := 0; i < 4; i++ { - c := l.peek() - l.pos++ - if !isHexDigit(c) { - return l.errorf("unfinished unicode escape") - } - code = code + string(c) - } - l.pos-- - intcode, err := strconv.ParseInt(code, 16, 32) - if err != nil { - return l.errorf("invalid unicode escape: \\u" + code) - } - growingString += string(rune(intcode)) - } else if l.follow("\\U") { - l.pos += 2 - code := "" - for i := 0; i < 8; i++ { - c := l.peek() - l.pos++ - if !isHexDigit(c) { - return l.errorf("unfinished unicode escape") - } - code = code + string(c) - } - l.pos-- - intcode, err := strconv.ParseInt(code, 16, 32) - if err != nil { - return l.errorf("invalid unicode escape: \\u" + code) - } - growingString += string(rune(intcode)) - } else if l.follow("\\") { - l.pos++ - return l.errorf("invalid escape sequence: \\" + string(l.peek())) - } else { - growingString += string(l.peek()) - } - - if l.next() == eof { - break - } - } - - return l.errorf("unclosed string") -} - -func (l *queryLexer) lexNumber() queryLexStateFn { - l.ignore() - if !l.accept("+") { - l.accept("-") - } - pointSeen := false - digitSeen := false - for { - next := l.next() - if next == '.' 
{ - if pointSeen { - return l.errorf("cannot have two dots in one float") - } - if !isDigit(l.peek()) { - return l.errorf("float cannot end with a dot") - } - pointSeen = true - } else if isDigit(next) { - digitSeen = true - } else { - l.backup() - break - } - if pointSeen && !digitSeen { - return l.errorf("cannot start float with a dot") - } - } - - if !digitSeen { - return l.errorf("no digit in that number") - } - if pointSeen { - l.emit(tokenFloat) - } else { - l.emit(tokenInteger) - } - return l.lexVoid -} - -// Entry point -func lexQuery(input string) chan token { - l := &queryLexer{ - input: input, - tokens: make(chan token), - line: 1, - col: 1, - } - go l.run() - return l.tokens -} diff --git a/vendor/github.com/pelletier/go-toml/query/match.go b/vendor/github.com/pelletier/go-toml/query/match.go deleted file mode 100644 index d7bb15a4..00000000 --- a/vendor/github.com/pelletier/go-toml/query/match.go +++ /dev/null @@ -1,232 +0,0 @@ -package query - -import ( - "fmt" - "github.com/pelletier/go-toml" -) - -// base match -type matchBase struct { - next pathFn -} - -func (f *matchBase) setNext(next pathFn) { - f.next = next -} - -// terminating functor - gathers results -type terminatingFn struct { - // empty -} - -func newTerminatingFn() *terminatingFn { - return &terminatingFn{} -} - -func (f *terminatingFn) setNext(next pathFn) { - // do nothing -} - -func (f *terminatingFn) call(node interface{}, ctx *queryContext) { - ctx.result.appendResult(node, ctx.lastPosition) -} - -// match single key -type matchKeyFn struct { - matchBase - Name string -} - -func newMatchKeyFn(name string) *matchKeyFn { - return &matchKeyFn{Name: name} -} - -func (f *matchKeyFn) call(node interface{}, ctx *queryContext) { - if array, ok := node.([]*toml.Tree); ok { - for _, tree := range array { - item := tree.Get(f.Name) - if item != nil { - ctx.lastPosition = tree.GetPosition(f.Name) - f.next.call(item, ctx) - } - } - } else if tree, ok := node.(*toml.Tree); ok { - item := tree.Get(f.Name) - if item != nil { - ctx.lastPosition = tree.GetPosition(f.Name) - f.next.call(item, ctx) - } - } -} - -// match single index -type matchIndexFn struct { - matchBase - Idx int -} - -func newMatchIndexFn(idx int) *matchIndexFn { - return &matchIndexFn{Idx: idx} -} - -func (f *matchIndexFn) call(node interface{}, ctx *queryContext) { - if arr, ok := node.([]interface{}); ok { - if f.Idx < len(arr) && f.Idx >= 0 { - if treesArray, ok := node.([]*toml.Tree); ok { - if len(treesArray) > 0 { - ctx.lastPosition = treesArray[0].Position() - } - } - f.next.call(arr[f.Idx], ctx) - } - } -} - -// filter by slicing -type matchSliceFn struct { - matchBase - Start, End, Step int -} - -func newMatchSliceFn(start, end, step int) *matchSliceFn { - return &matchSliceFn{Start: start, End: end, Step: step} -} - -func (f *matchSliceFn) call(node interface{}, ctx *queryContext) { - if arr, ok := node.([]interface{}); ok { - // adjust indexes for negative values, reverse ordering - realStart, realEnd := f.Start, f.End - if realStart < 0 { - realStart = len(arr) + realStart - } - if realEnd < 0 { - realEnd = len(arr) + realEnd - } - if realEnd < realStart { - realEnd, realStart = realStart, realEnd // swap - } - // loop and gather - for idx := realStart; idx < realEnd; idx += f.Step { - if treesArray, ok := node.([]*toml.Tree); ok { - if len(treesArray) > 0 { - ctx.lastPosition = treesArray[0].Position() - } - } - f.next.call(arr[idx], ctx) - } - } -} - -// match anything -type matchAnyFn struct { - matchBase -} - -func newMatchAnyFn() 
*matchAnyFn { - return &matchAnyFn{} -} - -func (f *matchAnyFn) call(node interface{}, ctx *queryContext) { - if tree, ok := node.(*toml.Tree); ok { - for _, k := range tree.Keys() { - v := tree.Get(k) - ctx.lastPosition = tree.GetPosition(k) - f.next.call(v, ctx) - } - } -} - -// filter through union -type matchUnionFn struct { - Union []pathFn -} - -func (f *matchUnionFn) setNext(next pathFn) { - for _, fn := range f.Union { - fn.setNext(next) - } -} - -func (f *matchUnionFn) call(node interface{}, ctx *queryContext) { - for _, fn := range f.Union { - fn.call(node, ctx) - } -} - -// match every single last node in the tree -type matchRecursiveFn struct { - matchBase -} - -func newMatchRecursiveFn() *matchRecursiveFn { - return &matchRecursiveFn{} -} - -func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) { - originalPosition := ctx.lastPosition - if tree, ok := node.(*toml.Tree); ok { - var visit func(tree *toml.Tree) - visit = func(tree *toml.Tree) { - for _, k := range tree.Keys() { - v := tree.Get(k) - ctx.lastPosition = tree.GetPosition(k) - f.next.call(v, ctx) - switch node := v.(type) { - case *toml.Tree: - visit(node) - case []*toml.Tree: - for _, subtree := range node { - visit(subtree) - } - } - } - } - ctx.lastPosition = originalPosition - f.next.call(tree, ctx) - visit(tree) - } -} - -// match based on an externally provided functional filter -type matchFilterFn struct { - matchBase - Pos toml.Position - Name string -} - -func newMatchFilterFn(name string, pos toml.Position) *matchFilterFn { - return &matchFilterFn{Name: name, Pos: pos} -} - -func (f *matchFilterFn) call(node interface{}, ctx *queryContext) { - fn, ok := (*ctx.filters)[f.Name] - if !ok { - panic(fmt.Sprintf("%s: query context does not have filter '%s'", - f.Pos.String(), f.Name)) - } - switch castNode := node.(type) { - case *toml.Tree: - for _, k := range castNode.Keys() { - v := castNode.Get(k) - if fn(v) { - ctx.lastPosition = castNode.GetPosition(k) - f.next.call(v, ctx) - } - } - case []*toml.Tree: - for _, v := range castNode { - if fn(v) { - if len(castNode) > 0 { - ctx.lastPosition = castNode[0].Position() - } - f.next.call(v, ctx) - } - } - case []interface{}: - for _, v := range castNode { - if fn(v) { - f.next.call(v, ctx) - } - } - } -} diff --git a/vendor/github.com/pelletier/go-toml/query/parser.go b/vendor/github.com/pelletier/go-toml/query/parser.go deleted file mode 100644 index 5f69b70d..00000000 --- a/vendor/github.com/pelletier/go-toml/query/parser.go +++ /dev/null @@ -1,275 +0,0 @@ -/* - Based on the "jsonpath" spec/concept. - - http://goessner.net/articles/JsonPath/ - https://code.google.com/p/json-path/ -*/ - -package query - -import ( - "fmt" -) - -const maxInt = int(^uint(0) >> 1) - -type queryParser struct { - flow chan token - tokensBuffer []token - query *Query - union []pathFn - err error -} - -type queryParserStateFn func() queryParserStateFn - -// Formats and panics an error message based on a token -func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn { - p.err = fmt.Errorf(tok.Position.String()+": "+msg, args...) 
- return nil // trigger parse to end -} - -func (p *queryParser) run() { - for state := p.parseStart; state != nil; { - state = state() - } -} - -func (p *queryParser) backup(tok *token) { - p.tokensBuffer = append(p.tokensBuffer, *tok) -} - -func (p *queryParser) peek() *token { - if len(p.tokensBuffer) != 0 { - return &(p.tokensBuffer[0]) - } - - tok, ok := <-p.flow - if !ok { - return nil - } - p.backup(&tok) - return &tok -} - -func (p *queryParser) lookahead(types ...tokenType) bool { - result := true - buffer := []token{} - - for _, typ := range types { - tok := p.getToken() - if tok == nil { - result = false - break - } - buffer = append(buffer, *tok) - if tok.typ != typ { - result = false - break - } - } - // add the tokens back to the buffer, and return - p.tokensBuffer = append(p.tokensBuffer, buffer...) - return result -} - -func (p *queryParser) getToken() *token { - if len(p.tokensBuffer) != 0 { - tok := p.tokensBuffer[0] - p.tokensBuffer = p.tokensBuffer[1:] - return &tok - } - tok, ok := <-p.flow - if !ok { - return nil - } - return &tok -} - -func (p *queryParser) parseStart() queryParserStateFn { - tok := p.getToken() - - if tok == nil || tok.typ == tokenEOF { - return nil - } - - if tok.typ != tokenDollar { - return p.parseError(tok, "Expected '$' at start of expression") - } - - return p.parseMatchExpr -} - -// handle '.' prefix, '[]', and '..' -func (p *queryParser) parseMatchExpr() queryParserStateFn { - tok := p.getToken() - switch tok.typ { - case tokenDotDot: - p.query.appendPath(&matchRecursiveFn{}) - // nested parse for '..' - tok := p.getToken() - switch tok.typ { - case tokenKey: - p.query.appendPath(newMatchKeyFn(tok.val)) - return p.parseMatchExpr - case tokenLeftBracket: - return p.parseBracketExpr - case tokenStar: - // do nothing - the recursive predicate is enough - return p.parseMatchExpr - } - - case tokenDot: - // nested parse for '.' 
- tok := p.getToken() - switch tok.typ { - case tokenKey: - p.query.appendPath(newMatchKeyFn(tok.val)) - return p.parseMatchExpr - case tokenStar: - p.query.appendPath(&matchAnyFn{}) - return p.parseMatchExpr - } - - case tokenLeftBracket: - return p.parseBracketExpr - - case tokenEOF: - return nil // allow EOF at this stage - } - return p.parseError(tok, "expected match expression") -} - -func (p *queryParser) parseBracketExpr() queryParserStateFn { - if p.lookahead(tokenInteger, tokenColon) { - return p.parseSliceExpr - } - if p.peek().typ == tokenColon { - return p.parseSliceExpr - } - return p.parseUnionExpr -} - -func (p *queryParser) parseUnionExpr() queryParserStateFn { - var tok *token - - // this state can be traversed after some sub-expressions - // so be careful when setting up state in the parser - if p.union == nil { - p.union = []pathFn{} - } - -loop: // labeled loop for easy breaking - for { - if len(p.union) > 0 { - // parse delimiter or terminator - tok = p.getToken() - switch tok.typ { - case tokenComma: - // do nothing - case tokenRightBracket: - break loop - default: - return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val) - } - } - - // parse sub expression - tok = p.getToken() - switch tok.typ { - case tokenInteger: - p.union = append(p.union, newMatchIndexFn(tok.Int())) - case tokenKey: - p.union = append(p.union, newMatchKeyFn(tok.val)) - case tokenString: - p.union = append(p.union, newMatchKeyFn(tok.val)) - case tokenQuestion: - return p.parseFilterExpr - default: - return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union)) - } - } - - // if there is only one sub-expression, use that instead - if len(p.union) == 1 { - p.query.appendPath(p.union[0]) - } else { - p.query.appendPath(&matchUnionFn{p.union}) - } - - p.union = nil // clear out state - return p.parseMatchExpr -} - -func (p *queryParser) parseSliceExpr() queryParserStateFn { - // init slice to grab all elements - start, end, step := 0, maxInt, 1 - - // parse optional start - tok := p.getToken() - if tok.typ == tokenInteger { - start = tok.Int() - tok = p.getToken() - } - if tok.typ != tokenColon { - return p.parseError(tok, "expected ':'") - } - - // parse optional end - tok = p.getToken() - if tok.typ == tokenInteger { - end = tok.Int() - tok = p.getToken() - } - if tok.typ == tokenRightBracket { - p.query.appendPath(newMatchSliceFn(start, end, step)) - return p.parseMatchExpr - } - if tok.typ != tokenColon { - return p.parseError(tok, "expected ']' or ':'") - } - - // parse optional step - tok = p.getToken() - if tok.typ == tokenInteger { - step = tok.Int() - if step < 0 { - return p.parseError(tok, "step must be a positive value") - } - tok = p.getToken() - } - if tok.typ != tokenRightBracket { - return p.parseError(tok, "expected ']'") - } - - p.query.appendPath(newMatchSliceFn(start, end, step)) - return p.parseMatchExpr -} - -func (p *queryParser) parseFilterExpr() queryParserStateFn { - tok := p.getToken() - if tok.typ != tokenLeftParen { - return p.parseError(tok, "expected left-parenthesis for filter expression") - } - tok = p.getToken() - if tok.typ != tokenKey && tok.typ != tokenString { - return p.parseError(tok, "expected key or string for filter function name") - } - name := tok.val - tok = p.getToken() - if tok.typ != tokenRightParen { - return p.parseError(tok, "expected right-parenthesis for filter expression") - } - p.union = append(p.union, newMatchFilterFn(name, tok.Position)) - return p.parseUnionExpr -} - -func parseQuery(flow chan 
token) (*Query, error) { - parser := &queryParser{ - flow: flow, - tokensBuffer: []token{}, - query: newQuery(), - } - parser.run() - return parser.query, parser.err -} diff --git a/vendor/github.com/pelletier/go-toml/query/query.go b/vendor/github.com/pelletier/go-toml/query/query.go deleted file mode 100644 index 1c6cd801..00000000 --- a/vendor/github.com/pelletier/go-toml/query/query.go +++ /dev/null @@ -1,158 +0,0 @@ -package query - -import ( - "time" - - "github.com/pelletier/go-toml" -) - -// NodeFilterFn represents a user-defined filter function, for use with -// Query.SetFilter(). -// -// The return value of the function must indicate if 'node' is to be included -// at this stage of the TOML path. Returning true will include the node, and -// returning false will exclude it. -// -// NOTE: Care should be taken to write script callbacks such that they are safe -// to use from multiple goroutines. -type NodeFilterFn func(node interface{}) bool - -// Result is the result of Executing a Query. -type Result struct { - items []interface{} - positions []toml.Position -} - -// appends a value/position pair to the result set. -func (r *Result) appendResult(node interface{}, pos toml.Position) { - r.items = append(r.items, node) - r.positions = append(r.positions, pos) -} - -// Values is a set of values within a Result. The order of values is not -// guaranteed to be in document order, and may be different each time a query is -// executed. -func (r Result) Values() []interface{} { - return r.items -} - -// Positions is a set of positions for values within a Result. Each index -// in Positions() corresponds to the entry in Value() of the same index. -func (r Result) Positions() []toml.Position { - return r.positions -} - -// runtime context for executing query paths -type queryContext struct { - result *Result - filters *map[string]NodeFilterFn - lastPosition toml.Position -} - -// generic path functor interface -type pathFn interface { - setNext(next pathFn) - // it is the caller's responsibility to set the ctx.lastPosition before invoking call() - // node can be one of: *toml.Tree, []*toml.Tree, or a scalar - call(node interface{}, ctx *queryContext) -} - -// A Query is the representation of a compiled TOML path. A Query is safe -// for concurrent use by multiple goroutines. -type Query struct { - root pathFn - tail pathFn - filters *map[string]NodeFilterFn -} - -func newQuery() *Query { - return &Query{ - root: nil, - tail: nil, - filters: &defaultFilterFunctions, - } -} - -func (q *Query) appendPath(next pathFn) { - if q.root == nil { - q.root = next - } else { - q.tail.setNext(next) - } - q.tail = next - next.setNext(newTerminatingFn()) // init the next functor -} - -// Compile compiles a TOML path expression. The returned Query can be used -// to match elements within a Tree and its descendants. See Execute. -func Compile(path string) (*Query, error) { - return parseQuery(lexQuery(path)) -} - -// Execute executes a query against a Tree, and returns the result of the query. -func (q *Query) Execute(tree *toml.Tree) *Result { - result := &Result{ - items: []interface{}{}, - positions: []toml.Position{}, - } - if q.root == nil { - result.appendResult(tree, tree.GetPosition("")) - } else { - ctx := &queryContext{ - result: result, - filters: q.filters, - } - ctx.lastPosition = tree.Position() - q.root.call(tree, ctx) - } - return result -} - -// CompileAndExecute is a shorthand for Compile(path) followed by Execute(tree). 
-func CompileAndExecute(path string, tree *toml.Tree) (*Result, error) { - query, err := Compile(path) - if err != nil { - return nil, err - } - return query.Execute(tree), nil -} - -// SetFilter sets a user-defined filter function. These may be used inside -// "?(..)" query expressions to filter TOML document elements within a query. -func (q *Query) SetFilter(name string, fn NodeFilterFn) { - if q.filters == &defaultFilterFunctions { - // clone the static table - q.filters = &map[string]NodeFilterFn{} - for k, v := range defaultFilterFunctions { - (*q.filters)[k] = v - } - } - (*q.filters)[name] = fn -} - -var defaultFilterFunctions = map[string]NodeFilterFn{ - "tree": func(node interface{}) bool { - _, ok := node.(*toml.Tree) - return ok - }, - "int": func(node interface{}) bool { - _, ok := node.(int64) - return ok - }, - "float": func(node interface{}) bool { - _, ok := node.(float64) - return ok - }, - "string": func(node interface{}) bool { - _, ok := node.(string) - return ok - }, - "time": func(node interface{}) bool { - _, ok := node.(time.Time) - return ok - }, - "bool": func(node interface{}) bool { - _, ok := node.(bool) - return ok - }, -} diff --git a/vendor/github.com/pelletier/go-toml/query/tokens.go b/vendor/github.com/pelletier/go-toml/query/tokens.go deleted file mode 100644 index 9ae579de..00000000 --- a/vendor/github.com/pelletier/go-toml/query/tokens.go +++ /dev/null @@ -1,106 +0,0 @@ -package query - -import ( - "fmt" - "github.com/pelletier/go-toml" - "strconv" - "unicode" -) - -// Define tokens -type tokenType int - -const ( - eof = -(iota + 1) -) - -const ( - tokenError tokenType = iota - tokenEOF - tokenKey - tokenString - tokenInteger - tokenFloat - tokenLeftBracket - tokenRightBracket - tokenLeftParen - tokenRightParen - tokenComma - tokenColon - tokenDollar - tokenStar - tokenQuestion - tokenDot - tokenDotDot -) - -var tokenTypeNames = []string{ - "Error", - "EOF", - "Key", - "String", - "Integer", - "Float", - "[", - "]", - "(", - ")", - ",", - ":", - "$", - "*", - "?", - ".", - "..", -} - -type token struct { - toml.Position - typ tokenType - val string -} - -func (tt tokenType) String() string { - idx := int(tt) - if idx < len(tokenTypeNames) { - return tokenTypeNames[idx] - } - return "Unknown" -} - -func (t token) Int() int { - if result, err := strconv.Atoi(t.val); err != nil { - panic(err) - } else { - return result - } -} - -func (t token) String() string { - switch t.typ { - case tokenEOF: - return "EOF" - case tokenError: - return t.val - } - - return fmt.Sprintf("%q", t.val) -} - -func isSpace(r rune) bool { - return r == ' ' || r == '\t' -} - -func isAlphanumeric(r rune) bool { - return unicode.IsLetter(r) || r == '_' -} - -func isDigit(r rune) bool { - return unicode.IsNumber(r) -} - -func isHexDigit(r rune) bool { - return isDigit(r) || - (r >= 'a' && r <= 'f') || - (r >= 'A' && r <= 'F') -} diff --git a/vendor/github.com/pelletier/go-toml/test.sh b/vendor/github.com/pelletier/go-toml/test.sh new file mode 100644 index 00000000..a70a8b02 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/test.sh @@ -0,0 +1,91 @@ +#!/bin/bash +# fail out of the script if anything here fails +set -e +set -o pipefail + +# set the path to the present working directory +export GOPATH=`pwd` + +function git_clone() { + path=$1 + branch=$2 + version=$3 + if [ ! 
-d "src/$path" ]; then + mkdir -p src/$path + git clone https://$path.git src/$path + fi + pushd src/$path + git checkout "$branch" + git reset --hard "$version" + popd +} + +# Remove potential previous runs +rm -rf src test_program_bin toml-test + +# Run go vet +go vet ./... + +go get github.com/pelletier/go-buffruneio +go get github.com/davecgh/go-spew/spew +go get gopkg.in/yaml.v2 +go get github.com/BurntSushi/toml + +# get code for BurntSushi TOML validation +# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize) +git_clone github.com/BurntSushi/toml master HEAD +git_clone github.com/BurntSushi/toml-test master HEAD #was: 0.2.0 HEAD + +# build the BurntSushi test application +go build -o toml-test github.com/BurntSushi/toml-test + +# vendorize the current lib for testing +# NOTE: this basically mocks an install without having to go back out to github for code +mkdir -p src/github.com/pelletier/go-toml/cmd +mkdir -p src/github.com/pelletier/go-toml/query +cp *.go *.toml src/github.com/pelletier/go-toml +cp -R cmd/* src/github.com/pelletier/go-toml/cmd +cp -R query/* src/github.com/pelletier/go-toml/query +go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go + +# Run basic unit tests +go test github.com/pelletier/go-toml -covermode=count -coverprofile=coverage.out +go test github.com/pelletier/go-toml/cmd/tomljson +go test github.com/pelletier/go-toml/query + +# run the entire BurntSushi test suite +if [[ $# -eq 0 ]] ; then + echo "Running all BurntSushi tests" + ./toml-test ./test_program_bin | tee test_out +else + # run a specific test + test=$1 + test_path='src/github.com/BurntSushi/toml-test/tests' + valid_test="$test_path/valid/$test" + invalid_test="$test_path/invalid/$test" + + if [ -e "$valid_test.toml" ]; then + echo "Valid Test TOML for $test:" + echo "====" + cat "$valid_test.toml" + + echo "Valid Test JSON for $test:" + echo "====" + cat "$valid_test.json" + + echo "Go-TOML Output for $test:" + echo "====" + cat "$valid_test.toml" | ./test_program_bin + fi + + if [ -e "$invalid_test.toml" ]; then + echo "Invalid Test TOML for $test:" + echo "====" + cat "$invalid_test.toml" + + echo "Go-TOML Output for $test:" + echo "====" + echo "go-toml Output:" + cat "$invalid_test.toml" | ./test_program_bin + fi +fi |
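
Finally, regarding the `fuzz.sh` script added earlier in this diff: `go-fuzz-build` packages the library together with an exported `Fuzz` entry point, and `go-fuzz` then mutates a corpus seeded from the repository's `*.toml` files. The sketch below shows what such an entry point generally looks like; it is hypothetical, not the repository's actual `fuzz.go`:

```go
package toml

// Fuzz is the entry point go-fuzz-build looks for. Returning 1 tells go-fuzz
// the input was interesting (it parsed); 0 means it was not. Hypothetical sketch.
func Fuzz(data []byte) int {
	tree, err := Load(string(data))
	if err != nil {
		return 0 // invalid TOML: nothing more to exercise
	}
	// Exercise re-serialization of the parsed tree as well.
	_ = tree.String()
	return 1
}
```
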