Diffstat (limited to 'vendor/github.com/tidwall')
-rw-r--r--  vendor/github.com/tidwall/gjson/LICENSE    |   20
-rw-r--r--  vendor/github.com/tidwall/gjson/README.md  |  497
-rw-r--r--  vendor/github.com/tidwall/gjson/SYNTAX.md  |  342
-rw-r--r--  vendor/github.com/tidwall/gjson/gjson.go   | 3359
-rw-r--r--  vendor/github.com/tidwall/gjson/logo.png   |  Bin 0 -> 15936 bytes
-rw-r--r--  vendor/github.com/tidwall/match/LICENSE    |   20
-rw-r--r--  vendor/github.com/tidwall/match/README.md  |   29
-rw-r--r--  vendor/github.com/tidwall/match/match.go   |  237
-rw-r--r--  vendor/github.com/tidwall/pretty/LICENSE   |   20
-rw-r--r--  vendor/github.com/tidwall/pretty/README.md |  122
-rw-r--r--  vendor/github.com/tidwall/pretty/pretty.go |  674
-rw-r--r--  vendor/github.com/tidwall/sjson/LICENSE    |   21
-rw-r--r--  vendor/github.com/tidwall/sjson/README.md  |  278
-rw-r--r--  vendor/github.com/tidwall/sjson/logo.png   |  Bin 0 -> 16874 bytes
-rw-r--r--  vendor/github.com/tidwall/sjson/sjson.go   |  737
15 files changed, 6356 insertions(+), 0 deletions(-)
diff --git a/vendor/github.com/tidwall/gjson/LICENSE b/vendor/github.com/tidwall/gjson/LICENSE
new file mode 100644
index 00000000..58f5819a
--- /dev/null
+++ b/vendor/github.com/tidwall/gjson/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Josh Baker
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/tidwall/gjson/README.md b/vendor/github.com/tidwall/gjson/README.md
new file mode 100644
index 00000000..c8db11f1
--- /dev/null
+++ b/vendor/github.com/tidwall/gjson/README.md
@@ -0,0 +1,497 @@
+<p align="center">
+<img
+ src="logo.png"
+ width="240" height="78" border="0" alt="GJSON">
+<br>
+<a href="https://godoc.org/github.com/tidwall/gjson"><img src="https://img.shields.io/badge/api-reference-blue.svg?style=flat-square" alt="GoDoc"></a>
+<a href="https://tidwall.com/gjson-play"><img src="https://img.shields.io/badge/%F0%9F%8F%90-playground-9900cc.svg?style=flat-square" alt="GJSON Playground"></a>
+<a href="SYNTAX.md"><img src="https://img.shields.io/badge/{}-syntax-33aa33.svg?style=flat-square" alt="GJSON Syntax"></a>
+
+</p>
+
+<p align="center">get json values quickly</p>
+
+GJSON is a Go package that provides a [fast](#performance) and [simple](#get-a-value) way to get values from a json document.
+It has features such as [one line retrieval](#get-a-value), [dot notation paths](#path-syntax), [iteration](#iterate-through-an-object-or-array), and [parsing json lines](#json-lines).
+
+Also check out [SJSON](https://github.com/tidwall/sjson) for modifying json, and the [JJ](https://github.com/tidwall/jj) command line tool.
+
+This README is a quick overview of how to use GJSON, for more information check out [GJSON Syntax](SYNTAX.md).
+
+GJSON is also available for [Python](https://github.com/volans-/gjson-py) and [Rust](https://github.com/tidwall/gjson.rs)
+
+Getting Started
+===============
+
+## Installing
+
+To start using GJSON, install Go and run `go get`:
+
+```sh
+$ go get -u github.com/tidwall/gjson
+```
+
+This will retrieve the library.
+
+## Get a value
+Get searches json for the specified path. A path is in dot syntax, such as "name.last" or "age". When the value is found it's returned immediately.
+
+```go
+package main
+
+import "github.com/tidwall/gjson"
+
+const json = `{"name":{"first":"Janet","last":"Prichard"},"age":47}`
+
+func main() {
+ value := gjson.Get(json, "name.last")
+ println(value.String())
+}
+```
+
+This will print:
+
+```
+Prichard
+```
+*There's also the [GetMany](#get-multiple-values-at-once) function to get multiple values at once, and [GetBytes](#working-with-bytes) for working with JSON byte slices.*
+
+## Path Syntax
+
+Below is a quick overview of the path syntax, for more complete information please
+check out [GJSON Syntax](SYNTAX.md).
+
+A path is a series of keys separated by a dot.
+A key may contain special wildcard characters '\*' and '?'.
+To access an array value use the index as the key.
+To get the number of elements in an array or to access a child path, use the '#' character.
+The dot and wildcard characters can be escaped with '\\'.
+
+```json
+{
+ "name": {"first": "Tom", "last": "Anderson"},
+ "age":37,
+ "children": ["Sara","Alex","Jack"],
+ "fav.movie": "Deer Hunter",
+ "friends": [
+ {"first": "Dale", "last": "Murphy", "age": 44, "nets": ["ig", "fb", "tw"]},
+ {"first": "Roger", "last": "Craig", "age": 68, "nets": ["fb", "tw"]},
+ {"first": "Jane", "last": "Murphy", "age": 47, "nets": ["ig", "tw"]}
+ ]
+}
+```
+```
+"name.last" >> "Anderson"
+"age" >> 37
+"children" >> ["Sara","Alex","Jack"]
+"children.#" >> 3
+"children.1" >> "Alex"
+"child*.2" >> "Jack"
+"c?ildren.0" >> "Sara"
+"fav\.movie" >> "Deer Hunter"
+"friends.#.first" >> ["Dale","Roger","Jane"]
+"friends.1.last" >> "Craig"
+```
+
+You can also query an array for the first match by using `#(...)`, or find all
+matches with `#(...)#`. Queries support the `==`, `!=`, `<`, `<=`, `>`, `>=`
+comparison operators and the simple pattern matching `%` (like) and `!%`
+(not like) operators.
+
+```
+friends.#(last=="Murphy").first >> "Dale"
+friends.#(last=="Murphy")#.first >> ["Dale","Jane"]
+friends.#(age>45)#.last >> ["Craig","Murphy"]
+friends.#(first%"D*").last >> "Murphy"
+friends.#(first!%"D*").last >> "Craig"
+friends.#(nets.#(=="fb"))#.first >> ["Dale","Roger"]
+```
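+
+For example, here's a minimal Go sketch (assuming the document above is stored in a `json` string) that runs one of these query paths and iterates the matches:
+
+```go
+for _, name := range gjson.Get(json, `friends.#(last=="Murphy")#.first`).Array() {
+	println(name.String()) // prints "Dale" and then "Jane"
+}
+```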
+
+*Please note that prior to v1.3.0, queries used the `#[...]` brackets. This was
+changed in v1.3.0 to avoid confusion with the new
+[multipath](SYNTAX.md#multipaths) syntax. For backwards compatibility,
+`#[...]` will continue to work until the next major release.*
+
+## Result Type
+
+GJSON supports the json types `string`, `number`, `bool`, and `null`.
+Arrays and Objects are returned as their raw json types.
+
+The `Result` type holds one of these:
+
+```
+bool, for JSON booleans
+float64, for JSON numbers
+string, for JSON string literals
+nil, for JSON null
+```
+
+To directly access the value:
+
+```go
+result.Type // can be String, Number, True, False, Null, or JSON
+result.Str // holds the string
+result.Num // holds the float64 number
+result.Raw // holds the raw json
+result.Index // index of raw value in original json, zero means index unknown
+result.Indexes // indexes of all the elements that match on a path containing the '#' query character.
+```
+
+There are a variety of handy functions that work on a result:
+
+```go
+result.Exists() bool
+result.Value() interface{}
+result.Int() int64
+result.Uint() uint64
+result.Float() float64
+result.String() string
+result.Bool() bool
+result.Time() time.Time
+result.Array() []gjson.Result
+result.Map() map[string]gjson.Result
+result.Get(path string) Result
+result.ForEach(iterator func(key, value Result) bool)
+result.Less(token Result, caseSensitive bool) bool
+```
+
+The `result.Value()` function returns an `interface{}` which requires type assertion and is one of the following Go types:
+
+```go
+boolean >> bool
+number >> float64
+string >> string
+null >> nil
+array >> []interface{}
+object >> map[string]interface{}
+```
+
+The `result.Array()` function returns an array of values.
+If the result represents a non-existent value, then an empty array is returned.
+If the result is not a JSON array, the return value will be an array containing one result.
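+
+As a brief sketch of a few of these accessors, using the `json` document from the [Get a value](#get-a-value) example above:
+
+```go
+age := gjson.Get(json, "age")
+if age.Exists() {
+	println(age.Int())    // 47 as an int64
+	println(age.String()) // "47"
+}
+// Map() returns the object's members; iteration order is not guaranteed.
+for key, val := range gjson.Get(json, "name").Map() {
+	println(key, val.String())
+}
+```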
+
+### 64-bit integers
+
+The `result.Int()` and `result.Uint()` calls are capable of reading all 64 bits, allowing for large JSON integers.
+
+```go
+result.Int() int64 // -9223372036854775808 to 9223372036854775807
+result.Uint() uint64 // 0 to 18446744073709551615
+```
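+
+For example, an integer too large to be represented exactly as a float64 is still read exactly (a small sketch):
+
+```go
+id := gjson.Get(`{"id":9007199254740993}`, "id")
+println(id.Int()) // 9007199254740993, not a rounded float64 value
+```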
+
+## Modifiers and path chaining
+
+New in version 1.2 is support for modifier functions and path chaining.
+
+A modifier is a path component that performs custom processing on the
+json.
+
+Multiple paths can be "chained" together using the pipe character.
+This is useful for getting results from a modified query.
+
+For example, using the built-in `@reverse` modifier on the above json document,
+we'll get the `children` array and reverse the order:
+
+```
+"children|@reverse" >> ["Jack","Alex","Sara"]
+"children|@reverse|0" >> "Jack"
+```
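+
+A small sketch of the same chained paths called from Go (assuming the larger document from the [Path Syntax](#path-syntax) section is stored in `json`):
+
+```go
+println(gjson.Get(json, "children|@reverse").String())   // ["Jack","Alex","Sara"]
+println(gjson.Get(json, "children|@reverse|0").String()) // Jack
+```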
+
+There are currently the following built-in modifiers:
+
+- `@reverse`: Reverse an array or the members of an object.
+- `@ugly`: Remove all whitespace from a json document.
+- `@pretty`: Make the json document more human readable.
+- `@this`: Returns the current element. It can be used to retrieve the root element.
+- `@valid`: Ensure the json document is valid.
+- `@flatten`: Flattens an array.
+- `@join`: Joins multiple objects into a single object.
+- `@keys`: Returns an array of keys for an object.
+- `@values`: Returns an array of values for an object.
+- `@tostr`: Converts json to a string. Wraps a json string.
+- `@fromstr`: Converts a string from json. Unwraps a json string.
+- `@group`: Groups arrays of objects. See [e4fc67c](https://github.com/tidwall/gjson/commit/e4fc67c92aeebf2089fabc7872f010e340d105db).
+
+### Modifier arguments
+
+A modifier may accept an optional argument. The argument can be a valid JSON
+document or just characters.
+
+For example, the `@pretty` modifier takes a json object as its argument.
+
+```
+@pretty:{"sortKeys":true}
+```
+
+Which makes the json pretty and orders all of its keys.
+
+```json
+{
+ "age":37,
+ "children": ["Sara","Alex","Jack"],
+ "fav.movie": "Deer Hunter",
+ "friends": [
+ {"age": 44, "first": "Dale", "last": "Murphy"},
+ {"age": 68, "first": "Roger", "last": "Craig"},
+ {"age": 47, "first": "Jane", "last": "Murphy"}
+ ],
+ "name": {"first": "Tom", "last": "Anderson"}
+}
+```
+
+*The full list of `@pretty` options is `sortKeys`, `indent`, `prefix`, and `width`.
+Please see [Pretty Options](https://github.com/tidwall/pretty#customized-output) for more information.*
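+
+A short sketch of passing a modifier argument from Go; the argument is simply part of the path string:
+
+```go
+pretty := gjson.Get(json, `@pretty:{"sortKeys":true}`)
+println(pretty.String()) // the whole document, indented and with sorted keys
+```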
+
+### Custom modifiers
+
+You can also add custom modifiers.
+
+For example, here we create a modifier that makes the entire json document upper
+or lower case.
+
+```go
+gjson.AddModifier("case", func(json, arg string) string {
+ if arg == "upper" {
+ return strings.ToUpper(json)
+ }
+ if arg == "lower" {
+ return strings.ToLower(json)
+ }
+ return json
+})
+```
+
+```
+"children|@case:upper" >> ["SARA","ALEX","JACK"]
+"children|@case:lower|@reverse" >> ["jack","alex","sara"]
+```
+
+## JSON Lines
+
+There's support for [JSON Lines](http://jsonlines.org/) using the `..` prefix, which treats a multi-line document as an array.
+
+For example:
+
+```
+{"name": "Gilbert", "age": 61}
+{"name": "Alexa", "age": 34}
+{"name": "May", "age": 57}
+{"name": "Deloise", "age": 44}
+```
+
+```
+..# >> 4
+..1 >> {"name": "Alexa", "age": 34}
+..3 >> {"name": "Deloise", "age": 44}
+..#.name >> ["Gilbert","Alexa","May","Deloise"]
+..#(name="May").age >> 57
+```
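+
+These paths work with the ordinary `Get` call. For example, assuming the lines above are stored in a `jsonLines` string:
+
+```go
+// jsonLines holds the JSON Lines document shown above.
+for _, name := range gjson.Get(jsonLines, "..#.name").Array() {
+	println(name.String()) // Gilbert, Alexa, May, Deloise
+}
+```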
+
+The `ForEachLine` function will iterate through JSON lines.
+
+```go
+gjson.ForEachLine(json, func(line gjson.Result) bool{
+ println(line.String())
+ return true
+})
+```
+
+## Get nested array values
+
+Suppose you want all the last names from the following json:
+
+```json
+{
+ "programmers": [
+ {
+ "firstName": "Janet",
+ "lastName": "McLaughlin",
+ }, {
+ "firstName": "Elliotte",
+ "lastName": "Hunter",
+ }, {
+ "firstName": "Jason",
+ "lastName": "Harold",
+ }
+ ]
+}
+```
+
+You would use the path "programmers.#.lastName" like so:
+
+```go
+result := gjson.Get(json, "programmers.#.lastName")
+for _, name := range result.Array() {
+ println(name.String())
+}
+```
+
+You can also query an object inside an array:
+
+```go
+name := gjson.Get(json, `programmers.#(lastName="Hunter").firstName`)
+println(name.String()) // prints "Elliotte"
+```
+
+## Iterate through an object or array
+
+The `ForEach` function allows for quickly iterating through an object or array.
+The key and value are passed to the iterator function for objects.
+Only the value is passed for arrays.
+Returning `false` from an iterator will stop iteration.
+
+```go
+result := gjson.Get(json, "programmers")
+result.ForEach(func(key, value gjson.Result) bool {
+ println(value.String())
+ return true // keep iterating
+})
+```
+
+## Simple Parse and Get
+
+There's a `Parse(json)` function that will do a simple parse, and `result.Get(path)` that will search a result.
+
+For example, all of these will return the same result:
+
+```go
+gjson.Parse(json).Get("name").Get("last")
+gjson.Get(json, "name").Get("last")
+gjson.Get(json, "name.last")
+```
+
+## Check for the existence of a value
+
+Sometimes you just want to know if a value exists.
+
+```go
+value := gjson.Get(json, "name.last")
+if !value.Exists() {
+ println("no last name")
+} else {
+ println(value.String())
+}
+
+// Or as one step
+if gjson.Get(json, "name.last").Exists() {
+ println("has a last name")
+}
+```
+
+## Validate JSON
+
+The `Get*` and `Parse*` functions expect that the json is well-formed. Bad json will not panic, but it may return unexpected results.
+
+If you are consuming JSON from an unpredictable source then you may want to validate prior to using GJSON.
+
+```go
+if !gjson.Valid(json) {
+ return errors.New("invalid json")
+}
+value := gjson.Get(json, "name.last")
+```
+
+## Unmarshal to a map
+
+To unmarshal to a `map[string]interface{}`:
+
+```go
+m, ok := gjson.Parse(json).Value().(map[string]interface{})
+if !ok {
+ // not a map
+}
+```
+
+## Working with Bytes
+
+If your JSON is contained in a `[]byte` slice, there's the [GetBytes](https://godoc.org/github.com/tidwall/gjson#GetBytes) function. This is preferred over `Get(string(data), path)`.
+
+```go
+var json []byte = ...
+result := gjson.GetBytes(json, path)
+```
+
+If you are using the `gjson.GetBytes(json, path)` function and you want to avoid converting `result.Raw` to a `[]byte`, then you can use this pattern:
+
+```go
+var json []byte = ...
+result := gjson.GetBytes(json, path)
+var raw []byte
+if result.Index > 0 {
+ raw = json[result.Index:result.Index+len(result.Raw)]
+} else {
+ raw = []byte(result.Raw)
+}
+```
+
+This is a best-effort, no-allocation subslice of the original json. This method utilizes the `result.Index` field, which is the position of the raw data in the original json. It's possible that the value of `result.Index` equals zero, in which case `result.Raw` is converted to a `[]byte`.
+
+## Get multiple values at once
+
+The `GetMany` function can be used to get multiple values at the same time.
+
+```go
+results := gjson.GetMany(json, "name.first", "name.last", "age")
+```
+
+The return value is a `[]Result`, which will always contain exactly the same number of items as the input paths.
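+
+Since every path yields an entry, the results can be indexed positionally; a quick sketch:
+
+```go
+results := gjson.GetMany(json, "name.first", "name.last", "age")
+first, last, age := results[0], results[1], results[2]
+println(first.String(), last.String(), age.Int())
+```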
+
+## Performance
+
+Benchmarks of GJSON alongside [encoding/json](https://golang.org/pkg/encoding/json/),
+[ffjson](https://github.com/pquerna/ffjson),
+[EasyJSON](https://github.com/mailru/easyjson),
+[jsonparser](https://github.com/buger/jsonparser),
+and [json-iterator](https://github.com/json-iterator/go):
+
+```
+BenchmarkGJSONGet-16 11644512 311 ns/op 0 B/op 0 allocs/op
+BenchmarkGJSONUnmarshalMap-16 1122678 3094 ns/op 1920 B/op 26 allocs/op
+BenchmarkJSONUnmarshalMap-16 516681 6810 ns/op 2944 B/op 69 allocs/op
+BenchmarkJSONUnmarshalStruct-16 697053 5400 ns/op 928 B/op 13 allocs/op
+BenchmarkJSONDecoder-16 330450 10217 ns/op 3845 B/op 160 allocs/op
+BenchmarkFFJSONLexer-16 1424979 2585 ns/op 880 B/op 8 allocs/op
+BenchmarkEasyJSONLexer-16 3000000 729 ns/op 501 B/op 5 allocs/op
+BenchmarkJSONParserGet-16 3000000 366 ns/op 21 B/op 0 allocs/op
+BenchmarkJSONIterator-16 3000000 869 ns/op 693 B/op 14 allocs/op
+```
+
+JSON document used:
+
+```json
+{
+ "widget": {
+ "debug": "on",
+ "window": {
+ "title": "Sample Konfabulator Widget",
+ "name": "main_window",
+ "width": 500,
+ "height": 500
+ },
+ "image": {
+ "src": "Images/Sun.png",
+ "hOffset": 250,
+ "vOffset": 250,
+ "alignment": "center"
+ },
+ "text": {
+ "data": "Click Here",
+ "size": 36,
+ "style": "bold",
+ "vOffset": 100,
+ "alignment": "center",
+ "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;"
+ }
+ }
+}
+```
+
+Each operation was rotated through one of the following search paths:
+
+```
+widget.window.name
+widget.image.hOffset
+widget.text.onMouseUp
+```
+
+*These benchmarks were run on a MacBook Pro 16" 2.4 GHz Intel Core i9 using Go 1.17 and can be found [here](https://github.com/tidwall/gjson-benchmarks).*
diff --git a/vendor/github.com/tidwall/gjson/SYNTAX.md b/vendor/github.com/tidwall/gjson/SYNTAX.md
new file mode 100644
index 00000000..7a9b6a2d
--- /dev/null
+++ b/vendor/github.com/tidwall/gjson/SYNTAX.md
@@ -0,0 +1,342 @@
+# GJSON Path Syntax
+
+A GJSON Path is a text string syntax that describes a search pattern for quickly retrieving values from a JSON payload.
+
+This document is designed to explain the structure of a GJSON Path through examples.
+
+- [Path structure](#path-structure)
+- [Basic](#basic)
+- [Wildcards](#wildcards)
+- [Escape Character](#escape-character)
+- [Arrays](#arrays)
+- [Queries](#queries)
+- [Dot vs Pipe](#dot-vs-pipe)
+- [Modifiers](#modifiers)
+- [Multipaths](#multipaths)
+- [Literals](#literals)
+
+The definitive implementation is [github.com/tidwall/gjson](https://github.com/tidwall/gjson).
+Use the [GJSON Playground](https://gjson.dev) to experiment with the syntax online.
+
+## Path structure
+
+A GJSON Path is intended to be easily expressed as a series of components separated by a `.` character.
+
+Along with the `.` character, there are a few more that have special meaning, including `|`, `#`, `@`, `\`, `*`, `!`, and `?`.
+
+## Example
+
+Given this JSON
+
+```json
+{
+ "name": {"first": "Tom", "last": "Anderson"},
+ "age":37,
+ "children": ["Sara","Alex","Jack"],
+ "fav.movie": "Deer Hunter",
+ "friends": [
+ {"first": "Dale", "last": "Murphy", "age": 44, "nets": ["ig", "fb", "tw"]},
+ {"first": "Roger", "last": "Craig", "age": 68, "nets": ["fb", "tw"]},
+ {"first": "Jane", "last": "Murphy", "age": 47, "nets": ["ig", "tw"]}
+ ]
+}
+```
+
+The following GJSON Paths evaluate to the accompanying values.
+
+### Basic
+
+In many cases you'll just want to retrieve values by object name or array index.
+
+```go
+name.last "Anderson"
+name.first "Tom"
+age 37
+children ["Sara","Alex","Jack"]
+children.0 "Sara"
+children.1 "Alex"
+friends.1 {"first": "Roger", "last": "Craig", "age": 68}
+friends.1.first "Roger"
+```
+
+### Wildcards
+
+A key may contain the special wildcard characters `*` and `?`.
+The `*` matches zero or more characters, and `?` matches exactly one character.
+
+```go
+child*.2 "Jack"
+c?ildren.0 "Sara"
+```
+
+### Escape character
+
+Special purpose characters, such as `.`, `*`, and `?` can be escaped with `\`.
+
+```go
+fav\.movie "Deer Hunter"
+```
+
+You'll also need to make sure that the `\` character is correctly escaped when hardcoding a path in your source code.
+
+```go
+// Go
+val := gjson.Get(json, "fav\\.movie") // must escape the slash
+val := gjson.Get(json, `fav\.movie`) // no need to escape the slash
+```
+
+```rust
+// Rust
+let val = gjson::get(json, "fav\\.movie") // must escape the slash
+let val = gjson::get(json, r#"fav\.movie"#) // no need to escape the slash
+```
+
+
+### Arrays
+
+The `#` character allows for digging into JSON Arrays.
+
+To get the length of an array you'll just use the `#` all by itself.
+
+```go
+friends.# 3
+friends.#.age [44,68,47]
+```
+
+### Queries
+
+You can also query an array for the first match by using `#(...)`, or find all matches with `#(...)#`.
+Queries support the `==`, `!=`, `<`, `<=`, `>`, `>=` comparison operators,
+and the simple pattern matching `%` (like) and `!%` (not like) operators.
+
+```go
+friends.#(last=="Murphy").first "Dale"
+friends.#(last=="Murphy")#.first ["Dale","Jane"]
+friends.#(age>45)#.last ["Craig","Murphy"]
+friends.#(first%"D*").last "Murphy"
+friends.#(first!%"D*").last "Craig"
+```
+
+To query for a non-object value in an array, you can forgo the path to the left of the operator.
+
+```go
+children.#(!%"*a*") "Alex"
+children.#(%"*a*")# ["Sara","Jack"]
+```
+
+Nested queries are allowed.
+
+```go
+friends.#(nets.#(=="fb"))#.first >> ["Dale","Roger"]
+```
+
+*Please note that prior to v1.3.0, queries used the `#[...]` brackets. This was
+changed in v1.3.0 to avoid confusion with the new [multipath](#multipaths)
+syntax. For backwards compatibility, `#[...]` will continue to work until the
+next major release.*
+
+The `~` (tilde) operator will convert a value to a boolean before comparison.
+
+For example, using the following JSON:
+
+```json
+{
+ "vals": [
+ { "a": 1, "b": true },
+ { "a": 2, "b": true },
+ { "a": 3, "b": false },
+ { "a": 4, "b": "0" },
+ { "a": 5, "b": 0 },
+ { "a": 6, "b": "1" },
+ { "a": 7, "b": 1 },
+ { "a": 8, "b": "true" },
+ { "a": 9, "b": false },
+ { "a": 10, "b": null },
+ { "a": 11 }
+ ]
+}
+```
+
+You can now query for all true(ish) or false(ish) values:
+
+```
+vals.#(b==~true)#.a >> [1,2,6,7,8]
+vals.#(b==~false)#.a >> [3,4,5,9,10,11]
+```
+
+The last value, where `b` is non-existent, is treated as `false`.
+
+### Dot vs Pipe
+
+The `.` is the standard separator, but it's also possible to use a `|`.
+In most cases they both end up returning the same results.
+The cases where `|` differs from `.` are when it's used after the `#` for [Arrays](#arrays) and [Queries](#queries).
+
+Here are some examples:
+
+```go
+friends.0.first "Dale"
+friends|0.first "Dale"
+friends.0|first "Dale"
+friends|0|first "Dale"
+friends|# 3
+friends.# 3
+friends.#(last="Murphy")# [{"first": "Dale", "last": "Murphy", "age": 44},{"first": "Jane", "last": "Murphy", "age": 47}]
+friends.#(last="Murphy")#.first ["Dale","Jane"]
+friends.#(last="Murphy")#|first <non-existent>
+friends.#(last="Murphy")#.0 []
+friends.#(last="Murphy")#|0 {"first": "Dale", "last": "Murphy", "age": 44}
+friends.#(last="Murphy")#.# []
+friends.#(last="Murphy")#|# 2
+```
+
+Let's break down a few of these.
+
+The path `friends.#(last="Murphy")#` all by itself results in
+
+```json
+[{"first": "Dale", "last": "Murphy", "age": 44},{"first": "Jane", "last": "Murphy", "age": 47}]
+```
+
+The `.first` suffix will process the `first` path on each array element *before* returning the results. Which becomes
+
+```json
+["Dale","Jane"]
+```
+
+But the `|first` suffix processes the `first` path *after* the previous result has been returned.
+Since the previous result is an array, not an object, `first` does not exist
+there and nothing is returned.
+
+Yet, the `|0` suffix returns
+
+```json
+{"first": "Dale", "last": "Murphy", "age": 44}
+```
+
+Because `0` is the first index of the previous result.
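+
+A short Go sketch of the same contrast, assuming the example document is stored in `json`:
+
+```go
+println(gjson.Get(json, `friends.#(last="Murphy")#.first`).String()) // ["Dale","Jane"]
+println(gjson.Get(json, `friends.#(last="Murphy")#|first`).String()) // "" (non-existent)
+println(gjson.Get(json, `friends.#(last="Murphy")#|0`).String())     // the first matching object
+```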
+
+### Modifiers
+
+A modifier is a path component that performs custom processing on the JSON.
+
+For example, using the built-in `@reverse` modifier on the above JSON payload will reverse the `children` array:
+
+```go
+children.@reverse ["Jack","Alex","Sara"]
+children.@reverse.0 "Jack"
+```
+
+There are currently the following built-in modifiers:
+
+- `@reverse`: Reverse an array or the members of an object.
+- `@ugly`: Remove all whitespace from JSON.
+- `@pretty`: Make the JSON more human readable.
+- `@this`: Returns the current element. It can be used to retrieve the root element.
+- `@valid`: Ensure the json document is valid.
+- `@flatten`: Flattens an array.
+- `@join`: Joins multiple objects into a single object.
+- `@keys`: Returns an array of keys for an object.
+- `@values`: Returns an array of values for an object.
+- `@tostr`: Converts json to a string. Wraps a json string.
+- `@fromstr`: Converts a string from json. Unwraps a json string.
+- `@group`: Groups arrays of objects. See [e4fc67c](https://github.com/tidwall/gjson/commit/e4fc67c92aeebf2089fabc7872f010e340d105db).
+
+#### Modifier arguments
+
+A modifier may accept an optional argument. The argument can be a valid JSON payload or just characters.
+
+For example, the `@pretty` modifier takes a json object as its argument.
+
+```
+@pretty:{"sortKeys":true}
+```
+
+Which makes the json pretty and orders all of its keys.
+
+```json
+{
+ "age":37,
+ "children": ["Sara","Alex","Jack"],
+ "fav.movie": "Deer Hunter",
+ "friends": [
+ {"age": 44, "first": "Dale", "last": "Murphy"},
+ {"age": 68, "first": "Roger", "last": "Craig"},
+ {"age": 47, "first": "Jane", "last": "Murphy"}
+ ],
+ "name": {"first": "Tom", "last": "Anderson"}
+}
+```
+
+*The full list of `@pretty` options is `sortKeys`, `indent`, `prefix`, and `width`.
+Please see [Pretty Options](https://github.com/tidwall/pretty#customized-output) for more information.*
+
+#### Custom modifiers
+
+You can also add custom modifiers.
+
+For example, here we create a modifier which makes the entire JSON payload upper or lower case.
+
+```go
+gjson.AddModifier("case", func(json, arg string) string {
+ if arg == "upper" {
+ return strings.ToUpper(json)
+ }
+ if arg == "lower" {
+ return strings.ToLower(json)
+ }
+ return json
+})
+"children.@case:upper" ["SARA","ALEX","JACK"]
+"children.@case:lower.@reverse" ["jack","alex","sara"]
+```
+
+*Note: Custom modifiers are not yet available in the Rust version*
+
+### Multipaths
+
+Starting with v1.3.0, GJSON added the ability to join multiple paths together
+to form new documents. Wrapping comma-separated paths between `[...]` or
+`{...}` will result in a new array or object, respectively.
+
+For example, using the given multipath:
+
+```
+{name.first,age,"the_murphys":friends.#(last="Murphy")#.first}
+```
+
+Here we select the first name, the age, and the first name of each friend with the
+last name "Murphy".
+
+You'll notice that an optional key can be provided, in this case
+"the_murphys", to force assign a key to a value. Otherwise, the name of the
+actual field will be used, in this case "first". If a name cannot be
+determined, then "_" is used.
+
+This results in
+
+```json
+{"first":"Tom","age":37,"the_murphys":["Dale","Jane"]}
+```
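+
+From Go, a multipath is just another path string, for example (assuming the example document is stored in `json`):
+
+```go
+doc := gjson.Get(json, `{name.first,age,"the_murphys":friends.#(last="Murphy")#.first}`)
+println(doc.String()) // {"first":"Tom","age":37,"the_murphys":["Dale","Jane"]}
+```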
+
+### Literals
+
+Starting with v1.12.0, GJSON added support for json literals, which provide a way to construct static blocks of json. This can be particularly useful when constructing a new json document using [multipaths](#multipaths).
+
+A json literal begins with the '!' declaration character.
+
+For example, using the given multipath:
+
+```
+{name.first,age,"company":!"Happysoft","employed":!true}
+```
+
+Here we select the first name and age, then add two new fields, "company" and "employed".
+
+This results in
+
+```json
+{"first":"Tom","age":37,"company":"Happysoft","employed":true}
+```
+
+*See issue [#249](https://github.com/tidwall/gjson/issues/249) for additional context on JSON Literals.*
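+
+As with multipaths, a literal is used from Go by placing it directly in the path string, for example:
+
+```go
+doc := gjson.Get(json, `{name.first,age,"company":!"Happysoft","employed":!true}`)
+println(doc.String()) // {"first":"Tom","age":37,"company":"Happysoft","employed":true}
+```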
diff --git a/vendor/github.com/tidwall/gjson/gjson.go b/vendor/github.com/tidwall/gjson/gjson.go
new file mode 100644
index 00000000..53cbd236
--- /dev/null
+++ b/vendor/github.com/tidwall/gjson/gjson.go
@@ -0,0 +1,3359 @@
+// Package gjson provides searching for json strings.
+package gjson
+
+import (
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf16"
+ "unicode/utf8"
+ "unsafe"
+
+ "github.com/tidwall/match"
+ "github.com/tidwall/pretty"
+)
+
+// Type is Result type
+type Type int
+
+const (
+ // Null is a null json value
+ Null Type = iota
+ // False is a json false boolean
+ False
+ // Number is json number
+ Number
+ // String is a json string
+ String
+ // True is a json true boolean
+ True
+ // JSON is a raw block of JSON
+ JSON
+)
+
+// String returns a string representation of the type.
+func (t Type) String() string {
+ switch t {
+ default:
+ return ""
+ case Null:
+ return "Null"
+ case False:
+ return "False"
+ case Number:
+ return "Number"
+ case String:
+ return "String"
+ case True:
+ return "True"
+ case JSON:
+ return "JSON"
+ }
+}
+
+// Result represents a json value that is returned from Get().
+type Result struct {
+ // Type is the json type
+ Type Type
+ // Raw is the raw json
+ Raw string
+ // Str is the json string
+ Str string
+ // Num is the json number
+ Num float64
+ // Index of raw value in original json, zero means index unknown
+ Index int
+ // Indexes of all the elements that match on a path containing the '#'
+ // query character.
+ Indexes []int
+}
+
+// String returns a string representation of the value.
+func (t Result) String() string {
+ switch t.Type {
+ default:
+ return ""
+ case False:
+ return "false"
+ case Number:
+ if len(t.Raw) == 0 {
+ // calculated result
+ return strconv.FormatFloat(t.Num, 'f', -1, 64)
+ }
+ var i int
+ if t.Raw[0] == '-' {
+ i++
+ }
+ for ; i < len(t.Raw); i++ {
+ if t.Raw[i] < '0' || t.Raw[i] > '9' {
+ return strconv.FormatFloat(t.Num, 'f', -1, 64)
+ }
+ }
+ return t.Raw
+ case String:
+ return t.Str
+ case JSON:
+ return t.Raw
+ case True:
+ return "true"
+ }
+}
+
+// Bool returns a boolean representation.
+func (t Result) Bool() bool {
+ switch t.Type {
+ default:
+ return false
+ case True:
+ return true
+ case String:
+ b, _ := strconv.ParseBool(strings.ToLower(t.Str))
+ return b
+ case Number:
+ return t.Num != 0
+ }
+}
+
+// Int returns an integer representation.
+func (t Result) Int() int64 {
+ switch t.Type {
+ default:
+ return 0
+ case True:
+ return 1
+ case String:
+ n, _ := parseInt(t.Str)
+ return n
+ case Number:
+ // try to directly convert the float64 to int64
+ i, ok := safeInt(t.Num)
+ if ok {
+ return i
+ }
+ // now try to parse the raw string
+ i, ok = parseInt(t.Raw)
+ if ok {
+ return i
+ }
+ // fallback to a standard conversion
+ return int64(t.Num)
+ }
+}
+
+// Uint returns an unsigned integer representation.
+func (t Result) Uint() uint64 {
+ switch t.Type {
+ default:
+ return 0
+ case True:
+ return 1
+ case String:
+ n, _ := parseUint(t.Str)
+ return n
+ case Number:
+ // try to directly convert the float64 to uint64
+ i, ok := safeInt(t.Num)
+ if ok && i >= 0 {
+ return uint64(i)
+ }
+ // now try to parse the raw string
+ u, ok := parseUint(t.Raw)
+ if ok {
+ return u
+ }
+ // fallback to a standard conversion
+ return uint64(t.Num)
+ }
+}
+
+// Float returns a float64 representation.
+func (t Result) Float() float64 {
+ switch t.Type {
+ default:
+ return 0
+ case True:
+ return 1
+ case String:
+ n, _ := strconv.ParseFloat(t.Str, 64)
+ return n
+ case Number:
+ return t.Num
+ }
+}
+
+// Time returns a time.Time representation.
+func (t Result) Time() time.Time {
+ res, _ := time.Parse(time.RFC3339, t.String())
+ return res
+}
+
+// Array returns back an array of values.
+// If the result represents a null value or is non-existent, then an empty
+// array will be returned.
+// If the result is not a JSON array, the return value will be an
+// array containing one result.
+func (t Result) Array() []Result {
+ if t.Type == Null {
+ return []Result{}
+ }
+ if !t.IsArray() {
+ return []Result{t}
+ }
+ r := t.arrayOrMap('[', false)
+ return r.a
+}
+
+// IsObject returns true if the result value is a JSON object.
+func (t Result) IsObject() bool {
+ return t.Type == JSON && len(t.Raw) > 0 && t.Raw[0] == '{'
+}
+
+// IsArray returns true if the result value is a JSON array.
+func (t Result) IsArray() bool {
+ return t.Type == JSON && len(t.Raw) > 0 && t.Raw[0] == '['
+}
+
+// IsBool returns true if the result value is a JSON boolean.
+func (t Result) IsBool() bool {
+ return t.Type == True || t.Type == False
+}
+
+// ForEach iterates through values.
+// If the result represents a non-existent value, then no values will be
+// iterated. If the result is an Object, the iterator will pass the key and
+// value of each item. If the result is an Array, the iterator will only pass
+// the value of each item. If the result is not a JSON array or object, the
+// iterator will pass back one value equal to the result.
+func (t Result) ForEach(iterator func(key, value Result) bool) {
+ if !t.Exists() {
+ return
+ }
+ if t.Type != JSON {
+ iterator(Result{}, t)
+ return
+ }
+ json := t.Raw
+ var obj bool
+ var i int
+ var key, value Result
+ for ; i < len(json); i++ {
+ if json[i] == '{' {
+ i++
+ key.Type = String
+ obj = true
+ break
+ } else if json[i] == '[' {
+ i++
+ key.Type = Number
+ key.Num = -1
+ break
+ }
+ if json[i] > ' ' {
+ return
+ }
+ }
+ var str string
+ var vesc bool
+ var ok bool
+ var idx int
+ for ; i < len(json); i++ {
+ if obj {
+ if json[i] != '"' {
+ continue
+ }
+ s := i
+ i, str, vesc, ok = parseString(json, i+1)
+ if !ok {
+ return
+ }
+ if vesc {
+ key.Str = unescape(str[1 : len(str)-1])
+ } else {
+ key.Str = str[1 : len(str)-1]
+ }
+ key.Raw = str
+ key.Index = s + t.Index
+ } else {
+ key.Num += 1
+ }
+ for ; i < len(json); i++ {
+ if json[i] <= ' ' || json[i] == ',' || json[i] == ':' {
+ continue
+ }
+ break
+ }
+ s := i
+ i, value, ok = parseAny(json, i, true)
+ if !ok {
+ return
+ }
+ if t.Indexes != nil {
+ if idx < len(t.Indexes) {
+ value.Index = t.Indexes[idx]
+ }
+ } else {
+ value.Index = s + t.Index
+ }
+ if !iterator(key, value) {
+ return
+ }
+ idx++
+ }
+}
+
+// Map returns back a map of values. The result should be a JSON object.
+// If the result is not a JSON object, the return value will be an empty map.
+func (t Result) Map() map[string]Result {
+ if t.Type != JSON {
+ return map[string]Result{}
+ }
+ r := t.arrayOrMap('{', false)
+ return r.o
+}
+
+// Get searches result for the specified path.
+// The result should be a JSON array or object.
+func (t Result) Get(path string) Result {
+ r := Get(t.Raw, path)
+ if r.Indexes != nil {
+ for i := 0; i < len(r.Indexes); i++ {
+ r.Indexes[i] += t.Index
+ }
+ } else {
+ r.Index += t.Index
+ }
+ return r
+}
+
+type arrayOrMapResult struct {
+ a []Result
+ ai []interface{}
+ o map[string]Result
+ oi map[string]interface{}
+ vc byte
+}
+
+func (t Result) arrayOrMap(vc byte, valueize bool) (r arrayOrMapResult) {
+ var json = t.Raw
+ var i int
+ var value Result
+ var count int
+ var key Result
+ if vc == 0 {
+ for ; i < len(json); i++ {
+ if json[i] == '{' || json[i] == '[' {
+ r.vc = json[i]
+ i++
+ break
+ }
+ if json[i] > ' ' {
+ goto end
+ }
+ }
+ } else {
+ for ; i < len(json); i++ {
+ if json[i] == vc {
+ i++
+ break
+ }
+ if json[i] > ' ' {
+ goto end
+ }
+ }
+ r.vc = vc
+ }
+ if r.vc == '{' {
+ if valueize {
+ r.oi = make(map[string]interface{})
+ } else {
+ r.o = make(map[string]Result)
+ }
+ } else {
+ if valueize {
+ r.ai = make([]interface{}, 0)
+ } else {
+ r.a = make([]Result, 0)
+ }
+ }
+ for ; i < len(json); i++ {
+ if json[i] <= ' ' {
+ continue
+ }
+ // get next value
+ if json[i] == ']' || json[i] == '}' {
+ break
+ }
+ switch json[i] {
+ default:
+ if (json[i] >= '0' && json[i] <= '9') || json[i] == '-' {
+ value.Type = Number
+ value.Raw, value.Num = tonum(json[i:])
+ value.Str = ""
+ } else {
+ continue
+ }
+ case '{', '[':
+ value.Type = JSON
+ value.Raw = squash(json[i:])
+ value.Str, value.Num = "", 0
+ case 'n':
+ value.Type = Null
+ value.Raw = tolit(json[i:])
+ value.Str, value.Num = "", 0
+ case 't':
+ value.Type = True
+ value.Raw = tolit(json[i:])
+ value.Str, value.Num = "", 0
+ case 'f':
+ value.Type = False
+ value.Raw = tolit(json[i:])
+ value.Str, value.Num = "", 0
+ case '"':
+ value.Type = String
+ value.Raw, value.Str = tostr(json[i:])
+ value.Num = 0
+ }
+ value.Index = i + t.Index
+
+ i += len(value.Raw) - 1
+
+ if r.vc == '{' {
+ if count%2 == 0 {
+ key = value
+ } else {
+ if valueize {
+ if _, ok := r.oi[key.Str]; !ok {
+ r.oi[key.Str] = value.Value()
+ }
+ } else {
+ if _, ok := r.o[key.Str]; !ok {
+ r.o[key.Str] = value
+ }
+ }
+ }
+ count++
+ } else {
+ if valueize {
+ r.ai = append(r.ai, value.Value())
+ } else {
+ r.a = append(r.a, value)
+ }
+ }
+ }
+end:
+ if t.Indexes != nil {
+ if len(t.Indexes) != len(r.a) {
+ for i := 0; i < len(r.a); i++ {
+ r.a[i].Index = 0
+ }
+ } else {
+ for i := 0; i < len(r.a); i++ {
+ r.a[i].Index = t.Indexes[i]
+ }
+ }
+ }
+ return
+}
+
+// Parse parses the json and returns a result.
+//
+// This function expects that the json is well-formed, and does not validate.
+// Invalid json will not panic, but it may return back unexpected results.
+// If you are consuming JSON from an unpredictable source then you may want to
+// use the Valid function first.
+func Parse(json string) Result {
+ var value Result
+ i := 0
+ for ; i < len(json); i++ {
+ if json[i] == '{' || json[i] == '[' {
+ value.Type = JSON
+ value.Raw = json[i:] // just take the entire raw
+ break
+ }
+ if json[i] <= ' ' {
+ continue
+ }
+ switch json[i] {
+ case '+', '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+ 'i', 'I', 'N':
+ value.Type = Number
+ value.Raw, value.Num = tonum(json[i:])
+ case 'n':
+ if i+1 < len(json) && json[i+1] != 'u' {
+ // nan
+ value.Type = Number
+ value.Raw, value.Num = tonum(json[i:])
+ } else {
+ // null
+ value.Type = Null
+ value.Raw = tolit(json[i:])
+ }
+ case 't':
+ value.Type = True
+ value.Raw = tolit(json[i:])
+ case 'f':
+ value.Type = False
+ value.Raw = tolit(json[i:])
+ case '"':
+ value.Type = String
+ value.Raw, value.Str = tostr(json[i:])
+ default:
+ return Result{}
+ }
+ break
+ }
+ if value.Exists() {
+ value.Index = i
+ }
+ return value
+}
+
+// ParseBytes parses the json and returns a result.
+// If working with bytes, this method is preferred over Parse(string(data)).
+func ParseBytes(json []byte) Result {
+ return Parse(string(json))
+}
+
+func squash(json string) string {
+ // expects that the lead character is a '[' or '{' or '(' or '"'
+ // squash the value, ignoring all nested arrays and objects.
+ var i, depth int
+ if json[0] != '"' {
+ i, depth = 1, 1
+ }
+ for ; i < len(json); i++ {
+ if json[i] >= '"' && json[i] <= '}' {
+ switch json[i] {
+ case '"':
+ i++
+ s2 := i
+ for ; i < len(json); i++ {
+ if json[i] > '\\' {
+ continue
+ }
+ if json[i] == '"' {
+ // look for an escaped slash
+ if json[i-1] == '\\' {
+ n := 0
+ for j := i - 2; j > s2-1; j-- {
+ if json[j] != '\\' {
+ break
+ }
+ n++
+ }
+ if n%2 == 0 {
+ continue
+ }
+ }
+ break
+ }
+ }
+ if depth == 0 {
+ if i >= len(json) {
+ return json
+ }
+ return json[:i+1]
+ }
+ case '{', '[', '(':
+ depth++
+ case '}', ']', ')':
+ depth--
+ if depth == 0 {
+ return json[:i+1]
+ }
+ }
+ }
+ }
+ return json
+}
+
+func tonum(json string) (raw string, num float64) {
+ for i := 1; i < len(json); i++ {
+ // less than dash might have valid characters
+ if json[i] <= '-' {
+ if json[i] <= ' ' || json[i] == ',' {
+ // break on whitespace and comma
+ raw = json[:i]
+ num, _ = strconv.ParseFloat(raw, 64)
+ return
+ }
+ // could be a '+' or '-'. let's assume so.
+ } else if json[i] == ']' || json[i] == '}' {
+ // break on ']' or '}'
+ raw = json[:i]
+ num, _ = strconv.ParseFloat(raw, 64)
+ return
+ }
+ }
+ raw = json
+ num, _ = strconv.ParseFloat(raw, 64)
+ return
+}
+
+func tolit(json string) (raw string) {
+ for i := 1; i < len(json); i++ {
+ if json[i] < 'a' || json[i] > 'z' {
+ return json[:i]
+ }
+ }
+ return json
+}
+
+func tostr(json string) (raw string, str string) {
+ // expects that the lead character is a '"'
+ for i := 1; i < len(json); i++ {
+ if json[i] > '\\' {
+ continue
+ }
+ if json[i] == '"' {
+ return json[:i+1], json[1:i]
+ }
+ if json[i] == '\\' {
+ i++
+ for ; i < len(json); i++ {
+ if json[i] > '\\' {
+ continue
+ }
+ if json[i] == '"' {
+ // look for an escaped slash
+ if json[i-1] == '\\' {
+ n := 0
+ for j := i - 2; j > 0; j-- {
+ if json[j] != '\\' {
+ break
+ }
+ n++
+ }
+ if n%2 == 0 {
+ continue
+ }
+ }
+ return json[:i+1], unescape(json[1:i])
+ }
+ }
+ var ret string
+ if i+1 < len(json) {
+ ret = json[:i+1]
+ } else {
+ ret = json[:i]
+ }
+ return ret, unescape(json[1:i])
+ }
+ }
+ return json, json[1:]
+}
+
+// Exists returns true if value exists.
+//
+// if gjson.Get(json, "name.last").Exists(){
+// println("value exists")
+// }
+func (t Result) Exists() bool {
+ return t.Type != Null || len(t.Raw) != 0
+}
+
+// Value returns one of these types:
+//
+// bool, for JSON booleans
+// float64, for JSON numbers
+// string, for JSON string literals
+// nil, for JSON null
+// map[string]interface{}, for JSON objects
+// []interface{}, for JSON arrays
+//
+func (t Result) Value() interface{} {
+ if t.Type == String {
+ return t.Str
+ }
+ switch t.Type {
+ default:
+ return nil
+ case False:
+ return false
+ case Number:
+ return t.Num
+ case JSON:
+ r := t.arrayOrMap(0, true)
+ if r.vc == '{' {
+ return r.oi
+ } else if r.vc == '[' {
+ return r.ai
+ }
+ return nil
+ case True:
+ return true
+ }
+}
+
+func parseString(json string, i int) (int, string, bool, bool) {
+ var s = i
+ for ; i < len(json); i++ {
+ if json[i] > '\\' {
+ continue
+ }
+ if json[i] == '"' {
+ return i + 1, json[s-1 : i+1], false, true
+ }
+ if json[i] == '\\' {
+ i++
+ for ; i < len(json); i++ {
+ if json[i] > '\\' {
+ continue
+ }
+ if json[i] == '"' {
+ // look for an escaped slash
+ if json[i-1] == '\\' {
+ n := 0
+ for j := i - 2; j > 0; j-- {
+ if json[j] != '\\' {
+ break
+ }
+ n++
+ }
+ if n%2 == 0 {
+ continue
+ }
+ }
+ return i + 1, json[s-1 : i+1], true, true
+ }
+ }
+ break
+ }
+ }
+ return i, json[s-1:], false, false
+}
+
+func parseNumber(json string, i int) (int, string) {
+ var s = i
+ i++
+ for ; i < len(json); i++ {
+ if json[i] <= ' ' || json[i] == ',' || json[i] == ']' ||
+ json[i] == '}' {
+ return i, json[s:i]
+ }
+ }
+ return i, json[s:]
+}
+
+func parseLiteral(json string, i int) (int, string) {
+ var s = i
+ i++
+ for ; i < len(json); i++ {
+ if json[i] < 'a' || json[i] > 'z' {
+ return i, json[s:i]
+ }
+ }
+ return i, json[s:]
+}
+
+type arrayPathResult struct {
+ part string
+ path string
+ pipe string
+ piped bool
+ more bool
+ alogok bool
+ arrch bool
+ alogkey string
+ query struct {
+ on bool
+ all bool
+ path string
+ op string
+ value string
+ }
+}
+
+func parseArrayPath(path string) (r arrayPathResult) {
+ for i := 0; i < len(path); i++ {
+ if path[i] == '|' {
+ r.part = path[:i]
+ r.pipe = path[i+1:]
+ r.piped = true
+ return
+ }
+ if path[i] == '.' {
+ r.part = path[:i]
+ if !r.arrch && i < len(path)-1 && isDotPiperChar(path[i+1:]) {
+ r.pipe = path[i+1:]
+ r.piped = true
+ } else {
+ r.path = path[i+1:]
+ r.more = true
+ }
+ return
+ }
+ if path[i] == '#' {
+ r.arrch = true
+ if i == 0 && len(path) > 1 {
+ if path[1] == '.' {
+ r.alogok = true
+ r.alogkey = path[2:]
+ r.path = path[:1]
+ } else if path[1] == '[' || path[1] == '(' {
+ // query
+ r.query.on = true
+ qpath, op, value, _, fi, vesc, ok :=
+ parseQuery(path[i:])
+ if !ok {
+ // bad query, end now
+ break
+ }
+ if len(value) >= 2 && value[0] == '"' &&
+ value[len(value)-1] == '"' {
+ value = value[1 : len(value)-1]
+ if vesc {
+ value = unescape(value)
+ }
+ }
+ r.query.path = qpath
+ r.query.op = op
+ r.query.value = value
+
+ i = fi - 1
+ if i+1 < len(path) && path[i+1] == '#' {
+ r.query.all = true
+ }
+ }
+ }
+ continue
+ }
+ }
+ r.part = path
+ r.path = ""
+ return
+}
+
+// parseQuery takes a query and splits it into its component parts:
+//   path, op, value, and remain.
+// So for this query:
+//   #(first_name=="Murphy").last
+// Becomes
+//   first_name    # path
+//   ==            # op
+//   "Murphy"      # value
+//   .last         # remain
+// Or,
+//   #(service_roles.#(=="one")).cap
+// Becomes
+//   service_roles.#(=="one")   # path
+//                              # (no op or value)
+//   .cap                       # remain
+func parseQuery(query string) (
+ path, op, value, remain string, i int, vesc, ok bool,
+) {
+ if len(query) < 2 || query[0] != '#' ||
+ (query[1] != '(' && query[1] != '[') {
+ return "", "", "", "", i, false, false
+ }
+ i = 2
+ j := 0 // start of value part
+ depth := 1
+ for ; i < len(query); i++ {
+ if depth == 1 && j == 0 {
+ switch query[i] {
+ case '!', '=', '<', '>', '%':
+ // start of the value part
+ j = i
+ continue
+ }
+ }
+ if query[i] == '\\' {
+ i++
+ } else if query[i] == '[' || query[i] == '(' {
+ depth++
+ } else if query[i] == ']' || query[i] == ')' {
+ depth--
+ if depth == 0 {
+ break
+ }
+ } else if query[i] == '"' {
+ // inside selector string, balance quotes
+ i++
+ for ; i < len(query); i++ {
+ if query[i] == '\\' {
+ vesc = true
+ i++
+ } else if query[i] == '"' {
+ break
+ }
+ }
+ }
+ }
+ if depth > 0 {
+ return "", "", "", "", i, false, false
+ }
+ if j > 0 {
+ path = trim(query[2:j])
+ value = trim(query[j:i])
+ remain = query[i+1:]
+ // parse the compare op from the value
+ var opsz int
+ switch {
+ case len(value) == 1:
+ opsz = 1
+ case value[0] == '!' && value[1] == '=':
+ opsz = 2
+ case value[0] == '!' && value[1] == '%':
+ opsz = 2
+ case value[0] == '<' && value[1] == '=':
+ opsz = 2
+ case value[0] == '>' && value[1] == '=':
+ opsz = 2
+ case value[0] == '=' && value[1] == '=':
+ value = value[1:]
+ opsz = 1
+ case value[0] == '<':
+ opsz = 1
+ case value[0] == '>':
+ opsz = 1
+ case value[0] == '=':
+ opsz = 1
+ case value[0] == '%':
+ opsz = 1
+ }
+ op = value[:opsz]
+ value = trim(value[opsz:])
+ } else {
+ path = trim(query[2:i])
+ remain = query[i+1:]
+ }
+ return path, op, value, remain, i + 1, vesc, true
+}
+
+func trim(s string) string {
+left:
+ if len(s) > 0 && s[0] <= ' ' {
+ s = s[1:]
+ goto left
+ }
+right:
+ if len(s) > 0 && s[len(s)-1] <= ' ' {
+ s = s[:len(s)-1]
+ goto right
+ }
+ return s
+}
+
+// peek at the next byte and see if it's a '@', '[', or '{'.
+func isDotPiperChar(s string) bool {
+ if DisableModifiers {
+ return false
+ }
+ c := s[0]
+ if c == '@' {
+ // check that the next component is *not* a modifier.
+ i := 1
+ for ; i < len(s); i++ {
+ if s[i] == '.' || s[i] == '|' || s[i] == ':' {
+ break
+ }
+ }
+ _, ok := modifiers[s[1:i]]
+ return ok
+ }
+ return c == '[' || c == '{'
+}
+
+type objectPathResult struct {
+ part string
+ path string
+ pipe string
+ piped bool
+ wild bool
+ more bool
+}
+
+func parseObjectPath(path string) (r objectPathResult) {
+ for i := 0; i < len(path); i++ {
+ if path[i] == '|' {
+ r.part = path[:i]
+ r.pipe = path[i+1:]
+ r.piped = true
+ return
+ }
+ if path[i] == '.' {
+ r.part = path[:i]
+ if i < len(path)-1 && isDotPiperChar(path[i+1:]) {
+ r.pipe = path[i+1:]
+ r.piped = true
+ } else {
+ r.path = path[i+1:]
+ r.more = true
+ }
+ return
+ }
+ if path[i] == '*' || path[i] == '?' {
+ r.wild = true
+ continue
+ }
+ if path[i] == '\\' {
+ // go into escape mode. this is a slower path that
+ // strips off the escape character from the part.
+ epart := []byte(path[:i])
+ i++
+ if i < len(path) {
+ epart = append(epart, path[i])
+ i++
+ for ; i < len(path); i++ {
+ if path[i] == '\\' {
+ i++
+ if i < len(path) {
+ epart = append(epart, path[i])
+ }
+ continue
+ } else if path[i] == '.' {
+ r.part = string(epart)
+ if i < len(path)-1 && isDotPiperChar(path[i+1:]) {
+ r.pipe = path[i+1:]
+ r.piped = true
+ } else {
+ r.path = path[i+1:]
+ r.more = true
+ }
+ return
+ } else if path[i] == '|' {
+ r.part = string(epart)
+ r.pipe = path[i+1:]
+ r.piped = true
+ return
+ } else if path[i] == '*' || path[i] == '?' {
+ r.wild = true
+ }
+ epart = append(epart, path[i])
+ }
+ }
+ // append the last part
+ r.part = string(epart)
+ return
+ }
+ }
+ r.part = path
+ return
+}
+
+func parseSquash(json string, i int) (int, string) {
+ // expects that the lead character is a '[' or '{' or '('
+ // squash the value, ignoring all nested arrays and objects.
+ // the first '[' or '{' or '(' has already been read
+ s := i
+ i++
+ depth := 1
+ for ; i < len(json); i++ {
+ if json[i] >= '"' && json[i] <= '}' {
+ switch json[i] {
+ case '"':
+ i++
+ s2 := i
+ for ; i < len(json); i++ {
+ if json[i] > '\\' {
+ continue
+ }
+ if json[i] == '"' {
+ // look for an escaped slash
+ if json[i-1] == '\\' {
+ n := 0
+ for j := i - 2; j > s2-1; j-- {
+ if json[j] != '\\' {
+ break
+ }
+ n++
+ }
+ if n%2 == 0 {
+ continue
+ }
+ }
+ break
+ }
+ }
+ case '{', '[', '(':
+ depth++
+ case '}', ']', ')':
+ depth--
+ if depth == 0 {
+ i++
+ return i, json[s:i]
+ }
+ }
+ }
+ }
+ return i, json[s:]
+}
+
+func parseObject(c *parseContext, i int, path string) (int, bool) {
+ var pmatch, kesc, vesc, ok, hit bool
+ var key, val string
+ rp := parseObjectPath(path)
+ if !rp.more && rp.piped {
+ c.pipe = rp.pipe
+ c.piped = true
+ }
+ for i < len(c.json) {
+ for ; i < len(c.json); i++ {
+ if c.json[i] == '"' {
+ // parse_key_string
+ // this is slightly different from getting a string value
+ // because we don't need the outer quotes.
+ i++
+ var s = i
+ for ; i < len(c.json); i++ {
+ if c.json[i] > '\\' {
+ continue
+ }
+ if c.json[i] == '"' {
+ i, key, kesc, ok = i+1, c.json[s:i], false, true
+ goto parse_key_string_done
+ }
+ if c.json[i] == '\\' {
+ i++
+ for ; i < len(c.json); i++ {
+ if c.json[i] > '\\' {
+ continue
+ }
+ if c.json[i] == '"' {
+ // look for an escaped slash
+ if c.json[i-1] == '\\' {
+ n := 0
+ for j := i - 2; j > 0; j-- {
+ if c.json[j] != '\\' {
+ break
+ }
+ n++
+ }
+ if n%2 == 0 {
+ continue
+ }
+ }
+ i, key, kesc, ok = i+1, c.json[s:i], true, true
+ goto parse_key_string_done
+ }
+ }
+ break
+ }
+ }
+ key, kesc, ok = c.json[s:], false, false
+ parse_key_string_done:
+ break
+ }
+ if c.json[i] == '}' {
+ return i + 1, false
+ }
+ }
+ if !ok {
+ return i, false
+ }
+ if rp.wild {
+ if kesc {
+ pmatch = matchLimit(unescape(key), rp.part)
+ } else {
+ pmatch = matchLimit(key, rp.part)
+ }
+ } else {
+ if kesc {
+ pmatch = rp.part == unescape(key)
+ } else {
+ pmatch = rp.part == key
+ }
+ }
+ hit = pmatch && !rp.more
+ for ; i < len(c.json); i++ {
+ var num bool
+ switch c.json[i] {
+ default:
+ continue
+ case '"':
+ i++
+ i, val, vesc, ok = parseString(c.json, i)
+ if !ok {
+ return i, false
+ }
+ if hit {
+ if vesc {
+ c.value.Str = unescape(val[1 : len(val)-1])
+ } else {
+ c.value.Str = val[1 : len(val)-1]
+ }
+ c.value.Raw = val
+ c.value.Type = String
+ return i, true
+ }
+ case '{':
+ if pmatch && !hit {
+ i, hit = parseObject(c, i+1, rp.path)
+ if hit {
+ return i, true
+ }
+ } else {
+ i, val = parseSquash(c.json, i)
+ if hit {
+ c.value.Raw = val
+ c.value.Type = JSON
+ return i, true
+ }
+ }
+ case '[':
+ if pmatch && !hit {
+ i, hit = parseArray(c, i+1, rp.path)
+ if hit {
+ return i, true
+ }
+ } else {
+ i, val = parseSquash(c.json, i)
+ if hit {
+ c.value.Raw = val
+ c.value.Type = JSON
+ return i, true
+ }
+ }
+ case 'n':
+ if i+1 < len(c.json) && c.json[i+1] != 'u' {
+ num = true
+ break
+ }
+ fallthrough
+ case 't', 'f':
+ vc := c.json[i]
+ i, val = parseLiteral(c.json, i)
+ if hit {
+ c.value.Raw = val
+ switch vc {
+ case 't':
+ c.value.Type = True
+ case 'f':
+ c.value.Type = False
+ }
+ return i, true
+ }
+ case '+', '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+ 'i', 'I', 'N':
+ num = true
+ }
+ if num {
+ i, val = parseNumber(c.json, i)
+ if hit {
+ c.value.Raw = val
+ c.value.Type = Number
+ c.value.Num, _ = strconv.ParseFloat(val, 64)
+ return i, true
+ }
+ }
+ break
+ }
+ }
+ return i, false
+}
+
+// matchLimit will limit the complexity of the match operation to avoid ReDoS
+// attacks from arbitrary inputs.
+// See the github.com/tidwall/match.MatchLimit function for more information.
+func matchLimit(str, pattern string) bool {
+ matched, _ := match.MatchLimit(str, pattern, 10000)
+ return matched
+}
+
+func queryMatches(rp *arrayPathResult, value Result) bool {
+ rpv := rp.query.value
+ if len(rpv) > 0 && rpv[0] == '~' {
+ // convert to bool
+ rpv = rpv[1:]
+ if value.Bool() {
+ value = Result{Type: True}
+ } else {
+ value = Result{Type: False}
+ }
+ }
+ if !value.Exists() {
+ return false
+ }
+ if rp.query.op == "" {
+ // the query is only looking for existence, such as:
+ // friends.#(name)
+ // which makes sure that the array "friends" has an element of
+ // "name" that exists
+ return true
+ }
+ switch value.Type {
+ case String:
+ switch rp.query.op {
+ case "=":
+ return value.Str == rpv
+ case "!=":
+ return value.Str != rpv
+ case "<":
+ return value.Str < rpv
+ case "<=":
+ return value.Str <= rpv
+ case ">":
+ return value.Str > rpv
+ case ">=":
+ return value.Str >= rpv
+ case "%":
+ return matchLimit(value.Str, rpv)
+ case "!%":
+ return !matchLimit(value.Str, rpv)
+ }
+ case Number:
+ rpvn, _ := strconv.ParseFloat(rpv, 64)
+ switch rp.query.op {
+ case "=":
+ return value.Num == rpvn
+ case "!=":
+ return value.Num != rpvn
+ case "<":
+ return value.Num < rpvn
+ case "<=":
+ return value.Num <= rpvn
+ case ">":
+ return value.Num > rpvn
+ case ">=":
+ return value.Num >= rpvn
+ }
+ case True:
+ switch rp.query.op {
+ case "=":
+ return rpv == "true"
+ case "!=":
+ return rpv != "true"
+ case ">":
+ return rpv == "false"
+ case ">=":
+ return true
+ }
+ case False:
+ switch rp.query.op {
+ case "=":
+ return rpv == "false"
+ case "!=":
+ return rpv != "false"
+ case "<":
+ return rpv == "true"
+ case "<=":
+ return true
+ }
+ }
+ return false
+}
+func parseArray(c *parseContext, i int, path string) (int, bool) {
+ var pmatch, vesc, ok, hit bool
+ var val string
+ var h int
+ var alog []int
+ var partidx int
+ var multires []byte
+ var queryIndexes []int
+ rp := parseArrayPath(path)
+ if !rp.arrch {
+ n, ok := parseUint(rp.part)
+ if !ok {
+ partidx = -1
+ } else {
+ partidx = int(n)
+ }
+ }
+ if !rp.more && rp.piped {
+ c.pipe = rp.pipe
+ c.piped = true
+ }
+
+ procQuery := func(qval Result) bool {
+ if rp.query.all {
+ if len(multires) == 0 {
+ multires = append(multires, '[')
+ }
+ }
+ var tmp parseContext
+ tmp.value = qval
+ fillIndex(c.json, &tmp)
+ parentIndex := tmp.value.Index
+ var res Result
+ if qval.Type == JSON {
+ res = qval.Get(rp.query.path)
+ } else {
+ if rp.query.path != "" {
+ return false
+ }
+ res = qval
+ }
+ if queryMatches(&rp, res) {
+ if rp.more {
+ left, right, ok := splitPossiblePipe(rp.path)
+ if ok {
+ rp.path = left
+ c.pipe = right
+ c.piped = true
+ }
+ res = qval.Get(rp.path)
+ } else {
+ res = qval
+ }
+ if rp.query.all {
+ raw := res.Raw
+ if len(raw) == 0 {
+ raw = res.String()
+ }
+ if raw != "" {
+ if len(multires) > 1 {
+ multires = append(multires, ',')
+ }
+ multires = append(multires, raw...)
+ queryIndexes = append(queryIndexes, res.Index+parentIndex)
+ }
+ } else {
+ c.value = res
+ return true
+ }
+ }
+ return false
+ }
+ for i < len(c.json)+1 {
+ if !rp.arrch {
+ pmatch = partidx == h
+ hit = pmatch && !rp.more
+ }
+ h++
+ if rp.alogok {
+ alog = append(alog, i)
+ }
+ for ; ; i++ {
+ var ch byte
+ if i > len(c.json) {
+ break
+ } else if i == len(c.json) {
+ ch = ']'
+ } else {
+ ch = c.json[i]
+ }
+ var num bool
+ switch ch {
+ default:
+ continue
+ case '"':
+ i++
+ i, val, vesc, ok = parseString(c.json, i)
+ if !ok {
+ return i, false
+ }
+ if rp.query.on {
+ var qval Result
+ if vesc {
+ qval.Str = unescape(val[1 : len(val)-1])
+ } else {
+ qval.Str = val[1 : len(val)-1]
+ }
+ qval.Raw = val
+ qval.Type = String
+ if procQuery(qval) {
+ return i, true
+ }
+ } else if hit {
+ if rp.alogok {
+ break
+ }
+ if vesc {
+ c.value.Str = unescape(val[1 : len(val)-1])
+ } else {
+ c.value.Str = val[1 : len(val)-1]
+ }
+ c.value.Raw = val
+ c.value.Type = String
+ return i, true
+ }
+ case '{':
+ if pmatch && !hit {
+ i, hit = parseObject(c, i+1, rp.path)
+ if hit {
+ if rp.alogok {
+ break
+ }
+ return i, true
+ }
+ } else {
+ i, val = parseSquash(c.json, i)
+ if rp.query.on {
+ if procQuery(Result{Raw: val, Type: JSON}) {
+ return i, true
+ }
+ } else if hit {
+ if rp.alogok {
+ break
+ }
+ c.value.Raw = val
+ c.value.Type = JSON
+ return i, true
+ }
+ }
+ case '[':
+ if pmatch && !hit {
+ i, hit = parseArray(c, i+1, rp.path)
+ if hit {
+ if rp.alogok {
+ break
+ }
+ return i, true
+ }
+ } else {
+ i, val = parseSquash(c.json, i)
+ if rp.query.on {
+ if procQuery(Result{Raw: val, Type: JSON}) {
+ return i, true
+ }
+ } else if hit {
+ if rp.alogok {
+ break
+ }
+ c.value.Raw = val
+ c.value.Type = JSON
+ return i, true
+ }
+ }
+ case 'n':
+ if i+1 < len(c.json) && c.json[i+1] != 'u' {
+ num = true
+ break
+ }
+ fallthrough
+ case 't', 'f':
+ vc := c.json[i]
+ i, val = parseLiteral(c.json, i)
+ if rp.query.on {
+ var qval Result
+ qval.Raw = val
+ switch vc {
+ case 't':
+ qval.Type = True
+ case 'f':
+ qval.Type = False
+ }
+ if procQuery(qval) {
+ return i, true
+ }
+ } else if hit {
+ if rp.alogok {
+ break
+ }
+ c.value.Raw = val
+ switch vc {
+ case 't':
+ c.value.Type = True
+ case 'f':
+ c.value.Type = False
+ }
+ return i, true
+ }
+ case '+', '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+ 'i', 'I', 'N':
+ num = true
+ case ']':
+ if rp.arrch && rp.part == "#" {
+ if rp.alogok {
+ left, right, ok := splitPossiblePipe(rp.alogkey)
+ if ok {
+ rp.alogkey = left
+ c.pipe = right
+ c.piped = true
+ }
+ var indexes = make([]int, 0, 64)
+ var jsons = make([]byte, 0, 64)
+ jsons = append(jsons, '[')
+ for j, k := 0, 0; j < len(alog); j++ {
+ idx := alog[j]
+ for idx < len(c.json) {
+ switch c.json[idx] {
+ case ' ', '\t', '\r', '\n':
+ idx++
+ continue
+ }
+ break
+ }
+ if idx < len(c.json) && c.json[idx] != ']' {
+ _, res, ok := parseAny(c.json, idx, true)
+ if ok {
+ res := res.Get(rp.alogkey)
+ if res.Exists() {
+ if k > 0 {
+ jsons = append(jsons, ',')
+ }
+ raw := res.Raw
+ if len(raw) == 0 {
+ raw = res.String()
+ }
+ jsons = append(jsons, []byte(raw)...)
+ indexes = append(indexes, res.Index)
+ k++
+ }
+ }
+ }
+ }
+ jsons = append(jsons, ']')
+ c.value.Type = JSON
+ c.value.Raw = string(jsons)
+ c.value.Indexes = indexes
+ return i + 1, true
+ }
+ if rp.alogok {
+ break
+ }
+
+ c.value.Type = Number
+ c.value.Num = float64(h - 1)
+ c.value.Raw = strconv.Itoa(h - 1)
+ c.calcd = true
+ return i + 1, true
+ }
+ if !c.value.Exists() {
+ if len(multires) > 0 {
+ c.value = Result{
+ Raw: string(append(multires, ']')),
+ Type: JSON,
+ Indexes: queryIndexes,
+ }
+ } else if rp.query.all {
+ c.value = Result{
+ Raw: "[]",
+ Type: JSON,
+ }
+ }
+ }
+ return i + 1, false
+ }
+ if num {
+ i, val = parseNumber(c.json, i)
+ if rp.query.on {
+ var qval Result
+ qval.Raw = val
+ qval.Type = Number
+ qval.Num, _ = strconv.ParseFloat(val, 64)
+ if procQuery(qval) {
+ return i, true
+ }
+ } else if hit {
+ if rp.alogok {
+ break
+ }
+ c.value.Raw = val
+ c.value.Type = Number
+ c.value.Num, _ = strconv.ParseFloat(val, 64)
+ return i, true
+ }
+ }
+ break
+ }
+ }
+ return i, false
+}
+
+func splitPossiblePipe(path string) (left, right string, ok bool) {
+ // take a quick peek for the pipe character. If found we'll split the piped
+ // part of the path into the c.pipe field and shorten the rp.
+ var possible bool
+ for i := 0; i < len(path); i++ {
+ if path[i] == '|' {
+ possible = true
+ break
+ }
+ }
+ if !possible {
+ return
+ }
+
+ if len(path) > 0 && path[0] == '{' {
+ squashed := squash(path[1:])
+ if len(squashed) < len(path)-1 {
+ squashed = path[:len(squashed)+1]
+ remain := path[len(squashed):]
+ if remain[0] == '|' {
+ return squashed, remain[1:], true
+ }
+ }
+ return
+ }
+
+ // split the left and right side of the path with the pipe character as
+ // the delimiter. This is a little tricky because we'll need to basically
+ // parse the entire path.
+ for i := 0; i < len(path); i++ {
+ if path[i] == '\\' {
+ i++
+ } else if path[i] == '.' {
+ if i == len(path)-1 {
+ return
+ }
+ if path[i+1] == '#' {
+ i += 2
+ if i == len(path) {
+ return
+ }
+ if path[i] == '[' || path[i] == '(' {
+ var start, end byte
+ if path[i] == '[' {
+ start, end = '[', ']'
+ } else {
+ start, end = '(', ')'
+ }
+ // inside selector, balance brackets
+ i++
+ depth := 1
+ for ; i < len(path); i++ {
+ if path[i] == '\\' {
+ i++
+ } else if path[i] == start {
+ depth++
+ } else if path[i] == end {
+ depth--
+ if depth == 0 {
+ break
+ }
+ } else if path[i] == '"' {
+ // inside selector string, balance quotes
+ i++
+ for ; i < len(path); i++ {
+ if path[i] == '\\' {
+ i++
+ } else if path[i] == '"' {
+ break
+ }
+ }
+ }
+ }
+ }
+ }
+ } else if path[i] == '|' {
+ return path[:i], path[i+1:], true
+ }
+ }
+ return
+}
+
+// ForEachLine iterates through lines of JSON as specified by the JSON Lines
+// format (http://jsonlines.org/).
+// Each line is returned as a GJSON Result.
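+//
+// Example usage (illustrative; the input and variable names are assumed):
+//
+//  lines := `{"name": "Gilbert"}` + "\n" + `{"name": "Alexa"}`
+//  gjson.ForEachLine(lines, func(line gjson.Result) bool {
+//      println(line.Get("name").String())
+//      return true // return false to stop iterating
+//  })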
+func ForEachLine(json string, iterator func(line Result) bool) {
+ var res Result
+ var i int
+ for {
+ i, res, _ = parseAny(json, i, true)
+ if !res.Exists() {
+ break
+ }
+ if !iterator(res) {
+ return
+ }
+ }
+}
+
+type subSelector struct {
+ name string
+ path string
+}
+
+// parseSubSelectors returns the subselectors belonging to a '[path1,path2]' or
+// '{"field1":path1,"field2":path2}' type subSelection. It's expected that the
+// first character in path is either '[' or '{', and has already been checked
+// prior to calling this function.
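+// For example (illustrative), the path '{name.first,age}|@pretty' would yield
+// two subselectors ("name.first" and "age") and the out path "|@pretty".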
+func parseSubSelectors(path string) (sels []subSelector, out string, ok bool) {
+ modifier := 0
+ depth := 1
+ colon := 0
+ start := 1
+ i := 1
+ pushSel := func() {
+ var sel subSelector
+ if colon == 0 {
+ sel.path = path[start:i]
+ } else {
+ sel.name = path[start:colon]
+ sel.path = path[colon+1 : i]
+ }
+ sels = append(sels, sel)
+ colon = 0
+ modifier = 0
+ start = i + 1
+ }
+ for ; i < len(path); i++ {
+ switch path[i] {
+ case '\\':
+ i++
+ case '@':
+ if modifier == 0 && i > 0 && (path[i-1] == '.' || path[i-1] == '|') {
+ modifier = i
+ }
+ case ':':
+ if modifier == 0 && colon == 0 && depth == 1 {
+ colon = i
+ }
+ case ',':
+ if depth == 1 {
+ pushSel()
+ }
+ case '"':
+ i++
+ loop:
+ for ; i < len(path); i++ {
+ switch path[i] {
+ case '\\':
+ i++
+ case '"':
+ break loop
+ }
+ }
+ case '[', '(', '{':
+ depth++
+ case ']', ')', '}':
+ depth--
+ if depth == 0 {
+ pushSel()
+ path = path[i+1:]
+ return sels, path, true
+ }
+ }
+ }
+ return
+}
+
+// nameOfLast returns the name of the last component
+func nameOfLast(path string) string {
+ for i := len(path) - 1; i >= 0; i-- {
+ if path[i] == '|' || path[i] == '.' {
+ if i > 0 {
+ if path[i-1] == '\\' {
+ continue
+ }
+ }
+ return path[i+1:]
+ }
+ }
+ return path
+}
+
+func isSimpleName(component string) bool {
+ for i := 0; i < len(component); i++ {
+ if component[i] < ' ' {
+ return false
+ }
+ switch component[i] {
+ case '[', ']', '{', '}', '(', ')', '#', '|', '!':
+ return false
+ }
+ }
+ return true
+}
+
+var hexchars = [...]byte{
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+ 'a', 'b', 'c', 'd', 'e', 'f',
+}
+
+func appendHex16(dst []byte, x uint16) []byte {
+ return append(dst,
+ hexchars[x>>12&0xF], hexchars[x>>8&0xF],
+ hexchars[x>>4&0xF], hexchars[x>>0&0xF],
+ )
+}
+
+// AppendJSONString is a convenience function that converts the provided string
+// to a valid JSON string and appends it to dst.
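+//
+// For example (illustrative):
+//
+//  gjson.AppendJSONString(nil, `he said "hi"`) -> "he said \"hi\""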
+func AppendJSONString(dst []byte, s string) []byte {
+ dst = append(dst, make([]byte, len(s)+2)...)
+ dst = append(dst[:len(dst)-len(s)-2], '"')
+ for i := 0; i < len(s); i++ {
+ if s[i] < ' ' {
+ dst = append(dst, '\\')
+ switch s[i] {
+ case '\n':
+ dst = append(dst, 'n')
+ case '\r':
+ dst = append(dst, 'r')
+ case '\t':
+ dst = append(dst, 't')
+ default:
+ dst = append(dst, 'u')
+ dst = appendHex16(dst, uint16(s[i]))
+ }
+ } else if s[i] == '>' || s[i] == '<' || s[i] == '&' {
+ dst = append(dst, '\\', 'u')
+ dst = appendHex16(dst, uint16(s[i]))
+ } else if s[i] == '\\' {
+ dst = append(dst, '\\', '\\')
+ } else if s[i] == '"' {
+ dst = append(dst, '\\', '"')
+ } else if s[i] > 127 {
+ // read utf8 character
+ r, n := utf8.DecodeRuneInString(s[i:])
+ if n == 0 {
+ break
+ }
+ if r == utf8.RuneError && n == 1 {
+ dst = append(dst, `\ufffd`...)
+ } else if r == '\u2028' || r == '\u2029' {
+ dst = append(dst, `\u202`...)
+ dst = append(dst, hexchars[r&0xF])
+ } else {
+ dst = append(dst, s[i:i+n]...)
+ }
+ i = i + n - 1
+ } else {
+ dst = append(dst, s[i])
+ }
+ }
+ return append(dst, '"')
+}
+
+type parseContext struct {
+ json string
+ value Result
+ pipe string
+ piped bool
+ calcd bool
+ lines bool
+}
+
+// Get searches json for the specified path.
+// A path is in dot syntax, such as "name.last" or "age".
+// When the value is found it's returned immediately.
+//
+// A path is a series of keys separated by a dot.
+// A key may contain special wildcard characters '*' and '?'.
+// To access an array value use the index as the key.
+// To get the number of elements in an array or to access a child path, use
+// the '#' character.
+// The dot and wildcard characters can be escaped with '\'.
+//
+// {
+// "name": {"first": "Tom", "last": "Anderson"},
+// "age":37,
+// "children": ["Sara","Alex","Jack"],
+// "friends": [
+// {"first": "James", "last": "Murphy"},
+// {"first": "Roger", "last": "Craig"}
+// ]
+// }
+// "name.last" >> "Anderson"
+// "age" >> 37
+// "children" >> ["Sara","Alex","Jack"]
+// "children.#" >> 3
+// "children.1" >> "Alex"
+// "child*.2" >> "Jack"
+// "c?ildren.0" >> "Sara"
+// "friends.#.first" >> ["James","Roger"]
+//
+// This function expects that the json is well-formed, and does not validate.
+// Invalid json will not panic, but it may return unexpected results.
+// If you are consuming JSON from an unpredictable source then you may want to
+// use the Valid function first.
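+//
+// For example (illustrative), using the document above:
+//
+//  gjson.Get(json, "name.last").String() // "Anderson"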
+func Get(json, path string) Result {
+ if len(path) > 1 {
+ if (path[0] == '@' && !DisableModifiers) || path[0] == '!' {
+ // possible modifier
+ var ok bool
+ var npath string
+ var rjson string
+ if path[0] == '@' && !DisableModifiers {
+ npath, rjson, ok = execModifier(json, path)
+ } else if path[0] == '!' {
+ npath, rjson, ok = execStatic(json, path)
+ }
+ if ok {
+ path = npath
+ if len(path) > 0 && (path[0] == '|' || path[0] == '.') {
+ res := Get(rjson, path[1:])
+ res.Index = 0
+ res.Indexes = nil
+ return res
+ }
+ return Parse(rjson)
+ }
+ }
+ if path[0] == '[' || path[0] == '{' {
+ // using a subselector path
+ kind := path[0]
+ var ok bool
+ var subs []subSelector
+ subs, path, ok = parseSubSelectors(path)
+ if ok {
+ if len(path) == 0 || (path[0] == '|' || path[0] == '.') {
+ var b []byte
+ b = append(b, kind)
+ var i int
+ for _, sub := range subs {
+ res := Get(json, sub.path)
+ if res.Exists() {
+ if i > 0 {
+ b = append(b, ',')
+ }
+ if kind == '{' {
+ if len(sub.name) > 0 {
+ if sub.name[0] == '"' && Valid(sub.name) {
+ b = append(b, sub.name...)
+ } else {
+ b = AppendJSONString(b, sub.name)
+ }
+ } else {
+ last := nameOfLast(sub.path)
+ if isSimpleName(last) {
+ b = AppendJSONString(b, last)
+ } else {
+ b = AppendJSONString(b, "_")
+ }
+ }
+ b = append(b, ':')
+ }
+ var raw string
+ if len(res.Raw) == 0 {
+ raw = res.String()
+ if len(raw) == 0 {
+ raw = "null"
+ }
+ } else {
+ raw = res.Raw
+ }
+ b = append(b, raw...)
+ i++
+ }
+ }
+ b = append(b, kind+2)
+ var res Result
+ res.Raw = string(b)
+ res.Type = JSON
+ if len(path) > 0 {
+ res = res.Get(path[1:])
+ }
+ res.Index = 0
+ return res
+ }
+ }
+ }
+ }
+ var i int
+ var c = &parseContext{json: json}
+ if len(path) >= 2 && path[0] == '.' && path[1] == '.' {
+ c.lines = true
+ parseArray(c, 0, path[2:])
+ } else {
+ for ; i < len(c.json); i++ {
+ if c.json[i] == '{' {
+ i++
+ parseObject(c, i, path)
+ break
+ }
+ if c.json[i] == '[' {
+ i++
+ parseArray(c, i, path)
+ break
+ }
+ }
+ }
+ if c.piped {
+ res := c.value.Get(c.pipe)
+ res.Index = 0
+ return res
+ }
+ fillIndex(json, c)
+ return c.value
+}
+
+// GetBytes searches json for the specified path.
+// If working with bytes, this method is preferred over Get(string(data), path).
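+//
+// For example (illustrative; the input is assumed):
+//
+//  data := []byte(`{"name":{"last":"Anderson"}}`)
+//  gjson.GetBytes(data, "name.last").String() // "Anderson"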
+func GetBytes(json []byte, path string) Result {
+ return getBytes(json, path)
+}
+
+// runeit returns the rune from the \uXXXX sequence
+func runeit(json string) rune {
+ n, _ := strconv.ParseUint(json[:4], 16, 64)
+ return rune(n)
+}
+
+// unescape unescapes a string
+func unescape(json string) string {
+ var str = make([]byte, 0, len(json))
+ for i := 0; i < len(json); i++ {
+ switch {
+ default:
+ str = append(str, json[i])
+ case json[i] < ' ':
+ return string(str)
+ case json[i] == '\\':
+ i++
+ if i >= len(json) {
+ return string(str)
+ }
+ switch json[i] {
+ default:
+ return string(str)
+ case '\\':
+ str = append(str, '\\')
+ case '/':
+ str = append(str, '/')
+ case 'b':
+ str = append(str, '\b')
+ case 'f':
+ str = append(str, '\f')
+ case 'n':
+ str = append(str, '\n')
+ case 'r':
+ str = append(str, '\r')
+ case 't':
+ str = append(str, '\t')
+ case '"':
+ str = append(str, '"')
+ case 'u':
+ if i+5 > len(json) {
+ return string(str)
+ }
+ r := runeit(json[i+1:])
+ i += 5
+ if utf16.IsSurrogate(r) {
+ // need another code
+ if len(json[i:]) >= 6 && json[i] == '\\' &&
+ json[i+1] == 'u' {
+ // we expect it to be correct so just consume it
+ r = utf16.DecodeRune(r, runeit(json[i+2:]))
+ i += 6
+ }
+ }
+ // provide enough space to encode the largest utf8 possible
+ str = append(str, 0, 0, 0, 0, 0, 0, 0, 0)
+ n := utf8.EncodeRune(str[len(str)-8:], r)
+ str = str[:len(str)-8+n]
+ i-- // backtrack index by one
+ }
+ }
+ }
+ return string(str)
+}
+
+// Less returns true if a token is less than another token.
+// The caseSensitive parameter is used when the tokens are Strings.
+// The order when comparing two different types is:
+//
+// Null < False < Number < String < True < JSON
+//
+func (t Result) Less(token Result, caseSensitive bool) bool {
+ if t.Type < token.Type {
+ return true
+ }
+ if t.Type > token.Type {
+ return false
+ }
+ if t.Type == String {
+ if caseSensitive {
+ return t.Str < token.Str
+ }
+ return stringLessInsensitive(t.Str, token.Str)
+ }
+ if t.Type == Number {
+ return t.Num < token.Num
+ }
+ return t.Raw < token.Raw
+}
+
+func stringLessInsensitive(a, b string) bool {
+ for i := 0; i < len(a) && i < len(b); i++ {
+ if a[i] >= 'A' && a[i] <= 'Z' {
+ if b[i] >= 'A' && b[i] <= 'Z' {
+ // both are uppercase, do nothing
+ if a[i] < b[i] {
+ return true
+ } else if a[i] > b[i] {
+ return false
+ }
+ } else {
+ // a is uppercase, convert a to lowercase
+ if a[i]+32 < b[i] {
+ return true
+ } else if a[i]+32 > b[i] {
+ return false
+ }
+ }
+ } else if b[i] >= 'A' && b[i] <= 'Z' {
+ // b is uppercase, convert b to lowercase
+ if a[i] < b[i]+32 {
+ return true
+ } else if a[i] > b[i]+32 {
+ return false
+ }
+ } else {
+ // neither are uppercase
+ if a[i] < b[i] {
+ return true
+ } else if a[i] > b[i] {
+ return false
+ }
+ }
+ }
+ return len(a) < len(b)
+}
+
+// parseAny parses the next value from a json string.
+// A Result is returned when the hit param is set.
+// The return values are (i int, res Result, ok bool)
+func parseAny(json string, i int, hit bool) (int, Result, bool) {
+ var res Result
+ var val string
+ for ; i < len(json); i++ {
+ if json[i] == '{' || json[i] == '[' {
+ i, val = parseSquash(json, i)
+ if hit {
+ res.Raw = val
+ res.Type = JSON
+ }
+ var tmp parseContext
+ tmp.value = res
+ fillIndex(json, &tmp)
+ return i, tmp.value, true
+ }
+ if json[i] <= ' ' {
+ continue
+ }
+ var num bool
+ switch json[i] {
+ case '"':
+ i++
+ var vesc bool
+ var ok bool
+ i, val, vesc, ok = parseString(json, i)
+ if !ok {
+ return i, res, false
+ }
+ if hit {
+ res.Type = String
+ res.Raw = val
+ if vesc {
+ res.Str = unescape(val[1 : len(val)-1])
+ } else {
+ res.Str = val[1 : len(val)-1]
+ }
+ }
+ return i, res, true
+ case 'n':
+ if i+1 < len(json) && json[i+1] != 'u' {
+ num = true
+ break
+ }
+ fallthrough
+ case 't', 'f':
+ vc := json[i]
+ i, val = parseLiteral(json, i)
+ if hit {
+ res.Raw = val
+ switch vc {
+ case 't':
+ res.Type = True
+ case 'f':
+ res.Type = False
+ }
+ return i, res, true
+ }
+ case '+', '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+ 'i', 'I', 'N':
+ num = true
+ }
+ if num {
+ i, val = parseNumber(json, i)
+ if hit {
+ res.Raw = val
+ res.Type = Number
+ res.Num, _ = strconv.ParseFloat(val, 64)
+ }
+ return i, res, true
+ }
+
+ }
+ return i, res, false
+}
+
+// GetMany searches json for multiple paths.
+// The return value is a Result array where the number of items
+// will be equal to the number of input paths.
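+//
+// For example (illustrative), using the document from the Get example:
+//
+//  results := gjson.GetMany(json, "name.first", "name.last", "age")
+//  // results[0] -> "Tom", results[1] -> "Anderson", results[2] -> 37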
+func GetMany(json string, path ...string) []Result {
+ res := make([]Result, len(path))
+ for i, path := range path {
+ res[i] = Get(json, path)
+ }
+ return res
+}
+
+// GetManyBytes searches json for multiple paths.
+// The return value is a Result array where the number of items
+// will be equal to the number of input paths.
+func GetManyBytes(json []byte, path ...string) []Result {
+ res := make([]Result, len(path))
+ for i, path := range path {
+ res[i] = GetBytes(json, path)
+ }
+ return res
+}
+
+func validpayload(data []byte, i int) (outi int, ok bool) {
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ i, ok = validany(data, i)
+ if !ok {
+ return i, false
+ }
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ return i, false
+ case ' ', '\t', '\n', '\r':
+ continue
+ }
+ }
+ return i, true
+ case ' ', '\t', '\n', '\r':
+ continue
+ }
+ }
+ return i, false
+}
+func validany(data []byte, i int) (outi int, ok bool) {
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ return i, false
+ case ' ', '\t', '\n', '\r':
+ continue
+ case '{':
+ return validobject(data, i+1)
+ case '[':
+ return validarray(data, i+1)
+ case '"':
+ return validstring(data, i+1)
+ case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ return validnumber(data, i+1)
+ case 't':
+ return validtrue(data, i+1)
+ case 'f':
+ return validfalse(data, i+1)
+ case 'n':
+ return validnull(data, i+1)
+ }
+ }
+ return i, false
+}
+func validobject(data []byte, i int) (outi int, ok bool) {
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ return i, false
+ case ' ', '\t', '\n', '\r':
+ continue
+ case '}':
+ return i + 1, true
+ case '"':
+ key:
+ if i, ok = validstring(data, i+1); !ok {
+ return i, false
+ }
+ if i, ok = validcolon(data, i); !ok {
+ return i, false
+ }
+ if i, ok = validany(data, i); !ok {
+ return i, false
+ }
+ if i, ok = validcomma(data, i, '}'); !ok {
+ return i, false
+ }
+ if data[i] == '}' {
+ return i + 1, true
+ }
+ i++
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ return i, false
+ case ' ', '\t', '\n', '\r':
+ continue
+ case '"':
+ goto key
+ }
+ }
+ return i, false
+ }
+ }
+ return i, false
+}
+func validcolon(data []byte, i int) (outi int, ok bool) {
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ return i, false
+ case ' ', '\t', '\n', '\r':
+ continue
+ case ':':
+ return i + 1, true
+ }
+ }
+ return i, false
+}
+func validcomma(data []byte, i int, end byte) (outi int, ok bool) {
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ return i, false
+ case ' ', '\t', '\n', '\r':
+ continue
+ case ',':
+ return i, true
+ case end:
+ return i, true
+ }
+ }
+ return i, false
+}
+func validarray(data []byte, i int) (outi int, ok bool) {
+ for ; i < len(data); i++ {
+ switch data[i] {
+ default:
+ for ; i < len(data); i++ {
+ if i, ok = validany(data, i); !ok {
+ return i, false
+ }
+ if i, ok = validcomma(data, i, ']'); !ok {
+ return i, false
+ }
+ if data[i] == ']' {
+ return i + 1, true
+ }
+ }
+ case ' ', '\t', '\n', '\r':
+ continue
+ case ']':
+ return i + 1, true
+ }
+ }
+ return i, false
+}
+func validstring(data []byte, i int) (outi int, ok bool) {
+ for ; i < len(data); i++ {
+ if data[i] < ' ' {
+ return i, false
+ } else if data[i] == '\\' {
+ i++
+ if i == len(data) {
+ return i, false
+ }
+ switch data[i] {
+ default:
+ return i, false
+ case '"', '\\', '/', 'b', 'f', 'n', 'r', 't':
+ case 'u':
+ for j := 0; j < 4; j++ {
+ i++
+ if i >= len(data) {
+ return i, false
+ }
+ if !((data[i] >= '0' && data[i] <= '9') ||
+ (data[i] >= 'a' && data[i] <= 'f') ||
+ (data[i] >= 'A' && data[i] <= 'F')) {
+ return i, false
+ }
+ }
+ }
+ } else if data[i] == '"' {
+ return i + 1, true
+ }
+ }
+ return i, false
+}
+func validnumber(data []byte, i int) (outi int, ok bool) {
+ i--
+ // sign
+ if data[i] == '-' {
+ i++
+ if i == len(data) {
+ return i, false
+ }
+ if data[i] < '0' || data[i] > '9' {
+ return i, false
+ }
+ }
+ // int
+ if i == len(data) {
+ return i, false
+ }
+ if data[i] == '0' {
+ i++
+ } else {
+ for ; i < len(data); i++ {
+ if data[i] >= '0' && data[i] <= '9' {
+ continue
+ }
+ break
+ }
+ }
+ // frac
+ if i == len(data) {
+ return i, true
+ }
+ if data[i] == '.' {
+ i++
+ if i == len(data) {
+ return i, false
+ }
+ if data[i] < '0' || data[i] > '9' {
+ return i, false
+ }
+ i++
+ for ; i < len(data); i++ {
+ if data[i] >= '0' && data[i] <= '9' {
+ continue
+ }
+ break
+ }
+ }
+ // exp
+ if i == len(data) {
+ return i, true
+ }
+ if data[i] == 'e' || data[i] == 'E' {
+ i++
+ if i == len(data) {
+ return i, false
+ }
+ if data[i] == '+' || data[i] == '-' {
+ i++
+ }
+ if i == len(data) {
+ return i, false
+ }
+ if data[i] < '0' || data[i] > '9' {
+ return i, false
+ }
+ i++
+ for ; i < len(data); i++ {
+ if data[i] >= '0' && data[i] <= '9' {
+ continue
+ }
+ break
+ }
+ }
+ return i, true
+}
+
+func validtrue(data []byte, i int) (outi int, ok bool) {
+ if i+3 <= len(data) && data[i] == 'r' && data[i+1] == 'u' &&
+ data[i+2] == 'e' {
+ return i + 3, true
+ }
+ return i, false
+}
+func validfalse(data []byte, i int) (outi int, ok bool) {
+ if i+4 <= len(data) && data[i] == 'a' && data[i+1] == 'l' &&
+ data[i+2] == 's' && data[i+3] == 'e' {
+ return i + 4, true
+ }
+ return i, false
+}
+func validnull(data []byte, i int) (outi int, ok bool) {
+ if i+3 <= len(data) && data[i] == 'u' && data[i+1] == 'l' &&
+ data[i+2] == 'l' {
+ return i + 3, true
+ }
+ return i, false
+}
+
+// Valid returns true if the input is valid json.
+//
+// if !gjson.Valid(json) {
+// return errors.New("invalid json")
+// }
+// value := gjson.Get(json, "name.last")
+//
+func Valid(json string) bool {
+ _, ok := validpayload(stringBytes(json), 0)
+ return ok
+}
+
+// ValidBytes returns true if the input is valid json.
+//
+// if !gjson.Valid(json) {
+// return errors.New("invalid json")
+// }
+// value := gjson.Get(json, "name.last")
+//
+// If working with bytes, this method is preferred over Valid(string(data)).
+//
+func ValidBytes(json []byte) bool {
+ _, ok := validpayload(json, 0)
+ return ok
+}
+
+func parseUint(s string) (n uint64, ok bool) {
+ var i int
+ if i == len(s) {
+ return 0, false
+ }
+ for ; i < len(s); i++ {
+ if s[i] >= '0' && s[i] <= '9' {
+ n = n*10 + uint64(s[i]-'0')
+ } else {
+ return 0, false
+ }
+ }
+ return n, true
+}
+
+func parseInt(s string) (n int64, ok bool) {
+ var i int
+ var sign bool
+ if len(s) > 0 && s[0] == '-' {
+ sign = true
+ i++
+ }
+ if i == len(s) {
+ return 0, false
+ }
+ for ; i < len(s); i++ {
+ if s[i] >= '0' && s[i] <= '9' {
+ n = n*10 + int64(s[i]-'0')
+ } else {
+ return 0, false
+ }
+ }
+ if sign {
+ return n * -1, true
+ }
+ return n, true
+}
+
+// safeInt validates a given JSON number and ensures it lies within the
+// minimum and maximum safe JSON integers.
+func safeInt(f float64) (n int64, ok bool) {
+ // https://tc39.es/ecma262/#sec-number.min_safe_integer
+ // https://tc39.es/ecma262/#sec-number.max_safe_integer
+ if f < -9007199254740991 || f > 9007199254740991 {
+ return 0, false
+ }
+ return int64(f), true
+}
+
+// execStatic parses the path to find a static value.
+// The input expects that the path already starts with a '!'
+func execStatic(json, path string) (pathOut, res string, ok bool) {
+ name := path[1:]
+ if len(name) > 0 {
+ switch name[0] {
+ case '{', '[', '"', '+', '-', '0', '1', '2', '3', '4', '5', '6', '7',
+ '8', '9':
+ _, res = parseSquash(name, 0)
+ pathOut = name[len(res):]
+ return pathOut, res, true
+ }
+ }
+ for i := 1; i < len(path); i++ {
+ if path[i] == '|' {
+ pathOut = path[i:]
+ name = path[1:i]
+ break
+ }
+ if path[i] == '.' {
+ pathOut = path[i:]
+ name = path[1:i]
+ break
+ }
+ }
+ switch strings.ToLower(name) {
+ case "true", "false", "null", "nan", "inf":
+ return pathOut, name, true
+ }
+ return pathOut, res, false
+}
+
+// execModifier parses the path to find a matching modifier function.
+// The input expects that the path already starts with a '@'
+func execModifier(json, path string) (pathOut, res string, ok bool) {
+ name := path[1:]
+ var hasArgs bool
+ for i := 1; i < len(path); i++ {
+ if path[i] == ':' {
+ pathOut = path[i+1:]
+ name = path[1:i]
+ hasArgs = len(pathOut) > 0
+ break
+ }
+ if path[i] == '|' {
+ pathOut = path[i:]
+ name = path[1:i]
+ break
+ }
+ if path[i] == '.' {
+ pathOut = path[i:]
+ name = path[1:i]
+ break
+ }
+ }
+ if fn, ok := modifiers[name]; ok {
+ var args string
+ if hasArgs {
+ var parsedArgs bool
+ switch pathOut[0] {
+ case '{', '[', '"':
+ res := Parse(pathOut)
+ if res.Exists() {
+ args = squash(pathOut)
+ pathOut = pathOut[len(args):]
+ parsedArgs = true
+ }
+ }
+ if !parsedArgs {
+ idx := strings.IndexByte(pathOut, '|')
+ if idx == -1 {
+ args = pathOut
+ pathOut = ""
+ } else {
+ args = pathOut[:idx]
+ pathOut = pathOut[idx:]
+ }
+ }
+ }
+ return pathOut, fn(json, args), true
+ }
+ return pathOut, res, false
+}
+
+// unwrap removes the '[]' or '{}' characters around json
+func unwrap(json string) string {
+ json = trim(json)
+ if len(json) >= 2 && (json[0] == '[' || json[0] == '{') {
+ json = json[1 : len(json)-1]
+ }
+ return json
+}
+
+// DisableModifiers will disable the modifier syntax
+var DisableModifiers = false
+
+var modifiers = map[string]func(json, arg string) string{
+ "pretty": modPretty,
+ "ugly": modUgly,
+ "reverse": modReverse,
+ "this": modThis,
+ "flatten": modFlatten,
+ "join": modJoin,
+ "valid": modValid,
+ "keys": modKeys,
+ "values": modValues,
+ "tostr": modToStr,
+ "fromstr": modFromStr,
+ "group": modGroup,
+}
+
+// AddModifier binds a custom modifier command to the GJSON syntax.
+// This operation is not thread safe and should be executed prior to
+// using any other gjson functions.
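+//
+// For example, an illustrative "case" modifier (not part of this package, and
+// assuming the strings package is imported) could be added like this:
+//
+//  gjson.AddModifier("case", func(json, arg string) string {
+//      if arg == "upper" {
+//          return strings.ToUpper(json)
+//      }
+//      return json
+//  })
+//  // "children.@case:upper" -> ["SARA","ALEX","JACK"]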
+func AddModifier(name string, fn func(json, arg string) string) {
+ modifiers[name] = fn
+}
+
+// ModifierExists returns true when the specified modifier exists.
+func ModifierExists(name string, fn func(json, arg string) string) bool {
+ _, ok := modifiers[name]
+ return ok
+}
+
+// cleanWS removes any non-whitespace characters from the string.
+func cleanWS(s string) string {
+ for i := 0; i < len(s); i++ {
+ switch s[i] {
+ case ' ', '\t', '\n', '\r':
+ continue
+ default:
+ var s2 []byte
+ for i := 0; i < len(s); i++ {
+ switch s[i] {
+ case ' ', '\t', '\n', '\r':
+ s2 = append(s2, s[i])
+ }
+ }
+ return string(s2)
+ }
+ }
+ return s
+}
+
+// @pretty modifier makes the json look nice.
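+// The arg may be a json object that customizes the output, for example
+// (illustrative): @pretty:{"sortKeys":true,"indent":"  ","width":80}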
+func modPretty(json, arg string) string {
+ if len(arg) > 0 {
+ opts := *pretty.DefaultOptions
+ Parse(arg).ForEach(func(key, value Result) bool {
+ switch key.String() {
+ case "sortKeys":
+ opts.SortKeys = value.Bool()
+ case "indent":
+ opts.Indent = cleanWS(value.String())
+ case "prefix":
+ opts.Prefix = cleanWS(value.String())
+ case "width":
+ opts.Width = int(value.Int())
+ }
+ return true
+ })
+ return bytesString(pretty.PrettyOptions(stringBytes(json), &opts))
+ }
+ return bytesString(pretty.Pretty(stringBytes(json)))
+}
+
+// @this returns the current element. Can be used to retrieve the root element.
+func modThis(json, arg string) string {
+ return json
+}
+
+// @ugly modifier removes all whitespace.
+func modUgly(json, arg string) string {
+ return bytesString(pretty.Ugly(stringBytes(json)))
+}
+
+// @reverse reverses array elements or root object members.
+func modReverse(json, arg string) string {
+ res := Parse(json)
+ if res.IsArray() {
+ var values []Result
+ res.ForEach(func(_, value Result) bool {
+ values = append(values, value)
+ return true
+ })
+ out := make([]byte, 0, len(json))
+ out = append(out, '[')
+ for i, j := len(values)-1, 0; i >= 0; i, j = i-1, j+1 {
+ if j > 0 {
+ out = append(out, ',')
+ }
+ out = append(out, values[i].Raw...)
+ }
+ out = append(out, ']')
+ return bytesString(out)
+ }
+ if res.IsObject() {
+ var keyValues []Result
+ res.ForEach(func(key, value Result) bool {
+ keyValues = append(keyValues, key, value)
+ return true
+ })
+ out := make([]byte, 0, len(json))
+ out = append(out, '{')
+ for i, j := len(keyValues)-2, 0; i >= 0; i, j = i-2, j+1 {
+ if j > 0 {
+ out = append(out, ',')
+ }
+ out = append(out, keyValues[i+0].Raw...)
+ out = append(out, ':')
+ out = append(out, keyValues[i+1].Raw...)
+ }
+ out = append(out, '}')
+ return bytesString(out)
+ }
+ return json
+}
+
+// @flatten an array with child arrays.
+// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,[6,7]]
+// The {"deep":true} arg can be provide for deep flattening.
+// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,6,7]
+// The original json is returned when the json is not an array.
+func modFlatten(json, arg string) string {
+ res := Parse(json)
+ if !res.IsArray() {
+ return json
+ }
+ var deep bool
+ if arg != "" {
+ Parse(arg).ForEach(func(key, value Result) bool {
+ if key.String() == "deep" {
+ deep = value.Bool()
+ }
+ return true
+ })
+ }
+ var out []byte
+ out = append(out, '[')
+ var idx int
+ res.ForEach(func(_, value Result) bool {
+ var raw string
+ if value.IsArray() {
+ if deep {
+ raw = unwrap(modFlatten(value.Raw, arg))
+ } else {
+ raw = unwrap(value.Raw)
+ }
+ } else {
+ raw = value.Raw
+ }
+ raw = strings.TrimSpace(raw)
+ if len(raw) > 0 {
+ if idx > 0 {
+ out = append(out, ',')
+ }
+ out = append(out, raw...)
+ idx++
+ }
+ return true
+ })
+ out = append(out, ']')
+ return bytesString(out)
+}
+
+// @keys extracts the keys from an object.
+// {"first":"Tom","last":"Smith"} -> ["first","last"]
+func modKeys(json, arg string) string {
+ v := Parse(json)
+ if !v.Exists() {
+ return "[]"
+ }
+ obj := v.IsObject()
+ var out strings.Builder
+ out.WriteByte('[')
+ var i int
+ v.ForEach(func(key, _ Result) bool {
+ if i > 0 {
+ out.WriteByte(',')
+ }
+ if obj {
+ out.WriteString(key.Raw)
+ } else {
+ out.WriteString("null")
+ }
+ i++
+ return true
+ })
+ out.WriteByte(']')
+ return out.String()
+}
+
+// @values extracts the values from an object.
+// {"first":"Tom","last":"Smith"} -> ["Tom","Smith"]
+func modValues(json, arg string) string {
+ v := Parse(json)
+ if !v.Exists() {
+ return "[]"
+ }
+ if v.IsArray() {
+ return json
+ }
+ var out strings.Builder
+ out.WriteByte('[')
+ var i int
+ v.ForEach(func(_, value Result) bool {
+ if i > 0 {
+ out.WriteByte(',')
+ }
+ out.WriteString(value.Raw)
+ i++
+ return true
+ })
+ out.WriteByte(']')
+ return out.String()
+}
+
+// @join joins multiple objects into a single object.
+// [{"first":"Tom"},{"last":"Smith"}] -> {"first":"Tom","last":"Smith"}
+// The {"preserve":true} arg can be provided to specify that duplicate keys
+// should be preserved.
+// [{"first":"Tom","age":37},{"age":41}] -> {"first":"Tom","age":37,"age":41}
+// Without preserved keys:
+// [{"first":"Tom","age":37},{"age":41}] -> {"first":"Tom","age":41}
+// The original json is returned when the json is not an array.
+func modJoin(json, arg string) string {
+ res := Parse(json)
+ if !res.IsArray() {
+ return json
+ }
+ var preserve bool
+ if arg != "" {
+ Parse(arg).ForEach(func(key, value Result) bool {
+ if key.String() == "preserve" {
+ preserve = value.Bool()
+ }
+ return true
+ })
+ }
+ var out []byte
+ out = append(out, '{')
+ if preserve {
+ // Preserve duplicate keys.
+ var idx int
+ res.ForEach(func(_, value Result) bool {
+ if !value.IsObject() {
+ return true
+ }
+ if idx > 0 {
+ out = append(out, ',')
+ }
+ out = append(out, unwrap(value.Raw)...)
+ idx++
+ return true
+ })
+ } else {
+ // Deduplicate keys and generate an object with stable ordering.
+ var keys []Result
+ kvals := make(map[string]Result)
+ res.ForEach(func(_, value Result) bool {
+ if !value.IsObject() {
+ return true
+ }
+ value.ForEach(func(key, value Result) bool {
+ k := key.String()
+ if _, ok := kvals[k]; !ok {
+ keys = append(keys, key)
+ }
+ kvals[k] = value
+ return true
+ })
+ return true
+ })
+ for i := 0; i < len(keys); i++ {
+ if i > 0 {
+ out = append(out, ',')
+ }
+ out = append(out, keys[i].Raw...)
+ out = append(out, ':')
+ out = append(out, kvals[keys[i].String()].Raw...)
+ }
+ }
+ out = append(out, '}')
+ return bytesString(out)
+}
+
+// @valid ensures that the json is valid before moving on. An empty string is
+// returned when the json is not valid, otherwise it returns the original json.
+func modValid(json, arg string) string {
+ if !Valid(json) {
+ return ""
+ }
+ return json
+}
+
+// @fromstr converts a string to json
+// "{\"id\":1023,\"name\":\"alert\"}" -> {"id":1023,"name":"alert"}
+func modFromStr(json, arg string) string {
+ if !Valid(json) {
+ return ""
+ }
+ return Parse(json).String()
+}
+
+// @tostr converts json to a string
+// {"id":1023,"name":"alert"} -> "{\"id\":1023,\"name\":\"alert\"}"
+func modToStr(str, arg string) string {
+ return string(AppendJSONString(nil, str))
+}
+
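+// @group groups the values of an object of arrays into an array of objects,
+// e.g. (illustrative):
+// {"id":["123","456"],"val":[2,1]} -> [{"id":"123","val":2},{"id":"456","val":1}]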
+func modGroup(json, arg string) string {
+ res := Parse(json)
+ if !res.IsObject() {
+ return ""
+ }
+ var all [][]byte
+ res.ForEach(func(key, value Result) bool {
+ if !value.IsArray() {
+ return true
+ }
+ var idx int
+ value.ForEach(func(_, value Result) bool {
+ if idx == len(all) {
+ all = append(all, []byte{})
+ }
+ all[idx] = append(all[idx], ("," + key.Raw + ":" + value.Raw)...)
+ idx++
+ return true
+ })
+ return true
+ })
+ var data []byte
+ data = append(data, '[')
+ for i, item := range all {
+ if i > 0 {
+ data = append(data, ',')
+ }
+ data = append(data, '{')
+ data = append(data, item[1:]...)
+ data = append(data, '}')
+ }
+ data = append(data, ']')
+ return string(data)
+}
+
+// stringHeader instead of reflect.StringHeader
+type stringHeader struct {
+ data unsafe.Pointer
+ len int
+}
+
+// sliceHeader instead of reflect.SliceHeader
+type sliceHeader struct {
+ data unsafe.Pointer
+ len int
+ cap int
+}
+
+// getBytes casts the input json bytes to a string and safely returns the
+// results as uniquely allocated data. This operation is intended to minimize
+// copies and allocations when working with large json documents as []byte.
+func getBytes(json []byte, path string) Result {
+ var result Result
+ if json != nil {
+ // unsafe cast to string
+ result = Get(*(*string)(unsafe.Pointer(&json)), path)
+ // safely get the string headers
+ rawhi := *(*stringHeader)(unsafe.Pointer(&result.Raw))
+ strhi := *(*stringHeader)(unsafe.Pointer(&result.Str))
+ // create byte slice headers
+ rawh := sliceHeader{data: rawhi.data, len: rawhi.len, cap: rawhi.len}
+ strh := sliceHeader{data: strhi.data, len: strhi.len, cap: rawhi.len}
+ if strh.data == nil {
+ // str is nil
+ if rawh.data == nil {
+ // raw is nil
+ result.Raw = ""
+ } else {
+ // raw has data, safely copy the slice header to a string
+ result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh)))
+ }
+ result.Str = ""
+ } else if rawh.data == nil {
+ // raw is nil
+ result.Raw = ""
+ // str has data, safely copy the slice header to a string
+ result.Str = string(*(*[]byte)(unsafe.Pointer(&strh)))
+ } else if uintptr(strh.data) >= uintptr(rawh.data) &&
+ uintptr(strh.data)+uintptr(strh.len) <=
+ uintptr(rawh.data)+uintptr(rawh.len) {
+ // Str is a substring of Raw.
+ start := uintptr(strh.data) - uintptr(rawh.data)
+ // safely copy the raw slice header
+ result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh)))
+ // substring the raw
+ result.Str = result.Raw[start : start+uintptr(strh.len)]
+ } else {
+ // safely copy both the raw and str slice headers to strings
+ result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh)))
+ result.Str = string(*(*[]byte)(unsafe.Pointer(&strh)))
+ }
+ }
+ return result
+}
+
+// fillIndex finds the position of Raw data and assigns it to the Index field
+// of the resulting value. If the position cannot be found then Index zero is
+// used instead.
+func fillIndex(json string, c *parseContext) {
+ if len(c.value.Raw) > 0 && !c.calcd {
+ jhdr := *(*stringHeader)(unsafe.Pointer(&json))
+ rhdr := *(*stringHeader)(unsafe.Pointer(&(c.value.Raw)))
+ c.value.Index = int(uintptr(rhdr.data) - uintptr(jhdr.data))
+ if c.value.Index < 0 || c.value.Index >= len(json) {
+ c.value.Index = 0
+ }
+ }
+}
+
+func stringBytes(s string) []byte {
+ return *(*[]byte)(unsafe.Pointer(&sliceHeader{
+ data: (*stringHeader)(unsafe.Pointer(&s)).data,
+ len: len(s),
+ cap: len(s),
+ }))
+}
+
+func bytesString(b []byte) string {
+ return *(*string)(unsafe.Pointer(&b))
+}
+
+func revSquash(json string) string {
+ // reverse squash
+ // expects that the tail character is a ']' or '}' or ')' or '"'
+ // squash the value, ignoring all nested arrays and objects.
+ i := len(json) - 1
+ var depth int
+ if json[i] != '"' {
+ depth++
+ }
+ if json[i] == '}' || json[i] == ']' || json[i] == ')' {
+ i--
+ }
+ for ; i >= 0; i-- {
+ switch json[i] {
+ case '"':
+ i--
+ for ; i >= 0; i-- {
+ if json[i] == '"' {
+ esc := 0
+ for i > 0 && json[i-1] == '\\' {
+ i--
+ esc++
+ }
+ if esc%2 == 1 {
+ continue
+ }
+ i += esc
+ break
+ }
+ }
+ if depth == 0 {
+ if i < 0 {
+ i = 0
+ }
+ return json[i:]
+ }
+ case '}', ']', ')':
+ depth++
+ case '{', '[', '(':
+ depth--
+ if depth == 0 {
+ return json[i:]
+ }
+ }
+ }
+ return json
+}
+
+// Paths returns the original GJSON paths for a Result where the Result came
+// from a simple query path that returns an array, like:
+//
+// gjson.Get(json, "friends.#.first")
+//
+// The returned value will be in the form of a JSON array:
+//
+// ["friends.0.first","friends.1.first","friends.2.first"]
+//
+// The param 'json' must be the original JSON used when calling Get.
+//
+// Returns nil if the paths cannot be determined, which can happen
+// when the Result came from a path that contained a multipath, modifier,
+// or a nested query.
+func (t Result) Paths(json string) []string {
+ if t.Indexes == nil {
+ return nil
+ }
+ paths := make([]string, 0, len(t.Indexes))
+ t.ForEach(func(_, value Result) bool {
+ paths = append(paths, value.Path(json))
+ return true
+ })
+ if len(paths) != len(t.Indexes) {
+ return nil
+ }
+ return paths
+}
+
+// Path returns the original GJSON path for a Result where the Result came
+// from a simple path that returns a single value, like:
+//
+// gjson.Get(json, "friends.#(last=Murphy)")
+//
+// The returned value will be in the form of a JSON string:
+//
+// "friends.0"
+//
+// The param 'json' must be the original JSON used when calling Get.
+//
+// Returns an empty string if the paths cannot be determined, which can happen
+// when the Result came from a path that contained a multipath, modifier,
+// or a nested query.
+func (t Result) Path(json string) string {
+ var path []byte
+ var comps []string // raw components
+ i := t.Index - 1
+ if t.Index+len(t.Raw) > len(json) {
+ // The Result does not fit within the bounds of the provided JSON.
+ goto fail
+ }
+ if !strings.HasPrefix(json[t.Index:], t.Raw) {
+ // Result is not at the expected JSON index.
+ goto fail
+ }
+ for ; i >= 0; i-- {
+ if json[i] <= ' ' {
+ continue
+ }
+ if json[i] == ':' {
+ // inside of object, get the key
+ for ; i >= 0; i-- {
+ if json[i] != '"' {
+ continue
+ }
+ break
+ }
+ raw := revSquash(json[:i+1])
+ i = i - len(raw)
+ comps = append(comps, raw)
+ // key gotten, now squash the rest
+ raw = revSquash(json[:i+1])
+ i = i - len(raw)
+ i++ // increment the index for next loop step
+ } else if json[i] == '{' {
+ // Encountered an open object. The original result was probably an
+ // object key.
+ goto fail
+ } else if json[i] == ',' || json[i] == '[' {
+ // inside of an array, count the position
+ var arrIdx int
+ if json[i] == ',' {
+ arrIdx++
+ i--
+ }
+ for ; i >= 0; i-- {
+ if json[i] == ':' {
+ // Encountered an unexpected colon. The original result was
+ // probably an object key.
+ goto fail
+ } else if json[i] == ',' {
+ arrIdx++
+ } else if json[i] == '[' {
+ comps = append(comps, strconv.Itoa(arrIdx))
+ break
+ } else if json[i] == ']' || json[i] == '}' || json[i] == '"' {
+ raw := revSquash(json[:i+1])
+ i = i - len(raw) + 1
+ }
+ }
+ }
+ }
+ if len(comps) == 0 {
+ if DisableModifiers {
+ goto fail
+ }
+ return "@this"
+ }
+ for i := len(comps) - 1; i >= 0; i-- {
+ rcomp := Parse(comps[i])
+ if !rcomp.Exists() {
+ goto fail
+ }
+ comp := escapeComp(rcomp.String())
+ path = append(path, '.')
+ path = append(path, comp...)
+ }
+ if len(path) > 0 {
+ path = path[1:]
+ }
+ return string(path)
+fail:
+ return ""
+}
+
+// isSafePathKeyChar returns true if the input character does not need to be
+// escaped when used in a path component.
+func isSafePathKeyChar(c byte) bool {
+ return c <= ' ' || c > '~' || c == '_' || c == '-' || c == ':' ||
+ (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
+ (c >= '0' && c <= '9')
+}
+
+// escapeComp escapes a path component, making it safe for generating a
+// path for later use.
+func escapeComp(comp string) string {
+ for i := 0; i < len(comp); i++ {
+ if !isSafePathKeyChar(comp[i]) {
+ ncomp := []byte(comp[:i])
+ for ; i < len(comp); i++ {
+ if !isSafePathKeyChar(comp[i]) {
+ ncomp = append(ncomp, '\\')
+ }
+ ncomp = append(ncomp, comp[i])
+ }
+ return string(ncomp)
+ }
+ }
+ return comp
+}
diff --git a/vendor/github.com/tidwall/gjson/logo.png b/vendor/github.com/tidwall/gjson/logo.png
new file mode 100644
index 00000000..17a8bbe9
--- /dev/null
+++ b/vendor/github.com/tidwall/gjson/logo.png
Binary files differ
diff --git a/vendor/github.com/tidwall/match/LICENSE b/vendor/github.com/tidwall/match/LICENSE
new file mode 100644
index 00000000..58f5819a
--- /dev/null
+++ b/vendor/github.com/tidwall/match/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Josh Baker
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/tidwall/match/README.md b/vendor/github.com/tidwall/match/README.md
new file mode 100644
index 00000000..5fdd4cf6
--- /dev/null
+++ b/vendor/github.com/tidwall/match/README.md
@@ -0,0 +1,29 @@
+# Match
+
+[![GoDoc](https://godoc.org/github.com/tidwall/match?status.svg)](https://godoc.org/github.com/tidwall/match)
+
+Match is a very simple pattern matcher where '*' matches any
+number of characters and '?' matches any one character.
+
+## Installing
+
+```
+go get -u github.com/tidwall/match
+```
+
+## Example
+
+```go
+match.Match("hello", "*llo")
+match.Match("jello", "?ello")
+match.Match("hello", "h*o")
+```
+
+
+## Contact
+
+Josh Baker [@tidwall](http://twitter.com/tidwall)
+
+## License
+
+Match source code is available under the MIT [License](/LICENSE).
diff --git a/vendor/github.com/tidwall/match/match.go b/vendor/github.com/tidwall/match/match.go
new file mode 100644
index 00000000..11da28f1
--- /dev/null
+++ b/vendor/github.com/tidwall/match/match.go
@@ -0,0 +1,237 @@
+// Package match provides a simple pattern matcher with unicode support.
+package match
+
+import (
+ "unicode/utf8"
+)
+
+// Match returns true if str matches pattern. This is a very
+// simple wildcard match where '*' matches any number of characters
+// and '?' matches any one character.
+//
+// pattern:
+// { term }
+// term:
+// '*' matches any sequence of characters
+// '?' matches any single character
+// c matches character c (c != '*', '?', '\\')
+// '\\' c matches character c
+//
+func Match(str, pattern string) bool {
+ if pattern == "*" {
+ return true
+ }
+ return match(str, pattern, 0, nil, -1) == rMatch
+}
+
+// MatchLimit is the same as Match but will limit the complexity of the match
+// operation. This is to avoid long-running matches, specifically to avoid ReDoS
+// attacks from arbitrary inputs.
+//
+// How it works:
+// The underlying match routine is recursive and may call itself when it
+// encounters a sandwiched wildcard pattern, such as: `user:*:name`.
+// Every time it calls itself, a counter is incremented.
+// The operation is stopped when counter > maxcomp*len(str).
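+//
+// Example usage (illustrative; the inputs are assumed):
+//
+//  matched, stopped := match.MatchLimit("hello", "*llo", 10000)
+//  // matched == true, stopped == false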
+func MatchLimit(str, pattern string, maxcomp int) (matched, stopped bool) {
+ if pattern == "*" {
+ return true, false
+ }
+ counter := 0
+ r := match(str, pattern, len(str), &counter, maxcomp)
+ if r == rStop {
+ return false, true
+ }
+ return r == rMatch, false
+}
+
+type result int
+
+const (
+ rNoMatch result = iota
+ rMatch
+ rStop
+)
+
+func match(str, pat string, slen int, counter *int, maxcomp int) result {
+ // check complexity limit
+ if maxcomp > -1 {
+ if *counter > slen*maxcomp {
+ return rStop
+ }
+ *counter++
+ }
+
+ for len(pat) > 0 {
+ var wild bool
+ pc, ps := rune(pat[0]), 1
+ if pc > 0x7f {
+ pc, ps = utf8.DecodeRuneInString(pat)
+ }
+ var sc rune
+ var ss int
+ if len(str) > 0 {
+ sc, ss = rune(str[0]), 1
+ if sc > 0x7f {
+ sc, ss = utf8.DecodeRuneInString(str)
+ }
+ }
+ switch pc {
+ case '?':
+ if ss == 0 {
+ return rNoMatch
+ }
+ case '*':
+ // Ignore repeating stars.
+ for len(pat) > 1 && pat[1] == '*' {
+ pat = pat[1:]
+ }
+
+ // If this star is the last character then it must be a match.
+ if len(pat) == 1 {
+ return rMatch
+ }
+
+ // Match and trim any non-wildcard suffix characters.
+ var ok bool
+ str, pat, ok = matchTrimSuffix(str, pat)
+ if !ok {
+ return rNoMatch
+ }
+
+ // Check for single star again.
+ if len(pat) == 1 {
+ return rMatch
+ }
+
+ // Perform recursive wildcard search.
+ r := match(str, pat[1:], slen, counter, maxcomp)
+ if r != rNoMatch {
+ return r
+ }
+ if len(str) == 0 {
+ return rNoMatch
+ }
+ wild = true
+ default:
+ if ss == 0 {
+ return rNoMatch
+ }
+ if pc == '\\' {
+ pat = pat[ps:]
+ pc, ps = utf8.DecodeRuneInString(pat)
+ if ps == 0 {
+ return rNoMatch
+ }
+ }
+ if sc != pc {
+ return rNoMatch
+ }
+ }
+ str = str[ss:]
+ if !wild {
+ pat = pat[ps:]
+ }
+ }
+ if len(str) == 0 {
+ return rMatch
+ }
+ return rNoMatch
+}
+
+// matchTrimSuffix matches and trims any non-wildcard suffix characters.
+// Returns the trimmed string and pattern.
+//
+// This is called because the pattern contains extra data after the wildcard
+// star. Here we compare any suffix characters in the pattern to the suffix of
+// the target string. Basically a reverse match that stops when a wildcard
+// character is reached. This is a little trickier than a forward match because
+// we need to evaluate an escaped character in reverse.
+//
+// Any matched characters will be trimmed from both the target
+// string and the pattern.
+func matchTrimSuffix(str, pat string) (string, string, bool) {
+ // It's expected that the pattern has at least two bytes and the first byte
+ // is a wildcard star '*'
+ match := true
+ for len(str) > 0 && len(pat) > 1 {
+ pc, ps := utf8.DecodeLastRuneInString(pat)
+ var esc bool
+ for i := 0; ; i++ {
+ if pat[len(pat)-ps-i-1] != '\\' {
+ if i&1 == 1 {
+ esc = true
+ ps++
+ }
+ break
+ }
+ }
+ if pc == '*' && !esc {
+ match = true
+ break
+ }
+ sc, ss := utf8.DecodeLastRuneInString(str)
+ if !((pc == '?' && !esc) || pc == sc) {
+ match = false
+ break
+ }
+ str = str[:len(str)-ss]
+ pat = pat[:len(pat)-ps]
+ }
+ return str, pat, match
+}
+
+var maxRuneBytes = [...]byte{244, 143, 191, 191}
+
+// Allowable parses the pattern and determines the minimum and maximum allowable
+// values that the pattern can represent.
+// When the pattern is empty or starts with a wildcard, empty strings are
+// returned for both min and max.
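+//
+// Example (illustrative):
+//
+//  min, max := match.Allowable("hello*world")
+//  // min == "hello", max == "hellp"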
+func Allowable(pattern string) (min, max string) {
+ if pattern == "" || pattern[0] == '*' {
+ return "", ""
+ }
+
+ minb := make([]byte, 0, len(pattern))
+ maxb := make([]byte, 0, len(pattern))
+ var wild bool
+ for i := 0; i < len(pattern); i++ {
+ if pattern[i] == '*' {
+ wild = true
+ break
+ }
+ if pattern[i] == '?' {
+ minb = append(minb, 0)
+ maxb = append(maxb, maxRuneBytes[:]...)
+ } else {
+ minb = append(minb, pattern[i])
+ maxb = append(maxb, pattern[i])
+ }
+ }
+ if wild {
+ r, n := utf8.DecodeLastRune(maxb)
+ if r != utf8.RuneError {
+ if r < utf8.MaxRune {
+ r++
+ if r > 0x7f {
+ b := make([]byte, 4)
+ nn := utf8.EncodeRune(b, r)
+ maxb = append(maxb[:len(maxb)-n], b[:nn]...)
+ } else {
+ maxb = append(maxb[:len(maxb)-n], byte(r))
+ }
+ }
+ }
+ }
+ return string(minb), string(maxb)
+}
+
+// IsPattern returns true if the string is a pattern.
+func IsPattern(str string) bool {
+ for i := 0; i < len(str); i++ {
+ if str[i] == '*' || str[i] == '?' {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/tidwall/pretty/LICENSE b/vendor/github.com/tidwall/pretty/LICENSE
new file mode 100644
index 00000000..993b83f2
--- /dev/null
+++ b/vendor/github.com/tidwall/pretty/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Josh Baker
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/tidwall/pretty/README.md b/vendor/github.com/tidwall/pretty/README.md
new file mode 100644
index 00000000..d3be5e54
--- /dev/null
+++ b/vendor/github.com/tidwall/pretty/README.md
@@ -0,0 +1,122 @@
+# Pretty
+
+[![GoDoc](https://img.shields.io/badge/api-reference-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/tidwall/pretty)
+
+Pretty is a Go package that provides [fast](#performance) methods for formatting JSON for human readability, or to compact JSON for smaller payloads.
+
+Getting Started
+===============
+
+## Installing
+
+To start using Pretty, install Go and run `go get`:
+
+```sh
+$ go get -u github.com/tidwall/pretty
+```
+
+This will retrieve the library.
+
+## Pretty
+
+Using this example:
+
+```json
+{"name": {"first":"Tom","last":"Anderson"}, "age":37,
+"children": ["Sara","Alex","Jack"],
+"fav.movie": "Deer Hunter", "friends": [
+ {"first": "Janet", "last": "Murphy", "age": 44}
+ ]}
+```
+
+The following code:
+```go
+result = pretty.Pretty(example)
+```
+
+Will format the json to:
+
+```json
+{
+ "name": {
+ "first": "Tom",
+ "last": "Anderson"
+ },
+ "age": 37,
+ "children": ["Sara", "Alex", "Jack"],
+ "fav.movie": "Deer Hunter",
+ "friends": [
+ {
+ "first": "Janet",
+ "last": "Murphy",
+ "age": 44
+ }
+ ]
+}
+```
+
+## Color
+
+Color will colorize the json for outputting to the screen.
+
+```go
+result = pretty.Color(json, nil)
+```
+
+Will add color to the result for printing to the terminal.
+The second param is used for customizing the style, and passing nil will use the default `pretty.TerminalStyle`.
+
+## Ugly
+
+The following code:
+```go
+result = pretty.Ugly(example)
+```
+
+Will format the json to:
+
+```json
+{"name":{"first":"Tom","last":"Anderson"},"age":37,"children":["Sara","Alex","Jack"],"fav.movie":"Deer Hunter","friends":[{"first":"Janet","last":"Murphy","age":44}]}```
+```
+
+## Customized output
+
+There's a `PrettyOptions(json, opts)` function which allows for customizing the output with the following options:
+
+```go
+type Options struct {
+ // Width is a max column width for single line arrays
+ // Default is 80
+ Width int
+ // Prefix is a prefix for all lines
+ // Default is an empty string
+ Prefix string
+ // Indent is the nested indentation
+ // Default is two spaces
+ Indent string
+ // SortKeys will sort the keys alphabetically
+ // Default is false
+ SortKeys bool
+}
+```
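+
+For example, a minimal call using custom options might look like this (the `example` variable and the option values here are just placeholders):
+
+```go
+opts := &pretty.Options{Width: 40, Prefix: "", Indent: "\t", SortKeys: true}
+result := pretty.PrettyOptions(example, opts)
+```
+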
+## Performance
+
+Benchmarks of Pretty alongside the builtin `encoding/json` Indent/Compact methods.
+```
+BenchmarkPretty-16 1000000 1034 ns/op 720 B/op 2 allocs/op
+BenchmarkPrettySortKeys-16 586797 1983 ns/op 2848 B/op 14 allocs/op
+BenchmarkUgly-16 4652365 254 ns/op 240 B/op 1 allocs/op
+BenchmarkUglyInPlace-16 6481233 183 ns/op 0 B/op 0 allocs/op
+BenchmarkJSONIndent-16 450654 2687 ns/op 1221 B/op 0 allocs/op
+BenchmarkJSONCompact-16 685111 1699 ns/op 442 B/op 0 allocs/op
+```
+
+*These benchmarks were run on a MacBook Pro 2.4 GHz 8-Core Intel Core i9.*
+
+## Contact
+Josh Baker [@tidwall](http://twitter.com/tidwall)
+
+## License
+
+Pretty source code is available under the MIT [License](/LICENSE).
+
diff --git a/vendor/github.com/tidwall/pretty/pretty.go b/vendor/github.com/tidwall/pretty/pretty.go
new file mode 100644
index 00000000..f3f756aa
--- /dev/null
+++ b/vendor/github.com/tidwall/pretty/pretty.go
@@ -0,0 +1,674 @@
+package pretty
+
+import (
+ "bytes"
+ "encoding/json"
+ "sort"
+ "strconv"
+)
+
+// Options is Pretty options
+type Options struct {
+ // Width is a max column width for single line arrays
+ // Default is 80
+ Width int
+ // Prefix is a prefix for all lines
+ // Default is an empty string
+ Prefix string
+ // Indent is the nested indentation
+ // Default is two spaces
+ Indent string
+ // SortKeys will sort the keys alphabetically
+ // Default is false
+ SortKeys bool
+}
+
+// DefaultOptions is the default options for pretty formats.
+var DefaultOptions = &Options{Width: 80, Prefix: "", Indent: " ", SortKeys: false}
+
+// Pretty converts the input json into a more human readable format where each
+// element is on its own line with clear indentation.
+func Pretty(json []byte) []byte { return PrettyOptions(json, nil) }
+
+// PrettyOptions is like Pretty but with customized options.
+func PrettyOptions(json []byte, opts *Options) []byte {
+ if opts == nil {
+ opts = DefaultOptions
+ }
+ buf := make([]byte, 0, len(json))
+ if len(opts.Prefix) != 0 {
+ buf = append(buf, opts.Prefix...)
+ }
+ buf, _, _, _ = appendPrettyAny(buf, json, 0, true,
+ opts.Width, opts.Prefix, opts.Indent, opts.SortKeys,
+ 0, 0, -1)
+ if len(buf) > 0 {
+ buf = append(buf, '\n')
+ }
+ return buf
+}
+
+// Ugly removes insignificant space characters from the input json byte slice
+// and returns the compacted result.
+func Ugly(json []byte) []byte {
+ buf := make([]byte, 0, len(json))
+ return ugly(buf, json)
+}
+
+// UglyInPlace removes insignificant space characters from the input json
+// byte slice and returns the compacted result. This method reuses the
+// input json buffer to avoid allocations. Do not use the original byte
+// slice upon return.
+func UglyInPlace(json []byte) []byte { return ugly(json, json) }
+
+func ugly(dst, src []byte) []byte {
+ dst = dst[:0]
+ for i := 0; i < len(src); i++ {
+ if src[i] > ' ' {
+ dst = append(dst, src[i])
+ if src[i] == '"' {
+ for i = i + 1; i < len(src); i++ {
+ dst = append(dst, src[i])
+ if src[i] == '"' {
+ j := i - 1
+ for ; ; j-- {
+ if src[j] != '\\' {
+ break
+ }
+ }
+ if (j-i)%2 != 0 {
+ break
+ }
+ }
+ }
+ }
+ }
+ }
+ return dst
+}
+
+func isNaNOrInf(src []byte) bool {
+ return src[0] == 'i' || // inf
+ src[0] == 'I' || // Inf
+ src[0] == '+' || // +Inf
+ src[0] == 'N' || // NaN
+ (src[0] == 'n' && len(src) > 1 && src[1] != 'u') // nan
+}
+
+func appendPrettyAny(buf, json []byte, i int, pretty bool, width int, prefix, indent string, sortkeys bool, tabs, nl, max int) ([]byte, int, int, bool) {
+ for ; i < len(json); i++ {
+ if json[i] <= ' ' {
+ continue
+ }
+ if json[i] == '"' {
+ return appendPrettyString(buf, json, i, nl)
+ }
+
+ if (json[i] >= '0' && json[i] <= '9') || json[i] == '-' || isNaNOrInf(json[i:]) {
+ return appendPrettyNumber(buf, json, i, nl)
+ }
+ if json[i] == '{' {
+ return appendPrettyObject(buf, json, i, '{', '}', pretty, width, prefix, indent, sortkeys, tabs, nl, max)
+ }
+ if json[i] == '[' {
+ return appendPrettyObject(buf, json, i, '[', ']', pretty, width, prefix, indent, sortkeys, tabs, nl, max)
+ }
+ switch json[i] {
+ case 't':
+ return append(buf, 't', 'r', 'u', 'e'), i + 4, nl, true
+ case 'f':
+ return append(buf, 'f', 'a', 'l', 's', 'e'), i + 5, nl, true
+ case 'n':
+ return append(buf, 'n', 'u', 'l', 'l'), i + 4, nl, true
+ }
+ }
+ return buf, i, nl, true
+}
+
+type pair struct {
+ kstart, kend int
+ vstart, vend int
+}
+
+type byKeyVal struct {
+ sorted bool
+ json []byte
+ buf []byte
+ pairs []pair
+}
+
+func (arr *byKeyVal) Len() int {
+ return len(arr.pairs)
+}
+func (arr *byKeyVal) Less(i, j int) bool {
+ if arr.isLess(i, j, byKey) {
+ return true
+ }
+ if arr.isLess(j, i, byKey) {
+ return false
+ }
+ return arr.isLess(i, j, byVal)
+}
+func (arr *byKeyVal) Swap(i, j int) {
+ arr.pairs[i], arr.pairs[j] = arr.pairs[j], arr.pairs[i]
+ arr.sorted = true
+}
+
+type byKind int
+
+const (
+ byKey byKind = 0
+ byVal byKind = 1
+)
+
+type jtype int
+
+const (
+ jnull jtype = iota
+ jfalse
+ jnumber
+ jstring
+ jtrue
+ jjson
+)
+
+func getjtype(v []byte) jtype {
+ if len(v) == 0 {
+ return jnull
+ }
+ switch v[0] {
+ case '"':
+ return jstring
+ case 'f':
+ return jfalse
+ case 't':
+ return jtrue
+ case 'n':
+ return jnull
+ case '[', '{':
+ return jjson
+ default:
+ return jnumber
+ }
+}
+
+func (arr *byKeyVal) isLess(i, j int, kind byKind) bool {
+ k1 := arr.json[arr.pairs[i].kstart:arr.pairs[i].kend]
+ k2 := arr.json[arr.pairs[j].kstart:arr.pairs[j].kend]
+ var v1, v2 []byte
+ if kind == byKey {
+ v1 = k1
+ v2 = k2
+ } else {
+ v1 = bytes.TrimSpace(arr.buf[arr.pairs[i].vstart:arr.pairs[i].vend])
+ v2 = bytes.TrimSpace(arr.buf[arr.pairs[j].vstart:arr.pairs[j].vend])
+ if len(v1) >= len(k1)+1 {
+ v1 = bytes.TrimSpace(v1[len(k1)+1:])
+ }
+ if len(v2) >= len(k2)+1 {
+ v2 = bytes.TrimSpace(v2[len(k2)+1:])
+ }
+ }
+ t1 := getjtype(v1)
+ t2 := getjtype(v2)
+ if t1 < t2 {
+ return true
+ }
+ if t1 > t2 {
+ return false
+ }
+ if t1 == jstring {
+ s1 := parsestr(v1)
+ s2 := parsestr(v2)
+ return string(s1) < string(s2)
+ }
+ if t1 == jnumber {
+ n1, _ := strconv.ParseFloat(string(v1), 64)
+ n2, _ := strconv.ParseFloat(string(v2), 64)
+ return n1 < n2
+ }
+ return string(v1) < string(v2)
+
+}
+
+func parsestr(s []byte) []byte {
+ for i := 1; i < len(s); i++ {
+ if s[i] == '\\' {
+ var str string
+ json.Unmarshal(s, &str)
+ return []byte(str)
+ }
+ if s[i] == '"' {
+ return s[1:i]
+ }
+ }
+ return nil
+}
+
+func appendPrettyObject(buf, json []byte, i int, open, close byte, pretty bool, width int, prefix, indent string, sortkeys bool, tabs, nl, max int) ([]byte, int, int, bool) {
+ var ok bool
+ if width > 0 {
+ if pretty && open == '[' && max == -1 {
+ // here we try to create a single line array
+ max := width - (len(buf) - nl)
+ if max > 3 {
+ s1, s2 := len(buf), i
+ buf, i, _, ok = appendPrettyObject(buf, json, i, '[', ']', false, width, prefix, "", sortkeys, 0, 0, max)
+ if ok && len(buf)-s1 <= max {
+ return buf, i, nl, true
+ }
+ buf = buf[:s1]
+ i = s2
+ }
+ } else if max != -1 && open == '{' {
+ return buf, i, nl, false
+ }
+ }
+ buf = append(buf, open)
+ i++
+ var pairs []pair
+ if open == '{' && sortkeys {
+ pairs = make([]pair, 0, 8)
+ }
+ var n int
+ for ; i < len(json); i++ {
+ if json[i] <= ' ' {
+ continue
+ }
+ if json[i] == close {
+ if pretty {
+ if open == '{' && sortkeys {
+ buf = sortPairs(json, buf, pairs)
+ }
+ if n > 0 {
+ nl = len(buf)
+ if buf[nl-1] == ' ' {
+ buf[nl-1] = '\n'
+ } else {
+ buf = append(buf, '\n')
+ }
+ }
+ if buf[len(buf)-1] != open {
+ buf = appendTabs(buf, prefix, indent, tabs)
+ }
+ }
+ buf = append(buf, close)
+ return buf, i + 1, nl, open != '{'
+ }
+ if open == '[' || json[i] == '"' {
+ if n > 0 {
+ buf = append(buf, ',')
+ if width != -1 && open == '[' {
+ buf = append(buf, ' ')
+ }
+ }
+ var p pair
+ if pretty {
+ nl = len(buf)
+ if buf[nl-1] == ' ' {
+ buf[nl-1] = '\n'
+ } else {
+ buf = append(buf, '\n')
+ }
+ if open == '{' && sortkeys {
+ p.kstart = i
+ p.vstart = len(buf)
+ }
+ buf = appendTabs(buf, prefix, indent, tabs+1)
+ }
+ if open == '{' {
+ buf, i, nl, _ = appendPrettyString(buf, json, i, nl)
+ if sortkeys {
+ p.kend = i
+ }
+ buf = append(buf, ':')
+ if pretty {
+ buf = append(buf, ' ')
+ }
+ }
+ buf, i, nl, ok = appendPrettyAny(buf, json, i, pretty, width, prefix, indent, sortkeys, tabs+1, nl, max)
+ if max != -1 && !ok {
+ return buf, i, nl, false
+ }
+ if pretty && open == '{' && sortkeys {
+ p.vend = len(buf)
+ if p.kstart > p.kend || p.vstart > p.vend {
+ // bad data. disable sorting
+ sortkeys = false
+ } else {
+ pairs = append(pairs, p)
+ }
+ }
+ i--
+ n++
+ }
+ }
+ return buf, i, nl, open != '{'
+}
+func sortPairs(json, buf []byte, pairs []pair) []byte {
+ if len(pairs) == 0 {
+ return buf
+ }
+ vstart := pairs[0].vstart
+ vend := pairs[len(pairs)-1].vend
+ arr := byKeyVal{false, json, buf, pairs}
+ sort.Stable(&arr)
+ if !arr.sorted {
+ return buf
+ }
+ nbuf := make([]byte, 0, vend-vstart)
+ for i, p := range pairs {
+ nbuf = append(nbuf, buf[p.vstart:p.vend]...)
+ if i < len(pairs)-1 {
+ nbuf = append(nbuf, ',')
+ nbuf = append(nbuf, '\n')
+ }
+ }
+ return append(buf[:vstart], nbuf...)
+}
+
+func appendPrettyString(buf, json []byte, i, nl int) ([]byte, int, int, bool) {
+ s := i
+ i++
+ for ; i < len(json); i++ {
+ if json[i] == '"' {
+ var sc int
+ for j := i - 1; j > s; j-- {
+ if json[j] == '\\' {
+ sc++
+ } else {
+ break
+ }
+ }
+ if sc%2 == 1 {
+ continue
+ }
+ i++
+ break
+ }
+ }
+ return append(buf, json[s:i]...), i, nl, true
+}
+
+func appendPrettyNumber(buf, json []byte, i, nl int) ([]byte, int, int, bool) {
+ s := i
+ i++
+ for ; i < len(json); i++ {
+ if json[i] <= ' ' || json[i] == ',' || json[i] == ':' || json[i] == ']' || json[i] == '}' {
+ break
+ }
+ }
+ return append(buf, json[s:i]...), i, nl, true
+}
+
+func appendTabs(buf []byte, prefix, indent string, tabs int) []byte {
+ if len(prefix) != 0 {
+ buf = append(buf, prefix...)
+ }
+ if len(indent) == 2 && indent[0] == ' ' && indent[1] == ' ' {
+ for i := 0; i < tabs; i++ {
+ buf = append(buf, ' ', ' ')
+ }
+ } else {
+ for i := 0; i < tabs; i++ {
+ buf = append(buf, indent...)
+ }
+ }
+ return buf
+}
+
+// Style is the color style. Each [2]string field holds the opening and
+// closing escape sequences for the corresponding token type.
+type Style struct {
+ Key, String, Number [2]string
+ True, False, Null [2]string
+ Escape [2]string
+ Append func(dst []byte, c byte) []byte
+}
+
+func hexp(p byte) byte {
+ switch {
+ case p < 10:
+ return p + '0'
+ default:
+ return (p - 10) + 'a'
+ }
+}
+
+// TerminalStyle is the default style for ANSI terminals
+var TerminalStyle *Style
+
+func init() {
+ TerminalStyle = &Style{
+ Key: [2]string{"\x1B[94m", "\x1B[0m"},
+ String: [2]string{"\x1B[92m", "\x1B[0m"},
+ Number: [2]string{"\x1B[93m", "\x1B[0m"},
+ True: [2]string{"\x1B[96m", "\x1B[0m"},
+ False: [2]string{"\x1B[96m", "\x1B[0m"},
+ Null: [2]string{"\x1B[91m", "\x1B[0m"},
+ Escape: [2]string{"\x1B[35m", "\x1B[0m"},
+ Append: func(dst []byte, c byte) []byte {
+ if c < ' ' && (c != '\r' && c != '\n' && c != '\t' && c != '\v') {
+ dst = append(dst, "\\u00"...)
+ dst = append(dst, hexp((c>>4)&0xF))
+ return append(dst, hexp((c)&0xF))
+ }
+ return append(dst, c)
+ },
+ }
+}
+
+// Color will colorize the json. The style param is used for customizing
+// the colors. Passing nil to the style param will use the default
+// TerminalStyle.
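+//
+// A minimal illustrative sketch (the input document is an assumption):
+//
+//	colored := Color([]byte(`{"name":"Janet","age":47}`), nil)
+//	os.Stdout.Write(colored)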
+func Color(src []byte, style *Style) []byte {
+ if style == nil {
+ style = TerminalStyle
+ }
+ apnd := style.Append
+ if apnd == nil {
+ apnd = func(dst []byte, c byte) []byte {
+ return append(dst, c)
+ }
+ }
+ type stackt struct {
+ kind byte
+ key bool
+ }
+ var dst []byte
+ var stack []stackt
+ for i := 0; i < len(src); i++ {
+ if src[i] == '"' {
+ key := len(stack) > 0 && stack[len(stack)-1].key
+ if key {
+ dst = append(dst, style.Key[0]...)
+ } else {
+ dst = append(dst, style.String[0]...)
+ }
+ dst = apnd(dst, '"')
+ esc := false
+ uesc := 0
+ for i = i + 1; i < len(src); i++ {
+ if src[i] == '\\' {
+ if key {
+ dst = append(dst, style.Key[1]...)
+ } else {
+ dst = append(dst, style.String[1]...)
+ }
+ dst = append(dst, style.Escape[0]...)
+ dst = apnd(dst, src[i])
+ esc = true
+ if i+1 < len(src) && src[i+1] == 'u' {
+ uesc = 5
+ } else {
+ uesc = 1
+ }
+ } else if esc {
+ dst = apnd(dst, src[i])
+ if uesc == 1 {
+ esc = false
+ dst = append(dst, style.Escape[1]...)
+ if key {
+ dst = append(dst, style.Key[0]...)
+ } else {
+ dst = append(dst, style.String[0]...)
+ }
+ } else {
+ uesc--
+ }
+ } else {
+ dst = apnd(dst, src[i])
+ }
+ if src[i] == '"' {
+ j := i - 1
+ for ; ; j-- {
+ if src[j] != '\\' {
+ break
+ }
+ }
+ if (j-i)%2 != 0 {
+ break
+ }
+ }
+ }
+ if esc {
+ dst = append(dst, style.Escape[1]...)
+ } else if key {
+ dst = append(dst, style.Key[1]...)
+ } else {
+ dst = append(dst, style.String[1]...)
+ }
+ } else if src[i] == '{' || src[i] == '[' {
+ stack = append(stack, stackt{src[i], src[i] == '{'})
+ dst = apnd(dst, src[i])
+ } else if (src[i] == '}' || src[i] == ']') && len(stack) > 0 {
+ stack = stack[:len(stack)-1]
+ dst = apnd(dst, src[i])
+ } else if (src[i] == ':' || src[i] == ',') && len(stack) > 0 && stack[len(stack)-1].kind == '{' {
+ stack[len(stack)-1].key = !stack[len(stack)-1].key
+ dst = apnd(dst, src[i])
+ } else {
+ var kind byte
+ if (src[i] >= '0' && src[i] <= '9') || src[i] == '-' || isNaNOrInf(src[i:]) {
+ kind = '0'
+ dst = append(dst, style.Number[0]...)
+ } else if src[i] == 't' {
+ kind = 't'
+ dst = append(dst, style.True[0]...)
+ } else if src[i] == 'f' {
+ kind = 'f'
+ dst = append(dst, style.False[0]...)
+ } else if src[i] == 'n' {
+ kind = 'n'
+ dst = append(dst, style.Null[0]...)
+ } else {
+ dst = apnd(dst, src[i])
+ }
+ if kind != 0 {
+ for ; i < len(src); i++ {
+ if src[i] <= ' ' || src[i] == ',' || src[i] == ':' || src[i] == ']' || src[i] == '}' {
+ i--
+ break
+ }
+ dst = apnd(dst, src[i])
+ }
+ if kind == '0' {
+ dst = append(dst, style.Number[1]...)
+ } else if kind == 't' {
+ dst = append(dst, style.True[1]...)
+ } else if kind == 'f' {
+ dst = append(dst, style.False[1]...)
+ } else if kind == 'n' {
+ dst = append(dst, style.Null[1]...)
+ }
+ }
+ }
+ }
+ return dst
+}
+
+// Spec strips out comments and trailing commas and converts the input to
+// valid JSON per the official spec: https://tools.ietf.org/html/rfc8259
+//
+// The resulting JSON will always be the same length as the input and it will
+// include all of the same line breaks at matching offsets. This is to ensure
+// the result can later be processed by an external parser, and that the
+// parser will report messages or errors with the correct offsets.
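+//
+// A minimal illustrative sketch (the input document is an assumption):
+//
+//	in := []byte("{\"name\":\"Janet\", /* comment */ }")
+//	out := Spec(in)
+//	// len(out) == len(in); the comment bytes become spaces and the
+//	// trailing comma is blanked out.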
+func Spec(src []byte) []byte {
+ return spec(src, nil)
+}
+
+// SpecInPlace is the same as Spec, but this method reuses the input json
+// buffer to avoid allocations. Do not use the original byte slice upon return.
+func SpecInPlace(src []byte) []byte {
+ return spec(src, src)
+}
+
+func spec(src, dst []byte) []byte {
+ dst = dst[:0]
+ for i := 0; i < len(src); i++ {
+ if src[i] == '/' {
+ if i < len(src)-1 {
+ if src[i+1] == '/' {
+ dst = append(dst, ' ', ' ')
+ i += 2
+ for ; i < len(src); i++ {
+ if src[i] == '\n' {
+ dst = append(dst, '\n')
+ break
+ } else if src[i] == '\t' || src[i] == '\r' {
+ dst = append(dst, src[i])
+ } else {
+ dst = append(dst, ' ')
+ }
+ }
+ continue
+ }
+ if src[i+1] == '*' {
+ dst = append(dst, ' ', ' ')
+ i += 2
+ for ; i < len(src)-1; i++ {
+ if src[i] == '*' && src[i+1] == '/' {
+ dst = append(dst, ' ', ' ')
+ i++
+ break
+ } else if src[i] == '\n' || src[i] == '\t' ||
+ src[i] == '\r' {
+ dst = append(dst, src[i])
+ } else {
+ dst = append(dst, ' ')
+ }
+ }
+ continue
+ }
+ }
+ }
+ dst = append(dst, src[i])
+ if src[i] == '"' {
+ for i = i + 1; i < len(src); i++ {
+ dst = append(dst, src[i])
+ if src[i] == '"' {
+ j := i - 1
+ for ; ; j-- {
+ if src[j] != '\\' {
+ break
+ }
+ }
+ if (j-i)%2 != 0 {
+ break
+ }
+ }
+ }
+ } else if src[i] == '}' || src[i] == ']' {
+ for j := len(dst) - 2; j >= 0; j-- {
+ if dst[j] <= ' ' {
+ continue
+ }
+ if dst[j] == ',' {
+ dst[j] = ' '
+ }
+ break
+ }
+ }
+ }
+ return dst
+}
diff --git a/vendor/github.com/tidwall/sjson/LICENSE b/vendor/github.com/tidwall/sjson/LICENSE
new file mode 100644
index 00000000..89593c7c
--- /dev/null
+++ b/vendor/github.com/tidwall/sjson/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Josh Baker
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/vendor/github.com/tidwall/sjson/README.md b/vendor/github.com/tidwall/sjson/README.md
new file mode 100644
index 00000000..4598424e
--- /dev/null
+++ b/vendor/github.com/tidwall/sjson/README.md
@@ -0,0 +1,278 @@
+<p align="center">
+<img
+ src="logo.png"
+ width="240" height="78" border="0" alt="SJSON">
+<br>
+<a href="https://godoc.org/github.com/tidwall/sjson"><img src="https://img.shields.io/badge/api-reference-blue.svg?style=flat-square" alt="GoDoc"></a>
+</p>
+
+<p align="center">set a json value quickly</p>
+
+SJSON is a Go package that provides a [very fast](#performance) and simple way to set a value in a json document.
+For quickly retrieving json values check out [GJSON](https://github.com/tidwall/gjson).
+
+For a command line interface check out [JJ](https://github.com/tidwall/jj).
+
+Getting Started
+===============
+
+Installing
+----------
+
+To start using SJSON, install Go and run `go get`:
+
+```sh
+$ go get -u github.com/tidwall/sjson
+```
+
+This will retrieve the library.
+
+Set a value
+-----------
+Set sets the value for the specified path.
+A path is in dot syntax, such as "name.last" or "age".
+This function expects that the json is well-formed; it does not validate the input.
+Invalid json will not panic, but it may return unexpected results.
+Invalid paths may return an error.
+
+```go
+package main
+
+import "github.com/tidwall/sjson"
+
+const json = `{"name":{"first":"Janet","last":"Prichard"},"age":47}`
+
+func main() {
+ value, _ := sjson.Set(json, "name.last", "Anderson")
+ println(value)
+}
+```
+
+This will print:
+
+```json
+{"name":{"first":"Janet","last":"Anderson"},"age":47}
+```
+
+Path syntax
+-----------
+
+A path is a series of keys separated by a dot.
+The dot and colon characters can be escaped with ``\``.
+
+```json
+{
+ "name": {"first": "Tom", "last": "Anderson"},
+ "age":37,
+ "children": ["Sara","Alex","Jack"],
+ "fav.movie": "Deer Hunter",
+ "friends": [
+ {"first": "James", "last": "Murphy"},
+ {"first": "Roger", "last": "Craig"}
+ ]
+}
+```
+```
+"name.last" >> "Anderson"
+"age" >> 37
+"children.1" >> "Alex"
+"friends.1.last" >> "Craig"
+```
+
+The `-1` key can be used to append a value to an existing array:
+
+```
+"children.-1" >> appends a new value to the end of the children array
+```
+
+Normally number keys are used to modify arrays, but it's possible to force a numeric object key by using the colon character:
+
+```json
+{
+ "users":{
+ "2313":{"name":"Sara"},
+ "7839":{"name":"Andy"}
+ }
+}
+```
+
+A colon path would look like:
+
+```
+"users.:2313.name" >> "Sara"
+```
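+
+For example, a hedged sketch of setting through a forced numeric object key
+(the document and expected output are assumptions based on the path above):
+
+```go
+value, _ := sjson.Set(`{"users":{}}`, "users.:2313.name", "Sara")
+println(value)
+
+// Output:
+// {"users":{"2313":{"name":"Sara"}}}
+```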
+
+Supported types
+---------------
+
+Pretty much any type is supported:
+
+```go
+sjson.Set(`{"key":true}`, "key", nil)
+sjson.Set(`{"key":true}`, "key", false)
+sjson.Set(`{"key":true}`, "key", 1)
+sjson.Set(`{"key":true}`, "key", 10.5)
+sjson.Set(`{"key":true}`, "key", "hello")
+sjson.Set(`{"key":true}`, "key", []string{"hello", "world"})
+sjson.Set(`{"key":true}`, "key", map[string]interface{}{"hello":"world"})
+```
+
+When a type is not recognized, SJSON will fall back to the `encoding/json` Marshaller.
+
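+A hedged sketch of that fallback (the struct type and expected output are
+assumptions; field order follows the `encoding/json` marshaller):
+
+```go
+type user struct {
+	Name string `json:"name"`
+	Age  int    `json:"age"`
+}
+
+value, _ := sjson.Set(`{"users":[]}`, "users.-1", user{Name: "Sara", Age: 21})
+println(value)
+
+// Output:
+// {"users":[{"name":"Sara","age":21}]}
+```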
+
+Examples
+--------
+
+Set a value from an empty document:
+```go
+value, _ := sjson.Set("", "name", "Tom")
+println(value)
+
+// Output:
+// {"name":"Tom"}
+```
+
+Set a nested value from an empty document:
+```go
+value, _ := sjson.Set("", "name.last", "Anderson")
+println(value)
+
+// Output:
+// {"name":{"last":"Anderson"}}
+```
+
+Set a new value:
+```go
+value, _ := sjson.Set(`{"name":{"last":"Anderson"}}`, "name.first", "Sara")
+println(value)
+
+// Output:
+// {"name":{"first":"Sara","last":"Anderson"}}
+```
+
+Update an existing value:
+```go
+value, _ := sjson.Set(`{"name":{"last":"Anderson"}}`, "name.last", "Smith")
+println(value)
+
+// Output:
+// {"name":{"last":"Smith"}}
+```
+
+Set a new array value:
+```go
+value, _ := sjson.Set(`{"friends":["Andy","Carol"]}`, "friends.2", "Sara")
+println(value)
+
+// Output:
+// {"friends":["Andy","Carol","Sara"]
+```
+
+Append an array value by using the `-1` key in a path:
+```go
+value, _ := sjson.Set(`{"friends":["Andy","Carol"]}`, "friends.-1", "Sara")
+println(value)
+
+// Output:
+// {"friends":["Andy","Carol","Sara"]
+```
+
+Append an array value that is past the end:
+```go
+value, _ := sjson.Set(`{"friends":["Andy","Carol"]}`, "friends.4", "Sara")
+println(value)
+
+// Output:
+// {"friends":["Andy","Carol",null,null,"Sara"]
+```
+
+Delete a value:
+```go
+value, _ := sjson.Delete(`{"name":{"first":"Sara","last":"Anderson"}}`, "name.first")
+println(value)
+
+// Output:
+// {"name":{"last":"Anderson"}}
+```
+
+Delete an array value:
+```go
+value, _ := sjson.Delete(`{"friends":["Andy","Carol"]}`, "friends.1")
+println(value)
+
+// Output:
+// {"friends":["Andy"]}
+```
+
+Delete the last array value:
+```go
+value, _ := sjson.Delete(`{"friends":["Andy","Carol"]}`, "friends.-1")
+println(value)
+
+// Output:
+// {"friends":["Andy"]}
+```
+
+## Performance
+
+Benchmarks of SJSON alongside [encoding/json](https://golang.org/pkg/encoding/json/),
+[ffjson](https://github.com/pquerna/ffjson),
+[EasyJSON](https://github.com/mailru/easyjson),
+and [Gabs](https://github.com/Jeffail/gabs):
+
+```
+Benchmark_SJSON-8 3000000 805 ns/op 1077 B/op 3 allocs/op
+Benchmark_SJSON_ReplaceInPlace-8 3000000 449 ns/op 0 B/op 0 allocs/op
+Benchmark_JSON_Map-8 300000 21236 ns/op 6392 B/op 150 allocs/op
+Benchmark_JSON_Struct-8 300000 14691 ns/op 1789 B/op 24 allocs/op
+Benchmark_Gabs-8 300000 21311 ns/op 6752 B/op 150 allocs/op
+Benchmark_FFJSON-8 300000 17673 ns/op 3589 B/op 47 allocs/op
+Benchmark_EasyJSON-8 1500000 3119 ns/op 1061 B/op 13 allocs/op
+```
+
+JSON document used:
+
+```json
+{
+ "widget": {
+ "debug": "on",
+ "window": {
+ "title": "Sample Konfabulator Widget",
+ "name": "main_window",
+ "width": 500,
+ "height": 500
+ },
+ "image": {
+ "src": "Images/Sun.png",
+ "hOffset": 250,
+ "vOffset": 250,
+ "alignment": "center"
+ },
+ "text": {
+ "data": "Click Here",
+ "size": 36,
+ "style": "bold",
+ "vOffset": 100,
+ "alignment": "center",
+ "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;"
+ }
+ }
+}
+```
+
+Each operation was rotated through one of the following search paths:
+
+```
+widget.window.name
+widget.image.hOffset
+widget.text.onMouseUp
+```
+
+*These benchmarks were run on a MacBook Pro 15" 2.8 GHz Intel Core i7 using Go 1.7 and can be found [here](https://github.com/tidwall/sjson-benchmarks)*.
+
+## Contact
+Josh Baker [@tidwall](http://twitter.com/tidwall)
+
+## License
+
+SJSON source code is available under the MIT [License](/LICENSE).
diff --git a/vendor/github.com/tidwall/sjson/logo.png b/vendor/github.com/tidwall/sjson/logo.png
new file mode 100644
index 00000000..b5aa257b
--- /dev/null
+++ b/vendor/github.com/tidwall/sjson/logo.png
Binary files differ
diff --git a/vendor/github.com/tidwall/sjson/sjson.go b/vendor/github.com/tidwall/sjson/sjson.go
new file mode 100644
index 00000000..a55eef3f
--- /dev/null
+++ b/vendor/github.com/tidwall/sjson/sjson.go
@@ -0,0 +1,737 @@
+// Package sjson provides a fast and simple way to set json values.
+package sjson
+
+import (
+ jsongo "encoding/json"
+ "sort"
+ "strconv"
+ "unsafe"
+
+ "github.com/tidwall/gjson"
+)
+
+type errorType struct {
+ msg string
+}
+
+func (err *errorType) Error() string {
+ return err.msg
+}
+
+// Options represents additional options for the Set and Delete functions.
+type Options struct {
+ // Optimistic is a hint that the value likely exists which
+ // allows sjson to perform a fast-track search and replace.
+ Optimistic bool
+ // ReplaceInPlace is a hint to replace the input json rather than
+ // allocate a new json byte slice. When this field is specified,
+ // the input json will no longer be valid and it should not be used.
+ // In the case where the destination slice doesn't have enough free
+ // bytes to replace the data in place, a new byte slice will be
+ // created under the hood.
+ // The Optimistic flag must be set to true and the input must be a
+ // byte slice in order to use this field.
+ ReplaceInPlace bool
+}
+
+type pathResult struct {
+ part string // current key part
+ gpart string // gjson get part
+ path string // remaining path
+ force bool // force a string key
+ more bool // there is more path to parse
+}
+
+func isSimpleChar(ch byte) bool {
+ switch ch {
+ case '|', '#', '@', '*', '?':
+ return false
+ default:
+ return true
+ }
+}
+
+func parsePath(path string) (res pathResult, simple bool) {
+ var r pathResult
+ if len(path) > 0 && path[0] == ':' {
+ r.force = true
+ path = path[1:]
+ }
+ for i := 0; i < len(path); i++ {
+ if path[i] == '.' {
+ r.part = path[:i]
+ r.gpart = path[:i]
+ r.path = path[i+1:]
+ r.more = true
+ return r, true
+ }
+ if !isSimpleChar(path[i]) {
+ return r, false
+ }
+ if path[i] == '\\' {
+ // go into escape mode. this is a slower path that
+ // strips off the escape character from the part.
+ epart := []byte(path[:i])
+ gpart := []byte(path[:i+1])
+ i++
+ if i < len(path) {
+ epart = append(epart, path[i])
+ gpart = append(gpart, path[i])
+ i++
+ for ; i < len(path); i++ {
+ if path[i] == '\\' {
+ gpart = append(gpart, '\\')
+ i++
+ if i < len(path) {
+ epart = append(epart, path[i])
+ gpart = append(gpart, path[i])
+ }
+ continue
+ } else if path[i] == '.' {
+ r.part = string(epart)
+ r.gpart = string(gpart)
+ r.path = path[i+1:]
+ r.more = true
+ return r, true
+ } else if !isSimpleChar(path[i]) {
+ return r, false
+ }
+ epart = append(epart, path[i])
+ gpart = append(gpart, path[i])
+ }
+ }
+ // append the last part
+ r.part = string(epart)
+ r.gpart = string(gpart)
+ return r, true
+ }
+ }
+ r.part = path
+ r.gpart = path
+ return r, true
+}
+
+func mustMarshalString(s string) bool {
+ for i := 0; i < len(s); i++ {
+ if s[i] < ' ' || s[i] > 0x7f || s[i] == '"' || s[i] == '\\' {
+ return true
+ }
+ }
+ return false
+}
+
+// appendStringify makes a json string and appends to buf.
+func appendStringify(buf []byte, s string) []byte {
+ if mustMarshalString(s) {
+ b, _ := jsongo.Marshal(s)
+ return append(buf, b...)
+ }
+ buf = append(buf, '"')
+ buf = append(buf, s...)
+ buf = append(buf, '"')
+ return buf
+}
+
+// appendBuild builds a json block from a json path.
+func appendBuild(buf []byte, array bool, paths []pathResult, raw string,
+ stringify bool) []byte {
+ if !array {
+ buf = appendStringify(buf, paths[0].part)
+ buf = append(buf, ':')
+ }
+ if len(paths) > 1 {
+ n, numeric := atoui(paths[1])
+ if numeric || (!paths[1].force && paths[1].part == "-1") {
+ buf = append(buf, '[')
+ buf = appendRepeat(buf, "null,", n)
+ buf = appendBuild(buf, true, paths[1:], raw, stringify)
+ buf = append(buf, ']')
+ } else {
+ buf = append(buf, '{')
+ buf = appendBuild(buf, false, paths[1:], raw, stringify)
+ buf = append(buf, '}')
+ }
+ } else {
+ if stringify {
+ buf = appendStringify(buf, raw)
+ } else {
+ buf = append(buf, raw...)
+ }
+ }
+ return buf
+}
+
+// atoui does a rip conversion of string -> unsigned int.
+func atoui(r pathResult) (n int, ok bool) {
+ if r.force {
+ return 0, false
+ }
+ for i := 0; i < len(r.part); i++ {
+ if r.part[i] < '0' || r.part[i] > '9' {
+ return 0, false
+ }
+ n = n*10 + int(r.part[i]-'0')
+ }
+ return n, true
+}
+
+// appendRepeat repeats string "n" times and appends to buf.
+func appendRepeat(buf []byte, s string, n int) []byte {
+ for i := 0; i < n; i++ {
+ buf = append(buf, s...)
+ }
+ return buf
+}
+
+// trim does a rip trim
+func trim(s string) string {
+ for len(s) > 0 {
+ if s[0] <= ' ' {
+ s = s[1:]
+ continue
+ }
+ break
+ }
+ for len(s) > 0 {
+ if s[len(s)-1] <= ' ' {
+ s = s[:len(s)-1]
+ continue
+ }
+ break
+ }
+ return s
+}
+
+// deleteTailItem deletes the previous key or comma.
+func deleteTailItem(buf []byte) ([]byte, bool) {
+loop:
+ for i := len(buf) - 1; i >= 0; i-- {
+ // look for either a ',',':','['
+ switch buf[i] {
+ case '[':
+ return buf, true
+ case ',':
+ return buf[:i], false
+ case ':':
+ // delete tail string
+ i--
+ for ; i >= 0; i-- {
+ if buf[i] == '"' {
+ i--
+ for ; i >= 0; i-- {
+ if buf[i] == '"' {
+ i--
+ if i >= 0 && buf[i] == '\\' {
+ i--
+ continue
+ }
+ for ; i >= 0; i-- {
+ // look for either a ',','{'
+ switch buf[i] {
+ case '{':
+ return buf[:i+1], true
+ case ',':
+ return buf[:i], false
+ }
+ }
+ }
+ }
+ break
+ }
+ }
+ break loop
+ }
+ }
+ return buf, false
+}
+
+var errNoChange = &errorType{"no change"}
+
+func appendRawPaths(buf []byte, jstr string, paths []pathResult, raw string,
+ stringify, del bool) ([]byte, error) {
+ var err error
+ var res gjson.Result
+ var found bool
+ if del {
+ if paths[0].part == "-1" && !paths[0].force {
+ res = gjson.Get(jstr, "#")
+ if res.Int() > 0 {
+ res = gjson.Get(jstr, strconv.FormatInt(int64(res.Int()-1), 10))
+ found = true
+ }
+ }
+ }
+ if !found {
+ res = gjson.Get(jstr, paths[0].gpart)
+ }
+ if res.Index > 0 {
+ if len(paths) > 1 {
+ buf = append(buf, jstr[:res.Index]...)
+ buf, err = appendRawPaths(buf, res.Raw, paths[1:], raw,
+ stringify, del)
+ if err != nil {
+ return nil, err
+ }
+ buf = append(buf, jstr[res.Index+len(res.Raw):]...)
+ return buf, nil
+ }
+ buf = append(buf, jstr[:res.Index]...)
+ var exidx int // additional forward stripping
+ if del {
+ var delNextComma bool
+ buf, delNextComma = deleteTailItem(buf)
+ if delNextComma {
+ i, j := res.Index+len(res.Raw), 0
+ for ; i < len(jstr); i, j = i+1, j+1 {
+ if jstr[i] <= ' ' {
+ continue
+ }
+ if jstr[i] == ',' {
+ exidx = j + 1
+ }
+ break
+ }
+ }
+ } else {
+ if stringify {
+ buf = appendStringify(buf, raw)
+ } else {
+ buf = append(buf, raw...)
+ }
+ }
+ buf = append(buf, jstr[res.Index+len(res.Raw)+exidx:]...)
+ return buf, nil
+ }
+ if del {
+ return nil, errNoChange
+ }
+ n, numeric := atoui(paths[0])
+ isempty := true
+ for i := 0; i < len(jstr); i++ {
+ if jstr[i] > ' ' {
+ isempty = false
+ break
+ }
+ }
+ if isempty {
+ if numeric {
+ jstr = "[]"
+ } else {
+ jstr = "{}"
+ }
+ }
+ jsres := gjson.Parse(jstr)
+ if jsres.Type != gjson.JSON {
+ if numeric {
+ jstr = "[]"
+ } else {
+ jstr = "{}"
+ }
+ jsres = gjson.Parse(jstr)
+ }
+ var comma bool
+ for i := 1; i < len(jsres.Raw); i++ {
+ if jsres.Raw[i] <= ' ' {
+ continue
+ }
+ if jsres.Raw[i] == '}' || jsres.Raw[i] == ']' {
+ break
+ }
+ comma = true
+ break
+ }
+ switch jsres.Raw[0] {
+ default:
+ return nil, &errorType{"json must be an object or array"}
+ case '{':
+ end := len(jsres.Raw) - 1
+ for ; end > 0; end-- {
+ if jsres.Raw[end] == '}' {
+ break
+ }
+ }
+ buf = append(buf, jsres.Raw[:end]...)
+ if comma {
+ buf = append(buf, ',')
+ }
+ buf = appendBuild(buf, false, paths, raw, stringify)
+ buf = append(buf, '}')
+ return buf, nil
+ case '[':
+ var appendit bool
+ if !numeric {
+ if paths[0].part == "-1" && !paths[0].force {
+ appendit = true
+ } else {
+ return nil, &errorType{
+ "cannot set array element for non-numeric key '" +
+ paths[0].part + "'"}
+ }
+ }
+ if appendit {
+ njson := trim(jsres.Raw)
+ if njson[len(njson)-1] == ']' {
+ njson = njson[:len(njson)-1]
+ }
+ buf = append(buf, njson...)
+ if comma {
+ buf = append(buf, ',')
+ }
+
+ buf = appendBuild(buf, true, paths, raw, stringify)
+ buf = append(buf, ']')
+ return buf, nil
+ }
+ buf = append(buf, '[')
+ ress := jsres.Array()
+ for i := 0; i < len(ress); i++ {
+ if i > 0 {
+ buf = append(buf, ',')
+ }
+ buf = append(buf, ress[i].Raw...)
+ }
+ if len(ress) == 0 {
+ buf = appendRepeat(buf, "null,", n-len(ress))
+ } else {
+ buf = appendRepeat(buf, ",null", n-len(ress))
+ if comma {
+ buf = append(buf, ',')
+ }
+ }
+ buf = appendBuild(buf, true, paths, raw, stringify)
+ buf = append(buf, ']')
+ return buf, nil
+ }
+}
+
+func isOptimisticPath(path string) bool {
+ for i := 0; i < len(path); i++ {
+ if path[i] < '.' || path[i] > 'z' {
+ return false
+ }
+ if path[i] > '9' && path[i] < 'A' {
+ return false
+ }
+ if path[i] > 'z' {
+ return false
+ }
+ }
+ return true
+}
+
+// Set sets a json value for the specified path.
+// A path is in dot syntax, such as "name.last" or "age".
+// This function expects that the json is well-formed, and does not validate.
+// Invalid json will not panic, but it may return unexpected results.
+// An error is returned if the path is not valid.
+//
+// A path is a series of keys separated by a dot.
+//
+// {
+// "name": {"first": "Tom", "last": "Anderson"},
+// "age":37,
+// "children": ["Sara","Alex","Jack"],
+// "friends": [
+// {"first": "James", "last": "Murphy"},
+// {"first": "Roger", "last": "Craig"}
+// ]
+// }
+// "name.last" >> "Anderson"
+// "age" >> 37
+// "children.1" >> "Alex"
+//
+func Set(json, path string, value interface{}) (string, error) {
+ return SetOptions(json, path, value, nil)
+}
+
+// SetBytes sets a json value for the specified path.
+// If working with bytes, this method is preferred over
+// Set(string(data), path, value).
+func SetBytes(json []byte, path string, value interface{}) ([]byte, error) {
+ return SetBytesOptions(json, path, value, nil)
+}
+
+// SetRaw sets a raw json value for the specified path.
+// This function works the same as Set except that the value is set as a
+// raw block of json. This allows for setting premarshalled json objects.
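+//
+// A minimal illustrative sketch (document and path are assumptions):
+//
+//	value, _ := SetRaw(`{"user":{}}`, "user.tags", `["a","b"]`)
+//	// value == `{"user":{"tags":["a","b"]}}`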
+func SetRaw(json, path, value string) (string, error) {
+ return SetRawOptions(json, path, value, nil)
+}
+
+// SetRawOptions sets a raw json value for the specified path with options.
+// This function works the same as SetOptions except that the value is set
+// as a raw block of json. This allows for setting premarshalled json objects.
+func SetRawOptions(json, path, value string, opts *Options) (string, error) {
+ var optimistic bool
+ if opts != nil {
+ optimistic = opts.Optimistic
+ }
+ res, err := set(json, path, value, false, false, optimistic, false)
+ if err == errNoChange {
+ return json, nil
+ }
+ return string(res), err
+}
+
+// SetRawBytes sets a raw json value for the specified path.
+// If working with bytes, this method is preferred over
+// SetRaw(string(data), path, value).
+func SetRawBytes(json []byte, path string, value []byte) ([]byte, error) {
+ return SetRawBytesOptions(json, path, value, nil)
+}
+
+type dtype struct{}
+
+// Delete deletes a value from json for the specified path.
+func Delete(json, path string) (string, error) {
+ return Set(json, path, dtype{})
+}
+
+// DeleteBytes deletes a value from json for the specified path.
+func DeleteBytes(json []byte, path string) ([]byte, error) {
+ return SetBytes(json, path, dtype{})
+}
+
+type stringHeader struct {
+ data unsafe.Pointer
+ len int
+}
+
+type sliceHeader struct {
+ data unsafe.Pointer
+ len int
+ cap int
+}
+
+func set(jstr, path, raw string,
+ stringify, del, optimistic, inplace bool) ([]byte, error) {
+ if path == "" {
+ return []byte(jstr), &errorType{"path cannot be empty"}
+ }
+ if !del && optimistic && isOptimisticPath(path) {
+ res := gjson.Get(jstr, path)
+ if res.Exists() && res.Index > 0 {
+ sz := len(jstr) - len(res.Raw) + len(raw)
+ if stringify {
+ sz += 2
+ }
+ if inplace && sz <= len(jstr) {
+ if !stringify || !mustMarshalString(raw) {
+ jsonh := *(*stringHeader)(unsafe.Pointer(&jstr))
+ jsonbh := sliceHeader{
+ data: jsonh.data, len: jsonh.len, cap: jsonh.len}
+ jbytes := *(*[]byte)(unsafe.Pointer(&jsonbh))
+ if stringify {
+ jbytes[res.Index] = '"'
+ copy(jbytes[res.Index+1:], []byte(raw))
+ jbytes[res.Index+1+len(raw)] = '"'
+ copy(jbytes[res.Index+1+len(raw)+1:],
+ jbytes[res.Index+len(res.Raw):])
+ } else {
+ copy(jbytes[res.Index:], []byte(raw))
+ copy(jbytes[res.Index+len(raw):],
+ jbytes[res.Index+len(res.Raw):])
+ }
+ return jbytes[:sz], nil
+ }
+ return []byte(jstr), nil
+ }
+ buf := make([]byte, 0, sz)
+ buf = append(buf, jstr[:res.Index]...)
+ if stringify {
+ buf = appendStringify(buf, raw)
+ } else {
+ buf = append(buf, raw...)
+ }
+ buf = append(buf, jstr[res.Index+len(res.Raw):]...)
+ return buf, nil
+ }
+ }
+ var paths []pathResult
+ r, simple := parsePath(path)
+ if simple {
+ paths = append(paths, r)
+ for r.more {
+ r, simple = parsePath(r.path)
+ if !simple {
+ break
+ }
+ paths = append(paths, r)
+ }
+ }
+ if !simple {
+ if del {
+ return []byte(jstr),
+ &errorType{"cannot delete value from a complex path"}
+ }
+ return setComplexPath(jstr, path, raw, stringify)
+ }
+ njson, err := appendRawPaths(nil, jstr, paths, raw, stringify, del)
+ if err != nil {
+ return []byte(jstr), err
+ }
+ return njson, nil
+}
+
+func setComplexPath(jstr, path, raw string, stringify bool) ([]byte, error) {
+ res := gjson.Get(jstr, path)
+ if !res.Exists() || !(res.Index != 0 || len(res.Indexes) != 0) {
+ return []byte(jstr), errNoChange
+ }
+ if res.Index != 0 {
+ njson := []byte(jstr[:res.Index])
+ if stringify {
+ njson = appendStringify(njson, raw)
+ } else {
+ njson = append(njson, raw...)
+ }
+ njson = append(njson, jstr[res.Index+len(res.Raw):]...)
+ jstr = string(njson)
+ }
+ if len(res.Indexes) > 0 {
+ type val struct {
+ index int
+ res gjson.Result
+ }
+ vals := make([]val, 0, len(res.Indexes))
+ res.ForEach(func(_, vres gjson.Result) bool {
+ vals = append(vals, val{res: vres})
+ return true
+ })
+ if len(res.Indexes) != len(vals) {
+ return []byte(jstr), errNoChange
+ }
+ for i := 0; i < len(res.Indexes); i++ {
+ vals[i].index = res.Indexes[i]
+ }
+ sort.SliceStable(vals, func(i, j int) bool {
+ return vals[i].index > vals[j].index
+ })
+ for _, val := range vals {
+ vres := val.res
+ index := val.index
+ njson := []byte(jstr[:index])
+ if stringify {
+ njson = appendStringify(njson, raw)
+ } else {
+ njson = append(njson, raw...)
+ }
+ njson = append(njson, jstr[index+len(vres.Raw):]...)
+ jstr = string(njson)
+ }
+ }
+ return []byte(jstr), nil
+}
+
+// SetOptions sets a json value for the specified path with options.
+// A path is in dot syntax, such as "name.last" or "age".
+// This function expects that the json is well-formed, and does not validate.
+// Invalid json will not panic, but it may return unexpected results.
+// An error is returned if the path is not valid.
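+//
+// A hedged usage sketch (the document and values are assumptions):
+//
+//	value, err := SetOptions(`{"name":"Janet"}`, "name", "Sara",
+//		&Options{Optimistic: true})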
+func SetOptions(json, path string, value interface{},
+ opts *Options) (string, error) {
+ if opts != nil {
+ if opts.ReplaceInPlace {
+ // it's not safe to replace bytes in-place for strings
+ // copy the Options and set options.ReplaceInPlace to false.
+ nopts := *opts
+ opts = &nopts
+ opts.ReplaceInPlace = false
+ }
+ }
+ jsonh := *(*stringHeader)(unsafe.Pointer(&json))
+ jsonbh := sliceHeader{data: jsonh.data, len: jsonh.len, cap: jsonh.len}
+ jsonb := *(*[]byte)(unsafe.Pointer(&jsonbh))
+ res, err := SetBytesOptions(jsonb, path, value, opts)
+ return string(res), err
+}
+
+// SetBytesOptions sets a json value for the specified path with options.
+// If working with bytes, this method is preferred over
+// SetOptions(string(data), path, value).
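+//
+// A hedged sketch of in-place replacement (the input is an assumption; per
+// the Options docs the original slice should not be reused afterwards):
+//
+//	data := []byte(`{"name":"Janet"}`)
+//	data, err := SetBytesOptions(data, "name", "Sara",
+//		&Options{Optimistic: true, ReplaceInPlace: true})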
+func SetBytesOptions(json []byte, path string, value interface{},
+ opts *Options) ([]byte, error) {
+ var optimistic, inplace bool
+ if opts != nil {
+ optimistic = opts.Optimistic
+ inplace = opts.ReplaceInPlace
+ }
+ jstr := *(*string)(unsafe.Pointer(&json))
+ var res []byte
+ var err error
+ switch v := value.(type) {
+ default:
+ b, merr := jsongo.Marshal(value)
+ if merr != nil {
+ return nil, merr
+ }
+ raw := *(*string)(unsafe.Pointer(&b))
+ res, err = set(jstr, path, raw, false, false, optimistic, inplace)
+ case dtype:
+ res, err = set(jstr, path, "", false, true, optimistic, inplace)
+ case string:
+ res, err = set(jstr, path, v, true, false, optimistic, inplace)
+ case []byte:
+ raw := *(*string)(unsafe.Pointer(&v))
+ res, err = set(jstr, path, raw, true, false, optimistic, inplace)
+ case bool:
+ if v {
+ res, err = set(jstr, path, "true", false, false, optimistic, inplace)
+ } else {
+ res, err = set(jstr, path, "false", false, false, optimistic, inplace)
+ }
+ case int8:
+ res, err = set(jstr, path, strconv.FormatInt(int64(v), 10),
+ false, false, optimistic, inplace)
+ case int16:
+ res, err = set(jstr, path, strconv.FormatInt(int64(v), 10),
+ false, false, optimistic, inplace)
+ case int32:
+ res, err = set(jstr, path, strconv.FormatInt(int64(v), 10),
+ false, false, optimistic, inplace)
+ case int64:
+ res, err = set(jstr, path, strconv.FormatInt(int64(v), 10),
+ false, false, optimistic, inplace)
+ case uint8:
+ res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10),
+ false, false, optimistic, inplace)
+ case uint16:
+ res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10),
+ false, false, optimistic, inplace)
+ case uint32:
+ res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10),
+ false, false, optimistic, inplace)
+ case uint64:
+ res, err = set(jstr, path, strconv.FormatUint(uint64(v), 10),
+ false, false, optimistic, inplace)
+ case float32:
+ res, err = set(jstr, path, strconv.FormatFloat(float64(v), 'f', -1, 64),
+ false, false, optimistic, inplace)
+ case float64:
+ res, err = set(jstr, path, strconv.FormatFloat(float64(v), 'f', -1, 64),
+ false, false, optimistic, inplace)
+ }
+ if err == errNoChange {
+ return json, nil
+ }
+ return res, err
+}
+
+// SetRawBytesOptions sets a raw json value for the specified path with options.
+// If working with bytes, this method is preferred over
+// SetRawOptions(string(data), path, value, opts).
+func SetRawBytesOptions(json []byte, path string, value []byte,
+ opts *Options) ([]byte, error) {
+ jstr := *(*string)(unsafe.Pointer(&json))
+ vstr := *(*string)(unsafe.Pointer(&value))
+ var optimistic, inplace bool
+ if opts != nil {
+ optimistic = opts.Optimistic
+ inplace = opts.ReplaceInPlace
+ }
+ res, err := set(jstr, path, vstr, false, false, optimistic, inplace)
+ if err == errNoChange {
+ return json, nil
+ }
+ return res, err
+}