summaryrefslogtreecommitdiffstats
path: root/vendor/github.com/graph-gophers/graphql-go/internal
diff options
context:
space:
mode:
authorWim <wim@42.be>2022-04-01 00:23:19 +0200
committerGitHub <noreply@github.com>2022-04-01 00:23:19 +0200
commitc6716e030c02f316b887c1d3ee4b443aa3ab6afd (patch)
tree470461fe2d29662e7a69834ed21fce30beed65ab /vendor/github.com/graph-gophers/graphql-go/internal
parent4ab72acec656dafd304f88359b509b1f27c06604 (diff)
downloadmatterbridge-msglm-c6716e030c02f316b887c1d3ee4b443aa3ab6afd.tar.gz
matterbridge-msglm-c6716e030c02f316b887c1d3ee4b443aa3ab6afd.tar.bz2
matterbridge-msglm-c6716e030c02f316b887c1d3ee4b443aa3ab6afd.zip
Update dependencies (#1784)
Diffstat (limited to 'vendor/github.com/graph-gophers/graphql-go/internal')
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/common/blockstring.go103
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/common/directive.go18
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/common/lexer.go229
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/common/literals.go58
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/common/types.go67
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/common/values.go37
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/exec/exec.go381
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/exec/packer/packer.go390
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/meta.go70
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/resolvable.go453
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/exec/selected/selected.go269
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/exec/subscribe.go179
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/query/query.go156
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/schema/meta.go203
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/schema/schema.go586
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/validation/suggestion.go71
-rw-r--r--vendor/github.com/graph-gophers/graphql-go/internal/validation/validation.go980
17 files changed, 4250 insertions, 0 deletions
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/common/blockstring.go b/vendor/github.com/graph-gophers/graphql-go/internal/common/blockstring.go
new file mode 100644
index 00000000..1f7fe813
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/common/blockstring.go
@@ -0,0 +1,103 @@
+// MIT License
+//
+// Copyright (c) 2019 GraphQL Contributors
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+//
+// This implementation has been adapted from the graphql-js reference implementation
+// https://github.com/graphql/graphql-js/blob/5eb7c4ded7ceb83ac742149cbe0dae07a8af9a30/src/language/blockString.js
+// which is released under the MIT License above.
+
+package common
+
+import (
+ "strings"
+)
+
+// Produces the value of a block string from its parsed raw value, similar to
+// CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
+//
+// This implements the GraphQL spec's BlockStringValue() static algorithm.
+func blockString(raw string) string {
+ lines := strings.Split(raw, "\n")
+
+ // Remove common indentation from all lines except the first (which has none)
+ ind := blockStringIndentation(lines)
+ if ind > 0 {
+ for i := 1; i < len(lines); i++ {
+ l := lines[i]
+ // Lines shorter than the common indent can only contain whitespace
+ // (blockStringIndentation skips blank lines), so blank them out.
+ if len(l) < ind {
+ lines[i] = ""
+ continue
+ }
+ lines[i] = l[ind:]
+ }
+ }
+
+ // Remove leading and trailing blank lines
+ trimStart := 0
+ for i := 0; i < len(lines) && isBlank(lines[i]); i++ {
+ trimStart++
+ }
+ lines = lines[trimStart:]
+ trimEnd := 0
+ // i > 0: the first remaining line is never trimmed by this loop.
+ for i := len(lines) - 1; i > 0 && isBlank(lines[i]); i-- {
+ trimEnd++
+ }
+ lines = lines[:len(lines)-trimEnd]
+
+ return strings.Join(lines, "\n")
+}
+
+// blockStringIndentation returns the smallest leading-whitespace width shared
+// by all non-blank lines after the first. It returns 0 when there are no such
+// lines or when any of them starts at column 0.
+func blockStringIndentation(lines []string) int {
+ var commonIndent *int
+ for i := 1; i < len(lines); i++ {
+ l := lines[i]
+ indent := leadingWhitespace(l)
+ if indent == len(l) {
+ // don't consider blank/empty lines
+ continue
+ }
+ if indent == 0 {
+ // a flush-left line means there is no common indentation at all
+ return 0
+ }
+ if commonIndent == nil || indent < *commonIndent {
+ commonIndent = &indent
+ }
+ }
+ if commonIndent == nil {
+ return 0
+ }
+ return *commonIndent
+}
+
+// isBlank reports whether s is empty or consists only of tabs and spaces.
+func isBlank(s string) bool {
+ return len(s) == 0 || leadingWhitespace(s) == len(s)
+}
+
+// leadingWhitespace returns the number of leading tab/space characters in s.
+// Only ASCII tab and space are counted, so the rune count returned here is
+// also a valid byte offset into s (used for slicing in blockString).
+func leadingWhitespace(s string) int {
+ i := 0
+ for _, r := range s {
+ if r != '\t' && r != ' ' {
+ break
+ }
+ i++
+ }
+ return i
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/common/directive.go b/vendor/github.com/graph-gophers/graphql-go/internal/common/directive.go
new file mode 100644
index 00000000..f767e28f
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/common/directive.go
@@ -0,0 +1,18 @@
+package common
+
+import "github.com/graph-gophers/graphql-go/types"
+
+// ParseDirectives consumes zero or more directives of the form
+// @name or @name(args...) and returns them in source order.
+func ParseDirectives(l *Lexer) types.DirectiveList {
+ var directives types.DirectiveList
+ for l.Peek() == '@' {
+ l.ConsumeToken('@')
+ d := &types.Directive{}
+ d.Name = l.ConsumeIdentWithLoc()
+ // shift the location one column left so it covers the '@' marker too
+ d.Name.Loc.Column--
+ if l.Peek() == '(' {
+ d.Arguments = ParseArgumentList(l)
+ }
+ directives = append(directives, d)
+ }
+ return directives
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/common/lexer.go b/vendor/github.com/graph-gophers/graphql-go/internal/common/lexer.go
new file mode 100644
index 00000000..ff45bcad
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/common/lexer.go
@@ -0,0 +1,229 @@
+package common
+
+import (
+ "bytes"
+ "fmt"
+ "strconv"
+ "strings"
+ "text/scanner"
+
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// syntaxError is the panic payload raised by SyntaxError and recovered
+// by CatchSyntaxError, which converts it into an *errors.QueryError.
+type syntaxError string
+
+// Lexer tokenizes a GraphQL document using text/scanner, keeping a
+// one-token lookahead in next and accumulating '#' comments in comment.
+type Lexer struct {
+ sc *scanner.Scanner
+ next rune
+ comment bytes.Buffer
+ useStringDescriptions bool
+}
+
+// Ident is an identifier together with its source location.
+type Ident struct {
+ Name string
+ Loc errors.Location
+}
+
+// NewLexer returns a Lexer over s. When useStringDescriptions is true,
+// DescComment returns string-literal descriptions (June 2018 spec) instead
+// of accumulated '#' comments.
+func NewLexer(s string, useStringDescriptions bool) *Lexer {
+ sc := &scanner.Scanner{
+ Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
+ }
+ sc.Init(strings.NewReader(s))
+
+ l := Lexer{sc: sc, useStringDescriptions: useStringDescriptions}
+ l.sc.Error = l.CatchScannerError
+
+ return &l
+}
+
+// CatchSyntaxError runs f and converts any syntaxError panic raised inside it
+// into a located *errors.QueryError. Panics of any other type are re-raised.
+func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
+ defer func() {
+ if err := recover(); err != nil {
+ if err, ok := err.(syntaxError); ok {
+ errRes = errors.Errorf("syntax error: %s", err)
+ errRes.Locations = []errors.Location{l.Location()}
+ return
+ }
+ // not ours: propagate
+ panic(err)
+ }
+ }()
+
+ f()
+ return
+}
+
+// Peek returns the current lookahead token without consuming it.
+func (l *Lexer) Peek() rune {
+ return l.next
+}
+
+// ConsumeWhitespace consumes whitespace and tokens equivalent to whitespace (e.g. commas and comments).
+//
+// Consumed comment characters will build the description for the next type or field encountered.
+// The description is available from `DescComment()`, and will be reset every time `ConsumeWhitespace()` is
+// executed unless l.useStringDescriptions is set.
+func (l *Lexer) ConsumeWhitespace() {
+ l.comment.Reset()
+ for {
+ // advance the one-token lookahead
+ l.next = l.sc.Scan()
+
+ if l.next == ',' {
+ // Similar to white space and line terminators, commas (',') are used to improve the
+ // legibility of source text and separate lexical tokens but are otherwise syntactically and
+ // semantically insignificant within GraphQL documents.
+ //
+ // http://facebook.github.io/graphql/draft/#sec-Insignificant-Commas
+ continue
+ }
+
+ if l.next == '#' {
+ // GraphQL source documents may contain single-line comments, starting with the '#' marker.
+ //
+ // A comment can contain any Unicode code point except `LineTerminator` so a comment always
+ // consists of all code points starting with the '#' character up to but not including the
+ // line terminator.
+ l.consumeComment()
+ continue
+ }
+
+ break
+ }
+}
+
+// consumeDescription optionally consumes a description based on the June 2018 graphql spec if any are present.
+//
+// Single quote strings are also single line. Triple quote strings can be multi-line. Triple quote strings
+// are whitespace trimmed on both ends.
+// If a description is found, consume any following comments as well.
+//
+// http://facebook.github.io/graphql/June2018/#sec-Descriptions
+func (l *Lexer) consumeDescription() string {
+ // If the next token is not a string, we don't consume it
+ if l.next != scanner.String {
+ return ""
+ }
+ // Triple quote string is an empty "string" followed by an open quote due to the way the parser treats strings as one token
+ var desc string
+ if l.sc.Peek() == '"' {
+ desc = l.consumeTripleQuoteComment()
+ } else {
+ desc = l.consumeStringComment()
+ }
+ l.ConsumeWhitespace()
+ return desc
+}
+
+// ConsumeIdent consumes an identifier token and returns its text.
+func (l *Lexer) ConsumeIdent() string {
+ name := l.sc.TokenText()
+ l.ConsumeToken(scanner.Ident)
+ return name
+}
+
+// ConsumeIdentWithLoc consumes an identifier token and returns it together
+// with its source location.
+func (l *Lexer) ConsumeIdentWithLoc() types.Ident {
+ loc := l.Location()
+ name := l.sc.TokenText()
+ l.ConsumeToken(scanner.Ident)
+ return types.Ident{Name: name, Loc: loc}
+}
+
+// ConsumeKeyword consumes the given keyword identifier, raising a syntax
+// error if the current token is anything else.
+func (l *Lexer) ConsumeKeyword(keyword string) {
+ if l.next != scanner.Ident || l.sc.TokenText() != keyword {
+ l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
+ }
+ l.ConsumeWhitespace()
+}
+
+// ConsumeLiteral consumes the current token as a primitive value, recording
+// its token type and raw text.
+func (l *Lexer) ConsumeLiteral() *types.PrimitiveValue {
+ lit := &types.PrimitiveValue{Type: l.next, Text: l.sc.TokenText()}
+ l.ConsumeWhitespace()
+ return lit
+}
+
+// ConsumeToken consumes the current token, raising a syntax error if it is
+// not the expected rune.
+func (l *Lexer) ConsumeToken(expected rune) {
+ if l.next != expected {
+ l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected)))
+ }
+ l.ConsumeWhitespace()
+}
+
+// DescComment returns the description for the next declaration: either the
+// accumulated '#' comments or, when useStringDescriptions is set, the string
+// description consumed here.
+func (l *Lexer) DescComment() string {
+ comment := l.comment.String()
+ desc := l.consumeDescription()
+ if l.useStringDescriptions {
+ return desc
+ }
+ return comment
+}
+
+// SyntaxError aborts lexing by panicking with a syntaxError; the panic is
+// recovered and converted by CatchSyntaxError.
+func (l *Lexer) SyntaxError(message string) {
+ panic(syntaxError(message))
+}
+
+// Location returns the scanner's current source position.
+func (l *Lexer) Location() errors.Location {
+ return errors.Location{
+ Line: l.sc.Line,
+ Column: l.sc.Column,
+ }
+}
+
+// consumeTripleQuoteComment reads the body of a triple-quoted ("""...""")
+// description. It is called after the scanner has already consumed an empty
+// "" string token; the next rune must be the third opening quote. The raw
+// body (closing quotes stripped) is normalized via blockString.
+func (l *Lexer) consumeTripleQuoteComment() string {
+ l.next = l.sc.Next()
+ if l.next != '"' {
+ panic("consumeTripleQuoteComment used in wrong context: no third quote?")
+ }
+
+ var buf bytes.Buffer
+ var numQuotes int
+ for {
+ l.next = l.sc.Next()
+ if l.next == '"' {
+ numQuotes++
+ } else {
+ numQuotes = 0
+ }
+ buf.WriteRune(l.next)
+ // stop at the closing """ or at end of input
+ if numQuotes == 3 || l.next == scanner.EOF {
+ break
+ }
+ }
+ val := buf.String()
+ // drop the trailing quotes that were written into the buffer
+ val = val[:len(val)-numQuotes]
+ return blockString(val)
+}
+
+// consumeStringComment unquotes a single-line string description token.
+func (l *Lexer) consumeStringComment() string {
+ val, err := strconv.Unquote(l.sc.TokenText())
+ if err != nil {
+ panic(err)
+ }
+ return val
+}
+
+// consumeComment consumes all characters from `#` to the first encountered line terminator.
+// The characters are appended to `l.comment`.
+func (l *Lexer) consumeComment() {
+ if l.next != '#' {
+ panic("consumeComment used in wrong context")
+ }
+
+ // TODO: count and trim whitespace so we can dedent any following lines.
+ if l.sc.Peek() == ' ' {
+ l.sc.Next()
+ }
+
+ // separate consecutive comment lines with a newline
+ if l.comment.Len() > 0 {
+ l.comment.WriteRune('\n')
+ }
+
+ for {
+ next := l.sc.Next()
+ if next == '\r' || next == '\n' || next == scanner.EOF {
+ break
+ }
+ l.comment.WriteRune(next)
+ }
+}
+
+// CatchScannerError is installed as the text/scanner error callback and
+// reroutes scanner errors through SyntaxError.
+func (l *Lexer) CatchScannerError(s *scanner.Scanner, msg string) {
+ l.SyntaxError(msg)
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/common/literals.go b/vendor/github.com/graph-gophers/graphql-go/internal/common/literals.go
new file mode 100644
index 00000000..a6af3c43
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/common/literals.go
@@ -0,0 +1,58 @@
+package common
+
+import (
+ "text/scanner"
+
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// ParseLiteral parses a GraphQL value: variable reference, primitive,
+// negative number, list or input object. When constOnly is true, variable
+// references ('$name') raise a syntax error, as required for const contexts
+// such as default values.
+func ParseLiteral(l *Lexer, constOnly bool) types.Value {
+ loc := l.Location()
+ switch l.Peek() {
+ case '$':
+ if constOnly {
+ l.SyntaxError("variable not allowed")
+ panic("unreachable")
+ }
+ l.ConsumeToken('$')
+ return &types.Variable{Name: l.ConsumeIdent(), Loc: loc}
+
+ case scanner.Int, scanner.Float, scanner.String, scanner.Ident:
+ lit := l.ConsumeLiteral()
+ // the identifier "null" is the null literal, not an enum value
+ if lit.Type == scanner.Ident && lit.Text == "null" {
+ return &types.NullValue{Loc: loc}
+ }
+ lit.Loc = loc
+ return lit
+ case '-':
+ // negative numbers arrive as two tokens: '-' then the number
+ l.ConsumeToken('-')
+ lit := l.ConsumeLiteral()
+ lit.Text = "-" + lit.Text
+ lit.Loc = loc
+ return lit
+ case '[':
+ l.ConsumeToken('[')
+ var list []types.Value
+ for l.Peek() != ']' {
+ list = append(list, ParseLiteral(l, constOnly))
+ }
+ l.ConsumeToken(']')
+ return &types.ListValue{Values: list, Loc: loc}
+
+ case '{':
+ l.ConsumeToken('{')
+ var fields []*types.ObjectField
+ for l.Peek() != '}' {
+ name := l.ConsumeIdentWithLoc()
+ l.ConsumeToken(':')
+ value := ParseLiteral(l, constOnly)
+ fields = append(fields, &types.ObjectField{Name: name, Value: value})
+ }
+ l.ConsumeToken('}')
+ return &types.ObjectValue{Fields: fields, Loc: loc}
+
+ default:
+ l.SyntaxError("invalid value")
+ panic("unreachable")
+ }
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/common/types.go b/vendor/github.com/graph-gophers/graphql-go/internal/common/types.go
new file mode 100644
index 00000000..4a30f46e
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/common/types.go
@@ -0,0 +1,67 @@
+package common
+
+import (
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// ParseType parses a type reference, wrapping it in NonNull when followed
+// by '!'.
+func ParseType(l *Lexer) types.Type {
+ t := parseNullType(l)
+ if l.Peek() == '!' {
+ l.ConsumeToken('!')
+ return &types.NonNull{OfType: t}
+ }
+ return t
+}
+
+// parseNullType parses the nullable part of a type reference: either a list
+// type '[T]' (recursing for the element type) or a named type.
+func parseNullType(l *Lexer) types.Type {
+ if l.Peek() == '[' {
+ l.ConsumeToken('[')
+ ofType := ParseType(l)
+ l.ConsumeToken(']')
+ return &types.List{OfType: ofType}
+ }
+
+ return &types.TypeName{Ident: l.ConsumeIdentWithLoc()}
+}
+
+// Resolver maps a type name to its definition, or nil if the name is unknown.
+type Resolver func(name string) types.Type
+
+// ResolveType attempts to resolve a type's name against a resolving function.
+// This function is used when one needs to check if a TypeName exists in the resolver (typically a Schema).
+//
+// In the example below, ResolveType would be used to check if the resolving function
+// returns a valid type for Dimension:
+//
+// type Profile {
+// picture(dimensions: Dimension): Url
+// }
+//
+// ResolveType recursively unwraps List and NonNull types until a NamedType is reached.
+func ResolveType(t types.Type, resolver Resolver) (types.Type, *errors.QueryError) {
+ switch t := t.(type) {
+ case *types.List:
+ ofType, err := ResolveType(t.OfType, resolver)
+ if err != nil {
+ return nil, err
+ }
+ return &types.List{OfType: ofType}, nil
+ case *types.NonNull:
+ ofType, err := ResolveType(t.OfType, resolver)
+ if err != nil {
+ return nil, err
+ }
+ return &types.NonNull{OfType: ofType}, nil
+ case *types.TypeName:
+ refT := resolver(t.Name)
+ if refT == nil {
+ // unknown name: report with the KnownTypeNames validation rule
+ err := errors.Errorf("Unknown type %q.", t.Name)
+ err.Rule = "KnownTypeNames"
+ err.Locations = []errors.Location{t.Loc}
+ return nil, err
+ }
+ return refT, nil
+ default:
+ // already a resolved/concrete type; return as-is
+ return t, nil
+ }
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/common/values.go b/vendor/github.com/graph-gophers/graphql-go/internal/common/values.go
new file mode 100644
index 00000000..2d6e0b54
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/common/values.go
@@ -0,0 +1,37 @@
+package common
+
+import (
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// ParseInputValue parses an input value definition: description, name, type,
+// optional '=' default value and optional directives.
+func ParseInputValue(l *Lexer) *types.InputValueDefinition {
+ p := &types.InputValueDefinition{}
+ p.Loc = l.Location()
+ p.Desc = l.DescComment()
+ p.Name = l.ConsumeIdentWithLoc()
+ l.ConsumeToken(':')
+ p.TypeLoc = l.Location()
+ p.Type = ParseType(l)
+ if l.Peek() == '=' {
+ l.ConsumeToken('=')
+ // default values must be constant (no variable references)
+ p.Default = ParseLiteral(l, true)
+ }
+ p.Directives = ParseDirectives(l)
+ return p
+}
+
+// ParseArgumentList parses a parenthesized list of 'name: value' arguments.
+func ParseArgumentList(l *Lexer) types.ArgumentList {
+ var args types.ArgumentList
+ l.ConsumeToken('(')
+ for l.Peek() != ')' {
+ name := l.ConsumeIdentWithLoc()
+ l.ConsumeToken(':')
+ value := ParseLiteral(l, false)
+ args = append(args, &types.Argument{
+ Name: name,
+ Value: value,
+ })
+ }
+ l.ConsumeToken(')')
+ return args
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/exec/exec.go b/vendor/github.com/graph-gophers/graphql-go/internal/exec/exec.go
new file mode 100644
index 00000000..6b478487
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/exec/exec.go
@@ -0,0 +1,381 @@
+package exec
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "sync"
+ "time"
+
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/internal/exec/resolvable"
+ "github.com/graph-gophers/graphql-go/internal/exec/selected"
+ "github.com/graph-gophers/graphql-go/internal/query"
+ "github.com/graph-gophers/graphql-go/log"
+ "github.com/graph-gophers/graphql-go/trace"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// Request carries the per-request execution state: the selected operation
+// plus concurrency limiting, tracing, logging and panic handling.
+type Request struct {
+ selected.Request
+ Limiter chan struct{}
+ Tracer trace.Tracer
+ Logger log.Logger
+ PanicHandler errors.PanicHandler
+ SubscribeResolverTimeout time.Duration
+}
+
+// handlePanic recovers a panic from a resolver goroutine, logs it and
+// records it as a query error. Intended to be deferred.
+func (r *Request) handlePanic(ctx context.Context) {
+ if value := recover(); value != nil {
+ r.Logger.LogPanic(ctx, value)
+ r.AddError(r.PanicHandler.MakePanicError(ctx, value))
+ }
+}
+
+// extensionser is implemented by resolver errors that carry additional
+// data for the response's error "extensions" field.
+type extensionser interface {
+ Extensions() map[string]interface{}
+}
+
+// Execute runs the operation against the schema and returns the serialized
+// "data" JSON plus any accumulated query errors. Mutations execute their
+// top-level selections serially, per the spec.
+func (r *Request) Execute(ctx context.Context, s *resolvable.Schema, op *types.OperationDefinition) ([]byte, []*errors.QueryError) {
+ var out bytes.Buffer
+ func() {
+ defer r.handlePanic(ctx)
+ sels := selected.ApplyOperation(&r.Request, s, op)
+ r.execSelections(ctx, sels, nil, s, s.Resolver, &out, op.Type == query.Mutation)
+ }()
+
+ if err := ctx.Err(); err != nil {
+ // context cancellation supersedes any partial result
+ return nil, []*errors.QueryError{errors.Errorf("%s", err)}
+ }
+
+ return out.Bytes(), r.Errs
+}
+
+// fieldToExec pairs a selected field with its sub-selections, the resolver
+// value it executes against, and the buffer its output is written into.
+type fieldToExec struct {
+ field *selected.SchemaField
+ sels []selected.Selection
+ resolver reflect.Value
+ out *bytes.Buffer
+}
+
+// resolvedToNull reports whether a field's serialized output is JSON null.
+func resolvedToNull(b *bytes.Buffer) bool {
+ return bytes.Equal(b.Bytes(), []byte("null"))
+}
+
+// execSelections executes a selection set against resolver and writes a JSON
+// object to out. Fields run concurrently when the set has async fields and
+// serially is false (mutations force serial execution).
+func (r *Request) execSelections(ctx context.Context, sels []selected.Selection, path *pathSegment, s *resolvable.Schema, resolver reflect.Value, out *bytes.Buffer, serially bool) {
+ async := !serially && selected.HasAsyncSel(sels)
+
+ var fields []*fieldToExec
+ collectFieldsToResolve(sels, s, resolver, &fields, make(map[string]*fieldToExec))
+
+ if async {
+ var wg sync.WaitGroup
+ wg.Add(len(fields))
+ for _, f := range fields {
+ go func(f *fieldToExec) {
+ defer wg.Done()
+ defer r.handlePanic(ctx)
+ f.out = new(bytes.Buffer)
+ execFieldSelection(ctx, r, s, f, &pathSegment{path, f.field.Alias}, true)
+ }(f)
+ }
+ wg.Wait()
+ } else {
+ for _, f := range fields {
+ f.out = new(bytes.Buffer)
+ execFieldSelection(ctx, r, s, f, &pathSegment{path, f.field.Alias}, true)
+ }
+ }
+
+ out.WriteByte('{')
+ for i, f := range fields {
+ // If a non-nullable child resolved to null, an error was added to the
+ // "errors" list in the response, so this field resolves to null.
+ // If this field is non-nullable, the error is propagated to its parent.
+ if _, ok := f.field.Type.(*types.NonNull); ok && resolvedToNull(f.out) {
+ out.Reset()
+ out.Write([]byte("null"))
+ return
+ }
+
+ if i > 0 {
+ out.WriteByte(',')
+ }
+ out.WriteByte('"')
+ out.WriteString(f.field.Alias)
+ out.WriteByte('"')
+ out.WriteByte(':')
+ out.Write(f.out.Bytes())
+ }
+ out.WriteByte('}')
+}
+
+// collectFieldsToResolve flattens a selection set into the list of fields to
+// execute, merging selections that share an alias and expanding type
+// assertions (fragments on concrete types) against the resolver.
+func collectFieldsToResolve(sels []selected.Selection, s *resolvable.Schema, resolver reflect.Value, fields *[]*fieldToExec, fieldByAlias map[string]*fieldToExec) {
+ for _, sel := range sels {
+ switch sel := sel.(type) {
+ case *selected.SchemaField:
+ field, ok := fieldByAlias[sel.Alias]
+ if !ok { // validation already checked for conflict (TODO)
+ field = &fieldToExec{field: sel, resolver: resolver}
+ fieldByAlias[sel.Alias] = field
+ *fields = append(*fields, field)
+ }
+ // merge sub-selections of same-alias fields
+ field.sels = append(field.sels, sel.Sels...)
+
+ case *selected.TypenameField:
+ _, ok := fieldByAlias[sel.Alias]
+ if !ok {
+ // __typename: resolve the concrete type name now and store it
+ // as a fixed result so no resolver call is needed later
+ res := reflect.ValueOf(typeOf(sel, resolver))
+ f := s.FieldTypename
+ f.TypeName = res.String()
+
+ sf := &selected.SchemaField{
+ Field: f,
+ Alias: sel.Alias,
+ FixedResult: res,
+ }
+
+ field := &fieldToExec{field: sf, resolver: resolver}
+ *fields = append(*fields, field)
+ fieldByAlias[sel.Alias] = field
+ }
+
+ case *selected.TypeAssertion:
+ // call the ToXxx() (T, bool) assertion method; skip if it fails
+ out := resolver.Method(sel.MethodIndex).Call(nil)
+ if !out[1].Bool() {
+ continue
+ }
+ collectFieldsToResolve(sel.Sels, s, out[0], fields, fieldByAlias)
+
+ default:
+ panic("unreachable")
+ }
+ }
+}
+
+// typeOf determines the concrete GraphQL type name for __typename by trying
+// each registered type assertion; returns "" if none matches.
+func typeOf(tf *selected.TypenameField, resolver reflect.Value) string {
+ if len(tf.TypeAssertions) == 0 {
+ return tf.Name
+ }
+ for name, a := range tf.TypeAssertions {
+ out := resolver.Method(a.MethodIndex).Call(nil)
+ if out[1].Bool() {
+ return name
+ }
+ }
+ return ""
+}
+
+// execFieldSelection resolves a single field (via method resolver, struct
+// field or fixed result), traces it, and writes its serialized value to
+// f.out. When applyLimiter is true, a slot is taken from r.Limiter for the
+// duration of the resolver call to bound concurrency.
+func execFieldSelection(ctx context.Context, r *Request, s *resolvable.Schema, f *fieldToExec, path *pathSegment, applyLimiter bool) {
+ if applyLimiter {
+ r.Limiter <- struct{}{}
+ }
+
+ var result reflect.Value
+ var err *errors.QueryError
+
+ traceCtx, finish := r.Tracer.TraceField(ctx, f.field.TraceLabel, f.field.TypeName, f.field.Name, !f.field.Async, f.field.Args)
+ defer func() {
+ finish(err)
+ }()
+
+ err = func() (err *errors.QueryError) {
+ defer func() {
+ // convert resolver panics into query errors with the field path
+ if panicValue := recover(); panicValue != nil {
+ r.Logger.LogPanic(ctx, panicValue)
+ err = r.PanicHandler.MakePanicError(ctx, panicValue)
+ err.Path = path.toSlice()
+ }
+ }()
+
+ if f.field.FixedResult.IsValid() {
+ result = f.field.FixedResult
+ return nil
+ }
+
+ if err := traceCtx.Err(); err != nil {
+ return errors.Errorf("%s", err) // don't execute any more resolvers if context got cancelled
+ }
+
+ res := f.resolver
+ if f.field.UseMethodResolver() {
+ var in []reflect.Value
+ if f.field.HasContext {
+ in = append(in, reflect.ValueOf(traceCtx))
+ }
+ if f.field.ArgsPacker != nil {
+ in = append(in, f.field.PackedArgs)
+ }
+ callOut := res.Method(f.field.MethodIndex).Call(in)
+ result = callOut[0]
+ if f.field.HasError && !callOut[1].IsNil() {
+ resolverErr := callOut[1].Interface().(error)
+ err := errors.Errorf("%s", resolverErr)
+ err.Path = path.toSlice()
+ err.ResolverError = resolverErr
+ if ex, ok := callOut[1].Interface().(extensionser); ok {
+ err.Extensions = ex.Extensions()
+ }
+ return err
+ }
+ } else {
+ // TODO extract out unwrapping ptr logic to a common place
+ if res.Kind() == reflect.Ptr {
+ res = res.Elem()
+ }
+ result = res.FieldByIndex(f.field.FieldIndex)
+ }
+ return nil
+ }()
+
+ if applyLimiter {
+ <-r.Limiter
+ }
+
+ if err != nil {
+ // If an error occurred while resolving a field, it should be treated as though the field
+ // returned null, and an error must be added to the "errors" list in the response.
+ r.AddError(err)
+ f.out.WriteString("null")
+ return
+ }
+
+ r.execSelectionSet(traceCtx, f.sels, f.field.Type, path, s, result, f.out)
+}
+
+// execSelectionSet serializes a resolved value of the given type to out:
+// nil handling (with non-null error propagation), object/interface/union
+// recursion, lists, scalars (JSON-marshaled) and enums (validated by name).
+func (r *Request) execSelectionSet(ctx context.Context, sels []selected.Selection, typ types.Type, path *pathSegment, s *resolvable.Schema, resolver reflect.Value, out *bytes.Buffer) {
+ t, nonNull := unwrapNonNull(typ)
+
+ // a reflect.Value of a nil interface will show up as an Invalid value
+ if resolver.Kind() == reflect.Invalid || ((resolver.Kind() == reflect.Ptr || resolver.Kind() == reflect.Interface) && resolver.IsNil()) {
+ // If a field of a non-null type resolves to null (either because the
+ // function to resolve the field returned null or because an error occurred),
+ // add an error to the "errors" list in the response.
+ if nonNull {
+ err := errors.Errorf("graphql: got nil for non-null %q", t)
+ err.Path = path.toSlice()
+ r.AddError(err)
+ }
+ out.WriteString("null")
+ return
+ }
+
+ switch t.(type) {
+ case *types.ObjectTypeDefinition, *types.InterfaceTypeDefinition, *types.Union:
+ r.execSelections(ctx, sels, path, s, resolver, out, false)
+ return
+ }
+
+ // Any pointers or interfaces at this point should be non-nil, so we can get the actual value of them
+ // for serialization
+ if resolver.Kind() == reflect.Ptr || resolver.Kind() == reflect.Interface {
+ resolver = resolver.Elem()
+ }
+
+ switch t := t.(type) {
+ case *types.List:
+ r.execList(ctx, sels, t, path, s, resolver, out)
+
+ case *types.ScalarTypeDefinition:
+ v := resolver.Interface()
+ data, err := json.Marshal(v)
+ if err != nil {
+ panic(errors.Errorf("could not marshal %v: %s", v, err))
+ }
+ out.Write(data)
+
+ case *types.EnumTypeDefinition:
+ // prefer the value's own Stringer; fall back to the reflect.Value's
+ var stringer fmt.Stringer = resolver
+ if s, ok := resolver.Interface().(fmt.Stringer); ok {
+ stringer = s
+ }
+ name := stringer.String()
+ var valid bool
+ for _, v := range t.EnumValuesDefinition {
+ if v.EnumValue == name {
+ valid = true
+ break
+ }
+ }
+ if !valid {
+ err := errors.Errorf("Invalid value %s.\nExpected type %s, found %s.", name, t.Name, name)
+ err.Path = path.toSlice()
+ r.AddError(err)
+ out.WriteString("null")
+ return
+ }
+ out.WriteByte('"')
+ out.WriteString(name)
+ out.WriteByte('"')
+
+ default:
+ panic("unreachable")
+ }
+}
+
+// execList serializes a list value to out, executing element selections
+// concurrently (bounded by the limiter's capacity) when the selection set
+// has async fields. A null element of a non-null element type nulls the
+// whole list.
+func (r *Request) execList(ctx context.Context, sels []selected.Selection, typ *types.List, path *pathSegment, s *resolvable.Schema, resolver reflect.Value, out *bytes.Buffer) {
+ l := resolver.Len()
+ entryouts := make([]bytes.Buffer, l)
+
+ if selected.HasAsyncSel(sels) {
+ // Limit the number of concurrent goroutines spawned as it can lead to large
+ // memory spikes for large lists.
+ concurrency := cap(r.Limiter)
+ sem := make(chan struct{}, concurrency)
+ for i := 0; i < l; i++ {
+ sem <- struct{}{}
+ go func(i int) {
+ defer func() { <-sem }()
+ defer r.handlePanic(ctx)
+ r.execSelectionSet(ctx, sels, typ.OfType, &pathSegment{path, i}, s, resolver.Index(i), &entryouts[i])
+ }(i)
+ }
+ // filling the semaphore to capacity waits for all workers to finish
+ for i := 0; i < concurrency; i++ {
+ sem <- struct{}{}
+ }
+ } else {
+ for i := 0; i < l; i++ {
+ r.execSelectionSet(ctx, sels, typ.OfType, &pathSegment{path, i}, s, resolver.Index(i), &entryouts[i])
+ }
+ }
+
+ _, listOfNonNull := typ.OfType.(*types.NonNull)
+
+ out.WriteByte('[')
+ for i, entryout := range entryouts {
+ // If the list wraps a non-null type and one of the list elements
+ // resolves to null, then the entire list resolves to null.
+ if listOfNonNull && resolvedToNull(&entryout) {
+ out.Reset()
+ out.WriteString("null")
+ return
+ }
+
+ if i > 0 {
+ out.WriteByte(',')
+ }
+ out.Write(entryout.Bytes())
+ }
+ out.WriteByte(']')
+}
+
+// unwrapNonNull strips a NonNull wrapper, reporting whether one was present.
+func unwrapNonNull(t types.Type) (types.Type, bool) {
+ if nn, ok := t.(*types.NonNull); ok {
+ return nn.OfType, true
+ }
+ return t, false
+}
+
+// pathSegment is a linked-list node recording the response path (field
+// aliases and list indices) for error reporting.
+type pathSegment struct {
+ parent *pathSegment
+ value interface{}
+}
+
+// toSlice flattens the path from root to this segment; nil yields nil.
+func (p *pathSegment) toSlice() []interface{} {
+ if p == nil {
+ return nil
+ }
+ return append(p.parent.toSlice(), p.value)
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/exec/packer/packer.go b/vendor/github.com/graph-gophers/graphql-go/internal/exec/packer/packer.go
new file mode 100644
index 00000000..c0bb7dc9
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/exec/packer/packer.go
@@ -0,0 +1,390 @@
+package packer
+
+import (
+ "fmt"
+ "math"
+ "reflect"
+ "strings"
+
+ "github.com/graph-gophers/graphql-go/decode"
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
// packer converts a raw JSON-decoded GraphQL input value (string, bool,
// float64, map[string]interface{}, []interface{} or nil) into the
// reflect.Value expected by a resolver.
type packer interface {
	Pack(value interface{}) (reflect.Value, error)
}

// Builder constructs packers for a schema. Packers are memoized per
// (GraphQL type, Go type) pair so recursive input types terminate; the
// recorded targets are patched in Finish.
type Builder struct {
	packerMap     map[typePair]*packerMapEntry
	structPackers []*StructPacker
}

// typePair is the memoization key: a GraphQL input type paired with the Go
// type it packs into.
type typePair struct {
	graphQLType  types.Type
	resolverType reflect.Type
}

// packerMapEntry records a built packer plus every location that still
// needs a reference to it (filled in by Finish).
type packerMapEntry struct {
	packer  packer
	targets []*packer
}

// NewBuilder returns an empty packer Builder.
func NewBuilder() *Builder {
	return &Builder{
		packerMap: make(map[typePair]*packerMapEntry),
	}
}

// Finish completes the build: it patches all recorded packer targets and
// then pre-packs every struct packer's default values into a prototype
// struct. Must be called after all packers have been assigned, since
// defaults may reference packers built later.
func (b *Builder) Finish() error {
	for _, entry := range b.packerMap {
		for _, target := range entry.targets {
			*target = entry.packer
		}
	}

	for _, p := range b.structPackers {
		p.defaultStruct = reflect.New(p.structType).Elem()
		for _, f := range p.fields {
			if defaultVal := f.field.Default; defaultVal != nil {
				// Deserialize(nil): defaults cannot reference variables.
				v, err := f.fieldPacker.Pack(defaultVal.Deserialize(nil))
				if err != nil {
					return err
				}
				p.defaultStruct.FieldByIndex(f.fieldIndex).Set(v)
			}
		}
	}

	return nil
}
+
+func (b *Builder) assignPacker(target *packer, schemaType types.Type, reflectType reflect.Type) error {
+ k := typePair{schemaType, reflectType}
+ ref, ok := b.packerMap[k]
+ if !ok {
+ ref = &packerMapEntry{}
+ b.packerMap[k] = ref
+ var err error
+ ref.packer, err = b.makePacker(schemaType, reflectType)
+ if err != nil {
+ return err
+ }
+ }
+ ref.targets = append(ref.targets, target)
+ return nil
+}
+
+func (b *Builder) makePacker(schemaType types.Type, reflectType reflect.Type) (packer, error) {
+ t, nonNull := unwrapNonNull(schemaType)
+ if !nonNull {
+ if reflectType.Kind() == reflect.Ptr {
+ elemType := reflectType.Elem()
+ addPtr := true
+ if _, ok := t.(*types.InputObject); ok {
+ elemType = reflectType // keep pointer for input objects
+ addPtr = false
+ }
+ elem, err := b.makeNonNullPacker(t, elemType)
+ if err != nil {
+ return nil, err
+ }
+ return &nullPacker{
+ elemPacker: elem,
+ valueType: reflectType,
+ addPtr: addPtr,
+ }, nil
+ } else if isNullable(reflectType) {
+ elemType := reflectType
+ addPtr := false
+ elem, err := b.makeNonNullPacker(t, elemType)
+ if err != nil {
+ return nil, err
+ }
+ return &nullPacker{
+ elemPacker: elem,
+ valueType: reflectType,
+ addPtr: addPtr,
+ }, nil
+ } else {
+ return nil, fmt.Errorf("%s is not a pointer or a nullable type", reflectType)
+ }
+ }
+
+ return b.makeNonNullPacker(t, reflectType)
+}
+
// makeNonNullPacker builds a packer for a non-null schema type. Types
// implementing decode.Unmarshaler take precedence over built-in handling.
func (b *Builder) makeNonNullPacker(schemaType types.Type, reflectType reflect.Type) (packer, error) {
	// Custom scalar: the type unmarshals itself.
	if u, ok := reflect.New(reflectType).Interface().(decode.Unmarshaler); ok {
		if !u.ImplementsGraphQLType(schemaType.String()) {
			return nil, fmt.Errorf("can not unmarshal %s into %s", schemaType, reflectType)
		}
		return &unmarshalerPacker{
			ValueType: reflectType,
		}, nil
	}

	switch t := schemaType.(type) {
	case *types.ScalarTypeDefinition:
		return &ValuePacker{
			ValueType: reflectType,
		}, nil

	case *types.EnumTypeDefinition:
		// Enum values are packed as their string representation.
		if reflectType.Kind() != reflect.String {
			return nil, fmt.Errorf("wrong type, expected %s", reflect.String)
		}
		return &ValuePacker{
			ValueType: reflectType,
		}, nil

	case *types.InputObject:
		e, err := b.MakeStructPacker(t.Values, reflectType)
		if err != nil {
			return nil, err
		}
		return e, nil

	case *types.List:
		if reflectType.Kind() != reflect.Slice {
			return nil, fmt.Errorf("expected slice, got %s", reflectType)
		}
		p := &listPacker{
			sliceType: reflectType,
		}
		if err := b.assignPacker(&p.elem, t.OfType, reflectType.Elem()); err != nil {
			return nil, err
		}
		return p, nil

	case *types.ObjectTypeDefinition, *types.InterfaceTypeDefinition, *types.Union:
		// Output types can never appear in input position.
		return nil, fmt.Errorf("type of kind %s can not be used as input", t.Kind())

	default:
		panic("unreachable")
	}
}

// MakeStructPacker builds a StructPacker that fills the Go struct typ (or
// *struct) from GraphQL input values. Field names are matched
// case-insensitively, ignoring underscores.
func (b *Builder) MakeStructPacker(values []*types.InputValueDefinition, typ reflect.Type) (*StructPacker, error) {
	structType := typ
	usePtr := false
	if typ.Kind() == reflect.Ptr {
		structType = typ.Elem()
		usePtr = true
	}
	if structType.Kind() != reflect.Struct {
		return nil, fmt.Errorf("expected struct or pointer to struct, got %s (hint: missing `args struct { ... }` wrapper for field arguments?)", typ)
	}

	var fields []*structPackerField
	for _, v := range values {
		fe := &structPackerField{field: v}
		fx := func(n string) bool {
			return strings.EqualFold(stripUnderscore(n), stripUnderscore(v.Name.Name))
		}

		sf, ok := structType.FieldByNameFunc(fx)
		if !ok {
			return nil, fmt.Errorf("%s does not define field %q (hint: missing `args struct { ... }` wrapper for field arguments, or missing field on input struct)", typ, v.Name.Name)
		}
		if sf.PkgPath != "" {
			return nil, fmt.Errorf("field %q must be exported", sf.Name)
		}
		fe.fieldIndex = sf.Index

		ft := v.Type
		if v.Default != nil {
			// A field with a default is effectively non-null: the default
			// is substituted whenever the value is absent (see Finish).
			ft, _ = unwrapNonNull(ft)
			ft = &types.NonNull{OfType: ft}
		}

		if err := b.assignPacker(&fe.fieldPacker, ft, sf.Type); err != nil {
			return nil, fmt.Errorf("field %q: %s", sf.Name, err)
		}

		fields = append(fields, fe)
	}

	p := &StructPacker{
		structType: structType,
		usePtr:     usePtr,
		fields:     fields,
	}
	// Register so Finish can pre-pack default values once all packers exist.
	b.structPackers = append(b.structPackers, p)
	return p, nil
}

// StructPacker packs a GraphQL input object (map[string]interface{}) into a
// Go struct, applying schema defaults for absent fields.
type StructPacker struct {
	structType    reflect.Type
	usePtr        bool          // return *struct instead of struct
	defaultStruct reflect.Value // prototype with defaults applied (set in Finish)
	fields        []*structPackerField
}

// structPackerField wires one input value definition to its struct field.
type structPackerField struct {
	field       *types.InputValueDefinition
	fieldIndex  []int
	fieldPacker packer
}

// Pack converts value (a map of field name to raw value) into the struct.
// Null is rejected: nullable input objects are guarded by nullPacker.
func (p *StructPacker) Pack(value interface{}) (reflect.Value, error) {
	if value == nil {
		return reflect.Value{}, errors.Errorf("got null for non-null")
	}

	values := value.(map[string]interface{})
	v := reflect.New(p.structType)
	// Start from the defaults prototype so absent fields keep defaults.
	v.Elem().Set(p.defaultStruct)
	for _, f := range p.fields {
		if value, ok := values[f.field.Name.Name]; ok {
			packed, err := f.fieldPacker.Pack(value)
			if err != nil {
				return reflect.Value{}, err
			}
			v.Elem().FieldByIndex(f.fieldIndex).Set(packed)
		}
	}
	if !p.usePtr {
		return v.Elem(), nil
	}
	return v, nil
}
+
+type listPacker struct {
+ sliceType reflect.Type
+ elem packer
+}
+
+func (e *listPacker) Pack(value interface{}) (reflect.Value, error) {
+ list, ok := value.([]interface{})
+ if !ok {
+ list = []interface{}{value}
+ }
+
+ v := reflect.MakeSlice(e.sliceType, len(list), len(list))
+ for i := range list {
+ packed, err := e.elem.Pack(list[i])
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ v.Index(i).Set(packed)
+ }
+ return v, nil
+}
+
// nullPacker wraps the packer of a non-null element type to accept null,
// optionally re-wrapping the packed element in a pointer.
type nullPacker struct {
	elemPacker packer
	valueType  reflect.Type
	addPtr     bool
}

// Pack returns the zero value of valueType (typically a nil pointer) for a
// null input, unless the type is a NullUnmarshaller — then the nil is
// forwarded so the type can record it. Otherwise it delegates to the
// element packer.
func (p *nullPacker) Pack(value interface{}) (reflect.Value, error) {
	if value == nil && !isNullable(p.valueType) {
		return reflect.Zero(p.valueType), nil
	}

	v, err := p.elemPacker.Pack(value)
	if err != nil {
		return reflect.Value{}, err
	}

	if p.addPtr {
		ptr := reflect.New(p.valueType.Elem())
		ptr.Elem().Set(v)
		return ptr, nil
	}

	return v, nil
}

// ValuePacker packs built-in scalar and enum values by coercing the raw
// JSON-decoded value to ValueType.
type ValuePacker struct {
	ValueType reflect.Type
}

// Pack coerces value via unmarshalInput; null is rejected since ValuePacker
// only appears in non-null positions.
func (p *ValuePacker) Pack(value interface{}) (reflect.Value, error) {
	if value == nil {
		return reflect.Value{}, errors.Errorf("got null for non-null")
	}

	coerced, err := unmarshalInput(p.ValueType, value)
	if err != nil {
		return reflect.Value{}, fmt.Errorf("could not unmarshal %#v (%T) into %s: %s", value, value, p.ValueType, err)
	}
	return reflect.ValueOf(coerced), nil
}

// unmarshalerPacker delegates packing to the type's own decode.Unmarshaler
// implementation (custom scalars).
type unmarshalerPacker struct {
	ValueType reflect.Type
}

// Pack allocates a fresh ValueType and lets it unmarshal the raw value.
func (p *unmarshalerPacker) Pack(value interface{}) (reflect.Value, error) {
	if value == nil && !isNullable(p.ValueType) {
		return reflect.Value{}, errors.Errorf("got null for non-null")
	}

	v := reflect.New(p.ValueType)
	if err := v.Interface().(decode.Unmarshaler).UnmarshalGraphQL(value); err != nil {
		return reflect.Value{}, err
	}
	return v.Elem(), nil
}
+
+func unmarshalInput(typ reflect.Type, input interface{}) (interface{}, error) {
+ if reflect.TypeOf(input) == typ {
+ return input, nil
+ }
+
+ switch typ.Kind() {
+ case reflect.Int32:
+ switch input := input.(type) {
+ case int:
+ if input < math.MinInt32 || input > math.MaxInt32 {
+ return nil, fmt.Errorf("not a 32-bit integer")
+ }
+ return int32(input), nil
+ case float64:
+ coerced := int32(input)
+ if input < math.MinInt32 || input > math.MaxInt32 || float64(coerced) != input {
+ return nil, fmt.Errorf("not a 32-bit integer")
+ }
+ return coerced, nil
+ }
+
+ case reflect.Float64:
+ switch input := input.(type) {
+ case int32:
+ return float64(input), nil
+ case int:
+ return float64(input), nil
+ }
+
+ case reflect.String:
+ if reflect.TypeOf(input).ConvertibleTo(typ) {
+ return reflect.ValueOf(input).Convert(typ).Interface(), nil
+ }
+ }
+
+ return nil, fmt.Errorf("incompatible type")
+}
+
+func unwrapNonNull(t types.Type) (types.Type, bool) {
+ if nn, ok := t.(*types.NonNull); ok {
+ return nn.OfType, true
+ }
+ return t, false
+}
+
// stripUnderscore removes every underscore from s. GraphQL names are
// matched against Go identifiers case-insensitively and
// underscore-insensitively.
func stripUnderscore(s string) string {
	// strings.ReplaceAll is the idiomatic form of Replace(..., -1).
	return strings.ReplaceAll(s, "_", "")
}
+
// NullUnmarshaller is an unmarshaller that can handle a nil input
type NullUnmarshaller interface {
	decode.Unmarshaler
	Nullable()
}

// isNullable reports whether *t implements NullUnmarshaller, i.e. the type
// accepts null itself rather than requiring a pointer wrapper.
func isNullable(t reflect.Type) bool {
	_, ok := reflect.New(t).Interface().(NullUnmarshaller)
	return ok
}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/meta.go b/vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/meta.go
new file mode 100644
index 00000000..02d5e262
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/meta.go
@@ -0,0 +1,70 @@
+package resolvable
+
+import (
+ "reflect"
+
+ "github.com/graph-gophers/graphql-go/introspection"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// Meta defines the details of the metadata schema for introspection.
+type Meta struct {
+ FieldSchema Field
+ FieldType Field
+ FieldTypename Field
+ Schema *Object
+ Type *Object
+}
+
+func newMeta(s *types.Schema) *Meta {
+ var err error
+ b := newBuilder(s)
+
+ metaSchema := s.Types["__Schema"].(*types.ObjectTypeDefinition)
+ so, err := b.makeObjectExec(metaSchema.Name, metaSchema.Fields, nil, false, reflect.TypeOf(&introspection.Schema{}))
+ if err != nil {
+ panic(err)
+ }
+
+ metaType := s.Types["__Type"].(*types.ObjectTypeDefinition)
+ t, err := b.makeObjectExec(metaType.Name, metaType.Fields, nil, false, reflect.TypeOf(&introspection.Type{}))
+ if err != nil {
+ panic(err)
+ }
+
+ if err := b.finish(); err != nil {
+ panic(err)
+ }
+
+ fieldTypename := Field{
+ FieldDefinition: types.FieldDefinition{
+ Name: "__typename",
+ Type: &types.NonNull{OfType: s.Types["String"]},
+ },
+ TraceLabel: "GraphQL field: __typename",
+ }
+
+ fieldSchema := Field{
+ FieldDefinition: types.FieldDefinition{
+ Name: "__schema",
+ Type: s.Types["__Schema"],
+ },
+ TraceLabel: "GraphQL field: __schema",
+ }
+
+ fieldType := Field{
+ FieldDefinition: types.FieldDefinition{
+ Name: "__type",
+ Type: s.Types["__Type"],
+ },
+ TraceLabel: "GraphQL field: __type",
+ }
+
+ return &Meta{
+ FieldSchema: fieldSchema,
+ FieldTypename: fieldTypename,
+ FieldType: fieldType,
+ Schema: so,
+ Type: t,
+ }
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/resolvable.go b/vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/resolvable.go
new file mode 100644
index 00000000..3410f557
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/exec/resolvable/resolvable.go
@@ -0,0 +1,453 @@
+package resolvable
+
+import (
+ "context"
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/graph-gophers/graphql-go/decode"
+ "github.com/graph-gophers/graphql-go/internal/exec/packer"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
// Schema is a GraphQL schema bound to a resolver value, ready for execution.
type Schema struct {
	*Meta
	types.Schema
	Query        Resolvable
	Mutation     Resolvable
	Subscription Resolvable
	Resolver     reflect.Value
}

// Resolvable is the exec plan for one schema type: *Object, *List or *Scalar.
type Resolvable interface {
	isResolvable()
}

// Object is the exec plan for an object, interface or union type.
type Object struct {
	Name           string
	Fields         map[string]*Field
	TypeAssertions map[string]*TypeAssertion // keyed by possible type name
}

// Field binds a schema field definition to the resolver method or struct
// field producing its value.
type Field struct {
	types.FieldDefinition
	TypeName    string
	MethodIndex int   // resolver method index, or -1 when FieldIndex is used
	FieldIndex  []int // struct field index path for field resolvers
	HasContext  bool  // method takes context.Context as first parameter
	HasError    bool  // method returns (value, error)
	ArgsPacker  *packer.StructPacker
	ValueExec   Resolvable
	TraceLabel  string
}

// UseMethodResolver reports whether the field resolves via a method rather
// than a struct field.
func (f *Field) UseMethodResolver() bool {
	return len(f.FieldIndex) == 0
}

// TypeAssertion converts an interface/union resolver to one possible type
// via its ToXxx method.
type TypeAssertion struct {
	MethodIndex int
	TypeExec    Resolvable
}

// List is the exec plan for a list type.
type List struct {
	Elem Resolvable
}

// Scalar is the exec plan for leaf values (scalars and enums).
type Scalar struct{}

func (*Object) isResolvable() {}
func (*List) isResolvable()   {}
func (*Scalar) isResolvable() {}
+
+func ApplyResolver(s *types.Schema, resolver interface{}) (*Schema, error) {
+ if resolver == nil {
+ return &Schema{Meta: newMeta(s), Schema: *s}, nil
+ }
+
+ b := newBuilder(s)
+
+ var query, mutation, subscription Resolvable
+
+ if t, ok := s.EntryPoints["query"]; ok {
+ if err := b.assignExec(&query, t, reflect.TypeOf(resolver)); err != nil {
+ return nil, err
+ }
+ }
+
+ if t, ok := s.EntryPoints["mutation"]; ok {
+ if err := b.assignExec(&mutation, t, reflect.TypeOf(resolver)); err != nil {
+ return nil, err
+ }
+ }
+
+ if t, ok := s.EntryPoints["subscription"]; ok {
+ if err := b.assignExec(&subscription, t, reflect.TypeOf(resolver)); err != nil {
+ return nil, err
+ }
+ }
+
+ if err := b.finish(); err != nil {
+ return nil, err
+ }
+
+ return &Schema{
+ Meta: newMeta(s),
+ Schema: *s,
+ Resolver: reflect.ValueOf(resolver),
+ Query: query,
+ Mutation: mutation,
+ Subscription: subscription,
+ }, nil
+}
+
// execBuilder builds and memoizes exec plans for a schema, deferring
// target patching and default-value packing until finish.
type execBuilder struct {
	schema        *types.Schema
	resMap        map[typePair]*resMapEntry
	packerBuilder *packer.Builder
}

// typePair is the memoization key: a GraphQL type paired with the Go
// resolver type serving it.
type typePair struct {
	graphQLType  types.Type
	resolverType reflect.Type
}

// resMapEntry records a built exec plus every location awaiting a
// reference to it (patched in finish).
type resMapEntry struct {
	exec    Resolvable
	targets []*Resolvable
}

// newBuilder returns an empty execBuilder for schema s.
func newBuilder(s *types.Schema) *execBuilder {
	return &execBuilder{
		schema:        s,
		resMap:        make(map[typePair]*resMapEntry),
		packerBuilder: packer.NewBuilder(),
	}
}
+
+func (b *execBuilder) finish() error {
+ for _, entry := range b.resMap {
+ for _, target := range entry.targets {
+ *target = entry.exec
+ }
+ }
+
+ return b.packerBuilder.Finish()
+}
+
+func (b *execBuilder) assignExec(target *Resolvable, t types.Type, resolverType reflect.Type) error {
+ k := typePair{t, resolverType}
+ ref, ok := b.resMap[k]
+ if !ok {
+ ref = &resMapEntry{}
+ b.resMap[k] = ref
+ var err error
+ ref.exec, err = b.makeExec(t, resolverType)
+ if err != nil {
+ return err
+ }
+ }
+ ref.targets = append(ref.targets, target)
+ return nil
+}
+
// makeExec builds the exec plan for GraphQL type t served by resolverType.
// Object-like types are handled before pointer unwrapping because the
// resolver for a nullable object is itself a pointer/interface value.
func (b *execBuilder) makeExec(t types.Type, resolverType reflect.Type) (Resolvable, error) {
	var nonNull bool
	t, nonNull = unwrapNonNull(t)

	switch t := t.(type) {
	case *types.ObjectTypeDefinition:
		return b.makeObjectExec(t.Name, t.Fields, nil, nonNull, resolverType)

	case *types.InterfaceTypeDefinition:
		return b.makeObjectExec(t.Name, t.Fields, t.PossibleTypes, nonNull, resolverType)

	case *types.Union:
		return b.makeObjectExec(t.Name, nil, t.UnionMemberTypes, nonNull, resolverType)
	}

	// Scalars, enums and lists: a nullable value must be a pointer to the
	// underlying value.
	if !nonNull {
		if resolverType.Kind() != reflect.Ptr {
			return nil, fmt.Errorf("%s is not a pointer", resolverType)
		}
		resolverType = resolverType.Elem()
	}

	switch t := t.(type) {
	case *types.ScalarTypeDefinition:
		return makeScalarExec(t, resolverType)

	case *types.EnumTypeDefinition:
		return &Scalar{}, nil

	case *types.List:
		if resolverType.Kind() != reflect.Slice {
			return nil, fmt.Errorf("%s is not a slice", resolverType)
		}
		e := &List{}
		if err := b.assignExec(&e.Elem, t.OfType, resolverType.Elem()); err != nil {
			return nil, err
		}
		return e, nil

	default:
		panic("invalid type: " + t.String())
	}
}

// makeScalarExec checks that resolverType is a valid Go representation of
// the built-in or custom scalar t.
func makeScalarExec(t *types.ScalarTypeDefinition, resolverType reflect.Type) (Resolvable, error) {
	implementsType := false
	switch r := reflect.New(resolverType).Interface().(type) {
	case *int32:
		implementsType = t.Name == "Int"
	case *float64:
		implementsType = t.Name == "Float"
	case *string:
		implementsType = t.Name == "String"
	case *bool:
		implementsType = t.Name == "Boolean"
	case decode.Unmarshaler:
		// Custom scalars report which GraphQL type they implement.
		implementsType = r.ImplementsGraphQLType(t.Name)
	}

	if !implementsType {
		return nil, fmt.Errorf("can not use %s as %s", resolverType, t.Name)
	}
	return &Scalar{}, nil
}
+
// makeObjectExec builds the exec plan for an object, interface or union
// type: it maps every schema field to a resolver method or struct field,
// and wires up ToXxx type assertions for the possible types.
func (b *execBuilder) makeObjectExec(typeName string, fields types.FieldsDefinition, possibleTypes []*types.ObjectTypeDefinition,
	nonNull bool, resolverType reflect.Type) (*Object, error) {
	if !nonNull {
		// A nullable object resolver must itself be able to represent null.
		if resolverType.Kind() != reflect.Ptr && resolverType.Kind() != reflect.Interface {
			return nil, fmt.Errorf("%s is not a pointer or interface", resolverType)
		}
	}

	methodHasReceiver := resolverType.Kind() != reflect.Interface

	Fields := make(map[string]*Field)
	rt := unwrapPtr(resolverType)
	fieldsCount := fieldCount(rt, map[string]int{})
	for _, f := range fields {
		var fieldIndex []int
		methodIndex := findMethod(resolverType, f.Name)
		// Methods take priority; struct fields are only consulted when
		// field resolvers are enabled and no method matched.
		if b.schema.UseFieldResolvers && methodIndex == -1 {
			if fieldsCount[strings.ToLower(stripUnderscore(f.Name))] > 1 {
				return nil, fmt.Errorf("%s does not resolve %q: ambiguous field %q", resolverType, typeName, f.Name)
			}
			fieldIndex = findField(rt, f.Name, []int{})
		}
		if methodIndex == -1 && len(fieldIndex) == 0 {
			hint := ""
			if findMethod(reflect.PtrTo(resolverType), f.Name) != -1 {
				hint = " (hint: the method exists on the pointer type)"
			}
			return nil, fmt.Errorf("%s does not resolve %q: missing method for field %q%s", resolverType, typeName, f.Name, hint)
		}

		var m reflect.Method
		var sf reflect.StructField
		if methodIndex != -1 {
			m = resolverType.Method(methodIndex)
		} else {
			sf = rt.FieldByIndex(fieldIndex)
		}
		fe, err := b.makeFieldExec(typeName, f, m, sf, methodIndex, fieldIndex, methodHasReceiver)
		if err != nil {
			var resolverName string
			if methodIndex != -1 {
				resolverName = m.Name
			} else {
				resolverName = sf.Name
			}
			return nil, fmt.Errorf("%s\n\tused by (%s).%s", err, resolverType, resolverName)
		}
		Fields[f.Name] = fe
	}

	// Check type assertions when
	// 1) using method resolvers
	// 2) Or resolver is not an interface type
	typeAssertions := make(map[string]*TypeAssertion)
	if !b.schema.UseFieldResolvers || resolverType.Kind() != reflect.Interface {
		for _, impl := range possibleTypes {
			methodIndex := findMethod(resolverType, "To"+impl.Name)
			if methodIndex == -1 {
				return nil, fmt.Errorf("%s does not resolve %q: missing method %q to convert to %q", resolverType, typeName, "To"+impl.Name, impl.Name)
			}
			if resolverType.Method(methodIndex).Type.NumOut() != 2 {
				return nil, fmt.Errorf("%s does not resolve %q: method %q should return a value and a bool indicating success", resolverType, typeName, "To"+impl.Name)
			}
			a := &TypeAssertion{
				MethodIndex: methodIndex,
			}
			if err := b.assignExec(&a.TypeExec, impl, resolverType.Method(methodIndex).Type.Out(0)); err != nil {
				return nil, err
			}
			typeAssertions[impl.Name] = a
		}
	}

	return &Object{
		Name:           typeName,
		Fields:         Fields,
		TypeAssertions: typeAssertions,
	}, nil
}

// Sentinel reflect types used to recognize context and error parameters.
var contextType = reflect.TypeOf((*context.Context)(nil)).Elem()
var errorType = reflect.TypeOf((*error)(nil)).Elem()

// makeFieldExec validates a single field resolver (method or struct field)
// against the schema field f and builds its Field exec. For methods the
// accepted shape is: optional receiver, optional context.Context, optional
// args struct (required when the field has arguments), returning the value
// and an optional trailing error.
func (b *execBuilder) makeFieldExec(typeName string, f *types.FieldDefinition, m reflect.Method, sf reflect.StructField,
	methodIndex int, fieldIndex []int, methodHasReceiver bool) (*Field, error) {

	var argsPacker *packer.StructPacker
	var hasError bool
	var hasContext bool

	// Validate resolver method only when there is one
	if methodIndex != -1 {
		in := make([]reflect.Type, m.Type.NumIn())
		for i := range in {
			in[i] = m.Type.In(i)
		}
		if methodHasReceiver {
			in = in[1:] // first parameter is receiver
		}

		hasContext = len(in) > 0 && in[0] == contextType
		if hasContext {
			in = in[1:]
		}

		if len(f.Arguments) > 0 {
			if len(in) == 0 {
				return nil, fmt.Errorf("must have parameter for field arguments")
			}
			var err error
			argsPacker, err = b.packerBuilder.MakeStructPacker(f.Arguments, in[0])
			if err != nil {
				return nil, err
			}
			in = in[1:]
		}

		if len(in) > 0 {
			return nil, fmt.Errorf("too many parameters")
		}

		maxNumOfReturns := 2
		if m.Type.NumOut() < maxNumOfReturns-1 {
			return nil, fmt.Errorf("too few return values")
		}

		if m.Type.NumOut() > maxNumOfReturns {
			return nil, fmt.Errorf("too many return values")
		}

		hasError = m.Type.NumOut() == maxNumOfReturns
		if hasError {
			if m.Type.Out(maxNumOfReturns-1) != errorType {
				return nil, fmt.Errorf(`must have "error" as its last return value`)
			}
		}
	}

	fe := &Field{
		FieldDefinition: *f,
		TypeName:        typeName,
		MethodIndex:     methodIndex,
		FieldIndex:      fieldIndex,
		HasContext:      hasContext,
		ArgsPacker:      argsPacker,
		HasError:        hasError,
		TraceLabel:      fmt.Sprintf("GraphQL field: %s.%s", typeName, f.Name),
	}

	var out reflect.Type
	if methodIndex != -1 {
		out = m.Type.Out(0)
		// Subscription root fields return a channel; the value exec is
		// built for the channel's element type.
		sub, ok := b.schema.EntryPoints["subscription"]
		if ok && typeName == sub.TypeName() && out.Kind() == reflect.Chan {
			out = m.Type.Out(0).Elem()
		}
	} else {
		out = sf.Type
	}
	if err := b.assignExec(&fe.ValueExec, f.Type, out); err != nil {
		return nil, err
	}

	return fe, nil
}
+
+func findMethod(t reflect.Type, name string) int {
+ for i := 0; i < t.NumMethod(); i++ {
+ if strings.EqualFold(stripUnderscore(name), stripUnderscore(t.Method(i).Name)) {
+ return i
+ }
+ }
+ return -1
+}
+
// findField returns the struct field index path (usable with FieldByIndex)
// of the field matching name, searching embedded structs depth-first.
// Matching is case-insensitive and ignores underscores. If nothing matches,
// the passed-in index is returned unchanged (possibly empty).
func findField(t reflect.Type, name string, index []int) []int {
	for i := 0; i < t.NumField(); i++ {
		field := t.Field(i)

		if field.Type.Kind() == reflect.Struct && field.Anonymous {
			newIndex := findField(field.Type, name, []int{i})
			// A result longer than 1 means the recursion actually found
			// the field inside the embedded struct.
			if len(newIndex) > 1 {
				return append(index, newIndex...)
			}
		}

		if strings.EqualFold(stripUnderscore(name), stripUnderscore(field.Name)) {
			return append(index, i)
		}
	}

	return index
}
+
+// fieldCount helps resolve ambiguity when more than one embedded struct contains fields with the same name.
+func fieldCount(t reflect.Type, count map[string]int) map[string]int {
+ if t.Kind() != reflect.Struct {
+ return nil
+ }
+
+ for i := 0; i < t.NumField(); i++ {
+ field := t.Field(i)
+ fieldName := strings.ToLower(stripUnderscore(field.Name))
+
+ if field.Type.Kind() == reflect.Struct && field.Anonymous {
+ count = fieldCount(field.Type, count)
+ } else {
+ if _, ok := count[fieldName]; !ok {
+ count[fieldName] = 0
+ }
+ count[fieldName]++
+ }
+ }
+
+ return count
+}
+
+func unwrapNonNull(t types.Type) (types.Type, bool) {
+ if nn, ok := t.(*types.NonNull); ok {
+ return nn.OfType, true
+ }
+ return t, false
+}
+
// stripUnderscore removes every underscore from s; GraphQL names are
// matched against Go identifiers underscore-insensitively.
func stripUnderscore(s string) string {
	// strings.ReplaceAll is the idiomatic form of Replace(..., -1).
	return strings.ReplaceAll(s, "_", "")
}
+
+func unwrapPtr(t reflect.Type) reflect.Type {
+ if t.Kind() == reflect.Ptr {
+ return t.Elem()
+ }
+ return t
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/exec/selected/selected.go b/vendor/github.com/graph-gophers/graphql-go/internal/exec/selected/selected.go
new file mode 100644
index 00000000..9b96d2b6
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/exec/selected/selected.go
@@ -0,0 +1,269 @@
+package selected
+
+import (
+ "fmt"
+ "reflect"
+ "sync"
+
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/internal/exec/packer"
+ "github.com/graph-gophers/graphql-go/internal/exec/resolvable"
+ "github.com/graph-gophers/graphql-go/internal/query"
+ "github.com/graph-gophers/graphql-go/introspection"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
// Request carries the per-request state shared by all goroutines resolving
// one operation.
type Request struct {
	Schema               *types.Schema
	Doc                  *types.ExecutableDefinition
	Vars                 map[string]interface{} // variable values
	Mu                   sync.Mutex             // guards Errs
	Errs                 []*errors.QueryError
	DisableIntrospection bool
}
+
+func (r *Request) AddError(err *errors.QueryError) {
+ r.Mu.Lock()
+ r.Errs = append(r.Errs, err)
+ r.Mu.Unlock()
+}
+
// ApplyOperation flattens the top-level selection set of op against the
// matching entry-point object of the schema.
func ApplyOperation(r *Request, s *resolvable.Schema, op *types.OperationDefinition) []Selection {
	var obj *resolvable.Object
	switch op.Type {
	case query.Query:
		obj = s.Query.(*resolvable.Object)
	case query.Mutation:
		obj = s.Mutation.(*resolvable.Object)
	case query.Subscription:
		obj = s.Subscription.(*resolvable.Object)
	}
	return applySelectionSet(r, s, obj, op.Selections)
}

// Selection is one flattened, executable selection: *SchemaField,
// *TypeAssertion or *TypenameField.
type Selection interface {
	isSelection()
}

// SchemaField is a field selection bound to its exec info and arguments.
type SchemaField struct {
	resolvable.Field
	Alias       string
	Args        map[string]interface{}
	PackedArgs  reflect.Value
	Sels        []Selection
	Async       bool          // must be resolved in its own goroutine
	FixedResult reflect.Value // pre-computed result (introspection); zero Value otherwise
}

// TypeAssertion selects into one possible type of an interface/union.
type TypeAssertion struct {
	resolvable.TypeAssertion
	Sels []Selection
}

// TypenameField is a __typename selection on a concrete object.
type TypenameField struct {
	resolvable.Object
	Alias string
}

func (*SchemaField) isSelection()   {}
func (*TypeAssertion) isSelection() {}
func (*TypenameField) isSelection() {}
+
// applySelectionSet flattens the selection set sels against object exec e:
// it expands fragments, evaluates @skip/@include, packs field arguments and
// resolves the synthetic __typename/__schema/__type fields. Errors during
// argument packing are recorded on the request and abort the flattening.
func applySelectionSet(r *Request, s *resolvable.Schema, e *resolvable.Object, sels []types.Selection) (flattenedSels []Selection) {
	for _, sel := range sels {
		switch sel := sel.(type) {
		case *types.Field:
			field := sel
			if skipByDirective(r, field.Directives) {
				continue
			}

			switch field.Name.Name {
			case "__typename":
				// __typename is available even though r.DisableIntrospection == true
				// because it is necessary when using union types and interfaces: https://graphql.org/learn/schema/#union-types
				flattenedSels = append(flattenedSels, &TypenameField{
					Object: *e,
					Alias:  field.Alias.Name,
				})

			case "__schema":
				if !r.DisableIntrospection {
					flattenedSels = append(flattenedSels, &SchemaField{
						Field:       s.Meta.FieldSchema,
						Alias:       field.Alias.Name,
						Sels:        applySelectionSet(r, s, s.Meta.Schema, field.SelectionSet),
						Async:       true,
						FixedResult: reflect.ValueOf(introspection.WrapSchema(r.Schema)),
					})
				}

			case "__type":
				if !r.DisableIntrospection {
					// Pack the mandatory "name" argument as a string.
					p := packer.ValuePacker{ValueType: reflect.TypeOf("")}
					v, err := p.Pack(field.Arguments.MustGet("name").Deserialize(r.Vars))
					if err != nil {
						r.AddError(errors.Errorf("%s", err))
						return nil
					}

					t, ok := r.Schema.Types[v.String()]
					if !ok {
						return nil
					}

					flattenedSels = append(flattenedSels, &SchemaField{
						Field:       s.Meta.FieldType,
						Alias:       field.Alias.Name,
						Sels:        applySelectionSet(r, s, s.Meta.Type, field.SelectionSet),
						Async:       true,
						FixedResult: reflect.ValueOf(introspection.WrapType(t)),
					})
				}

			default:
				fe := e.Fields[field.Name.Name]

				var args map[string]interface{}
				var packedArgs reflect.Value
				if fe.ArgsPacker != nil {
					args = make(map[string]interface{})
					for _, arg := range field.Arguments {
						args[arg.Name.Name] = arg.Value.Deserialize(r.Vars)
					}
					var err error
					packedArgs, err = fe.ArgsPacker.Pack(args)
					if err != nil {
						r.AddError(errors.Errorf("%s", err))
						return
					}
				}

				fieldSels := applyField(r, s, fe.ValueExec, field.SelectionSet)
				flattenedSels = append(flattenedSels, &SchemaField{
					Field:      *fe,
					Alias:      field.Alias.Name,
					Args:       args,
					PackedArgs: packedArgs,
					Sels:       fieldSels,
					// Anything that may block or fail is resolved async.
					Async: fe.HasContext || fe.ArgsPacker != nil || fe.HasError || HasAsyncSel(fieldSels),
				})
			}

		case *types.InlineFragment:
			frag := sel
			if skipByDirective(r, frag.Directives) {
				continue
			}
			flattenedSels = append(flattenedSels, applyFragment(r, s, e, &frag.Fragment)...)

		case *types.FragmentSpread:
			spread := sel
			if skipByDirective(r, spread.Directives) {
				continue
			}
			flattenedSels = append(flattenedSels, applyFragment(r, s, e, &r.Doc.Fragments.Get(spread.Name.Name).Fragment)...)

		default:
			panic("invalid type")
		}
	}
	return
}

// applyFragment flattens a fragment's selections against object exec e.
// When the fragment's type condition differs from e, it either selects
// through a type assertion (concrete possible type) or expands an
// interface condition into assertions for every possible type.
func applyFragment(r *Request, s *resolvable.Schema, e *resolvable.Object, frag *types.Fragment) []Selection {
	if frag.On.Name != e.Name {
		t := r.Schema.Resolve(frag.On.Name)
		face, ok := t.(*types.InterfaceTypeDefinition)
		if !ok && frag.On.Name != "" {
			// Concrete type condition: select via the matching assertion.
			a, ok2 := e.TypeAssertions[frag.On.Name]
			if !ok2 {
				panic(fmt.Errorf("%q does not implement %q", frag.On, e.Name)) // TODO proper error handling
			}

			return []Selection{&TypeAssertion{
				TypeAssertion: *a,
				Sels:          applySelectionSet(r, s, a.TypeExec.(*resolvable.Object), frag.Selections),
			}}
		}
		if ok && len(face.PossibleTypes) > 0 {
			// Interface condition: expand to all possible types of e.
			sels := []Selection{}
			for _, t := range face.PossibleTypes {
				if t.Name == e.Name {
					return applySelectionSet(r, s, e, frag.Selections)
				}

				if a, ok := e.TypeAssertions[t.Name]; ok {
					sels = append(sels, &TypeAssertion{
						TypeAssertion: *a,
						Sels:          applySelectionSet(r, s, a.TypeExec.(*resolvable.Object), frag.Selections),
					})
				}
			}
			if len(sels) == 0 {
				panic(fmt.Errorf("%q does not implement %q", e.Name, frag.On)) // TODO proper error handling
			}
			return sels
		}
	}
	return applySelectionSet(r, s, e, frag.Selections)
}
+
+func applyField(r *Request, s *resolvable.Schema, e resolvable.Resolvable, sels []types.Selection) []Selection {
+ switch e := e.(type) {
+ case *resolvable.Object:
+ return applySelectionSet(r, s, e, sels)
+ case *resolvable.List:
+ return applyField(r, s, e.Elem, sels)
+ case *resolvable.Scalar:
+ return nil
+ default:
+ panic("unreachable")
+ }
+}
+
+func skipByDirective(r *Request, directives types.DirectiveList) bool {
+ if d := directives.Get("skip"); d != nil {
+ p := packer.ValuePacker{ValueType: reflect.TypeOf(false)}
+ v, err := p.Pack(d.Arguments.MustGet("if").Deserialize(r.Vars))
+ if err != nil {
+ r.AddError(errors.Errorf("%s", err))
+ }
+ if err == nil && v.Bool() {
+ return true
+ }
+ }
+
+ if d := directives.Get("include"); d != nil {
+ p := packer.ValuePacker{ValueType: reflect.TypeOf(false)}
+ v, err := p.Pack(d.Arguments.MustGet("if").Deserialize(r.Vars))
+ if err != nil {
+ r.AddError(errors.Errorf("%s", err))
+ }
+ if err == nil && !v.Bool() {
+ return true
+ }
+ }
+
+ return false
+}
+
+func HasAsyncSel(sels []Selection) bool {
+ for _, sel := range sels {
+ switch sel := sel.(type) {
+ case *SchemaField:
+ if sel.Async {
+ return true
+ }
+ case *TypeAssertion:
+ if HasAsyncSel(sel.Sels) {
+ return true
+ }
+ case *TypenameField:
+ // sync
+ default:
+ panic("unreachable")
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/exec/subscribe.go b/vendor/github.com/graph-gophers/graphql-go/internal/exec/subscribe.go
new file mode 100644
index 00000000..37ebacbc
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/exec/subscribe.go
@@ -0,0 +1,179 @@
+package exec
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "time"
+
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/internal/exec/resolvable"
+ "github.com/graph-gophers/graphql-go/internal/exec/selected"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// Response is one subscription result: the serialized data payload plus
+// any errors collected while resolving the event.
+type Response struct {
+	Data   json.RawMessage
+	Errors []*errors.QueryError
+}
+
+// Subscribe executes a subscription operation: it calls the root
+// subscription resolver once (via reflection) to obtain an event channel,
+// then runs the normal selection-set execution for every event received,
+// emitting one *Response per event until ctx is done or the upstream
+// channel closes. The returned channel is closed when the stream ends.
+func (r *Request) Subscribe(ctx context.Context, s *resolvable.Schema, op *types.OperationDefinition) <-chan *Response {
+	var result reflect.Value
+	var f *fieldToExec
+	var err *errors.QueryError
+	func() {
+		defer r.handlePanic(ctx)
+
+		sels := selected.ApplyOperation(&r.Request, s, op)
+		var fields []*fieldToExec
+		collectFieldsToResolve(sels, s, s.Resolver, &fields, make(map[string]*fieldToExec))
+
+		// TODO: move this check into validation.Validate
+		if len(fields) != 1 {
+			err = errors.Errorf("%s", "can subscribe to at most one subscription at a time")
+			return
+		}
+		f = fields[0]
+
+		// Build the resolver call arguments: optional context first, then
+		// the pre-packed GraphQL arguments if the method takes any.
+		var in []reflect.Value
+		if f.field.HasContext {
+			in = append(in, reflect.ValueOf(ctx))
+		}
+		if f.field.ArgsPacker != nil {
+			in = append(in, f.field.PackedArgs)
+		}
+		callOut := f.resolver.Method(f.field.MethodIndex).Call(in)
+		result = callOut[0]
+
+		// Map a resolver error (second return value, when present) onto a
+		// *errors.QueryError, preserving the original via ResolverError.
+		if f.field.HasError && !callOut[1].IsNil() {
+			switch resolverErr := callOut[1].Interface().(type) {
+			case *errors.QueryError:
+				err = resolverErr
+			case error:
+				err = errors.Errorf("%s", resolverErr)
+				err.ResolverError = resolverErr
+			default:
+				panic(fmt.Errorf("can only deal with *QueryError and error types, got %T", resolverErr))
+			}
+		}
+	}()
+
+	// Handles the case where the locally executed func above panicked
+	if len(r.Request.Errs) > 0 {
+		return sendAndReturnClosed(&Response{Errors: r.Request.Errs})
+	}
+
+	if f == nil {
+		return sendAndReturnClosed(&Response{Errors: []*errors.QueryError{err}})
+	}
+
+	if err != nil {
+		// Per the GraphQL error-propagation rules: a non-null field cannot
+		// resolve to null, so the error replaces the whole payload.
+		if _, nonNullChild := f.field.Type.(*types.NonNull); nonNullChild {
+			return sendAndReturnClosed(&Response{Errors: []*errors.QueryError{err}})
+		}
+		return sendAndReturnClosed(&Response{Data: []byte(fmt.Sprintf(`{"%s":null}`, f.field.Alias)), Errors: []*errors.QueryError{err}})
+	}
+
+	if ctxErr := ctx.Err(); ctxErr != nil {
+		return sendAndReturnClosed(&Response{Errors: []*errors.QueryError{errors.Errorf("%s", ctxErr)}})
+	}
+
+	c := make(chan *Response)
+	// TODO: handle resolver nil channel better?
+	if result.IsZero() {
+		close(c)
+		return c
+	}
+
+	go func() {
+		for {
+			// Check subscription context
+			chosen, resp, ok := reflect.Select([]reflect.SelectCase{
+				{
+					Dir:  reflect.SelectRecv,
+					Chan: reflect.ValueOf(ctx.Done()),
+				},
+				{
+					Dir:  reflect.SelectRecv,
+					Chan: result,
+				},
+			})
+			switch chosen {
+			// subscription context done
+			case 0:
+				close(c)
+				return
+			// upstream received
+			case 1:
+				// upstream closed
+				if !ok {
+					close(c)
+					return
+				}
+
+				// Each event is resolved against a fresh Request so errors
+				// from one event do not leak into the next.
+				subR := &Request{
+					Request: selected.Request{
+						Doc:    r.Request.Doc,
+						Vars:   r.Request.Vars,
+						Schema: r.Request.Schema,
+					},
+					Limiter: r.Limiter,
+					Tracer:  r.Tracer,
+					Logger:  r.Logger,
+				}
+				var out bytes.Buffer
+				func() {
+					// Bound per-event resolution; default is one second.
+					timeout := r.SubscribeResolverTimeout
+					if timeout == 0 {
+						timeout = time.Second
+					}
+
+					subCtx, cancel := context.WithTimeout(ctx, timeout)
+					defer cancel()
+
+					// resolve response
+					func() {
+						defer subR.handlePanic(subCtx)
+
+						var buf bytes.Buffer
+						subR.execSelectionSet(subCtx, f.sels, f.field.Type, &pathSegment{nil, f.field.Alias}, s, resp, &buf)
+
+						propagateChildError := false
+						if _, nonNullChild := f.field.Type.(*types.NonNull); nonNullChild && resolvedToNull(&buf) {
+							propagateChildError = true
+						}
+
+						if !propagateChildError {
+							out.WriteString(fmt.Sprintf(`{"%s":`, f.field.Alias))
+							out.Write(buf.Bytes())
+							out.WriteString(`}`)
+						}
+					}()
+
+					if err := subCtx.Err(); err != nil {
+						c <- &Response{Errors: []*errors.QueryError{errors.Errorf("%s", err)}}
+						return
+					}
+
+					// Send response within timeout
+					// TODO: maybe block until sent?
+					select {
+					case <-subCtx.Done():
+					case c <- &Response{Data: out.Bytes(), Errors: subR.Errs}:
+					}
+				}()
+			}
+		}
+	}()
+
+	return c
+}
+
+// sendAndReturnClosed delivers a single terminal response on a buffered,
+// already-closed channel; used for the early-exit error paths in Subscribe.
+func sendAndReturnClosed(resp *Response) chan *Response {
+	c := make(chan *Response, 1)
+	c <- resp
+	close(c)
+	return c
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/query/query.go b/vendor/github.com/graph-gophers/graphql-go/internal/query/query.go
new file mode 100644
index 00000000..ca0400cd
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/query/query.go
@@ -0,0 +1,156 @@
+package query
+
+import (
+ "fmt"
+ "text/scanner"
+
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/internal/common"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// GraphQL operation kinds as stored in types.OperationDefinition.Type.
+const (
+	Query        types.OperationType = "QUERY"
+	Mutation     types.OperationType = "MUTATION"
+	Subscription types.OperationType = "SUBSCRIPTION"
+)
+
+// Parse lexes and parses a GraphQL query document, returning its
+// executable definition (operations and fragments) or a syntax error.
+func Parse(queryString string) (*types.ExecutableDefinition, *errors.QueryError) {
+	l := common.NewLexer(queryString, false)
+
+	var execDef *types.ExecutableDefinition
+	err := l.CatchSyntaxError(func() { execDef = parseExecutableDefinition(l) })
+	if err != nil {
+		return nil, err
+	}
+
+	return execDef, nil
+}
+
+// parseExecutableDefinition consumes a whole document: anonymous query
+// shorthand ({...}), named query/mutation/subscription operations, and
+// fragment definitions.
+func parseExecutableDefinition(l *common.Lexer) *types.ExecutableDefinition {
+	ed := &types.ExecutableDefinition{}
+	l.ConsumeWhitespace()
+	for l.Peek() != scanner.EOF {
+		if l.Peek() == '{' {
+			// Query shorthand: a bare selection set is an anonymous query.
+			op := &types.OperationDefinition{Type: Query, Loc: l.Location()}
+			op.Selections = parseSelectionSet(l)
+			ed.Operations = append(ed.Operations, op)
+			continue
+		}
+
+		loc := l.Location()
+		switch x := l.ConsumeIdent(); x {
+		case "query":
+			op := parseOperation(l, Query)
+			op.Loc = loc
+			ed.Operations = append(ed.Operations, op)
+
+		case "mutation":
+			// NOTE(review): unlike the "query" case above, mutation and
+			// subscription operations never get op.Loc assigned — confirm
+			// upstream whether that is intentional.
+			ed.Operations = append(ed.Operations, parseOperation(l, Mutation))
+
+		case "subscription":
+			ed.Operations = append(ed.Operations, parseOperation(l, Subscription))
+
+		case "fragment":
+			frag := parseFragment(l)
+			frag.Loc = loc
+			ed.Fragments = append(ed.Fragments, frag)
+
+		default:
+			l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "fragment"`, x))
+		}
+	}
+	return ed
+}
+
+// parseOperation parses an (optionally named) operation body after its
+// type keyword was consumed by the caller: optional name, directives,
+// variable definitions ($var: Type), and the selection set.
+func parseOperation(l *common.Lexer, opType types.OperationType) *types.OperationDefinition {
+	op := &types.OperationDefinition{Type: opType}
+	op.Name.Loc = l.Location()
+	if l.Peek() == scanner.Ident {
+		op.Name = l.ConsumeIdentWithLoc()
+	}
+	op.Directives = common.ParseDirectives(l)
+	if l.Peek() == '(' {
+		l.ConsumeToken('(')
+		for l.Peek() != ')' {
+			loc := l.Location()
+			l.ConsumeToken('$')
+			iv := common.ParseInputValue(l)
+			iv.Loc = loc
+			op.Vars = append(op.Vars, iv)
+		}
+		l.ConsumeToken(')')
+	}
+	op.Selections = parseSelectionSet(l)
+	return op
+}
+
+// parseFragment parses "fragment Name on Type @dirs { ... }" after the
+// "fragment" keyword has been consumed.
+func parseFragment(l *common.Lexer) *types.FragmentDefinition {
+	f := &types.FragmentDefinition{}
+	f.Name = l.ConsumeIdentWithLoc()
+	l.ConsumeKeyword("on")
+	f.On = types.TypeName{Ident: l.ConsumeIdentWithLoc()}
+	f.Directives = common.ParseDirectives(l)
+	f.Selections = parseSelectionSet(l)
+	return f
+}
+
+// parseSelectionSet parses a braced list of selections: "{ sel* }".
+func parseSelectionSet(l *common.Lexer) []types.Selection {
+	var sels []types.Selection
+	l.ConsumeToken('{')
+	for l.Peek() != '}' {
+		sels = append(sels, parseSelection(l))
+	}
+	l.ConsumeToken('}')
+	return sels
+}
+
+// parseSelection dispatches on the next token: "..." introduces a fragment
+// spread or inline fragment, anything else is a field.
+func parseSelection(l *common.Lexer) types.Selection {
+	if l.Peek() == '.' {
+		return parseSpread(l)
+	}
+	return parseFieldDef(l)
+}
+
+// parseFieldDef parses one field selection: "alias: name(args) @dirs { sub }".
+// When no alias is given the field's name doubles as its alias.
+func parseFieldDef(l *common.Lexer) *types.Field {
+	f := &types.Field{}
+	f.Alias = l.ConsumeIdentWithLoc()
+	f.Name = f.Alias
+	if l.Peek() == ':' {
+		l.ConsumeToken(':')
+		f.Name = l.ConsumeIdentWithLoc()
+	}
+	if l.Peek() == '(' {
+		f.Arguments = common.ParseArgumentList(l)
+	}
+	f.Directives = common.ParseDirectives(l)
+	if l.Peek() == '{' {
+		f.SelectionSetLoc = l.Location()
+		f.SelectionSet = parseSelectionSet(l)
+	}
+	return f
+}
+
+// parseSpread parses "..." followed by either a fragment spread
+// ("...FragName @dirs") or an inline fragment ("...on Type @dirs { ... }",
+// where the type condition is optional).
+func parseSpread(l *common.Lexer) types.Selection {
+	loc := l.Location()
+	l.ConsumeToken('.')
+	l.ConsumeToken('.')
+	l.ConsumeToken('.')
+
+	f := &types.InlineFragment{Loc: loc}
+	if l.Peek() == scanner.Ident {
+		ident := l.ConsumeIdentWithLoc()
+		// Any identifier other than "on" names a fragment spread.
+		if ident.Name != "on" {
+			fs := &types.FragmentSpread{
+				Name: ident,
+				Loc:  loc,
+			}
+			fs.Directives = common.ParseDirectives(l)
+			return fs
+		}
+		f.On = types.TypeName{Ident: l.ConsumeIdentWithLoc()}
+	}
+	f.Directives = common.ParseDirectives(l)
+	f.Selections = parseSelectionSet(l)
+	return f
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/schema/meta.go b/vendor/github.com/graph-gophers/graphql-go/internal/schema/meta.go
new file mode 100644
index 00000000..9f5bba56
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/schema/meta.go
@@ -0,0 +1,203 @@
+package schema
+
+import (
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+func init() {
+	// Parse the meta schema at package load so a broken metaSrc fails fast.
+	_ = newMeta()
+}
+
+// newMeta initializes an instance of the meta Schema.
+// It parses metaSrc (the introspection types and built-in directives) and
+// panics on failure, since metaSrc is a compile-time constant.
+func newMeta() *types.Schema {
+	s := &types.Schema{
+		EntryPointNames: make(map[string]string),
+		Types:           make(map[string]types.NamedType),
+		Directives:      make(map[string]*types.DirectiveDefinition),
+	}
+
+	err := Parse(s, metaSrc, false)
+	if err != nil {
+		panic(err)
+	}
+	return s
+}
+
+var metaSrc = `
+ # The ` + "`" + `Int` + "`" + ` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+ scalar Int
+
+ # The ` + "`" + `Float` + "`" + ` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).
+ scalar Float
+
+ # The ` + "`" + `String` + "`" + ` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+ scalar String
+
+ # The ` + "`" + `Boolean` + "`" + ` scalar type represents ` + "`" + `true` + "`" + ` or ` + "`" + `false` + "`" + `.
+ scalar Boolean
+
+ # The ` + "`" + `ID` + "`" + ` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as ` + "`" + `"4"` + "`" + `) or integer (such as ` + "`" + `4` + "`" + `) input value will be accepted as an ID.
+ scalar ID
+
+ # Directs the executor to include this field or fragment only when the ` + "`" + `if` + "`" + ` argument is true.
+ directive @include(
+ # Included when true.
+ if: Boolean!
+ ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+ # Directs the executor to skip this field or fragment when the ` + "`" + `if` + "`" + ` argument is true.
+ directive @skip(
+ # Skipped when true.
+ if: Boolean!
+ ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+ # Marks an element of a GraphQL schema as no longer supported.
+ directive @deprecated(
+ # Explains why this element was deprecated, usually also including a suggestion
+ # for how to access supported similar data. Formatted in
+ # [Markdown](https://daringfireball.net/projects/markdown/).
+ reason: String = "No longer supported"
+ ) on FIELD_DEFINITION | ENUM_VALUE
+
+ # A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.
+ #
+ # In some cases, you need to provide options to alter GraphQL's execution behavior
+ # in ways field arguments will not suffice, such as conditionally including or
+ # skipping a field. Directives provide this by describing additional information
+ # to the executor.
+ type __Directive {
+ name: String!
+ description: String
+ locations: [__DirectiveLocation!]!
+ args: [__InputValue!]!
+ }
+
+ # A Directive can be adjacent to many parts of the GraphQL language, a
+ # __DirectiveLocation describes one such possible adjacencies.
+ enum __DirectiveLocation {
+ # Location adjacent to a query operation.
+ QUERY
+ # Location adjacent to a mutation operation.
+ MUTATION
+ # Location adjacent to a subscription operation.
+ SUBSCRIPTION
+ # Location adjacent to a field.
+ FIELD
+ # Location adjacent to a fragment definition.
+ FRAGMENT_DEFINITION
+ # Location adjacent to a fragment spread.
+ FRAGMENT_SPREAD
+ # Location adjacent to an inline fragment.
+ INLINE_FRAGMENT
+ # Location adjacent to a schema definition.
+ SCHEMA
+ # Location adjacent to a scalar definition.
+ SCALAR
+ # Location adjacent to an object type definition.
+ OBJECT
+ # Location adjacent to a field definition.
+ FIELD_DEFINITION
+ # Location adjacent to an argument definition.
+ ARGUMENT_DEFINITION
+ # Location adjacent to an interface definition.
+ INTERFACE
+ # Location adjacent to a union definition.
+ UNION
+ # Location adjacent to an enum definition.
+ ENUM
+ # Location adjacent to an enum value definition.
+ ENUM_VALUE
+ # Location adjacent to an input object type definition.
+ INPUT_OBJECT
+ # Location adjacent to an input object field definition.
+ INPUT_FIELD_DEFINITION
+ }
+
+ # One possible value for a given Enum. Enum values are unique values, not a
+ # placeholder for a string or numeric value. However an Enum value is returned in
+ # a JSON response as a string.
+ type __EnumValue {
+ name: String!
+ description: String
+ isDeprecated: Boolean!
+ deprecationReason: String
+ }
+
+ # Object and Interface types are described by a list of Fields, each of which has
+ # a name, potentially a list of arguments, and a return type.
+ type __Field {
+ name: String!
+ description: String
+ args: [__InputValue!]!
+ type: __Type!
+ isDeprecated: Boolean!
+ deprecationReason: String
+ }
+
+ # Arguments provided to Fields or Directives and the input fields of an
+ # InputObject are represented as Input Values which describe their type and
+ # optionally a default value.
+ type __InputValue {
+ name: String!
+ description: String
+ type: __Type!
+ # A GraphQL-formatted string representing the default value for this input value.
+ defaultValue: String
+ }
+
+ # A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all
+ # available types and directives on the server, as well as the entry points for
+ # query, mutation, and subscription operations.
+ type __Schema {
+ # A list of all types supported by this server.
+ types: [__Type!]!
+ # The type that query operations will be rooted at.
+ queryType: __Type!
+ # If this server supports mutation, the type that mutation operations will be rooted at.
+ mutationType: __Type
+ # If this server support subscription, the type that subscription operations will be rooted at.
+ subscriptionType: __Type
+ # A list of all directives supported by this server.
+ directives: [__Directive!]!
+ }
+
+ # The fundamental unit of any GraphQL Schema is the type. There are many kinds of
+ # types in GraphQL as represented by the ` + "`" + `__TypeKind` + "`" + ` enum.
+ #
+ # Depending on the kind of a type, certain fields describe information about that
+ # type. Scalar types provide no information beyond a name and description, while
+ # Enum types provide their values. Object and Interface types provide the fields
+ # they describe. Abstract types, Union and Interface, provide the Object types
+ # possible at runtime. List and NonNull types compose other types.
+ type __Type {
+ kind: __TypeKind!
+ name: String
+ description: String
+ fields(includeDeprecated: Boolean = false): [__Field!]
+ interfaces: [__Type!]
+ possibleTypes: [__Type!]
+ enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
+ inputFields: [__InputValue!]
+ ofType: __Type
+ }
+
+ # An enum describing what kind of type a given ` + "`" + `__Type` + "`" + ` is.
+ enum __TypeKind {
+ # Indicates this type is a scalar.
+ SCALAR
+ # Indicates this type is an object. ` + "`" + `fields` + "`" + ` and ` + "`" + `interfaces` + "`" + ` are valid fields.
+ OBJECT
+ # Indicates this type is an interface. ` + "`" + `fields` + "`" + ` and ` + "`" + `possibleTypes` + "`" + ` are valid fields.
+ INTERFACE
+ # Indicates this type is a union. ` + "`" + `possibleTypes` + "`" + ` is a valid field.
+ UNION
+ # Indicates this type is an enum. ` + "`" + `enumValues` + "`" + ` is a valid field.
+ ENUM
+ # Indicates this type is an input object. ` + "`" + `inputFields` + "`" + ` is a valid field.
+ INPUT_OBJECT
+ # Indicates this type is a list. ` + "`" + `ofType` + "`" + ` is a valid field.
+ LIST
+ # Indicates this type is a non-null. ` + "`" + `ofType` + "`" + ` is a valid field.
+ NON_NULL
+ }
+`
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/schema/schema.go b/vendor/github.com/graph-gophers/graphql-go/internal/schema/schema.go
new file mode 100644
index 00000000..fb301c46
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/schema/schema.go
@@ -0,0 +1,586 @@
+package schema
+
+import (
+ "fmt"
+ "text/scanner"
+
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/internal/common"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// New initializes an instance of Schema.
+// The returned schema is pre-populated with the built-in meta types and
+// directives (__Schema, __Type, @skip, @include, ...) from newMeta.
+func New() *types.Schema {
+	s := &types.Schema{
+		EntryPointNames: make(map[string]string),
+		Types:           make(map[string]types.NamedType),
+		Directives:      make(map[string]*types.DirectiveDefinition),
+	}
+	m := newMeta()
+	for n, t := range m.Types {
+		s.Types[n] = t
+	}
+	for n, d := range m.Directives {
+		s.Directives[n] = d
+	}
+	return s
+}
+
+// Parse lexes a schema document into s and then runs the post-parse
+// passes: merging "extend" definitions, resolving named type references,
+// wiring default entry points (Query/Mutation/Subscription), validating
+// interface implementations, and resolving directive usages.
+func Parse(s *types.Schema, schemaString string, useStringDescriptions bool) error {
+	l := common.NewLexer(schemaString, useStringDescriptions)
+	err := l.CatchSyntaxError(func() { parseSchema(s, l) })
+	if err != nil {
+		return err
+	}
+
+	if err := mergeExtensions(s); err != nil {
+		return err
+	}
+
+	for _, t := range s.Types {
+		if err := resolveNamedType(s, t); err != nil {
+			return err
+		}
+	}
+	for _, d := range s.Directives {
+		for _, arg := range d.Arguments {
+			t, err := common.ResolveType(arg.Type, s.Resolve)
+			if err != nil {
+				return err
+			}
+			arg.Type = t
+		}
+	}
+
+	// https://graphql.github.io/graphql-spec/June2018/#sec-Root-Operation-Types
+	// > While any type can be the root operation type for a GraphQL operation, the type system definition language can
+	// > omit the schema definition when the query, mutation, and subscription root types are named Query, Mutation,
+	// > and Subscription respectively.
+	if len(s.EntryPointNames) == 0 {
+		if _, ok := s.Types["Query"]; ok {
+			s.EntryPointNames["query"] = "Query"
+		}
+		if _, ok := s.Types["Mutation"]; ok {
+			s.EntryPointNames["mutation"] = "Mutation"
+		}
+		if _, ok := s.Types["Subscription"]; ok {
+			s.EntryPointNames["subscription"] = "Subscription"
+		}
+	}
+	s.EntryPoints = make(map[string]types.NamedType)
+	for key, name := range s.EntryPointNames {
+		t, ok := s.Types[name]
+		if !ok {
+			return errors.Errorf("type %q not found", name)
+		}
+		s.EntryPoints[key] = t
+	}
+
+	// Interface types need validation: https://spec.graphql.org/draft/#sec-Interfaces.Interfaces-Implementing-Interfaces
+	for _, typeDef := range s.Types {
+		switch t := typeDef.(type) {
+		case *types.InterfaceTypeDefinition:
+			for i, implements := range t.Interfaces {
+				typ, ok := s.Types[implements.Name]
+				if !ok {
+					return errors.Errorf("interface %q not found", implements)
+				}
+				// NOTE(review): if the assertion below fails, the error
+				// message formats the zero-valued "inteface" variable
+				// (misspelled) instead of the offending type's name —
+				// likely meant implements.Name; confirm upstream.
+				inteface, ok := typ.(*types.InterfaceTypeDefinition)
+				if !ok {
+					return errors.Errorf("type %q is not an interface", inteface)
+				}
+
+				for _, f := range inteface.Fields.Names() {
+					if t.Fields.Get(f) == nil {
+						return errors.Errorf("interface %q expects field %q but %q does not provide it", inteface.Name, f, t.Name)
+					}
+				}
+
+				t.Interfaces[i] = inteface
+			}
+		default:
+			continue
+		}
+	}
+
+	// Link objects to the interfaces they implement (and vice versa via
+	// PossibleTypes), checking that every interface field is provided.
+	for _, obj := range s.Objects {
+		obj.Interfaces = make([]*types.InterfaceTypeDefinition, len(obj.InterfaceNames))
+		if err := resolveDirectives(s, obj.Directives, "OBJECT"); err != nil {
+			return err
+		}
+		for _, field := range obj.Fields {
+			if err := resolveDirectives(s, field.Directives, "FIELD_DEFINITION"); err != nil {
+				return err
+			}
+		}
+		for i, intfName := range obj.InterfaceNames {
+			t, ok := s.Types[intfName]
+			if !ok {
+				return errors.Errorf("interface %q not found", intfName)
+			}
+			intf, ok := t.(*types.InterfaceTypeDefinition)
+			if !ok {
+				return errors.Errorf("type %q is not an interface", intfName)
+			}
+			for _, f := range intf.Fields.Names() {
+				if obj.Fields.Get(f) == nil {
+					return errors.Errorf("interface %q expects field %q but %q does not provide it", intfName, f, obj.Name)
+				}
+			}
+			obj.Interfaces[i] = intf
+			intf.PossibleTypes = append(intf.PossibleTypes, obj)
+		}
+	}
+
+	// Resolve union member names to their object type definitions.
+	for _, union := range s.Unions {
+		if err := resolveDirectives(s, union.Directives, "UNION"); err != nil {
+			return err
+		}
+		union.UnionMemberTypes = make([]*types.ObjectTypeDefinition, len(union.TypeNames))
+		for i, name := range union.TypeNames {
+			t, ok := s.Types[name]
+			if !ok {
+				return errors.Errorf("object type %q not found", name)
+			}
+			obj, ok := t.(*types.ObjectTypeDefinition)
+			if !ok {
+				return errors.Errorf("type %q is not an object", name)
+			}
+			union.UnionMemberTypes[i] = obj
+		}
+	}
+
+	for _, enum := range s.Enums {
+		if err := resolveDirectives(s, enum.Directives, "ENUM"); err != nil {
+			return err
+		}
+		for _, value := range enum.EnumValuesDefinition {
+			if err := resolveDirectives(s, value.Directives, "ENUM_VALUE"); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
+// ParseSchema is a convenience wrapper: it builds a fresh schema with New
+// and parses schemaString into it.
+func ParseSchema(schemaString string, useStringDescriptions bool) (*types.Schema, error) {
+	s := New()
+	err := Parse(s, schemaString, useStringDescriptions)
+	return s, err
+}
+
+// mergeExtensions folds every "extend" definition into its base type,
+// rejecting extensions of unknown types, kind mismatches, and duplicate
+// fields/members/values.
+func mergeExtensions(s *types.Schema) error {
+	for _, ext := range s.Extensions {
+		typ := s.Types[ext.Type.TypeName()]
+		if typ == nil {
+			return fmt.Errorf("trying to extend unknown type %q", ext.Type.TypeName())
+		}
+
+		if typ.Kind() != ext.Type.Kind() {
+			return fmt.Errorf("trying to extend type %q with type %q", typ.Kind(), ext.Type.Kind())
+		}
+
+		// The kind check above guarantees the type assertions below succeed.
+		switch og := typ.(type) {
+		case *types.ObjectTypeDefinition:
+			e := ext.Type.(*types.ObjectTypeDefinition)
+
+			for _, field := range e.Fields {
+				if og.Fields.Get(field.Name) != nil {
+					return fmt.Errorf("extended field %q already exists", field.Name)
+				}
+			}
+			og.Fields = append(og.Fields, e.Fields...)
+
+			for _, en := range e.InterfaceNames {
+				for _, on := range og.InterfaceNames {
+					if on == en {
+						return fmt.Errorf("interface %q implemented in the extension is already implemented in %q", on, og.Name)
+					}
+				}
+			}
+			og.InterfaceNames = append(og.InterfaceNames, e.InterfaceNames...)
+
+		case *types.InputObject:
+			e := ext.Type.(*types.InputObject)
+
+			for _, field := range e.Values {
+				if og.Values.Get(field.Name.Name) != nil {
+					return fmt.Errorf("extended field %q already exists", field.Name)
+				}
+			}
+			og.Values = append(og.Values, e.Values...)
+
+		case *types.InterfaceTypeDefinition:
+			e := ext.Type.(*types.InterfaceTypeDefinition)
+
+			for _, field := range e.Fields {
+				if og.Fields.Get(field.Name) != nil {
+					return fmt.Errorf("extended field %s already exists", field.Name)
+				}
+			}
+			og.Fields = append(og.Fields, e.Fields...)
+
+		case *types.Union:
+			e := ext.Type.(*types.Union)
+
+			for _, en := range e.TypeNames {
+				for _, on := range og.TypeNames {
+					if on == en {
+						return fmt.Errorf("union type %q already declared in %q", on, og.Name)
+					}
+				}
+			}
+			og.TypeNames = append(og.TypeNames, e.TypeNames...)
+
+		case *types.EnumTypeDefinition:
+			e := ext.Type.(*types.EnumTypeDefinition)
+
+			for _, en := range e.EnumValuesDefinition {
+				for _, on := range og.EnumValuesDefinition {
+					if on.EnumValue == en.EnumValue {
+						return fmt.Errorf("enum value %q already declared in %q", on.EnumValue, og.Name)
+					}
+				}
+			}
+			og.EnumValuesDefinition = append(og.EnumValuesDefinition, e.EnumValuesDefinition...)
+		default:
+			return fmt.Errorf(`unexpected %q, expecting "schema", "type", "enum", "interface", "union" or "input"`, og.TypeName())
+		}
+	}
+
+	return nil
+}
+
+// resolveNamedType resolves all type references held by a named type:
+// field types for objects and interfaces, value types for input objects.
+// Other kinds (scalars, enums, unions) need no reference resolution here.
+func resolveNamedType(s *types.Schema, t types.NamedType) error {
+	switch t := t.(type) {
+	case *types.ObjectTypeDefinition:
+		for _, f := range t.Fields {
+			if err := resolveField(s, f); err != nil {
+				return err
+			}
+		}
+	case *types.InterfaceTypeDefinition:
+		for _, f := range t.Fields {
+			if err := resolveField(s, f); err != nil {
+				return err
+			}
+		}
+	case *types.InputObject:
+		if err := resolveInputObject(s, t.Values); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// resolveField resolves a field definition's return type, validates its
+// directives for the FIELD_DEFINITION location, and resolves the types of
+// its arguments.
+func resolveField(s *types.Schema, f *types.FieldDefinition) error {
+	t, err := common.ResolveType(f.Type, s.Resolve)
+	if err != nil {
+		return err
+	}
+	f.Type = t
+	if err := resolveDirectives(s, f.Directives, "FIELD_DEFINITION"); err != nil {
+		return err
+	}
+	return resolveInputObject(s, f.Arguments)
+}
+
+// resolveDirectives validates each directive usage against its definition:
+// the directive must exist, be allowed at loc, and use only declared
+// arguments. Missing arguments are filled in with their declared defaults.
+func resolveDirectives(s *types.Schema, directives types.DirectiveList, loc string) error {
+	for _, d := range directives {
+		dirName := d.Name.Name
+		dd, ok := s.Directives[dirName]
+		if !ok {
+			return errors.Errorf("directive %q not found", dirName)
+		}
+		validLoc := false
+		for _, l := range dd.Locations {
+			if l == loc {
+				validLoc = true
+				break
+			}
+		}
+		if !validLoc {
+			return errors.Errorf("invalid location %q for directive %q (must be one of %v)", loc, dirName, dd.Locations)
+		}
+		for _, arg := range d.Arguments {
+			if dd.Arguments.Get(arg.Name.Name) == nil {
+				return errors.Errorf("invalid argument %q for directive %q", arg.Name.Name, dirName)
+			}
+		}
+		// Backfill any argument the usage omitted with its default value.
+		for _, arg := range dd.Arguments {
+			if _, ok := d.Arguments.Get(arg.Name.Name); !ok {
+				d.Arguments = append(d.Arguments, &types.Argument{Name: arg.Name, Value: arg.Default})
+			}
+		}
+	}
+	return nil
+}
+
+// resolveInputObject resolves the declared type of every input value in
+// the list (used for both input-object fields and field arguments).
+func resolveInputObject(s *types.Schema, values types.ArgumentsDefinition) error {
+	for _, v := range values {
+		t, err := common.ResolveType(v.Type, s.Resolve)
+		if err != nil {
+			return err
+		}
+		v.Type = t
+	}
+	return nil
+}
+
+// parseSchema consumes an entire schema document, dispatching on each
+// top-level keyword and registering the parsed definitions on s. Each
+// definition keeps the description comment that precedes it.
+func parseSchema(s *types.Schema, l *common.Lexer) {
+	l.ConsumeWhitespace()
+
+	for l.Peek() != scanner.EOF {
+		desc := l.DescComment()
+		switch x := l.ConsumeIdent(); x {
+
+		case "schema":
+			// schema { query: Q, mutation: M, ... } — explicit entry points.
+			l.ConsumeToken('{')
+			for l.Peek() != '}' {
+
+				name := l.ConsumeIdent()
+				l.ConsumeToken(':')
+				typ := l.ConsumeIdent()
+				s.EntryPointNames[name] = typ
+			}
+			l.ConsumeToken('}')
+
+		case "type":
+			obj := parseObjectDef(l)
+			obj.Desc = desc
+			s.Types[obj.Name] = obj
+			s.Objects = append(s.Objects, obj)
+
+		case "interface":
+			iface := parseInterfaceDef(l)
+			iface.Desc = desc
+			s.Types[iface.Name] = iface
+
+		case "union":
+			union := parseUnionDef(l)
+			union.Desc = desc
+			s.Types[union.Name] = union
+			s.Unions = append(s.Unions, union)
+
+		case "enum":
+			enum := parseEnumDef(l)
+			enum.Desc = desc
+			s.Types[enum.Name] = enum
+			s.Enums = append(s.Enums, enum)
+
+		case "input":
+			input := parseInputDef(l)
+			input.Desc = desc
+			s.Types[input.Name] = input
+
+		case "scalar":
+			loc := l.Location()
+			name := l.ConsumeIdent()
+			directives := common.ParseDirectives(l)
+			s.Types[name] = &types.ScalarTypeDefinition{Name: name, Desc: desc, Directives: directives, Loc: loc}
+
+		case "directive":
+			directive := parseDirectiveDef(l)
+			directive.Desc = desc
+			s.Directives[directive.Name] = directive
+
+		case "extend":
+			parseExtension(s, l)
+
+		default:
+			// TODO: Add support for type extensions.
+			l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "schema", "type", "enum", "interface", "union", "input", "scalar" or "directive"`, x))
+		}
+	}
+}
+
+// parseObjectDef parses "type Name implements A & B @dirs { fields }"
+// after the "type" keyword has been consumed. Directives and the
+// "implements" clause may appear in either order.
+// NOTE(review): if the next token is none of '{', '@' or an identifier
+// (e.g. unexpected punctuation or EOF), this loop never advances the
+// lexer — confirm upstream whether malformed input can spin here.
+func parseObjectDef(l *common.Lexer) *types.ObjectTypeDefinition {
+	object := &types.ObjectTypeDefinition{Loc: l.Location(), Name: l.ConsumeIdent()}
+
+	for {
+		if l.Peek() == '{' {
+			break
+		}
+
+		if l.Peek() == '@' {
+			object.Directives = common.ParseDirectives(l)
+			continue
+		}
+
+		if l.Peek() == scanner.Ident {
+			l.ConsumeKeyword("implements")
+
+			// Interface names may be separated by optional '&' tokens.
+			for l.Peek() != '{' && l.Peek() != '@' {
+				if l.Peek() == '&' {
+					l.ConsumeToken('&')
+				}
+
+				object.InterfaceNames = append(object.InterfaceNames, l.ConsumeIdent())
+			}
+			continue
+		}
+
+	}
+	l.ConsumeToken('{')
+	object.Fields = parseFieldsDef(l)
+	l.ConsumeToken('}')
+
+	return object
+
+}
+
+// parseInterfaceDef parses "interface Name implements A & B @dirs { fields }"
+// after the "interface" keyword. Implemented interfaces are recorded as
+// name-only placeholders and linked to real definitions later in Parse.
+func parseInterfaceDef(l *common.Lexer) *types.InterfaceTypeDefinition {
+	i := &types.InterfaceTypeDefinition{Loc: l.Location(), Name: l.ConsumeIdent()}
+
+	if l.Peek() == scanner.Ident {
+		l.ConsumeKeyword("implements")
+		i.Interfaces = append(i.Interfaces, &types.InterfaceTypeDefinition{Name: l.ConsumeIdent()})
+
+		for l.Peek() == '&' {
+			l.ConsumeToken('&')
+			i.Interfaces = append(i.Interfaces, &types.InterfaceTypeDefinition{Name: l.ConsumeIdent()})
+		}
+	}
+
+	i.Directives = common.ParseDirectives(l)
+
+	l.ConsumeToken('{')
+	i.Fields = parseFieldsDef(l)
+	l.ConsumeToken('}')
+
+	return i
+}
+
+// parseUnionDef parses "union Name @dirs = A | B | C" after the "union"
+// keyword; member names are resolved to object types later in Parse.
+func parseUnionDef(l *common.Lexer) *types.Union {
+	union := &types.Union{Loc: l.Location(), Name: l.ConsumeIdent()}
+
+	union.Directives = common.ParseDirectives(l)
+	l.ConsumeToken('=')
+	union.TypeNames = []string{l.ConsumeIdent()}
+	for l.Peek() == '|' {
+		l.ConsumeToken('|')
+		union.TypeNames = append(union.TypeNames, l.ConsumeIdent())
+	}
+
+	return union
+}
+
+// parseInputDef parses "input Name @dirs { valueDefs }" after the "input"
+// keyword has been consumed.
+func parseInputDef(l *common.Lexer) *types.InputObject {
+	i := &types.InputObject{}
+	i.Loc = l.Location()
+	i.Name = l.ConsumeIdent()
+	i.Directives = common.ParseDirectives(l)
+	l.ConsumeToken('{')
+	for l.Peek() != '}' {
+		i.Values = append(i.Values, common.ParseInputValue(l))
+	}
+	l.ConsumeToken('}')
+	return i
+}
+
+// parseEnumDef parses "enum Name @dirs { VALUE ... }" after the "enum"
+// keyword; each value keeps its own description, location and directives.
+func parseEnumDef(l *common.Lexer) *types.EnumTypeDefinition {
+	enum := &types.EnumTypeDefinition{Loc: l.Location(), Name: l.ConsumeIdent()}
+
+	enum.Directives = common.ParseDirectives(l)
+	l.ConsumeToken('{')
+	for l.Peek() != '}' {
+		v := &types.EnumValueDefinition{
+			Desc:       l.DescComment(),
+			Loc:        l.Location(),
+			EnumValue:  l.ConsumeIdent(),
+			Directives: common.ParseDirectives(l),
+		}
+
+		enum.EnumValuesDefinition = append(enum.EnumValuesDefinition, v)
+	}
+	l.ConsumeToken('}')
+	return enum
+}
+// parseDirectiveDef parses "directive @name(args) on LOC | LOC | ..."
+// after the "directive" keyword has been consumed.
+func parseDirectiveDef(l *common.Lexer) *types.DirectiveDefinition {
+	l.ConsumeToken('@')
+	loc := l.Location()
+	d := &types.DirectiveDefinition{Name: l.ConsumeIdent(), Loc: loc}
+
+	if l.Peek() == '(' {
+		l.ConsumeToken('(')
+		for l.Peek() != ')' {
+			v := common.ParseInputValue(l)
+			d.Arguments = append(d.Arguments, v)
+		}
+		l.ConsumeToken(')')
+	}
+
+	l.ConsumeKeyword("on")
+
+	// Locations are a '|'-separated list of identifiers; the inner loc
+	// shadows the outer location variable with a plain string.
+	for {
+		loc := l.ConsumeIdent()
+		d.Locations = append(d.Locations, loc)
+		if l.Peek() != '|' {
+			break
+		}
+		l.ConsumeToken('|')
+	}
+	return d
+}
+
+// parseExtension parses an "extend" declaration. Schema extensions are
+// applied immediately to the entry-point map; type extensions are queued
+// on s.Extensions and merged later by mergeExtensions.
+func parseExtension(s *types.Schema, l *common.Lexer) {
+	loc := l.Location()
+	switch x := l.ConsumeIdent(); x {
+	case "schema":
+		l.ConsumeToken('{')
+		for l.Peek() != '}' {
+			name := l.ConsumeIdent()
+			l.ConsumeToken(':')
+			typ := l.ConsumeIdent()
+			s.EntryPointNames[name] = typ
+		}
+		l.ConsumeToken('}')
+
+	case "type":
+		obj := parseObjectDef(l)
+		s.Extensions = append(s.Extensions, &types.Extension{Type: obj, Loc: loc})
+
+	case "interface":
+		iface := parseInterfaceDef(l)
+		s.Extensions = append(s.Extensions, &types.Extension{Type: iface, Loc: loc})
+
+	case "union":
+		union := parseUnionDef(l)
+		s.Extensions = append(s.Extensions, &types.Extension{Type: union, Loc: loc})
+
+	case "enum":
+		enum := parseEnumDef(l)
+		s.Extensions = append(s.Extensions, &types.Extension{Type: enum, Loc: loc})
+
+	case "input":
+		input := parseInputDef(l)
+		s.Extensions = append(s.Extensions, &types.Extension{Type: input, Loc: loc})
+
+	default:
+		// TODO: Add ScalarTypeDefinition when adding directives
+		l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "schema", "type", "enum", "interface", "union" or "input"`, x))
+	}
+}
+
+// parseFieldsDef parses the body of a fields block (caller consumes the
+// surrounding braces): "name(args): Type @dirs" repeated until '}'.
+func parseFieldsDef(l *common.Lexer) types.FieldsDefinition {
+	var fields types.FieldsDefinition
+	for l.Peek() != '}' {
+		f := &types.FieldDefinition{}
+		f.Desc = l.DescComment()
+		f.Loc = l.Location()
+		f.Name = l.ConsumeIdent()
+		if l.Peek() == '(' {
+			l.ConsumeToken('(')
+			for l.Peek() != ')' {
+				f.Arguments = append(f.Arguments, common.ParseInputValue(l))
+			}
+			l.ConsumeToken(')')
+		}
+		l.ConsumeToken(':')
+		f.Type = common.ParseType(l)
+		f.Directives = common.ParseDirectives(l)
+		fields = append(fields, f)
+	}
+	return fields
+}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/validation/suggestion.go b/vendor/github.com/graph-gophers/graphql-go/internal/validation/suggestion.go
new file mode 100644
index 00000000..9702b5f5
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/validation/suggestion.go
@@ -0,0 +1,71 @@
+package validation
+
+import (
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// makeSuggestion builds a "did you mean" suffix for an error message. It
+// keeps every option whose Levenshtein distance to input is below a
+// length-based threshold, orders them closest-first, quotes them and joins
+// them after prefix (e.g. ` Did you mean "name", or "nickname"?`). It returns
+// the empty string when no option is close enough.
+func makeSuggestion(prefix string, options []string, input string) string {
+	var selected []string
+	distances := make(map[string]int)
+	for _, opt := range options {
+		distance := levenshteinDistance(input, opt)
+		// Threshold scales with the longer of the two strings, never below 1.
+		threshold := max(len(input)/2, max(len(opt)/2, 1))
+		if distance < threshold {
+			selected = append(selected, opt)
+			distances[opt] = distance
+		}
+	}
+
+	if len(selected) == 0 {
+		return ""
+	}
+	sort.Slice(selected, func(i, j int) bool {
+		return distances[selected[i]] < distances[selected[j]]
+	})
+
+	parts := make([]string, len(selected))
+	for i, opt := range selected {
+		parts[i] = strconv.Quote(opt)
+	}
+	if len(parts) > 1 {
+		parts[len(parts)-1] = "or " + parts[len(parts)-1]
+	}
+	return fmt.Sprintf(" %s %s?", prefix, strings.Join(parts, ", "))
+}
+
// levenshteinDistance returns the edit distance (insertions, deletions and
// substitutions, each costing 1) between s1 and s2, using a single-column
// dynamic-programming sweep. Indexing follows the byte positions of s1 while
// iteration decodes runes, matching the original implementation's behavior
// for ASCII identifiers (the only inputs used by the suggestion code).
func levenshteinDistance(s1, s2 string) int {
	col := make([]int, len(s1)+1)
	for i := range s1 {
		col[i+1] = i + 1
	}
	for j, cj := range s2 {
		col[0] = j + 1
		diag := j // value of col[i] from the previous row
		for i, ci := range s1 {
			best := diag
			if ci != cj {
				best++ // substitution
			}
			if d := col[i+1] + 1; d < best { // deletion
				best = d
			}
			if d := col[i] + 1; d < best { // insertion
				best = d
			}
			diag, col[i+1] = col[i+1], best
		}
	}
	return col[len(s1)]
}
+
// min returns the smaller of a and b.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
+
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
diff --git a/vendor/github.com/graph-gophers/graphql-go/internal/validation/validation.go b/vendor/github.com/graph-gophers/graphql-go/internal/validation/validation.go
new file mode 100644
index 00000000..e3672638
--- /dev/null
+++ b/vendor/github.com/graph-gophers/graphql-go/internal/validation/validation.go
@@ -0,0 +1,980 @@
+package validation
+
+import (
+ "fmt"
+ "math"
+ "reflect"
+ "strconv"
+ "strings"
+ "text/scanner"
+
+ "github.com/graph-gophers/graphql-go/errors"
+ "github.com/graph-gophers/graphql-go/internal/common"
+ "github.com/graph-gophers/graphql-go/internal/query"
+ "github.com/graph-gophers/graphql-go/types"
+)
+
+// varSet tracks which variable definitions an operation actually references.
+type varSet map[*types.InputValueDefinition]struct{}
+
+// selectionPair is an unordered-by-convention pair of selections used to
+// memoize overlap validation (both orderings are inserted).
+type selectionPair struct{ a, b types.Selection }
+
+// nameSet maps a seen name to the location of its first occurrence, for
+// duplicate-name diagnostics.
+type nameSet map[string]errors.Location
+
+// fieldInfo records the schema field a query field resolved to and the parent
+// type it was selected on.
+type fieldInfo struct {
+	sf     *types.FieldDefinition
+	parent types.NamedType
+}
+
+// context carries all shared state for one Validate call: the schema and
+// document under validation, accumulated errors (global and per-operation),
+// variable-usage tracking, field resolution results, the overlap memo and the
+// configured depth limit (0 = unlimited).
+type context struct {
+	schema           *types.Schema
+	doc              *types.ExecutableDefinition
+	errs             []*errors.QueryError
+	opErrs           map[*types.OperationDefinition][]*errors.QueryError
+	usedVars         map[*types.OperationDefinition]varSet
+	fieldMap         map[*types.Field]fieldInfo
+	overlapValidated map[selectionPair]struct{}
+	maxDepth         int
+}
+
+// addErr records a validation error with a single source location.
+func (c *context) addErr(loc errors.Location, rule string, format string, a ...interface{}) {
+	c.addErrMultiLoc([]errors.Location{loc}, rule, format, a...)
+}
+
+// addErrMultiLoc records a validation error that points at several source
+// locations at once (e.g. both occurrences of a duplicate name), tagged with
+// the spec rule name that was violated.
+func (c *context) addErrMultiLoc(locs []errors.Location, rule string, format string, a ...interface{}) {
+	c.errs = append(c.errs, &errors.QueryError{
+		Message:   fmt.Sprintf(format, a...),
+		Locations: locs,
+		Rule:      rule,
+	})
+}
+
+// opContext narrows a context to the operations currently being validated:
+// exactly one operation while walking an operation body, or every operation
+// that uses a fragment while walking that fragment.
+type opContext struct {
+	*context
+	ops []*types.OperationDefinition
+}
+
+// newContext creates the shared validation state for one Validate call, with
+// all lookup maps pre-initialized (errs stays nil until the first error).
+func newContext(s *types.Schema, doc *types.ExecutableDefinition, maxDepth int) *context {
+	return &context{
+		schema:           s,
+		doc:              doc,
+		opErrs:           make(map[*types.OperationDefinition][]*errors.QueryError),
+		usedVars:         make(map[*types.OperationDefinition]varSet),
+		fieldMap:         make(map[*types.Field]fieldInfo),
+		overlapValidated: make(map[selectionPair]struct{}),
+		maxDepth:         maxDepth,
+	}
+}
+
+// Validate checks the executable document doc against schema s and returns
+// every violation found. Each returned QueryError carries the name of the
+// GraphQL spec validation rule it corresponds to. variables holds the
+// caller-supplied variable values (used to type-check operation variables);
+// maxDepth of 0 disables depth limiting. When the depth limit is exceeded,
+// validation stops early and only the errors gathered so far are returned.
+func Validate(s *types.Schema, doc *types.ExecutableDefinition, variables map[string]interface{}, maxDepth int) []*errors.QueryError {
+	c := newContext(s, doc, maxDepth)
+
+	opNames := make(nameSet)
+	fragUsedBy := make(map[*types.FragmentDefinition][]*types.OperationDefinition)
+	for _, op := range doc.Operations {
+		c.usedVars[op] = make(varSet)
+		opc := &opContext{c, []*types.OperationDefinition{op}}
+
+		// Check if max depth is exceeded, if it's set. If max depth is exceeded,
+		// don't continue to validate the document and exit early.
+		if validateMaxDepth(opc, op.Selections, nil, 1) {
+			return c.errs
+		}
+
+		if op.Name.Name == "" && len(doc.Operations) != 1 {
+			c.addErr(op.Loc, "LoneAnonymousOperation", "This anonymous operation must be the only defined operation.")
+		}
+		if op.Name.Name != "" {
+			validateName(c, opNames, op.Name, "UniqueOperationNames", "operation")
+		}
+
+		validateDirectives(opc, string(op.Type), op.Directives)
+
+		// Validate each declared variable: unique name, input type, supplied
+		// value, and (if present) a well-typed default.
+		varNames := make(nameSet)
+		for _, v := range op.Vars {
+			validateName(c, varNames, v.Name, "UniqueVariableNames", "variable")
+
+			t := resolveType(c, v.Type)
+			if !canBeInput(t) {
+				c.addErr(v.TypeLoc, "VariablesAreInputTypes", "Variable %q cannot be non-input type %q.", "$"+v.Name.Name, t)
+			}
+			validateValue(opc, v, variables[v.Name.Name], t)
+
+			if v.Default != nil {
+				validateLiteral(opc, v.Default)
+
+				if t != nil {
+					if nn, ok := t.(*types.NonNull); ok {
+						c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q is required and will not use the default value. Perhaps you meant to use type %q.", "$"+v.Name.Name, t, nn.OfType)
+					}
+
+					if ok, reason := validateValueType(opc, v.Default, t); !ok {
+						c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q has invalid default value %s.\n%s", "$"+v.Name.Name, t, v.Default, reason)
+					}
+				}
+			}
+		}
+
+		// Pick the schema entry point that matches the operation type.
+		var entryPoint types.NamedType
+		switch op.Type {
+		case query.Query:
+			entryPoint = s.EntryPoints["query"]
+		case query.Mutation:
+			entryPoint = s.EntryPoints["mutation"]
+		case query.Subscription:
+			entryPoint = s.EntryPoints["subscription"]
+		default:
+			panic("unreachable")
+		}
+
+		validateSelectionSet(opc, op.Selections, entryPoint)
+
+		// Record which fragments this operation (transitively) uses, so the
+		// fragment pass below can attribute errors to the right operations.
+		fragUsed := make(map[*types.FragmentDefinition]struct{})
+		markUsedFragments(c, op.Selections, fragUsed)
+		for frag := range fragUsed {
+			fragUsedBy[frag] = append(fragUsedBy[frag], op)
+		}
+	}
+
+	fragNames := make(nameSet)
+	fragVisited := make(map[*types.FragmentDefinition]struct{})
+	for _, frag := range doc.Fragments {
+		opc := &opContext{c, fragUsedBy[frag]}
+
+		validateName(c, fragNames, frag.Name, "UniqueFragmentNames", "fragment")
+		validateDirectives(opc, "FRAGMENT_DEFINITION", frag.Directives)
+
+		t := unwrapType(resolveType(c, &frag.On))
+		// continue even if t is nil
+		if t != nil && !canBeFragment(t) {
+			c.addErr(frag.On.Loc, "FragmentsOnCompositeTypes", "Fragment %q cannot condition on non composite type %q.", frag.Name.Name, t)
+			continue
+		}
+
+		validateSelectionSet(opc, frag.Selections, t)
+
+		if _, ok := fragVisited[frag]; !ok {
+			detectFragmentCycle(c, frag.Selections, fragVisited, nil, map[string]int{frag.Name.Name: 0})
+		}
+	}
+
+	for _, frag := range doc.Fragments {
+		if len(fragUsedBy[frag]) == 0 {
+			c.addErr(frag.Loc, "NoUnusedFragments", "Fragment %q is never used.", frag.Name.Name)
+		}
+	}
+
+	// Flush per-operation errors and report declared-but-unused variables.
+	for _, op := range doc.Operations {
+		c.errs = append(c.errs, c.opErrs[op]...)
+
+		opUsedVars := c.usedVars[op]
+		for _, v := range op.Vars {
+			if _, ok := opUsedVars[v]; !ok {
+				opSuffix := ""
+				if op.Name.Name != "" {
+					opSuffix = fmt.Sprintf(" in operation %q", op.Name.Name)
+				}
+				c.addErr(v.Loc, "NoUnusedVariables", "Variable %q is never used%s.", "$"+v.Name.Name, opSuffix)
+			}
+		}
+	}
+
+	return c.errs
+}
+
+// validateValue checks a caller-supplied variable value val against the
+// expected type t, recursing through wrappers and composites. nil values are
+// rejected only under NonNull; single values are accepted where a list is
+// expected (GraphQL input coercion). Scalar leaf types fall through the type
+// switch and are not checked here.
+func validateValue(c *opContext, v *types.InputValueDefinition, val interface{}, t types.Type) {
+	switch t := t.(type) {
+	case *types.NonNull:
+		if val == nil {
+			c.addErr(v.Loc, "VariablesOfCorrectType", "Variable \"%s\" has invalid value null.\nExpected type \"%s\", found null.", v.Name.Name, t)
+			return
+		}
+		validateValue(c, v, val, t.OfType)
+	case *types.List:
+		if val == nil {
+			return
+		}
+		vv, ok := val.([]interface{})
+		if !ok {
+			// Input coercion rules allow single items without wrapping array
+			validateValue(c, v, val, t.OfType)
+			return
+		}
+		for _, elem := range vv {
+			validateValue(c, v, elem, t.OfType)
+		}
+	case *types.EnumTypeDefinition:
+		if val == nil {
+			return
+		}
+		e, ok := val.(string)
+		if !ok {
+			c.addErr(v.Loc, "VariablesOfCorrectType", "Variable \"%s\" has invalid type %T.\nExpected type \"%s\", found %v.", v.Name.Name, val, t, val)
+			return
+		}
+		// The string must match one of the enum's declared values.
+		for _, option := range t.EnumValuesDefinition {
+			if option.EnumValue == e {
+				return
+			}
+		}
+		c.addErr(v.Loc, "VariablesOfCorrectType", "Variable \"%s\" has invalid value %s.\nExpected type \"%s\", found %s.", v.Name.Name, e, t, e)
+	case *types.InputObject:
+		if val == nil {
+			return
+		}
+		in, ok := val.(map[string]interface{})
+		if !ok {
+			c.addErr(v.Loc, "VariablesOfCorrectType", "Variable \"%s\" has invalid type %T.\nExpected type \"%s\", found %s.", v.Name.Name, val, t, val)
+			return
+		}
+		// Check every declared input field; absent keys yield nil, which the
+		// recursive call rejects only for non-null field types.
+		for _, f := range t.Values {
+			fieldVal := in[f.Name.Name]
+			validateValue(c, f, fieldVal, f.Type)
+		}
+	}
+}
+
+// validateMaxDepth validates that the query doesn't go deeper than maxDepth
+// (if set). It returns whether the query exceeded max depth, so the caller
+// can stop validating early and avoid excessive recursion.
+//
+// Only fields increase depth; inline fragments and fragment spreads are
+// walked at the current depth since their fields appear alongside sibling
+// fields. The visited map ensures that max depth validation does not get
+// stuck in cyclical fragment spreads.
+func validateMaxDepth(c *opContext, sels []types.Selection, visited map[*types.FragmentDefinition]struct{}, depth int) bool {
+	// maxDepth checking is turned off when maxDepth is 0
+	if c.maxDepth == 0 {
+		return false
+	}
+
+	exceededMaxDepth := false
+	if visited == nil {
+		visited = map[*types.FragmentDefinition]struct{}{}
+	}
+
+	for _, sel := range sels {
+		switch sel := sel.(type) {
+		case *types.Field:
+			if depth > c.maxDepth {
+				exceededMaxDepth = true
+				c.addErr(sel.Alias.Loc, "MaxDepthExceeded", "Field %q has depth %d that exceeds max depth %d", sel.Name.Name, depth, c.maxDepth)
+				continue
+			}
+			exceededMaxDepth = exceededMaxDepth || validateMaxDepth(c, sel.SelectionSet, visited, depth+1)
+
+		case *types.InlineFragment:
+			// Depth is not checked because inline fragments resolve to other fields which are checked.
+			// Depth is not incremented because inline fragments have the same depth as neighboring fields
+			exceededMaxDepth = exceededMaxDepth || validateMaxDepth(c, sel.Selections, visited, depth)
+		case *types.FragmentSpread:
+			// Depth is not checked because fragments resolve to other fields which are checked.
+			frag := c.doc.Fragments.Get(sel.Name.Name)
+			if frag == nil {
+				// In case of unknown fragment (invalid request), ignore max depth evaluation
+				c.addErr(sel.Loc, "MaxDepthEvaluationError", "Unknown fragment %q. Unable to evaluate depth.", sel.Name.Name)
+				continue
+			}
+
+			if _, ok := visited[frag]; ok {
+				// we've already seen this fragment, don't check depth again.
+				continue
+			}
+			visited[frag] = struct{}{}
+
+			// Depth is not incremented because fragments have the same depth as surrounding fields
+			exceededMaxDepth = exceededMaxDepth || validateMaxDepth(c, frag.Selections, visited, depth)
+		}
+	}
+
+	return exceededMaxDepth
+}
+
+// validateSelectionSet validates each selection against parent type t, then
+// checks every unordered pair of selections for mergeability
+// (OverlappingFieldsCanBeMerged).
+func validateSelectionSet(c *opContext, sels []types.Selection, t types.NamedType) {
+	for _, sel := range sels {
+		validateSelection(c, sel, t)
+	}
+
+	for i, a := range sels {
+		for _, b := range sels[i+1:] {
+			c.validateOverlap(a, b, nil, nil)
+		}
+	}
+}
+
+// validateSelection validates a single field, inline fragment or fragment
+// spread against parent type t (t may be nil when the parent could not be
+// resolved; errors are then suppressed where meaningless). For fields it
+// resolves the schema field (including the __typename/__schema/__type meta
+// fields), records it in c.fieldMap for later overlap checks, validates
+// arguments and enforces the ScalarLeafs rule before recursing into any
+// sub-selection.
+func validateSelection(c *opContext, sel types.Selection, t types.NamedType) {
+	switch sel := sel.(type) {
+	case *types.Field:
+		validateDirectives(c, "FIELD", sel.Directives)
+
+		fieldName := sel.Name.Name
+		var f *types.FieldDefinition
+		switch fieldName {
+		case "__typename":
+			// Synthesized meta field definitions for introspection.
+			f = &types.FieldDefinition{
+				Name: "__typename",
+				Type: c.schema.Types["String"],
+			}
+		case "__schema":
+			f = &types.FieldDefinition{
+				Name: "__schema",
+				Type: c.schema.Types["__Schema"],
+			}
+		case "__type":
+			f = &types.FieldDefinition{
+				Name: "__type",
+				Arguments: types.ArgumentsDefinition{
+					&types.InputValueDefinition{
+						Name: types.Ident{Name: "name"},
+						Type: &types.NonNull{OfType: c.schema.Types["String"]},
+					},
+				},
+				Type: c.schema.Types["__Type"],
+			}
+		default:
+			f = fields(t).Get(fieldName)
+			if f == nil && t != nil {
+				// Offer near-miss field names in the error message.
+				suggestion := makeSuggestion("Did you mean", fields(t).Names(), fieldName)
+				c.addErr(sel.Alias.Loc, "FieldsOnCorrectType", "Cannot query field %q on type %q.%s", fieldName, t, suggestion)
+			}
+		}
+		c.fieldMap[sel] = fieldInfo{sf: f, parent: t}
+
+		validateArgumentLiterals(c, sel.Arguments)
+		if f != nil {
+			validateArgumentTypes(c, sel.Arguments, f.Arguments, sel.Alias.Loc,
+				func() string { return fmt.Sprintf("field %q of type %q", fieldName, t) },
+				func() string { return fmt.Sprintf("Field %q", fieldName) },
+			)
+		}
+
+		// ScalarLeafs: composite fields require a sub-selection, leaf fields
+		// forbid one.
+		var ft types.Type
+		if f != nil {
+			ft = f.Type
+			sf := hasSubfields(ft)
+			if sf && sel.SelectionSet == nil {
+				c.addErr(sel.Alias.Loc, "ScalarLeafs", "Field %q of type %q must have a selection of subfields. Did you mean \"%s { ... }\"?", fieldName, ft, fieldName)
+			}
+			if !sf && sel.SelectionSet != nil {
+				c.addErr(sel.SelectionSetLoc, "ScalarLeafs", "Field %q must not have a selection since type %q has no subfields.", fieldName, ft)
+			}
+		}
+		if sel.SelectionSet != nil {
+			validateSelectionSet(c, sel.SelectionSet, unwrapType(ft))
+		}
+
+	case *types.InlineFragment:
+		validateDirectives(c, "INLINE_FRAGMENT", sel.Directives)
+		if sel.On.Name != "" {
+			// A type condition narrows t for the nested selections.
+			fragTyp := unwrapType(resolveType(c.context, &sel.On))
+			if fragTyp != nil && !compatible(t, fragTyp) {
+				c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment cannot be spread here as objects of type %q can never be of type %q.", t, fragTyp)
+			}
+			t = fragTyp
+			// continue even if t is nil
+		}
+		if t != nil && !canBeFragment(t) {
+			c.addErr(sel.On.Loc, "FragmentsOnCompositeTypes", "Fragment cannot condition on non composite type %q.", t)
+			return
+		}
+		validateSelectionSet(c, sel.Selections, unwrapType(t))
+
+	case *types.FragmentSpread:
+		validateDirectives(c, "FRAGMENT_SPREAD", sel.Directives)
+		frag := c.doc.Fragments.Get(sel.Name.Name)
+		if frag == nil {
+			c.addErr(sel.Name.Loc, "KnownFragmentNames", "Unknown fragment %q.", sel.Name.Name)
+			return
+		}
+		fragTyp := c.schema.Types[frag.On.Name]
+		if !compatible(t, fragTyp) {
+			c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment %q cannot be spread here as objects of type %q can never be of type %q.", frag.Name.Name, t, fragTyp)
+		}
+
+	default:
+		panic("unreachable")
+	}
+}
+
+// compatible reports whether types a and b share at least one possible
+// concrete object type, i.e. whether a fragment on b may ever apply to a
+// value of type a.
+func compatible(a, b types.Type) bool {
+	for _, pta := range possibleTypes(a) {
+		for _, ptb := range possibleTypes(b) {
+			if pta == ptb {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+// possibleTypes returns the concrete object types a value of t can have: the
+// type itself for objects, the implementing types for interfaces, the member
+// types for unions, and nil for anything else (including nil t).
+func possibleTypes(t types.Type) []*types.ObjectTypeDefinition {
+	switch t := t.(type) {
+	case *types.ObjectTypeDefinition:
+		return []*types.ObjectTypeDefinition{t}
+	case *types.InterfaceTypeDefinition:
+		return t.PossibleTypes
+	case *types.Union:
+		return t.UnionMemberTypes
+	default:
+		return nil
+	}
+}
+
+// markUsedFragments adds every fragment transitively reachable from sels to
+// fragUsed. Already-marked fragments are skipped, which also terminates on
+// cyclic spreads.
+func markUsedFragments(c *context, sels []types.Selection, fragUsed map[*types.FragmentDefinition]struct{}) {
+	for _, sel := range sels {
+		switch sel := sel.(type) {
+		case *types.Field:
+			if sel.SelectionSet != nil {
+				markUsedFragments(c, sel.SelectionSet, fragUsed)
+			}
+
+		case *types.InlineFragment:
+			markUsedFragments(c, sel.Selections, fragUsed)
+
+		case *types.FragmentSpread:
+			frag := c.doc.Fragments.Get(sel.Name.Name)
+			if frag == nil {
+				// NOTE(review): an unknown fragment aborts marking of the
+				// remaining siblings (return, not continue); the unknown
+				// spread itself is reported elsewhere (KnownFragmentNames),
+				// but later siblings' fragments stay unmarked — confirm this
+				// early return is intentional.
+				return
+			}
+
+			if _, ok := fragUsed[frag]; ok {
+				continue
+			}
+
+			fragUsed[frag] = struct{}{}
+			markUsedFragments(c, frag.Selections, fragUsed)
+
+		default:
+			panic("unreachable")
+		}
+	}
+}
+
+// detectFragmentCycle checks every selection in sels for fragment spread
+// cycles; see detectFragmentCycleSel for the parameters' meaning.
+func detectFragmentCycle(c *context, sels []types.Selection, fragVisited map[*types.FragmentDefinition]struct{}, spreadPath []*types.FragmentSpread, spreadPathIndex map[string]int) {
+	for _, sel := range sels {
+		detectFragmentCycleSel(c, sel, fragVisited, spreadPath, spreadPathIndex)
+	}
+}
+
+// detectFragmentCycleSel implements the NoFragmentCycles rule for a single
+// selection. spreadPath is the chain of spreads taken so far and
+// spreadPathIndex maps a fragment name to its position in that chain; hitting
+// a name already on the path means the chain loops. fragVisited prevents
+// re-walking fragments whose subtree was already fully explored.
+func detectFragmentCycleSel(c *context, sel types.Selection, fragVisited map[*types.FragmentDefinition]struct{}, spreadPath []*types.FragmentSpread, spreadPathIndex map[string]int) {
+	switch sel := sel.(type) {
+	case *types.Field:
+		if sel.SelectionSet != nil {
+			detectFragmentCycle(c, sel.SelectionSet, fragVisited, spreadPath, spreadPathIndex)
+		}
+
+	case *types.InlineFragment:
+		detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex)
+
+	case *types.FragmentSpread:
+		frag := c.doc.Fragments.Get(sel.Name.Name)
+		if frag == nil {
+			// Unknown fragments are reported by KnownFragmentNames elsewhere.
+			return
+		}
+
+		spreadPath = append(spreadPath, sel)
+		if i, ok := spreadPathIndex[frag.Name.Name]; ok {
+			// Cycle found: the portion of the path from the earlier
+			// occurrence onward forms the loop. List intermediate fragment
+			// names ("via ...") and attach every spread location.
+			cyclePath := spreadPath[i:]
+			via := ""
+			if len(cyclePath) > 1 {
+				names := make([]string, len(cyclePath)-1)
+				for i, frag := range cyclePath[:len(cyclePath)-1] {
+					names[i] = frag.Name.Name
+				}
+				via = " via " + strings.Join(names, ", ")
+			}
+
+			locs := make([]errors.Location, len(cyclePath))
+			for i, frag := range cyclePath {
+				locs[i] = frag.Loc
+			}
+			c.addErrMultiLoc(locs, "NoFragmentCycles", "Cannot spread fragment %q within itself%s.", frag.Name.Name, via)
+			return
+		}
+
+		if _, ok := fragVisited[frag]; ok {
+			return
+		}
+		fragVisited[frag] = struct{}{}
+
+		// Descend with this fragment on the path, then remove it again.
+		spreadPathIndex[frag.Name.Name] = len(spreadPath)
+		detectFragmentCycle(c, frag.Selections, fragVisited, spreadPath, spreadPathIndex)
+		delete(spreadPathIndex, frag.Name.Name)
+
+	default:
+		panic("unreachable")
+	}
+}
+
+// validateOverlap checks that selections a and b can be merged
+// (OverlappingFieldsCanBeMerged). Fragments are expanded so the check always
+// bottoms out at field/field pairs. At the top level (reasons == nil) a
+// conflict is reported directly; in recursive calls the conflict reasons and
+// locations are accumulated into *reasons/*locs so the outermost fields get
+// one combined error. Pairs are memoized in c.overlapValidated (both orders)
+// so each pair is examined at most once per Validate call.
+func (c *context) validateOverlap(a, b types.Selection, reasons *[]string, locs *[]errors.Location) {
+	if a == b {
+		return
+	}
+
+	if _, ok := c.overlapValidated[selectionPair{a, b}]; ok {
+		return
+	}
+	c.overlapValidated[selectionPair{a, b}] = struct{}{}
+	c.overlapValidated[selectionPair{b, a}] = struct{}{}
+
+	switch a := a.(type) {
+	case *types.Field:
+		switch b := b.(type) {
+		case *types.Field:
+			// Order the pair by source position for stable error messages.
+			if b.Alias.Loc.Before(a.Alias.Loc) {
+				a, b = b, a
+			}
+			if reasons2, locs2 := c.validateFieldOverlap(a, b); len(reasons2) != 0 {
+				locs2 = append(locs2, a.Alias.Loc, b.Alias.Loc)
+				if reasons == nil {
+					c.addErrMultiLoc(locs2, "OverlappingFieldsCanBeMerged", "Fields %q conflict because %s. Use different aliases on the fields to fetch both if this was intentional.", a.Alias.Name, strings.Join(reasons2, " and "))
+					return
+				}
+				for _, r := range reasons2 {
+					*reasons = append(*reasons, fmt.Sprintf("subfields %q conflict because %s", a.Alias.Name, r))
+				}
+				*locs = append(*locs, locs2...)
+			}
+
+		case *types.InlineFragment:
+			for _, sel := range b.Selections {
+				c.validateOverlap(a, sel, reasons, locs)
+			}
+
+		case *types.FragmentSpread:
+			if frag := c.doc.Fragments.Get(b.Name.Name); frag != nil {
+				for _, sel := range frag.Selections {
+					c.validateOverlap(a, sel, reasons, locs)
+				}
+			}
+
+		default:
+			panic("unreachable")
+		}
+
+	case *types.InlineFragment:
+		for _, sel := range a.Selections {
+			c.validateOverlap(sel, b, reasons, locs)
+		}
+
+	case *types.FragmentSpread:
+		if frag := c.doc.Fragments.Get(a.Name.Name); frag != nil {
+			for _, sel := range frag.Selections {
+				c.validateOverlap(sel, b, reasons, locs)
+			}
+		}
+
+	default:
+		panic("unreachable")
+	}
+}
+
+// validateFieldOverlap reports why two fields with the same response alias
+// cannot be merged: conflicting return types, different underlying field
+// names, or differing arguments (the latter two only when the fields can
+// appear on the same concrete parent type). It also recurses into both
+// sub-selections and returns any nested conflict reasons with their
+// locations. An empty reasons slice means the fields merge cleanly.
+func (c *context) validateFieldOverlap(a, b *types.Field) ([]string, []errors.Location) {
+	if a.Alias.Name != b.Alias.Name {
+		return nil, nil
+	}
+
+	if asf := c.fieldMap[a].sf; asf != nil {
+		if bsf := c.fieldMap[b].sf; bsf != nil {
+			if !typesCompatible(asf.Type, bsf.Type) {
+				return []string{fmt.Sprintf("they return conflicting types %s and %s", asf.Type, bsf.Type)}, nil
+			}
+		}
+	}
+
+	// Name/argument conflicts only matter if the two fields can occur on the
+	// same object type (same parent, or a parent is unknown).
+	at := c.fieldMap[a].parent
+	bt := c.fieldMap[b].parent
+	if at == nil || bt == nil || at == bt {
+		if a.Name.Name != b.Name.Name {
+			return []string{fmt.Sprintf("%s and %s are different fields", a.Name.Name, b.Name.Name)}, nil
+		}
+
+		if argumentsConflict(a.Arguments, b.Arguments) {
+			return []string{"they have differing arguments"}, nil
+		}
+	}
+
+	var reasons []string
+	var locs []errors.Location
+	for _, a2 := range a.SelectionSet {
+		for _, b2 := range b.SelectionSet {
+			c.validateOverlap(a2, b2, &reasons, &locs)
+		}
+	}
+	return reasons, locs
+}
+
+// argumentsConflict reports whether two argument lists differ: different
+// lengths, a missing argument, or any argument whose deserialized value (with
+// no variable bindings) is not deeply equal.
+func argumentsConflict(a, b types.ArgumentList) bool {
+	if len(a) != len(b) {
+		return true
+	}
+	for _, argA := range a {
+		valB, ok := b.Get(argA.Name.Name)
+		if !ok || !reflect.DeepEqual(argA.Value.Deserialize(nil), valB.Deserialize(nil)) {
+			return true
+		}
+	}
+	return false
+}
+
+// fields returns the field definitions of an object or interface type, or
+// nil for any other type (including nil t).
+func fields(t types.Type) types.FieldsDefinition {
+	switch t := t.(type) {
+	case *types.ObjectTypeDefinition:
+		return t.Fields
+	case *types.InterfaceTypeDefinition:
+		return t.Fields
+	default:
+		return nil
+	}
+}
+
+// unwrapType strips any number of List and NonNull wrappers from t and
+// returns the underlying named type (nil in, nil out). It panics on any other
+// unnamed type, which cannot occur for resolved schema types.
+func unwrapType(t types.Type) types.NamedType {
+	if t == nil {
+		return nil
+	}
+	for {
+		switch t2 := t.(type) {
+		case types.NamedType:
+			return t2
+		case *types.List:
+			t = t2.OfType
+		case *types.NonNull:
+			t = t2.OfType
+		default:
+			panic("unreachable")
+		}
+	}
+}
+
+// resolveType resolves type references in t against the schema, recording any
+// resolution failure as a validation error. The (possibly nil) resolved type
+// is returned either way so callers can keep validating.
+func resolveType(c *context, t types.Type) types.Type {
+	t2, err := common.ResolveType(t, c.schema.Resolve)
+	if err != nil {
+		c.errs = append(c.errs, err)
+	}
+	return t2
+}
+
+// validateDirectives validates the directives attached at one location (loc
+// is the spec's executable-directive-location name, e.g. "FIELD" or
+// "FRAGMENT_SPREAD"): each directive is unique at this location, known to the
+// schema, allowed at this location, and carries well-typed arguments.
+func validateDirectives(c *opContext, loc string, directives types.DirectiveList) {
+	directiveNames := make(nameSet)
+	for _, d := range directives {
+		dirName := d.Name.Name
+		validateNameCustomMsg(c.context, directiveNames, d.Name, "UniqueDirectivesPerLocation", func() string {
+			return fmt.Sprintf("The directive %q can only be used once at this location.", dirName)
+		})
+
+		validateArgumentLiterals(c, d.Arguments)
+
+		dd, ok := c.schema.Directives[dirName]
+		if !ok {
+			c.addErr(d.Name.Loc, "KnownDirectives", "Unknown directive %q.", dirName)
+			continue
+		}
+
+		// The directive definition lists where it may appear.
+		locOK := false
+		for _, allowedLoc := range dd.Locations {
+			if loc == allowedLoc {
+				locOK = true
+				break
+			}
+		}
+		if !locOK {
+			c.addErr(d.Name.Loc, "KnownDirectives", "Directive %q may not be used on %s.", dirName, loc)
+		}
+
+		validateArgumentTypes(c, d.Arguments, dd.Arguments, d.Name.Loc,
+			func() string { return fmt.Sprintf("directive %q", "@"+dirName) },
+			func() string { return fmt.Sprintf("Directive %q", "@"+dirName) },
+		)
+	}
+}
+
+// validateName enforces uniqueness of name within set, reporting a duplicate
+// with a standard "There can be only one <kind> named ..." message.
+func validateName(c *context, set nameSet, name types.Ident, rule string, kind string) {
+	validateNameCustomMsg(c, set, name, rule, func() string {
+		return fmt.Sprintf("There can be only one %s named %q.", kind, name.Name)
+	})
+}
+
+// validateNameCustomMsg enforces uniqueness of name within set. On a
+// duplicate it reports msg() at both the first and the current occurrence;
+// otherwise it records the name's location. msg is a func so the message is
+// only built when a duplicate is actually found.
+func validateNameCustomMsg(c *context, set nameSet, name types.Ident, rule string, msg func() string) {
+	if loc, ok := set[name.Name]; ok {
+		c.addErrMultiLoc([]errors.Location{loc, name.Loc}, rule, msg())
+		return
+	}
+	set[name.Name] = name.Loc
+}
+
+// validateArgumentTypes checks provided arguments against their declarations:
+// every supplied argument must be declared (KnownArgumentNames) and have a
+// value of the declared type (ArgumentsOfCorrectType), and every declared
+// non-null argument must be supplied (ProvidedNonNullArguments). owner1 and
+// owner2 lazily describe the owning field/directive for the error messages.
+func validateArgumentTypes(c *opContext, args types.ArgumentList, argDecls types.ArgumentsDefinition, loc errors.Location, owner1, owner2 func() string) {
+	for _, selArg := range args {
+		arg := argDecls.Get(selArg.Name.Name)
+		if arg == nil {
+			c.addErr(selArg.Name.Loc, "KnownArgumentNames", "Unknown argument %q on %s.", selArg.Name.Name, owner1())
+			continue
+		}
+		value := selArg.Value
+		if ok, reason := validateValueType(c, value, arg.Type); !ok {
+			c.addErr(value.Location(), "ArgumentsOfCorrectType", "Argument %q has invalid value %s.\n%s", arg.Name.Name, value, reason)
+		}
+	}
+	for _, decl := range argDecls {
+		if _, ok := decl.Type.(*types.NonNull); ok {
+			if _, ok := args.Get(decl.Name.Name); !ok {
+				c.addErr(loc, "ProvidedNonNullArguments", "%s argument %q of type %q is required but not provided.", owner2(), decl.Name.Name, decl.Type)
+			}
+		}
+	}
+}
+
+// validateArgumentLiterals checks that argument names are unique
+// (UniqueArgumentNames) and that each argument's literal value is internally
+// valid (unique input fields, defined variables).
+func validateArgumentLiterals(c *opContext, args types.ArgumentList) {
+	argNames := make(nameSet)
+	for _, arg := range args {
+		validateName(c.context, argNames, arg.Name, "UniqueArgumentNames", "argument")
+		validateLiteral(c, arg.Value)
+	}
+}
+
+// validateLiteral recursively validates a literal value: object literals must
+// have unique field names, list entries are validated individually, and a
+// variable reference must be declared by every operation in c.ops
+// (NoUndefinedVariables; recorded in c.opErrs so the error is attributed to
+// the right operation). Defined variables are marked as used and checked for
+// position compatibility. Plain scalar literals need no checking here.
+func validateLiteral(c *opContext, l types.Value) {
+	switch l := l.(type) {
+	case *types.ObjectValue:
+		fieldNames := make(nameSet)
+		for _, f := range l.Fields {
+			validateName(c.context, fieldNames, f.Name, "UniqueInputFieldNames", "input field")
+			validateLiteral(c, f.Value)
+		}
+	case *types.ListValue:
+		for _, entry := range l.Values {
+			validateLiteral(c, entry)
+		}
+	case *types.Variable:
+		for _, op := range c.ops {
+			v := op.Vars.Get(l.Name)
+			if v == nil {
+				byOp := ""
+				if op.Name.Name != "" {
+					byOp = fmt.Sprintf(" by operation %q", op.Name.Name)
+				}
+				c.opErrs[op] = append(c.opErrs[op], &errors.QueryError{
+					Message:   fmt.Sprintf("Variable %q is not defined%s.", "$"+l.Name, byOp),
+					Locations: []errors.Location{l.Loc, op.Loc},
+					Rule:      "NoUndefinedVariables",
+				})
+				continue
+			}
+			validateValueType(c, l, resolveType(c.context, v.Type))
+			c.usedVars[op][v] = struct{}{}
+		}
+	}
+}
+
+// validateValueType checks literal v against expected type t. It returns
+// (true, "") on success or (false, reason) where reason is a human-readable
+// explanation for the error message. Variable references are handled
+// specially: the variable's declared type is checked for position
+// compatibility (VariablesInAllowedPosition, reported directly) and the
+// literal itself is always accepted. A variable with a default value counts
+// as non-null for that check.
+func validateValueType(c *opContext, v types.Value, t types.Type) (bool, string) {
+	if v, ok := v.(*types.Variable); ok {
+		for _, op := range c.ops {
+			if v2 := op.Vars.Get(v.Name); v2 != nil {
+				t2, err := common.ResolveType(v2.Type, c.schema.Resolve)
+				if _, ok := t2.(*types.NonNull); !ok && v2.Default != nil {
+					t2 = &types.NonNull{OfType: t2}
+				}
+				if err == nil && !typeCanBeUsedAs(t2, t) {
+					c.addErrMultiLoc([]errors.Location{v2.Loc, v.Loc}, "VariablesInAllowedPosition", "Variable %q of type %q used in position expecting type %q.", "$"+v.Name, t2, t)
+				}
+			}
+		}
+		return true, ""
+	}
+
+	// Unwrap NonNull: null is rejected, anything else is checked against the
+	// inner type. A null against a nullable type is always fine.
+	if nn, ok := t.(*types.NonNull); ok {
+		if isNull(v) {
+			return false, fmt.Sprintf("Expected %q, found null.", t)
+		}
+		t = nn.OfType
+	}
+	if isNull(v) {
+		return true, ""
+	}
+
+	switch t := t.(type) {
+	case *types.ScalarTypeDefinition, *types.EnumTypeDefinition:
+		if lit, ok := v.(*types.PrimitiveValue); ok {
+			if validateBasicLit(lit, t) {
+				return true, ""
+			}
+			return false, fmt.Sprintf("Expected type %q, found %s.", t, v)
+		}
+		return true, ""
+
+	case *types.List:
+		list, ok := v.(*types.ListValue)
+		if !ok {
+			return validateValueType(c, v, t.OfType) // single value instead of list
+		}
+		for i, entry := range list.Values {
+			if ok, reason := validateValueType(c, entry, t.OfType); !ok {
+				return false, fmt.Sprintf("In element #%d: %s", i, reason)
+			}
+		}
+		return true, ""
+
+	case *types.InputObject:
+		v, ok := v.(*types.ObjectValue)
+		if !ok {
+			return false, fmt.Sprintf("Expected %q, found not an object.", t)
+		}
+		// Every provided field must be declared and well-typed.
+		for _, f := range v.Fields {
+			name := f.Name.Name
+			iv := t.Values.Get(name)
+			if iv == nil {
+				return false, fmt.Sprintf("In field %q: Unknown field.", name)
+			}
+			if ok, reason := validateValueType(c, f.Value, iv.Type); !ok {
+				return false, fmt.Sprintf("In field %q: %s", name, reason)
+			}
+		}
+		// Every declared non-null field without a default must be provided.
+		for _, iv := range t.Values {
+			found := false
+			for _, f := range v.Fields {
+				if f.Name.Name == iv.Name.Name {
+					found = true
+					break
+				}
+			}
+			if !found {
+				if _, ok := iv.Type.(*types.NonNull); ok && iv.Default == nil {
+					return false, fmt.Sprintf("In field %q: Expected %q, found null.", iv.Name.Name, iv.Type)
+				}
+			}
+		}
+		return true, ""
+	}
+
+	return false, fmt.Sprintf("Expected type %q, found %s.", t, v)
+}
+
+// validateBasicLit reports whether primitive literal v (carrying its scanner
+// token type and raw text) is acceptable for scalar or enum type t. Built-in
+// scalars get exact token checks (Int additionally must fit in 32 bits, ID
+// accepts int or string, Boolean must be the identifiers true/false); custom
+// scalars are currently accepted unconditionally.
+func validateBasicLit(v *types.PrimitiveValue, t types.Type) bool {
+	switch t := t.(type) {
+	case *types.ScalarTypeDefinition:
+		switch t.Name {
+		case "Int":
+			if v.Type != scanner.Int {
+				return false
+			}
+			// Parsed as float so that the 32-bit range check below works for
+			// values that overflow int64 as well.
+			f, err := strconv.ParseFloat(v.Text, 64)
+			if err != nil {
+				panic(err)
+			}
+			return f >= math.MinInt32 && f <= math.MaxInt32
+		case "Float":
+			return v.Type == scanner.Int || v.Type == scanner.Float
+		case "String":
+			return v.Type == scanner.String
+		case "Boolean":
+			return v.Type == scanner.Ident && (v.Text == "true" || v.Text == "false")
+		case "ID":
+			return v.Type == scanner.Int || v.Type == scanner.String
+		default:
+			//TODO: Type-check against expected type by Unmarshalling
+			return true
+		}
+
+	case *types.EnumTypeDefinition:
+		if v.Type != scanner.Ident {
+			return false
+		}
+		for _, option := range t.EnumValuesDefinition {
+			if option.EnumValue == v.Text {
+				return true
+			}
+		}
+		return false
+	}
+
+	return false
+}
+
+// canBeFragment reports whether t is a composite type that a fragment may
+// condition on (object, interface or union).
+func canBeFragment(t types.Type) bool {
+	switch t.(type) {
+	case *types.ObjectTypeDefinition, *types.InterfaceTypeDefinition, *types.Union:
+		return true
+	default:
+		return false
+	}
+}
+
+// canBeInput reports whether t is usable as an input type: input objects,
+// scalars and enums, possibly wrapped in any number of List/NonNull layers.
+func canBeInput(t types.Type) bool {
+	switch t := t.(type) {
+	case *types.InputObject, *types.ScalarTypeDefinition, *types.EnumTypeDefinition:
+		return true
+	case *types.List:
+		return canBeInput(t.OfType)
+	case *types.NonNull:
+		return canBeInput(t.OfType)
+	default:
+		return false
+	}
+}
+
+// hasSubfields reports whether t ultimately resolves to a composite type and
+// therefore requires a sub-selection, unwrapping List/NonNull layers.
+func hasSubfields(t types.Type) bool {
+	switch t := t.(type) {
+	case *types.ObjectTypeDefinition, *types.InterfaceTypeDefinition, *types.Union:
+		return true
+	case *types.List:
+		return hasSubfields(t.OfType)
+	case *types.NonNull:
+		return hasSubfields(t.OfType)
+	default:
+		return false
+	}
+}
+
+// isLeaf reports whether t is a leaf type (scalar or enum) with no
+// selectable subfields.
+func isLeaf(t types.Type) bool {
+	switch t.(type) {
+	case *types.ScalarTypeDefinition, *types.EnumTypeDefinition:
+		return true
+	default:
+		return false
+	}
+}
+
+// isNull reports whether lit is the GraphQL null literal.
+func isNull(lit interface{}) bool {
+	_, ok := lit.(*types.NullValue)
+	return ok
+}
+
+// typesCompatible reports whether two field return types can be merged under
+// the OverlappingFieldsCanBeMerged rule: list-ness and non-null-ness must
+// match at every level, leaf types must be identical, and any pair of
+// composite types is considered mergeable (their subfields are checked
+// separately).
+func typesCompatible(a, b types.Type) bool {
+	al, aIsList := a.(*types.List)
+	bl, bIsList := b.(*types.List)
+	if aIsList || bIsList {
+		return aIsList && bIsList && typesCompatible(al.OfType, bl.OfType)
+	}
+
+	ann, aIsNN := a.(*types.NonNull)
+	bnn, bIsNN := b.(*types.NonNull)
+	if aIsNN || bIsNN {
+		return aIsNN && bIsNN && typesCompatible(ann.OfType, bnn.OfType)
+	}
+
+	if isLeaf(a) || isLeaf(b) {
+		return a == b
+	}
+
+	return true
+}
+
+// typeCanBeUsedAs reports whether a value of type t may be supplied where
+// type as is expected (VariablesInAllowedPosition): non-null may be used as
+// nullable but not vice versa, lists recurse element-wise, and otherwise the
+// (unwrapped) types must be identical.
+func typeCanBeUsedAs(t, as types.Type) bool {
+	nnT, okT := t.(*types.NonNull)
+	if okT {
+		t = nnT.OfType
+	}
+
+	nnAs, okAs := as.(*types.NonNull)
+	if okAs {
+		as = nnAs.OfType
+		if !okT {
+			return false // nullable can not be used as non-null
+		}
+	}
+
+	if t == as {
+		return true
+	}
+
+	if lT, ok := t.(*types.List); ok {
+		if lAs, ok := as.(*types.List); ok {
+			return typeCanBeUsedAs(lT.OfType, lAs.OfType)
+		}
+	}
+	return false
+}