Mirror of https://github.com/cwinfo/matterbridge.git, synced 2025-07-03 04:57:44 +00:00
Update dependencies (#1784)
vendor/github.com/graph-gophers/graphql-go/internal/common/blockstring.go (generated, vendored, new file, 103 lines)
@@ -0,0 +1,103 @@
// MIT License
//
// Copyright (c) 2019 GraphQL Contributors
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
// This implementation has been adapted from the graphql-js reference implementation
// https://github.com/graphql/graphql-js/blob/5eb7c4ded7ceb83ac742149cbe0dae07a8af9a30/src/language/blockString.js
// which is released under the MIT License above.

package common

import (
	"strings"
)

// Produces the value of a block string from its parsed raw value, similar to
// CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
//
// This implements the GraphQL spec's BlockStringValue() static algorithm.
func blockString(raw string) string {
	lines := strings.Split(raw, "\n")

	// Remove common indentation from all lines except the first (which has none)
	ind := blockStringIndentation(lines)
	if ind > 0 {
		for i := 1; i < len(lines); i++ {
			l := lines[i]
			if len(l) < ind {
				lines[i] = ""
				continue
			}
			lines[i] = l[ind:]
		}
	}

	// Remove leading and trailing blank lines
	trimStart := 0
	for i := 0; i < len(lines) && isBlank(lines[i]); i++ {
		trimStart++
	}
	lines = lines[trimStart:]
	trimEnd := 0
	for i := len(lines) - 1; i > 0 && isBlank(lines[i]); i-- {
		trimEnd++
	}
	lines = lines[:len(lines)-trimEnd]

	return strings.Join(lines, "\n")
}

func blockStringIndentation(lines []string) int {
	var commonIndent *int
	for i := 1; i < len(lines); i++ {
		l := lines[i]
		indent := leadingWhitespace(l)
		if indent == len(l) {
			// don't consider blank/empty lines
			continue
		}
		if indent == 0 {
			return 0
		}
		if commonIndent == nil || indent < *commonIndent {
			commonIndent = &indent
		}
	}
	if commonIndent == nil {
		return 0
	}
	return *commonIndent
}

func isBlank(s string) bool {
	return len(s) == 0 || leadingWhitespace(s) == len(s)
}

func leadingWhitespace(s string) int {
	i := 0
	for _, r := range s {
		if r != '\t' && r != ' ' {
			break
		}
		i++
	}
	return i
}
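Not part of the diff: a minimal sketch of how blockString behaves, written as a test that would sit next to blockstring.go in the same internal/common package (the function is unexported, so it is only callable from inside the package). The file name and the sample input are illustrative.

package common

import "testing"

// Checks the two documented steps: common indentation is stripped from every
// line after the first, and leading/trailing blank lines are removed.
func TestBlockStringTrimming(t *testing.T) {
	raw := "\n    Hello,\n      World!\n\n    Yours,\n      GraphQL.\n"
	want := "Hello,\n  World!\n\nYours,\n  GraphQL."
	if got := blockString(raw); got != want {
		t.Errorf("blockString(%q) = %q, want %q", raw, got, want)
	}
}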
vendor/github.com/graph-gophers/graphql-go/internal/common/directive.go (generated, vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
package common

import "github.com/graph-gophers/graphql-go/types"

func ParseDirectives(l *Lexer) types.DirectiveList {
	var directives types.DirectiveList
	for l.Peek() == '@' {
		l.ConsumeToken('@')
		d := &types.Directive{}
		d.Name = l.ConsumeIdentWithLoc()
		d.Name.Loc.Column--
		if l.Peek() == '(' {
			d.Arguments = ParseArgumentList(l)
		}
		directives = append(directives, d)
	}
	return directives
}
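Again not part of the diff: a short in-package sketch of ParseDirectives in use. It relies on the Lexer from lexer.go below; the directive names are arbitrary examples.

package common

import "testing"

// ParseDirectives reads consecutive @name(args) groups until the next token
// is no longer '@'. ConsumeWhitespace must run first to prime the lexer.
func TestParseDirectivesSketch(t *testing.T) {
	l := NewLexer(`@deprecated(reason: "use newField") @internal`, false)
	err := l.CatchSyntaxError(func() {
		l.ConsumeWhitespace()
		directives := ParseDirectives(l)
		if len(directives) != 2 || directives[0].Name.Name != "deprecated" {
			t.Errorf("unexpected directives: %+v", directives)
		}
	})
	if err != nil {
		t.Fatal(err)
	}
}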
vendor/github.com/graph-gophers/graphql-go/internal/common/lexer.go (generated, vendored, new file, 229 lines)
@@ -0,0 +1,229 @@
package common

import (
	"bytes"
	"fmt"
	"strconv"
	"strings"
	"text/scanner"

	"github.com/graph-gophers/graphql-go/errors"
	"github.com/graph-gophers/graphql-go/types"
)

type syntaxError string

type Lexer struct {
	sc                    *scanner.Scanner
	next                  rune
	comment               bytes.Buffer
	useStringDescriptions bool
}

type Ident struct {
	Name string
	Loc  errors.Location
}

func NewLexer(s string, useStringDescriptions bool) *Lexer {
	sc := &scanner.Scanner{
		Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings,
	}
	sc.Init(strings.NewReader(s))

	l := Lexer{sc: sc, useStringDescriptions: useStringDescriptions}
	l.sc.Error = l.CatchScannerError

	return &l
}

func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
	defer func() {
		if err := recover(); err != nil {
			if err, ok := err.(syntaxError); ok {
				errRes = errors.Errorf("syntax error: %s", err)
				errRes.Locations = []errors.Location{l.Location()}
				return
			}
			panic(err)
		}
	}()

	f()
	return
}

func (l *Lexer) Peek() rune {
	return l.next
}

// ConsumeWhitespace consumes whitespace and tokens equivalent to whitespace (e.g. commas and comments).
//
// Consumed comment characters will build the description for the next type or field encountered.
// The description is available from `DescComment()`, and will be reset every time `ConsumeWhitespace()` is
// executed unless l.useStringDescriptions is set.
func (l *Lexer) ConsumeWhitespace() {
	l.comment.Reset()
	for {
		l.next = l.sc.Scan()

		if l.next == ',' {
			// Similar to white space and line terminators, commas (',') are used to improve the
			// legibility of source text and separate lexical tokens but are otherwise syntactically and
			// semantically insignificant within GraphQL documents.
			//
			// http://facebook.github.io/graphql/draft/#sec-Insignificant-Commas
			continue
		}

		if l.next == '#' {
			// GraphQL source documents may contain single-line comments, starting with the '#' marker.
			//
			// A comment can contain any Unicode code point except `LineTerminator` so a comment always
			// consists of all code points starting with the '#' character up to but not including the
			// line terminator.
			l.consumeComment()
			continue
		}

		break
	}
}

// consumeDescription optionally consumes a description based on the June 2018 graphql spec if any are present.
//
// Single quote strings are also single line. Triple quote strings can be multi-line. Triple quote strings
// whitespace trimmed on both ends.
// If a description is found, consume any following comments as well
//
// http://facebook.github.io/graphql/June2018/#sec-Descriptions
func (l *Lexer) consumeDescription() string {
	// If the next token is not a string, we don't consume it
	if l.next != scanner.String {
		return ""
	}
	// Triple quote string is an empty "string" followed by an open quote due to the way the parser treats strings as one token
	var desc string
	if l.sc.Peek() == '"' {
		desc = l.consumeTripleQuoteComment()
	} else {
		desc = l.consumeStringComment()
	}
	l.ConsumeWhitespace()
	return desc
}

func (l *Lexer) ConsumeIdent() string {
	name := l.sc.TokenText()
	l.ConsumeToken(scanner.Ident)
	return name
}

func (l *Lexer) ConsumeIdentWithLoc() types.Ident {
	loc := l.Location()
	name := l.sc.TokenText()
	l.ConsumeToken(scanner.Ident)
	return types.Ident{Name: name, Loc: loc}
}

func (l *Lexer) ConsumeKeyword(keyword string) {
	if l.next != scanner.Ident || l.sc.TokenText() != keyword {
		l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
	}
	l.ConsumeWhitespace()
}

func (l *Lexer) ConsumeLiteral() *types.PrimitiveValue {
	lit := &types.PrimitiveValue{Type: l.next, Text: l.sc.TokenText()}
	l.ConsumeWhitespace()
	return lit
}

func (l *Lexer) ConsumeToken(expected rune) {
	if l.next != expected {
		l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected)))
	}
	l.ConsumeWhitespace()
}

func (l *Lexer) DescComment() string {
	comment := l.comment.String()
	desc := l.consumeDescription()
	if l.useStringDescriptions {
		return desc
	}
	return comment
}

func (l *Lexer) SyntaxError(message string) {
	panic(syntaxError(message))
}

func (l *Lexer) Location() errors.Location {
	return errors.Location{
		Line:   l.sc.Line,
		Column: l.sc.Column,
	}
}

func (l *Lexer) consumeTripleQuoteComment() string {
	l.next = l.sc.Next()
	if l.next != '"' {
		panic("consumeTripleQuoteComment used in wrong context: no third quote?")
	}

	var buf bytes.Buffer
	var numQuotes int
	for {
		l.next = l.sc.Next()
		if l.next == '"' {
			numQuotes++
		} else {
			numQuotes = 0
		}
		buf.WriteRune(l.next)
		if numQuotes == 3 || l.next == scanner.EOF {
			break
		}
	}
	val := buf.String()
	val = val[:len(val)-numQuotes]
	return blockString(val)
}

func (l *Lexer) consumeStringComment() string {
	val, err := strconv.Unquote(l.sc.TokenText())
	if err != nil {
		panic(err)
	}
	return val
}

// consumeComment consumes all characters from `#` to the first encountered line terminator.
// The characters are appended to `l.comment`.
func (l *Lexer) consumeComment() {
	if l.next != '#' {
		panic("consumeComment used in wrong context")
	}

	// TODO: count and trim whitespace so we can dedent any following lines.
	if l.sc.Peek() == ' ' {
		l.sc.Next()
	}

	if l.comment.Len() > 0 {
		l.comment.WriteRune('\n')
	}

	for {
		next := l.sc.Next()
		if next == '\r' || next == '\n' || next == scanner.EOF {
			break
		}
		l.comment.WriteRune(next)
	}
}

func (l *Lexer) CatchScannerError(s *scanner.Scanner, msg string) {
	l.SyntaxError(msg)
}
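A sketch (not in the commit) of driving the Lexer by hand from the same package: ConsumeWhitespace primes the first token and collects '#' comments, DescComment returns them, and ConsumeIdent reads an identifier. The input string and test name are made up for illustration.

package common

import (
	"testing"
	"text/scanner"
)

// With useStringDescriptions disabled, DescComment returns the '#' comment
// gathered by the preceding ConsumeWhitespace call.
func TestLexerSketch(t *testing.T) {
	l := NewLexer("# a comment\nhello", false)
	err := l.CatchSyntaxError(func() {
		l.ConsumeWhitespace()
		if l.Peek() != scanner.Ident {
			t.Fatalf("expected an identifier, got %s", scanner.TokenString(l.Peek()))
		}
		if got := l.DescComment(); got != "a comment" {
			t.Errorf("DescComment() = %q, want %q", got, "a comment")
		}
		if got := l.ConsumeIdent(); got != "hello" {
			t.Errorf("ConsumeIdent() = %q, want %q", got, "hello")
		}
	})
	if err != nil {
		t.Fatal(err)
	}
}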
vendor/github.com/graph-gophers/graphql-go/internal/common/literals.go (generated, vendored, new file, 58 lines)
@@ -0,0 +1,58 @@
package common

import (
	"text/scanner"

	"github.com/graph-gophers/graphql-go/types"
)

func ParseLiteral(l *Lexer, constOnly bool) types.Value {
	loc := l.Location()
	switch l.Peek() {
	case '$':
		if constOnly {
			l.SyntaxError("variable not allowed")
			panic("unreachable")
		}
		l.ConsumeToken('$')
		return &types.Variable{Name: l.ConsumeIdent(), Loc: loc}

	case scanner.Int, scanner.Float, scanner.String, scanner.Ident:
		lit := l.ConsumeLiteral()
		if lit.Type == scanner.Ident && lit.Text == "null" {
			return &types.NullValue{Loc: loc}
		}
		lit.Loc = loc
		return lit
	case '-':
		l.ConsumeToken('-')
		lit := l.ConsumeLiteral()
		lit.Text = "-" + lit.Text
		lit.Loc = loc
		return lit
	case '[':
		l.ConsumeToken('[')
		var list []types.Value
		for l.Peek() != ']' {
			list = append(list, ParseLiteral(l, constOnly))
		}
		l.ConsumeToken(']')
		return &types.ListValue{Values: list, Loc: loc}

	case '{':
		l.ConsumeToken('{')
		var fields []*types.ObjectField
		for l.Peek() != '}' {
			name := l.ConsumeIdentWithLoc()
			l.ConsumeToken(':')
			value := ParseLiteral(l, constOnly)
			fields = append(fields, &types.ObjectField{Name: name, Value: value})
		}
		l.ConsumeToken('}')
		return &types.ObjectValue{Fields: fields, Loc: loc}

	default:
		l.SyntaxError("invalid value")
		panic("unreachable")
	}
}
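Not in the commit: an in-package sketch of ParseLiteral handling a nested input-object literal. The input value is arbitrary; constOnly=true simply forbids $variables.

package common

import (
	"testing"

	"github.com/graph-gophers/graphql-go/types"
)

// Parses an input object containing a list of ints and an identifier literal;
// commas are skipped by the lexer as insignificant tokens.
func TestParseLiteralSketch(t *testing.T) {
	l := NewLexer(`{ids: [1, 2, 3], active: true}`, false)
	err := l.CatchSyntaxError(func() {
		l.ConsumeWhitespace()
		v := ParseLiteral(l, true)
		obj, ok := v.(*types.ObjectValue)
		if !ok || len(obj.Fields) != 2 {
			t.Errorf("unexpected literal: %#v", v)
		}
	})
	if err != nil {
		t.Fatal(err)
	}
}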
vendor/github.com/graph-gophers/graphql-go/internal/common/types.go (generated, vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
package common

import (
	"github.com/graph-gophers/graphql-go/errors"
	"github.com/graph-gophers/graphql-go/types"
)

func ParseType(l *Lexer) types.Type {
	t := parseNullType(l)
	if l.Peek() == '!' {
		l.ConsumeToken('!')
		return &types.NonNull{OfType: t}
	}
	return t
}

func parseNullType(l *Lexer) types.Type {
	if l.Peek() == '[' {
		l.ConsumeToken('[')
		ofType := ParseType(l)
		l.ConsumeToken(']')
		return &types.List{OfType: ofType}
	}

	return &types.TypeName{Ident: l.ConsumeIdentWithLoc()}
}

type Resolver func(name string) types.Type

// ResolveType attempts to resolve a type's name against a resolving function.
// This function is used when one needs to check if a TypeName exists in the resolver (typically a Schema).
//
// In the example below, ResolveType would be used to check if the resolving function
// returns a valid type for Dimension:
//
// type Profile {
//   picture(dimensions: Dimension): Url
// }
//
// ResolveType recursively unwraps List and NonNull types until a NamedType is reached.
func ResolveType(t types.Type, resolver Resolver) (types.Type, *errors.QueryError) {
	switch t := t.(type) {
	case *types.List:
		ofType, err := ResolveType(t.OfType, resolver)
		if err != nil {
			return nil, err
		}
		return &types.List{OfType: ofType}, nil
	case *types.NonNull:
		ofType, err := ResolveType(t.OfType, resolver)
		if err != nil {
			return nil, err
		}
		return &types.NonNull{OfType: ofType}, nil
	case *types.TypeName:
		refT := resolver(t.Name)
		if refT == nil {
			err := errors.Errorf("Unknown type %q.", t.Name)
			err.Rule = "KnownTypeNames"
			err.Locations = []errors.Location{t.Loc}
			return nil, err
		}
		return refT, nil
	default:
		return t, nil
	}
}
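A sketch (not in the commit) combining ParseType with ResolveType's error path: a resolver that knows no names makes ResolveType report the KnownTypeNames rule assigned in the code above. The type string is an arbitrary example.

package common

import (
	"testing"

	"github.com/graph-gophers/graphql-go/types"
)

// "[Int!]" parses to List(NonNull(TypeName)); resolving it against a resolver
// that returns nil for every name surfaces the KnownTypeNames error.
func TestResolveTypeSketch(t *testing.T) {
	l := NewLexer("[Int!]", false)
	var parsed types.Type
	if err := l.CatchSyntaxError(func() {
		l.ConsumeWhitespace()
		parsed = ParseType(l)
	}); err != nil {
		t.Fatal(err)
	}

	_, err := ResolveType(parsed, func(name string) types.Type { return nil })
	if err == nil || err.Rule != "KnownTypeNames" {
		t.Errorf("expected a KnownTypeNames error, got %v", err)
	}
}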
vendor/github.com/graph-gophers/graphql-go/internal/common/values.go (generated, vendored, new file, 37 lines)
@@ -0,0 +1,37 @@
package common

import (
	"github.com/graph-gophers/graphql-go/types"
)

func ParseInputValue(l *Lexer) *types.InputValueDefinition {
	p := &types.InputValueDefinition{}
	p.Loc = l.Location()
	p.Desc = l.DescComment()
	p.Name = l.ConsumeIdentWithLoc()
	l.ConsumeToken(':')
	p.TypeLoc = l.Location()
	p.Type = ParseType(l)
	if l.Peek() == '=' {
		l.ConsumeToken('=')
		p.Default = ParseLiteral(l, true)
	}
	p.Directives = ParseDirectives(l)
	return p
}

func ParseArgumentList(l *Lexer) types.ArgumentList {
	var args types.ArgumentList
	l.ConsumeToken('(')
	for l.Peek() != ')' {
		name := l.ConsumeIdentWithLoc()
		l.ConsumeToken(':')
		value := ParseLiteral(l, false)
		args = append(args, &types.Argument{
			Name:  name,
			Value: value,
		})
	}
	l.ConsumeToken(')')
	return args
}
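A final sketch, also not part of the commit: ParseInputValue on an SDL-style argument definition with a default value and a trailing directive, written as an in-package test with an illustrative input.

package common

import "testing"

// ParseInputValue reads name, ':', type, an optional '=' default (parsed with
// constOnly=true), and any trailing directives.
func TestParseInputValueSketch(t *testing.T) {
	l := NewLexer("first: Int = 10 @deprecated", false)
	err := l.CatchSyntaxError(func() {
		l.ConsumeWhitespace()
		v := ParseInputValue(l)
		if v.Name.Name != "first" || v.Default == nil || len(v.Directives) != 1 {
			t.Errorf("unexpected input value definition: %+v", v)
		}
	})
	if err != nil {
		t.Fatal(err)
	}
}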