author    Sebastian P <5564491+s3lph@users.noreply.github.com>  2020-10-01 22:50:56 +0200
committer GitHub <noreply@github.com>                           2020-10-01 22:50:56 +0200
commit    214a6a13861b9fd495797386f36507373728e577 (patch)
tree      a2eb9541924b432ecf7a0807f42281296fa144a4 /vendor/github.com
parent    e7781dc79c0c556f60eb64a93a95a5da494dcf2d (diff)
Add Mumble support (#1245)
Diffstat (limited to 'vendor/github.com')
-rw-r--r--  vendor/github.com/vincent-petithory/dataurl/LICENSE      |  20
-rw-r--r--  vendor/github.com/vincent-petithory/dataurl/README.md    |  81
-rw-r--r--  vendor/github.com/vincent-petithory/dataurl/dataurl.go   | 291
-rw-r--r--  vendor/github.com/vincent-petithory/dataurl/doc.go       |  28
-rw-r--r--  vendor/github.com/vincent-petithory/dataurl/lex.go       | 521
-rw-r--r--  vendor/github.com/vincent-petithory/dataurl/rfc2396.go   | 130
-rw-r--r--  vendor/github.com/vincent-petithory/dataurl/wercker.yml  |   1
7 files changed, 1072 insertions(+), 0 deletions(-)
diff --git a/vendor/github.com/vincent-petithory/dataurl/LICENSE b/vendor/github.com/vincent-petithory/dataurl/LICENSE
new file mode 100644
index 00000000..ae6cb62b
--- /dev/null
+++ b/vendor/github.com/vincent-petithory/dataurl/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Vincent Petithory
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/vincent-petithory/dataurl/README.md b/vendor/github.com/vincent-petithory/dataurl/README.md
new file mode 100644
index 00000000..1ac59ad2
--- /dev/null
+++ b/vendor/github.com/vincent-petithory/dataurl/README.md
@@ -0,0 +1,81 @@
+# Data URL Schemes for Go [![wercker status](https://app.wercker.com/status/6f9a2e144dfcc59e862c52459b452928/s "wercker status")](https://app.wercker.com/project/bykey/6f9a2e144dfcc59e862c52459b452928) [![GoDoc](https://godoc.org/github.com/vincent-petithory/dataurl?status.png)](https://godoc.org/github.com/vincent-petithory/dataurl)
+
+This package parses and generates Data URL Schemes for the Go language, according to [RFC 2397](http://tools.ietf.org/html/rfc2397).
+
+Data URLs are small chunks of data commonly used in browsers to display inline data,
+typically small images or data produced by the browser's FileReader API.
+
+Common use-cases:
+
+ * generate a data URL out of a `string`, `[]byte`, `io.Reader` for inclusion in HTML templates,
+ * parse a data URL sent by a browser in an http.Handler, and do something with the data (save to disk, etc.)
+ * ...
+
+Install the package with:
+~~~
+go get github.com/vincent-petithory/dataurl
+~~~
+
+## Usage
+
+~~~ go
+package main
+
+import (
+ "github.com/vincent-petithory/dataurl"
+ "fmt"
+)
+
+func main() {
+ dataURL, err := dataurl.DecodeString(`data:text/plain;charset=utf-8;base64,aGV5YQ==`)
+ if err != nil {
+ fmt.Println(err)
+ return
+ }
+ fmt.Printf("content type: %s, data: %s\n", dataURL.MediaType.ContentType(), string(dataURL.Data))
+ // Output: content type: text/plain, data: heya
+}
+~~~
+
+From an `http.Handler`:
+
+~~~ go
+func handleDataURLUpload(w http.ResponseWriter, r *http.Request) {
+ dataURL, err := dataurl.Decode(r.Body)
+ defer r.Body.Close()
+ if err != nil {
+ http.Error(w, err.Error(), http.StatusBadRequest)
+ return
+ }
+ if dataURL.ContentType() == "image/png" {
+ ioutil.WriteFile("image.png", dataURL.Data, 0644)
+ } else {
+ http.Error(w, "not a png", http.StatusBadRequest)
+ }
+}
+~~~
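+
+Encoding works the other way around. Below is a minimal sketch using `dataurl.New`
+(the helper name and the hard-coded media type are illustrative; `dataurl.EncodeBytes`
+can be used instead to detect the media type automatically):
+
+~~~ go
+func pngDataURL(png []byte) string {
+ // New builds a DataURL with base64 encoding by default.
+ du := dataurl.New(png, "image/png")
+ // String renders it, e.g. data:image/png;base64,iVBORw0...
+ return du.String()
+}
+~~~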
+
+## Command
+
+For convenience, a `dataurl` command is provided to encode/decode dataurl streams.
+
+~~~
+dataurl - Encode or decode dataurl data and print to standard output
+
+Usage: dataurl [OPTION]... [FILE]
+
+ dataurl encodes or decodes FILE or standard input if FILE is - or omitted, and prints to standard output.
+ Unless -mimetype is used, when FILE is specified, dataurl will attempt to detect its mimetype using Go's mime.TypeByExtension (http://golang.org/pkg/mime/#TypeByExtension). If this fails or data is read from STDIN, the mimetype will default to application/octet-stream.
+
+Options:
+ -a=false: encode data using ascii instead of base64
+ -ascii=false: encode data using ascii instead of base64
+ -d=false: decode data instead of encoding
+ -decode=false: decode data instead of encoding
+ -m="": force the mimetype of the data to encode to this value
+ -mimetype="": force the mimetype of the data to encode to this value
+~~~
+
+## Contributing
+
+Feel free to file an issue/make a pull request if you find any bug, or want to suggest enhancements.
diff --git a/vendor/github.com/vincent-petithory/dataurl/dataurl.go b/vendor/github.com/vincent-petithory/dataurl/dataurl.go
new file mode 100644
index 00000000..7a9fe67e
--- /dev/null
+++ b/vendor/github.com/vincent-petithory/dataurl/dataurl.go
@@ -0,0 +1,291 @@
+package dataurl
+
+import (
+ "bytes"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+const (
+ // EncodingBase64 is base64 encoding for the data url
+ EncodingBase64 = "base64"
+ // EncodingASCII is ascii encoding for the data url
+ EncodingASCII = "ascii"
+)
+
+func defaultMediaType() MediaType {
+ return MediaType{
+ "text",
+ "plain",
+ map[string]string{"charset": "US-ASCII"},
+ }
+}
+
+// MediaType is the combination of a media type, a media subtype
+// and optional parameters.
+type MediaType struct {
+ Type string
+ Subtype string
+ Params map[string]string
+}
+
+// ContentType returns the content type of the dataurl's data, in the form type/subtype.
+func (mt *MediaType) ContentType() string {
+ return fmt.Sprintf("%s/%s", mt.Type, mt.Subtype)
+}
+
+// String implements the Stringer interface.
+//
+// Params values are escaped with the Escape function, rather than in a quoted string.
+func (mt *MediaType) String() string {
+ var (
+ buf bytes.Buffer
+ keys = make([]string, len(mt.Params))
+ i int
+ )
+ for k := range mt.Params {
+ keys[i] = k
+ i++
+ }
+ sort.Strings(keys)
+ for _, k := range keys {
+ v := mt.Params[k]
+ fmt.Fprintf(&buf, ";%s=%s", k, EscapeString(v))
+ }
+ return mt.ContentType() + (&buf).String()
+}
+
+// DataURL combines a MediaType describing the type of its Data,
+// the Encoding of the Data, and the Data itself.
+type DataURL struct {
+ MediaType
+ Encoding string
+ Data []byte
+}
+
+// New returns a new DataURL initialized with data and
+// a MediaType parsed from mediatype and paramPairs.
+// mediatype must be of the form "type/subtype" or it will panic.
+// paramPairs must have an even number of elements or it will panic.
+// For more complex DataURLs, initialize a DataURL struct directly.
+// The DataURL is initialized with base64 encoding.
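+//
+// For example (illustrative),
+// New([]byte("heya"), "text/plain", "charset", "utf-8").String()
+// yields "data:text/plain;charset=utf-8;base64,aGV5YQ==".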
+func New(data []byte, mediatype string, paramPairs ...string) *DataURL {
+ parts := strings.Split(mediatype, "/")
+ if len(parts) != 2 {
+ panic("dataurl: invalid mediatype")
+ }
+
+ nParams := len(paramPairs)
+ if nParams%2 != 0 {
+ panic("dataurl: requires an even number of param pairs")
+ }
+ params := make(map[string]string)
+ for i := 0; i < nParams; i += 2 {
+ params[paramPairs[i]] = paramPairs[i+1]
+ }
+
+ mt := MediaType{
+ parts[0],
+ parts[1],
+ params,
+ }
+ return &DataURL{
+ MediaType: mt,
+ Encoding: EncodingBase64,
+ Data: data,
+ }
+}
+
+// String implements the Stringer interface.
+//
+// Note: it doesn't guarantee the returned string is equal to
+// the initial source string that was used to create this DataURL.
+// The reasons for that are:
+// * Insertion of default values for MediaType that were maybe not in the initial string,
+// * Various ways to encode the MediaType parameters (quoted string or url encoded string, the latter is used),
+func (du *DataURL) String() string {
+ var buf bytes.Buffer
+ du.WriteTo(&buf)
+ return (&buf).String()
+}
+
+// WriteTo implements the WriterTo interface.
+// See the note about String().
+func (du *DataURL) WriteTo(w io.Writer) (n int64, err error) {
+ var ni int
+ ni, _ = fmt.Fprint(w, "data:")
+ n += int64(ni)
+
+ ni, _ = fmt.Fprint(w, du.MediaType.String())
+ n += int64(ni)
+
+ if du.Encoding == EncodingBase64 {
+ ni, _ = fmt.Fprint(w, ";base64")
+ n += int64(ni)
+ }
+
+ ni, _ = fmt.Fprint(w, ",")
+ n += int64(ni)
+
+ if du.Encoding == EncodingBase64 {
+ encoder := base64.NewEncoder(base64.StdEncoding, w)
+ ni, err = encoder.Write(du.Data)
+ if err != nil {
+ return
+ }
+ encoder.Close()
+ } else if du.Encoding == EncodingASCII {
+ ni, _ = fmt.Fprint(w, Escape(du.Data))
+ n += int64(ni)
+ } else {
+ err = fmt.Errorf("dataurl: invalid encoding %s", du.Encoding)
+ return
+ }
+
+ return
+}
+
+// UnmarshalText decodes a Data URL string and sets it to *du
+func (du *DataURL) UnmarshalText(text []byte) error {
+ decoded, err := DecodeString(string(text))
+ if err != nil {
+ return err
+ }
+ *du = *decoded
+ return nil
+}
+
+// MarshalText writes du as a Data URL
+func (du *DataURL) MarshalText() ([]byte, error) {
+ buf := bytes.NewBuffer(nil)
+ if _, err := du.WriteTo(buf); err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+type encodedDataReader func(string) ([]byte, error)
+
+var asciiDataReader encodedDataReader = func(s string) ([]byte, error) {
+ us, err := Unescape(s)
+ if err != nil {
+ return nil, err
+ }
+ return []byte(us), nil
+}
+
+var base64DataReader encodedDataReader = func(s string) ([]byte, error) {
+ data, err := base64.StdEncoding.DecodeString(s)
+ if err != nil {
+ return nil, err
+ }
+ return []byte(data), nil
+}
+
+type parser struct {
+ du *DataURL
+ l *lexer
+ currentAttr string
+ unquoteParamVal bool
+ encodedDataReaderFn encodedDataReader
+}
+
+func (p *parser) parse() error {
+ for item := range p.l.items {
+ switch item.t {
+ case itemError:
+ return errors.New(item.String())
+ case itemMediaType:
+ p.du.MediaType.Type = item.val
+ // Should we clear the default
+ // "charset" parameter at this point?
+ delete(p.du.MediaType.Params, "charset")
+ case itemMediaSubType:
+ p.du.MediaType.Subtype = item.val
+ case itemParamAttr:
+ p.currentAttr = item.val
+ case itemLeftStringQuote:
+ p.unquoteParamVal = true
+ case itemParamVal:
+ val := item.val
+ if p.unquoteParamVal {
+ p.unquoteParamVal = false
+ us, err := strconv.Unquote("\"" + val + "\"")
+ if err != nil {
+ return err
+ }
+ val = us
+ } else {
+ us, err := UnescapeToString(val)
+ if err != nil {
+ return err
+ }
+ val = us
+ }
+ p.du.MediaType.Params[p.currentAttr] = val
+ case itemBase64Enc:
+ p.du.Encoding = EncodingBase64
+ p.encodedDataReaderFn = base64DataReader
+ case itemDataComma:
+ if p.encodedDataReaderFn == nil {
+ p.encodedDataReaderFn = asciiDataReader
+ }
+ case itemData:
+ reader, err := p.encodedDataReaderFn(item.val)
+ if err != nil {
+ return err
+ }
+ p.du.Data = reader
+ case itemEOF:
+ if p.du.Data == nil {
+ p.du.Data = []byte("")
+ }
+ return nil
+ }
+ }
+ panic("EOF not found")
+}
+
+// DecodeString decodes a Data URL scheme string.
+func DecodeString(s string) (*DataURL, error) {
+ du := &DataURL{
+ MediaType: defaultMediaType(),
+ Encoding: EncodingASCII,
+ }
+
+ parser := &parser{
+ du: du,
+ l: lex(s),
+ }
+ if err := parser.parse(); err != nil {
+ return nil, err
+ }
+ return du, nil
+}
+
+// Decode decodes a Data URL scheme from a io.Reader.
+func Decode(r io.Reader) (*DataURL, error) {
+ data, err := ioutil.ReadAll(r)
+ if err != nil {
+ return nil, err
+ }
+ return DecodeString(string(data))
+}
+
+// EncodeBytes encodes the data bytes into a Data URL string, using base64 encoding.
+//
+// The media type of data is detected using http.DetectContentType.
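+//
+// For example, EncodeBytes([]byte("heya")) should yield
+// "data:text/plain;charset=utf-8;base64,aGV5YQ==", since
+// http.DetectContentType reports plain UTF-8 text for that input.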
+func EncodeBytes(data []byte) string {
+ mt := http.DetectContentType(data)
+ // http.DetectContentType may add spurious spaces between ; and a parameter.
+ // The canonical way is to not have them.
+ cleanedMt := strings.Replace(mt, "; ", ";", -1)
+
+ return New(data, cleanedMt).String()
+}
diff --git a/vendor/github.com/vincent-petithory/dataurl/doc.go b/vendor/github.com/vincent-petithory/dataurl/doc.go
new file mode 100644
index 00000000..56461d04
--- /dev/null
+++ b/vendor/github.com/vincent-petithory/dataurl/doc.go
@@ -0,0 +1,28 @@
+/*
+Package dataurl parses Data URL Schemes
+according to RFC 2397
+(http://tools.ietf.org/html/rfc2397).
+
+Data URLs are small chunks of data commonly used in browsers to display inline data,
+typically small images or data produced by the browser's FileReader API.
+
+A dataurl looks like:
+
+ data:text/plain;charset=utf-8,A%20brief%20note
+
+Or, with base64 encoding:
+
+ data:image/vnd.microsoft.icon;name=golang%20favicon;base64,AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAD///8AVE44//7hdv/+4Xb//uF2//7hdv/+4Xb//uF2//7hdv/+4Xb//uF2//7hdv/+4Xb/
+ /uF2/1ROOP////8A////AFROOP/+4Xb//uF2//7hdv/+4Xb//uF2//7hdv/+4Xb//uF2//7hdv/+
+ ...
+ /6CcjP97c07/e3NO/1dOMf9BOiX/TkUn/2VXLf97c07/e3NO/6CcjP/h4uX/////AP///wD///8A
+ ////AP///wD///8A////AP///wDq6/H/3N/j/9fZ3f/q6/H/////AP///wD///8A////AP///wD/
+ //8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+ AAAAAAAAAAAAAA==
+
+Common functions are Decode and DecodeString to obtain a DataURL,
+and DataURL.String() and DataURL.WriteTo to generate a Data URL string.
+
+*/
+package dataurl
diff --git a/vendor/github.com/vincent-petithory/dataurl/lex.go b/vendor/github.com/vincent-petithory/dataurl/lex.go
new file mode 100644
index 00000000..1a8717f5
--- /dev/null
+++ b/vendor/github.com/vincent-petithory/dataurl/lex.go
@@ -0,0 +1,521 @@
+package dataurl
+
+import (
+ "fmt"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+type item struct {
+ t itemType
+ val string
+}
+
+func (i item) String() string {
+ switch i.t {
+ case itemEOF:
+ return "EOF"
+ case itemError:
+ return i.val
+ }
+ if len(i.val) > 10 {
+ return fmt.Sprintf("%.10q...", i.val)
+ }
+ return fmt.Sprintf("%q", i.val)
+}
+
+type itemType int
+
+const (
+ itemError itemType = iota
+ itemEOF
+
+ itemDataPrefix
+
+ itemMediaType
+ itemMediaSep
+ itemMediaSubType
+ itemParamSemicolon
+ itemParamAttr
+ itemParamEqual
+ itemLeftStringQuote
+ itemRightStringQuote
+ itemParamVal
+
+ itemBase64Enc
+
+ itemDataComma
+ itemData
+)
+
+const eof rune = -1
+
+func isTokenRune(r rune) bool {
+ return r <= unicode.MaxASCII &&
+ !unicode.IsControl(r) &&
+ !unicode.IsSpace(r) &&
+ !isTSpecialRune(r)
+}
+
+func isTSpecialRune(r rune) bool {
+ return r == '(' ||
+ r == ')' ||
+ r == '<' ||
+ r == '>' ||
+ r == '@' ||
+ r == ',' ||
+ r == ';' ||
+ r == ':' ||
+ r == '\\' ||
+ r == '"' ||
+ r == '/' ||
+ r == '[' ||
+ r == ']' ||
+ r == '?' ||
+ r == '='
+}
+
+// See http://tools.ietf.org/html/rfc2045
+// This doesn't include the extension-token case
+// as it's handled separately
+func isDiscreteType(s string) bool {
+ if strings.HasPrefix(s, "text") ||
+ strings.HasPrefix(s, "image") ||
+ strings.HasPrefix(s, "audio") ||
+ strings.HasPrefix(s, "video") ||
+ strings.HasPrefix(s, "application") {
+ return true
+ }
+ return false
+}
+
+// See http://tools.ietf.org/html/rfc2045
+// This doesn't include the extension-token case
+// as it's handled separately
+func isCompositeType(s string) bool {
+ if strings.HasPrefix(s, "message") ||
+ strings.HasPrefix(s, "multipart") {
+ return true
+ }
+ return false
+}
+
+func isURLCharRune(r rune) bool {
+ // We're a bit permissive here,
+ // by not including '%' in delims
+ // This is okay, since url unescaping will validate
+ // that later in the parser.
+ return r <= unicode.MaxASCII &&
+ !(r >= 0x00 && r <= 0x1F) && r != 0x7F && /* control */
+ // delims
+ r != ' ' &&
+ r != '<' &&
+ r != '>' &&
+ r != '#' &&
+ r != '"' &&
+ // unwise
+ r != '{' &&
+ r != '}' &&
+ r != '|' &&
+ r != '\\' &&
+ r != '^' &&
+ r != '[' &&
+ r != ']' &&
+ r != '`'
+}
+
+func isBase64Rune(r rune) bool {
+ return (r >= 'a' && r <= 'z') ||
+ (r >= 'A' && r <= 'Z') ||
+ (r >= '0' && r <= '9') ||
+ r == '+' ||
+ r == '/' ||
+ r == '=' ||
+ r == '\n'
+}
+
+type stateFn func(*lexer) stateFn
+
+// lexer lexes the data URL scheme input string.
+// The implementation is adapted from the text/template/parse package.
+type lexer struct {
+ input string
+ start int
+ pos int
+ width int
+ seenBase64Item bool
+ items chan item
+}
+
+func (l *lexer) run() {
+ for state := lexBeforeDataPrefix; state != nil; {
+ state = state(l)
+ }
+ close(l.items)
+}
+
+func (l *lexer) emit(t itemType) {
+ l.items <- item{t, l.input[l.start:l.pos]}
+ l.start = l.pos
+}
+
+func (l *lexer) next() (r rune) {
+ if l.pos >= len(l.input) {
+ l.width = 0
+ return eof
+ }
+ r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
+ l.pos += l.width
+ return r
+}
+
+func (l *lexer) backup() {
+ l.pos -= l.width
+}
+
+func (l *lexer) ignore() {
+ l.start = l.pos
+}
+
+func (l *lexer) errorf(format string, args ...interface{}) stateFn {
+ l.items <- item{itemError, fmt.Sprintf(format, args...)}
+ return nil
+}
+
+func lex(input string) *lexer {
+ l := &lexer{
+ input: input,
+ items: make(chan item),
+ }
+ go l.run() // Concurrently run state machine.
+ return l
+}
+
+const (
+ dataPrefix = "data:"
+ mediaSep = '/'
+ paramSemicolon = ';'
+ paramEqual = '='
+ dataComma = ','
+)
+
+// start lexing by detecting data prefix
+func lexBeforeDataPrefix(l *lexer) stateFn {
+ if strings.HasPrefix(l.input[l.pos:], dataPrefix) {
+ return lexDataPrefix
+ }
+ return l.errorf("missing data prefix")
+}
+
+// lex data prefix
+func lexDataPrefix(l *lexer) stateFn {
+ l.pos += len(dataPrefix)
+ l.emit(itemDataPrefix)
+ return lexAfterDataPrefix
+}
+
+// lex what's after data prefix.
+// it can be the media type/subtype separator,
+// the base64 encoding, or the comma preceding the data
+func lexAfterDataPrefix(l *lexer) stateFn {
+ switch r := l.next(); {
+ case r == paramSemicolon:
+ l.backup()
+ return lexParamSemicolon
+ case r == dataComma:
+ l.backup()
+ return lexDataComma
+ case r == eof:
+ return l.errorf("missing comma before data")
+ case r == 'x' || r == 'X':
+ if l.next() == '-' {
+ return lexXTokenMediaType
+ }
+ return lexInDiscreteMediaType
+ case isTokenRune(r):
+ return lexInDiscreteMediaType
+ default:
+ return l.errorf("invalid character after data prefix")
+ }
+}
+
+func lexXTokenMediaType(l *lexer) stateFn {
+ for {
+ switch r := l.next(); {
+ case r == mediaSep:
+ l.backup()
+ return lexMediaType
+ case r == eof:
+ return l.errorf("missing media type slash")
+ case isTokenRune(r):
+ default:
+ return l.errorf("invalid character for media type")
+ }
+ }
+}
+
+func lexInDiscreteMediaType(l *lexer) stateFn {
+ for {
+ switch r := l.next(); {
+ case r == mediaSep:
+ l.backup()
+ // check it's valid discrete type
+ if !isDiscreteType(l.input[l.start:l.pos]) &&
+ !isCompositeType(l.input[l.start:l.pos]) {
+ return l.errorf("invalid media type")
+ }
+ return lexMediaType
+ case r == eof:
+ return l.errorf("missing media type slash")
+ case isTokenRune(r):
+ default:
+ return l.errorf("invalid character for media type")
+ }
+ }
+}
+
+func lexMediaType(l *lexer) stateFn {
+ if l.pos > l.start {
+ l.emit(itemMediaType)
+ }
+ return lexMediaSep
+}
+
+func lexMediaSep(l *lexer) stateFn {
+ l.next()
+ l.emit(itemMediaSep)
+ return lexAfterMediaSep
+}
+
+func lexAfterMediaSep(l *lexer) stateFn {
+ for {
+ switch r := l.next(); {
+ case r == paramSemicolon || r == dataComma:
+ l.backup()
+ return lexMediaSubType
+ case r == eof:
+ return l.errorf("incomplete media type")
+ case isTokenRune(r):
+ default:
+ return l.errorf("invalid character for media subtype")
+ }
+ }
+}
+
+func lexMediaSubType(l *lexer) stateFn {
+ if l.pos > l.start {
+ l.emit(itemMediaSubType)
+ }
+ return lexAfterMediaSubType
+}
+
+func lexAfterMediaSubType(l *lexer) stateFn {
+ switch r := l.next(); {
+ case r == paramSemicolon:
+ l.backup()
+ return lexParamSemicolon
+ case r == dataComma:
+ l.backup()
+ return lexDataComma
+ case r == eof:
+ return l.errorf("missing comma before data")
+ default:
+ return l.errorf("expected semicolon or comma")
+ }
+}
+
+func lexParamSemicolon(l *lexer) stateFn {
+ l.next()
+ l.emit(itemParamSemicolon)
+ return lexAfterParamSemicolon
+}
+
+func lexAfterParamSemicolon(l *lexer) stateFn {
+ switch r := l.next(); {
+ case r == eof:
+ return l.errorf("unterminated parameter sequence")
+ case r == paramEqual || r == dataComma:
+ return l.errorf("unterminated parameter sequence")
+ case isTokenRune(r):
+ l.backup()
+ return lexInParamAttr
+ default:
+ return l.errorf("invalid character for parameter attribute")
+ }
+}
+
+func lexBase64Enc(l *lexer) stateFn {
+ if l.pos > l.start {
+ if v := l.input[l.start:l.pos]; v != "base64" {
+ return l.errorf("expected base64, got %s", v)
+ }
+ l.seenBase64Item = true
+ l.emit(itemBase64Enc)
+ }
+ return lexDataComma
+}
+
+func lexInParamAttr(l *lexer) stateFn {
+ for {
+ switch r := l.next(); {
+ case r == paramEqual:
+ l.backup()
+ return lexParamAttr
+ case r == dataComma:
+ l.backup()
+ return lexBase64Enc
+ case r == eof:
+ return l.errorf("unterminated parameter sequence")
+ case isTokenRune(r):
+ default:
+ return l.errorf("invalid character for parameter attribute")
+ }
+ }
+}
+
+func lexParamAttr(l *lexer) stateFn {
+ if l.pos > l.start {
+ l.emit(itemParamAttr)
+ }
+ return lexParamEqual
+}
+
+func lexParamEqual(l *lexer) stateFn {
+ l.next()
+ l.emit(itemParamEqual)
+ return lexAfterParamEqual
+}
+
+func lexAfterParamEqual(l *lexer) stateFn {
+ switch r := l.next(); {
+ case r == '"':
+ l.emit(itemLeftStringQuote)
+ return lexInQuotedStringParamVal
+ case r == eof:
+ return l.errorf("missing comma before data")
+ case isTokenRune(r):
+ return lexInParamVal
+ default:
+ return l.errorf("invalid character for parameter value")
+ }
+}
+
+func lexInQuotedStringParamVal(l *lexer) stateFn {
+ for {
+ switch r := l.next(); {
+ case r == eof:
+ return l.errorf("unclosed quoted string")
+ case r == '\\':
+ return lexEscapedChar
+ case r == '"':
+ l.backup()
+ return lexQuotedStringParamVal
+ case r <= unicode.MaxASCII:
+ default:
+ return l.errorf("invalid character for parameter value")
+ }
+ }
+}
+
+func lexEscapedChar(l *lexer) stateFn {
+ switch r := l.next(); {
+ case r <= unicode.MaxASCII:
+ return lexInQuotedStringParamVal
+ case r == eof:
+ return l.errorf("unexpected eof")
+ default:
+ return l.errorf("invalid escaped character")
+ }
+}
+
+func lexInParamVal(l *lexer) stateFn {
+ for {
+ switch r := l.next(); {
+ case r == paramSemicolon || r == dataComma:
+ l.backup()
+ return lexParamVal
+ case r == eof:
+ return l.errorf("missing comma before data")
+ case isTokenRune(r):
+ default:
+ return l.errorf("invalid character for parameter value")
+ }
+ }
+}
+
+func lexQuotedStringParamVal(l *lexer) stateFn {
+ if l.pos > l.start {
+ l.emit(itemParamVal)
+ }
+ l.next()
+ l.emit(itemRightStringQuote)
+ return lexAfterParamVal
+}
+
+func lexParamVal(l *lexer) stateFn {
+ if l.pos > l.start {
+ l.emit(itemParamVal)
+ }
+ return lexAfterParamVal
+}
+
+func lexAfterParamVal(l *lexer) stateFn {
+ switch r := l.next(); {
+ case r == paramSemicolon:
+ l.backup()
+ return lexParamSemicolon
+ case r == dataComma:
+ l.backup()
+ return lexDataComma
+ case r == eof:
+ return l.errorf("missing comma before data")
+ default:
+ return l.errorf("expected semicolon or comma")
+ }
+}
+
+func lexDataComma(l *lexer) stateFn {
+ l.next()
+ l.emit(itemDataComma)
+ if l.seenBase64Item {
+ return lexBase64Data
+ }
+ return lexData
+}
+
+func lexData(l *lexer) stateFn {
+Loop:
+ for {
+ switch r := l.next(); {
+ case r == eof:
+ break Loop
+ case isURLCharRune(r):
+ default:
+ return l.errorf("invalid data character")
+ }
+ }
+ if l.pos > l.start {
+ l.emit(itemData)
+ }
+ l.emit(itemEOF)
+ return nil
+}
+
+func lexBase64Data(l *lexer) stateFn {
+Loop:
+ for {
+ switch r := l.next(); {
+ case r == eof:
+ break Loop
+ case isBase64Rune(r):
+ default:
+ return l.errorf("invalid data character")
+ }
+ }
+ if l.pos > l.start {
+ l.emit(itemData)
+ }
+ l.emit(itemEOF)
+ return nil
+}
diff --git a/vendor/github.com/vincent-petithory/dataurl/rfc2396.go b/vendor/github.com/vincent-petithory/dataurl/rfc2396.go
new file mode 100644
index 00000000..e2ea0cac
--- /dev/null
+++ b/vendor/github.com/vincent-petithory/dataurl/rfc2396.go
@@ -0,0 +1,130 @@
+package dataurl
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "strings"
+)
+
+// Escape implements URL escaping, as defined in RFC 2397 (http://tools.ietf.org/html/rfc2397).
+// It differs a bit from net/url's QueryEscape and QueryUnescape, e.g. in how spaces are treated (%20 here rather than +):
+//
+// Only ASCII chars are allowed. Reserved chars are escaped to their %xx form.
+// Unreserved chars are [a-z], [A-Z], [0-9], and -_.!~*'().
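+//
+// For example, Escape([]byte("A brief note")) yields "A%20brief%20note".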
+func Escape(data []byte) string {
+ var buf = new(bytes.Buffer)
+ for _, b := range data {
+ switch {
+ case isUnreserved(b):
+ buf.WriteByte(b)
+ default:
+ fmt.Fprintf(buf, "%%%02X", b)
+ }
+ }
+ return buf.String()
+}
+
+// EscapeString is like Escape, but taking
+// a string as argument.
+func EscapeString(s string) string {
+ return Escape([]byte(s))
+}
+
+// isUnreserved returns true
+// if the byte c is an unreserved char,
+// as defined in RFC 2396.
+func isUnreserved(c byte) bool {
+ return (c >= 'a' && c <= 'z') ||
+ (c >= 'A' && c <= 'Z') ||
+ (c >= '0' && c <= '9') ||
+ c == '-' ||
+ c == '_' ||
+ c == '.' ||
+ c == '!' ||
+ c == '~' ||
+ c == '*' ||
+ c == '\'' ||
+ c == '(' ||
+ c == ')'
+}
+
+func isHex(c byte) bool {
+ switch {
+ case c >= 'a' && c <= 'f':
+ return true
+ case c >= 'A' && c <= 'F':
+ return true
+ case c >= '0' && c <= '9':
+ return true
+ }
+ return false
+}
+
+// borrowed from net/url/url.go
+func unhex(c byte) byte {
+ switch {
+ case '0' <= c && c <= '9':
+ return c - '0'
+ case 'a' <= c && c <= 'f':
+ return c - 'a' + 10
+ case 'A' <= c && c <= 'F':
+ return c - 'A' + 10
+ }
+ return 0
+}
+
+// Unescape unescapes a character sequence
+// escaped with Escape or EscapeString.
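+//
+// For example, Unescape("A%20brief%20note") yields []byte("A brief note").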
+func Unescape(s string) ([]byte, error) {
+ var buf = new(bytes.Buffer)
+ reader := strings.NewReader(s)
+
+ for {
+ r, size, err := reader.ReadRune()
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ return nil, err
+ }
+ if size > 1 {
+ return nil, fmt.Errorf("rfc2396: non-ASCII char detected")
+ }
+
+ switch r {
+ case '%':
+ eb1, err := reader.ReadByte()
+ if err == io.EOF {
+ return nil, fmt.Errorf("rfc2396: unexpected end of unescape sequence")
+ }
+ if err != nil {
+ return nil, err
+ }
+ if !isHex(eb1) {
+ return nil, fmt.Errorf("rfc2396: invalid char 0x%x in unescape sequence", eb1)
+ }
+ eb0, err := reader.ReadByte()
+ if err == io.EOF {
+ return nil, fmt.Errorf("rfc2396: unexpected end of unescape sequence")
+ }
+ if err != nil {
+ return nil, err
+ }
+ if !isHex(eb0) {
+ return nil, fmt.Errorf("rfc2396: invalid char 0x%x in unescape sequence", eb0)
+ }
+ buf.WriteByte(unhex(eb0) + unhex(eb1)*16)
+ default:
+ buf.WriteByte(byte(r))
+ }
+ }
+ return buf.Bytes(), nil
+}
+
+// UnescapeToString is like Unescape, but returning
+// a string.
+func UnescapeToString(s string) (string, error) {
+ b, err := Unescape(s)
+ return string(b), err
+}
diff --git a/vendor/github.com/vincent-petithory/dataurl/wercker.yml b/vendor/github.com/vincent-petithory/dataurl/wercker.yml
new file mode 100644
index 00000000..3ab8084c
--- /dev/null
+++ b/vendor/github.com/vincent-petithory/dataurl/wercker.yml
@@ -0,0 +1 @@
+box: wercker/default
\ No newline at end of file