Diffstat (limited to 'vendor/github.com/mattermost/mattermost-server/v5/shared')
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/filesstore.go  83
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/localstore.go  211
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3_overrides.go  56
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3store.go  442
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/i18n/i18n.go  185
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/autolink.go  255
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/block_quote.go  62
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/blocks.go  154
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/document.go  22
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/fenced_code.go  112
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html.go  192
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html_entities.go  2132
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/indented_code.go  98
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inlines.go  663
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inspect.go  78
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/lines.go  32
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/links.go  184
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/list.go  220
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/markdown.go  147
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/paragraph.go  71
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/reference_definition.go  75
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/default.go  99
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/errors.go  32
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/global.go  98
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/levels.go  51
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/log.go  361
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/logr.go  244
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/stdlog.go  87
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/sugar.go  30
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/syslog.go  142
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/tcp.go  273
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/test-tls-client-cert.pem  43
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/testing.go  46
33 files changed, 6980 insertions, 0 deletions
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/filesstore.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/filesstore.go
new file mode 100644
index 00000000..ef02895d
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/filesstore.go
@@ -0,0 +1,83 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "io"
+ "time"
+
+ "github.com/pkg/errors"
+)
+
+const (
+ driverS3 = "amazons3"
+ driverLocal = "local"
+)
+
+type ReadCloseSeeker interface {
+ io.ReadCloser
+ io.Seeker
+}
+
+type FileBackend interface {
+ TestConnection() error
+
+ Reader(path string) (ReadCloseSeeker, error)
+ ReadFile(path string) ([]byte, error)
+ FileExists(path string) (bool, error)
+ FileSize(path string) (int64, error)
+ CopyFile(oldPath, newPath string) error
+ MoveFile(oldPath, newPath string) error
+ WriteFile(fr io.Reader, path string) (int64, error)
+ AppendFile(fr io.Reader, path string) (int64, error)
+ RemoveFile(path string) error
+ FileModTime(path string) (time.Time, error)
+
+ ListDirectory(path string) ([]string, error)
+ RemoveDirectory(path string) error
+}
+
+type FileBackendSettings struct {
+ DriverName string
+ Directory string
+ AmazonS3AccessKeyId string
+ AmazonS3SecretAccessKey string
+ AmazonS3Bucket string
+ AmazonS3PathPrefix string
+ AmazonS3Region string
+ AmazonS3Endpoint string
+ AmazonS3SSL bool
+ AmazonS3SignV2 bool
+ AmazonS3SSE bool
+ AmazonS3Trace bool
+}
+
+func (settings *FileBackendSettings) CheckMandatoryS3Fields() error {
+ if settings.AmazonS3Bucket == "" {
+ return errors.New("missing s3 bucket settings")
+ }
+
+ // If the S3 endpoint is not set, fall back to the default AWS endpoint.
+ if settings.AmazonS3Endpoint == "" {
+ settings.AmazonS3Endpoint = "s3.amazonaws.com"
+ }
+
+ return nil
+}
+
+func NewFileBackend(settings FileBackendSettings) (FileBackend, error) {
+ switch settings.DriverName {
+ case driverS3:
+ backend, err := NewS3FileBackend(settings)
+ if err != nil {
+ return nil, errors.Wrap(err, "unable to connect to the s3 backend")
+ }
+ return backend, nil
+ case driverLocal:
+ return &LocalFileBackend{
+ directory: settings.Directory,
+ }, nil
+ }
+ return nil, errors.New("no valid filestorage driver found")
+}
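
As a brief illustration of the factory above, here is a minimal, hypothetical caller of NewFileBackend using the local driver; the "./data" directory and the file paths are placeholders, not values defined by this package.

package main

import (
	"fmt"
	"strings"

	"github.com/mattermost/mattermost-server/v5/shared/filestore"
)

func main() {
	// Select the local driver; "./data" is an illustrative storage root.
	backend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
		DriverName: "local",
		Directory:  "./data",
	})
	if err != nil {
		panic(err)
	}

	// Exercise the FileBackend interface: write a payload and read it back.
	if _, err := backend.WriteFile(strings.NewReader("hello"), "notes/hello.txt"); err != nil {
		panic(err)
	}
	data, err := backend.ReadFile("notes/hello.txt")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data))
}
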
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/localstore.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/localstore.go
new file mode 100644
index 00000000..6cd8c4ca
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/localstore.go
@@ -0,0 +1,211 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "time"
+
+ "github.com/pkg/errors"
+
+ "github.com/mattermost/mattermost-server/v5/shared/mlog"
+)
+
+const (
+ TestFilePath = "/testfile"
+)
+
+type LocalFileBackend struct {
+ directory string
+}
+
+// copyFile will copy a file from src path to dst path.
+// Overwrites any existing files at dst.
+// Permissions are copied from file at src to the new file at dst.
+func copyFile(src, dst string) (err error) {
+ in, err := os.Open(src)
+ if err != nil {
+ return
+ }
+ defer in.Close()
+
+ if err = os.MkdirAll(filepath.Dir(dst), os.ModePerm); err != nil {
+ return
+ }
+ out, err := os.Create(dst)
+ if err != nil {
+ return
+ }
+ defer func() {
+ if e := out.Close(); e != nil {
+ err = e
+ }
+ }()
+
+ _, err = io.Copy(out, in)
+ if err != nil {
+ return
+ }
+
+ err = out.Sync()
+ if err != nil {
+ return
+ }
+
+ stat, err := os.Stat(src)
+ if err != nil {
+ return
+ }
+ err = os.Chmod(dst, stat.Mode())
+ if err != nil {
+ return
+ }
+
+ return
+}
+
+func (b *LocalFileBackend) TestConnection() error {
+ f := bytes.NewReader([]byte("testingwrite"))
+ if _, err := writeFileLocally(f, filepath.Join(b.directory, TestFilePath)); err != nil {
+ return errors.Wrap(err, "unable to write to the local filesystem storage")
+ }
+ os.Remove(filepath.Join(b.directory, TestFilePath))
+ mlog.Debug("Able to write files to local storage.")
+ return nil
+}
+
+func (b *LocalFileBackend) Reader(path string) (ReadCloseSeeker, error) {
+ f, err := os.Open(filepath.Join(b.directory, path))
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to open file %s", path)
+ }
+ return f, nil
+}
+
+func (b *LocalFileBackend) ReadFile(path string) ([]byte, error) {
+ f, err := ioutil.ReadFile(filepath.Join(b.directory, path))
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to read file %s", path)
+ }
+ return f, nil
+}
+
+func (b *LocalFileBackend) FileExists(path string) (bool, error) {
+ _, err := os.Stat(filepath.Join(b.directory, path))
+
+ if os.IsNotExist(err) {
+ return false, nil
+ }
+
+ if err != nil {
+ return false, errors.Wrapf(err, "unable to determine if file %s exists", path)
+ }
+ return true, nil
+}
+
+func (b *LocalFileBackend) FileSize(path string) (int64, error) {
+ info, err := os.Stat(filepath.Join(b.directory, path))
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to get file size for %s", path)
+ }
+ return info.Size(), nil
+}
+
+func (b *LocalFileBackend) FileModTime(path string) (time.Time, error) {
+ info, err := os.Stat(filepath.Join(b.directory, path))
+ if err != nil {
+ return time.Time{}, errors.Wrapf(err, "unable to get modification time for file %s", path)
+ }
+ return info.ModTime(), nil
+}
+
+func (b *LocalFileBackend) CopyFile(oldPath, newPath string) error {
+ if err := copyFile(filepath.Join(b.directory, oldPath), filepath.Join(b.directory, newPath)); err != nil {
+ return errors.Wrapf(err, "unable to copy file from %s to %s", oldPath, newPath)
+ }
+ return nil
+}
+
+func (b *LocalFileBackend) MoveFile(oldPath, newPath string) error {
+ if err := os.MkdirAll(filepath.Dir(filepath.Join(b.directory, newPath)), 0750); err != nil {
+ return errors.Wrapf(err, "unable to create the new destination directory %s", filepath.Dir(newPath))
+ }
+
+ if err := os.Rename(filepath.Join(b.directory, oldPath), filepath.Join(b.directory, newPath)); err != nil {
+ return errors.Wrapf(err, "unable to move the file to %s to the destination directory", newPath)
+ }
+
+ return nil
+}
+
+func (b *LocalFileBackend) WriteFile(fr io.Reader, path string) (int64, error) {
+ return writeFileLocally(fr, filepath.Join(b.directory, path))
+}
+
+func writeFileLocally(fr io.Reader, path string) (int64, error) {
+ if err := os.MkdirAll(filepath.Dir(path), 0750); err != nil {
+ directory, _ := filepath.Abs(filepath.Dir(path))
+ return 0, errors.Wrapf(err, "unable to create the directory %s for the file %s", directory, path)
+ }
+ fw, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to open the file %s to write the data", path)
+ }
+ defer fw.Close()
+ written, err := io.Copy(fw, fr)
+ if err != nil {
+ return written, errors.Wrapf(err, "unable to write the data to the file %s", path)
+ }
+ return written, nil
+}
+
+func (b *LocalFileBackend) AppendFile(fr io.Reader, path string) (int64, error) {
+ fp := filepath.Join(b.directory, path)
+ if _, err := os.Stat(fp); err != nil {
+ return 0, errors.Wrapf(err, "unable to find the file %s to append the data", path)
+ }
+ fw, err := os.OpenFile(fp, os.O_WRONLY|os.O_APPEND, 0600)
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to open the file %s to append the data", path)
+ }
+ defer fw.Close()
+ written, err := io.Copy(fw, fr)
+ if err != nil {
+ return written, errors.Wrapf(err, "unable to append the data to the file %s", path)
+ }
+ return written, nil
+}
+
+func (b *LocalFileBackend) RemoveFile(path string) error {
+ if err := os.Remove(filepath.Join(b.directory, path)); err != nil {
+ return errors.Wrapf(err, "unable to remove the file %s", path)
+ }
+ return nil
+}
+
+func (b *LocalFileBackend) ListDirectory(path string) ([]string, error) {
+ var paths []string
+ fileInfos, err := ioutil.ReadDir(filepath.Join(b.directory, path))
+ if err != nil {
+ if os.IsNotExist(err) {
+ return paths, nil
+ }
+ return nil, errors.Wrapf(err, "unable to list the directory %s", path)
+ }
+ for _, fileInfo := range fileInfos {
+ paths = append(paths, filepath.Join(path, fileInfo.Name()))
+ }
+ return paths, nil
+}
+
+func (b *LocalFileBackend) RemoveDirectory(path string) error {
+ if err := os.RemoveAll(filepath.Join(b.directory, path)); err != nil {
+ return errors.Wrapf(err, "unable to remove the directory %s", path)
+ }
+ return nil
+}
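
A short sketch of the copy/move semantics implemented above, reusing the same hypothetical local backend; the paths are illustrative.

package main

import (
	"fmt"
	"strings"

	"github.com/mattermost/mattermost-server/v5/shared/filestore"
)

func main() {
	backend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
		DriverName: "local",
		Directory:  "./data",
	})
	if err != nil {
		panic(err)
	}
	if _, err := backend.WriteFile(strings.NewReader("v1"), "a/original.txt"); err != nil {
		panic(err)
	}

	// CopyFile creates missing parent directories and overwrites the
	// destination, mirroring the copyFile helper above.
	if err := backend.CopyFile("a/original.txt", "b/copy.txt"); err != nil {
		panic(err)
	}

	// MoveFile renames the file after creating the destination directory.
	if err := backend.MoveFile("a/original.txt", "c/moved.txt"); err != nil {
		panic(err)
	}

	exists, _ := backend.FileExists("a/original.txt")
	fmt.Println("original still present:", exists) // false after the move
}
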
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3_overrides.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3_overrides.go
new file mode 100644
index 00000000..e7b29b98
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3_overrides.go
@@ -0,0 +1,56 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "context"
+ "net/http"
+
+ "github.com/minio/minio-go/v7/pkg/credentials"
+)
+
+// customTransport is used to point the request to a different server.
+// This is helpful in situations where a different service is handling AWS S3 requests
+// from multiple Mattermost applications, and the Mattermost service itself does not
+// have any S3 credentials.
+type customTransport struct {
+ base http.RoundTripper
+ host string
+ scheme string
+ client http.Client
+}
+
+// RoundTrip implements the http.RoundTripper interface.
+func (t *customTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+ // RoundTrippers should not modify the original request, so apply the
+ // scheme and host overrides to a clone and send that instead.
+ newReq := req.Clone(context.Background())
+ newReq.URL.Scheme = t.scheme
+ newReq.URL.Host = t.host
+ return t.client.Do(newReq)
+}
+
+// customProvider is a dummy credentials provider for the minio client to work
+// without actually providing credentials. This is needed with a custom transport
+// in cases where the minio client does not actually have credentials with itself,
+// rather needs responses from another entity.
+//
+// It satisfies the credentials.Provider interface.
+type customProvider struct {
+ isSignV2 bool
+}
+
+// Retrieve just returns empty credentials.
+func (cp customProvider) Retrieve() (credentials.Value, error) {
+ sign := credentials.SignatureV4
+ if cp.isSignV2 {
+ sign = credentials.SignatureV2
+ }
+ return credentials.Value{
+ SignerType: sign,
+ }, nil
+}
+
+// IsExpired always returns false.
+func (cp customProvider) IsExpired() bool { return false }
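
Since customTransport itself is unexported, the rerouting idea behind it is easiest to sketch in isolation. The standalone example below is an illustration only: rerouteTransport and the bifrost host name are hypothetical and not part of this package.

package main

import (
	"fmt"
	"net/http"
)

// rerouteTransport sends every request to a fixed host and scheme, cloning
// the request first so the caller's copy is not modified.
type rerouteTransport struct {
	host   string
	scheme string
	base   http.RoundTripper
}

func (t *rerouteTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	newReq := req.Clone(req.Context())
	newReq.URL.Scheme = t.scheme
	newReq.URL.Host = t.host
	return t.base.RoundTrip(newReq)
}

func main() {
	// Hypothetical service that fronts S3 on behalf of the application.
	client := &http.Client{Transport: &rerouteTransport{
		host:   "bifrost.internal:8080",
		scheme: "http",
		base:   http.DefaultTransport,
	}}
	resp, err := client.Get("https://s3.example.com/bucket/object")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
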
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3store.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3store.go
new file mode 100644
index 00000000..5d0bf38e
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/filestore/s3store.go
@@ -0,0 +1,442 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "context"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ s3 "github.com/minio/minio-go/v7"
+ "github.com/minio/minio-go/v7/pkg/credentials"
+ "github.com/minio/minio-go/v7/pkg/encrypt"
+ "github.com/pkg/errors"
+
+ "github.com/mattermost/mattermost-server/v5/shared/mlog"
+)
+
+// S3FileBackend contains all necessary information to communicate with
+// an AWS S3 compatible API backend.
+type S3FileBackend struct {
+ endpoint string
+ accessKey string
+ secretKey string
+ secure bool
+ signV2 bool
+ region string
+ bucket string
+ pathPrefix string
+ encrypt bool
+ trace bool
+ client *s3.Client
+}
+
+type S3FileBackendAuthError struct {
+ DetailedError string
+}
+
+// S3FileBackendNoBucketError is returned when testing a connection and no S3 bucket is found
+type S3FileBackendNoBucketError struct{}
+
+const (
+ // This is not exported by minio. See: https://github.com/minio/minio-go/issues/1339
+ bucketNotFound = "NoSuchBucket"
+)
+
+var (
+ imageExtensions = map[string]bool{".jpg": true, ".jpeg": true, ".gif": true, ".bmp": true, ".png": true, ".tiff": true, ".tif": true}
+ imageMimeTypes = map[string]string{".jpg": "image/jpeg", ".jpeg": "image/jpeg", ".gif": "image/gif", ".bmp": "image/bmp", ".png": "image/png", ".tiff": "image/tiff", ".tif": "image/tif"}
+)
+
+func isFileExtImage(ext string) bool {
+ ext = strings.ToLower(ext)
+ return imageExtensions[ext]
+}
+
+func getImageMimeType(ext string) string {
+ ext = strings.ToLower(ext)
+ if imageMimeTypes[ext] == "" {
+ return "image"
+ }
+ return imageMimeTypes[ext]
+}
+
+func (s *S3FileBackendAuthError) Error() string {
+ return s.DetailedError
+}
+
+func (s *S3FileBackendNoBucketError) Error() string {
+ return "no such bucket"
+}
+
+// NewS3FileBackend returns an instance of an S3FileBackend.
+func NewS3FileBackend(settings FileBackendSettings) (*S3FileBackend, error) {
+ backend := &S3FileBackend{
+ endpoint: settings.AmazonS3Endpoint,
+ accessKey: settings.AmazonS3AccessKeyId,
+ secretKey: settings.AmazonS3SecretAccessKey,
+ secure: settings.AmazonS3SSL,
+ signV2: settings.AmazonS3SignV2,
+ region: settings.AmazonS3Region,
+ bucket: settings.AmazonS3Bucket,
+ pathPrefix: settings.AmazonS3PathPrefix,
+ encrypt: settings.AmazonS3SSE,
+ trace: settings.AmazonS3Trace,
+ }
+ cli, err := backend.s3New()
+ if err != nil {
+ return nil, err
+ }
+ backend.client = cli
+ return backend, nil
+}
+
+// Similar to s3.New() but allows initialization of a signature v2 or signature v4 client.
+// If the signV2 input is false, the function always returns a signature v4 client.
+//
+// Additionally, this function takes a user-defined region which, if set,
+// disables automatic region lookup.
+func (b *S3FileBackend) s3New() (*s3.Client, error) {
+ var creds *credentials.Credentials
+
+ isCloud := os.Getenv("MM_CLOUD_FILESTORE_BIFROST") != ""
+ if isCloud {
+ creds = credentials.New(customProvider{isSignV2: b.signV2})
+ } else if b.accessKey == "" && b.secretKey == "" {
+ creds = credentials.NewIAM("")
+ } else if b.signV2 {
+ creds = credentials.NewStatic(b.accessKey, b.secretKey, "", credentials.SignatureV2)
+ } else {
+ creds = credentials.NewStatic(b.accessKey, b.secretKey, "", credentials.SignatureV4)
+ }
+
+ opts := s3.Options{
+ Creds: creds,
+ Secure: b.secure,
+ Region: b.region,
+ }
+
+ // If this is a cloud installation, we override the default transport.
+ if isCloud {
+ tr, err := s3.DefaultTransport(b.secure)
+ if err != nil {
+ return nil, err
+ }
+ scheme := "http"
+ if b.secure {
+ scheme = "https"
+ }
+ opts.Transport = &customTransport{
+ base: tr,
+ host: b.endpoint,
+ scheme: scheme,
+ }
+ }
+
+ s3Clnt, err := s3.New(b.endpoint, &opts)
+ if err != nil {
+ return nil, err
+ }
+
+ if b.trace {
+ s3Clnt.TraceOn(os.Stdout)
+ }
+
+ return s3Clnt, nil
+}
+
+func (b *S3FileBackend) TestConnection() error {
+ exists := true
+ var err error
+ // If a path prefix is present, we attempt to test the bucket by listing objects under the path
+ // and just checking the first response. This is because the BucketExists call is only at a bucket level
+ // and sometimes the user might only be allowed access to the specified path prefix.
+ if b.pathPrefix != "" {
+ obj := <-b.client.ListObjects(context.Background(), b.bucket, s3.ListObjectsOptions{Prefix: b.pathPrefix})
+ if obj.Err != nil {
+ typedErr := s3.ToErrorResponse(obj.Err)
+ if typedErr.Code != bucketNotFound {
+ return &S3FileBackendAuthError{DetailedError: "unable to list objects in the S3 bucket"}
+ }
+ exists = false
+ }
+ } else {
+ exists, err = b.client.BucketExists(context.Background(), b.bucket)
+ if err != nil {
+ return &S3FileBackendAuthError{DetailedError: "unable to check if the S3 bucket exists"}
+ }
+ }
+
+ if !exists {
+ return &S3FileBackendNoBucketError{}
+ }
+ mlog.Debug("Connection to S3 or minio is good. Bucket exists.")
+ return nil
+}
+
+func (b *S3FileBackend) MakeBucket() error {
+ err := b.client.MakeBucket(context.Background(), b.bucket, s3.MakeBucketOptions{Region: b.region})
+ if err != nil {
+ return errors.Wrap(err, "unable to create the s3 bucket")
+ }
+ return nil
+}
+
+// Caller must close the first return value
+func (b *S3FileBackend) Reader(path string) (ReadCloseSeeker, error) {
+ path = filepath.Join(b.pathPrefix, path)
+ minioObject, err := b.client.GetObject(context.Background(), b.bucket, path, s3.GetObjectOptions{})
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to open file %s", path)
+ }
+
+ return minioObject, nil
+}
+
+func (b *S3FileBackend) ReadFile(path string) ([]byte, error) {
+ path = filepath.Join(b.pathPrefix, path)
+ minioObject, err := b.client.GetObject(context.Background(), b.bucket, path, s3.GetObjectOptions{})
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to open file %s", path)
+ }
+
+ defer minioObject.Close()
+ f, err := ioutil.ReadAll(minioObject)
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to read file %s", path)
+ }
+ return f, nil
+}
+
+func (b *S3FileBackend) FileExists(path string) (bool, error) {
+ path = filepath.Join(b.pathPrefix, path)
+
+ _, err := b.client.StatObject(context.Background(), b.bucket, path, s3.StatObjectOptions{})
+ if err == nil {
+ return true, nil
+ }
+
+ var s3Err s3.ErrorResponse
+ if errors.As(err, &s3Err); s3Err.Code == "NoSuchKey" {
+ return false, nil
+ }
+
+ return false, errors.Wrapf(err, "unable to determine if file %s exists", path)
+}
+
+func (b *S3FileBackend) FileSize(path string) (int64, error) {
+ path = filepath.Join(b.pathPrefix, path)
+
+ info, err := b.client.StatObject(context.Background(), b.bucket, path, s3.StatObjectOptions{})
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to get file size for %s", path)
+ }
+
+ return info.Size, nil
+}
+
+func (b *S3FileBackend) FileModTime(path string) (time.Time, error) {
+ path = filepath.Join(b.pathPrefix, path)
+
+ info, err := b.client.StatObject(context.Background(), b.bucket, path, s3.StatObjectOptions{})
+ if err != nil {
+ return time.Time{}, errors.Wrapf(err, "unable to get modification time for file %s", path)
+ }
+
+ return info.LastModified, nil
+}
+
+func (b *S3FileBackend) CopyFile(oldPath, newPath string) error {
+ oldPath = filepath.Join(b.pathPrefix, oldPath)
+ newPath = filepath.Join(b.pathPrefix, newPath)
+ srcOpts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: oldPath,
+ Encryption: encrypt.NewSSE(),
+ }
+ dstOpts := s3.CopyDestOptions{
+ Bucket: b.bucket,
+ Object: newPath,
+ Encryption: encrypt.NewSSE(),
+ }
+ if _, err := b.client.CopyObject(context.Background(), dstOpts, srcOpts); err != nil {
+ return errors.Wrapf(err, "unable to copy file from %s to %s", oldPath, newPath)
+ }
+ return nil
+}
+
+func (b *S3FileBackend) MoveFile(oldPath, newPath string) error {
+ oldPath = filepath.Join(b.pathPrefix, oldPath)
+ newPath = filepath.Join(b.pathPrefix, newPath)
+ srcOpts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: oldPath,
+ Encryption: encrypt.NewSSE(),
+ }
+ dstOpts := s3.CopyDestOptions{
+ Bucket: b.bucket,
+ Object: newPath,
+ Encryption: encrypt.NewSSE(),
+ }
+
+ if _, err := b.client.CopyObject(context.Background(), dstOpts, srcOpts); err != nil {
+ return errors.Wrapf(err, "unable to copy the file to %s to the new destionation", newPath)
+ }
+
+ if err := b.client.RemoveObject(context.Background(), b.bucket, oldPath, s3.RemoveObjectOptions{}); err != nil {
+ return errors.Wrapf(err, "unable to remove the file old file %s", oldPath)
+ }
+
+ return nil
+}
+
+func (b *S3FileBackend) WriteFile(fr io.Reader, path string) (int64, error) {
+ var contentType string
+ path = filepath.Join(b.pathPrefix, path)
+ if ext := filepath.Ext(path); isFileExtImage(ext) {
+ contentType = getImageMimeType(ext)
+ } else {
+ contentType = "binary/octet-stream"
+ }
+
+ options := s3PutOptions(b.encrypt, contentType)
+ info, err := b.client.PutObject(context.Background(), b.bucket, path, fr, -1, options)
+ if err != nil {
+ return info.Size, errors.Wrapf(err, "unable to write the data to the file %s", path)
+ }
+
+ return info.Size, nil
+}
+
+func (b *S3FileBackend) AppendFile(fr io.Reader, path string) (int64, error) {
+ fp := filepath.Join(b.pathPrefix, path)
+ if _, err := b.client.StatObject(context.Background(), b.bucket, fp, s3.StatObjectOptions{}); err != nil {
+ return 0, errors.Wrapf(err, "unable to find the file %s to append the data", path)
+ }
+
+ var contentType string
+ if ext := filepath.Ext(fp); isFileExtImage(ext) {
+ contentType = getImageMimeType(ext)
+ } else {
+ contentType = "binary/octet-stream"
+ }
+
+ options := s3PutOptions(b.encrypt, contentType)
+ sse := options.ServerSideEncryption
+ partName := fp + ".part"
+ info, err := b.client.PutObject(context.Background(), b.bucket, partName, fr, -1, options)
+ defer b.client.RemoveObject(context.Background(), b.bucket, partName, s3.RemoveObjectOptions{})
+ if info.Size > 0 {
+ src1Opts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: fp,
+ }
+ src2Opts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: partName,
+ }
+ dstOpts := s3.CopyDestOptions{
+ Bucket: b.bucket,
+ Object: fp,
+ Encryption: sse,
+ }
+ _, err = b.client.ComposeObject(context.Background(), dstOpts, src1Opts, src2Opts)
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to append the data to the file %s", path)
+ }
+ return info.Size, nil
+ }
+
+ return 0, errors.Wrapf(err, "unable to append the data to the file %s", path)
+}
+
+func (b *S3FileBackend) RemoveFile(path string) error {
+ path = filepath.Join(b.pathPrefix, path)
+ if err := b.client.RemoveObject(context.Background(), b.bucket, path, s3.RemoveObjectOptions{}); err != nil {
+ return errors.Wrapf(err, "unable to remove the file %s", path)
+ }
+
+ return nil
+}
+
+func getPathsFromObjectInfos(in <-chan s3.ObjectInfo) <-chan s3.ObjectInfo {
+ out := make(chan s3.ObjectInfo, 1)
+
+ go func() {
+ defer close(out)
+
+ for {
+ info, ok := <-in
+ // Stop forwarding once the input channel has been closed.
+ if !ok {
+ break
+ }
+
+ out <- info
+ }
+ }()
+
+ return out
+}
+
+func (b *S3FileBackend) ListDirectory(path string) ([]string, error) {
+ path = filepath.Join(b.pathPrefix, path)
+ if !strings.HasSuffix(path, "/") && path != "" {
+ // s3Clnt returns only the path itself when "/" is not present
+ // appending "/" to make it consistent across all filestores
+ path = path + "/"
+ }
+
+ opts := s3.ListObjectsOptions{
+ Prefix: path,
+ }
+ var paths []string
+ for object := range b.client.ListObjects(context.Background(), b.bucket, opts) {
+ if object.Err != nil {
+ return nil, errors.Wrapf(object.Err, "unable to list the directory %s", path)
+ }
+ // We strip the path prefix that gets applied,
+ // so that it remains transparent to the application.
+ object.Key = strings.TrimPrefix(object.Key, b.pathPrefix)
+ trimmed := strings.Trim(object.Key, "/")
+ if trimmed != "" {
+ paths = append(paths, trimmed)
+ }
+ }
+
+ return paths, nil
+}
+
+func (b *S3FileBackend) RemoveDirectory(path string) error {
+ opts := s3.ListObjectsOptions{
+ Prefix: filepath.Join(b.pathPrefix, path),
+ Recursive: true,
+ }
+ list := b.client.ListObjects(context.Background(), b.bucket, opts)
+ objectsCh := b.client.RemoveObjects(context.Background(), b.bucket, getPathsFromObjectInfos(list), s3.RemoveObjectsOptions{})
+ for err := range objectsCh {
+ if err.Err != nil {
+ return errors.Wrapf(err.Err, "unable to remove the directory %s", path)
+ }
+ }
+
+ return nil
+}
+
+func s3PutOptions(encrypted bool, contentType string) s3.PutObjectOptions {
+ options := s3.PutObjectOptions{}
+ if encrypted {
+ options.ServerSideEncryption = encrypt.NewSSE()
+ }
+ options.ContentType = contentType
+ // We set the part size to the minimum allowed value of 5MB
+ // to avoid an excessive allocation in minio.PutObject implementation.
+ options.PartSize = 1024 * 1024 * 5
+
+ return options
+}
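
For context, here is a configuration sketch for the S3 driver; the credentials, bucket, and endpoint below are placeholders chosen for illustration, not defaults of this package.

package main

import (
	"fmt"

	"github.com/mattermost/mattermost-server/v5/shared/filestore"
)

func main() {
	settings := filestore.FileBackendSettings{
		DriverName:              "amazons3",
		AmazonS3AccessKeyId:     "AKIA-EXAMPLE",   // placeholder
		AmazonS3SecretAccessKey: "example-secret", // placeholder
		AmazonS3Bucket:          "example-bucket",
		AmazonS3Region:          "us-east-1",
		AmazonS3Endpoint:        "s3.amazonaws.com",
		AmazonS3SSL:             true,
	}
	if err := settings.CheckMandatoryS3Fields(); err != nil {
		fmt.Println("invalid settings:", err)
		return
	}

	backend, err := filestore.NewFileBackend(settings)
	if err != nil {
		fmt.Println("setup failed:", err)
		return
	}

	// TestConnection verifies bucket (or path prefix) access before use.
	if err := backend.TestConnection(); err != nil {
		fmt.Println("connection test failed:", err)
		return
	}
	fmt.Println("S3 backend ready")
}
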
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/i18n/i18n.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/i18n/i18n.go
new file mode 100644
index 00000000..a5de30bb
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/i18n/i18n.go
@@ -0,0 +1,185 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package i18n
+
+import (
+ "fmt"
+ "html/template"
+ "io/ioutil"
+ "net/http"
+ "path/filepath"
+ "reflect"
+ "strings"
+
+ "github.com/mattermost/go-i18n/i18n"
+
+ "github.com/mattermost/mattermost-server/v5/shared/mlog"
+)
+
+const defaultLocale = "en"
+
+// TranslateFunc is the type of the translate functions
+type TranslateFunc func(translationID string, args ...interface{}) string
+
+// T is the translate function using the default server language as fallback language
+var T TranslateFunc
+
+// TDefault is the translate function using english as fallback language
+var TDefault TranslateFunc
+
+var locales map[string]string = make(map[string]string)
+var defaultServerLocale string
+var defaultClientLocale string
+
+// TranslationsPreInit loads translations from the filesystem if they are not
+// already loaded, and assigns the English fallback while the server config loads.
+func TranslationsPreInit(translationsDir string) error {
+ if T != nil {
+ return nil
+ }
+
+ // Set T even if we fail to load the translations. Lots of shutdown handling code will
+ // segfault trying to handle the error, and the untranslated IDs are strictly better.
+ T = tfuncWithFallback(defaultLocale)
+ TDefault = tfuncWithFallback(defaultLocale)
+
+ return initTranslationsWithDir(translationsDir)
+}
+
+// InitTranslations sets the defaults configured in the server and initializes
+// the T function using the server default as the fallback language.
+func InitTranslations(serverLocale, clientLocale string) error {
+ defaultServerLocale = serverLocale
+ defaultClientLocale = clientLocale
+
+ var err error
+ T, err = getTranslationsBySystemLocale()
+ return err
+}
+
+func initTranslationsWithDir(dir string) error {
+ files, _ := ioutil.ReadDir(dir)
+ for _, f := range files {
+ if filepath.Ext(f.Name()) == ".json" {
+ filename := f.Name()
+ locales[strings.Split(filename, ".")[0]] = filepath.Join(dir, filename)
+
+ if err := i18n.LoadTranslationFile(filepath.Join(dir, filename)); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func getTranslationsBySystemLocale() (TranslateFunc, error) {
+ locale := defaultServerLocale
+ if _, ok := locales[locale]; !ok {
+ mlog.Warn("Failed to load system translations for", mlog.String("locale", locale), mlog.String("attempting to fall back to default locale", defaultLocale))
+ locale = defaultLocale
+ }
+
+ if locales[locale] == "" {
+ return nil, fmt.Errorf("failed to load system translations for '%v'", defaultLocale)
+ }
+
+ translations := tfuncWithFallback(locale)
+ if translations == nil {
+ return nil, fmt.Errorf("failed to load system translations")
+ }
+
+ mlog.Info("Loaded system translations", mlog.String("for locale", locale), mlog.String("from locale", locales[locale]))
+ return translations, nil
+}
+
+// GetUserTranslations gets the translation function for a specific locale.
+func GetUserTranslations(locale string) TranslateFunc {
+ if _, ok := locales[locale]; !ok {
+ locale = defaultLocale
+ }
+
+ translations := tfuncWithFallback(locale)
+ return translations
+}
+
+// GetTranslationsAndLocaleFromRequest returns the translation function and the
+// locale based on the request headers.
+func GetTranslationsAndLocaleFromRequest(r *http.Request) (TranslateFunc, string) {
+ // This is for checking against locales like pt_BR or zh_CN
+ headerLocaleFull := strings.Split(r.Header.Get("Accept-Language"), ",")[0]
+ // This is for checking against locales like en, es
+ headerLocale := strings.Split(strings.Split(r.Header.Get("Accept-Language"), ",")[0], "-")[0]
+ defaultLocale := defaultClientLocale
+ if locales[headerLocaleFull] != "" {
+ translations := tfuncWithFallback(headerLocaleFull)
+ return translations, headerLocaleFull
+ } else if locales[headerLocale] != "" {
+ translations := tfuncWithFallback(headerLocale)
+ return translations, headerLocale
+ } else if locales[defaultLocale] != "" {
+ translations := tfuncWithFallback(defaultLocale)
+ return translations, headerLocale
+ }
+
+ translations := tfuncWithFallback(defaultLocale)
+ return translations, defaultLocale
+}
+
+// GetSupportedLocales returns a map of locale codes to the file paths of
+// their translations.
+func GetSupportedLocales() map[string]string {
+ return locales
+}
+
+func tfuncWithFallback(pref string) TranslateFunc {
+ t, _ := i18n.Tfunc(pref)
+ return func(translationID string, args ...interface{}) string {
+ if translated := t(translationID, args...); translated != translationID {
+ return translated
+ }
+
+ t, _ := i18n.Tfunc(defaultLocale)
+ return t(translationID, args...)
+ }
+}
+
+// TranslateAsHTML translates the provided translationID and returns a
+// template.HTML object.
+func TranslateAsHTML(t TranslateFunc, translationID string, args map[string]interface{}) template.HTML {
+ message := t(translationID, escapeForHTML(args))
+ message = strings.Replace(message, "[[", "<strong>", -1)
+ message = strings.Replace(message, "]]", "</strong>", -1)
+ return template.HTML(message)
+}
+
+func escapeForHTML(arg interface{}) interface{} {
+ switch typedArg := arg.(type) {
+ case string:
+ return template.HTMLEscapeString(typedArg)
+ case *string:
+ return template.HTMLEscapeString(*typedArg)
+ case map[string]interface{}:
+ safeArg := make(map[string]interface{}, len(typedArg))
+ for key, value := range typedArg {
+ safeArg[key] = escapeForHTML(value)
+ }
+ return safeArg
+ default:
+ mlog.Warn(
+ "Unable to escape value for HTML template",
+ mlog.Any("html_template", arg),
+ mlog.String("template_type", reflect.ValueOf(arg).Type().String()),
+ )
+ return ""
+ }
+}
+
+// IdentityTfunc returns a translation function that doesn't translate and
+// simply returns the same translation ID.
+func IdentityTfunc() TranslateFunc {
+ return func(translationID string, args ...interface{}) string {
+ return translationID
+ }
+}
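
A minimal sketch of the initialization flow above; the ./i18n directory and the translation ID are hypothetical, and error handling is abbreviated.

package main

import (
	"fmt"

	"github.com/mattermost/mattermost-server/v5/shared/i18n"
)

func main() {
	// Load the *.json translation files and install the English fallback T.
	if err := i18n.TranslationsPreInit("./i18n"); err != nil {
		panic(err)
	}

	// Configure the server and client default locales, then resolve a
	// per-user translation function.
	if err := i18n.InitTranslations("en", "en"); err != nil {
		panic(err)
	}
	t := i18n.GetUserTranslations("es")

	// Unknown IDs fall back to the default locale via tfuncWithFallback.
	fmt.Println(t("some.translation.id"))
}
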
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/autolink.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/autolink.go
new file mode 100644
index 00000000..2eb05d90
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/autolink.go
@@ -0,0 +1,255 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "regexp"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// Based off of extensions/autolink.c from https://github.com/github/cmark
+
+var (
+ DefaultURLSchemes = []string{"http", "https", "ftp", "mailto", "tel"}
+ wwwAutoLinkRegex = regexp.MustCompile(`^www\d{0,3}\.`)
+)
+
+// Given a string with a w at the given position, tries to parse and return a range containing a www link,
+// if one exists. If the text at the given position isn't a link, returns an empty range and false. Equivalent
+// to www_match from the reference code.
+func parseWWWAutolink(data string, position int) (Range, bool) {
+ // Check that this isn't part of another word
+ if position > 1 {
+ prevChar := data[position-1]
+
+ if !isWhitespaceByte(prevChar) && !isAllowedBeforeWWWLink(prevChar) {
+ return Range{}, false
+ }
+ }
+
+ // Check that this starts with www
+ if len(data)-position < 4 || !wwwAutoLinkRegex.MatchString(data[position:]) {
+ return Range{}, false
+ }
+
+ end := checkDomain(data[position:], false)
+ if end == 0 {
+ return Range{}, false
+ }
+
+ end += position
+
+ // Grab all text until the end of the string or the next whitespace character
+ for end < len(data) && !isWhitespaceByte(data[end]) {
+ end += 1
+ }
+
+ // Trim trailing punctuation
+ end = trimTrailingCharactersFromLink(data, position, end)
+ if position == end {
+ return Range{}, false
+ }
+
+ return Range{position, end}, true
+}
+
+func isAllowedBeforeWWWLink(c byte) bool {
+ switch c {
+ case '*', '_', '~', ')':
+ return true
+ }
+ return false
+}
+
+// Given a string with a : at the given position, tries to parse and return a range containing a URL,
+// if one exists. If the text around the given position isn't a link, returns an empty range and false.
+// Equivalent to url_match from the reference code.
+func parseURLAutolink(data string, position int) (Range, bool) {
+ // Check that a :// exists. This doesn't match the clients that treat the slashes as optional.
+ if len(data)-position < 4 || data[position+1] != '/' || data[position+2] != '/' {
+ return Range{}, false
+ }
+
+ start := position - 1
+ for start > 0 && isAlphanumericByte(data[start-1]) {
+ start -= 1
+ }
+
+ if start < 0 || position >= len(data) {
+ return Range{}, false
+ }
+
+ // Ensure that the URL scheme is allowed and that at least one character after the scheme is valid.
+ scheme := data[start:position]
+ if !isSchemeAllowed(scheme) || !isValidHostCharacter(data[position+3:]) {
+ return Range{}, false
+ }
+
+ end := checkDomain(data[position+3:], true)
+ if end == 0 {
+ return Range{}, false
+ }
+
+ end += position
+
+ // Grab all text until the end of the string or the next whitespace character
+ for end < len(data) && !isWhitespaceByte(data[end]) {
+ end += 1
+ }
+
+ // Trim trailing punctuation
+ end = trimTrailingCharactersFromLink(data, start, end)
+ if start == end {
+ return Range{}, false
+ }
+
+ return Range{start, end}, true
+}
+
+func isSchemeAllowed(scheme string) bool {
+ // Note that this doesn't support the custom URL schemes implemented by the client
+ for _, allowed := range DefaultURLSchemes {
+ if strings.EqualFold(allowed, scheme) {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Given a string starting with a URL, returns the number of valid characters that make up the URL's domain.
+// Returns 0 if the string doesn't start with a domain name. allowShort determines whether or not the domain
+// needs to contain a period to be considered valid. Equivalent to check_domain from the reference code.
+func checkDomain(data string, allowShort bool) int {
+ foundUnderscore := false
+ foundPeriod := false
+
+ i := 1
+ for ; i < len(data)-1; i++ {
+ if data[i] == '_' {
+ foundUnderscore = true
+ break
+ } else if data[i] == '.' {
+ foundPeriod = true
+ } else if !isValidHostCharacter(data[i:]) && data[i] != '-' {
+ break
+ }
+ }
+
+ if foundUnderscore {
+ return 0
+ }
+
+ if allowShort {
+ // If allowShort is set, accept any string of valid domain characters
+ return i
+ }
+
+ // If allowShort isn't set, a valid domain just requires at least a single period. Note that this
+ // logic isn't entirely necessary because we already know the string starts with "www." when
+ // this is called from parseWWWAutolink
+ if foundPeriod {
+ return i
+ }
+ return 0
+}
+
+// Returns true if the provided link starts with a valid character for a domain name. Equivalent to
+// is_valid_hostchar from the reference code.
+func isValidHostCharacter(link string) bool {
+ c, _ := utf8.DecodeRuneInString(link)
+ if c == utf8.RuneError {
+ return false
+ }
+
+ return !unicode.IsSpace(c) && !unicode.IsPunct(c)
+}
+
+// Removes any trailing characters such as punctuation or stray brackets that shouldn't be part of the link.
+// Returns a new end position for the link. Equivalent to autolink_delim from the reference code.
+func trimTrailingCharactersFromLink(markdown string, start int, end int) int {
+ runes := []rune(markdown[start:end])
+ linkEnd := len(runes)
+
+ // Cut off the link before an open angle bracket if it contains one
+ for i, c := range runes {
+ if c == '<' {
+ linkEnd = i
+ break
+ }
+ }
+
+ for linkEnd > 0 {
+ c := runes[linkEnd-1]
+
+ if !canEndAutolink(c) {
+ // Trim trailing quotes, periods, etc
+ linkEnd = linkEnd - 1
+ } else if c == ';' {
+ // Trim a trailing HTML entity
+ newEnd := linkEnd - 2
+
+ for newEnd > 0 && ((runes[newEnd] >= 'a' && runes[newEnd] <= 'z') || (runes[newEnd] >= 'A' && runes[newEnd] <= 'Z')) {
+ newEnd -= 1
+ }
+
+ if newEnd < linkEnd-2 && runes[newEnd] == '&' {
+ linkEnd = newEnd
+ } else {
+ // This isn't actually an HTML entity, so just trim the semicolon
+ linkEnd = linkEnd - 1
+ }
+ } else if c == ')' {
+ // Only allow an autolink ending with a bracket if that bracket is part of a matching pair of brackets.
+ // If there are more closing brackets than opening ones, remove the extra bracket
+
+ numClosing := 0
+ numOpening := 0
+
+ // Examples (input text => output linked portion):
+ //
+ // http://www.pokemon.com/Pikachu_(Electric)
+ // => http://www.pokemon.com/Pikachu_(Electric)
+ //
+ // http://www.pokemon.com/Pikachu_((Electric)
+ // => http://www.pokemon.com/Pikachu_((Electric)
+ //
+ // http://www.pokemon.com/Pikachu_(Electric))
+ // => http://www.pokemon.com/Pikachu_(Electric)
+ //
+ // http://www.pokemon.com/Pikachu_((Electric))
+ // => http://www.pokemon.com/Pikachu_((Electric))
+
+ for i := 0; i < linkEnd; i++ {
+ if runes[i] == '(' {
+ numOpening += 1
+ } else if runes[i] == ')' {
+ numClosing += 1
+ }
+ }
+
+ if numClosing <= numOpening {
+ // There's fewer or equal closing brackets, so we've found the end of the link
+ break
+ }
+
+ linkEnd -= 1
+ } else {
+ // There's no special characters at the end of the link, so we're at the end
+ break
+ }
+ }
+
+ return start + len(string(runes[:linkEnd]))
+}
+
+func canEndAutolink(c rune) bool {
+ switch c {
+ case '?', '!', '.', ',', ':', '*', '_', '~', '\'', '"':
+ return false
+ }
+ return true
+}
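
Since these helpers are unexported, the trimming behavior is easiest to show from inside the package, e.g. in a _test.go file. A sketch matching the Pikachu examples in the comments above:

package markdown

import "fmt"

// Example_trimTrailingCharactersFromLink demonstrates how an unmatched
// closing bracket is dropped from the end of an autolinked range.
func Example_trimTrailingCharactersFromLink() {
	input := "http://www.pokemon.com/Pikachu_(Electric))"
	end := trimTrailingCharactersFromLink(input, 0, len(input))
	fmt.Println(input[:end])
	// Output: http://www.pokemon.com/Pikachu_(Electric)
}
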
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/block_quote.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/block_quote.go
new file mode 100644
index 00000000..5cf66d10
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/block_quote.go
@@ -0,0 +1,62 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+type BlockQuote struct {
+ blockBase
+ markdown string
+
+ Children []Block
+}
+
+func (b *BlockQuote) Continuation(indentation int, r Range) *continuation {
+ if indentation > 3 {
+ return nil
+ }
+ s := b.markdown[r.Position:r.End]
+ if s == "" || s[0] != '>' {
+ return nil
+ }
+ remaining := Range{r.Position + 1, r.End}
+ indentation, indentationBytes := countIndentation(b.markdown, remaining)
+ if indentation > 0 {
+ indentation--
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: Range{remaining.Position + indentationBytes, remaining.End},
+ }
+}
+
+func (b *BlockQuote) AddChild(openBlocks []Block) []Block {
+ b.Children = append(b.Children, openBlocks[0])
+ return openBlocks
+}
+
+func blockQuoteStart(markdown string, indent int, r Range) []Block {
+ if indent > 3 {
+ return nil
+ }
+ s := markdown[r.Position:r.End]
+ if s == "" || s[0] != '>' {
+ return nil
+ }
+
+ block := &BlockQuote{
+ markdown: markdown,
+ }
+ r.Position++
+ if len(s) > 1 && s[1] == ' ' {
+ r.Position++
+ }
+
+ indent, bytes := countIndentation(markdown, r)
+
+ ret := []Block{block}
+ if descendants := blockStartOrParagraph(markdown, indent, Range{r.Position + bytes, r.End}, nil, nil); descendants != nil {
+ block.Children = append(block.Children, descendants[0])
+ ret = append(ret, descendants...)
+ }
+ return ret
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/blocks.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/blocks.go
new file mode 100644
index 00000000..fe9e272f
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/blocks.go
@@ -0,0 +1,154 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type continuation struct {
+ Indentation int
+ Remaining Range
+}
+
+type Block interface {
+ Continuation(indentation int, r Range) *continuation
+ AddLine(indentation int, r Range) bool
+ Close()
+ AllowsBlockStarts() bool
+ HasTrailingBlankLine() bool
+}
+
+type blockBase struct{}
+
+func (*blockBase) AddLine(indentation int, r Range) bool { return false }
+func (*blockBase) Close() {}
+func (*blockBase) AllowsBlockStarts() bool { return true }
+func (*blockBase) HasTrailingBlankLine() bool { return false }
+
+type ContainerBlock interface {
+ Block
+ AddChild(openBlocks []Block) []Block
+}
+
+type Range struct {
+ Position int
+ End int
+}
+
+func closeBlocks(blocks []Block, referenceDefinitions []*ReferenceDefinition) []*ReferenceDefinition {
+ for _, block := range blocks {
+ block.Close()
+ if p, ok := block.(*Paragraph); ok && len(p.ReferenceDefinitions) > 0 {
+ referenceDefinitions = append(referenceDefinitions, p.ReferenceDefinitions...)
+ }
+ }
+ return referenceDefinitions
+}
+
+func ParseBlocks(markdown string, lines []Line) (*Document, []*ReferenceDefinition) {
+ document := &Document{}
+ var referenceDefinitions []*ReferenceDefinition
+
+ openBlocks := []Block{document}
+
+ for _, line := range lines {
+ r := line.Range
+ lastMatchIndex := 0
+
+ indentation, indentationBytes := countIndentation(markdown, r)
+ r = Range{r.Position + indentationBytes, r.End}
+
+ for i, block := range openBlocks {
+ if continuation := block.Continuation(indentation, r); continuation != nil {
+ indentation = continuation.Indentation
+ r = continuation.Remaining
+ additionalIndentation, additionalIndentationBytes := countIndentation(markdown, r)
+ r = Range{r.Position + additionalIndentationBytes, r.End}
+ indentation += additionalIndentation
+ lastMatchIndex = i
+ } else {
+ break
+ }
+ }
+
+ if openBlocks[lastMatchIndex].AllowsBlockStarts() {
+ if newBlocks := blockStart(markdown, indentation, r, openBlocks[:lastMatchIndex+1], openBlocks[lastMatchIndex+1:]); newBlocks != nil {
+ didAdd := false
+ for i := lastMatchIndex; i >= 0; i-- {
+ if container, ok := openBlocks[i].(ContainerBlock); ok {
+ if addedBlocks := container.AddChild(newBlocks); addedBlocks != nil {
+ referenceDefinitions = closeBlocks(openBlocks[i+1:], referenceDefinitions)
+ openBlocks = openBlocks[:i+1]
+ openBlocks = append(openBlocks, addedBlocks...)
+ didAdd = true
+ break
+ }
+ }
+ }
+ if didAdd {
+ continue
+ }
+ }
+ }
+
+ isBlank := strings.TrimSpace(markdown[r.Position:r.End]) == ""
+ if paragraph, ok := openBlocks[len(openBlocks)-1].(*Paragraph); ok && !isBlank {
+ paragraph.Text = append(paragraph.Text, r)
+ continue
+ }
+
+ referenceDefinitions = closeBlocks(openBlocks[lastMatchIndex+1:], referenceDefinitions)
+ openBlocks = openBlocks[:lastMatchIndex+1]
+
+ if openBlocks[lastMatchIndex].AddLine(indentation, r) {
+ continue
+ }
+
+ if paragraph := newParagraph(markdown, r); paragraph != nil {
+ for i := lastMatchIndex; i >= 0; i-- {
+ if container, ok := openBlocks[i].(ContainerBlock); ok {
+ if newBlocks := container.AddChild([]Block{paragraph}); newBlocks != nil {
+ referenceDefinitions = closeBlocks(openBlocks[i+1:], referenceDefinitions)
+ openBlocks = openBlocks[:i+1]
+ openBlocks = append(openBlocks, newBlocks...)
+ break
+ }
+ }
+ }
+ }
+ }
+
+ referenceDefinitions = closeBlocks(openBlocks, referenceDefinitions)
+
+ return document, referenceDefinitions
+}
+
+func blockStart(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ if r.Position >= r.End {
+ return nil
+ }
+
+ if start := blockQuoteStart(markdown, indentation, r); start != nil {
+ return start
+ } else if start := listStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil {
+ return start
+ } else if start := indentedCodeStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil {
+ return start
+ } else if start := fencedCodeStart(markdown, indentation, r); start != nil {
+ return start
+ }
+
+ return nil
+}
+
+func blockStartOrParagraph(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ if start := blockStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil {
+ return start
+ }
+ if paragraph := newParagraph(markdown, r); paragraph != nil {
+ return []Block{paragraph}
+ }
+ return nil
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/document.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/document.go
new file mode 100644
index 00000000..306b93da
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/document.go
@@ -0,0 +1,22 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+type Document struct {
+ blockBase
+
+ Children []Block
+}
+
+func (b *Document) Continuation(indentation int, r Range) *continuation {
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *Document) AddChild(openBlocks []Block) []Block {
+ b.Children = append(b.Children, openBlocks[0])
+ return openBlocks
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/fenced_code.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/fenced_code.go
new file mode 100644
index 00000000..c8caad55
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/fenced_code.go
@@ -0,0 +1,112 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type FencedCodeLine struct {
+ Indentation int
+ Range Range
+}
+
+type FencedCode struct {
+ blockBase
+ markdown string
+ didSeeClosingFence bool
+
+ Indentation int
+ OpeningFence Range
+ RawInfo Range
+ RawCode []FencedCodeLine
+}
+
+func (b *FencedCode) Code() (result string) {
+ for _, code := range b.RawCode {
+ result += strings.Repeat(" ", code.Indentation) + b.markdown[code.Range.Position:code.Range.End]
+ }
+ return
+}
+
+func (b *FencedCode) Info() string {
+ return Unescape(b.markdown[b.RawInfo.Position:b.RawInfo.End])
+}
+
+func (b *FencedCode) Continuation(indentation int, r Range) *continuation {
+ if b.didSeeClosingFence {
+ return nil
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *FencedCode) AddLine(indentation int, r Range) bool {
+ s := b.markdown[r.Position:r.End]
+ if indentation <= 3 && strings.HasPrefix(s, b.markdown[b.OpeningFence.Position:b.OpeningFence.End]) {
+ suffix := strings.TrimSpace(s[b.OpeningFence.End-b.OpeningFence.Position:])
+ isClosingFence := true
+ for _, c := range suffix {
+ if c != rune(s[0]) {
+ isClosingFence = false
+ break
+ }
+ }
+ if isClosingFence {
+ b.didSeeClosingFence = true
+ return true
+ }
+ }
+
+ if indentation >= b.Indentation {
+ indentation -= b.Indentation
+ } else {
+ indentation = 0
+ }
+
+ b.RawCode = append(b.RawCode, FencedCodeLine{
+ Indentation: indentation,
+ Range: r,
+ })
+ return true
+}
+
+func (b *FencedCode) AllowsBlockStarts() bool {
+ return false
+}
+
+func fencedCodeStart(markdown string, indentation int, r Range) []Block {
+ s := markdown[r.Position:r.End]
+
+ if !strings.HasPrefix(s, "```") && !strings.HasPrefix(s, "~~~") {
+ return nil
+ }
+
+ fenceCharacter := rune(s[0])
+ fenceLength := 3
+ for _, c := range s[3:] {
+ if c == fenceCharacter {
+ fenceLength++
+ } else {
+ break
+ }
+ }
+
+ for i := r.Position + fenceLength; i < r.End; i++ {
+ if markdown[i] == '`' {
+ return nil
+ }
+ }
+
+ return []Block{
+ &FencedCode{
+ markdown: markdown,
+ Indentation: indentation,
+ RawInfo: trimRightSpace(markdown, Range{r.Position + fenceLength, r.End}),
+ OpeningFence: Range{r.Position, r.Position + fenceLength},
+ },
+ }
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html.go
new file mode 100644
index 00000000..52583074
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html.go
@@ -0,0 +1,192 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "fmt"
+ "strings"
+)
+
+var htmlEscaper = strings.NewReplacer(
+ `&`, "&amp;",
+ `<`, "&lt;",
+ `>`, "&gt;",
+ `"`, "&quot;",
+)
+
+// RenderHTML produces HTML with the same behavior as the example renderer used in the CommonMark
+// reference materials except for one slight difference: for brevity, no unnecessary whitespace is
+// inserted between elements. The output is not defined by the CommonMark spec, and it exists
+// primarily as an aid in testing.
+func RenderHTML(markdown string) string {
+ return RenderBlockHTML(Parse(markdown))
+}
+
+func RenderBlockHTML(block Block, referenceDefinitions []*ReferenceDefinition) (result string) {
+ return renderBlockHTML(block, referenceDefinitions, false)
+}
+
+func renderBlockHTML(block Block, referenceDefinitions []*ReferenceDefinition, isTightList bool) (result string) {
+ switch v := block.(type) {
+ case *Document:
+ for _, block := range v.Children {
+ result += RenderBlockHTML(block, referenceDefinitions)
+ }
+ case *Paragraph:
+ if len(v.Text) == 0 {
+ return
+ }
+ if !isTightList {
+ result += "<p>"
+ }
+ for _, inline := range v.ParseInlines(referenceDefinitions) {
+ result += RenderInlineHTML(inline)
+ }
+ if !isTightList {
+ result += "</p>"
+ }
+ case *List:
+ if v.IsOrdered {
+ if v.OrderedStart != 1 {
+ result += fmt.Sprintf(`<ol start="%v">`, v.OrderedStart)
+ } else {
+ result += "<ol>"
+ }
+ } else {
+ result += "<ul>"
+ }
+ for _, block := range v.Children {
+ result += renderBlockHTML(block, referenceDefinitions, !v.IsLoose)
+ }
+ if v.IsOrdered {
+ result += "</ol>"
+ } else {
+ result += "</ul>"
+ }
+ case *ListItem:
+ result += "<li>"
+ for _, block := range v.Children {
+ result += renderBlockHTML(block, referenceDefinitions, isTightList)
+ }
+ result += "</li>"
+ case *BlockQuote:
+ result += "<blockquote>"
+ for _, block := range v.Children {
+ result += RenderBlockHTML(block, referenceDefinitions)
+ }
+ result += "</blockquote>"
+ case *FencedCode:
+ if info := v.Info(); info != "" {
+ language := strings.Fields(info)[0]
+ result += `<pre><code class="language-` + htmlEscaper.Replace(language) + `">`
+ } else {
+ result += "<pre><code>"
+ }
+ result += htmlEscaper.Replace(v.Code()) + "</code></pre>"
+ case *IndentedCode:
+ result += "<pre><code>" + htmlEscaper.Replace(v.Code()) + "</code></pre>"
+ default:
+ panic(fmt.Sprintf("missing case for type %T", v))
+ }
+ return
+}
+
+func escapeURL(url string) (result string) {
+ for i := 0; i < len(url); {
+ switch b := url[i]; b {
+ case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '-', '_', '.', '!', '~', '*', '\'', '(', ')', '#':
+ result += string(b)
+ i++
+ default:
+ if b == '%' && i+2 < len(url) && isHexByte(url[i+1]) && isHexByte(url[i+2]) {
+ result += url[i : i+3]
+ i += 3
+ } else if (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9') {
+ result += string(b)
+ i++
+ } else {
+ result += fmt.Sprintf("%%%02X", b)
+ i++
+ }
+ }
+ }
+ return
+}
+
+func RenderInlineHTML(inline Inline) (result string) {
+ switch v := inline.(type) {
+ case *Text:
+ return htmlEscaper.Replace(v.Text)
+ case *HardLineBreak:
+ return "<br />"
+ case *SoftLineBreak:
+ return "\n"
+ case *CodeSpan:
+ return "<code>" + htmlEscaper.Replace(v.Code) + "</code>"
+ case *InlineImage:
+ result += `<img src="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `" alt="` + htmlEscaper.Replace(renderImageAltText(v.Children)) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += ` />`
+ case *ReferenceImage:
+ result += `<img src="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `" alt="` + htmlEscaper.Replace(renderImageAltText(v.Children)) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += ` />`
+ case *InlineLink:
+ result += `<a href="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += `>`
+ for _, inline := range v.Children {
+ result += RenderInlineHTML(inline)
+ }
+ result += "</a>"
+ case *ReferenceLink:
+ result += `<a href="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += `>`
+ for _, inline := range v.Children {
+ result += RenderInlineHTML(inline)
+ }
+ result += "</a>"
+ case *Autolink:
+ result += `<a href="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `">`
+ for _, inline := range v.Children {
+ result += RenderInlineHTML(inline)
+ }
+ result += "</a>"
+ default:
+ panic(fmt.Sprintf("missing case for type %T", v))
+ }
+ return
+}
+
+func renderImageAltText(children []Inline) (result string) {
+ for _, inline := range children {
+ result += renderImageChildAltText(inline)
+ }
+ return
+}
+
+func renderImageChildAltText(inline Inline) (result string) {
+ switch v := inline.(type) {
+ case *Text:
+ return v.Text
+ case *InlineImage:
+ for _, inline := range v.Children {
+ result += renderImageChildAltText(inline)
+ }
+ case *InlineLink:
+ for _, inline := range v.Children {
+ result += renderImageChildAltText(inline)
+ }
+ }
+ return
+}
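
A quick sketch of the test-oriented renderer above; it relies on the package-level Parse function defined in markdown.go, which appears in the diffstat but outside this hunk.

package main

import (
	"fmt"

	"github.com/mattermost/mattermost-server/v5/shared/markdown"
)

func main() {
	// Parse and render in one step. The output mirrors the CommonMark
	// reference renderer, minus insignificant whitespace between elements.
	input := "> a quote with `code`\n\n- item one\n- item two"
	fmt.Println(markdown.RenderHTML(input))
}
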
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html_entities.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html_entities.go
new file mode 100644
index 00000000..e94cebb9
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/html_entities.go
@@ -0,0 +1,2132 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+var htmlEntities = map[string]string{
+ "AElig": "\u00C6",
+ "AMP": "\u0026",
+ "Aacute": "\u00C1",
+ "Abreve": "\u0102",
+ "Acirc": "\u00C2",
+ "Acy": "\u0410",
+ "Afr": "\U0001D504",
+ "Agrave": "\u00C0",
+ "Alpha": "\u0391",
+ "Amacr": "\u0100",
+ "And": "\u2A53",
+ "Aogon": "\u0104",
+ "Aopf": "\U0001D538",
+ "ApplyFunction": "\u2061",
+ "Aring": "\u00C5",
+ "Ascr": "\U0001D49C",
+ "Assign": "\u2254",
+ "Atilde": "\u00C3",
+ "Auml": "\u00C4",
+ "Backslash": "\u2216",
+ "Barv": "\u2AE7",
+ "Barwed": "\u2306",
+ "Bcy": "\u0411",
+ "Because": "\u2235",
+ "Bernoullis": "\u212C",
+ "Beta": "\u0392",
+ "Bfr": "\U0001D505",
+ "Bopf": "\U0001D539",
+ "Breve": "\u02D8",
+ "Bscr": "\u212C",
+ "Bumpeq": "\u224E",
+ "CHcy": "\u0427",
+ "COPY": "\u00A9",
+ "Cacute": "\u0106",
+ "Cap": "\u22D2",
+ "CapitalDifferentialD": "\u2145",
+ "Cayleys": "\u212D",
+ "Ccaron": "\u010C",
+ "Ccedil": "\u00C7",
+ "Ccirc": "\u0108",
+ "Cconint": "\u2230",
+ "Cdot": "\u010A",
+ "Cedilla": "\u00B8",
+ "CenterDot": "\u00B7",
+ "Cfr": "\u212D",
+ "Chi": "\u03A7",
+ "CircleDot": "\u2299",
+ "CircleMinus": "\u2296",
+ "CirclePlus": "\u2295",
+ "CircleTimes": "\u2297",
+ "ClockwiseContourIntegral": "\u2232",
+ "CloseCurlyDoubleQuote": "\u201D",
+ "CloseCurlyQuote": "\u2019",
+ "Colon": "\u2237",
+ "Colone": "\u2A74",
+ "Congruent": "\u2261",
+ "Conint": "\u222F",
+ "ContourIntegral": "\u222E",
+ "Copf": "\u2102",
+ "Coproduct": "\u2210",
+ "CounterClockwiseContourIntegral": "\u2233",
+ "Cross": "\u2A2F",
+ "Cscr": "\U0001D49E",
+ "Cup": "\u22D3",
+ "CupCap": "\u224D",
+ "DD": "\u2145",
+ "DDotrahd": "\u2911",
+ "DJcy": "\u0402",
+ "DScy": "\u0405",
+ "DZcy": "\u040F",
+ "Dagger": "\u2021",
+ "Darr": "\u21A1",
+ "Dashv": "\u2AE4",
+ "Dcaron": "\u010E",
+ "Dcy": "\u0414",
+ "Del": "\u2207",
+ "Delta": "\u0394",
+ "Dfr": "\U0001D507",
+ "DiacriticalAcute": "\u00B4",
+ "DiacriticalDot": "\u02D9",
+ "DiacriticalDoubleAcute": "\u02DD",
+ "DiacriticalGrave": "\u0060",
+ "DiacriticalTilde": "\u02DC",
+ "Diamond": "\u22C4",
+ "DifferentialD": "\u2146",
+ "Dopf": "\U0001D53B",
+ "Dot": "\u00A8",
+ "DotDot": "\u20DC",
+ "DotEqual": "\u2250",
+ "DoubleContourIntegral": "\u222F",
+ "DoubleDot": "\u00A8",
+ "DoubleDownArrow": "\u21D3",
+ "DoubleLeftArrow": "\u21D0",
+ "DoubleLeftRightArrow": "\u21D4",
+ "DoubleLeftTee": "\u2AE4",
+ "DoubleLongLeftArrow": "\u27F8",
+ "DoubleLongLeftRightArrow": "\u27FA",
+ "DoubleLongRightArrow": "\u27F9",
+ "DoubleRightArrow": "\u21D2",
+ "DoubleRightTee": "\u22A8",
+ "DoubleUpArrow": "\u21D1",
+ "DoubleUpDownArrow": "\u21D5",
+ "DoubleVerticalBar": "\u2225",
+ "DownArrow": "\u2193",
+ "DownArrowBar": "\u2913",
+ "DownArrowUpArrow": "\u21F5",
+ "DownBreve": "\u0311",
+ "DownLeftRightVector": "\u2950",
+ "DownLeftTeeVector": "\u295E",
+ "DownLeftVector": "\u21BD",
+ "DownLeftVectorBar": "\u2956",
+ "DownRightTeeVector": "\u295F",
+ "DownRightVector": "\u21C1",
+ "DownRightVectorBar": "\u2957",
+ "DownTee": "\u22A4",
+ "DownTeeArrow": "\u21A7",
+ "Downarrow": "\u21D3",
+ "Dscr": "\U0001D49F",
+ "Dstrok": "\u0110",
+ "ENG": "\u014A",
+ "ETH": "\u00D0",
+ "Eacute": "\u00C9",
+ "Ecaron": "\u011A",
+ "Ecirc": "\u00CA",
+ "Ecy": "\u042D",
+ "Edot": "\u0116",
+ "Efr": "\U0001D508",
+ "Egrave": "\u00C8",
+ "Element": "\u2208",
+ "Emacr": "\u0112",
+ "EmptySmallSquare": "\u25FB",
+ "EmptyVerySmallSquare": "\u25AB",
+ "Eogon": "\u0118",
+ "Eopf": "\U0001D53C",
+ "Epsilon": "\u0395",
+ "Equal": "\u2A75",
+ "EqualTilde": "\u2242",
+ "Equilibrium": "\u21CC",
+ "Escr": "\u2130",
+ "Esim": "\u2A73",
+ "Eta": "\u0397",
+ "Euml": "\u00CB",
+ "Exists": "\u2203",
+ "ExponentialE": "\u2147",
+ "Fcy": "\u0424",
+ "Ffr": "\U0001D509",
+ "FilledSmallSquare": "\u25FC",
+ "FilledVerySmallSquare": "\u25AA",
+ "Fopf": "\U0001D53D",
+ "ForAll": "\u2200",
+ "Fouriertrf": "\u2131",
+ "Fscr": "\u2131",
+ "GJcy": "\u0403",
+ "GT": "\u003E",
+ "Gamma": "\u0393",
+ "Gammad": "\u03DC",
+ "Gbreve": "\u011E",
+ "Gcedil": "\u0122",
+ "Gcirc": "\u011C",
+ "Gcy": "\u0413",
+ "Gdot": "\u0120",
+ "Gfr": "\U0001D50A",
+ "Gg": "\u22D9",
+ "Gopf": "\U0001D53E",
+ "GreaterEqual": "\u2265",
+ "GreaterEqualLess": "\u22DB",
+ "GreaterFullEqual": "\u2267",
+ "GreaterGreater": "\u2AA2",
+ "GreaterLess": "\u2277",
+ "GreaterSlantEqual": "\u2A7E",
+ "GreaterTilde": "\u2273",
+ "Gscr": "\U0001D4A2",
+ "Gt": "\u226B",
+ "HARDcy": "\u042A",
+ "Hacek": "\u02C7",
+ "Hat": "\u005E",
+ "Hcirc": "\u0124",
+ "Hfr": "\u210C",
+ "HilbertSpace": "\u210B",
+ "Hopf": "\u210D",
+ "HorizontalLine": "\u2500",
+ "Hscr": "\u210B",
+ "Hstrok": "\u0126",
+ "HumpDownHump": "\u224E",
+ "HumpEqual": "\u224F",
+ "IEcy": "\u0415",
+ "IJlig": "\u0132",
+ "IOcy": "\u0401",
+ "Iacute": "\u00CD",
+ "Icirc": "\u00CE",
+ "Icy": "\u0418",
+ "Idot": "\u0130",
+ "Ifr": "\u2111",
+ "Igrave": "\u00CC",
+ "Im": "\u2111",
+ "Imacr": "\u012A",
+ "ImaginaryI": "\u2148",
+ "Implies": "\u21D2",
+ "Int": "\u222C",
+ "Integral": "\u222B",
+ "Intersection": "\u22C2",
+ "InvisibleComma": "\u2063",
+ "InvisibleTimes": "\u2062",
+ "Iogon": "\u012E",
+ "Iopf": "\U0001D540",
+ "Iota": "\u0399",
+ "Iscr": "\u2110",
+ "Itilde": "\u0128",
+ "Iukcy": "\u0406",
+ "Iuml": "\u00CF",
+ "Jcirc": "\u0134",
+ "Jcy": "\u0419",
+ "Jfr": "\U0001D50D",
+ "Jopf": "\U0001D541",
+ "Jscr": "\U0001D4A5",
+ "Jsercy": "\u0408",
+ "Jukcy": "\u0404",
+ "KHcy": "\u0425",
+ "KJcy": "\u040C",
+ "Kappa": "\u039A",
+ "Kcedil": "\u0136",
+ "Kcy": "\u041A",
+ "Kfr": "\U0001D50E",
+ "Kopf": "\U0001D542",
+ "Kscr": "\U0001D4A6",
+ "LJcy": "\u0409",
+ "LT": "\u003C",
+ "Lacute": "\u0139",
+ "Lambda": "\u039B",
+ "Lang": "\u27EA",
+ "Laplacetrf": "\u2112",
+ "Larr": "\u219E",
+ "Lcaron": "\u013D",
+ "Lcedil": "\u013B",
+ "Lcy": "\u041B",
+ "LeftAngleBracket": "\u27E8",
+ "LeftArrow": "\u2190",
+ "LeftArrowBar": "\u21E4",
+ "LeftArrowRightArrow": "\u21C6",
+ "LeftCeiling": "\u2308",
+ "LeftDoubleBracket": "\u27E6",
+ "LeftDownTeeVector": "\u2961",
+ "LeftDownVector": "\u21C3",
+ "LeftDownVectorBar": "\u2959",
+ "LeftFloor": "\u230A",
+ "LeftRightArrow": "\u2194",
+ "LeftRightVector": "\u294E",
+ "LeftTee": "\u22A3",
+ "LeftTeeArrow": "\u21A4",
+ "LeftTeeVector": "\u295A",
+ "LeftTriangle": "\u22B2",
+ "LeftTriangleBar": "\u29CF",
+ "LeftTriangleEqual": "\u22B4",
+ "LeftUpDownVector": "\u2951",
+ "LeftUpTeeVector": "\u2960",
+ "LeftUpVector": "\u21BF",
+ "LeftUpVectorBar": "\u2958",
+ "LeftVector": "\u21BC",
+ "LeftVectorBar": "\u2952",
+ "Leftarrow": "\u21D0",
+ "Leftrightarrow": "\u21D4",
+ "LessEqualGreater": "\u22DA",
+ "LessFullEqual": "\u2266",
+ "LessGreater": "\u2276",
+ "LessLess": "\u2AA1",
+ "LessSlantEqual": "\u2A7D",
+ "LessTilde": "\u2272",
+ "Lfr": "\U0001D50F",
+ "Ll": "\u22D8",
+ "Lleftarrow": "\u21DA",
+ "Lmidot": "\u013F",
+ "LongLeftArrow": "\u27F5",
+ "LongLeftRightArrow": "\u27F7",
+ "LongRightArrow": "\u27F6",
+ "Longleftarrow": "\u27F8",
+ "Longleftrightarrow": "\u27FA",
+ "Longrightarrow": "\u27F9",
+ "Lopf": "\U0001D543",
+ "LowerLeftArrow": "\u2199",
+ "LowerRightArrow": "\u2198",
+ "Lscr": "\u2112",
+ "Lsh": "\u21B0",
+ "Lstrok": "\u0141",
+ "Lt": "\u226A",
+ "Map": "\u2905",
+ "Mcy": "\u041C",
+ "MediumSpace": "\u205F",
+ "Mellintrf": "\u2133",
+ "Mfr": "\U0001D510",
+ "MinusPlus": "\u2213",
+ "Mopf": "\U0001D544",
+ "Mscr": "\u2133",
+ "Mu": "\u039C",
+ "NJcy": "\u040A",
+ "Nacute": "\u0143",
+ "Ncaron": "\u0147",
+ "Ncedil": "\u0145",
+ "Ncy": "\u041D",
+ "NegativeMediumSpace": "\u200B",
+ "NegativeThickSpace": "\u200B",
+ "NegativeThinSpace": "\u200B",
+ "NegativeVeryThinSpace": "\u200B",
+ "NestedGreaterGreater": "\u226B",
+ "NestedLessLess": "\u226A",
+ "NewLine": "\u000A",
+ "Nfr": "\U0001D511",
+ "NoBreak": "\u2060",
+ "NonBreakingSpace": "\u00A0",
+ "Nopf": "\u2115",
+ "Not": "\u2AEC",
+ "NotCongruent": "\u2262",
+ "NotCupCap": "\u226D",
+ "NotDoubleVerticalBar": "\u2226",
+ "NotElement": "\u2209",
+ "NotEqual": "\u2260",
+ "NotEqualTilde": "\u2242\u0338",
+ "NotExists": "\u2204",
+ "NotGreater": "\u226F",
+ "NotGreaterEqual": "\u2271",
+ "NotGreaterFullEqual": "\u2267\u0338",
+ "NotGreaterGreater": "\u226B\u0338",
+ "NotGreaterLess": "\u2279",
+ "NotGreaterSlantEqual": "\u2A7E\u0338",
+ "NotGreaterTilde": "\u2275",
+ "NotHumpDownHump": "\u224E\u0338",
+ "NotHumpEqual": "\u224F\u0338",
+ "NotLeftTriangle": "\u22EA",
+ "NotLeftTriangleBar": "\u29CF\u0338",
+ "NotLeftTriangleEqual": "\u22EC",
+ "NotLess": "\u226E",
+ "NotLessEqual": "\u2270",
+ "NotLessGreater": "\u2278",
+ "NotLessLess": "\u226A\u0338",
+ "NotLessSlantEqual": "\u2A7D\u0338",
+ "NotLessTilde": "\u2274",
+ "NotNestedGreaterGreater": "\u2AA2\u0338",
+ "NotNestedLessLess": "\u2AA1\u0338",
+ "NotPrecedes": "\u2280",
+ "NotPrecedesEqual": "\u2AAF\u0338",
+ "NotPrecedesSlantEqual": "\u22E0",
+ "NotReverseElement": "\u220C",
+ "NotRightTriangle": "\u22EB",
+ "NotRightTriangleBar": "\u29D0\u0338",
+ "NotRightTriangleEqual": "\u22ED",
+ "NotSquareSubset": "\u228F\u0338",
+ "NotSquareSubsetEqual": "\u22E2",
+ "NotSquareSuperset": "\u2290\u0338",
+ "NotSquareSupersetEqual": "\u22E3",
+ "NotSubset": "\u2282\u20D2",
+ "NotSubsetEqual": "\u2288",
+ "NotSucceeds": "\u2281",
+ "NotSucceedsEqual": "\u2AB0\u0338",
+ "NotSucceedsSlantEqual": "\u22E1",
+ "NotSucceedsTilde": "\u227F\u0338",
+ "NotSuperset": "\u2283\u20D2",
+ "NotSupersetEqual": "\u2289",
+ "NotTilde": "\u2241",
+ "NotTildeEqual": "\u2244",
+ "NotTildeFullEqual": "\u2247",
+ "NotTildeTilde": "\u2249",
+ "NotVerticalBar": "\u2224",
+ "Nscr": "\U0001D4A9",
+ "Ntilde": "\u00D1",
+ "Nu": "\u039D",
+ "OElig": "\u0152",
+ "Oacute": "\u00D3",
+ "Ocirc": "\u00D4",
+ "Ocy": "\u041E",
+ "Odblac": "\u0150",
+ "Ofr": "\U0001D512",
+ "Ograve": "\u00D2",
+ "Omacr": "\u014C",
+ "Omega": "\u03A9",
+ "Omicron": "\u039F",
+ "Oopf": "\U0001D546",
+ "OpenCurlyDoubleQuote": "\u201C",
+ "OpenCurlyQuote": "\u2018",
+ "Or": "\u2A54",
+ "Oscr": "\U0001D4AA",
+ "Oslash": "\u00D8",
+ "Otilde": "\u00D5",
+ "Otimes": "\u2A37",
+ "Ouml": "\u00D6",
+ "OverBar": "\u203E",
+ "OverBrace": "\u23DE",
+ "OverBracket": "\u23B4",
+ "OverParenthesis": "\u23DC",
+ "PartialD": "\u2202",
+ "Pcy": "\u041F",
+ "Pfr": "\U0001D513",
+ "Phi": "\u03A6",
+ "Pi": "\u03A0",
+ "PlusMinus": "\u00B1",
+ "Poincareplane": "\u210C",
+ "Popf": "\u2119",
+ "Pr": "\u2ABB",
+ "Precedes": "\u227A",
+ "PrecedesEqual": "\u2AAF",
+ "PrecedesSlantEqual": "\u227C",
+ "PrecedesTilde": "\u227E",
+ "Prime": "\u2033",
+ "Product": "\u220F",
+ "Proportion": "\u2237",
+ "Proportional": "\u221D",
+ "Pscr": "\U0001D4AB",
+ "Psi": "\u03A8",
+ "QUOT": "\u0022",
+ "Qfr": "\U0001D514",
+ "Qopf": "\u211A",
+ "Qscr": "\U0001D4AC",
+ "RBarr": "\u2910",
+ "REG": "\u00AE",
+ "Racute": "\u0154",
+ "Rang": "\u27EB",
+ "Rarr": "\u21A0",
+ "Rarrtl": "\u2916",
+ "Rcaron": "\u0158",
+ "Rcedil": "\u0156",
+ "Rcy": "\u0420",
+ "Re": "\u211C",
+ "ReverseElement": "\u220B",
+ "ReverseEquilibrium": "\u21CB",
+ "ReverseUpEquilibrium": "\u296F",
+ "Rfr": "\u211C",
+ "Rho": "\u03A1",
+ "RightAngleBracket": "\u27E9",
+ "RightArrow": "\u2192",
+ "RightArrowBar": "\u21E5",
+ "RightArrowLeftArrow": "\u21C4",
+ "RightCeiling": "\u2309",
+ "RightDoubleBracket": "\u27E7",
+ "RightDownTeeVector": "\u295D",
+ "RightDownVector": "\u21C2",
+ "RightDownVectorBar": "\u2955",
+ "RightFloor": "\u230B",
+ "RightTee": "\u22A2",
+ "RightTeeArrow": "\u21A6",
+ "RightTeeVector": "\u295B",
+ "RightTriangle": "\u22B3",
+ "RightTriangleBar": "\u29D0",
+ "RightTriangleEqual": "\u22B5",
+ "RightUpDownVector": "\u294F",
+ "RightUpTeeVector": "\u295C",
+ "RightUpVector": "\u21BE",
+ "RightUpVectorBar": "\u2954",
+ "RightVector": "\u21C0",
+ "RightVectorBar": "\u2953",
+ "Rightarrow": "\u21D2",
+ "Ropf": "\u211D",
+ "RoundImplies": "\u2970",
+ "Rrightarrow": "\u21DB",
+ "Rscr": "\u211B",
+ "Rsh": "\u21B1",
+ "RuleDelayed": "\u29F4",
+ "SHCHcy": "\u0429",
+ "SHcy": "\u0428",
+ "SOFTcy": "\u042C",
+ "Sacute": "\u015A",
+ "Sc": "\u2ABC",
+ "Scaron": "\u0160",
+ "Scedil": "\u015E",
+ "Scirc": "\u015C",
+ "Scy": "\u0421",
+ "Sfr": "\U0001D516",
+ "ShortDownArrow": "\u2193",
+ "ShortLeftArrow": "\u2190",
+ "ShortRightArrow": "\u2192",
+ "ShortUpArrow": "\u2191",
+ "Sigma": "\u03A3",
+ "SmallCircle": "\u2218",
+ "Sopf": "\U0001D54A",
+ "Sqrt": "\u221A",
+ "Square": "\u25A1",
+ "SquareIntersection": "\u2293",
+ "SquareSubset": "\u228F",
+ "SquareSubsetEqual": "\u2291",
+ "SquareSuperset": "\u2290",
+ "SquareSupersetEqual": "\u2292",
+ "SquareUnion": "\u2294",
+ "Sscr": "\U0001D4AE",
+ "Star": "\u22C6",
+ "Sub": "\u22D0",
+ "Subset": "\u22D0",
+ "SubsetEqual": "\u2286",
+ "Succeeds": "\u227B",
+ "SucceedsEqual": "\u2AB0",
+ "SucceedsSlantEqual": "\u227D",
+ "SucceedsTilde": "\u227F",
+ "SuchThat": "\u220B",
+ "Sum": "\u2211",
+ "Sup": "\u22D1",
+ "Superset": "\u2283",
+ "SupersetEqual": "\u2287",
+ "Supset": "\u22D1",
+ "THORN": "\u00DE",
+ "TRADE": "\u2122",
+ "TSHcy": "\u040B",
+ "TScy": "\u0426",
+ "Tab": "\u0009",
+ "Tau": "\u03A4",
+ "Tcaron": "\u0164",
+ "Tcedil": "\u0162",
+ "Tcy": "\u0422",
+ "Tfr": "\U0001D517",
+ "Therefore": "\u2234",
+ "Theta": "\u0398",
+ "ThickSpace": "\u205F\u200A",
+ "ThinSpace": "\u2009",
+ "Tilde": "\u223C",
+ "TildeEqual": "\u2243",
+ "TildeFullEqual": "\u2245",
+ "TildeTilde": "\u2248",
+ "Topf": "\U0001D54B",
+ "TripleDot": "\u20DB",
+ "Tscr": "\U0001D4AF",
+ "Tstrok": "\u0166",
+ "Uacute": "\u00DA",
+ "Uarr": "\u219F",
+ "Uarrocir": "\u2949",
+ "Ubrcy": "\u040E",
+ "Ubreve": "\u016C",
+ "Ucirc": "\u00DB",
+ "Ucy": "\u0423",
+ "Udblac": "\u0170",
+ "Ufr": "\U0001D518",
+ "Ugrave": "\u00D9",
+ "Umacr": "\u016A",
+ "UnderBar": "\u005F",
+ "UnderBrace": "\u23DF",
+ "UnderBracket": "\u23B5",
+ "UnderParenthesis": "\u23DD",
+ "Union": "\u22C3",
+ "UnionPlus": "\u228E",
+ "Uogon": "\u0172",
+ "Uopf": "\U0001D54C",
+ "UpArrow": "\u2191",
+ "UpArrowBar": "\u2912",
+ "UpArrowDownArrow": "\u21C5",
+ "UpDownArrow": "\u2195",
+ "UpEquilibrium": "\u296E",
+ "UpTee": "\u22A5",
+ "UpTeeArrow": "\u21A5",
+ "Uparrow": "\u21D1",
+ "Updownarrow": "\u21D5",
+ "UpperLeftArrow": "\u2196",
+ "UpperRightArrow": "\u2197",
+ "Upsi": "\u03D2",
+ "Upsilon": "\u03A5",
+ "Uring": "\u016E",
+ "Uscr": "\U0001D4B0",
+ "Utilde": "\u0168",
+ "Uuml": "\u00DC",
+ "VDash": "\u22AB",
+ "Vbar": "\u2AEB",
+ "Vcy": "\u0412",
+ "Vdash": "\u22A9",
+ "Vdashl": "\u2AE6",
+ "Vee": "\u22C1",
+ "Verbar": "\u2016",
+ "Vert": "\u2016",
+ "VerticalBar": "\u2223",
+ "VerticalLine": "\u007C",
+ "VerticalSeparator": "\u2758",
+ "VerticalTilde": "\u2240",
+ "VeryThinSpace": "\u200A",
+ "Vfr": "\U0001D519",
+ "Vopf": "\U0001D54D",
+ "Vscr": "\U0001D4B1",
+ "Vvdash": "\u22AA",
+ "Wcirc": "\u0174",
+ "Wedge": "\u22C0",
+ "Wfr": "\U0001D51A",
+ "Wopf": "\U0001D54E",
+ "Wscr": "\U0001D4B2",
+ "Xfr": "\U0001D51B",
+ "Xi": "\u039E",
+ "Xopf": "\U0001D54F",
+ "Xscr": "\U0001D4B3",
+ "YAcy": "\u042F",
+ "YIcy": "\u0407",
+ "YUcy": "\u042E",
+ "Yacute": "\u00DD",
+ "Ycirc": "\u0176",
+ "Ycy": "\u042B",
+ "Yfr": "\U0001D51C",
+ "Yopf": "\U0001D550",
+ "Yscr": "\U0001D4B4",
+ "Yuml": "\u0178",
+ "ZHcy": "\u0416",
+ "Zacute": "\u0179",
+ "Zcaron": "\u017D",
+ "Zcy": "\u0417",
+ "Zdot": "\u017B",
+ "ZeroWidthSpace": "\u200B",
+ "Zeta": "\u0396",
+ "Zfr": "\u2128",
+ "Zopf": "\u2124",
+ "Zscr": "\U0001D4B5",
+ "aacute": "\u00E1",
+ "abreve": "\u0103",
+ "ac": "\u223E",
+ "acE": "\u223E\u0333",
+ "acd": "\u223F",
+ "acirc": "\u00E2",
+ "acute": "\u00B4",
+ "acy": "\u0430",
+ "aelig": "\u00E6",
+ "af": "\u2061",
+ "afr": "\U0001D51E",
+ "agrave": "\u00E0",
+ "alefsym": "\u2135",
+ "aleph": "\u2135",
+ "alpha": "\u03B1",
+ "amacr": "\u0101",
+ "amalg": "\u2A3F",
+ "amp": "\u0026",
+ "and": "\u2227",
+ "andand": "\u2A55",
+ "andd": "\u2A5C",
+ "andslope": "\u2A58",
+ "andv": "\u2A5A",
+ "ang": "\u2220",
+ "ange": "\u29A4",
+ "angle": "\u2220",
+ "angmsd": "\u2221",
+ "angmsdaa": "\u29A8",
+ "angmsdab": "\u29A9",
+ "angmsdac": "\u29AA",
+ "angmsdad": "\u29AB",
+ "angmsdae": "\u29AC",
+ "angmsdaf": "\u29AD",
+ "angmsdag": "\u29AE",
+ "angmsdah": "\u29AF",
+ "angrt": "\u221F",
+ "angrtvb": "\u22BE",
+ "angrtvbd": "\u299D",
+ "angsph": "\u2222",
+ "angst": "\u00C5",
+ "angzarr": "\u237C",
+ "aogon": "\u0105",
+ "aopf": "\U0001D552",
+ "ap": "\u2248",
+ "apE": "\u2A70",
+ "apacir": "\u2A6F",
+ "ape": "\u224A",
+ "apid": "\u224B",
+ "apos": "\u0027",
+ "approx": "\u2248",
+ "approxeq": "\u224A",
+ "aring": "\u00E5",
+ "ascr": "\U0001D4B6",
+ "ast": "\u002A",
+ "asymp": "\u2248",
+ "asympeq": "\u224D",
+ "atilde": "\u00E3",
+ "auml": "\u00E4",
+ "awconint": "\u2233",
+ "awint": "\u2A11",
+ "bNot": "\u2AED",
+ "backcong": "\u224C",
+ "backepsilon": "\u03F6",
+ "backprime": "\u2035",
+ "backsim": "\u223D",
+ "backsimeq": "\u22CD",
+ "barvee": "\u22BD",
+ "barwed": "\u2305",
+ "barwedge": "\u2305",
+ "bbrk": "\u23B5",
+ "bbrktbrk": "\u23B6",
+ "bcong": "\u224C",
+ "bcy": "\u0431",
+ "bdquo": "\u201E",
+ "becaus": "\u2235",
+ "because": "\u2235",
+ "bemptyv": "\u29B0",
+ "bepsi": "\u03F6",
+ "bernou": "\u212C",
+ "beta": "\u03B2",
+ "beth": "\u2136",
+ "between": "\u226C",
+ "bfr": "\U0001D51F",
+ "bigcap": "\u22C2",
+ "bigcirc": "\u25EF",
+ "bigcup": "\u22C3",
+ "bigodot": "\u2A00",
+ "bigoplus": "\u2A01",
+ "bigotimes": "\u2A02",
+ "bigsqcup": "\u2A06",
+ "bigstar": "\u2605",
+ "bigtriangledown": "\u25BD",
+ "bigtriangleup": "\u25B3",
+ "biguplus": "\u2A04",
+ "bigvee": "\u22C1",
+ "bigwedge": "\u22C0",
+ "bkarow": "\u290D",
+ "blacklozenge": "\u29EB",
+ "blacksquare": "\u25AA",
+ "blacktriangle": "\u25B4",
+ "blacktriangledown": "\u25BE",
+ "blacktriangleleft": "\u25C2",
+ "blacktriangleright": "\u25B8",
+ "blank": "\u2423",
+ "blk12": "\u2592",
+ "blk14": "\u2591",
+ "blk34": "\u2593",
+ "block": "\u2588",
+ "bne": "\u003D\u20E5",
+ "bnequiv": "\u2261\u20E5",
+ "bnot": "\u2310",
+ "bopf": "\U0001D553",
+ "bot": "\u22A5",
+ "bottom": "\u22A5",
+ "bowtie": "\u22C8",
+ "boxDL": "\u2557",
+ "boxDR": "\u2554",
+ "boxDl": "\u2556",
+ "boxDr": "\u2553",
+ "boxH": "\u2550",
+ "boxHD": "\u2566",
+ "boxHU": "\u2569",
+ "boxHd": "\u2564",
+ "boxHu": "\u2567",
+ "boxUL": "\u255D",
+ "boxUR": "\u255A",
+ "boxUl": "\u255C",
+ "boxUr": "\u2559",
+ "boxV": "\u2551",
+ "boxVH": "\u256C",
+ "boxVL": "\u2563",
+ "boxVR": "\u2560",
+ "boxVh": "\u256B",
+ "boxVl": "\u2562",
+ "boxVr": "\u255F",
+ "boxbox": "\u29C9",
+ "boxdL": "\u2555",
+ "boxdR": "\u2552",
+ "boxdl": "\u2510",
+ "boxdr": "\u250C",
+ "boxh": "\u2500",
+ "boxhD": "\u2565",
+ "boxhU": "\u2568",
+ "boxhd": "\u252C",
+ "boxhu": "\u2534",
+ "boxminus": "\u229F",
+ "boxplus": "\u229E",
+ "boxtimes": "\u22A0",
+ "boxuL": "\u255B",
+ "boxuR": "\u2558",
+ "boxul": "\u2518",
+ "boxur": "\u2514",
+ "boxv": "\u2502",
+ "boxvH": "\u256A",
+ "boxvL": "\u2561",
+ "boxvR": "\u255E",
+ "boxvh": "\u253C",
+ "boxvl": "\u2524",
+ "boxvr": "\u251C",
+ "bprime": "\u2035",
+ "breve": "\u02D8",
+ "brvbar": "\u00A6",
+ "bscr": "\U0001D4B7",
+ "bsemi": "\u204F",
+ "bsim": "\u223D",
+ "bsime": "\u22CD",
+ "bsol": "\u005C",
+ "bsolb": "\u29C5",
+ "bsolhsub": "\u27C8",
+ "bull": "\u2022",
+ "bullet": "\u2022",
+ "bump": "\u224E",
+ "bumpE": "\u2AAE",
+ "bumpe": "\u224F",
+ "bumpeq": "\u224F",
+ "cacute": "\u0107",
+ "cap": "\u2229",
+ "capand": "\u2A44",
+ "capbrcup": "\u2A49",
+ "capcap": "\u2A4B",
+ "capcup": "\u2A47",
+ "capdot": "\u2A40",
+ "caps": "\u2229\uFE00",
+ "caret": "\u2041",
+ "caron": "\u02C7",
+ "ccaps": "\u2A4D",
+ "ccaron": "\u010D",
+ "ccedil": "\u00E7",
+ "ccirc": "\u0109",
+ "ccups": "\u2A4C",
+ "ccupssm": "\u2A50",
+ "cdot": "\u010B",
+ "cedil": "\u00B8",
+ "cemptyv": "\u29B2",
+ "cent": "\u00A2",
+ "centerdot": "\u00B7",
+ "cfr": "\U0001D520",
+ "chcy": "\u0447",
+ "check": "\u2713",
+ "checkmark": "\u2713",
+ "chi": "\u03C7",
+ "cir": "\u25CB",
+ "cirE": "\u29C3",
+ "circ": "\u02C6",
+ "circeq": "\u2257",
+ "circlearrowleft": "\u21BA",
+ "circlearrowright": "\u21BB",
+ "circledR": "\u00AE",
+ "circledS": "\u24C8",
+ "circledast": "\u229B",
+ "circledcirc": "\u229A",
+ "circleddash": "\u229D",
+ "cire": "\u2257",
+ "cirfnint": "\u2A10",
+ "cirmid": "\u2AEF",
+ "cirscir": "\u29C2",
+ "clubs": "\u2663",
+ "clubsuit": "\u2663",
+ "colon": "\u003A",
+ "colone": "\u2254",
+ "coloneq": "\u2254",
+ "comma": "\u002C",
+ "commat": "\u0040",
+ "comp": "\u2201",
+ "compfn": "\u2218",
+ "complement": "\u2201",
+ "complexes": "\u2102",
+ "cong": "\u2245",
+ "congdot": "\u2A6D",
+ "conint": "\u222E",
+ "copf": "\U0001D554",
+ "coprod": "\u2210",
+ "copy": "\u00A9",
+ "copysr": "\u2117",
+ "crarr": "\u21B5",
+ "cross": "\u2717",
+ "cscr": "\U0001D4B8",
+ "csub": "\u2ACF",
+ "csube": "\u2AD1",
+ "csup": "\u2AD0",
+ "csupe": "\u2AD2",
+ "ctdot": "\u22EF",
+ "cudarrl": "\u2938",
+ "cudarrr": "\u2935",
+ "cuepr": "\u22DE",
+ "cuesc": "\u22DF",
+ "cularr": "\u21B6",
+ "cularrp": "\u293D",
+ "cup": "\u222A",
+ "cupbrcap": "\u2A48",
+ "cupcap": "\u2A46",
+ "cupcup": "\u2A4A",
+ "cupdot": "\u228D",
+ "cupor": "\u2A45",
+ "cups": "\u222A\uFE00",
+ "curarr": "\u21B7",
+ "curarrm": "\u293C",
+ "curlyeqprec": "\u22DE",
+ "curlyeqsucc": "\u22DF",
+ "curlyvee": "\u22CE",
+ "curlywedge": "\u22CF",
+ "curren": "\u00A4",
+ "curvearrowleft": "\u21B6",
+ "curvearrowright": "\u21B7",
+ "cuvee": "\u22CE",
+ "cuwed": "\u22CF",
+ "cwconint": "\u2232",
+ "cwint": "\u2231",
+ "cylcty": "\u232D",
+ "dArr": "\u21D3",
+ "dHar": "\u2965",
+ "dagger": "\u2020",
+ "daleth": "\u2138",
+ "darr": "\u2193",
+ "dash": "\u2010",
+ "dashv": "\u22A3",
+ "dbkarow": "\u290F",
+ "dblac": "\u02DD",
+ "dcaron": "\u010F",
+ "dcy": "\u0434",
+ "dd": "\u2146",
+ "ddagger": "\u2021",
+ "ddarr": "\u21CA",
+ "ddotseq": "\u2A77",
+ "deg": "\u00B0",
+ "delta": "\u03B4",
+ "demptyv": "\u29B1",
+ "dfisht": "\u297F",
+ "dfr": "\U0001D521",
+ "dharl": "\u21C3",
+ "dharr": "\u21C2",
+ "diam": "\u22C4",
+ "diamond": "\u22C4",
+ "diamondsuit": "\u2666",
+ "diams": "\u2666",
+ "die": "\u00A8",
+ "digamma": "\u03DD",
+ "disin": "\u22F2",
+ "div": "\u00F7",
+ "divide": "\u00F7",
+ "divideontimes": "\u22C7",
+ "divonx": "\u22C7",
+ "djcy": "\u0452",
+ "dlcorn": "\u231E",
+ "dlcrop": "\u230D",
+ "dollar": "\u0024",
+ "dopf": "\U0001D555",
+ "dot": "\u02D9",
+ "doteq": "\u2250",
+ "doteqdot": "\u2251",
+ "dotminus": "\u2238",
+ "dotplus": "\u2214",
+ "dotsquare": "\u22A1",
+ "doublebarwedge": "\u2306",
+ "downarrow": "\u2193",
+ "downdownarrows": "\u21CA",
+ "downharpoonleft": "\u21C3",
+ "downharpoonright": "\u21C2",
+ "drbkarow": "\u2910",
+ "drcorn": "\u231F",
+ "drcrop": "\u230C",
+ "dscr": "\U0001D4B9",
+ "dscy": "\u0455",
+ "dsol": "\u29F6",
+ "dstrok": "\u0111",
+ "dtdot": "\u22F1",
+ "dtri": "\u25BF",
+ "dtrif": "\u25BE",
+ "duarr": "\u21F5",
+ "duhar": "\u296F",
+ "dwangle": "\u29A6",
+ "dzcy": "\u045F",
+ "dzigrarr": "\u27FF",
+ "eDDot": "\u2A77",
+ "eDot": "\u2251",
+ "eacute": "\u00E9",
+ "easter": "\u2A6E",
+ "ecaron": "\u011B",
+ "ecir": "\u2256",
+ "ecirc": "\u00EA",
+ "ecolon": "\u2255",
+ "ecy": "\u044D",
+ "edot": "\u0117",
+ "ee": "\u2147",
+ "efDot": "\u2252",
+ "efr": "\U0001D522",
+ "eg": "\u2A9A",
+ "egrave": "\u00E8",
+ "egs": "\u2A96",
+ "egsdot": "\u2A98",
+ "el": "\u2A99",
+ "elinters": "\u23E7",
+ "ell": "\u2113",
+ "els": "\u2A95",
+ "elsdot": "\u2A97",
+ "emacr": "\u0113",
+ "empty": "\u2205",
+ "emptyset": "\u2205",
+ "emptyv": "\u2205",
+ "emsp": "\u2003",
+ "emsp13": "\u2004",
+ "emsp14": "\u2005",
+ "eng": "\u014B",
+ "ensp": "\u2002",
+ "eogon": "\u0119",
+ "eopf": "\U0001D556",
+ "epar": "\u22D5",
+ "eparsl": "\u29E3",
+ "eplus": "\u2A71",
+ "epsi": "\u03B5",
+ "epsilon": "\u03B5",
+ "epsiv": "\u03F5",
+ "eqcirc": "\u2256",
+ "eqcolon": "\u2255",
+ "eqsim": "\u2242",
+ "eqslantgtr": "\u2A96",
+ "eqslantless": "\u2A95",
+ "equals": "\u003D",
+ "equest": "\u225F",
+ "equiv": "\u2261",
+ "equivDD": "\u2A78",
+ "eqvparsl": "\u29E5",
+ "erDot": "\u2253",
+ "erarr": "\u2971",
+ "escr": "\u212F",
+ "esdot": "\u2250",
+ "esim": "\u2242",
+ "eta": "\u03B7",
+ "eth": "\u00F0",
+ "euml": "\u00EB",
+ "euro": "\u20AC",
+ "excl": "\u0021",
+ "exist": "\u2203",
+ "expectation": "\u2130",
+ "exponentiale": "\u2147",
+ "fallingdotseq": "\u2252",
+ "fcy": "\u0444",
+ "female": "\u2640",
+ "ffilig": "\uFB03",
+ "fflig": "\uFB00",
+ "ffllig": "\uFB04",
+ "ffr": "\U0001D523",
+ "filig": "\uFB01",
+ "fjlig": "\u0066\u006A",
+ "flat": "\u266D",
+ "fllig": "\uFB02",
+ "fltns": "\u25B1",
+ "fnof": "\u0192",
+ "fopf": "\U0001D557",
+ "forall": "\u2200",
+ "fork": "\u22D4",
+ "forkv": "\u2AD9",
+ "fpartint": "\u2A0D",
+ "frac12": "\u00BD",
+ "frac13": "\u2153",
+ "frac14": "\u00BC",
+ "frac15": "\u2155",
+ "frac16": "\u2159",
+ "frac18": "\u215B",
+ "frac23": "\u2154",
+ "frac25": "\u2156",
+ "frac34": "\u00BE",
+ "frac35": "\u2157",
+ "frac38": "\u215C",
+ "frac45": "\u2158",
+ "frac56": "\u215A",
+ "frac58": "\u215D",
+ "frac78": "\u215E",
+ "frasl": "\u2044",
+ "frown": "\u2322",
+ "fscr": "\U0001D4BB",
+ "gE": "\u2267",
+ "gEl": "\u2A8C",
+ "gacute": "\u01F5",
+ "gamma": "\u03B3",
+ "gammad": "\u03DD",
+ "gap": "\u2A86",
+ "gbreve": "\u011F",
+ "gcirc": "\u011D",
+ "gcy": "\u0433",
+ "gdot": "\u0121",
+ "ge": "\u2265",
+ "gel": "\u22DB",
+ "geq": "\u2265",
+ "geqq": "\u2267",
+ "geqslant": "\u2A7E",
+ "ges": "\u2A7E",
+ "gescc": "\u2AA9",
+ "gesdot": "\u2A80",
+ "gesdoto": "\u2A82",
+ "gesdotol": "\u2A84",
+ "gesl": "\u22DB\uFE00",
+ "gesles": "\u2A94",
+ "gfr": "\U0001D524",
+ "gg": "\u226B",
+ "ggg": "\u22D9",
+ "gimel": "\u2137",
+ "gjcy": "\u0453",
+ "gl": "\u2277",
+ "glE": "\u2A92",
+ "gla": "\u2AA5",
+ "glj": "\u2AA4",
+ "gnE": "\u2269",
+ "gnap": "\u2A8A",
+ "gnapprox": "\u2A8A",
+ "gne": "\u2A88",
+ "gneq": "\u2A88",
+ "gneqq": "\u2269",
+ "gnsim": "\u22E7",
+ "gopf": "\U0001D558",
+ "grave": "\u0060",
+ "gscr": "\u210A",
+ "gsim": "\u2273",
+ "gsime": "\u2A8E",
+ "gsiml": "\u2A90",
+ "gt": "\u003E",
+ "gtcc": "\u2AA7",
+ "gtcir": "\u2A7A",
+ "gtdot": "\u22D7",
+ "gtlPar": "\u2995",
+ "gtquest": "\u2A7C",
+ "gtrapprox": "\u2A86",
+ "gtrarr": "\u2978",
+ "gtrdot": "\u22D7",
+ "gtreqless": "\u22DB",
+ "gtreqqless": "\u2A8C",
+ "gtrless": "\u2277",
+ "gtrsim": "\u2273",
+ "gvertneqq": "\u2269\uFE00",
+ "gvnE": "\u2269\uFE00",
+ "hArr": "\u21D4",
+ "hairsp": "\u200A",
+ "half": "\u00BD",
+ "hamilt": "\u210B",
+ "hardcy": "\u044A",
+ "harr": "\u2194",
+ "harrcir": "\u2948",
+ "harrw": "\u21AD",
+ "hbar": "\u210F",
+ "hcirc": "\u0125",
+ "hearts": "\u2665",
+ "heartsuit": "\u2665",
+ "hellip": "\u2026",
+ "hercon": "\u22B9",
+ "hfr": "\U0001D525",
+ "hksearow": "\u2925",
+ "hkswarow": "\u2926",
+ "hoarr": "\u21FF",
+ "homtht": "\u223B",
+ "hookleftarrow": "\u21A9",
+ "hookrightarrow": "\u21AA",
+ "hopf": "\U0001D559",
+ "horbar": "\u2015",
+ "hscr": "\U0001D4BD",
+ "hslash": "\u210F",
+ "hstrok": "\u0127",
+ "hybull": "\u2043",
+ "hyphen": "\u2010",
+ "iacute": "\u00ED",
+ "ic": "\u2063",
+ "icirc": "\u00EE",
+ "icy": "\u0438",
+ "iecy": "\u0435",
+ "iexcl": "\u00A1",
+ "iff": "\u21D4",
+ "ifr": "\U0001D526",
+ "igrave": "\u00EC",
+ "ii": "\u2148",
+ "iiiint": "\u2A0C",
+ "iiint": "\u222D",
+ "iinfin": "\u29DC",
+ "iiota": "\u2129",
+ "ijlig": "\u0133",
+ "imacr": "\u012B",
+ "image": "\u2111",
+ "imagline": "\u2110",
+ "imagpart": "\u2111",
+ "imath": "\u0131",
+ "imof": "\u22B7",
+ "imped": "\u01B5",
+ "in": "\u2208",
+ "incare": "\u2105",
+ "infin": "\u221E",
+ "infintie": "\u29DD",
+ "inodot": "\u0131",
+ "int": "\u222B",
+ "intcal": "\u22BA",
+ "integers": "\u2124",
+ "intercal": "\u22BA",
+ "intlarhk": "\u2A17",
+ "intprod": "\u2A3C",
+ "iocy": "\u0451",
+ "iogon": "\u012F",
+ "iopf": "\U0001D55A",
+ "iota": "\u03B9",
+ "iprod": "\u2A3C",
+ "iquest": "\u00BF",
+ "iscr": "\U0001D4BE",
+ "isin": "\u2208",
+ "isinE": "\u22F9",
+ "isindot": "\u22F5",
+ "isins": "\u22F4",
+ "isinsv": "\u22F3",
+ "isinv": "\u2208",
+ "it": "\u2062",
+ "itilde": "\u0129",
+ "iukcy": "\u0456",
+ "iuml": "\u00EF",
+ "jcirc": "\u0135",
+ "jcy": "\u0439",
+ "jfr": "\U0001D527",
+ "jmath": "\u0237",
+ "jopf": "\U0001D55B",
+ "jscr": "\U0001D4BF",
+ "jsercy": "\u0458",
+ "jukcy": "\u0454",
+ "kappa": "\u03BA",
+ "kappav": "\u03F0",
+ "kcedil": "\u0137",
+ "kcy": "\u043A",
+ "kfr": "\U0001D528",
+ "kgreen": "\u0138",
+ "khcy": "\u0445",
+ "kjcy": "\u045C",
+ "kopf": "\U0001D55C",
+ "kscr": "\U0001D4C0",
+ "lAarr": "\u21DA",
+ "lArr": "\u21D0",
+ "lAtail": "\u291B",
+ "lBarr": "\u290E",
+ "lE": "\u2266",
+ "lEg": "\u2A8B",
+ "lHar": "\u2962",
+ "lacute": "\u013A",
+ "laemptyv": "\u29B4",
+ "lagran": "\u2112",
+ "lambda": "\u03BB",
+ "lang": "\u27E8",
+ "langd": "\u2991",
+ "langle": "\u27E8",
+ "lap": "\u2A85",
+ "laquo": "\u00AB",
+ "larr": "\u2190",
+ "larrb": "\u21E4",
+ "larrbfs": "\u291F",
+ "larrfs": "\u291D",
+ "larrhk": "\u21A9",
+ "larrlp": "\u21AB",
+ "larrpl": "\u2939",
+ "larrsim": "\u2973",
+ "larrtl": "\u21A2",
+ "lat": "\u2AAB",
+ "latail": "\u2919",
+ "late": "\u2AAD",
+ "lates": "\u2AAD\uFE00",
+ "lbarr": "\u290C",
+ "lbbrk": "\u2772",
+ "lbrace": "\u007B",
+ "lbrack": "\u005B",
+ "lbrke": "\u298B",
+ "lbrksld": "\u298F",
+ "lbrkslu": "\u298D",
+ "lcaron": "\u013E",
+ "lcedil": "\u013C",
+ "lceil": "\u2308",
+ "lcub": "\u007B",
+ "lcy": "\u043B",
+ "ldca": "\u2936",
+ "ldquo": "\u201C",
+ "ldquor": "\u201E",
+ "ldrdhar": "\u2967",
+ "ldrushar": "\u294B",
+ "ldsh": "\u21B2",
+ "le": "\u2264",
+ "leftarrow": "\u2190",
+ "leftarrowtail": "\u21A2",
+ "leftharpoondown": "\u21BD",
+ "leftharpoonup": "\u21BC",
+ "leftleftarrows": "\u21C7",
+ "leftrightarrow": "\u2194",
+ "leftrightarrows": "\u21C6",
+ "leftrightharpoons": "\u21CB",
+ "leftrightsquigarrow": "\u21AD",
+ "leftthreetimes": "\u22CB",
+ "leg": "\u22DA",
+ "leq": "\u2264",
+ "leqq": "\u2266",
+ "leqslant": "\u2A7D",
+ "les": "\u2A7D",
+ "lescc": "\u2AA8",
+ "lesdot": "\u2A7F",
+ "lesdoto": "\u2A81",
+ "lesdotor": "\u2A83",
+ "lesg": "\u22DA\uFE00",
+ "lesges": "\u2A93",
+ "lessapprox": "\u2A85",
+ "lessdot": "\u22D6",
+ "lesseqgtr": "\u22DA",
+ "lesseqqgtr": "\u2A8B",
+ "lessgtr": "\u2276",
+ "lesssim": "\u2272",
+ "lfisht": "\u297C",
+ "lfloor": "\u230A",
+ "lfr": "\U0001D529",
+ "lg": "\u2276",
+ "lgE": "\u2A91",
+ "lhard": "\u21BD",
+ "lharu": "\u21BC",
+ "lharul": "\u296A",
+ "lhblk": "\u2584",
+ "ljcy": "\u0459",
+ "ll": "\u226A",
+ "llarr": "\u21C7",
+ "llcorner": "\u231E",
+ "llhard": "\u296B",
+ "lltri": "\u25FA",
+ "lmidot": "\u0140",
+ "lmoust": "\u23B0",
+ "lmoustache": "\u23B0",
+ "lnE": "\u2268",
+ "lnap": "\u2A89",
+ "lnapprox": "\u2A89",
+ "lne": "\u2A87",
+ "lneq": "\u2A87",
+ "lneqq": "\u2268",
+ "lnsim": "\u22E6",
+ "loang": "\u27EC",
+ "loarr": "\u21FD",
+ "lobrk": "\u27E6",
+ "longleftarrow": "\u27F5",
+ "longleftrightarrow": "\u27F7",
+ "longmapsto": "\u27FC",
+ "longrightarrow": "\u27F6",
+ "looparrowleft": "\u21AB",
+ "looparrowright": "\u21AC",
+ "lopar": "\u2985",
+ "lopf": "\U0001D55D",
+ "loplus": "\u2A2D",
+ "lotimes": "\u2A34",
+ "lowast": "\u2217",
+ "lowbar": "\u005F",
+ "loz": "\u25CA",
+ "lozenge": "\u25CA",
+ "lozf": "\u29EB",
+ "lpar": "\u0028",
+ "lparlt": "\u2993",
+ "lrarr": "\u21C6",
+ "lrcorner": "\u231F",
+ "lrhar": "\u21CB",
+ "lrhard": "\u296D",
+ "lrm": "\u200E",
+ "lrtri": "\u22BF",
+ "lsaquo": "\u2039",
+ "lscr": "\U0001D4C1",
+ "lsh": "\u21B0",
+ "lsim": "\u2272",
+ "lsime": "\u2A8D",
+ "lsimg": "\u2A8F",
+ "lsqb": "\u005B",
+ "lsquo": "\u2018",
+ "lsquor": "\u201A",
+ "lstrok": "\u0142",
+ "lt": "\u003C",
+ "ltcc": "\u2AA6",
+ "ltcir": "\u2A79",
+ "ltdot": "\u22D6",
+ "lthree": "\u22CB",
+ "ltimes": "\u22C9",
+ "ltlarr": "\u2976",
+ "ltquest": "\u2A7B",
+ "ltrPar": "\u2996",
+ "ltri": "\u25C3",
+ "ltrie": "\u22B4",
+ "ltrif": "\u25C2",
+ "lurdshar": "\u294A",
+ "luruhar": "\u2966",
+ "lvertneqq": "\u2268\uFE00",
+ "lvnE": "\u2268\uFE00",
+ "mDDot": "\u223A",
+ "macr": "\u00AF",
+ "male": "\u2642",
+ "malt": "\u2720",
+ "maltese": "\u2720",
+ "map": "\u21A6",
+ "mapsto": "\u21A6",
+ "mapstodown": "\u21A7",
+ "mapstoleft": "\u21A4",
+ "mapstoup": "\u21A5",
+ "marker": "\u25AE",
+ "mcomma": "\u2A29",
+ "mcy": "\u043C",
+ "mdash": "\u2014",
+ "measuredangle": "\u2221",
+ "mfr": "\U0001D52A",
+ "mho": "\u2127",
+ "micro": "\u00B5",
+ "mid": "\u2223",
+ "midast": "\u002A",
+ "midcir": "\u2AF0",
+ "middot": "\u00B7",
+ "minus": "\u2212",
+ "minusb": "\u229F",
+ "minusd": "\u2238",
+ "minusdu": "\u2A2A",
+ "mlcp": "\u2ADB",
+ "mldr": "\u2026",
+ "mnplus": "\u2213",
+ "models": "\u22A7",
+ "mopf": "\U0001D55E",
+ "mp": "\u2213",
+ "mscr": "\U0001D4C2",
+ "mstpos": "\u223E",
+ "mu": "\u03BC",
+ "multimap": "\u22B8",
+ "mumap": "\u22B8",
+ "nGg": "\u22D9\u0338",
+ "nGt": "\u226B\u20D2",
+ "nGtv": "\u226B\u0338",
+ "nLeftarrow": "\u21CD",
+ "nLeftrightarrow": "\u21CE",
+ "nLl": "\u22D8\u0338",
+ "nLt": "\u226A\u20D2",
+ "nLtv": "\u226A\u0338",
+ "nRightarrow": "\u21CF",
+ "nVDash": "\u22AF",
+ "nVdash": "\u22AE",
+ "nabla": "\u2207",
+ "nacute": "\u0144",
+ "nang": "\u2220\u20D2",
+ "nap": "\u2249",
+ "napE": "\u2A70\u0338",
+ "napid": "\u224B\u0338",
+ "napos": "\u0149",
+ "napprox": "\u2249",
+ "natur": "\u266E",
+ "natural": "\u266E",
+ "naturals": "\u2115",
+ "nbsp": "\u00A0",
+ "nbump": "\u224E\u0338",
+ "nbumpe": "\u224F\u0338",
+ "ncap": "\u2A43",
+ "ncaron": "\u0148",
+ "ncedil": "\u0146",
+ "ncong": "\u2247",
+ "ncongdot": "\u2A6D\u0338",
+ "ncup": "\u2A42",
+ "ncy": "\u043D",
+ "ndash": "\u2013",
+ "ne": "\u2260",
+ "neArr": "\u21D7",
+ "nearhk": "\u2924",
+ "nearr": "\u2197",
+ "nearrow": "\u2197",
+ "nedot": "\u2250\u0338",
+ "nequiv": "\u2262",
+ "nesear": "\u2928",
+ "nesim": "\u2242\u0338",
+ "nexist": "\u2204",
+ "nexists": "\u2204",
+ "nfr": "\U0001D52B",
+ "ngE": "\u2267\u0338",
+ "nge": "\u2271",
+ "ngeq": "\u2271",
+ "ngeqq": "\u2267\u0338",
+ "ngeqslant": "\u2A7E\u0338",
+ "nges": "\u2A7E\u0338",
+ "ngsim": "\u2275",
+ "ngt": "\u226F",
+ "ngtr": "\u226F",
+ "nhArr": "\u21CE",
+ "nharr": "\u21AE",
+ "nhpar": "\u2AF2",
+ "ni": "\u220B",
+ "nis": "\u22FC",
+ "nisd": "\u22FA",
+ "niv": "\u220B",
+ "njcy": "\u045A",
+ "nlArr": "\u21CD",
+ "nlE": "\u2266\u0338",
+ "nlarr": "\u219A",
+ "nldr": "\u2025",
+ "nle": "\u2270",
+ "nleftarrow": "\u219A",
+ "nleftrightarrow": "\u21AE",
+ "nleq": "\u2270",
+ "nleqq": "\u2266\u0338",
+ "nleqslant": "\u2A7D\u0338",
+ "nles": "\u2A7D\u0338",
+ "nless": "\u226E",
+ "nlsim": "\u2274",
+ "nlt": "\u226E",
+ "nltri": "\u22EA",
+ "nltrie": "\u22EC",
+ "nmid": "\u2224",
+ "nopf": "\U0001D55F",
+ "not": "\u00AC",
+ "notin": "\u2209",
+ "notinE": "\u22F9\u0338",
+ "notindot": "\u22F5\u0338",
+ "notinva": "\u2209",
+ "notinvb": "\u22F7",
+ "notinvc": "\u22F6",
+ "notni": "\u220C",
+ "notniva": "\u220C",
+ "notnivb": "\u22FE",
+ "notnivc": "\u22FD",
+ "npar": "\u2226",
+ "nparallel": "\u2226",
+ "nparsl": "\u2AFD\u20E5",
+ "npart": "\u2202\u0338",
+ "npolint": "\u2A14",
+ "npr": "\u2280",
+ "nprcue": "\u22E0",
+ "npre": "\u2AAF\u0338",
+ "nprec": "\u2280",
+ "npreceq": "\u2AAF\u0338",
+ "nrArr": "\u21CF",
+ "nrarr": "\u219B",
+ "nrarrc": "\u2933\u0338",
+ "nrarrw": "\u219D\u0338",
+ "nrightarrow": "\u219B",
+ "nrtri": "\u22EB",
+ "nrtrie": "\u22ED",
+ "nsc": "\u2281",
+ "nsccue": "\u22E1",
+ "nsce": "\u2AB0\u0338",
+ "nscr": "\U0001D4C3",
+ "nshortmid": "\u2224",
+ "nshortparallel": "\u2226",
+ "nsim": "\u2241",
+ "nsime": "\u2244",
+ "nsimeq": "\u2244",
+ "nsmid": "\u2224",
+ "nspar": "\u2226",
+ "nsqsube": "\u22E2",
+ "nsqsupe": "\u22E3",
+ "nsub": "\u2284",
+ "nsubE": "\u2AC5\u0338",
+ "nsube": "\u2288",
+ "nsubset": "\u2282\u20D2",
+ "nsubseteq": "\u2288",
+ "nsubseteqq": "\u2AC5\u0338",
+ "nsucc": "\u2281",
+ "nsucceq": "\u2AB0\u0338",
+ "nsup": "\u2285",
+ "nsupE": "\u2AC6\u0338",
+ "nsupe": "\u2289",
+ "nsupset": "\u2283\u20D2",
+ "nsupseteq": "\u2289",
+ "nsupseteqq": "\u2AC6\u0338",
+ "ntgl": "\u2279",
+ "ntilde": "\u00F1",
+ "ntlg": "\u2278",
+ "ntriangleleft": "\u22EA",
+ "ntrianglelefteq": "\u22EC",
+ "ntriangleright": "\u22EB",
+ "ntrianglerighteq": "\u22ED",
+ "nu": "\u03BD",
+ "num": "\u0023",
+ "numero": "\u2116",
+ "numsp": "\u2007",
+ "nvDash": "\u22AD",
+ "nvHarr": "\u2904",
+ "nvap": "\u224D\u20D2",
+ "nvdash": "\u22AC",
+ "nvge": "\u2265\u20D2",
+ "nvgt": "\u003E\u20D2",
+ "nvinfin": "\u29DE",
+ "nvlArr": "\u2902",
+ "nvle": "\u2264\u20D2",
+ "nvlt": "\u003C\u20D2",
+ "nvltrie": "\u22B4\u20D2",
+ "nvrArr": "\u2903",
+ "nvrtrie": "\u22B5\u20D2",
+ "nvsim": "\u223C\u20D2",
+ "nwArr": "\u21D6",
+ "nwarhk": "\u2923",
+ "nwarr": "\u2196",
+ "nwarrow": "\u2196",
+ "nwnear": "\u2927",
+ "oS": "\u24C8",
+ "oacute": "\u00F3",
+ "oast": "\u229B",
+ "ocir": "\u229A",
+ "ocirc": "\u00F4",
+ "ocy": "\u043E",
+ "odash": "\u229D",
+ "odblac": "\u0151",
+ "odiv": "\u2A38",
+ "odot": "\u2299",
+ "odsold": "\u29BC",
+ "oelig": "\u0153",
+ "ofcir": "\u29BF",
+ "ofr": "\U0001D52C",
+ "ogon": "\u02DB",
+ "ograve": "\u00F2",
+ "ogt": "\u29C1",
+ "ohbar": "\u29B5",
+ "ohm": "\u03A9",
+ "oint": "\u222E",
+ "olarr": "\u21BA",
+ "olcir": "\u29BE",
+ "olcross": "\u29BB",
+ "oline": "\u203E",
+ "olt": "\u29C0",
+ "omacr": "\u014D",
+ "omega": "\u03C9",
+ "omicron": "\u03BF",
+ "omid": "\u29B6",
+ "ominus": "\u2296",
+ "oopf": "\U0001D560",
+ "opar": "\u29B7",
+ "operp": "\u29B9",
+ "oplus": "\u2295",
+ "or": "\u2228",
+ "orarr": "\u21BB",
+ "ord": "\u2A5D",
+ "order": "\u2134",
+ "orderof": "\u2134",
+ "ordf": "\u00AA",
+ "ordm": "\u00BA",
+ "origof": "\u22B6",
+ "oror": "\u2A56",
+ "orslope": "\u2A57",
+ "orv": "\u2A5B",
+ "oscr": "\u2134",
+ "oslash": "\u00F8",
+ "osol": "\u2298",
+ "otilde": "\u00F5",
+ "otimes": "\u2297",
+ "otimesas": "\u2A36",
+ "ouml": "\u00F6",
+ "ovbar": "\u233D",
+ "par": "\u2225",
+ "para": "\u00B6",
+ "parallel": "\u2225",
+ "parsim": "\u2AF3",
+ "parsl": "\u2AFD",
+ "part": "\u2202",
+ "pcy": "\u043F",
+ "percnt": "\u0025",
+ "period": "\u002E",
+ "permil": "\u2030",
+ "perp": "\u22A5",
+ "pertenk": "\u2031",
+ "pfr": "\U0001D52D",
+ "phi": "\u03C6",
+ "phiv": "\u03D5",
+ "phmmat": "\u2133",
+ "phone": "\u260E",
+ "pi": "\u03C0",
+ "pitchfork": "\u22D4",
+ "piv": "\u03D6",
+ "planck": "\u210F",
+ "planckh": "\u210E",
+ "plankv": "\u210F",
+ "plus": "\u002B",
+ "plusacir": "\u2A23",
+ "plusb": "\u229E",
+ "pluscir": "\u2A22",
+ "plusdo": "\u2214",
+ "plusdu": "\u2A25",
+ "pluse": "\u2A72",
+ "plusmn": "\u00B1",
+ "plussim": "\u2A26",
+ "plustwo": "\u2A27",
+ "pm": "\u00B1",
+ "pointint": "\u2A15",
+ "popf": "\U0001D561",
+ "pound": "\u00A3",
+ "pr": "\u227A",
+ "prE": "\u2AB3",
+ "prap": "\u2AB7",
+ "prcue": "\u227C",
+ "pre": "\u2AAF",
+ "prec": "\u227A",
+ "precapprox": "\u2AB7",
+ "preccurlyeq": "\u227C",
+ "preceq": "\u2AAF",
+ "precnapprox": "\u2AB9",
+ "precneqq": "\u2AB5",
+ "precnsim": "\u22E8",
+ "precsim": "\u227E",
+ "prime": "\u2032",
+ "primes": "\u2119",
+ "prnE": "\u2AB5",
+ "prnap": "\u2AB9",
+ "prnsim": "\u22E8",
+ "prod": "\u220F",
+ "profalar": "\u232E",
+ "profline": "\u2312",
+ "profsurf": "\u2313",
+ "prop": "\u221D",
+ "propto": "\u221D",
+ "prsim": "\u227E",
+ "prurel": "\u22B0",
+ "pscr": "\U0001D4C5",
+ "psi": "\u03C8",
+ "puncsp": "\u2008",
+ "qfr": "\U0001D52E",
+ "qint": "\u2A0C",
+ "qopf": "\U0001D562",
+ "qprime": "\u2057",
+ "qscr": "\U0001D4C6",
+ "quaternions": "\u210D",
+ "quatint": "\u2A16",
+ "quest": "\u003F",
+ "questeq": "\u225F",
+ "quot": "\u0022",
+ "rAarr": "\u21DB",
+ "rArr": "\u21D2",
+ "rAtail": "\u291C",
+ "rBarr": "\u290F",
+ "rHar": "\u2964",
+ "race": "\u223D\u0331",
+ "racute": "\u0155",
+ "radic": "\u221A",
+ "raemptyv": "\u29B3",
+ "rang": "\u27E9",
+ "rangd": "\u2992",
+ "range": "\u29A5",
+ "rangle": "\u27E9",
+ "raquo": "\u00BB",
+ "rarr": "\u2192",
+ "rarrap": "\u2975",
+ "rarrb": "\u21E5",
+ "rarrbfs": "\u2920",
+ "rarrc": "\u2933",
+ "rarrfs": "\u291E",
+ "rarrhk": "\u21AA",
+ "rarrlp": "\u21AC",
+ "rarrpl": "\u2945",
+ "rarrsim": "\u2974",
+ "rarrtl": "\u21A3",
+ "rarrw": "\u219D",
+ "ratail": "\u291A",
+ "ratio": "\u2236",
+ "rationals": "\u211A",
+ "rbarr": "\u290D",
+ "rbbrk": "\u2773",
+ "rbrace": "\u007D",
+ "rbrack": "\u005D",
+ "rbrke": "\u298C",
+ "rbrksld": "\u298E",
+ "rbrkslu": "\u2990",
+ "rcaron": "\u0159",
+ "rcedil": "\u0157",
+ "rceil": "\u2309",
+ "rcub": "\u007D",
+ "rcy": "\u0440",
+ "rdca": "\u2937",
+ "rdldhar": "\u2969",
+ "rdquo": "\u201D",
+ "rdquor": "\u201D",
+ "rdsh": "\u21B3",
+ "real": "\u211C",
+ "realine": "\u211B",
+ "realpart": "\u211C",
+ "reals": "\u211D",
+ "rect": "\u25AD",
+ "reg": "\u00AE",
+ "rfisht": "\u297D",
+ "rfloor": "\u230B",
+ "rfr": "\U0001D52F",
+ "rhard": "\u21C1",
+ "rharu": "\u21C0",
+ "rharul": "\u296C",
+ "rho": "\u03C1",
+ "rhov": "\u03F1",
+ "rightarrow": "\u2192",
+ "rightarrowtail": "\u21A3",
+ "rightharpoondown": "\u21C1",
+ "rightharpoonup": "\u21C0",
+ "rightleftarrows": "\u21C4",
+ "rightleftharpoons": "\u21CC",
+ "rightrightarrows": "\u21C9",
+ "rightsquigarrow": "\u219D",
+ "rightthreetimes": "\u22CC",
+ "ring": "\u02DA",
+ "risingdotseq": "\u2253",
+ "rlarr": "\u21C4",
+ "rlhar": "\u21CC",
+ "rlm": "\u200F",
+ "rmoust": "\u23B1",
+ "rmoustache": "\u23B1",
+ "rnmid": "\u2AEE",
+ "roang": "\u27ED",
+ "roarr": "\u21FE",
+ "robrk": "\u27E7",
+ "ropar": "\u2986",
+ "ropf": "\U0001D563",
+ "roplus": "\u2A2E",
+ "rotimes": "\u2A35",
+ "rpar": "\u0029",
+ "rpargt": "\u2994",
+ "rppolint": "\u2A12",
+ "rrarr": "\u21C9",
+ "rsaquo": "\u203A",
+ "rscr": "\U0001D4C7",
+ "rsh": "\u21B1",
+ "rsqb": "\u005D",
+ "rsquo": "\u2019",
+ "rsquor": "\u2019",
+ "rthree": "\u22CC",
+ "rtimes": "\u22CA",
+ "rtri": "\u25B9",
+ "rtrie": "\u22B5",
+ "rtrif": "\u25B8",
+ "rtriltri": "\u29CE",
+ "ruluhar": "\u2968",
+ "rx": "\u211E",
+ "sacute": "\u015B",
+ "sbquo": "\u201A",
+ "sc": "\u227B",
+ "scE": "\u2AB4",
+ "scap": "\u2AB8",
+ "scaron": "\u0161",
+ "sccue": "\u227D",
+ "sce": "\u2AB0",
+ "scedil": "\u015F",
+ "scirc": "\u015D",
+ "scnE": "\u2AB6",
+ "scnap": "\u2ABA",
+ "scnsim": "\u22E9",
+ "scpolint": "\u2A13",
+ "scsim": "\u227F",
+ "scy": "\u0441",
+ "sdot": "\u22C5",
+ "sdotb": "\u22A1",
+ "sdote": "\u2A66",
+ "seArr": "\u21D8",
+ "searhk": "\u2925",
+ "searr": "\u2198",
+ "searrow": "\u2198",
+ "sect": "\u00A7",
+ "semi": "\u003B",
+ "seswar": "\u2929",
+ "setminus": "\u2216",
+ "setmn": "\u2216",
+ "sext": "\u2736",
+ "sfr": "\U0001D530",
+ "sfrown": "\u2322",
+ "sharp": "\u266F",
+ "shchcy": "\u0449",
+ "shcy": "\u0448",
+ "shortmid": "\u2223",
+ "shortparallel": "\u2225",
+ "shy": "\u00AD",
+ "sigma": "\u03C3",
+ "sigmaf": "\u03C2",
+ "sigmav": "\u03C2",
+ "sim": "\u223C",
+ "simdot": "\u2A6A",
+ "sime": "\u2243",
+ "simeq": "\u2243",
+ "simg": "\u2A9E",
+ "simgE": "\u2AA0",
+ "siml": "\u2A9D",
+ "simlE": "\u2A9F",
+ "simne": "\u2246",
+ "simplus": "\u2A24",
+ "simrarr": "\u2972",
+ "slarr": "\u2190",
+ "smallsetminus": "\u2216",
+ "smashp": "\u2A33",
+ "smeparsl": "\u29E4",
+ "smid": "\u2223",
+ "smile": "\u2323",
+ "smt": "\u2AAA",
+ "smte": "\u2AAC",
+ "smtes": "\u2AAC\uFE00",
+ "softcy": "\u044C",
+ "sol": "\u002F",
+ "solb": "\u29C4",
+ "solbar": "\u233F",
+ "sopf": "\U0001D564",
+ "spades": "\u2660",
+ "spadesuit": "\u2660",
+ "spar": "\u2225",
+ "sqcap": "\u2293",
+ "sqcaps": "\u2293\uFE00",
+ "sqcup": "\u2294",
+ "sqcups": "\u2294\uFE00",
+ "sqsub": "\u228F",
+ "sqsube": "\u2291",
+ "sqsubset": "\u228F",
+ "sqsubseteq": "\u2291",
+ "sqsup": "\u2290",
+ "sqsupe": "\u2292",
+ "sqsupset": "\u2290",
+ "sqsupseteq": "\u2292",
+ "squ": "\u25A1",
+ "square": "\u25A1",
+ "squarf": "\u25AA",
+ "squf": "\u25AA",
+ "srarr": "\u2192",
+ "sscr": "\U0001D4C8",
+ "ssetmn": "\u2216",
+ "ssmile": "\u2323",
+ "sstarf": "\u22C6",
+ "star": "\u2606",
+ "starf": "\u2605",
+ "straightepsilon": "\u03F5",
+ "straightphi": "\u03D5",
+ "strns": "\u00AF",
+ "sub": "\u2282",
+ "subE": "\u2AC5",
+ "subdot": "\u2ABD",
+ "sube": "\u2286",
+ "subedot": "\u2AC3",
+ "submult": "\u2AC1",
+ "subnE": "\u2ACB",
+ "subne": "\u228A",
+ "subplus": "\u2ABF",
+ "subrarr": "\u2979",
+ "subset": "\u2282",
+ "subseteq": "\u2286",
+ "subseteqq": "\u2AC5",
+ "subsetneq": "\u228A",
+ "subsetneqq": "\u2ACB",
+ "subsim": "\u2AC7",
+ "subsub": "\u2AD5",
+ "subsup": "\u2AD3",
+ "succ": "\u227B",
+ "succapprox": "\u2AB8",
+ "succcurlyeq": "\u227D",
+ "succeq": "\u2AB0",
+ "succnapprox": "\u2ABA",
+ "succneqq": "\u2AB6",
+ "succnsim": "\u22E9",
+ "succsim": "\u227F",
+ "sum": "\u2211",
+ "sung": "\u266A",
+ "sup": "\u2283",
+ "sup1": "\u00B9",
+ "sup2": "\u00B2",
+ "sup3": "\u00B3",
+ "supE": "\u2AC6",
+ "supdot": "\u2ABE",
+ "supdsub": "\u2AD8",
+ "supe": "\u2287",
+ "supedot": "\u2AC4",
+ "suphsol": "\u27C9",
+ "suphsub": "\u2AD7",
+ "suplarr": "\u297B",
+ "supmult": "\u2AC2",
+ "supnE": "\u2ACC",
+ "supne": "\u228B",
+ "supplus": "\u2AC0",
+ "supset": "\u2283",
+ "supseteq": "\u2287",
+ "supseteqq": "\u2AC6",
+ "supsetneq": "\u228B",
+ "supsetneqq": "\u2ACC",
+ "supsim": "\u2AC8",
+ "supsub": "\u2AD4",
+ "supsup": "\u2AD6",
+ "swArr": "\u21D9",
+ "swarhk": "\u2926",
+ "swarr": "\u2199",
+ "swarrow": "\u2199",
+ "swnwar": "\u292A",
+ "szlig": "\u00DF",
+ "target": "\u2316",
+ "tau": "\u03C4",
+ "tbrk": "\u23B4",
+ "tcaron": "\u0165",
+ "tcedil": "\u0163",
+ "tcy": "\u0442",
+ "tdot": "\u20DB",
+ "telrec": "\u2315",
+ "tfr": "\U0001D531",
+ "there4": "\u2234",
+ "therefore": "\u2234",
+ "theta": "\u03B8",
+ "thetasym": "\u03D1",
+ "thetav": "\u03D1",
+ "thickapprox": "\u2248",
+ "thicksim": "\u223C",
+ "thinsp": "\u2009",
+ "thkap": "\u2248",
+ "thksim": "\u223C",
+ "thorn": "\u00FE",
+ "tilde": "\u02DC",
+ "times": "\u00D7",
+ "timesb": "\u22A0",
+ "timesbar": "\u2A31",
+ "timesd": "\u2A30",
+ "tint": "\u222D",
+ "toea": "\u2928",
+ "top": "\u22A4",
+ "topbot": "\u2336",
+ "topcir": "\u2AF1",
+ "topf": "\U0001D565",
+ "topfork": "\u2ADA",
+ "tosa": "\u2929",
+ "tprime": "\u2034",
+ "trade": "\u2122",
+ "triangle": "\u25B5",
+ "triangledown": "\u25BF",
+ "triangleleft": "\u25C3",
+ "trianglelefteq": "\u22B4",
+ "triangleq": "\u225C",
+ "triangleright": "\u25B9",
+ "trianglerighteq": "\u22B5",
+ "tridot": "\u25EC",
+ "trie": "\u225C",
+ "triminus": "\u2A3A",
+ "triplus": "\u2A39",
+ "trisb": "\u29CD",
+ "tritime": "\u2A3B",
+ "trpezium": "\u23E2",
+ "tscr": "\U0001D4C9",
+ "tscy": "\u0446",
+ "tshcy": "\u045B",
+ "tstrok": "\u0167",
+ "twixt": "\u226C",
+ "twoheadleftarrow": "\u219E",
+ "twoheadrightarrow": "\u21A0",
+ "uArr": "\u21D1",
+ "uHar": "\u2963",
+ "uacute": "\u00FA",
+ "uarr": "\u2191",
+ "ubrcy": "\u045E",
+ "ubreve": "\u016D",
+ "ucirc": "\u00FB",
+ "ucy": "\u0443",
+ "udarr": "\u21C5",
+ "udblac": "\u0171",
+ "udhar": "\u296E",
+ "ufisht": "\u297E",
+ "ufr": "\U0001D532",
+ "ugrave": "\u00F9",
+ "uharl": "\u21BF",
+ "uharr": "\u21BE",
+ "uhblk": "\u2580",
+ "ulcorn": "\u231C",
+ "ulcorner": "\u231C",
+ "ulcrop": "\u230F",
+ "ultri": "\u25F8",
+ "umacr": "\u016B",
+ "uml": "\u00A8",
+ "uogon": "\u0173",
+ "uopf": "\U0001D566",
+ "uparrow": "\u2191",
+ "updownarrow": "\u2195",
+ "upharpoonleft": "\u21BF",
+ "upharpoonright": "\u21BE",
+ "uplus": "\u228E",
+ "upsi": "\u03C5",
+ "upsih": "\u03D2",
+ "upsilon": "\u03C5",
+ "upuparrows": "\u21C8",
+ "urcorn": "\u231D",
+ "urcorner": "\u231D",
+ "urcrop": "\u230E",
+ "uring": "\u016F",
+ "urtri": "\u25F9",
+ "uscr": "\U0001D4CA",
+ "utdot": "\u22F0",
+ "utilde": "\u0169",
+ "utri": "\u25B5",
+ "utrif": "\u25B4",
+ "uuarr": "\u21C8",
+ "uuml": "\u00FC",
+ "uwangle": "\u29A7",
+ "vArr": "\u21D5",
+ "vBar": "\u2AE8",
+ "vBarv": "\u2AE9",
+ "vDash": "\u22A8",
+ "vangrt": "\u299C",
+ "varepsilon": "\u03F5",
+ "varkappa": "\u03F0",
+ "varnothing": "\u2205",
+ "varphi": "\u03D5",
+ "varpi": "\u03D6",
+ "varpropto": "\u221D",
+ "varr": "\u2195",
+ "varrho": "\u03F1",
+ "varsigma": "\u03C2",
+ "varsubsetneq": "\u228A\uFE00",
+ "varsubsetneqq": "\u2ACB\uFE00",
+ "varsupsetneq": "\u228B\uFE00",
+ "varsupsetneqq": "\u2ACC\uFE00",
+ "vartheta": "\u03D1",
+ "vartriangleleft": "\u22B2",
+ "vartriangleright": "\u22B3",
+ "vcy": "\u0432",
+ "vdash": "\u22A2",
+ "vee": "\u2228",
+ "veebar": "\u22BB",
+ "veeeq": "\u225A",
+ "vellip": "\u22EE",
+ "verbar": "\u007C",
+ "vert": "\u007C",
+ "vfr": "\U0001D533",
+ "vltri": "\u22B2",
+ "vnsub": "\u2282\u20D2",
+ "vnsup": "\u2283\u20D2",
+ "vopf": "\U0001D567",
+ "vprop": "\u221D",
+ "vrtri": "\u22B3",
+ "vscr": "\U0001D4CB",
+ "vsubnE": "\u2ACB\uFE00",
+ "vsubne": "\u228A\uFE00",
+ "vsupnE": "\u2ACC\uFE00",
+ "vsupne": "\u228B\uFE00",
+ "vzigzag": "\u299A",
+ "wcirc": "\u0175",
+ "wedbar": "\u2A5F",
+ "wedge": "\u2227",
+ "wedgeq": "\u2259",
+ "weierp": "\u2118",
+ "wfr": "\U0001D534",
+ "wopf": "\U0001D568",
+ "wp": "\u2118",
+ "wr": "\u2240",
+ "wreath": "\u2240",
+ "wscr": "\U0001D4CC",
+ "xcap": "\u22C2",
+ "xcirc": "\u25EF",
+ "xcup": "\u22C3",
+ "xdtri": "\u25BD",
+ "xfr": "\U0001D535",
+ "xhArr": "\u27FA",
+ "xharr": "\u27F7",
+ "xi": "\u03BE",
+ "xlArr": "\u27F8",
+ "xlarr": "\u27F5",
+ "xmap": "\u27FC",
+ "xnis": "\u22FB",
+ "xodot": "\u2A00",
+ "xopf": "\U0001D569",
+ "xoplus": "\u2A01",
+ "xotime": "\u2A02",
+ "xrArr": "\u27F9",
+ "xrarr": "\u27F6",
+ "xscr": "\U0001D4CD",
+ "xsqcup": "\u2A06",
+ "xuplus": "\u2A04",
+ "xutri": "\u25B3",
+ "xvee": "\u22C1",
+ "xwedge": "\u22C0",
+ "yacute": "\u00FD",
+ "yacy": "\u044F",
+ "ycirc": "\u0177",
+ "ycy": "\u044B",
+ "yen": "\u00A5",
+ "yfr": "\U0001D536",
+ "yicy": "\u0457",
+ "yopf": "\U0001D56A",
+ "yscr": "\U0001D4CE",
+ "yucy": "\u044E",
+ "yuml": "\u00FF",
+ "zacute": "\u017A",
+ "zcaron": "\u017E",
+ "zcy": "\u0437",
+ "zdot": "\u017C",
+ "zeetrf": "\u2128",
+ "zeta": "\u03B6",
+ "zfr": "\U0001D537",
+ "zhcy": "\u0436",
+ "zigrarr": "\u21DD",
+ "zopf": "\U0001D56B",
+ "zscr": "\U0001D4CF",
+ "zwj": "\u200D",
+ "zwnj": "\u200C",
+}
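
The table above backs named character references such as &amp; and &copy;. Below is a hypothetical in-package sketch of a lookup; the real callers live elsewhere in this package and strip the '&' and ';' delimiters before consulting the map.

package markdown // hypothetical sibling test file, not part of the vendored sources

import "testing"

func TestEntityLookupSketch(t *testing.T) {
	if got := htmlEntities["amp"]; got != "\u0026" {
		t.Fatalf("amp = %q", got)
	}
	// Some entities expand to more than one code point.
	if got := htmlEntities["NotSubset"]; got != "\u2282\u20D2" {
		t.Fatalf("NotSubset = %q", got)
	}
	// Unknown names are simply absent; callers check the second return value.
	if _, ok := htmlEntities["bogus"]; ok {
		t.Fatal("unexpected entry")
	}
}
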
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/indented_code.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/indented_code.go
new file mode 100644
index 00000000..a89ee6c7
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/indented_code.go
@@ -0,0 +1,98 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type IndentedCodeLine struct {
+ Indentation int
+ Range Range
+}
+
+type IndentedCode struct {
+ blockBase
+ markdown string
+
+ RawCode []IndentedCodeLine
+}
+
+func (b *IndentedCode) Code() (result string) {
+ for _, code := range b.RawCode {
+ result += strings.Repeat(" ", code.Indentation) + b.markdown[code.Range.Position:code.Range.End]
+ }
+ return
+}
+
+func (b *IndentedCode) Continuation(indentation int, r Range) *continuation {
+ if indentation >= 4 {
+ return &continuation{
+ Indentation: indentation - 4,
+ Remaining: r,
+ }
+ }
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return &continuation{
+ Remaining: r,
+ }
+ }
+ return nil
+}
+
+func (b *IndentedCode) AddLine(indentation int, r Range) bool {
+ b.RawCode = append(b.RawCode, IndentedCodeLine{
+ Indentation: indentation,
+ Range: r,
+ })
+ return true
+}
+
+func (b *IndentedCode) Close() {
+ for {
+ last := b.RawCode[len(b.RawCode)-1]
+ s := b.markdown[last.Range.Position:last.Range.End]
+ if strings.TrimRight(s, "\r\n") == "" {
+ b.RawCode = b.RawCode[:len(b.RawCode)-1]
+ } else {
+ break
+ }
+ }
+}
+
+func (b *IndentedCode) AllowsBlockStarts() bool {
+ return false
+}
+
+func indentedCodeStart(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ if len(unmatchedBlocks) > 0 {
+ if _, ok := unmatchedBlocks[len(unmatchedBlocks)-1].(*Paragraph); ok {
+ return nil
+ }
+ } else if len(matchedBlocks) > 0 {
+ if _, ok := matchedBlocks[len(matchedBlocks)-1].(*Paragraph); ok {
+ return nil
+ }
+ }
+
+ if indentation < 4 {
+ return nil
+ }
+
+ s := markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return nil
+ }
+
+ return []Block{
+ &IndentedCode{
+ markdown: markdown,
+ RawCode: []IndentedCodeLine{{
+ Indentation: indentation - 4,
+ Range: r,
+ }},
+ },
+ }
+}
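
A worked example of the four-column rule above, as a hypothetical in-package sketch. It assumes, consistently with Continuation, that the Range handed to indentedCodeStart starts after the leading whitespace already counted in indentation.

package markdown // hypothetical sibling test file, not part of the vendored sources

import "testing"

func TestIndentedCodeSketch(t *testing.T) {
	src := "      fmt.Println(x)\n" // six columns of indentation
	blocks := indentedCodeStart(src, 6, Range{Position: 6, End: len(src)}, nil, nil)
	// Four columns are consumed by the block marker; Code() re-adds the remaining 6-4 = 2.
	if got := blocks[0].(*IndentedCode).Code(); got != "  fmt.Println(x)\n" {
		t.Fatalf("got %q", got)
	}
}
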
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inlines.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inlines.go
new file mode 100644
index 00000000..43dee3bd
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inlines.go
@@ -0,0 +1,663 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "container/list"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+type Inline interface {
+ IsInline() bool
+}
+
+type inlineBase struct{}
+
+func (inlineBase) IsInline() bool { return true }
+
+type Text struct {
+ inlineBase
+
+ Text string
+ Range Range
+}
+
+type CodeSpan struct {
+ inlineBase
+
+ Code string
+}
+
+type HardLineBreak struct {
+ inlineBase
+}
+
+type SoftLineBreak struct {
+ inlineBase
+}
+
+type InlineLinkOrImage struct {
+ inlineBase
+
+ Children []Inline
+
+ RawDestination Range
+
+ markdown string
+ rawTitle string
+}
+
+func (i *InlineLinkOrImage) Destination() string {
+ return Unescape(i.markdown[i.RawDestination.Position:i.RawDestination.End])
+}
+
+func (i *InlineLinkOrImage) Title() string {
+ return Unescape(i.rawTitle)
+}
+
+type InlineLink struct {
+ InlineLinkOrImage
+}
+
+type InlineImage struct {
+ InlineLinkOrImage
+}
+
+type ReferenceLinkOrImage struct {
+ inlineBase
+ *ReferenceDefinition
+
+ Children []Inline
+}
+
+type ReferenceLink struct {
+ ReferenceLinkOrImage
+}
+
+type ReferenceImage struct {
+ ReferenceLinkOrImage
+}
+
+type Autolink struct {
+ inlineBase
+
+ Children []Inline
+
+ RawDestination Range
+
+ markdown string
+}
+
+func (i *Autolink) Destination() string {
+ destination := Unescape(i.markdown[i.RawDestination.Position:i.RawDestination.End])
+
+ if strings.HasPrefix(destination, "www") {
+ destination = "http://" + destination
+ }
+
+ return destination
+}
+
+type delimiterType int
+
+const (
+ linkOpeningDelimiter delimiterType = iota
+ imageOpeningDelimiter
+)
+
+type delimiter struct {
+ Type delimiterType
+ IsInactive bool
+ TextNode int
+ Range Range
+}
+
+type inlineParser struct {
+ markdown string
+ ranges []Range
+ referenceDefinitions []*ReferenceDefinition
+
+ raw string
+ position int
+ inlines []Inline
+ delimiterStack *list.List
+}
+
+func newInlineParser(markdown string, ranges []Range, referenceDefinitions []*ReferenceDefinition) *inlineParser {
+ return &inlineParser{
+ markdown: markdown,
+ ranges: ranges,
+ referenceDefinitions: referenceDefinitions,
+ delimiterStack: list.New(),
+ }
+}
+
+func (p *inlineParser) parseBackticks() {
+ count := 1
+ for i := p.position + 1; i < len(p.raw) && p.raw[i] == '`'; i++ {
+ count++
+ }
+ opening := p.raw[p.position : p.position+count]
+ search := p.position + count
+ for search < len(p.raw) {
+ end := strings.Index(p.raw[search:], opening)
+ if end == -1 {
+ break
+ }
+ if search+end+count < len(p.raw) && p.raw[search+end+count] == '`' {
+ search += end + count
+ for search < len(p.raw) && p.raw[search] == '`' {
+ search++
+ }
+ continue
+ }
+ code := strings.Join(strings.Fields(p.raw[p.position+count:search+end]), " ")
+ p.position = search + end + count
+ p.inlines = append(p.inlines, &CodeSpan{
+ Code: code,
+ })
+ return
+ }
+ p.position += len(opening)
+ absPos := relativeToAbsolutePosition(p.ranges, p.position-len(opening))
+ p.inlines = append(p.inlines, &Text{
+ Text: opening,
+ Range: Range{absPos, absPos + len(opening)},
+ })
+}
+
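
The whitespace normalization that parseBackticks applies to code spans can be reproduced in isolation with the standard library. This standalone sketch slices a hard-coded string instead of locating the closing backtick run with strings.Index as the parser does.

package main

import (
	"fmt"
	"strings"
)

func main() {
	raw := "`` foo ` bar  baz ``"
	inner := raw[2 : len(raw)-2] // text between the equal-length backtick runs
	// Interior whitespace runs collapse to single spaces, exactly as in parseBackticks.
	fmt.Println(strings.Join(strings.Fields(inner), " ")) // foo ` bar baz
}
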
+func (p *inlineParser) parseLineEnding() {
+ if p.position >= 1 && p.raw[p.position-1] == '\t' {
+ p.inlines = append(p.inlines, &HardLineBreak{})
+ } else if p.position >= 2 && p.raw[p.position-1] == ' ' && (p.raw[p.position-2] == '\t' || p.raw[p.position-1] == ' ') {
+ p.inlines = append(p.inlines, &HardLineBreak{})
+ } else {
+ p.inlines = append(p.inlines, &SoftLineBreak{})
+ }
+ p.position++
+ if p.position < len(p.raw) && p.raw[p.position] == '\n' {
+ p.position++
+ }
+}
+
+func (p *inlineParser) parseEscapeCharacter() {
+ if p.position+1 < len(p.raw) && isEscapableByte(p.raw[p.position+1]) {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position+1)
+ p.inlines = append(p.inlines, &Text{
+ Text: string(p.raw[p.position+1]),
+ Range: Range{absPos, absPos + len(string(p.raw[p.position+1]))},
+ })
+ p.position += 2
+ } else {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.inlines = append(p.inlines, &Text{
+ Text: `\`,
+ Range: Range{absPos, absPos + 1},
+ })
+ p.position++
+ }
+}
+
+func (p *inlineParser) parseText() {
+ if next := strings.IndexAny(p.raw[p.position:], "\r\n\\`&![]wW:"); next == -1 {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.inlines = append(p.inlines, &Text{
+ Text: strings.TrimRightFunc(p.raw[p.position:], isWhitespace),
+ Range: Range{absPos, absPos + len(p.raw[p.position:])},
+ })
+ p.position = len(p.raw)
+ } else {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ if p.raw[p.position+next] == '\r' || p.raw[p.position+next] == '\n' {
+ s := strings.TrimRightFunc(p.raw[p.position:p.position+next], isWhitespace)
+ p.inlines = append(p.inlines, &Text{
+ Text: s,
+ Range: Range{absPos, absPos + len(s)},
+ })
+ } else {
+ if next == 0 {
+ // Always read at least one character since 'w', 'W', and ':' may not actually match another
+ // type of node
+ next = 1
+ }
+
+ p.inlines = append(p.inlines, &Text{
+ Text: p.raw[p.position : p.position+next],
+ Range: Range{absPos, absPos + next},
+ })
+ }
+ p.position += next
+ }
+}
+
+func (p *inlineParser) parseLinkOrImageDelimiter() {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ if p.raw[p.position] == '[' {
+ p.inlines = append(p.inlines, &Text{
+ Text: "[",
+ Range: Range{absPos, absPos + 1},
+ })
+ p.delimiterStack.PushBack(&delimiter{
+ Type: linkOpeningDelimiter,
+ TextNode: len(p.inlines) - 1,
+ Range: Range{p.position, p.position + 1},
+ })
+ p.position++
+ } else if p.raw[p.position] == '!' && p.position+1 < len(p.raw) && p.raw[p.position+1] == '[' {
+ p.inlines = append(p.inlines, &Text{
+ Text: "![",
+ Range: Range{absPos, absPos + 2},
+ })
+ p.delimiterStack.PushBack(&delimiter{
+ Type: imageOpeningDelimiter,
+ TextNode: len(p.inlines) - 1,
+ Range: Range{p.position, p.position + 2},
+ })
+ p.position += 2
+ } else {
+ p.inlines = append(p.inlines, &Text{
+ Text: "!",
+ Range: Range{absPos, absPos + 1},
+ })
+ p.position++
+ }
+}
+
+func (p *inlineParser) peekAtInlineLinkDestinationAndTitle(position int, isImage bool) (destination, title Range, end int, ok bool) {
+ if position >= len(p.raw) || p.raw[position] != '(' {
+ return
+ }
+ position++
+
+ destinationStart := nextNonWhitespace(p.raw, position)
+ if destinationStart >= len(p.raw) {
+ return
+ } else if p.raw[destinationStart] == ')' {
+ return Range{destinationStart, destinationStart}, Range{destinationStart, destinationStart}, destinationStart + 1, true
+ }
+
+ destination, end, ok = parseLinkDestination(p.raw, destinationStart)
+ if !ok {
+ return
+ }
+ position = end
+
+ if isImage && position < len(p.raw) && isWhitespaceByte(p.raw[position]) {
+ dimensionsStart := nextNonWhitespace(p.raw, position)
+ if dimensionsStart >= len(p.raw) {
+ return
+ }
+
+ if p.raw[dimensionsStart] == '=' {
+ // Read optional image dimensions even if we don't use them
+ _, end, ok = parseImageDimensions(p.raw, dimensionsStart)
+ if !ok {
+ return
+ }
+
+ position = end
+ }
+ }
+
+ if position < len(p.raw) && isWhitespaceByte(p.raw[position]) {
+ titleStart := nextNonWhitespace(p.raw, position)
+ if titleStart >= len(p.raw) {
+ return
+ } else if p.raw[titleStart] == ')' {
+ return destination, Range{titleStart, titleStart}, titleStart + 1, true
+ }
+
+ if p.raw[titleStart] == '"' || p.raw[titleStart] == '\'' || p.raw[titleStart] == '(' {
+ title, end, ok = parseLinkTitle(p.raw, titleStart)
+ if !ok {
+ return
+ }
+ position = end
+ }
+ }
+
+ closingPosition := nextNonWhitespace(p.raw, position)
+ if closingPosition >= len(p.raw) || p.raw[closingPosition] != ')' {
+ return Range{}, Range{}, 0, false
+ }
+
+ return destination, title, closingPosition + 1, true
+}
+
+func (p *inlineParser) referenceDefinition(label string) *ReferenceDefinition {
+ clean := strings.Join(strings.Fields(label), " ")
+ for _, d := range p.referenceDefinitions {
+ if strings.EqualFold(clean, strings.Join(strings.Fields(d.Label()), " ")) {
+ return d
+ }
+ }
+ return nil
+}
+
+func (p *inlineParser) lookForLinkOrImage() {
+ for element := p.delimiterStack.Back(); element != nil; element = element.Prev() {
+ d := element.Value.(*delimiter)
+ if d.Type != imageOpeningDelimiter && d.Type != linkOpeningDelimiter {
+ continue
+ }
+ if d.IsInactive {
+ p.delimiterStack.Remove(element)
+ break
+ }
+
+ isImage := d.Type == imageOpeningDelimiter
+
+ var inline Inline
+
+ if destination, title, next, ok := p.peekAtInlineLinkDestinationAndTitle(p.position+1, isImage); ok {
+ destinationMarkdownPosition := relativeToAbsolutePosition(p.ranges, destination.Position)
+ linkOrImage := InlineLinkOrImage{
+ Children: append([]Inline(nil), p.inlines[d.TextNode+1:]...),
+ RawDestination: Range{destinationMarkdownPosition, destinationMarkdownPosition + destination.End - destination.Position},
+ markdown: p.markdown,
+ rawTitle: p.raw[title.Position:title.End],
+ }
+ if d.Type == imageOpeningDelimiter {
+ inline = &InlineImage{linkOrImage}
+ } else {
+ inline = &InlineLink{linkOrImage}
+ }
+ p.position = next
+ } else {
+ referenceLabel := ""
+ label, next, hasLinkLabel := parseLinkLabel(p.raw, p.position+1)
+ if hasLinkLabel && label.End > label.Position {
+ referenceLabel = p.raw[label.Position:label.End]
+ } else {
+ referenceLabel = p.raw[d.Range.End:p.position]
+ if !hasLinkLabel {
+ next = p.position + 1
+ }
+ }
+ if referenceLabel != "" {
+ if reference := p.referenceDefinition(referenceLabel); reference != nil {
+ linkOrImage := ReferenceLinkOrImage{
+ ReferenceDefinition: reference,
+ Children: append([]Inline(nil), p.inlines[d.TextNode+1:]...),
+ }
+ if d.Type == imageOpeningDelimiter {
+ inline = &ReferenceImage{linkOrImage}
+ } else {
+ inline = &ReferenceLink{linkOrImage}
+ }
+ p.position = next
+ }
+ }
+ }
+
+ if inline != nil {
+ if d.Type == imageOpeningDelimiter {
+ p.inlines = append(p.inlines[:d.TextNode], inline)
+ } else {
+ p.inlines = append(p.inlines[:d.TextNode], inline)
+ for inlineElement := element.Prev(); inlineElement != nil; inlineElement = inlineElement.Prev() {
+ if d := inlineElement.Value.(*delimiter); d.Type == linkOpeningDelimiter {
+ d.IsInactive = true
+ }
+ }
+ }
+ p.delimiterStack.Remove(element)
+ return
+ }
+ p.delimiterStack.Remove(element)
+ break
+ }
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.inlines = append(p.inlines, &Text{
+ Text: "]",
+ Range: Range{absPos, absPos + 1},
+ })
+ p.position++
+}
+
+func CharacterReference(ref string) string {
+ if ref == "" {
+ return ""
+ }
+ if ref[0] == '#' {
+ if len(ref) < 2 {
+ return ""
+ }
+ n := 0
+ if ref[1] == 'X' || ref[1] == 'x' {
+ if len(ref) < 3 {
+ return ""
+ }
+ for i := 2; i < len(ref); i++ {
+ if i > 9 {
+ return ""
+ }
+ d := ref[i]
+ switch {
+ case d >= '0' && d <= '9':
+ n = n*16 + int(d-'0')
+ case d >= 'a' && d <= 'f':
+ n = n*16 + 10 + int(d-'a')
+ case d >= 'A' && d <= 'F':
+ n = n*16 + 10 + int(d-'A')
+ default:
+ return ""
+ }
+ }
+ } else {
+ for i := 1; i < len(ref); i++ {
+ if i > 8 || ref[i] < '0' || ref[i] > '9' {
+ return ""
+ }
+ n = n*10 + int(ref[i]-'0')
+ }
+ }
+ c := rune(n)
+ if c == '\u0000' || !utf8.ValidRune(c) {
+ return string(unicode.ReplacementChar)
+ }
+ return string(c)
+ }
+ if entity, ok := htmlEntities[ref]; ok {
+ return entity
+ }
+ return ""
+}
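+
+// Illustrative examples (a sketch, not part of the upstream source) of how
+// CharacterReference resolves the text between '&' and ';':
+//
+//	CharacterReference("amp")   // "&"
+//	CharacterReference("#65")   // "A" (decimal reference)
+//	CharacterReference("#x41")  // "A" (hexadecimal reference)
+//	CharacterReference("bogus") // "" (unknown references yield the empty string)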
+
+func (p *inlineParser) parseCharacterReference() {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.position++
+ if semicolon := strings.IndexByte(p.raw[p.position:], ';'); semicolon == -1 {
+ p.inlines = append(p.inlines, &Text{
+ Text: "&",
+ Range: Range{absPos, absPos + 1},
+ })
+ } else if s := CharacterReference(p.raw[p.position : p.position+semicolon]); s != "" {
+ p.position += semicolon + 1
+ p.inlines = append(p.inlines, &Text{
+ Text: s,
+ Range: Range{absPos, absPos + len(s)},
+ })
+ } else {
+ p.inlines = append(p.inlines, &Text{
+ Text: "&",
+ Range: Range{absPos, absPos + 1},
+ })
+ }
+}
+
+func (p *inlineParser) parseAutolink(c rune) bool {
+ for element := p.delimiterStack.Back(); element != nil; element = element.Prev() {
+ d := element.Value.(*delimiter)
+ if !d.IsInactive {
+ return false
+ }
+ }
+
+ var link Range
+ if c == ':' {
+ var ok bool
+ link, ok = parseURLAutolink(p.raw, p.position)
+
+ if !ok {
+ return false
+ }
+
+ // Since the current position is at the colon, we have to rewind the parsing slightly so that
+ // we don't duplicate the URL scheme
+ rewind := strings.Index(p.raw[link.Position:link.End], ":")
+ if rewind != -1 {
+ lastInline := p.inlines[len(p.inlines)-1]
+ lastText, ok := lastInline.(*Text)
+
+ if !ok {
+ // This should never occur since parseURLAutolink will only return a non-empty value
+ // when the previous text ends in a valid URL protocol which would mean that the previous
+ // node is a Text node
+ return false
+ }
+
+ p.inlines = p.inlines[0 : len(p.inlines)-1]
+ p.inlines = append(p.inlines, &Text{
+ Text: lastText.Text[:len(lastText.Text)-rewind],
+ Range: Range{lastText.Range.Position, lastText.Range.End - rewind},
+ })
+ p.position -= rewind
+ }
+ } else if c == 'w' || c == 'W' {
+ var ok bool
+ link, ok = parseWWWAutolink(p.raw, p.position)
+
+ if !ok {
+ return false
+ }
+ }
+
+ linkMarkdownPosition := relativeToAbsolutePosition(p.ranges, link.Position)
+ linkRange := Range{linkMarkdownPosition, linkMarkdownPosition + link.End - link.Position}
+
+ p.inlines = append(p.inlines, &Autolink{
+ Children: []Inline{
+ &Text{
+ Text: p.raw[link.Position:link.End],
+ Range: linkRange,
+ },
+ },
+ RawDestination: linkRange,
+ markdown: p.markdown,
+ })
+ p.position += (link.End - link.Position)
+
+ return true
+}
+
+func (p *inlineParser) Parse() []Inline {
+ for _, r := range p.ranges {
+ p.raw += p.markdown[r.Position:r.End]
+ }
+
+ for p.position < len(p.raw) {
+ c, _ := utf8.DecodeRuneInString(p.raw[p.position:])
+
+ switch c {
+ case '\r', '\n':
+ p.parseLineEnding()
+ case '\\':
+ p.parseEscapeCharacter()
+ case '`':
+ p.parseBackticks()
+ case '&':
+ p.parseCharacterReference()
+ case '!', '[':
+ p.parseLinkOrImageDelimiter()
+ case ']':
+ p.lookForLinkOrImage()
+ case 'w', 'W', ':':
+ matched := p.parseAutolink(c)
+
+ if !matched {
+ p.parseText()
+ }
+ default:
+ p.parseText()
+ }
+ }
+
+ return p.inlines
+}
+
+func ParseInlines(markdown string, ranges []Range, referenceDefinitions []*ReferenceDefinition) (inlines []Inline) {
+ return newInlineParser(markdown, ranges, referenceDefinitions).Parse()
+}
+
+func MergeInlineText(inlines []Inline) []Inline {
+ ret := inlines[:0]
+ for i, v := range inlines {
+ // always add first node
+ if i == 0 {
+ ret = append(ret, v)
+ continue
+ }
+ // not a text node? nothing to merge
+ text, ok := v.(*Text)
+ if !ok {
+ ret = append(ret, v)
+ continue
+ }
+ // previous node is not a text node? nothing to merge
+ prevText, ok := ret[len(ret)-1].(*Text)
+ if !ok {
+ ret = append(ret, v)
+ continue
+ }
+ // previous node is not right before this one
+ if prevText.Range.End != text.Range.Position {
+ ret = append(ret, v)
+ continue
+ }
+ // we have two consecutive text nodes
+ ret[len(ret)-1] = &Text{
+ Text: prevText.Text + text.Text,
+ Range: Range{prevText.Range.Position, text.Range.End},
+ }
+ }
+ return ret
+}
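+
+// A brief illustration (not part of the upstream source): two adjacent Text
+// nodes covering contiguous ranges are merged into a single node.
+//
+//	merged := MergeInlineText([]Inline{
+//		&Text{Text: "foo", Range: Range{0, 3}},
+//		&Text{Text: "bar", Range: Range{3, 6}},
+//	})
+//	// len(merged) == 1, merged[0].(*Text).Text == "foobar"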
+
+func Unescape(markdown string) string {
+ ret := ""
+
+ position := 0
+ for position < len(markdown) {
+ c, cSize := utf8.DecodeRuneInString(markdown[position:])
+
+ switch c {
+ case '\\':
+ if position+1 < len(markdown) && isEscapableByte(markdown[position+1]) {
+ ret += string(markdown[position+1])
+ position += 2
+ } else {
+ ret += `\`
+ position++
+ }
+ case '&':
+ position++
+ if semicolon := strings.IndexByte(markdown[position:], ';'); semicolon == -1 {
+ ret += "&"
+ } else if s := CharacterReference(markdown[position : position+semicolon]); s != "" {
+ position += semicolon + 1
+ ret += s
+ } else {
+ ret += "&"
+ }
+ default:
+ ret += string(c)
+ position += cSize
+ }
+ }
+
+ return ret
+}
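+
+// A usage sketch (not part of the upstream source): Unescape resolves backslash
+// escapes and character references in a raw markdown span.
+//
+//	Unescape(`\*not emphasized\*`) // "*not emphasized*"
+//	Unescape("fish &amp; chips")   // "fish & chips"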
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inspect.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inspect.go
new file mode 100644
index 00000000..3c7f2d1c
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/inspect.go
@@ -0,0 +1,78 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+// Inspect traverses the markdown tree in depth-first order. If f returns true, Inspect invokes f
+// recursively for each child of the block or inline, followed by a call of f(nil).
+func Inspect(markdown string, f func(interface{}) bool) {
+ document, referenceDefinitions := Parse(markdown)
+ InspectBlock(document, func(block Block) bool {
+ if !f(block) {
+ return false
+ }
+ switch v := block.(type) {
+ case *Paragraph:
+ for _, inline := range MergeInlineText(v.ParseInlines(referenceDefinitions)) {
+ InspectInline(inline, func(inline Inline) bool {
+ return f(inline)
+ })
+ }
+ }
+ return true
+ })
+}
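+
+// A minimal usage sketch (not part of the upstream source): collecting the
+// destinations of all inline links and autolinks in a message, using the
+// Inspect, InlineLink, and Autolink types defined in this package.
+//
+//	var links []string
+//	Inspect("visit [the docs](https://example.com) or www.example.org", func(node interface{}) bool {
+//		switch v := node.(type) {
+//		case *InlineLink:
+//			links = append(links, v.Destination())
+//		case *Autolink:
+//			links = append(links, v.Destination())
+//		}
+//		return true
+//	})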
+
+// InspectBlock traverses the blocks in depth-first order, starting with block. If f returns true,
+// InspectBlock invokes f recursively for each child of the block, followed by a call of f(nil).
+func InspectBlock(block Block, f func(Block) bool) {
+ if !f(block) {
+ return
+ }
+ switch v := block.(type) {
+ case *Document:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ case *List:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ case *ListItem:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ case *BlockQuote:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ }
+ f(nil)
+}
+
+// InspectInline traverses the inlines in depth-first order, starting with inline. If f returns true,
+// InspectInline invokes f recursively for each child of the inline, followed by a call of f(nil).
+func InspectInline(inline Inline, f func(Inline) bool) {
+ if !f(inline) {
+ return
+ }
+ switch v := inline.(type) {
+ case *InlineImage:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ case *InlineLink:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ case *ReferenceImage:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ case *ReferenceLink:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ }
+ f(nil)
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/lines.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/lines.go
new file mode 100644
index 00000000..f59e5afe
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/lines.go
@@ -0,0 +1,32 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type Line struct {
+ Range
+}
+
+func ParseLines(markdown string) []Line {
+ lineStartPosition := 0
+ isAfterCarriageReturn := false
+ lines := make([]Line, 0, strings.Count(markdown, "\n"))
+ for position, r := range markdown {
+ if r == '\n' {
+ lines = append(lines, Line{Range{lineStartPosition, position + 1}})
+ lineStartPosition = position + 1
+ } else if isAfterCarriageReturn {
+ lines = append(lines, Line{Range{lineStartPosition, position}})
+ lineStartPosition = position
+ }
+ isAfterCarriageReturn = r == '\r'
+ }
+ if lineStartPosition < len(markdown) {
+ lines = append(lines, Line{Range{lineStartPosition, len(markdown)}})
+ }
+ return lines
+}
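+
+// A small illustration (not part of the upstream source) of the ranges produced
+// for a two-line input:
+//
+//	lines := ParseLines("first\nsecond")
+//	// lines[0].Range == Range{0, 6}  (covers "first\n")
+//	// lines[1].Range == Range{6, 12} (covers "second")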
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/links.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/links.go
new file mode 100644
index 00000000..6aa56f25
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/links.go
@@ -0,0 +1,184 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "unicode/utf8"
+)
+
+func parseLinkDestination(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) {
+ return
+ }
+
+ if markdown[position] == '<' {
+ isEscaped := false
+
+ for offset, c := range []byte(markdown[position+1:]) {
+ if isEscaped {
+ isEscaped = false
+ if isEscapableByte(c) {
+ continue
+ }
+ }
+
+ if c == '\\' {
+ isEscaped = true
+ } else if c == '<' {
+ break
+ } else if c == '>' {
+ return Range{position + 1, position + 1 + offset}, position + 1 + offset + 1, true
+ } else if isWhitespaceByte(c) {
+ break
+ }
+ }
+ }
+
+ openCount := 0
+ isEscaped := false
+ for offset, c := range []byte(markdown[position:]) {
+ if isEscaped {
+ isEscaped = false
+ if isEscapableByte(c) {
+ continue
+ }
+ }
+
+ switch c {
+ case '\\':
+ isEscaped = true
+ case '(':
+ openCount++
+ case ')':
+ if openCount < 1 {
+ return Range{position, position + offset}, position + offset, true
+ }
+ openCount--
+ default:
+ if isWhitespaceByte(c) {
+ return Range{position, position + offset}, position + offset, true
+ }
+ }
+ }
+ return Range{position, len(markdown)}, len(markdown), true
+}
+
+func parseLinkTitle(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) {
+ return
+ }
+
+ originalPosition := position
+
+ var closer byte
+ switch markdown[position] {
+ case '"', '\'':
+ closer = markdown[position]
+ case '(':
+ closer = ')'
+ default:
+ return
+ }
+ position++
+
+ for position < len(markdown) {
+ switch markdown[position] {
+ case '\\':
+ position++
+ if position < len(markdown) && isEscapableByte(markdown[position]) {
+ position++
+ }
+ case closer:
+ return Range{originalPosition + 1, position}, position + 1, true
+ default:
+ position++
+ }
+ }
+
+ return
+}
+
+func parseLinkLabel(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) || markdown[position] != '[' {
+ return
+ }
+
+ originalPosition := position
+ position++
+
+ for position < len(markdown) {
+ switch markdown[position] {
+ case '\\':
+ position++
+ if position < len(markdown) && isEscapableByte(markdown[position]) {
+ position++
+ }
+ case '[':
+ return
+ case ']':
+ if position-originalPosition >= 1000 && utf8.RuneCountInString(markdown[originalPosition:position]) >= 1000 {
+ return
+ }
+ return Range{originalPosition + 1, position}, position + 1, true
+ default:
+ position++
+ }
+ }
+
+ return
+}
+
+// As a non-standard feature, we allow image links to specify dimensions of the image by adding "=WIDTHxHEIGHT"
+// after the image destination but before the image title, e.g. ![alt](http://example.com/image.png =100x200 "title").
+// Both width and height are optional, but at least one of them must be specified.
+func parseImageDimensions(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) {
+ return
+ }
+
+ originalPosition := position
+
+ // Read =
+ position += 1
+ if position >= len(markdown) {
+ return
+ }
+
+ // Read width
+ hasWidth := false
+ for position < len(markdown)-1 && isNumericByte(markdown[position]) {
+ hasWidth = true
+ position += 1
+ }
+
+ // Look for early end of dimensions
+ if isWhitespaceByte(markdown[position]) || markdown[position] == ')' {
+ return Range{originalPosition, position - 1}, position, true
+ }
+
+ // Read the x
+ if (markdown[position] != 'x' && markdown[position] != 'X') || position == len(markdown)-1 {
+ return
+ }
+ position += 1
+
+ // Read height
+ hasHeight := false
+ for position < len(markdown)-1 && isNumericByte(markdown[position]) {
+ hasHeight = true
+ position += 1
+ }
+
+	// Make sure there are no trailing characters
+ if !isWhitespaceByte(markdown[position]) && markdown[position] != ')' {
+ return
+ }
+
+ if !hasWidth && !hasHeight {
+ // At least one of width or height is required
+ return
+ }
+
+ return Range{originalPosition, position - 1}, position, true
+}
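+
+// For illustration (not part of the upstream source), dimension suffixes accepted
+// by parseImageDimensions may specify a width, a height, or both:
+//
+//	![alt](http://example.com/image.png =100x200)
+//	![alt](http://example.com/image.png =100x)
+//	![alt](http://example.com/image.png =x200)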
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/list.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/list.go
new file mode 100644
index 00000000..39039295
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/list.go
@@ -0,0 +1,220 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type ListItem struct {
+ blockBase
+ markdown string
+ hasTrailingBlankLine bool
+ hasBlankLineBetweenChildren bool
+
+ Indentation int
+ Children []Block
+}
+
+func (b *ListItem) Continuation(indentation int, r Range) *continuation {
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ if b.Children == nil {
+ return nil
+ }
+ return &continuation{
+ Remaining: r,
+ }
+ }
+ if indentation < b.Indentation {
+ return nil
+ }
+ return &continuation{
+ Indentation: indentation - b.Indentation,
+ Remaining: r,
+ }
+}
+
+func (b *ListItem) AddChild(openBlocks []Block) []Block {
+ b.Children = append(b.Children, openBlocks[0])
+ if b.hasTrailingBlankLine {
+ b.hasBlankLineBetweenChildren = true
+ }
+ b.hasTrailingBlankLine = false
+ return openBlocks
+}
+
+func (b *ListItem) AddLine(indentation int, r Range) bool {
+ isBlank := strings.TrimSpace(b.markdown[r.Position:r.End]) == ""
+ if isBlank {
+ b.hasTrailingBlankLine = true
+ }
+ return false
+}
+
+func (b *ListItem) HasTrailingBlankLine() bool {
+ return b.hasTrailingBlankLine || (len(b.Children) > 0 && b.Children[len(b.Children)-1].HasTrailingBlankLine())
+}
+
+func (b *ListItem) isLoose() bool {
+ if b.hasBlankLineBetweenChildren {
+ return true
+ }
+ for i, child := range b.Children {
+ if i < len(b.Children)-1 && child.HasTrailingBlankLine() {
+ return true
+ }
+ }
+ return false
+}
+
+type List struct {
+ blockBase
+ markdown string
+ hasTrailingBlankLine bool
+ hasBlankLineBetweenChildren bool
+
+ IsLoose bool
+ IsOrdered bool
+ OrderedStart int
+ BulletOrDelimiter byte
+ Children []*ListItem
+}
+
+func (b *List) Continuation(indentation int, r Range) *continuation {
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return &continuation{
+ Remaining: r,
+ }
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *List) AddChild(openBlocks []Block) []Block {
+ if item, ok := openBlocks[0].(*ListItem); ok {
+ b.Children = append(b.Children, item)
+ if b.hasTrailingBlankLine {
+ b.hasBlankLineBetweenChildren = true
+ }
+ b.hasTrailingBlankLine = false
+ return openBlocks
+ } else if list, ok := openBlocks[0].(*List); ok {
+ if len(list.Children) == 1 && list.IsOrdered == b.IsOrdered && list.BulletOrDelimiter == b.BulletOrDelimiter {
+ return b.AddChild(openBlocks[1:])
+ }
+ }
+ return nil
+}
+
+func (b *List) AddLine(indentation int, r Range) bool {
+ isBlank := strings.TrimSpace(b.markdown[r.Position:r.End]) == ""
+ if isBlank {
+ b.hasTrailingBlankLine = true
+ }
+ return false
+}
+
+func (b *List) HasTrailingBlankLine() bool {
+ return b.hasTrailingBlankLine || (len(b.Children) > 0 && b.Children[len(b.Children)-1].HasTrailingBlankLine())
+}
+
+func (b *List) isLoose() bool {
+ if b.hasBlankLineBetweenChildren {
+ return true
+ }
+ for i, child := range b.Children {
+ if child.isLoose() || (i < len(b.Children)-1 && child.HasTrailingBlankLine()) {
+ return true
+ }
+ }
+ return false
+}
+
+func (b *List) Close() {
+ b.IsLoose = b.isLoose()
+}
+
+func parseListMarker(markdown string, r Range) (success, isOrdered bool, orderedStart int, bulletOrDelimiter byte, markerWidth int, remaining Range) {
+ digits := 0
+ n := 0
+ for i := r.Position; i < r.End && markdown[i] >= '0' && markdown[i] <= '9'; i++ {
+ digits++
+ n = n*10 + int(markdown[i]-'0')
+ }
+ if digits > 0 {
+ if digits > 9 || r.Position+digits >= r.End {
+ return
+ }
+ next := markdown[r.Position+digits]
+ if next != '.' && next != ')' {
+ return
+ }
+ return true, true, n, next, digits + 1, Range{r.Position + digits + 1, r.End}
+ }
+ if r.Position >= r.End {
+ return
+ }
+ next := markdown[r.Position]
+ if next != '-' && next != '+' && next != '*' {
+ return
+ }
+ return true, false, 0, next, 1, Range{r.Position + 1, r.End}
+}
+
+func listStart(markdown string, indent int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ afterList := false
+ if len(matchedBlocks) > 0 {
+ _, afterList = matchedBlocks[len(matchedBlocks)-1].(*List)
+ }
+ if !afterList && indent > 3 {
+ return nil
+ }
+
+ success, isOrdered, orderedStart, bulletOrDelimiter, markerWidth, remaining := parseListMarker(markdown, r)
+ if !success {
+ return nil
+ }
+
+ isBlank := strings.TrimSpace(markdown[remaining.Position:remaining.End]) == ""
+ if len(matchedBlocks) > 0 && len(unmatchedBlocks) == 0 {
+ if _, ok := matchedBlocks[len(matchedBlocks)-1].(*Paragraph); ok {
+ if isBlank || (isOrdered && orderedStart != 1) {
+ return nil
+ }
+ }
+ }
+
+ indentAfterMarker, indentBytesAfterMarker := countIndentation(markdown, remaining)
+ if !isBlank && indentAfterMarker < 1 {
+ return nil
+ }
+
+ remaining = Range{remaining.Position + indentBytesAfterMarker, remaining.End}
+ consumedIndentAfterMarker := indentAfterMarker
+ if isBlank || indentAfterMarker >= 5 {
+ consumedIndentAfterMarker = 1
+ }
+
+ listItem := &ListItem{
+ markdown: markdown,
+ Indentation: indent + markerWidth + consumedIndentAfterMarker,
+ }
+ list := &List{
+ markdown: markdown,
+ IsOrdered: isOrdered,
+ OrderedStart: orderedStart,
+ BulletOrDelimiter: bulletOrDelimiter,
+ Children: []*ListItem{listItem},
+ }
+ ret := []Block{list, listItem}
+ if descendants := blockStartOrParagraph(markdown, indentAfterMarker-consumedIndentAfterMarker, remaining, nil, nil); descendants != nil {
+ listItem.Children = append(listItem.Children, descendants[0])
+ ret = append(ret, descendants...)
+ }
+ return ret
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/markdown.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/markdown.go
new file mode 100644
index 00000000..5ccdad8c
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/markdown.go
@@ -0,0 +1,147 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+// Package markdown implements a parser for the subset of the CommonMark spec necessary for us to do
+// server-side processing. It is not a full implementation and lacks many features. But it is
+// complete enough to efficiently and accurately allow us to do what we need, such as rewriting
+// image URLs for proxying.
+package markdown
+
+import (
+ "strings"
+)
+
+func isEscapable(c rune) bool {
+ return c > ' ' && (c < '0' || (c > '9' && (c < 'A' || (c > 'Z' && (c < 'a' || (c > 'z' && c <= '~'))))))
+}
+
+func isEscapableByte(c byte) bool {
+ return isEscapable(rune(c))
+}
+
+func isWhitespace(c rune) bool {
+ switch c {
+ case ' ', '\t', '\n', '\u000b', '\u000c', '\r':
+ return true
+ }
+ return false
+}
+
+func isWhitespaceByte(c byte) bool {
+ return isWhitespace(rune(c))
+}
+
+func isNumeric(c rune) bool {
+ return c >= '0' && c <= '9'
+}
+
+func isNumericByte(c byte) bool {
+ return isNumeric(rune(c))
+}
+
+func isHex(c rune) bool {
+ return isNumeric(c) || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')
+}
+
+func isHexByte(c byte) bool {
+ return isHex(rune(c))
+}
+
+func isAlphanumeric(c rune) bool {
+ return isNumeric(c) || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
+}
+
+func isAlphanumericByte(c byte) bool {
+ return isAlphanumeric(rune(c))
+}
+
+func nextNonWhitespace(markdown string, position int) int {
+ for offset, c := range []byte(markdown[position:]) {
+ if !isWhitespaceByte(c) {
+ return position + offset
+ }
+ }
+ return len(markdown)
+}
+
+func nextLine(markdown string, position int) (linePosition int, skippedNonWhitespace bool) {
+ for i := position; i < len(markdown); i++ {
+ c := markdown[i]
+ if c == '\r' {
+ if i+1 < len(markdown) && markdown[i+1] == '\n' {
+ return i + 2, skippedNonWhitespace
+ }
+ return i + 1, skippedNonWhitespace
+ } else if c == '\n' {
+ return i + 1, skippedNonWhitespace
+ } else if !isWhitespaceByte(c) {
+ skippedNonWhitespace = true
+ }
+ }
+ return len(markdown), skippedNonWhitespace
+}
+
+func countIndentation(markdown string, r Range) (spaces, bytes int) {
+ for i := r.Position; i < r.End; i++ {
+ if markdown[i] == ' ' {
+ spaces++
+ bytes++
+ } else if markdown[i] == '\t' {
+ spaces += 4
+ bytes++
+ } else {
+ break
+ }
+ }
+ return
+}
+
+func trimLeftSpace(markdown string, r Range) Range {
+ s := markdown[r.Position:r.End]
+ trimmed := strings.TrimLeftFunc(s, isWhitespace)
+ return Range{r.Position, r.End - (len(s) - len(trimmed))}
+}
+
+func trimRightSpace(markdown string, r Range) Range {
+ s := markdown[r.Position:r.End]
+ trimmed := strings.TrimRightFunc(s, isWhitespace)
+ return Range{r.Position, r.End - (len(s) - len(trimmed))}
+}
+
+func relativeToAbsolutePosition(ranges []Range, position int) int {
+ rem := position
+ for _, r := range ranges {
+ l := r.End - r.Position
+ if rem < l {
+ return r.Position + rem
+ }
+ rem -= l
+ }
+ if len(ranges) == 0 {
+ return 0
+ }
+ return ranges[len(ranges)-1].End
+}
+
+func trimBytesFromRanges(ranges []Range, bytes int) (result []Range) {
+ rem := bytes
+ for _, r := range ranges {
+ if rem == 0 {
+ result = append(result, r)
+ continue
+ }
+ l := r.End - r.Position
+ if rem < l {
+ result = append(result, Range{r.Position + rem, r.End})
+ rem = 0
+ continue
+ }
+ rem -= l
+ }
+ return
+}
+
+func Parse(markdown string) (*Document, []*ReferenceDefinition) {
+ lines := ParseLines(markdown)
+ return ParseBlocks(markdown, lines)
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/paragraph.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/paragraph.go
new file mode 100644
index 00000000..aef01b5e
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/paragraph.go
@@ -0,0 +1,71 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type Paragraph struct {
+ blockBase
+ markdown string
+
+ Text []Range
+ ReferenceDefinitions []*ReferenceDefinition
+}
+
+func (b *Paragraph) ParseInlines(referenceDefinitions []*ReferenceDefinition) []Inline {
+ return ParseInlines(b.markdown, b.Text, referenceDefinitions)
+}
+
+func (b *Paragraph) Continuation(indentation int, r Range) *continuation {
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return nil
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *Paragraph) Close() {
+ for {
+ for i := 0; i < len(b.Text); i++ {
+ b.Text[i] = trimLeftSpace(b.markdown, b.Text[i])
+ if b.Text[i].Position < b.Text[i].End {
+ break
+ }
+ }
+
+ if len(b.Text) == 0 || b.Text[0].Position < b.Text[0].End && b.markdown[b.Text[0].Position] != '[' {
+ break
+ }
+
+ definition, remaining := parseReferenceDefinition(b.markdown, b.Text)
+ if definition == nil {
+ break
+ }
+ b.ReferenceDefinitions = append(b.ReferenceDefinitions, definition)
+ b.Text = remaining
+ }
+
+ for i := len(b.Text) - 1; i >= 0; i-- {
+ b.Text[i] = trimRightSpace(b.markdown, b.Text[i])
+ if b.Text[i].Position < b.Text[i].End {
+ break
+ }
+ }
+}
+
+func newParagraph(markdown string, r Range) *Paragraph {
+ s := markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return nil
+ }
+ return &Paragraph{
+ markdown: markdown,
+ Text: []Range{r},
+ }
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/reference_definition.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/reference_definition.go
new file mode 100644
index 00000000..69e8ed94
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/markdown/reference_definition.go
@@ -0,0 +1,75 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+type ReferenceDefinition struct {
+ RawDestination Range
+
+ markdown string
+ rawLabel string
+ rawTitle string
+}
+
+func (d *ReferenceDefinition) Destination() string {
+ return Unescape(d.markdown[d.RawDestination.Position:d.RawDestination.End])
+}
+
+func (d *ReferenceDefinition) Label() string {
+ return d.rawLabel
+}
+
+func (d *ReferenceDefinition) Title() string {
+ return Unescape(d.rawTitle)
+}
+
+func parseReferenceDefinition(markdown string, ranges []Range) (*ReferenceDefinition, []Range) {
+ raw := ""
+ for _, r := range ranges {
+ raw += markdown[r.Position:r.End]
+ }
+
+ label, next, ok := parseLinkLabel(raw, 0)
+ if !ok {
+ return nil, nil
+ }
+ position := next
+
+ if position >= len(raw) || raw[position] != ':' {
+ return nil, nil
+ }
+ position++
+
+ destination, next, ok := parseLinkDestination(raw, nextNonWhitespace(raw, position))
+ if !ok {
+ return nil, nil
+ }
+ position = next
+
+ absoluteDestination := relativeToAbsolutePosition(ranges, destination.Position)
+ ret := &ReferenceDefinition{
+ RawDestination: Range{absoluteDestination, absoluteDestination + destination.End - destination.Position},
+ markdown: markdown,
+ rawLabel: raw[label.Position:label.End],
+ }
+
+ if position < len(raw) && isWhitespaceByte(raw[position]) {
+ title, next, ok := parseLinkTitle(raw, nextNonWhitespace(raw, position))
+ if !ok {
+ if nextLine, skippedNonWhitespace := nextLine(raw, position); !skippedNonWhitespace {
+ return ret, trimBytesFromRanges(ranges, nextLine)
+ }
+ return nil, nil
+ }
+ if nextLine, skippedNonWhitespace := nextLine(raw, next); !skippedNonWhitespace {
+ ret.rawTitle = raw[title.Position:title.End]
+ return ret, trimBytesFromRanges(ranges, nextLine)
+ }
+ }
+
+ if nextLine, skippedNonWhitespace := nextLine(raw, position); !skippedNonWhitespace {
+ return ret, trimBytesFromRanges(ranges, nextLine)
+ }
+
+ return nil, nil
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/default.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/default.go
new file mode 100644
index 00000000..e7faa8c4
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/default.go
@@ -0,0 +1,99 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+
+ "github.com/mattermost/logr"
+)
+
+// defaultLog manually encodes the log to STDERR, providing a basic, default logging implementation
+// before mlog is fully configured.
+func defaultLog(level, msg string, fields ...Field) {
+ log := struct {
+ Level string `json:"level"`
+ Message string `json:"msg"`
+ Fields []Field `json:"fields,omitempty"`
+ }{
+ level,
+ msg,
+ fields,
+ }
+
+ if b, err := json.Marshal(log); err != nil {
+ fmt.Fprintf(os.Stderr, `{"level":"error","msg":"failed to encode log message"}%s`, "\n")
+ } else {
+ fmt.Fprintf(os.Stderr, "%s\n", b)
+ }
+}
+
+func defaultIsLevelEnabled(level LogLevel) bool {
+ return true
+}
+
+func defaultDebugLog(msg string, fields ...Field) {
+ defaultLog("debug", msg, fields...)
+}
+
+func defaultInfoLog(msg string, fields ...Field) {
+ defaultLog("info", msg, fields...)
+}
+
+func defaultWarnLog(msg string, fields ...Field) {
+ defaultLog("warn", msg, fields...)
+}
+
+func defaultErrorLog(msg string, fields ...Field) {
+ defaultLog("error", msg, fields...)
+}
+
+func defaultCriticalLog(msg string, fields ...Field) {
+ // We map critical to error in zap, so be consistent.
+ defaultLog("error", msg, fields...)
+}
+
+func defaultCustomLog(lvl LogLevel, msg string, fields ...Field) {
+ // custom log levels are only output once log targets are configured.
+}
+
+func defaultCustomMultiLog(lvl []LogLevel, msg string, fields ...Field) {
+ // custom log levels are only output once log targets are configured.
+}
+
+func defaultFlush(ctx context.Context) error {
+ return nil
+}
+
+func defaultAdvancedConfig(cfg LogTargetCfg) error {
+	// mlog.ConfigAdvancedLogging should not be called until default
+ // logger is replaced with mlog.Logger instance.
+ return errors.New("cannot config advanced logging on default logger")
+}
+
+func defaultAdvancedShutdown(ctx context.Context) error {
+ return nil
+}
+
+func defaultAddTarget(targets ...logr.Target) error {
+ // mlog.AddTarget should not be called until default
+ // logger is replaced with mlog.Logger instance.
+ return errors.New("cannot AddTarget on default logger")
+}
+
+func defaultRemoveTargets(ctx context.Context, f func(TargetInfo) bool) error {
+ // mlog.RemoveTargets should not be called until default
+ // logger is replaced with mlog.Logger instance.
+ return errors.New("cannot RemoveTargets on default logger")
+}
+
+func defaultEnableMetrics(collector logr.MetricsCollector) error {
+ // mlog.EnableMetrics should not be called until default
+ // logger is replaced with mlog.Logger instance.
+ return errors.New("cannot EnableMetrics on default logger")
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/errors.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/errors.go
new file mode 100644
index 00000000..93762fda
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/errors.go
@@ -0,0 +1,32 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "github.com/mattermost/logr"
+)
+
+// onLoggerError is called when the logging system encounters an error,
+// such as a target not being able to write records. The targets will keep trying;
+// however, the error will be logged with a dedicated level that can be output
+// to a safe/always available target for monitoring or alerting.
+func onLoggerError(err error) {
+ Log(LvlLogError, "advanced logging error", Err(err))
+}
+
+// onQueueFull is called when the main logger queue is full, indicating the
+// volume and frequency of log record creation is too high for the queue size
+// and/or the target latencies.
+func onQueueFull(rec *logr.LogRec, maxQueueSize int) bool {
+ Log(LvlLogError, "main queue full, dropping record", Any("rec", rec))
+ return true // drop record
+}
+
+// onTargetQueueFull is called when a target's queue is full, indicating the
+// volume and frequency of log record creation is too high for the target's queue size
+// and/or the target latency.
+func onTargetQueueFull(target logr.Target, rec *logr.LogRec, maxQueueSize int) bool {
+ Log(LvlLogError, "target queue full, dropping record", String("target", ""), Any("rec", rec))
+ return true // drop record
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/global.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/global.go
new file mode 100644
index 00000000..aba06646
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/global.go
@@ -0,0 +1,98 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "context"
+ "log"
+ "sync/atomic"
+
+ "github.com/mattermost/logr"
+ "go.uber.org/zap"
+ "go.uber.org/zap/zapcore"
+)
+
+var globalLogger *Logger
+
+func InitGlobalLogger(logger *Logger) {
+ // Clean up previous instance.
+ if globalLogger != nil && globalLogger.logrLogger != nil {
+ globalLogger.logrLogger.Logr().Shutdown()
+ }
+ glob := *logger
+ glob.zap = glob.zap.WithOptions(zap.AddCallerSkip(1))
+ globalLogger = &glob
+ IsLevelEnabled = globalLogger.IsLevelEnabled
+ Debug = globalLogger.Debug
+ Info = globalLogger.Info
+ Warn = globalLogger.Warn
+ Error = globalLogger.Error
+ Critical = globalLogger.Critical
+ Log = globalLogger.Log
+ LogM = globalLogger.LogM
+ Flush = globalLogger.Flush
+ ConfigAdvancedLogging = globalLogger.ConfigAdvancedLogging
+ ShutdownAdvancedLogging = globalLogger.ShutdownAdvancedLogging
+ AddTarget = globalLogger.AddTarget
+ RemoveTargets = globalLogger.RemoveTargets
+ EnableMetrics = globalLogger.EnableMetrics
+}
+
+// logWriterFunc provides access to mlog via io.Writer, so the standard logger
+// can be redirected to use mlog and whatever targets are defined.
+type logWriterFunc func([]byte) (int, error)
+
+func (lw logWriterFunc) Write(p []byte) (int, error) {
+ return lw(p)
+}
+
+func RedirectStdLog(logger *Logger) {
+ if atomic.LoadInt32(&disableZap) == 0 {
+ zap.RedirectStdLogAt(logger.zap.With(zap.String("source", "stdlog")).WithOptions(zap.AddCallerSkip(-2)), zapcore.ErrorLevel)
+ return
+ }
+
+ writer := func(p []byte) (int, error) {
+ Log(LvlStdLog, string(p))
+ return len(p), nil
+ }
+ log.SetOutput(logWriterFunc(writer))
+}
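+
+// A usage sketch (not part of the upstream source), where logger is an already
+// constructed *Logger: after redirection, output from the standard library
+// logger flows through mlog and its configured targets.
+//
+//	RedirectStdLog(logger)
+//	log.Print("now handled by mlog")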
+
+type IsLevelEnabledFunc func(LogLevel) bool
+type LogFunc func(string, ...Field)
+type LogFuncCustom func(LogLevel, string, ...Field)
+type LogFuncCustomMulti func([]LogLevel, string, ...Field)
+type FlushFunc func(context.Context) error
+type ConfigFunc func(cfg LogTargetCfg) error
+type ShutdownFunc func(context.Context) error
+type AddTargetFunc func(...logr.Target) error
+type RemoveTargetsFunc func(context.Context, func(TargetInfo) bool) error
+type EnableMetricsFunc func(logr.MetricsCollector) error
+
+// DON'T USE THIS. Modify the level on the app logger instead.
+func GloballyDisableDebugLogForTest() {
+ globalLogger.consoleLevel.SetLevel(zapcore.ErrorLevel)
+}
+
+// DON'T USE THIS. Modify the level on the app logger instead.
+func GloballyEnableDebugLogForTest() {
+ globalLogger.consoleLevel.SetLevel(zapcore.DebugLevel)
+}
+
+var IsLevelEnabled IsLevelEnabledFunc = defaultIsLevelEnabled
+var Debug LogFunc = defaultDebugLog
+var Info LogFunc = defaultInfoLog
+var Warn LogFunc = defaultWarnLog
+var Error LogFunc = defaultErrorLog
+var Critical LogFunc = defaultCriticalLog
+var Log LogFuncCustom = defaultCustomLog
+var LogM LogFuncCustomMulti = defaultCustomMultiLog
+var Flush FlushFunc = defaultFlush
+
+var ConfigAdvancedLogging ConfigFunc = defaultAdvancedConfig
+var ShutdownAdvancedLogging ShutdownFunc = defaultAdvancedShutdown
+var AddTarget AddTargetFunc = defaultAddTarget
+var RemoveTargets RemoveTargetsFunc = defaultRemoveTargets
+var EnableMetrics EnableMetricsFunc = defaultEnableMetrics
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/levels.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/levels.go
new file mode 100644
index 00000000..24d29e0b
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/levels.go
@@ -0,0 +1,51 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+// Standard levels
+var (
+ LvlPanic = LogLevel{ID: 0, Name: "panic", Stacktrace: true}
+ LvlFatal = LogLevel{ID: 1, Name: "fatal", Stacktrace: true}
+ LvlError = LogLevel{ID: 2, Name: "error"}
+ LvlWarn = LogLevel{ID: 3, Name: "warn"}
+ LvlInfo = LogLevel{ID: 4, Name: "info"}
+ LvlDebug = LogLevel{ID: 5, Name: "debug"}
+ LvlTrace = LogLevel{ID: 6, Name: "trace"}
+ // used by redirected standard logger
+ LvlStdLog = LogLevel{ID: 10, Name: "stdlog"}
+ // used only by the logger
+ LvlLogError = LogLevel{ID: 11, Name: "logerror", Stacktrace: true}
+)
+
+// Register custom (discrete) levels here.
+// !!!!! IDs must not exceed 32,768 !!!!!
+var (
+ // used by the audit system
+ LvlAuditAPI = LogLevel{ID: 100, Name: "audit-api"}
+ LvlAuditContent = LogLevel{ID: 101, Name: "audit-content"}
+ LvlAuditPerms = LogLevel{ID: 102, Name: "audit-permissions"}
+ LvlAuditCLI = LogLevel{ID: 103, Name: "audit-cli"}
+
+ // used by the TCP log target
+ LvlTcpLogTarget = LogLevel{ID: 120, Name: "TcpLogTarget"}
+
+ // used by Remote Cluster Service
+ LvlRemoteClusterServiceDebug = LogLevel{ID: 130, Name: "RemoteClusterServiceDebug"}
+ LvlRemoteClusterServiceError = LogLevel{ID: 131, Name: "RemoteClusterServiceError"}
+ LvlRemoteClusterServiceWarn = LogLevel{ID: 132, Name: "RemoteClusterServiceWarn"}
+
+ // used by Shared Channel Sync Service
+ LvlSharedChannelServiceDebug = LogLevel{ID: 200, Name: "SharedChannelServiceDebug"}
+ LvlSharedChannelServiceError = LogLevel{ID: 201, Name: "SharedChannelServiceError"}
+ LvlSharedChannelServiceWarn = LogLevel{ID: 202, Name: "SharedChannelServiceWarn"}
+ LvlSharedChannelServiceMessagesInbound = LogLevel{ID: 203, Name: "SharedChannelServiceMsgInbound"}
+ LvlSharedChannelServiceMessagesOutbound = LogLevel{ID: 204, Name: "SharedChannelServiceMsgOutbound"}
+
+ // add more here ...
+)
+
+// Combinations for LogM (log multi)
+var (
+ MLvlAuditAll = []LogLevel{LvlAuditAPI, LvlAuditContent, LvlAuditPerms, LvlAuditCLI}
+)
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/log.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/log.go
new file mode 100644
index 00000000..d50fc123
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/log.go
@@ -0,0 +1,361 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "github.com/mattermost/logr"
+ "go.uber.org/zap"
+ "go.uber.org/zap/zapcore"
+ "gopkg.in/natefinch/lumberjack.v2"
+)
+
+const (
+ // Very verbose messages for debugging specific issues
+ LevelDebug = "debug"
+ // Default log level, informational
+ LevelInfo = "info"
+ // Warnings are messages about possible issues
+ LevelWarn = "warn"
+ // Errors are messages about things we know are problems
+ LevelError = "error"
+
+ // DefaultFlushTimeout is the default amount of time mlog.Flush will wait
+ // before timing out.
+ DefaultFlushTimeout = time.Second * 5
+)
+
+var (
+ // disableZap is set when Zap should be disabled and Logr used instead.
+ // This is needed for unit testing as Zap has no shutdown capabilities
+	// and holds file handles until process exit. Currently unit tests create
+ // many server instances, and thus many Zap log files.
+ // This flag will be removed when Zap is permanently replaced.
+ disableZap int32
+)
+
+// Type and function aliases from zap to limit the library's scope within MM code
+type Field = zapcore.Field
+
+var Int64 = zap.Int64
+var Int32 = zap.Int32
+var Int = zap.Int
+var Uint32 = zap.Uint32
+var String = zap.String
+var Any = zap.Any
+var Err = zap.Error
+var NamedErr = zap.NamedError
+var Bool = zap.Bool
+var Duration = zap.Duration
+
+type LoggerIFace interface {
+ IsLevelEnabled(LogLevel) bool
+ Debug(string, ...Field)
+ Info(string, ...Field)
+ Warn(string, ...Field)
+ Error(string, ...Field)
+ Critical(string, ...Field)
+ Log(LogLevel, string, ...Field)
+ LogM([]LogLevel, string, ...Field)
+}
+
+type TargetInfo logr.TargetInfo
+
+type LoggerConfiguration struct {
+ EnableConsole bool
+ ConsoleJson bool
+ EnableColor bool
+ ConsoleLevel string
+ EnableFile bool
+ FileJson bool
+ FileLevel string
+ FileLocation string
+}
+
+type Logger struct {
+ zap *zap.Logger
+ consoleLevel zap.AtomicLevel
+ fileLevel zap.AtomicLevel
+ logrLogger *logr.Logger
+ mutex *sync.RWMutex
+}
+
+func getZapLevel(level string) zapcore.Level {
+ switch level {
+ case LevelInfo:
+ return zapcore.InfoLevel
+ case LevelWarn:
+ return zapcore.WarnLevel
+ case LevelDebug:
+ return zapcore.DebugLevel
+ case LevelError:
+ return zapcore.ErrorLevel
+ default:
+ return zapcore.InfoLevel
+ }
+}
+
+func makeEncoder(json, color bool) zapcore.Encoder {
+ encoderConfig := zap.NewProductionEncoderConfig()
+ if json {
+ return zapcore.NewJSONEncoder(encoderConfig)
+ }
+
+ if color {
+ encoderConfig.EncodeLevel = zapcore.CapitalColorLevelEncoder
+ }
+ encoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
+ return zapcore.NewConsoleEncoder(encoderConfig)
+}
+
+func NewLogger(config *LoggerConfiguration) *Logger {
+ cores := []zapcore.Core{}
+ logger := &Logger{
+ consoleLevel: zap.NewAtomicLevelAt(getZapLevel(config.ConsoleLevel)),
+ fileLevel: zap.NewAtomicLevelAt(getZapLevel(config.FileLevel)),
+ logrLogger: newLogr(),
+ mutex: &sync.RWMutex{},
+ }
+
+ if config.EnableConsole {
+ writer := zapcore.Lock(os.Stderr)
+ core := zapcore.NewCore(makeEncoder(config.ConsoleJson, config.EnableColor), writer, logger.consoleLevel)
+ cores = append(cores, core)
+ }
+
+ if config.EnableFile {
+ if atomic.LoadInt32(&disableZap) != 0 {
+ t := &LogTarget{
+ Type: "file",
+ Format: "json",
+ Levels: mlogLevelToLogrLevels(config.FileLevel),
+ MaxQueueSize: DefaultMaxTargetQueue,
+ Options: []byte(fmt.Sprintf(`{"Filename":"%s", "MaxSizeMB":%d, "Compress":%t}`,
+ config.FileLocation, 100, true)),
+ }
+ if !config.FileJson {
+ t.Format = "plain"
+ }
+ if tgt, err := NewLogrTarget("mlogFile", t); err == nil {
+ logger.logrLogger.Logr().AddTarget(tgt)
+ } else {
+ Error("error creating mlogFile", Err(err))
+ }
+ } else {
+ writer := zapcore.AddSync(&lumberjack.Logger{
+ Filename: config.FileLocation,
+ MaxSize: 100,
+ Compress: true,
+ })
+
+ core := zapcore.NewCore(makeEncoder(config.FileJson, false), writer, logger.fileLevel)
+ cores = append(cores, core)
+ }
+ }
+
+ combinedCore := zapcore.NewTee(cores...)
+
+ logger.zap = zap.New(combinedCore,
+ zap.AddCaller(),
+ )
+ return logger
+}
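+
+// A configuration sketch (not part of the upstream source): a console-plus-file
+// logger installed as the global logger. The file location is illustrative only.
+//
+//	logger := NewLogger(&LoggerConfiguration{
+//		EnableConsole: true,
+//		ConsoleJson:   false,
+//		ConsoleLevel:  LevelDebug,
+//		EnableFile:    true,
+//		FileJson:      true,
+//		FileLevel:     LevelInfo,
+//		FileLocation:  "mattermost.log",
+//	})
+//	InitGlobalLogger(logger)
+//	Info("server starting", String("listen_addr", ":8065"))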
+
+func (l *Logger) ChangeLevels(config *LoggerConfiguration) {
+ l.consoleLevel.SetLevel(getZapLevel(config.ConsoleLevel))
+ l.fileLevel.SetLevel(getZapLevel(config.FileLevel))
+}
+
+func (l *Logger) SetConsoleLevel(level string) {
+ l.consoleLevel.SetLevel(getZapLevel(level))
+}
+
+func (l *Logger) With(fields ...Field) *Logger {
+ newLogger := *l
+ newLogger.zap = newLogger.zap.With(fields...)
+ if newLogger.getLogger() != nil {
+ ll := newLogger.getLogger().WithFields(zapToLogr(fields))
+ newLogger.logrLogger = &ll
+ }
+ return &newLogger
+}
+
+func (l *Logger) StdLog(fields ...Field) *log.Logger {
+ return zap.NewStdLog(l.With(fields...).zap.WithOptions(getStdLogOption()))
+}
+
+// StdLogAt returns a *log.Logger which writes to the supplied zap logger at the required level.
+func (l *Logger) StdLogAt(level string, fields ...Field) (*log.Logger, error) {
+ return zap.NewStdLogAt(l.With(fields...).zap.WithOptions(getStdLogOption()), getZapLevel(level))
+}
+
+// StdLogWriter returns a writer that can be hooked up to the output of a golang standard logger.
+// Anything written to it will be interpreted as log entries accordingly.
+func (l *Logger) StdLogWriter() io.Writer {
+ newLogger := *l
+ newLogger.zap = newLogger.zap.WithOptions(zap.AddCallerSkip(4), getStdLogOption())
+ f := newLogger.Info
+ return &loggerWriter{f}
+}
+
+func (l *Logger) WithCallerSkip(skip int) *Logger {
+ newLogger := *l
+ newLogger.zap = newLogger.zap.WithOptions(zap.AddCallerSkip(skip))
+ return &newLogger
+}
+
+// Sugar is made for the plugin interface; it wraps mlog in a simpler interface
+// at the cost of performance.
+func (l *Logger) Sugar() *SugarLogger {
+ return &SugarLogger{
+ wrappedLogger: l,
+ zapSugar: l.zap.Sugar(),
+ }
+}
+
+func (l *Logger) IsLevelEnabled(level LogLevel) bool {
+ return isLevelEnabled(l.getLogger(), logr.Level(level))
+}
+
+func (l *Logger) Debug(message string, fields ...Field) {
+ l.zap.Debug(message, fields...)
+ if isLevelEnabled(l.getLogger(), logr.Debug) {
+ l.getLogger().WithFields(zapToLogr(fields)).Debug(message)
+ }
+}
+
+func (l *Logger) Info(message string, fields ...Field) {
+ l.zap.Info(message, fields...)
+ if isLevelEnabled(l.getLogger(), logr.Info) {
+ l.getLogger().WithFields(zapToLogr(fields)).Info(message)
+ }
+}
+
+func (l *Logger) Warn(message string, fields ...Field) {
+ l.zap.Warn(message, fields...)
+ if isLevelEnabled(l.getLogger(), logr.Warn) {
+ l.getLogger().WithFields(zapToLogr(fields)).Warn(message)
+ }
+}
+
+func (l *Logger) Error(message string, fields ...Field) {
+ l.zap.Error(message, fields...)
+ if isLevelEnabled(l.getLogger(), logr.Error) {
+ l.getLogger().WithFields(zapToLogr(fields)).Error(message)
+ }
+}
+
+func (l *Logger) Critical(message string, fields ...Field) {
+ l.zap.Error(message, fields...)
+ if isLevelEnabled(l.getLogger(), logr.Error) {
+ l.getLogger().WithFields(zapToLogr(fields)).Error(message)
+ }
+}
+
+func (l *Logger) Log(level LogLevel, message string, fields ...Field) {
+ l.getLogger().WithFields(zapToLogr(fields)).Log(logr.Level(level), message)
+}
+
+func (l *Logger) LogM(levels []LogLevel, message string, fields ...Field) {
+ var logger *logr.Logger
+ for _, lvl := range levels {
+ if isLevelEnabled(l.getLogger(), logr.Level(lvl)) {
+ // don't create logger with fields unless at least one level is active.
+ if logger == nil {
+ l := l.getLogger().WithFields(zapToLogr(fields))
+ logger = &l
+ }
+ logger.Log(logr.Level(lvl), message)
+ }
+ }
+}
+
+func (l *Logger) Flush(cxt context.Context) error {
+ return l.getLogger().Logr().FlushWithTimeout(cxt)
+}
+
+// ShutdownAdvancedLogging stops the logger from accepting new log records and tries to
+// flush queues within the context timeout. Once complete, all targets are shut down
+// and any resources are released.
+func (l *Logger) ShutdownAdvancedLogging(cxt context.Context) error {
+ err := l.getLogger().Logr().ShutdownWithTimeout(cxt)
+ l.setLogger(newLogr())
+ return err
+}
+
+// ConfigAdvancedLogging (re)configures advanced logging based on the
+// specified log targets. This is the easiest way to get the advanced logger
+// configured via a config source such as file.
+func (l *Logger) ConfigAdvancedLogging(targets LogTargetCfg) error {
+ if err := l.ShutdownAdvancedLogging(context.Background()); err != nil {
+ Error("error shutting down previous logger", Err(err))
+ }
+
+ err := logrAddTargets(l.getLogger(), targets)
+ return err
+}
+
+// AddTarget adds one or more logr.Target to the advanced logger. This is the preferred method
+// to add custom targets or provide configuration that cannot be expressed via a
+// config source.
+func (l *Logger) AddTarget(targets ...logr.Target) error {
+ return l.getLogger().Logr().AddTarget(targets...)
+}
+
+// RemoveTargets selectively removes targets that were previously added to this logger instance
+// using the passed in filter function. The filter function should return true to remove the target
+// and false to keep it.
+func (l *Logger) RemoveTargets(ctx context.Context, f func(ti TargetInfo) bool) error {
+ // Use locally defined TargetInfo type so we don't spread Logr dependencies.
+ fc := func(tic logr.TargetInfo) bool {
+ return f(TargetInfo(tic))
+ }
+ return l.getLogger().Logr().RemoveTargets(ctx, fc)
+}
+
+// EnableMetrics enables metrics collection by supplying a MetricsCollector.
+// The MetricsCollector provides counters and gauges that are updated by log targets.
+func (l *Logger) EnableMetrics(collector logr.MetricsCollector) error {
+ return l.getLogger().Logr().SetMetricsCollector(collector)
+}
+
+// getLogger is a concurrency-safe getter of the logr logger
+func (l *Logger) getLogger() *logr.Logger {
+ defer l.mutex.RUnlock()
+ l.mutex.RLock()
+ return l.logrLogger
+}
+
+// setLogger is a concurrency-safe setter of the logr logger
+func (l *Logger) setLogger(logger *logr.Logger) {
+ defer l.mutex.Unlock()
+ l.mutex.Lock()
+ l.logrLogger = logger
+}
+
+// DisableZap is called to disable Zap, and Logr will be used instead. Any Logger
+// instances created after this call will only use Logr.
+//
+// This is needed for unit testing as Zap has no shutdown capabilities
+// and holds file handles until process exit. Currently unit tests create
+// many server instances, and thus many Zap log file handles.
+//
+// This method will be removed when Zap is permanently replaced.
+func DisableZap() {
+ atomic.StoreInt32(&disableZap, 1)
+}
+
+// EnableZap re-enables Zap such that any Logger instances created after this
+// call will allow Zap targets.
+func EnableZap() {
+ atomic.StoreInt32(&disableZap, 0)
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/logr.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/logr.go
new file mode 100644
index 00000000..c44fafa0
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/logr.go
@@ -0,0 +1,244 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "os"
+
+ "github.com/hashicorp/go-multierror"
+ "github.com/mattermost/logr"
+ logrFmt "github.com/mattermost/logr/format"
+ "github.com/mattermost/logr/target"
+ "go.uber.org/zap/zapcore"
+)
+
+const (
+ DefaultMaxTargetQueue = 1000
+ DefaultSysLogPort = 514
+)
+
+type LogLevel struct {
+ ID logr.LevelID
+ Name string
+ Stacktrace bool
+}
+
+type LogTarget struct {
+ Type string // one of "console", "file", "tcp", "syslog", "none".
+ Format string // one of "json", "plain"
+ Levels []LogLevel
+ Options json.RawMessage
+ MaxQueueSize int
+}
+
+type LogTargetCfg map[string]*LogTarget
+type LogrCleanup func() error
+
+func newLogr() *logr.Logger {
+ lgr := &logr.Logr{}
+ lgr.OnExit = func(int) {}
+ lgr.OnPanic = func(interface{}) {}
+ lgr.OnLoggerError = onLoggerError
+ lgr.OnQueueFull = onQueueFull
+ lgr.OnTargetQueueFull = onTargetQueueFull
+
+ logger := lgr.NewLogger()
+ return &logger
+}
+
+func logrAddTargets(logger *logr.Logger, targets LogTargetCfg) error {
+ lgr := logger.Logr()
+ var errs error
+ for name, t := range targets {
+ target, err := NewLogrTarget(name, t)
+ if err != nil {
+			errs = multierror.Append(errs, err)
+ continue
+ }
+ if target != nil {
+ target.SetName(name)
+ lgr.AddTarget(target)
+ }
+ }
+ return errs
+}
+
+// NewLogrTarget creates a `logr.Target` based on a target config.
+// Can be used when parsing custom config files, or when programmatically adding
+// built-in targets. Use `mlog.AddTarget` to add custom targets.
+func NewLogrTarget(name string, t *LogTarget) (logr.Target, error) {
+ formatter, err := newFormatter(name, t.Format)
+ if err != nil {
+ return nil, err
+ }
+ filter := newFilter(t.Levels)
+
+ if t.MaxQueueSize == 0 {
+ t.MaxQueueSize = DefaultMaxTargetQueue
+ }
+
+ switch t.Type {
+ case "console":
+ return newConsoleTarget(name, t, filter, formatter)
+ case "file":
+ return newFileTarget(name, t, filter, formatter)
+ case "syslog":
+ return newSyslogTarget(name, t, filter, formatter)
+ case "tcp":
+ return newTCPTarget(name, t, filter, formatter)
+ case "none":
+ return nil, nil
+ }
+ return nil, fmt.Errorf("invalid type '%s' for target %s", t.Type, name)
+}
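
An illustrative sketch of driving this factory from outside the package: the option keys mirror the fileOptions struct below, LvlInfo/LvlError are assumed from this package's level definitions, the file path is a placeholder, and handing the result to the logger is assumed to go through the AddTarget helper mentioned in the doc comment above.

    package example

    import (
        "encoding/json"

        "github.com/mattermost/mattermost-server/v5/shared/mlog"
    )

    func buildFileTarget() error {
        // Keys mirror fileOptions; the path is a placeholder.
        opts := json.RawMessage(`{"Filename":"/tmp/app.log","MaxSizeMB":100,"MaxBackups":3,"Compress":true}`)

        cfg := &mlog.LogTarget{
            Type:         "file",
            Format:       "json",
            Levels:       []mlog.LogLevel{mlog.LvlInfo, mlog.LvlError},
            Options:      opts,
            MaxQueueSize: 1000,
        }

        target, err := mlog.NewLogrTarget("file_json", cfg)
        if err != nil {
            return err
        }
        _ = target // typically handed to the logger, e.g. via the AddTarget helper
        return nil
    }
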
+
+func newFilter(levels []LogLevel) logr.Filter {
+ filter := &logr.CustomFilter{}
+ for _, lvl := range levels {
+ filter.Add(logr.Level(lvl))
+ }
+ return filter
+}
+
+func newFormatter(name string, format string) (logr.Formatter, error) {
+ switch format {
+ case "json", "":
+ return &logrFmt.JSON{}, nil
+ case "plain":
+ return &logrFmt.Plain{Delim: " | "}, nil
+ default:
+ return nil, fmt.Errorf("invalid format '%s' for target %s", format, name)
+ }
+}
+
+func newConsoleTarget(name string, t *LogTarget, filter logr.Filter, formatter logr.Formatter) (logr.Target, error) {
+ type consoleOptions struct {
+ Out string `json:"Out"`
+ }
+ options := &consoleOptions{}
+ if err := json.Unmarshal(t.Options, options); err != nil {
+ return nil, err
+ }
+
+ var w io.Writer
+ switch options.Out {
+ case "stdout", "":
+ w = os.Stdout
+ case "stderr":
+ w = os.Stderr
+ default:
+ return nil, fmt.Errorf("invalid out '%s' for target %s", options.Out, name)
+ }
+
+ newTarget := target.NewWriterTarget(filter, formatter, w, t.MaxQueueSize)
+ return newTarget, nil
+}
+
+func newFileTarget(name string, t *LogTarget, filter logr.Filter, formatter logr.Formatter) (logr.Target, error) {
+ type fileOptions struct {
+ Filename string `json:"Filename"`
+ MaxSize int `json:"MaxSizeMB"`
+ MaxAge int `json:"MaxAgeDays"`
+ MaxBackups int `json:"MaxBackups"`
+ Compress bool `json:"Compress"`
+ }
+ options := &fileOptions{}
+ if err := json.Unmarshal(t.Options, options); err != nil {
+ return nil, err
+ }
+ return newFileTargetWithOpts(name, t, target.FileOptions(*options), filter, formatter)
+}
+
+func newFileTargetWithOpts(name string, t *LogTarget, opts target.FileOptions, filter logr.Filter, formatter logr.Formatter) (logr.Target, error) {
+ if opts.Filename == "" {
+ return nil, fmt.Errorf("missing 'Filename' option for target %s", name)
+ }
+ if err := checkFileWritable(opts.Filename); err != nil {
+ return nil, fmt.Errorf("error writing to 'Filename' for target %s: %w", name, err)
+ }
+
+ newTarget := target.NewFileTarget(filter, formatter, opts, t.MaxQueueSize)
+ return newTarget, nil
+}
+
+func newSyslogTarget(name string, t *LogTarget, filter logr.Filter, formatter logr.Formatter) (logr.Target, error) {
+ options := &SyslogParams{}
+ if err := json.Unmarshal(t.Options, options); err != nil {
+ return nil, err
+ }
+
+ if options.IP == "" {
+ return nil, fmt.Errorf("missing 'IP' option for target %s", name)
+ }
+ if options.Port == 0 {
+ options.Port = DefaultSysLogPort
+ }
+ return NewSyslogTarget(filter, formatter, options, t.MaxQueueSize)
+}
+
+func newTCPTarget(name string, t *LogTarget, filter logr.Filter, formatter logr.Formatter) (logr.Target, error) {
+ options := &TcpParams{}
+ if err := json.Unmarshal(t.Options, options); err != nil {
+ return nil, err
+ }
+
+ if options.IP == "" {
+ return nil, fmt.Errorf("missing 'IP' option for target %s", name)
+ }
+ if options.Port == 0 {
+ return nil, fmt.Errorf("missing 'Port' option for target %s", name)
+ }
+ return NewTcpTarget(filter, formatter, options, t.MaxQueueSize)
+}
+
+func checkFileWritable(filename string) error {
+ // try opening/creating the file for writing
+ file, err := os.OpenFile(filename, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0600)
+ if err != nil {
+ return err
+ }
+ file.Close()
+ return nil
+}
+
+func isLevelEnabled(logger *logr.Logger, level logr.Level) bool {
+ if logger == nil || logger.Logr() == nil {
+ return false
+ }
+
+ status := logger.Logr().IsLevelEnabled(level)
+ return status.Enabled
+}
+
+// zapToLogr converts Zap fields to Logr fields.
+// This will not be needed once Logr is used for all logging.
+func zapToLogr(zapFields []Field) logr.Fields {
+ encoder := zapcore.NewMapObjectEncoder()
+ for _, zapField := range zapFields {
+ zapField.AddTo(encoder)
+ }
+ return logr.Fields(encoder.Fields)
+}
+
+// mlogLevelToLogrLevels converts an mlog logger level to
+// a slice of discrete Logr levels.
+func mlogLevelToLogrLevels(level string) []LogLevel {
+ levels := make([]LogLevel, 0)
+ levels = append(levels, LvlError, LvlPanic, LvlFatal, LvlStdLog)
+
+ switch level {
+ case LevelDebug:
+ levels = append(levels, LvlDebug)
+ fallthrough
+ case LevelInfo:
+ levels = append(levels, LvlInfo)
+ fallthrough
+ case LevelWarn:
+ levels = append(levels, LvlWarn)
+ }
+ return levels
+}
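
To make the fallthrough mapping concrete, a hypothetical in-package test (LevelInfo and the Lvl* values are assumed to be defined elsewhere in this package):

    package mlog

    import "testing"

    // "info" yields the four always-on levels plus LvlInfo and LvlWarn;
    // "debug" additionally includes LvlDebug, and "warn" adds only LvlWarn.
    func TestMlogLevelToLogrLevels(t *testing.T) {
        got := mlogLevelToLogrLevels(LevelInfo)
        if len(got) != 6 {
            t.Fatalf("expected 6 levels for %q, got %d", LevelInfo, len(got))
        }
    }
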
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/stdlog.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/stdlog.go
new file mode 100644
index 00000000..fd702abf
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/stdlog.go
@@ -0,0 +1,87 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "bytes"
+ "strings"
+
+ "go.uber.org/zap"
+ "go.uber.org/zap/zapcore"
+)
+
+// stdLogLevelInterpreterCore is a zapcore.Core wrapper that interprets log messages
+// from a standard library logger and translates their level prefixes to zapcore levels.
+type stdLogLevelInterpreterCore struct {
+ wrappedCore zapcore.Core
+}
+
+func stdLogInterpretZapEntry(entry zapcore.Entry) zapcore.Entry {
+	// Well-known level prefixes emitted by standard loggers, checked in order;
+	// the first match re-levels the entry and is stripped from the message.
+	prefixes := []struct {
+		prefix string
+		level  zapcore.Level
+	}{
+		{"[DEBUG]", zapcore.DebugLevel},
+		{"[DEBG]", zapcore.DebugLevel},
+		{"[WARN]", zapcore.WarnLevel},
+		{"[ERROR]", zapcore.ErrorLevel},
+		{"[EROR]", zapcore.ErrorLevel},
+		{"[ERR]", zapcore.ErrorLevel},
+		{"[INFO]", zapcore.InfoLevel},
+	}
+	for _, p := range prefixes {
+		if strings.HasPrefix(entry.Message, p.prefix) {
+			entry.Level = p.level
+			entry.Message = strings.TrimPrefix(entry.Message, p.prefix)
+			break
+		}
+	}
+	return entry
+}
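
For illustration, a hypothetical in-package test of the prefix handling (note the leading space after the prefix is preserved):

    package mlog

    import (
        "testing"

        "go.uber.org/zap/zapcore"
    )

    func TestStdLogInterpretZapEntry(t *testing.T) {
        in := zapcore.Entry{Level: zapcore.InfoLevel, Message: "[ERROR] connection lost"}
        out := stdLogInterpretZapEntry(in)
        if out.Level != zapcore.ErrorLevel || out.Message != " connection lost" {
            t.Fatalf("unexpected entry: level=%v message=%q", out.Level, out.Message)
        }
    }
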
+
+func (s *stdLogLevelInterpreterCore) Enabled(lvl zapcore.Level) bool {
+ return s.wrappedCore.Enabled(lvl)
+}
+
+func (s *stdLogLevelInterpreterCore) With(fields []zapcore.Field) zapcore.Core {
+ return s.wrappedCore.With(fields)
+}
+
+func (s *stdLogLevelInterpreterCore) Check(entry zapcore.Entry, checkedEntry *zapcore.CheckedEntry) *zapcore.CheckedEntry {
+ entry = stdLogInterpretZapEntry(entry)
+ return s.wrappedCore.Check(entry, checkedEntry)
+}
+
+func (s *stdLogLevelInterpreterCore) Write(entry zapcore.Entry, fields []zapcore.Field) error {
+ entry = stdLogInterpretZapEntry(entry)
+ return s.wrappedCore.Write(entry, fields)
+}
+
+func (s *stdLogLevelInterpreterCore) Sync() error {
+ return s.wrappedCore.Sync()
+}
+
+func getStdLogOption() zap.Option {
+ return zap.WrapCore(
+ func(core zapcore.Core) zapcore.Core {
+ return &stdLogLevelInterpreterCore{core}
+ },
+ )
+}
+
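+// loggerWriter is an io.Writer that forwards each written line to logFunc,
+// letting output from standard library loggers be routed into mlog.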
+type loggerWriter struct {
+ logFunc func(msg string, fields ...Field)
+}
+
+func (l *loggerWriter) Write(p []byte) (int, error) {
+ trimmed := string(bytes.TrimSpace(p))
+ for _, line := range strings.Split(trimmed, "\n") {
+ l.logFunc(line)
+ }
+ return len(p), nil
+}
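
A minimal in-package sketch of how this writer behaves (the function name is hypothetical; Logger.Info is assumed from this package's log.go):

    package mlog

    import "fmt"

    // Each line written becomes one call to logFunc. The "[WARN]" prefix is only
    // re-leveled when the logger's zap core has been wrapped via getStdLogOption().
    func redirectSketch(logger *Logger) {
        w := &loggerWriter{logFunc: logger.Info}
        fmt.Fprint(w, "[WARN] disk space low\nstarting worker") // two Info calls
    }
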
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/sugar.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/sugar.go
new file mode 100644
index 00000000..2368b085
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/sugar.go
@@ -0,0 +1,30 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "go.uber.org/zap"
+)
+
+// SugarLogger is made for the plugin interface; use the regular Logger elsewhere.
+type SugarLogger struct {
+ wrappedLogger *Logger
+ zapSugar *zap.SugaredLogger
+}
+
+func (l *SugarLogger) Debug(msg string, keyValuePairs ...interface{}) {
+ l.zapSugar.Debugw(msg, keyValuePairs...)
+}
+
+func (l *SugarLogger) Info(msg string, keyValuePairs ...interface{}) {
+ l.zapSugar.Infow(msg, keyValuePairs...)
+}
+
+func (l *SugarLogger) Error(msg string, keyValuePairs ...interface{}) {
+ l.zapSugar.Errorw(msg, keyValuePairs...)
+}
+
+func (l *SugarLogger) Warn(msg string, keyValuePairs ...interface{}) {
+ l.zapSugar.Warnw(msg, keyValuePairs...)
+}
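
An illustrative call site; how the *mlog.SugarLogger is obtained is outside this file (a plugin-facing constructor on Logger is assumed to exist elsewhere in the package):

    package example

    import "github.com/mattermost/mattermost-server/v5/shared/mlog"

    // logLogin shows the alternating key/value style the sugared logger expects.
    func logLogin(sugar *mlog.SugarLogger, userID, addr string) {
        sugar.Info("user logged in", "user_id", userID, "remote_addr", addr)
    }
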
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/syslog.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/syslog.go
new file mode 100644
index 00000000..8766a964
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/syslog.go
@@ -0,0 +1,142 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "context"
+ "crypto/tls"
+ "crypto/x509"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "io/ioutil"
+
+ "github.com/mattermost/logr"
+ "github.com/wiggin77/merror"
+ syslog "github.com/wiggin77/srslog"
+)
+
+// Syslog outputs log records to local or remote syslog.
+type Syslog struct {
+ logr.Basic
+ w *syslog.Writer
+}
+
+// SyslogParams provides parameters for dialing a syslog daemon.
+type SyslogParams struct {
+ IP string `json:"IP"`
+ Port int `json:"Port"`
+ Tag string `json:"Tag"`
+ TLS bool `json:"TLS"`
+ Cert string `json:"Cert"`
+ Insecure bool `json:"Insecure"`
+}
+
+// NewSyslogTarget creates a target capable of outputting log records to remote or local syslog, with or without TLS.
+func NewSyslogTarget(filter logr.Filter, formatter logr.Formatter, params *SyslogParams, maxQueue int) (*Syslog, error) {
+ network := "tcp"
+ var config *tls.Config
+
+ if params.TLS {
+ network = "tcp+tls"
+ config = &tls.Config{InsecureSkipVerify: params.Insecure}
+ if params.Cert != "" {
+ pool, err := getCertPool(params.Cert)
+ if err != nil {
+ return nil, err
+ }
+ config.RootCAs = pool
+ }
+ }
+ raddr := fmt.Sprintf("%s:%d", params.IP, params.Port)
+
+ writer, err := syslog.DialWithTLSConfig(network, raddr, syslog.LOG_INFO, params.Tag, config)
+ if err != nil {
+ return nil, err
+ }
+
+ s := &Syslog{w: writer}
+ s.Basic.Start(s, s, filter, formatter, maxQueue)
+
+ return s, nil
+}
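
A direct-construction sketch: the formatter comes from the logr format package this file already uses, StdFilter is assumed from the logr library, and the address, tag and CA path are placeholders.

    package example

    import (
        "github.com/mattermost/logr"
        logrFmt "github.com/mattermost/logr/format"

        "github.com/mattermost/mattermost-server/v5/shared/mlog"
    )

    // buildSyslogTarget forwards warnings and above to a remote syslog over TLS.
    func buildSyslogTarget() (logr.Target, error) {
        params := &mlog.SyslogParams{
            IP:   "logs.example.com",
            Port: 6514,
            Tag:  "mattermost",
            TLS:  true,
            Cert: "/etc/ssl/certs/log-ca.pem",
        }
        filter := &logr.StdFilter{Lvl: logr.Warn, Stacktrace: logr.Error}
        formatter := &logrFmt.Plain{Delim: " | "}

        t, err := mlog.NewSyslogTarget(filter, formatter, params, 1000)
        if err != nil {
            return nil, err
        }
        return t, nil
    }
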
+
+// Shutdown stops processing log records after making a best effort to flush the queue.
+func (s *Syslog) Shutdown(ctx context.Context) error {
+ errs := merror.New()
+
+ err := s.Basic.Shutdown(ctx)
+ errs.Append(err)
+
+ err = s.w.Close()
+ errs.Append(err)
+
+ return errs.ErrorOrNil()
+}
+
+// getCertPool returns an x509.CertPool containing the cert(s)
+// from `cert`, which can be a path to a .pem or .crt file,
+// or a base64-encoded cert.
+func getCertPool(cert string) (*x509.CertPool, error) {
+ if cert == "" {
+ return nil, errors.New("no cert provided")
+ }
+
+ // first treat as a file and try to read.
+ serverCert, err := ioutil.ReadFile(cert)
+ if err != nil {
+ // maybe it's a base64 encoded cert
+ serverCert, err = base64.StdEncoding.DecodeString(cert)
+ if err != nil {
+ return nil, errors.New("cert cannot be read")
+ }
+ }
+
+ pool := x509.NewCertPool()
+ if ok := pool.AppendCertsFromPEM(serverCert); ok {
+ return pool, nil
+ }
+ return nil, errors.New("cannot parse cert")
+}
+
+// Write converts the log record to bytes, via the Formatter,
+// and outputs to syslog.
+func (s *Syslog) Write(rec *logr.LogRec) error {
+ _, stacktrace := s.IsLevelEnabled(rec.Level())
+
+ buf := rec.Logger().Logr().BorrowBuffer()
+ defer rec.Logger().Logr().ReleaseBuffer(buf)
+
+ buf, err := s.Formatter().Format(rec, stacktrace, buf)
+ if err != nil {
+ return err
+ }
+ txt := buf.String()
+
+ switch rec.Level() {
+ case logr.Panic, logr.Fatal:
+ err = s.w.Crit(txt)
+ case logr.Error:
+ err = s.w.Err(txt)
+ case logr.Warn:
+ err = s.w.Warning(txt)
+ case logr.Debug, logr.Trace:
+ err = s.w.Debug(txt)
+ default:
+ // logr.Info plus all custom levels.
+ err = s.w.Info(txt)
+ }
+
+ if err != nil {
+ reporter := rec.Logger().Logr().ReportError
+ reporter(fmt.Errorf("syslog write fail: %w", err))
+ // syslog writer will try to reconnect.
+ }
+ return err
+}
+
+// String returns a string representation of this target.
+func (s *Syslog) String() string {
+ return "SyslogTarget"
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/tcp.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/tcp.go
new file mode 100644
index 00000000..d65b43ee
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/tcp.go
@@ -0,0 +1,273 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "context"
+ "crypto/tls"
+ "errors"
+ "fmt"
+ "net"
+ _ "net/http/pprof"
+ "sync"
+ "time"
+
+ "github.com/hashicorp/go-multierror"
+ "github.com/mattermost/logr"
+)
+
+const (
+ DialTimeoutSecs = 30
+ WriteTimeoutSecs = 30
+ RetryBackoffMillis int64 = 100
+ MaxRetryBackoffMillis int64 = 30 * 1000 // 30 seconds
+)
+
+// Tcp outputs log records to a raw socket server.
+type Tcp struct {
+ logr.Basic
+
+ params *TcpParams
+ addy string
+
+ mutex sync.Mutex
+ conn net.Conn
+ monitor chan struct{}
+ shutdown chan struct{}
+}
+
+// TcpParams provides parameters for dialing a socket server.
+type TcpParams struct {
+ IP string `json:"IP"`
+ Port int `json:"Port"`
+ TLS bool `json:"TLS"`
+ Cert string `json:"Cert"`
+ Insecure bool `json:"Insecure"`
+}
+
+// NewTcpTarget creates a target capable of outputting log records to a raw socket, with or without TLS.
+func NewTcpTarget(filter logr.Filter, formatter logr.Formatter, params *TcpParams, maxQueue int) (*Tcp, error) {
+ tcp := &Tcp{
+ params: params,
+ addy: fmt.Sprintf("%s:%d", params.IP, params.Port),
+ monitor: make(chan struct{}),
+ shutdown: make(chan struct{}),
+ }
+ tcp.Basic.Start(tcp, tcp, filter, formatter, maxQueue)
+
+ return tcp, nil
+}
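
For the configuration-driven path, a sketch of a "tcp" target definition; the option keys mirror TcpParams above, and the host, port and cert path are placeholders.

    package example

    import (
        "encoding/json"

        "github.com/mattermost/mattermost-server/v5/shared/mlog"
    )

    func tcpTargetConfig() *mlog.LogTarget {
        return &mlog.LogTarget{
            Type:    "tcp",
            Format:  "plain",
            Levels:  []mlog.LogLevel{mlog.LvlError, mlog.LvlWarn},
            Options: json.RawMessage(`{"IP":"10.0.0.12","Port":6066,"TLS":true,"Cert":"/etc/mattermost/log-ca.pem"}`),
            // MaxQueueSize left at zero; NewLogrTarget falls back to DefaultMaxTargetQueue.
        }
    }
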
+
+// getConn provides a net.Conn. If a connection already exists it is returned immediately;
+// otherwise this method blocks until a new connection is created, the dial times out, or shutdown begins.
+func (tcp *Tcp) getConn() (net.Conn, error) {
+ tcp.mutex.Lock()
+ defer tcp.mutex.Unlock()
+
+ Log(LvlTcpLogTarget, "getConn enter", String("addy", tcp.addy))
+ defer Log(LvlTcpLogTarget, "getConn exit", String("addy", tcp.addy))
+
+ if tcp.conn != nil {
+ Log(LvlTcpLogTarget, "reusing existing conn", String("addy", tcp.addy)) // use "With" once Zap is removed
+ return tcp.conn, nil
+ }
+
+ type result struct {
+ conn net.Conn
+ err error
+ }
+
+	connChan := make(chan result, 1) // buffered so the dial goroutine can't leak on shutdown
+ ctx, cancel := context.WithTimeout(context.Background(), time.Second*DialTimeoutSecs)
+ defer cancel()
+
+ go func(ctx context.Context, ch chan result) {
+		Log(LvlTcpLogTarget, "dialing", String("addy", tcp.addy))
+ conn, err := tcp.dial(ctx)
+ if err == nil {
+ tcp.conn = conn
+ tcp.monitor = make(chan struct{})
+ go monitor(tcp.conn, tcp.monitor, Log)
+ }
+ ch <- result{conn: conn, err: err}
+ }(ctx, connChan)
+
+ select {
+ case <-tcp.shutdown:
+ return nil, errors.New("shutdown")
+ case res := <-connChan:
+ return res.conn, res.err
+ }
+}
+
+// dial connects to a TCP socket, and optionally performs a TLS handshake.
+// A non-nil context must be provided which can cancel the dial.
+func (tcp *Tcp) dial(ctx context.Context) (net.Conn, error) {
+ var dialer net.Dialer
+ dialer.Timeout = time.Second * DialTimeoutSecs
+ conn, err := dialer.DialContext(ctx, "tcp", fmt.Sprintf("%s:%d", tcp.params.IP, tcp.params.Port))
+ if err != nil {
+ return nil, err
+ }
+
+ if !tcp.params.TLS {
+ return conn, nil
+ }
+
+ Log(LvlTcpLogTarget, "TLS handshake", String("addy", tcp.addy))
+
+ tlsconfig := &tls.Config{
+ ServerName: tcp.params.IP,
+ InsecureSkipVerify: tcp.params.Insecure,
+ }
+ if tcp.params.Cert != "" {
+ pool, err := getCertPool(tcp.params.Cert)
+ if err != nil {
+ return nil, err
+ }
+ tlsconfig.RootCAs = pool
+ }
+
+ tlsConn := tls.Client(conn, tlsconfig)
+ if err := tlsConn.Handshake(); err != nil {
+ return nil, err
+ }
+ return tlsConn, nil
+}
+
+func (tcp *Tcp) close() error {
+ tcp.mutex.Lock()
+ defer tcp.mutex.Unlock()
+
+ var err error
+ if tcp.conn != nil {
+ Log(LvlTcpLogTarget, "closing connection", String("addy", tcp.addy))
+ close(tcp.monitor)
+ err = tcp.conn.Close()
+ tcp.conn = nil
+ }
+ return err
+}
+
+// Shutdown stops processing log records after making a best effort to flush the queue.
+func (tcp *Tcp) Shutdown(ctx context.Context) error {
+ errs := &multierror.Error{}
+
+ Log(LvlTcpLogTarget, "shutting down", String("addy", tcp.addy))
+
+ if err := tcp.Basic.Shutdown(ctx); err != nil {
+ errs = multierror.Append(errs, err)
+ }
+
+ if err := tcp.close(); err != nil {
+ errs = multierror.Append(errs, err)
+ }
+
+ close(tcp.shutdown)
+ return errs.ErrorOrNil()
+}
+
+// Write converts the log record to bytes, via the Formatter, and outputs to the socket.
+// It is called by a dedicated target goroutine and blocks until success or shutdown.
+func (tcp *Tcp) Write(rec *logr.LogRec) error {
+ _, stacktrace := tcp.IsLevelEnabled(rec.Level())
+
+ buf := rec.Logger().Logr().BorrowBuffer()
+ defer rec.Logger().Logr().ReleaseBuffer(buf)
+
+ buf, err := tcp.Formatter().Format(rec, stacktrace, buf)
+ if err != nil {
+ return err
+ }
+
+ try := 1
+ backoff := RetryBackoffMillis
+ for {
+ select {
+ case <-tcp.shutdown:
+ return err
+ default:
+ }
+
+ conn, err := tcp.getConn()
+ if err != nil {
+ Log(LvlTcpLogTarget, "failed getting connection", String("addy", tcp.addy), Err(err))
+ reporter := rec.Logger().Logr().ReportError
+ reporter(fmt.Errorf("log target %s connection error: %w", tcp.String(), err))
+ backoff = tcp.sleep(backoff)
+ continue
+ }
+
+ conn.SetWriteDeadline(time.Now().Add(time.Second * WriteTimeoutSecs))
+ _, err = buf.WriteTo(conn)
+ if err == nil {
+ return nil
+ }
+
+ Log(LvlTcpLogTarget, "write error", String("addy", tcp.addy), Err(err))
+ reporter := rec.Logger().Logr().ReportError
+ reporter(fmt.Errorf("log target %s write error: %w", tcp.String(), err))
+
+ _ = tcp.close()
+
+ backoff = tcp.sleep(backoff)
+ try++
+ Log(LvlTcpLogTarget, "retrying write", String("addy", tcp.addy), Int("try", try))
+ }
+}
+
+// monitor continuously tries to read from the connection to detect a closed socket.
+// This is needed because the TCP target uses a write-only socket and Linux systems
+// can take a long time to detect a loss of connectivity when only writing;
+// the writes simply fail without an error being returned.
+func monitor(conn net.Conn, done <-chan struct{}, logFunc LogFuncCustom) {
+ addy := conn.RemoteAddr().String()
+ defer logFunc(LvlTcpLogTarget, "monitor exiting", String("addy", addy))
+
+ buf := make([]byte, 1)
+ for {
+ logFunc(LvlTcpLogTarget, "monitor loop", String("addy", addy))
+
+ select {
+ case <-done:
+ return
+ case <-time.After(1 * time.Second):
+ }
+
+ err := conn.SetReadDeadline(time.Now().Add(time.Second * 30))
+ if err != nil {
+ continue
+ }
+
+ _, err = conn.Read(buf)
+
+ if errt, ok := err.(net.Error); ok && errt.Timeout() {
+ // read timeout is expected, keep looping.
+ continue
+ }
+
+ // Any other error closes the connection, forcing a reconnect.
+ logFunc(LvlTcpLogTarget, "monitor closing connection", Err(err))
+ conn.Close()
+ return
+ }
+}
+
+// String returns a string representation of this target.
+func (tcp *Tcp) String() string {
+ return fmt.Sprintf("TcpTarget[%s:%d]", tcp.params.IP, tcp.params.Port)
+}
+
+func (tcp *Tcp) sleep(backoff int64) int64 {
+ select {
+ case <-tcp.shutdown:
+ case <-time.After(time.Millisecond * time.Duration(backoff)):
+ }
+
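+	// Grow the backoff by 50% per retry, capped at MaxRetryBackoffMillis (30s).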
+ nextBackoff := backoff + (backoff >> 1)
+ if nextBackoff > MaxRetryBackoffMillis {
+ nextBackoff = MaxRetryBackoffMillis
+ }
+ return nextBackoff
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/test-tls-client-cert.pem b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/test-tls-client-cert.pem
new file mode 100644
index 00000000..6ce8d042
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/test-tls-client-cert.pem
@@ -0,0 +1,43 @@
+-----BEGIN CERTIFICATE-----
+MIIDjzCCAnegAwIBAgIRAPYfRSwdzKopBKxYxKqslJUwDQYJKoZIhvcNAQELBQAw
+JzElMCMGA1UEAwwcTWF0dGVybW9zdCwgSW5jLiBJbnRlcm5hbCBDQTAeFw0xOTAz
+MjIwMDE0MTVaFw0yMjAzMDYwMDE0MTVaMDsxOTA3BgNVBAMTME1hdHRlcm1vc3Qs
+IEluYy4gSW50ZXJuYWwgSW50ZXJtZWRpYXRlIEF1dGhvcml0eTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMjliRdmvnNL4u/Jr/M2dPwQmTJXEBY/Vq9Q
+vAU52X3tRMCPxcaFz+x6ftuvdO2NdohXGAmtx9QU5LZcvFeTDpoVEBo9A+4jtLvD
+DZYaTNLpJmoSoJHaDbdWX+OAOqyDiWS741LuiMKWHhew9QOisat2ZINPxjmAd9wE
+xthTMgzsv7MUqnMer8U5OGQ0Qy7wAmNRc+2K3qPwkxe2RUvcte50DUFNgxEginsh
+vrkOXR383vUCZfu72qu8oggjiQpyTllu5je2Ap6JLjYLkEMiMqrYADuWor/ZHwa6
+WrFqVETxWfAV5u9Eh0wZM/KKYwRQuw9y+Nans77FmUl1tVWWNN8CAwEAAaOBoTCB
+njAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBQY4Uqswyr2hO/HetZt2RDxJdTIPjBi
+BgNVHSMEWzBZgBRFZXVg2Z5tNIsWeWjBLEy2yzKbMKErpCkwJzElMCMGA1UEAwwc
+TWF0dGVybW9zdCwgSW5jLiBJbnRlcm5hbCBDQYIUEifGUOM+bIFZo1tkjZB5YGBr
+0xEwCwYDVR0PBAQDAgEGMA0GCSqGSIb3DQEBCwUAA4IBAQAEdexL30Q0zBHmPAH8
+LhdK7dbzW1CmILbxRZlKAwRN+hKRXiMW3MHIkhNuoV9Aev602Q+ja4lWsRi/ktOL
+ni1FWx5gSScgdG8JGj47dOmoT3vXKX7+umiv4rQLPDl9/DKMuv204OYJq6VT+uNU
+6C6kL157jGJEO76H4fMZ8oYsD7Sq0zjiNKtuCYii0ngH3j3gB1jACLqRgveU7MdT
+pqOV2KfY31+h8VBtkUvljNztQ9xNY8Fjmt0SMf7E3FaUcaar3ZCr70G5aU3dKbe7
+47vGOBa5tCqw4YK0jgDKid3IJQul9a3J1mSsH8Wy3to9cAV4KGZBQLnzCX15a/+v
+3yVh
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIDfjCCAmagAwIBAgIUEifGUOM+bIFZo1tkjZB5YGBr0xEwDQYJKoZIhvcNAQEL
+BQAwJzElMCMGA1UEAwwcTWF0dGVybW9zdCwgSW5jLiBJbnRlcm5hbCBDQTAeFw0x
+OTAzMjEyMTI4NDNaFw0yOTAzMTgyMTI4NDNaMCcxJTAjBgNVBAMMHE1hdHRlcm1v
+c3QsIEluYy4gSW50ZXJuYWwgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQDH0Xq5rMBGpKOVWTpb5MnaJIWFP/vOtvEk+7hVrfOfe1/5x0Kk3UgAHj85
+otaEZD1Lhn/JLkEqCiE/UXMJFwJDlNcO4CkdKBSpYX4bKAqy5q/X3QwioMSNpJG1
++YYrNGBH0sgKcKjyCaLhmqYLD0xZDVOmWIYBU9jUPyXw5U0tnsVrTqGMxVkm1xCY
+krCWN1ZoUrLvL0MCZc5qpxoPTopr9UO9cqSBSuy6BVWVuEWBZhpqHt+ul8VxhzzY
+q1k4l7r2qw+/wm1iJBedTeBVeWNag8JaVfLgu+/W7oJVlPO32Po7pnvHp8iJ3b4K
+zXyVHaTX4S6Em+6LV8855TYrShzlAgMBAAGjgaEwgZ4wHQYDVR0OBBYEFEVldWDZ
+nm00ixZ5aMEsTLbLMpswMGIGA1UdIwRbMFmAFEVldWDZnm00ixZ5aMEsTLbLMpsw
+oSukKTAnMSUwIwYDVQQDDBxNYXR0ZXJtb3N0LCBJbmMuIEludGVybmFsIENBghQS
+J8ZQ4z5sgVmjW2SNkHlgYGvTETAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIBBjAN
+BgkqhkiG9w0BAQsFAAOCAQEAPiCWFmopyAkY2T3Zyo4yaRPhX1+VOTMKJtY6EUhq
+/GHz6kzEyvCUBf0N892cibGxekrEoItY9NqO6RQRfowg+Gn5kc13z4NyL2W8/eoT
+Xy0ZvfaQbU++fQ6pVtWtMblDMU9xiYd7/MDvJpO328l1Vhcdp8kEi+lCvpy0sCRc
+PxzPhbgCMAbZEGx+4TMQd4SZKzlRxW/2fflpReh6v1Dv0VDUSYQWwsUnaLpdKHfh
+a5k0vuySYcszE4YKlY0zakeFlJfp7fBp1xTwcdW8aTfw15EicPMwTc6xxA4JJUJx
+cddu817n1nayK5u6r9Qh1oIVkr0nC9YELMMy4dpPgJ88SA==
+-----END CERTIFICATE-----
diff --git a/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/testing.go b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/testing.go
new file mode 100644
index 00000000..6b41a7e4
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v5/shared/mlog/testing.go
@@ -0,0 +1,46 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "io"
+ "strings"
+ "sync"
+ "testing"
+
+ "go.uber.org/zap"
+ "go.uber.org/zap/zapcore"
+)
+
+// testingWriter is an io.Writer that writes through the testing.TB's Log method
+type testingWriter struct {
+ tb testing.TB
+}
+
+func (tw *testingWriter) Write(b []byte) (int, error) {
+ tw.tb.Log(strings.TrimSpace(string(b)))
+ return len(b), nil
+}
+
+// NewTestingLogger creates a Logger that proxies logs through a testing interface.
+// This allows tests that spin up App instances to avoid spewing logs unless the test fails or -verbose is specified.
+func NewTestingLogger(tb testing.TB, writer io.Writer) *Logger {
+ logWriter := &testingWriter{tb}
+ multiWriter := io.MultiWriter(logWriter, writer)
+ logWriterSync := zapcore.AddSync(multiWriter)
+
+ testingLogger := &Logger{
+ consoleLevel: zap.NewAtomicLevelAt(getZapLevel("debug")),
+ fileLevel: zap.NewAtomicLevelAt(getZapLevel("info")),
+ logrLogger: newLogr(),
+ mutex: &sync.RWMutex{},
+ }
+
+ logWriterCore := zapcore.NewCore(makeEncoder(true, false), zapcore.Lock(logWriterSync), testingLogger.consoleLevel)
+
+ testingLogger.zap = zap.New(logWriterCore,
+ zap.AddCaller(),
+ )
+ return testingLogger
+}
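
A usage sketch in a test (the test name, message and field are illustrative; Info and String are assumed from this package's log.go):

    package example

    import (
        "bytes"
        "testing"

        "github.com/mattermost/mattermost-server/v5/shared/mlog"
    )

    func TestServerStartupLogging(t *testing.T) {
        buf := &bytes.Buffer{}
        logger := mlog.NewTestingLogger(t, buf)

        // Output goes to t.Log (shown only on failure or with -v) and to buf.
        logger.Info("server starting", mlog.String("addr", ":8065"))

        if buf.Len() == 0 {
            t.Fatal("expected log output to be captured")
        }
    }
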