author    Wim <wim@42.be>  2021-10-16 23:11:32 +0200
committer Wim <wim@42.be>  2021-10-16 23:23:24 +0200
commit    20f6c05ec50739d31f4dbe9fde0d223f2c43f6e8 (patch)
tree      230edca06449a8d1755f08aabf45a03e07e6f17c /vendor/github.com/mattermost/mattermost-server/v6/shared
parent    57fce93af7f64f025cec6f3ed6088163086bc9fe (diff)
Update vendor
Diffstat (limited to 'vendor/github.com/mattermost/mattermost-server/v6/shared')
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/filesstore.go | 83
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/localstore.go | 211
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3_overrides.go | 56
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3store.go | 442
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/i18n/i18n.go | 185
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/autolink.go | 255
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/block_quote.go | 62
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/blocks.go | 154
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/document.go | 22
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/fenced_code.go | 112
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html.go | 192
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html_entities.go | 2132
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/indented_code.go | 98
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inlines.go | 663
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inspect.go | 78
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/lines.go | 32
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/links.go | 184
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/list.go | 220
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/markdown.go | 147
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/paragraph.go | 71
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/reference_definition.go | 75
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/default.go | 63
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/global.go | 132
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/levels.go | 58
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/mlog.go | 419
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/options.go | 55
-rw-r--r--  vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/tlog.go | 79
27 files changed, 6280 insertions, 0 deletions
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/filesstore.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/filesstore.go
new file mode 100644
index 00000000..ef02895d
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/filesstore.go
@@ -0,0 +1,83 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "io"
+ "time"
+
+ "github.com/pkg/errors"
+)
+
+const (
+ driverS3 = "amazons3"
+ driverLocal = "local"
+)
+
+type ReadCloseSeeker interface {
+ io.ReadCloser
+ io.Seeker
+}
+
+type FileBackend interface {
+ TestConnection() error
+
+ Reader(path string) (ReadCloseSeeker, error)
+ ReadFile(path string) ([]byte, error)
+ FileExists(path string) (bool, error)
+ FileSize(path string) (int64, error)
+ CopyFile(oldPath, newPath string) error
+ MoveFile(oldPath, newPath string) error
+ WriteFile(fr io.Reader, path string) (int64, error)
+ AppendFile(fr io.Reader, path string) (int64, error)
+ RemoveFile(path string) error
+ FileModTime(path string) (time.Time, error)
+
+ ListDirectory(path string) ([]string, error)
+ RemoveDirectory(path string) error
+}
+
+type FileBackendSettings struct {
+ DriverName string
+ Directory string
+ AmazonS3AccessKeyId string
+ AmazonS3SecretAccessKey string
+ AmazonS3Bucket string
+ AmazonS3PathPrefix string
+ AmazonS3Region string
+ AmazonS3Endpoint string
+ AmazonS3SSL bool
+ AmazonS3SignV2 bool
+ AmazonS3SSE bool
+ AmazonS3Trace bool
+}
+
+func (settings *FileBackendSettings) CheckMandatoryS3Fields() error {
+ if settings.AmazonS3Bucket == "" {
+ return errors.New("missing s3 bucket settings")
+ }
+
+ // If the S3 endpoint is not set, fall back to the default AWS endpoint.
+ if settings.AmazonS3Endpoint == "" {
+ settings.AmazonS3Endpoint = "s3.amazonaws.com"
+ }
+
+ return nil
+}
+
+func NewFileBackend(settings FileBackendSettings) (FileBackend, error) {
+ switch settings.DriverName {
+ case driverS3:
+ backend, err := NewS3FileBackend(settings)
+ if err != nil {
+ return nil, errors.Wrap(err, "unable to connect to the s3 backend")
+ }
+ return backend, nil
+ case driverLocal:
+ return &LocalFileBackend{
+ directory: settings.Directory,
+ }, nil
+ }
+ return nil, errors.New("no valid filestorage driver found")
+}
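
For orientation, a minimal usage sketch of the FileBackend API added above, assuming the vendored import path; the directory and file names are made up for illustration.

    package main

    import (
        "bytes"
        "fmt"

        "github.com/mattermost/mattermost-server/v6/shared/filestore"
    )

    func main() {
        // The local driver only needs DriverName and Directory.
        backend, err := filestore.NewFileBackend(filestore.FileBackendSettings{
            DriverName: "local",
            Directory:  "/tmp/filestore-demo", // hypothetical root directory
        })
        if err != nil {
            panic(err)
        }

        // WriteFile takes an io.Reader and a path relative to the backend root.
        if _, err := backend.WriteFile(bytes.NewReader([]byte("hello")), "demo/hello.txt"); err != nil {
            panic(err)
        }

        data, err := backend.ReadFile("demo/hello.txt")
        if err != nil {
            panic(err)
        }
        fmt.Println(string(data)) // "hello"
    }
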
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/localstore.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/localstore.go
new file mode 100644
index 00000000..5ed882de
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/localstore.go
@@ -0,0 +1,211 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "time"
+
+ "github.com/pkg/errors"
+
+ "github.com/mattermost/mattermost-server/v6/shared/mlog"
+)
+
+const (
+ TestFilePath = "/testfile"
+)
+
+type LocalFileBackend struct {
+ directory string
+}
+
+// copyFile will copy a file from src path to dst path.
+// Overwrites any existing files at dst.
+// Permissions are copied from file at src to the new file at dst.
+func copyFile(src, dst string) (err error) {
+ in, err := os.Open(src)
+ if err != nil {
+ return
+ }
+ defer in.Close()
+
+ if err = os.MkdirAll(filepath.Dir(dst), os.ModePerm); err != nil {
+ return
+ }
+ out, err := os.Create(dst)
+ if err != nil {
+ return
+ }
+ defer func() {
+ if e := out.Close(); e != nil {
+ err = e
+ }
+ }()
+
+ _, err = io.Copy(out, in)
+ if err != nil {
+ return
+ }
+
+ err = out.Sync()
+ if err != nil {
+ return
+ }
+
+ stat, err := os.Stat(src)
+ if err != nil {
+ return
+ }
+ err = os.Chmod(dst, stat.Mode())
+ if err != nil {
+ return
+ }
+
+ return
+}
+
+func (b *LocalFileBackend) TestConnection() error {
+ f := bytes.NewReader([]byte("testingwrite"))
+ if _, err := writeFileLocally(f, filepath.Join(b.directory, TestFilePath)); err != nil {
+ return errors.Wrap(err, "unable to write to the local filesystem storage")
+ }
+ os.Remove(filepath.Join(b.directory, TestFilePath))
+ mlog.Debug("Able to write files to local storage.")
+ return nil
+}
+
+func (b *LocalFileBackend) Reader(path string) (ReadCloseSeeker, error) {
+ f, err := os.Open(filepath.Join(b.directory, path))
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to open file %s", path)
+ }
+ return f, nil
+}
+
+func (b *LocalFileBackend) ReadFile(path string) ([]byte, error) {
+ f, err := ioutil.ReadFile(filepath.Join(b.directory, path))
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to read file %s", path)
+ }
+ return f, nil
+}
+
+func (b *LocalFileBackend) FileExists(path string) (bool, error) {
+ _, err := os.Stat(filepath.Join(b.directory, path))
+
+ if os.IsNotExist(err) {
+ return false, nil
+ }
+
+ if err != nil {
+ return false, errors.Wrapf(err, "unable to know if file %s exists", path)
+ }
+ return true, nil
+}
+
+func (b *LocalFileBackend) FileSize(path string) (int64, error) {
+ info, err := os.Stat(filepath.Join(b.directory, path))
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to get file size for %s", path)
+ }
+ return info.Size(), nil
+}
+
+func (b *LocalFileBackend) FileModTime(path string) (time.Time, error) {
+ info, err := os.Stat(filepath.Join(b.directory, path))
+ if err != nil {
+ return time.Time{}, errors.Wrapf(err, "unable to get modification time for file %s", path)
+ }
+ return info.ModTime(), nil
+}
+
+func (b *LocalFileBackend) CopyFile(oldPath, newPath string) error {
+ if err := copyFile(filepath.Join(b.directory, oldPath), filepath.Join(b.directory, newPath)); err != nil {
+ return errors.Wrapf(err, "unable to copy file from %s to %s", oldPath, newPath)
+ }
+ return nil
+}
+
+func (b *LocalFileBackend) MoveFile(oldPath, newPath string) error {
+ if err := os.MkdirAll(filepath.Dir(filepath.Join(b.directory, newPath)), 0750); err != nil {
+ return errors.Wrapf(err, "unable to create the new destination directory %s", filepath.Dir(newPath))
+ }
+
+ if err := os.Rename(filepath.Join(b.directory, oldPath), filepath.Join(b.directory, newPath)); err != nil {
+ return errors.Wrapf(err, "unable to move the file to %s to the destination directory", newPath)
+ }
+
+ return nil
+}
+
+func (b *LocalFileBackend) WriteFile(fr io.Reader, path string) (int64, error) {
+ return writeFileLocally(fr, filepath.Join(b.directory, path))
+}
+
+func writeFileLocally(fr io.Reader, path string) (int64, error) {
+ if err := os.MkdirAll(filepath.Dir(path), 0750); err != nil {
+ directory, _ := filepath.Abs(filepath.Dir(path))
+ return 0, errors.Wrapf(err, "unable to create the directory %s for the file %s", directory, path)
+ }
+ fw, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to open the file %s to write the data", path)
+ }
+ defer fw.Close()
+ written, err := io.Copy(fw, fr)
+ if err != nil {
+ return written, errors.Wrapf(err, "unable write the data in the file %s", path)
+ }
+ return written, nil
+}
+
+func (b *LocalFileBackend) AppendFile(fr io.Reader, path string) (int64, error) {
+ fp := filepath.Join(b.directory, path)
+ if _, err := os.Stat(fp); err != nil {
+ return 0, errors.Wrapf(err, "unable to find the file %s to append the data", path)
+ }
+ fw, err := os.OpenFile(fp, os.O_WRONLY|os.O_APPEND, 0600)
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to open the file %s to append the data", path)
+ }
+ defer fw.Close()
+ written, err := io.Copy(fw, fr)
+ if err != nil {
+ return written, errors.Wrapf(err, "unable append the data in the file %s", path)
+ }
+ return written, nil
+}
+
+func (b *LocalFileBackend) RemoveFile(path string) error {
+ if err := os.Remove(filepath.Join(b.directory, path)); err != nil {
+ return errors.Wrapf(err, "unable to remove the file %s", path)
+ }
+ return nil
+}
+
+func (b *LocalFileBackend) ListDirectory(path string) ([]string, error) {
+ var paths []string
+ fileInfos, err := ioutil.ReadDir(filepath.Join(b.directory, path))
+ if err != nil {
+ if os.IsNotExist(err) {
+ return paths, nil
+ }
+ return nil, errors.Wrapf(err, "unable to list the directory %s", path)
+ }
+ for _, fileInfo := range fileInfos {
+ paths = append(paths, filepath.Join(path, fileInfo.Name()))
+ }
+ return paths, nil
+}
+
+func (b *LocalFileBackend) RemoveDirectory(path string) error {
+ if err := os.RemoveAll(filepath.Join(b.directory, path)); err != nil {
+ return errors.Wrapf(err, "unable to remove the directory %s", path)
+ }
+ return nil
+}
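
Continuing the sketch above, the remaining LocalFileBackend methods compose through the same FileBackend interface; the paths and values in the comments are again illustrative.

    // Assuming `backend` from the previous sketch.
    if _, err := backend.AppendFile(bytes.NewReader([]byte(" world")), "demo/hello.txt"); err != nil {
        panic(err)
    }
    exists, _ := backend.FileExists("demo/hello.txt") // true
    size, _ := backend.FileSize("demo/hello.txt")     // 11 bytes after the append
    names, _ := backend.ListDirectory("demo")         // ["demo/hello.txt"]
    fmt.Println(exists, size, names)
    _ = backend.RemoveDirectory("demo")
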
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3_overrides.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3_overrides.go
new file mode 100644
index 00000000..e7b29b98
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3_overrides.go
@@ -0,0 +1,56 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "context"
+ "net/http"
+
+ "github.com/minio/minio-go/v7/pkg/credentials"
+)
+
+// customTransport is used to point the request to a different server.
+// This is helpful in situations where a different service is handling AWS S3 requests
+// from multiple Mattermost applications, and the Mattermost service itself does not
+// have any S3 credentials.
+type customTransport struct {
+ base http.RoundTripper
+ host string
+ scheme string
+ client http.Client
+}
+
+// RoundTrip implements the http.RoundTripper interface.
+func (t *customTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+ // RoundTrippers should not modify the original request, so the override is
+ // applied to a clone, which is then sent instead.
+ newReq := req.Clone(context.Background())
+ newReq.URL.Scheme = t.scheme
+ newReq.URL.Host = t.host
+ return t.client.Do(newReq)
+}
+
+// customProvider is a dummy credentials provider that lets the minio client
+// work without actually providing credentials. This is needed together with a
+// custom transport in cases where the minio client itself holds no credentials
+// and relies on another entity to authenticate its requests.
+//
+// It satisfies the credentials.Provider interface.
+type customProvider struct {
+ isSignV2 bool
+}
+
+// Retrieve just returns empty credentials.
+func (cp customProvider) Retrieve() (credentials.Value, error) {
+ sign := credentials.SignatureV4
+ if cp.isSignV2 {
+ sign = credentials.SignatureV2
+ }
+ return credentials.Value{
+ SignerType: sign,
+ }, nil
+}
+
+// IsExpired always returns false.
+func (cp customProvider) IsExpired() bool { return false }
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3store.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3store.go
new file mode 100644
index 00000000..f938f4d5
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/filestore/s3store.go
@@ -0,0 +1,442 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package filestore
+
+import (
+ "context"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ s3 "github.com/minio/minio-go/v7"
+ "github.com/minio/minio-go/v7/pkg/credentials"
+ "github.com/minio/minio-go/v7/pkg/encrypt"
+ "github.com/pkg/errors"
+
+ "github.com/mattermost/mattermost-server/v6/shared/mlog"
+)
+
+// S3FileBackend contains all necessary information to communicate with
+// an AWS S3 compatible API backend.
+type S3FileBackend struct {
+ endpoint string
+ accessKey string
+ secretKey string
+ secure bool
+ signV2 bool
+ region string
+ bucket string
+ pathPrefix string
+ encrypt bool
+ trace bool
+ client *s3.Client
+}
+
+type S3FileBackendAuthError struct {
+ DetailedError string
+}
+
+// S3FileBackendNoBucketError is returned when testing a connection and no S3 bucket is found
+type S3FileBackendNoBucketError struct{}
+
+const (
+ // This is not exported by minio. See: https://github.com/minio/minio-go/issues/1339
+ bucketNotFound = "NoSuchBucket"
+)
+
+var (
+ imageExtensions = map[string]bool{".jpg": true, ".jpeg": true, ".gif": true, ".bmp": true, ".png": true, ".tiff": true, ".tif": true}
+ imageMimeTypes = map[string]string{".jpg": "image/jpeg", ".jpeg": "image/jpeg", ".gif": "image/gif", ".bmp": "image/bmp", ".png": "image/png", ".tiff": "image/tiff", ".tif": "image/tif"}
+)
+
+func isFileExtImage(ext string) bool {
+ ext = strings.ToLower(ext)
+ return imageExtensions[ext]
+}
+
+func getImageMimeType(ext string) string {
+ ext = strings.ToLower(ext)
+ if imageMimeTypes[ext] == "" {
+ return "image"
+ }
+ return imageMimeTypes[ext]
+}
+
+func (s *S3FileBackendAuthError) Error() string {
+ return s.DetailedError
+}
+
+func (s *S3FileBackendNoBucketError) Error() string {
+ return "no such bucket"
+}
+
+// NewS3FileBackend returns an instance of an S3FileBackend.
+func NewS3FileBackend(settings FileBackendSettings) (*S3FileBackend, error) {
+ backend := &S3FileBackend{
+ endpoint: settings.AmazonS3Endpoint,
+ accessKey: settings.AmazonS3AccessKeyId,
+ secretKey: settings.AmazonS3SecretAccessKey,
+ secure: settings.AmazonS3SSL,
+ signV2: settings.AmazonS3SignV2,
+ region: settings.AmazonS3Region,
+ bucket: settings.AmazonS3Bucket,
+ pathPrefix: settings.AmazonS3PathPrefix,
+ encrypt: settings.AmazonS3SSE,
+ trace: settings.AmazonS3Trace,
+ }
+ cli, err := backend.s3New()
+ if err != nil {
+ return nil, err
+ }
+ backend.client = cli
+ return backend, nil
+}
+
+// s3New is similar to s3.New(), but allows initialization of a signature v2 or
+// signature v4 client. If signV2 is false, a signature v4 client is always returned.
+//
+// Additionally, this function takes a user-defined region which, if set,
+// disables automatic region lookup.
+func (b *S3FileBackend) s3New() (*s3.Client, error) {
+ var creds *credentials.Credentials
+
+ isCloud := os.Getenv("MM_CLOUD_FILESTORE_BIFROST") != ""
+ if isCloud {
+ creds = credentials.New(customProvider{isSignV2: b.signV2})
+ } else if b.accessKey == "" && b.secretKey == "" {
+ creds = credentials.NewIAM("")
+ } else if b.signV2 {
+ creds = credentials.NewStatic(b.accessKey, b.secretKey, "", credentials.SignatureV2)
+ } else {
+ creds = credentials.NewStatic(b.accessKey, b.secretKey, "", credentials.SignatureV4)
+ }
+
+ opts := s3.Options{
+ Creds: creds,
+ Secure: b.secure,
+ Region: b.region,
+ }
+
+ // If this is a cloud installation, we override the default transport.
+ if isCloud {
+ tr, err := s3.DefaultTransport(b.secure)
+ if err != nil {
+ return nil, err
+ }
+ scheme := "http"
+ if b.secure {
+ scheme = "https"
+ }
+ opts.Transport = &customTransport{
+ base: tr,
+ host: b.endpoint,
+ scheme: scheme,
+ }
+ }
+
+ s3Clnt, err := s3.New(b.endpoint, &opts)
+ if err != nil {
+ return nil, err
+ }
+
+ if b.trace {
+ s3Clnt.TraceOn(os.Stdout)
+ }
+
+ return s3Clnt, nil
+}
+
+func (b *S3FileBackend) TestConnection() error {
+ exists := true
+ var err error
+ // If a path prefix is present, we attempt to test the bucket by listing objects under the path
+ // and just checking the first response. This is because the BucketExists call is only at a bucket level
+ // and sometimes the user might only be allowed access to the specified path prefix.
+ if b.pathPrefix != "" {
+ obj := <-b.client.ListObjects(context.Background(), b.bucket, s3.ListObjectsOptions{Prefix: b.pathPrefix})
+ if obj.Err != nil {
+ typedErr := s3.ToErrorResponse(obj.Err)
+ if typedErr.Code != bucketNotFound {
+ return &S3FileBackendAuthError{DetailedError: "unable to list objects in the S3 bucket"}
+ }
+ exists = false
+ }
+ } else {
+ exists, err = b.client.BucketExists(context.Background(), b.bucket)
+ if err != nil {
+ return &S3FileBackendAuthError{DetailedError: "unable to check if the S3 bucket exists"}
+ }
+ }
+
+ if !exists {
+ return &S3FileBackendNoBucketError{}
+ }
+ mlog.Debug("Connection to S3 or minio is good. Bucket exists.")
+ return nil
+}
+
+func (b *S3FileBackend) MakeBucket() error {
+ err := b.client.MakeBucket(context.Background(), b.bucket, s3.MakeBucketOptions{Region: b.region})
+ if err != nil {
+ return errors.Wrap(err, "unable to create the s3 bucket")
+ }
+ return nil
+}
+
+// Caller must close the first return value
+func (b *S3FileBackend) Reader(path string) (ReadCloseSeeker, error) {
+ path = filepath.Join(b.pathPrefix, path)
+ minioObject, err := b.client.GetObject(context.Background(), b.bucket, path, s3.GetObjectOptions{})
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to open file %s", path)
+ }
+
+ return minioObject, nil
+}
+
+func (b *S3FileBackend) ReadFile(path string) ([]byte, error) {
+ path = filepath.Join(b.pathPrefix, path)
+ minioObject, err := b.client.GetObject(context.Background(), b.bucket, path, s3.GetObjectOptions{})
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to open file %s", path)
+ }
+
+ defer minioObject.Close()
+ f, err := ioutil.ReadAll(minioObject)
+ if err != nil {
+ return nil, errors.Wrapf(err, "unable to read file %s", path)
+ }
+ return f, nil
+}
+
+func (b *S3FileBackend) FileExists(path string) (bool, error) {
+ path = filepath.Join(b.pathPrefix, path)
+
+ _, err := b.client.StatObject(context.Background(), b.bucket, path, s3.StatObjectOptions{})
+ if err == nil {
+ return true, nil
+ }
+
+ var s3Err s3.ErrorResponse
+ if errors.As(err, &s3Err); s3Err.Code == "NoSuchKey" {
+ return false, nil
+ }
+
+ return false, errors.Wrapf(err, "unable to know if file %s exists", path)
+}
+
+func (b *S3FileBackend) FileSize(path string) (int64, error) {
+ path = filepath.Join(b.pathPrefix, path)
+
+ info, err := b.client.StatObject(context.Background(), b.bucket, path, s3.StatObjectOptions{})
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable to get file size for %s", path)
+ }
+
+ return info.Size, nil
+}
+
+func (b *S3FileBackend) FileModTime(path string) (time.Time, error) {
+ path = filepath.Join(b.pathPrefix, path)
+
+ info, err := b.client.StatObject(context.Background(), b.bucket, path, s3.StatObjectOptions{})
+ if err != nil {
+ return time.Time{}, errors.Wrapf(err, "unable to get modification time for file %s", path)
+ }
+
+ return info.LastModified, nil
+}
+
+func (b *S3FileBackend) CopyFile(oldPath, newPath string) error {
+ oldPath = filepath.Join(b.pathPrefix, oldPath)
+ newPath = filepath.Join(b.pathPrefix, newPath)
+ srcOpts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: oldPath,
+ Encryption: encrypt.NewSSE(),
+ }
+ dstOpts := s3.CopyDestOptions{
+ Bucket: b.bucket,
+ Object: newPath,
+ Encryption: encrypt.NewSSE(),
+ }
+ if _, err := b.client.CopyObject(context.Background(), dstOpts, srcOpts); err != nil {
+ return errors.Wrapf(err, "unable to copy file from %s to %s", oldPath, newPath)
+ }
+ return nil
+}
+
+func (b *S3FileBackend) MoveFile(oldPath, newPath string) error {
+ oldPath = filepath.Join(b.pathPrefix, oldPath)
+ newPath = filepath.Join(b.pathPrefix, newPath)
+ srcOpts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: oldPath,
+ Encryption: encrypt.NewSSE(),
+ }
+ dstOpts := s3.CopyDestOptions{
+ Bucket: b.bucket,
+ Object: newPath,
+ Encryption: encrypt.NewSSE(),
+ }
+
+ if _, err := b.client.CopyObject(context.Background(), dstOpts, srcOpts); err != nil {
+ return errors.Wrapf(err, "unable to copy the file to %s to the new destionation", newPath)
+ }
+
+ if err := b.client.RemoveObject(context.Background(), b.bucket, oldPath, s3.RemoveObjectOptions{}); err != nil {
+ return errors.Wrapf(err, "unable to remove the file old file %s", oldPath)
+ }
+
+ return nil
+}
+
+func (b *S3FileBackend) WriteFile(fr io.Reader, path string) (int64, error) {
+ var contentType string
+ path = filepath.Join(b.pathPrefix, path)
+ if ext := filepath.Ext(path); isFileExtImage(ext) {
+ contentType = getImageMimeType(ext)
+ } else {
+ contentType = "binary/octet-stream"
+ }
+
+ options := s3PutOptions(b.encrypt, contentType)
+ info, err := b.client.PutObject(context.Background(), b.bucket, path, fr, -1, options)
+ if err != nil {
+ return info.Size, errors.Wrapf(err, "unable to write the data to the file %s", path)
+ }
+
+ return info.Size, nil
+}
+
+func (b *S3FileBackend) AppendFile(fr io.Reader, path string) (int64, error) {
+ fp := filepath.Join(b.pathPrefix, path)
+ if _, err := b.client.StatObject(context.Background(), b.bucket, fp, s3.StatObjectOptions{}); err != nil {
+ return 0, errors.Wrapf(err, "unable to find the file %s to append the data", path)
+ }
+
+ var contentType string
+ if ext := filepath.Ext(fp); isFileExtImage(ext) {
+ contentType = getImageMimeType(ext)
+ } else {
+ contentType = "binary/octet-stream"
+ }
+
+ options := s3PutOptions(b.encrypt, contentType)
+ sse := options.ServerSideEncryption
+ partName := fp + ".part"
+ info, err := b.client.PutObject(context.Background(), b.bucket, partName, fr, -1, options)
+ defer b.client.RemoveObject(context.Background(), b.bucket, partName, s3.RemoveObjectOptions{})
+ if info.Size > 0 {
+ src1Opts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: fp,
+ }
+ src2Opts := s3.CopySrcOptions{
+ Bucket: b.bucket,
+ Object: partName,
+ }
+ dstOpts := s3.CopyDestOptions{
+ Bucket: b.bucket,
+ Object: fp,
+ Encryption: sse,
+ }
+ _, err = b.client.ComposeObject(context.Background(), dstOpts, src1Opts, src2Opts)
+ if err != nil {
+ return 0, errors.Wrapf(err, "unable append the data in the file %s", path)
+ }
+ return info.Size, nil
+ }
+
+ return 0, errors.Wrapf(err, "unable append the data in the file %s", path)
+}
+
+func (b *S3FileBackend) RemoveFile(path string) error {
+ path = filepath.Join(b.pathPrefix, path)
+ if err := b.client.RemoveObject(context.Background(), b.bucket, path, s3.RemoveObjectOptions{}); err != nil {
+ return errors.Wrapf(err, "unable to remove the file %s", path)
+ }
+
+ return nil
+}
+
+func getPathsFromObjectInfos(in <-chan s3.ObjectInfo) <-chan s3.ObjectInfo {
+ out := make(chan s3.ObjectInfo, 1)
+
+ go func() {
+ defer close(out)
+
+ for {
+ info, done := <-in
+
+ if !done {
+ break
+ }
+
+ out <- info
+ }
+ }()
+
+ return out
+}
+
+func (b *S3FileBackend) ListDirectory(path string) ([]string, error) {
+ path = filepath.Join(b.pathPrefix, path)
+ if !strings.HasSuffix(path, "/") && path != "" {
+ // s3Clnt returns only the path itself when "/" is not present
+ // appending "/" to make it consistent across all filestores
+ path = path + "/"
+ }
+
+ opts := s3.ListObjectsOptions{
+ Prefix: path,
+ }
+ var paths []string
+ for object := range b.client.ListObjects(context.Background(), b.bucket, opts) {
+ if object.Err != nil {
+ return nil, errors.Wrapf(object.Err, "unable to list the directory %s", path)
+ }
+ // We strip the path prefix that gets applied,
+ // so that it remains transparent to the application.
+ object.Key = strings.TrimPrefix(object.Key, b.pathPrefix)
+ trimmed := strings.Trim(object.Key, "/")
+ if trimmed != "" {
+ paths = append(paths, trimmed)
+ }
+ }
+
+ return paths, nil
+}
+
+func (b *S3FileBackend) RemoveDirectory(path string) error {
+ opts := s3.ListObjectsOptions{
+ Prefix: filepath.Join(b.pathPrefix, path),
+ Recursive: true,
+ }
+ list := b.client.ListObjects(context.Background(), b.bucket, opts)
+ objectsCh := b.client.RemoveObjects(context.Background(), b.bucket, getPathsFromObjectInfos(list), s3.RemoveObjectsOptions{})
+ for err := range objectsCh {
+ if err.Err != nil {
+ return errors.Wrapf(err.Err, "unable to remove the directory %s", path)
+ }
+ }
+
+ return nil
+}
+
+func s3PutOptions(encrypted bool, contentType string) s3.PutObjectOptions {
+ options := s3.PutObjectOptions{}
+ if encrypted {
+ options.ServerSideEncryption = encrypt.NewSSE()
+ }
+ options.ContentType = contentType
+ // We set the part size to the minimum allowed value of 5MB
+ // to avoid an excessive allocation in the minio.PutObject implementation.
+ options.PartSize = 1024 * 1024 * 5
+
+ return options
+}
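
A corresponding sketch for the S3 driver; the endpoint, bucket, and credentials below are placeholders (a local minio instance is assumed), not values taken from this change.

    settings := filestore.FileBackendSettings{
        DriverName:              "amazons3",
        AmazonS3AccessKeyId:     "minio-access-key", // placeholder
        AmazonS3SecretAccessKey: "minio-secret-key", // placeholder
        AmazonS3Bucket:          "example-bucket",   // placeholder
        AmazonS3Endpoint:        "localhost:9000",   // hypothetical minio endpoint
        AmazonS3Region:          "us-east-1",
        AmazonS3SSL:             false,
    }
    if err := settings.CheckMandatoryS3Fields(); err != nil {
        panic(err)
    }
    backend, err := filestore.NewFileBackend(settings)
    if err != nil {
        panic(err)
    }
    if err := backend.TestConnection(); err != nil {
        panic(err)
    }
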
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/i18n/i18n.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/i18n/i18n.go
new file mode 100644
index 00000000..0168b751
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/i18n/i18n.go
@@ -0,0 +1,185 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package i18n
+
+import (
+ "fmt"
+ "html/template"
+ "io/ioutil"
+ "net/http"
+ "path/filepath"
+ "reflect"
+ "strings"
+
+ "github.com/mattermost/go-i18n/i18n"
+
+ "github.com/mattermost/mattermost-server/v6/shared/mlog"
+)
+
+const defaultLocale = "en"
+
+// TranslateFunc is the type of the translate functions
+type TranslateFunc func(translationID string, args ...interface{}) string
+
+// T is the translate function using the default server language as the fallback language.
+var T TranslateFunc
+
+// TDefault is the translate function using English as the fallback language.
+var TDefault TranslateFunc
+
+var locales map[string]string = make(map[string]string)
+var defaultServerLocale string
+var defaultClientLocale string
+
+// TranslationsPreInit loads translations from the filesystem if they are not
+// already loaded, and assigns English while the server config is being loaded.
+func TranslationsPreInit(translationsDir string) error {
+ if T != nil {
+ return nil
+ }
+
+ // Set T even if we fail to load the translations. Lots of shutdown handling code will
+ // segfault trying to handle the error, and the untranslated IDs are strictly better.
+ T = tfuncWithFallback(defaultLocale)
+ TDefault = tfuncWithFallback(defaultLocale)
+
+ return initTranslationsWithDir(translationsDir)
+}
+
+// InitTranslations sets the defaults configured in the server and initializes
+// the T function using the server default as the fallback language.
+func InitTranslations(serverLocale, clientLocale string) error {
+ defaultServerLocale = serverLocale
+ defaultClientLocale = clientLocale
+
+ var err error
+ T, err = getTranslationsBySystemLocale()
+ return err
+}
+
+func initTranslationsWithDir(dir string) error {
+ files, _ := ioutil.ReadDir(dir)
+ for _, f := range files {
+ if filepath.Ext(f.Name()) == ".json" {
+ filename := f.Name()
+ locales[strings.Split(filename, ".")[0]] = filepath.Join(dir, filename)
+
+ if err := i18n.LoadTranslationFile(filepath.Join(dir, filename)); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func getTranslationsBySystemLocale() (TranslateFunc, error) {
+ locale := defaultServerLocale
+ if _, ok := locales[locale]; !ok {
+ mlog.Warn("Failed to load system translations for", mlog.String("locale", locale), mlog.String("attempting to fall back to default locale", defaultLocale))
+ locale = defaultLocale
+ }
+
+ if locales[locale] == "" {
+ return nil, fmt.Errorf("failed to load system translations for '%v'", defaultLocale)
+ }
+
+ translations := tfuncWithFallback(locale)
+ if translations == nil {
+ return nil, fmt.Errorf("failed to load system translations")
+ }
+
+ mlog.Info("Loaded system translations", mlog.String("for locale", locale), mlog.String("from locale", locales[locale]))
+ return translations, nil
+}
+
+// GetUserTranslations returns the translation function for a specific locale.
+func GetUserTranslations(locale string) TranslateFunc {
+ if _, ok := locales[locale]; !ok {
+ locale = defaultLocale
+ }
+
+ translations := tfuncWithFallback(locale)
+ return translations
+}
+
+// GetTranslationsAndLocaleFromRequest returns the translation function and the
+// locale based on the request headers.
+func GetTranslationsAndLocaleFromRequest(r *http.Request) (TranslateFunc, string) {
+ // This is for checking against locales like pt_BR or zh_CN
+ headerLocaleFull := strings.Split(r.Header.Get("Accept-Language"), ",")[0]
+ // This is for checking against locales like en, es
+ headerLocale := strings.Split(strings.Split(r.Header.Get("Accept-Language"), ",")[0], "-")[0]
+ defaultLocale := defaultClientLocale
+ if locales[headerLocaleFull] != "" {
+ translations := tfuncWithFallback(headerLocaleFull)
+ return translations, headerLocaleFull
+ } else if locales[headerLocale] != "" {
+ translations := tfuncWithFallback(headerLocale)
+ return translations, headerLocale
+ } else if locales[defaultLocale] != "" {
+ translations := tfuncWithFallback(defaultLocale)
+ return translations, headerLocale
+ }
+
+ translations := tfuncWithFallback(defaultLocale)
+ return translations, defaultLocale
+}
+
+// GetSupportedLocales returns a map of locale codes to the file paths of the
+// corresponding translations.
+func GetSupportedLocales() map[string]string {
+ return locales
+}
+
+func tfuncWithFallback(pref string) TranslateFunc {
+ t, _ := i18n.Tfunc(pref)
+ return func(translationID string, args ...interface{}) string {
+ if translated := t(translationID, args...); translated != translationID {
+ return translated
+ }
+
+ t, _ := i18n.Tfunc(defaultLocale)
+ return t(translationID, args...)
+ }
+}
+
+// TranslateAsHTML translates the provided translationID and returns a
+// template.HTML object.
+func TranslateAsHTML(t TranslateFunc, translationID string, args map[string]interface{}) template.HTML {
+ message := t(translationID, escapeForHTML(args))
+ message = strings.Replace(message, "[[", "<strong>", -1)
+ message = strings.Replace(message, "]]", "</strong>", -1)
+ return template.HTML(message)
+}
+
+func escapeForHTML(arg interface{}) interface{} {
+ switch typedArg := arg.(type) {
+ case string:
+ return template.HTMLEscapeString(typedArg)
+ case *string:
+ return template.HTMLEscapeString(*typedArg)
+ case map[string]interface{}:
+ safeArg := make(map[string]interface{}, len(typedArg))
+ for key, value := range typedArg {
+ safeArg[key] = escapeForHTML(value)
+ }
+ return safeArg
+ default:
+ mlog.Warn(
+ "Unable to escape value for HTML template",
+ mlog.Any("html_template", arg),
+ mlog.String("template_type", reflect.ValueOf(arg).Type().String()),
+ )
+ return ""
+ }
+}
+
+// IdentityTfunc returns a translation function that doesn't translate and
+// simply returns the given translation ID.
+func IdentityTfunc() TranslateFunc {
+ return func(translationID string, args ...interface{}) string {
+ return translationID
+ }
+}
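
A minimal sketch of the i18n initialization sequence, assuming a directory of <locale>.json translation files; the path and translation ID are hypothetical.

    package main

    import (
        "fmt"

        "github.com/mattermost/mattermost-server/v6/shared/i18n"
    )

    func main() {
        // Load the translation files and set a safe fallback T before the
        // server config is read.
        if err := i18n.TranslationsPreInit("./i18n"); err != nil {
            panic(err)
        }
        // Pick the server and client default locales once the config is known.
        if err := i18n.InitTranslations("en", "en"); err != nil {
            panic(err)
        }

        // Per-user translation functions fall back to the default locale when
        // the requested locale has not been loaded.
        tr := i18n.GetUserTranslations("es")
        fmt.Println(tr("some.translation.id"))
    }
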
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/autolink.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/autolink.go
new file mode 100644
index 00000000..2eb05d90
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/autolink.go
@@ -0,0 +1,255 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "regexp"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// Based off of extensions/autolink.c from https://github.com/github/cmark
+
+var (
+ DefaultURLSchemes = []string{"http", "https", "ftp", "mailto", "tel"}
+ wwwAutoLinkRegex = regexp.MustCompile(`^www\d{0,3}\.`)
+)
+
+// Given a string with a w at the given position, tries to parse and return a range containing a www link,
+// if one exists. If the text at the given position isn't a link, the second return value is false. Equivalent to
+// www_match from the reference code.
+func parseWWWAutolink(data string, position int) (Range, bool) {
+ // Check that this isn't part of another word
+ if position > 1 {
+ prevChar := data[position-1]
+
+ if !isWhitespaceByte(prevChar) && !isAllowedBeforeWWWLink(prevChar) {
+ return Range{}, false
+ }
+ }
+
+ // Check that this starts with www
+ if len(data)-position < 4 || !wwwAutoLinkRegex.MatchString(data[position:]) {
+ return Range{}, false
+ }
+
+ end := checkDomain(data[position:], false)
+ if end == 0 {
+ return Range{}, false
+ }
+
+ end += position
+
+ // Grab all text until the end of the string or the next whitespace character
+ for end < len(data) && !isWhitespaceByte(data[end]) {
+ end += 1
+ }
+
+ // Trim trailing punctuation
+ end = trimTrailingCharactersFromLink(data, position, end)
+ if position == end {
+ return Range{}, false
+ }
+
+ return Range{position, end}, true
+}
+
+func isAllowedBeforeWWWLink(c byte) bool {
+ switch c {
+ case '*', '_', '~', ')':
+ return true
+ }
+ return false
+}
+
+// Given a string with a : at the given position, tries to parse and return a range containing a URL,
+// if one exists. If the text around the given position isn't a link, the second return value is false. Equivalent to
+// url_match from the reference code.
+func parseURLAutolink(data string, position int) (Range, bool) {
+ // Check that a :// exists. This doesn't match the clients that treat the slashes as optional.
+ if len(data)-position < 4 || data[position+1] != '/' || data[position+2] != '/' {
+ return Range{}, false
+ }
+
+ start := position - 1
+ for start > 0 && isAlphanumericByte(data[start-1]) {
+ start -= 1
+ }
+
+ if start < 0 || position >= len(data) {
+ return Range{}, false
+ }
+
+ // Ensure that the URL scheme is allowed and that at least one character after the scheme is valid.
+ scheme := data[start:position]
+ if !isSchemeAllowed(scheme) || !isValidHostCharacter(data[position+3:]) {
+ return Range{}, false
+ }
+
+ end := checkDomain(data[position+3:], true)
+ if end == 0 {
+ return Range{}, false
+ }
+
+ end += position
+
+ // Grab all text until the end of the string or the next whitespace character
+ for end < len(data) && !isWhitespaceByte(data[end]) {
+ end += 1
+ }
+
+ // Trim trailing punctuation
+ end = trimTrailingCharactersFromLink(data, start, end)
+ if start == end {
+ return Range{}, false
+ }
+
+ return Range{start, end}, true
+}
+
+func isSchemeAllowed(scheme string) bool {
+ // Note that this doesn't support the custom URL schemes implemented by the client
+ for _, allowed := range DefaultURLSchemes {
+ if strings.EqualFold(allowed, scheme) {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Given a string starting with a URL, returns the number of valid characters that make up the URL's domain.
+// Returns 0 if the string doesn't start with a domain name. allowShort determines whether or not the domain
+// needs to contain a period to be considered valid. Equivalent to check_domain from the reference code.
+func checkDomain(data string, allowShort bool) int {
+ foundUnderscore := false
+ foundPeriod := false
+
+ i := 1
+ for ; i < len(data)-1; i++ {
+ if data[i] == '_' {
+ foundUnderscore = true
+ break
+ } else if data[i] == '.' {
+ foundPeriod = true
+ } else if !isValidHostCharacter(data[i:]) && data[i] != '-' {
+ break
+ }
+ }
+
+ if foundUnderscore {
+ return 0
+ }
+
+ if allowShort {
+ // If allowShort is set, accept any string of valid domain characters
+ return i
+ }
+
+ // If allowShort isn't set, a valid domain just requires at least a single period. Note that this
+ // logic isn't entirely necessary because we already know the string starts with "www." when
+ // this is called from parseWWWAutolink
+ if foundPeriod {
+ return i
+ }
+ return 0
+}
+
+// Returns true if the provided link starts with a valid character for a domain name. Equivalent to
+// is_valid_hostchar from the reference code.
+func isValidHostCharacter(link string) bool {
+ c, _ := utf8.DecodeRuneInString(link)
+ if c == utf8.RuneError {
+ return false
+ }
+
+ return !unicode.IsSpace(c) && !unicode.IsPunct(c)
+}
+
+// Removes any trailing characters such as punctuation or stray brackets that shouldn't be part of the link.
+// Returns a new end position for the link. Equivalent to autolink_delim from the reference code.
+func trimTrailingCharactersFromLink(markdown string, start int, end int) int {
+ runes := []rune(markdown[start:end])
+ linkEnd := len(runes)
+
+ // Cut off the link before an open angle bracket if it contains one
+ for i, c := range runes {
+ if c == '<' {
+ linkEnd = i
+ break
+ }
+ }
+
+ for linkEnd > 0 {
+ c := runes[linkEnd-1]
+
+ if !canEndAutolink(c) {
+ // Trim trailing quotes, periods, etc
+ linkEnd = linkEnd - 1
+ } else if c == ';' {
+ // Trim a trailing HTML entity
+ newEnd := linkEnd - 2
+
+ for newEnd > 0 && ((runes[newEnd] >= 'a' && runes[newEnd] <= 'z') || (runes[newEnd] >= 'A' && runes[newEnd] <= 'Z')) {
+ newEnd -= 1
+ }
+
+ if newEnd < linkEnd-2 && runes[newEnd] == '&' {
+ linkEnd = newEnd
+ } else {
+ // This isn't actually an HTML entity, so just trim the semicolon
+ linkEnd = linkEnd - 1
+ }
+ } else if c == ')' {
+ // Only allow an autolink ending with a bracket if that bracket is part of a matching pair of brackets.
+ // If there are more closing brackets than opening ones, remove the extra bracket
+
+ numClosing := 0
+ numOpening := 0
+
+ // Examples (input text => output linked portion):
+ //
+ // http://www.pokemon.com/Pikachu_(Electric)
+ // => http://www.pokemon.com/Pikachu_(Electric)
+ //
+ // http://www.pokemon.com/Pikachu_((Electric)
+ // => http://www.pokemon.com/Pikachu_((Electric)
+ //
+ // http://www.pokemon.com/Pikachu_(Electric))
+ // => http://www.pokemon.com/Pikachu_(Electric)
+ //
+ // http://www.pokemon.com/Pikachu_((Electric))
+ // => http://www.pokemon.com/Pikachu_((Electric))
+
+ for i := 0; i < linkEnd; i++ {
+ if runes[i] == '(' {
+ numOpening += 1
+ } else if runes[i] == ')' {
+ numClosing += 1
+ }
+ }
+
+ if numClosing <= numOpening {
+ // There are fewer or equal closing brackets, so we've found the end of the link
+ break
+ }
+
+ linkEnd -= 1
+ } else {
+ // There are no special characters at the end of the link, so we're at the end
+ break
+ }
+ }
+
+ return start + len(string(runes[:linkEnd]))
+}
+
+func canEndAutolink(c rune) bool {
+ switch c {
+ case '?', '!', '.', ',', ':', '*', '_', '~', '\'', '"':
+ return false
+ }
+ return true
+}
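
Since these autolink helpers are unexported, a usage sketch only compiles inside package markdown; a test-style illustration with a made-up input follows.

    package markdown

    import (
        "strings"
        "testing"
    )

    // Illustrates the contract of parseURLAutolink: given the index of the ':'
    // in a scheme, it returns the range of the whole link and true on success.
    func TestParseURLAutolinkSketch(t *testing.T) {
        input := "visit https://example.com now"
        r, ok := parseURLAutolink(input, strings.Index(input, ":"))
        if !ok || input[r.Position:r.End] != "https://example.com" {
            t.Fatalf("unexpected result: %+v %v", r, ok)
        }
    }
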
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/block_quote.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/block_quote.go
new file mode 100644
index 00000000..5cf66d10
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/block_quote.go
@@ -0,0 +1,62 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+type BlockQuote struct {
+ blockBase
+ markdown string
+
+ Children []Block
+}
+
+func (b *BlockQuote) Continuation(indentation int, r Range) *continuation {
+ if indentation > 3 {
+ return nil
+ }
+ s := b.markdown[r.Position:r.End]
+ if s == "" || s[0] != '>' {
+ return nil
+ }
+ remaining := Range{r.Position + 1, r.End}
+ indentation, indentationBytes := countIndentation(b.markdown, remaining)
+ if indentation > 0 {
+ indentation--
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: Range{remaining.Position + indentationBytes, remaining.End},
+ }
+}
+
+func (b *BlockQuote) AddChild(openBlocks []Block) []Block {
+ b.Children = append(b.Children, openBlocks[0])
+ return openBlocks
+}
+
+func blockQuoteStart(markdown string, indent int, r Range) []Block {
+ if indent > 3 {
+ return nil
+ }
+ s := markdown[r.Position:r.End]
+ if s == "" || s[0] != '>' {
+ return nil
+ }
+
+ block := &BlockQuote{
+ markdown: markdown,
+ }
+ r.Position++
+ if len(s) > 1 && s[1] == ' ' {
+ r.Position++
+ }
+
+ indent, bytes := countIndentation(markdown, r)
+
+ ret := []Block{block}
+ if descendants := blockStartOrParagraph(markdown, indent, Range{r.Position + bytes, r.End}, nil, nil); descendants != nil {
+ block.Children = append(block.Children, descendants[0])
+ ret = append(ret, descendants...)
+ }
+ return ret
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/blocks.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/blocks.go
new file mode 100644
index 00000000..fe9e272f
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/blocks.go
@@ -0,0 +1,154 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type continuation struct {
+ Indentation int
+ Remaining Range
+}
+
+type Block interface {
+ Continuation(indentation int, r Range) *continuation
+ AddLine(indentation int, r Range) bool
+ Close()
+ AllowsBlockStarts() bool
+ HasTrailingBlankLine() bool
+}
+
+type blockBase struct{}
+
+func (*blockBase) AddLine(indentation int, r Range) bool { return false }
+func (*blockBase) Close() {}
+func (*blockBase) AllowsBlockStarts() bool { return true }
+func (*blockBase) HasTrailingBlankLine() bool { return false }
+
+type ContainerBlock interface {
+ Block
+ AddChild(openBlocks []Block) []Block
+}
+
+type Range struct {
+ Position int
+ End int
+}
+
+func closeBlocks(blocks []Block, referenceDefinitions []*ReferenceDefinition) []*ReferenceDefinition {
+ for _, block := range blocks {
+ block.Close()
+ if p, ok := block.(*Paragraph); ok && len(p.ReferenceDefinitions) > 0 {
+ referenceDefinitions = append(referenceDefinitions, p.ReferenceDefinitions...)
+ }
+ }
+ return referenceDefinitions
+}
+
+func ParseBlocks(markdown string, lines []Line) (*Document, []*ReferenceDefinition) {
+ document := &Document{}
+ var referenceDefinitions []*ReferenceDefinition
+
+ openBlocks := []Block{document}
+
+ for _, line := range lines {
+ r := line.Range
+ lastMatchIndex := 0
+
+ indentation, indentationBytes := countIndentation(markdown, r)
+ r = Range{r.Position + indentationBytes, r.End}
+
+ for i, block := range openBlocks {
+ if continuation := block.Continuation(indentation, r); continuation != nil {
+ indentation = continuation.Indentation
+ r = continuation.Remaining
+ additionalIndentation, additionalIndentationBytes := countIndentation(markdown, r)
+ r = Range{r.Position + additionalIndentationBytes, r.End}
+ indentation += additionalIndentation
+ lastMatchIndex = i
+ } else {
+ break
+ }
+ }
+
+ if openBlocks[lastMatchIndex].AllowsBlockStarts() {
+ if newBlocks := blockStart(markdown, indentation, r, openBlocks[:lastMatchIndex+1], openBlocks[lastMatchIndex+1:]); newBlocks != nil {
+ didAdd := false
+ for i := lastMatchIndex; i >= 0; i-- {
+ if container, ok := openBlocks[i].(ContainerBlock); ok {
+ if addedBlocks := container.AddChild(newBlocks); addedBlocks != nil {
+ referenceDefinitions = closeBlocks(openBlocks[i+1:], referenceDefinitions)
+ openBlocks = openBlocks[:i+1]
+ openBlocks = append(openBlocks, addedBlocks...)
+ didAdd = true
+ break
+ }
+ }
+ }
+ if didAdd {
+ continue
+ }
+ }
+ }
+
+ isBlank := strings.TrimSpace(markdown[r.Position:r.End]) == ""
+ if paragraph, ok := openBlocks[len(openBlocks)-1].(*Paragraph); ok && !isBlank {
+ paragraph.Text = append(paragraph.Text, r)
+ continue
+ }
+
+ referenceDefinitions = closeBlocks(openBlocks[lastMatchIndex+1:], referenceDefinitions)
+ openBlocks = openBlocks[:lastMatchIndex+1]
+
+ if openBlocks[lastMatchIndex].AddLine(indentation, r) {
+ continue
+ }
+
+ if paragraph := newParagraph(markdown, r); paragraph != nil {
+ for i := lastMatchIndex; i >= 0; i-- {
+ if container, ok := openBlocks[i].(ContainerBlock); ok {
+ if newBlocks := container.AddChild([]Block{paragraph}); newBlocks != nil {
+ referenceDefinitions = closeBlocks(openBlocks[i+1:], referenceDefinitions)
+ openBlocks = openBlocks[:i+1]
+ openBlocks = append(openBlocks, newBlocks...)
+ break
+ }
+ }
+ }
+ }
+ }
+
+ referenceDefinitions = closeBlocks(openBlocks, referenceDefinitions)
+
+ return document, referenceDefinitions
+}
+
+func blockStart(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ if r.Position >= r.End {
+ return nil
+ }
+
+ if start := blockQuoteStart(markdown, indentation, r); start != nil {
+ return start
+ } else if start := listStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil {
+ return start
+ } else if start := indentedCodeStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil {
+ return start
+ } else if start := fencedCodeStart(markdown, indentation, r); start != nil {
+ return start
+ }
+
+ return nil
+}
+
+func blockStartOrParagraph(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ if start := blockStart(markdown, indentation, r, matchedBlocks, unmatchedBlocks); start != nil {
+ return start
+ }
+ if paragraph := newParagraph(markdown, r); paragraph != nil {
+ return []Block{paragraph}
+ }
+ return nil
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/document.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/document.go
new file mode 100644
index 00000000..306b93da
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/document.go
@@ -0,0 +1,22 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+type Document struct {
+ blockBase
+
+ Children []Block
+}
+
+func (b *Document) Continuation(indentation int, r Range) *continuation {
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *Document) AddChild(openBlocks []Block) []Block {
+ b.Children = append(b.Children, openBlocks[0])
+ return openBlocks
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/fenced_code.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/fenced_code.go
new file mode 100644
index 00000000..c8caad55
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/fenced_code.go
@@ -0,0 +1,112 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type FencedCodeLine struct {
+ Indentation int
+ Range Range
+}
+
+type FencedCode struct {
+ blockBase
+ markdown string
+ didSeeClosingFence bool
+
+ Indentation int
+ OpeningFence Range
+ RawInfo Range
+ RawCode []FencedCodeLine
+}
+
+func (b *FencedCode) Code() (result string) {
+ for _, code := range b.RawCode {
+ result += strings.Repeat(" ", code.Indentation) + b.markdown[code.Range.Position:code.Range.End]
+ }
+ return
+}
+
+func (b *FencedCode) Info() string {
+ return Unescape(b.markdown[b.RawInfo.Position:b.RawInfo.End])
+}
+
+func (b *FencedCode) Continuation(indentation int, r Range) *continuation {
+ if b.didSeeClosingFence {
+ return nil
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *FencedCode) AddLine(indentation int, r Range) bool {
+ s := b.markdown[r.Position:r.End]
+ if indentation <= 3 && strings.HasPrefix(s, b.markdown[b.OpeningFence.Position:b.OpeningFence.End]) {
+ suffix := strings.TrimSpace(s[b.OpeningFence.End-b.OpeningFence.Position:])
+ isClosingFence := true
+ for _, c := range suffix {
+ if c != rune(s[0]) {
+ isClosingFence = false
+ break
+ }
+ }
+ if isClosingFence {
+ b.didSeeClosingFence = true
+ return true
+ }
+ }
+
+ if indentation >= b.Indentation {
+ indentation -= b.Indentation
+ } else {
+ indentation = 0
+ }
+
+ b.RawCode = append(b.RawCode, FencedCodeLine{
+ Indentation: indentation,
+ Range: r,
+ })
+ return true
+}
+
+func (b *FencedCode) AllowsBlockStarts() bool {
+ return false
+}
+
+func fencedCodeStart(markdown string, indentation int, r Range) []Block {
+ s := markdown[r.Position:r.End]
+
+ if !strings.HasPrefix(s, "```") && !strings.HasPrefix(s, "~~~") {
+ return nil
+ }
+
+ fenceCharacter := rune(s[0])
+ fenceLength := 3
+ for _, c := range s[3:] {
+ if c == fenceCharacter {
+ fenceLength++
+ } else {
+ break
+ }
+ }
+
+ for i := r.Position + fenceLength; i < r.End; i++ {
+ if markdown[i] == '`' {
+ return nil
+ }
+ }
+
+ return []Block{
+ &FencedCode{
+ markdown: markdown,
+ Indentation: indentation,
+ RawInfo: trimRightSpace(markdown, Range{r.Position + fenceLength, r.End}),
+ OpeningFence: Range{r.Position, r.Position + fenceLength},
+ },
+ }
+}
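
Rendered end to end through RenderHTML (defined in html.go further down in this diff), a fenced block with an info string picks up a language- class; the snippet and the expected output in the comment are only indicative.

    // Expected to print something like:
    //   <pre><code class="language-go">fmt.Println()\n</code></pre>
    fmt.Println(markdown.RenderHTML("```go\nfmt.Println()\n```"))
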
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html.go
new file mode 100644
index 00000000..52583074
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html.go
@@ -0,0 +1,192 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "fmt"
+ "strings"
+)
+
+var htmlEscaper = strings.NewReplacer(
+ `&`, "&amp;",
+ `<`, "&lt;",
+ `>`, "&gt;",
+ `"`, "&quot;",
+)
+
+// RenderHTML produces HTML with the same behavior as the example renderer used in the CommonMark
+// reference materials except for one slight difference: for brevity, no unnecessary whitespace is
+// inserted between elements. The output is not defined by the CommonMark spec, and it exists
+// primarily as an aid in testing.
+func RenderHTML(markdown string) string {
+ return RenderBlockHTML(Parse(markdown))
+}
+
+func RenderBlockHTML(block Block, referenceDefinitions []*ReferenceDefinition) (result string) {
+ return renderBlockHTML(block, referenceDefinitions, false)
+}
+
+func renderBlockHTML(block Block, referenceDefinitions []*ReferenceDefinition, isTightList bool) (result string) {
+ switch v := block.(type) {
+ case *Document:
+ for _, block := range v.Children {
+ result += RenderBlockHTML(block, referenceDefinitions)
+ }
+ case *Paragraph:
+ if len(v.Text) == 0 {
+ return
+ }
+ if !isTightList {
+ result += "<p>"
+ }
+ for _, inline := range v.ParseInlines(referenceDefinitions) {
+ result += RenderInlineHTML(inline)
+ }
+ if !isTightList {
+ result += "</p>"
+ }
+ case *List:
+ if v.IsOrdered {
+ if v.OrderedStart != 1 {
+ result += fmt.Sprintf(`<ol start="%v">`, v.OrderedStart)
+ } else {
+ result += "<ol>"
+ }
+ } else {
+ result += "<ul>"
+ }
+ for _, block := range v.Children {
+ result += renderBlockHTML(block, referenceDefinitions, !v.IsLoose)
+ }
+ if v.IsOrdered {
+ result += "</ol>"
+ } else {
+ result += "</ul>"
+ }
+ case *ListItem:
+ result += "<li>"
+ for _, block := range v.Children {
+ result += renderBlockHTML(block, referenceDefinitions, isTightList)
+ }
+ result += "</li>"
+ case *BlockQuote:
+ result += "<blockquote>"
+ for _, block := range v.Children {
+ result += RenderBlockHTML(block, referenceDefinitions)
+ }
+ result += "</blockquote>"
+ case *FencedCode:
+ if info := v.Info(); info != "" {
+ language := strings.Fields(info)[0]
+ result += `<pre><code class="language-` + htmlEscaper.Replace(language) + `">`
+ } else {
+ result += "<pre><code>"
+ }
+ result += htmlEscaper.Replace(v.Code()) + "</code></pre>"
+ case *IndentedCode:
+ result += "<pre><code>" + htmlEscaper.Replace(v.Code()) + "</code></pre>"
+ default:
+ panic(fmt.Sprintf("missing case for type %T", v))
+ }
+ return
+}
+
+func escapeURL(url string) (result string) {
+ for i := 0; i < len(url); {
+ switch b := url[i]; b {
+ case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '-', '_', '.', '!', '~', '*', '\'', '(', ')', '#':
+ result += string(b)
+ i++
+ default:
+ if b == '%' && i+2 < len(url) && isHexByte(url[i+1]) && isHexByte(url[i+2]) {
+ result += url[i : i+3]
+ i += 3
+ } else if (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9') {
+ result += string(b)
+ i++
+ } else {
+ result += fmt.Sprintf("%%%02X", b) // always emit two hex digits when percent-encoding
+ i++
+ }
+ }
+ }
+ return
+}
+
+func RenderInlineHTML(inline Inline) (result string) {
+ switch v := inline.(type) {
+ case *Text:
+ return htmlEscaper.Replace(v.Text)
+ case *HardLineBreak:
+ return "<br />"
+ case *SoftLineBreak:
+ return "\n"
+ case *CodeSpan:
+ return "<code>" + htmlEscaper.Replace(v.Code) + "</code>"
+ case *InlineImage:
+ result += `<img src="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `" alt="` + htmlEscaper.Replace(renderImageAltText(v.Children)) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += ` />`
+ case *ReferenceImage:
+ result += `<img src="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `" alt="` + htmlEscaper.Replace(renderImageAltText(v.Children)) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += ` />`
+ case *InlineLink:
+ result += `<a href="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += `>`
+ for _, inline := range v.Children {
+ result += RenderInlineHTML(inline)
+ }
+ result += "</a>"
+ case *ReferenceLink:
+ result += `<a href="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `"`
+ if title := v.Title(); title != "" {
+ result += ` title="` + htmlEscaper.Replace(title) + `"`
+ }
+ result += `>`
+ for _, inline := range v.Children {
+ result += RenderInlineHTML(inline)
+ }
+ result += "</a>"
+ case *Autolink:
+ result += `<a href="` + htmlEscaper.Replace(escapeURL(v.Destination())) + `">`
+ for _, inline := range v.Children {
+ result += RenderInlineHTML(inline)
+ }
+ result += "</a>"
+ default:
+ panic(fmt.Sprintf("missing case for type %T", v))
+ }
+ return
+}
+
+func renderImageAltText(children []Inline) (result string) {
+ for _, inline := range children {
+ result += renderImageChildAltText(inline)
+ }
+ return
+}
+
+func renderImageChildAltText(inline Inline) (result string) {
+ switch v := inline.(type) {
+ case *Text:
+ return v.Text
+ case *InlineImage:
+ for _, inline := range v.Children {
+ result += renderImageChildAltText(inline)
+ }
+ case *InlineLink:
+ for _, inline := range v.Children {
+ result += renderImageChildAltText(inline)
+ }
+ }
+ return
+}
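
And a short sketch of the exported entry point for this renderer; the input is arbitrary and the output shown in the comment is only indicative of the escaping behavior above.

    package main

    import (
        "fmt"

        "github.com/mattermost/mattermost-server/v6/shared/markdown"
    )

    func main() {
        // Expected to print something like:
        //   <blockquote><p>a &quot;quoted&quot; block</p></blockquote>
        fmt.Println(markdown.RenderHTML("> a \"quoted\" block"))
    }
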
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html_entities.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html_entities.go
new file mode 100644
index 00000000..e94cebb9
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/html_entities.go
@@ -0,0 +1,2132 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+var htmlEntities = map[string]string{
+ "AElig": "\u00C6",
+ "AMP": "\u0026",
+ "Aacute": "\u00C1",
+ "Abreve": "\u0102",
+ "Acirc": "\u00C2",
+ "Acy": "\u0410",
+ "Afr": "\U0001D504",
+ "Agrave": "\u00C0",
+ "Alpha": "\u0391",
+ "Amacr": "\u0100",
+ "And": "\u2A53",
+ "Aogon": "\u0104",
+ "Aopf": "\U0001D538",
+ "ApplyFunction": "\u2061",
+ "Aring": "\u00C5",
+ "Ascr": "\U0001D49C",
+ "Assign": "\u2254",
+ "Atilde": "\u00C3",
+ "Auml": "\u00C4",
+ "Backslash": "\u2216",
+ "Barv": "\u2AE7",
+ "Barwed": "\u2306",
+ "Bcy": "\u0411",
+ "Because": "\u2235",
+ "Bernoullis": "\u212C",
+ "Beta": "\u0392",
+ "Bfr": "\U0001D505",
+ "Bopf": "\U0001D539",
+ "Breve": "\u02D8",
+ "Bscr": "\u212C",
+ "Bumpeq": "\u224E",
+ "CHcy": "\u0427",
+ "COPY": "\u00A9",
+ "Cacute": "\u0106",
+ "Cap": "\u22D2",
+ "CapitalDifferentialD": "\u2145",
+ "Cayleys": "\u212D",
+ "Ccaron": "\u010C",
+ "Ccedil": "\u00C7",
+ "Ccirc": "\u0108",
+ "Cconint": "\u2230",
+ "Cdot": "\u010A",
+ "Cedilla": "\u00B8",
+ "CenterDot": "\u00B7",
+ "Cfr": "\u212D",
+ "Chi": "\u03A7",
+ "CircleDot": "\u2299",
+ "CircleMinus": "\u2296",
+ "CirclePlus": "\u2295",
+ "CircleTimes": "\u2297",
+ "ClockwiseContourIntegral": "\u2232",
+ "CloseCurlyDoubleQuote": "\u201D",
+ "CloseCurlyQuote": "\u2019",
+ "Colon": "\u2237",
+ "Colone": "\u2A74",
+ "Congruent": "\u2261",
+ "Conint": "\u222F",
+ "ContourIntegral": "\u222E",
+ "Copf": "\u2102",
+ "Coproduct": "\u2210",
+ "CounterClockwiseContourIntegral": "\u2233",
+ "Cross": "\u2A2F",
+ "Cscr": "\U0001D49E",
+ "Cup": "\u22D3",
+ "CupCap": "\u224D",
+ "DD": "\u2145",
+ "DDotrahd": "\u2911",
+ "DJcy": "\u0402",
+ "DScy": "\u0405",
+ "DZcy": "\u040F",
+ "Dagger": "\u2021",
+ "Darr": "\u21A1",
+ "Dashv": "\u2AE4",
+ "Dcaron": "\u010E",
+ "Dcy": "\u0414",
+ "Del": "\u2207",
+ "Delta": "\u0394",
+ "Dfr": "\U0001D507",
+ "DiacriticalAcute": "\u00B4",
+ "DiacriticalDot": "\u02D9",
+ "DiacriticalDoubleAcute": "\u02DD",
+ "DiacriticalGrave": "\u0060",
+ "DiacriticalTilde": "\u02DC",
+ "Diamond": "\u22C4",
+ "DifferentialD": "\u2146",
+ "Dopf": "\U0001D53B",
+ "Dot": "\u00A8",
+ "DotDot": "\u20DC",
+ "DotEqual": "\u2250",
+ "DoubleContourIntegral": "\u222F",
+ "DoubleDot": "\u00A8",
+ "DoubleDownArrow": "\u21D3",
+ "DoubleLeftArrow": "\u21D0",
+ "DoubleLeftRightArrow": "\u21D4",
+ "DoubleLeftTee": "\u2AE4",
+ "DoubleLongLeftArrow": "\u27F8",
+ "DoubleLongLeftRightArrow": "\u27FA",
+ "DoubleLongRightArrow": "\u27F9",
+ "DoubleRightArrow": "\u21D2",
+ "DoubleRightTee": "\u22A8",
+ "DoubleUpArrow": "\u21D1",
+ "DoubleUpDownArrow": "\u21D5",
+ "DoubleVerticalBar": "\u2225",
+ "DownArrow": "\u2193",
+ "DownArrowBar": "\u2913",
+ "DownArrowUpArrow": "\u21F5",
+ "DownBreve": "\u0311",
+ "DownLeftRightVector": "\u2950",
+ "DownLeftTeeVector": "\u295E",
+ "DownLeftVector": "\u21BD",
+ "DownLeftVectorBar": "\u2956",
+ "DownRightTeeVector": "\u295F",
+ "DownRightVector": "\u21C1",
+ "DownRightVectorBar": "\u2957",
+ "DownTee": "\u22A4",
+ "DownTeeArrow": "\u21A7",
+ "Downarrow": "\u21D3",
+ "Dscr": "\U0001D49F",
+ "Dstrok": "\u0110",
+ "ENG": "\u014A",
+ "ETH": "\u00D0",
+ "Eacute": "\u00C9",
+ "Ecaron": "\u011A",
+ "Ecirc": "\u00CA",
+ "Ecy": "\u042D",
+ "Edot": "\u0116",
+ "Efr": "\U0001D508",
+ "Egrave": "\u00C8",
+ "Element": "\u2208",
+ "Emacr": "\u0112",
+ "EmptySmallSquare": "\u25FB",
+ "EmptyVerySmallSquare": "\u25AB",
+ "Eogon": "\u0118",
+ "Eopf": "\U0001D53C",
+ "Epsilon": "\u0395",
+ "Equal": "\u2A75",
+ "EqualTilde": "\u2242",
+ "Equilibrium": "\u21CC",
+ "Escr": "\u2130",
+ "Esim": "\u2A73",
+ "Eta": "\u0397",
+ "Euml": "\u00CB",
+ "Exists": "\u2203",
+ "ExponentialE": "\u2147",
+ "Fcy": "\u0424",
+ "Ffr": "\U0001D509",
+ "FilledSmallSquare": "\u25FC",
+ "FilledVerySmallSquare": "\u25AA",
+ "Fopf": "\U0001D53D",
+ "ForAll": "\u2200",
+ "Fouriertrf": "\u2131",
+ "Fscr": "\u2131",
+ "GJcy": "\u0403",
+ "GT": "\u003E",
+ "Gamma": "\u0393",
+ "Gammad": "\u03DC",
+ "Gbreve": "\u011E",
+ "Gcedil": "\u0122",
+ "Gcirc": "\u011C",
+ "Gcy": "\u0413",
+ "Gdot": "\u0120",
+ "Gfr": "\U0001D50A",
+ "Gg": "\u22D9",
+ "Gopf": "\U0001D53E",
+ "GreaterEqual": "\u2265",
+ "GreaterEqualLess": "\u22DB",
+ "GreaterFullEqual": "\u2267",
+ "GreaterGreater": "\u2AA2",
+ "GreaterLess": "\u2277",
+ "GreaterSlantEqual": "\u2A7E",
+ "GreaterTilde": "\u2273",
+ "Gscr": "\U0001D4A2",
+ "Gt": "\u226B",
+ "HARDcy": "\u042A",
+ "Hacek": "\u02C7",
+ "Hat": "\u005E",
+ "Hcirc": "\u0124",
+ "Hfr": "\u210C",
+ "HilbertSpace": "\u210B",
+ "Hopf": "\u210D",
+ "HorizontalLine": "\u2500",
+ "Hscr": "\u210B",
+ "Hstrok": "\u0126",
+ "HumpDownHump": "\u224E",
+ "HumpEqual": "\u224F",
+ "IEcy": "\u0415",
+ "IJlig": "\u0132",
+ "IOcy": "\u0401",
+ "Iacute": "\u00CD",
+ "Icirc": "\u00CE",
+ "Icy": "\u0418",
+ "Idot": "\u0130",
+ "Ifr": "\u2111",
+ "Igrave": "\u00CC",
+ "Im": "\u2111",
+ "Imacr": "\u012A",
+ "ImaginaryI": "\u2148",
+ "Implies": "\u21D2",
+ "Int": "\u222C",
+ "Integral": "\u222B",
+ "Intersection": "\u22C2",
+ "InvisibleComma": "\u2063",
+ "InvisibleTimes": "\u2062",
+ "Iogon": "\u012E",
+ "Iopf": "\U0001D540",
+ "Iota": "\u0399",
+ "Iscr": "\u2110",
+ "Itilde": "\u0128",
+ "Iukcy": "\u0406",
+ "Iuml": "\u00CF",
+ "Jcirc": "\u0134",
+ "Jcy": "\u0419",
+ "Jfr": "\U0001D50D",
+ "Jopf": "\U0001D541",
+ "Jscr": "\U0001D4A5",
+ "Jsercy": "\u0408",
+ "Jukcy": "\u0404",
+ "KHcy": "\u0425",
+ "KJcy": "\u040C",
+ "Kappa": "\u039A",
+ "Kcedil": "\u0136",
+ "Kcy": "\u041A",
+ "Kfr": "\U0001D50E",
+ "Kopf": "\U0001D542",
+ "Kscr": "\U0001D4A6",
+ "LJcy": "\u0409",
+ "LT": "\u003C",
+ "Lacute": "\u0139",
+ "Lambda": "\u039B",
+ "Lang": "\u27EA",
+ "Laplacetrf": "\u2112",
+ "Larr": "\u219E",
+ "Lcaron": "\u013D",
+ "Lcedil": "\u013B",
+ "Lcy": "\u041B",
+ "LeftAngleBracket": "\u27E8",
+ "LeftArrow": "\u2190",
+ "LeftArrowBar": "\u21E4",
+ "LeftArrowRightArrow": "\u21C6",
+ "LeftCeiling": "\u2308",
+ "LeftDoubleBracket": "\u27E6",
+ "LeftDownTeeVector": "\u2961",
+ "LeftDownVector": "\u21C3",
+ "LeftDownVectorBar": "\u2959",
+ "LeftFloor": "\u230A",
+ "LeftRightArrow": "\u2194",
+ "LeftRightVector": "\u294E",
+ "LeftTee": "\u22A3",
+ "LeftTeeArrow": "\u21A4",
+ "LeftTeeVector": "\u295A",
+ "LeftTriangle": "\u22B2",
+ "LeftTriangleBar": "\u29CF",
+ "LeftTriangleEqual": "\u22B4",
+ "LeftUpDownVector": "\u2951",
+ "LeftUpTeeVector": "\u2960",
+ "LeftUpVector": "\u21BF",
+ "LeftUpVectorBar": "\u2958",
+ "LeftVector": "\u21BC",
+ "LeftVectorBar": "\u2952",
+ "Leftarrow": "\u21D0",
+ "Leftrightarrow": "\u21D4",
+ "LessEqualGreater": "\u22DA",
+ "LessFullEqual": "\u2266",
+ "LessGreater": "\u2276",
+ "LessLess": "\u2AA1",
+ "LessSlantEqual": "\u2A7D",
+ "LessTilde": "\u2272",
+ "Lfr": "\U0001D50F",
+ "Ll": "\u22D8",
+ "Lleftarrow": "\u21DA",
+ "Lmidot": "\u013F",
+ "LongLeftArrow": "\u27F5",
+ "LongLeftRightArrow": "\u27F7",
+ "LongRightArrow": "\u27F6",
+ "Longleftarrow": "\u27F8",
+ "Longleftrightarrow": "\u27FA",
+ "Longrightarrow": "\u27F9",
+ "Lopf": "\U0001D543",
+ "LowerLeftArrow": "\u2199",
+ "LowerRightArrow": "\u2198",
+ "Lscr": "\u2112",
+ "Lsh": "\u21B0",
+ "Lstrok": "\u0141",
+ "Lt": "\u226A",
+ "Map": "\u2905",
+ "Mcy": "\u041C",
+ "MediumSpace": "\u205F",
+ "Mellintrf": "\u2133",
+ "Mfr": "\U0001D510",
+ "MinusPlus": "\u2213",
+ "Mopf": "\U0001D544",
+ "Mscr": "\u2133",
+ "Mu": "\u039C",
+ "NJcy": "\u040A",
+ "Nacute": "\u0143",
+ "Ncaron": "\u0147",
+ "Ncedil": "\u0145",
+ "Ncy": "\u041D",
+ "NegativeMediumSpace": "\u200B",
+ "NegativeThickSpace": "\u200B",
+ "NegativeThinSpace": "\u200B",
+ "NegativeVeryThinSpace": "\u200B",
+ "NestedGreaterGreater": "\u226B",
+ "NestedLessLess": "\u226A",
+ "NewLine": "\u000A",
+ "Nfr": "\U0001D511",
+ "NoBreak": "\u2060",
+ "NonBreakingSpace": "\u00A0",
+ "Nopf": "\u2115",
+ "Not": "\u2AEC",
+ "NotCongruent": "\u2262",
+ "NotCupCap": "\u226D",
+ "NotDoubleVerticalBar": "\u2226",
+ "NotElement": "\u2209",
+ "NotEqual": "\u2260",
+ "NotEqualTilde": "\u2242\u0338",
+ "NotExists": "\u2204",
+ "NotGreater": "\u226F",
+ "NotGreaterEqual": "\u2271",
+ "NotGreaterFullEqual": "\u2267\u0338",
+ "NotGreaterGreater": "\u226B\u0338",
+ "NotGreaterLess": "\u2279",
+ "NotGreaterSlantEqual": "\u2A7E\u0338",
+ "NotGreaterTilde": "\u2275",
+ "NotHumpDownHump": "\u224E\u0338",
+ "NotHumpEqual": "\u224F\u0338",
+ "NotLeftTriangle": "\u22EA",
+ "NotLeftTriangleBar": "\u29CF\u0338",
+ "NotLeftTriangleEqual": "\u22EC",
+ "NotLess": "\u226E",
+ "NotLessEqual": "\u2270",
+ "NotLessGreater": "\u2278",
+ "NotLessLess": "\u226A\u0338",
+ "NotLessSlantEqual": "\u2A7D\u0338",
+ "NotLessTilde": "\u2274",
+ "NotNestedGreaterGreater": "\u2AA2\u0338",
+ "NotNestedLessLess": "\u2AA1\u0338",
+ "NotPrecedes": "\u2280",
+ "NotPrecedesEqual": "\u2AAF\u0338",
+ "NotPrecedesSlantEqual": "\u22E0",
+ "NotReverseElement": "\u220C",
+ "NotRightTriangle": "\u22EB",
+ "NotRightTriangleBar": "\u29D0\u0338",
+ "NotRightTriangleEqual": "\u22ED",
+ "NotSquareSubset": "\u228F\u0338",
+ "NotSquareSubsetEqual": "\u22E2",
+ "NotSquareSuperset": "\u2290\u0338",
+ "NotSquareSupersetEqual": "\u22E3",
+ "NotSubset": "\u2282\u20D2",
+ "NotSubsetEqual": "\u2288",
+ "NotSucceeds": "\u2281",
+ "NotSucceedsEqual": "\u2AB0\u0338",
+ "NotSucceedsSlantEqual": "\u22E1",
+ "NotSucceedsTilde": "\u227F\u0338",
+ "NotSuperset": "\u2283\u20D2",
+ "NotSupersetEqual": "\u2289",
+ "NotTilde": "\u2241",
+ "NotTildeEqual": "\u2244",
+ "NotTildeFullEqual": "\u2247",
+ "NotTildeTilde": "\u2249",
+ "NotVerticalBar": "\u2224",
+ "Nscr": "\U0001D4A9",
+ "Ntilde": "\u00D1",
+ "Nu": "\u039D",
+ "OElig": "\u0152",
+ "Oacute": "\u00D3",
+ "Ocirc": "\u00D4",
+ "Ocy": "\u041E",
+ "Odblac": "\u0150",
+ "Ofr": "\U0001D512",
+ "Ograve": "\u00D2",
+ "Omacr": "\u014C",
+ "Omega": "\u03A9",
+ "Omicron": "\u039F",
+ "Oopf": "\U0001D546",
+ "OpenCurlyDoubleQuote": "\u201C",
+ "OpenCurlyQuote": "\u2018",
+ "Or": "\u2A54",
+ "Oscr": "\U0001D4AA",
+ "Oslash": "\u00D8",
+ "Otilde": "\u00D5",
+ "Otimes": "\u2A37",
+ "Ouml": "\u00D6",
+ "OverBar": "\u203E",
+ "OverBrace": "\u23DE",
+ "OverBracket": "\u23B4",
+ "OverParenthesis": "\u23DC",
+ "PartialD": "\u2202",
+ "Pcy": "\u041F",
+ "Pfr": "\U0001D513",
+ "Phi": "\u03A6",
+ "Pi": "\u03A0",
+ "PlusMinus": "\u00B1",
+ "Poincareplane": "\u210C",
+ "Popf": "\u2119",
+ "Pr": "\u2ABB",
+ "Precedes": "\u227A",
+ "PrecedesEqual": "\u2AAF",
+ "PrecedesSlantEqual": "\u227C",
+ "PrecedesTilde": "\u227E",
+ "Prime": "\u2033",
+ "Product": "\u220F",
+ "Proportion": "\u2237",
+ "Proportional": "\u221D",
+ "Pscr": "\U0001D4AB",
+ "Psi": "\u03A8",
+ "QUOT": "\u0022",
+ "Qfr": "\U0001D514",
+ "Qopf": "\u211A",
+ "Qscr": "\U0001D4AC",
+ "RBarr": "\u2910",
+ "REG": "\u00AE",
+ "Racute": "\u0154",
+ "Rang": "\u27EB",
+ "Rarr": "\u21A0",
+ "Rarrtl": "\u2916",
+ "Rcaron": "\u0158",
+ "Rcedil": "\u0156",
+ "Rcy": "\u0420",
+ "Re": "\u211C",
+ "ReverseElement": "\u220B",
+ "ReverseEquilibrium": "\u21CB",
+ "ReverseUpEquilibrium": "\u296F",
+ "Rfr": "\u211C",
+ "Rho": "\u03A1",
+ "RightAngleBracket": "\u27E9",
+ "RightArrow": "\u2192",
+ "RightArrowBar": "\u21E5",
+ "RightArrowLeftArrow": "\u21C4",
+ "RightCeiling": "\u2309",
+ "RightDoubleBracket": "\u27E7",
+ "RightDownTeeVector": "\u295D",
+ "RightDownVector": "\u21C2",
+ "RightDownVectorBar": "\u2955",
+ "RightFloor": "\u230B",
+ "RightTee": "\u22A2",
+ "RightTeeArrow": "\u21A6",
+ "RightTeeVector": "\u295B",
+ "RightTriangle": "\u22B3",
+ "RightTriangleBar": "\u29D0",
+ "RightTriangleEqual": "\u22B5",
+ "RightUpDownVector": "\u294F",
+ "RightUpTeeVector": "\u295C",
+ "RightUpVector": "\u21BE",
+ "RightUpVectorBar": "\u2954",
+ "RightVector": "\u21C0",
+ "RightVectorBar": "\u2953",
+ "Rightarrow": "\u21D2",
+ "Ropf": "\u211D",
+ "RoundImplies": "\u2970",
+ "Rrightarrow": "\u21DB",
+ "Rscr": "\u211B",
+ "Rsh": "\u21B1",
+ "RuleDelayed": "\u29F4",
+ "SHCHcy": "\u0429",
+ "SHcy": "\u0428",
+ "SOFTcy": "\u042C",
+ "Sacute": "\u015A",
+ "Sc": "\u2ABC",
+ "Scaron": "\u0160",
+ "Scedil": "\u015E",
+ "Scirc": "\u015C",
+ "Scy": "\u0421",
+ "Sfr": "\U0001D516",
+ "ShortDownArrow": "\u2193",
+ "ShortLeftArrow": "\u2190",
+ "ShortRightArrow": "\u2192",
+ "ShortUpArrow": "\u2191",
+ "Sigma": "\u03A3",
+ "SmallCircle": "\u2218",
+ "Sopf": "\U0001D54A",
+ "Sqrt": "\u221A",
+ "Square": "\u25A1",
+ "SquareIntersection": "\u2293",
+ "SquareSubset": "\u228F",
+ "SquareSubsetEqual": "\u2291",
+ "SquareSuperset": "\u2290",
+ "SquareSupersetEqual": "\u2292",
+ "SquareUnion": "\u2294",
+ "Sscr": "\U0001D4AE",
+ "Star": "\u22C6",
+ "Sub": "\u22D0",
+ "Subset": "\u22D0",
+ "SubsetEqual": "\u2286",
+ "Succeeds": "\u227B",
+ "SucceedsEqual": "\u2AB0",
+ "SucceedsSlantEqual": "\u227D",
+ "SucceedsTilde": "\u227F",
+ "SuchThat": "\u220B",
+ "Sum": "\u2211",
+ "Sup": "\u22D1",
+ "Superset": "\u2283",
+ "SupersetEqual": "\u2287",
+ "Supset": "\u22D1",
+ "THORN": "\u00DE",
+ "TRADE": "\u2122",
+ "TSHcy": "\u040B",
+ "TScy": "\u0426",
+ "Tab": "\u0009",
+ "Tau": "\u03A4",
+ "Tcaron": "\u0164",
+ "Tcedil": "\u0162",
+ "Tcy": "\u0422",
+ "Tfr": "\U0001D517",
+ "Therefore": "\u2234",
+ "Theta": "\u0398",
+ "ThickSpace": "\u205F\u200A",
+ "ThinSpace": "\u2009",
+ "Tilde": "\u223C",
+ "TildeEqual": "\u2243",
+ "TildeFullEqual": "\u2245",
+ "TildeTilde": "\u2248",
+ "Topf": "\U0001D54B",
+ "TripleDot": "\u20DB",
+ "Tscr": "\U0001D4AF",
+ "Tstrok": "\u0166",
+ "Uacute": "\u00DA",
+ "Uarr": "\u219F",
+ "Uarrocir": "\u2949",
+ "Ubrcy": "\u040E",
+ "Ubreve": "\u016C",
+ "Ucirc": "\u00DB",
+ "Ucy": "\u0423",
+ "Udblac": "\u0170",
+ "Ufr": "\U0001D518",
+ "Ugrave": "\u00D9",
+ "Umacr": "\u016A",
+ "UnderBar": "\u005F",
+ "UnderBrace": "\u23DF",
+ "UnderBracket": "\u23B5",
+ "UnderParenthesis": "\u23DD",
+ "Union": "\u22C3",
+ "UnionPlus": "\u228E",
+ "Uogon": "\u0172",
+ "Uopf": "\U0001D54C",
+ "UpArrow": "\u2191",
+ "UpArrowBar": "\u2912",
+ "UpArrowDownArrow": "\u21C5",
+ "UpDownArrow": "\u2195",
+ "UpEquilibrium": "\u296E",
+ "UpTee": "\u22A5",
+ "UpTeeArrow": "\u21A5",
+ "Uparrow": "\u21D1",
+ "Updownarrow": "\u21D5",
+ "UpperLeftArrow": "\u2196",
+ "UpperRightArrow": "\u2197",
+ "Upsi": "\u03D2",
+ "Upsilon": "\u03A5",
+ "Uring": "\u016E",
+ "Uscr": "\U0001D4B0",
+ "Utilde": "\u0168",
+ "Uuml": "\u00DC",
+ "VDash": "\u22AB",
+ "Vbar": "\u2AEB",
+ "Vcy": "\u0412",
+ "Vdash": "\u22A9",
+ "Vdashl": "\u2AE6",
+ "Vee": "\u22C1",
+ "Verbar": "\u2016",
+ "Vert": "\u2016",
+ "VerticalBar": "\u2223",
+ "VerticalLine": "\u007C",
+ "VerticalSeparator": "\u2758",
+ "VerticalTilde": "\u2240",
+ "VeryThinSpace": "\u200A",
+ "Vfr": "\U0001D519",
+ "Vopf": "\U0001D54D",
+ "Vscr": "\U0001D4B1",
+ "Vvdash": "\u22AA",
+ "Wcirc": "\u0174",
+ "Wedge": "\u22C0",
+ "Wfr": "\U0001D51A",
+ "Wopf": "\U0001D54E",
+ "Wscr": "\U0001D4B2",
+ "Xfr": "\U0001D51B",
+ "Xi": "\u039E",
+ "Xopf": "\U0001D54F",
+ "Xscr": "\U0001D4B3",
+ "YAcy": "\u042F",
+ "YIcy": "\u0407",
+ "YUcy": "\u042E",
+ "Yacute": "\u00DD",
+ "Ycirc": "\u0176",
+ "Ycy": "\u042B",
+ "Yfr": "\U0001D51C",
+ "Yopf": "\U0001D550",
+ "Yscr": "\U0001D4B4",
+ "Yuml": "\u0178",
+ "ZHcy": "\u0416",
+ "Zacute": "\u0179",
+ "Zcaron": "\u017D",
+ "Zcy": "\u0417",
+ "Zdot": "\u017B",
+ "ZeroWidthSpace": "\u200B",
+ "Zeta": "\u0396",
+ "Zfr": "\u2128",
+ "Zopf": "\u2124",
+ "Zscr": "\U0001D4B5",
+ "aacute": "\u00E1",
+ "abreve": "\u0103",
+ "ac": "\u223E",
+ "acE": "\u223E\u0333",
+ "acd": "\u223F",
+ "acirc": "\u00E2",
+ "acute": "\u00B4",
+ "acy": "\u0430",
+ "aelig": "\u00E6",
+ "af": "\u2061",
+ "afr": "\U0001D51E",
+ "agrave": "\u00E0",
+ "alefsym": "\u2135",
+ "aleph": "\u2135",
+ "alpha": "\u03B1",
+ "amacr": "\u0101",
+ "amalg": "\u2A3F",
+ "amp": "\u0026",
+ "and": "\u2227",
+ "andand": "\u2A55",
+ "andd": "\u2A5C",
+ "andslope": "\u2A58",
+ "andv": "\u2A5A",
+ "ang": "\u2220",
+ "ange": "\u29A4",
+ "angle": "\u2220",
+ "angmsd": "\u2221",
+ "angmsdaa": "\u29A8",
+ "angmsdab": "\u29A9",
+ "angmsdac": "\u29AA",
+ "angmsdad": "\u29AB",
+ "angmsdae": "\u29AC",
+ "angmsdaf": "\u29AD",
+ "angmsdag": "\u29AE",
+ "angmsdah": "\u29AF",
+ "angrt": "\u221F",
+ "angrtvb": "\u22BE",
+ "angrtvbd": "\u299D",
+ "angsph": "\u2222",
+ "angst": "\u00C5",
+ "angzarr": "\u237C",
+ "aogon": "\u0105",
+ "aopf": "\U0001D552",
+ "ap": "\u2248",
+ "apE": "\u2A70",
+ "apacir": "\u2A6F",
+ "ape": "\u224A",
+ "apid": "\u224B",
+ "apos": "\u0027",
+ "approx": "\u2248",
+ "approxeq": "\u224A",
+ "aring": "\u00E5",
+ "ascr": "\U0001D4B6",
+ "ast": "\u002A",
+ "asymp": "\u2248",
+ "asympeq": "\u224D",
+ "atilde": "\u00E3",
+ "auml": "\u00E4",
+ "awconint": "\u2233",
+ "awint": "\u2A11",
+ "bNot": "\u2AED",
+ "backcong": "\u224C",
+ "backepsilon": "\u03F6",
+ "backprime": "\u2035",
+ "backsim": "\u223D",
+ "backsimeq": "\u22CD",
+ "barvee": "\u22BD",
+ "barwed": "\u2305",
+ "barwedge": "\u2305",
+ "bbrk": "\u23B5",
+ "bbrktbrk": "\u23B6",
+ "bcong": "\u224C",
+ "bcy": "\u0431",
+ "bdquo": "\u201E",
+ "becaus": "\u2235",
+ "because": "\u2235",
+ "bemptyv": "\u29B0",
+ "bepsi": "\u03F6",
+ "bernou": "\u212C",
+ "beta": "\u03B2",
+ "beth": "\u2136",
+ "between": "\u226C",
+ "bfr": "\U0001D51F",
+ "bigcap": "\u22C2",
+ "bigcirc": "\u25EF",
+ "bigcup": "\u22C3",
+ "bigodot": "\u2A00",
+ "bigoplus": "\u2A01",
+ "bigotimes": "\u2A02",
+ "bigsqcup": "\u2A06",
+ "bigstar": "\u2605",
+ "bigtriangledown": "\u25BD",
+ "bigtriangleup": "\u25B3",
+ "biguplus": "\u2A04",
+ "bigvee": "\u22C1",
+ "bigwedge": "\u22C0",
+ "bkarow": "\u290D",
+ "blacklozenge": "\u29EB",
+ "blacksquare": "\u25AA",
+ "blacktriangle": "\u25B4",
+ "blacktriangledown": "\u25BE",
+ "blacktriangleleft": "\u25C2",
+ "blacktriangleright": "\u25B8",
+ "blank": "\u2423",
+ "blk12": "\u2592",
+ "blk14": "\u2591",
+ "blk34": "\u2593",
+ "block": "\u2588",
+ "bne": "\u003D\u20E5",
+ "bnequiv": "\u2261\u20E5",
+ "bnot": "\u2310",
+ "bopf": "\U0001D553",
+ "bot": "\u22A5",
+ "bottom": "\u22A5",
+ "bowtie": "\u22C8",
+ "boxDL": "\u2557",
+ "boxDR": "\u2554",
+ "boxDl": "\u2556",
+ "boxDr": "\u2553",
+ "boxH": "\u2550",
+ "boxHD": "\u2566",
+ "boxHU": "\u2569",
+ "boxHd": "\u2564",
+ "boxHu": "\u2567",
+ "boxUL": "\u255D",
+ "boxUR": "\u255A",
+ "boxUl": "\u255C",
+ "boxUr": "\u2559",
+ "boxV": "\u2551",
+ "boxVH": "\u256C",
+ "boxVL": "\u2563",
+ "boxVR": "\u2560",
+ "boxVh": "\u256B",
+ "boxVl": "\u2562",
+ "boxVr": "\u255F",
+ "boxbox": "\u29C9",
+ "boxdL": "\u2555",
+ "boxdR": "\u2552",
+ "boxdl": "\u2510",
+ "boxdr": "\u250C",
+ "boxh": "\u2500",
+ "boxhD": "\u2565",
+ "boxhU": "\u2568",
+ "boxhd": "\u252C",
+ "boxhu": "\u2534",
+ "boxminus": "\u229F",
+ "boxplus": "\u229E",
+ "boxtimes": "\u22A0",
+ "boxuL": "\u255B",
+ "boxuR": "\u2558",
+ "boxul": "\u2518",
+ "boxur": "\u2514",
+ "boxv": "\u2502",
+ "boxvH": "\u256A",
+ "boxvL": "\u2561",
+ "boxvR": "\u255E",
+ "boxvh": "\u253C",
+ "boxvl": "\u2524",
+ "boxvr": "\u251C",
+ "bprime": "\u2035",
+ "breve": "\u02D8",
+ "brvbar": "\u00A6",
+ "bscr": "\U0001D4B7",
+ "bsemi": "\u204F",
+ "bsim": "\u223D",
+ "bsime": "\u22CD",
+ "bsol": "\u005C",
+ "bsolb": "\u29C5",
+ "bsolhsub": "\u27C8",
+ "bull": "\u2022",
+ "bullet": "\u2022",
+ "bump": "\u224E",
+ "bumpE": "\u2AAE",
+ "bumpe": "\u224F",
+ "bumpeq": "\u224F",
+ "cacute": "\u0107",
+ "cap": "\u2229",
+ "capand": "\u2A44",
+ "capbrcup": "\u2A49",
+ "capcap": "\u2A4B",
+ "capcup": "\u2A47",
+ "capdot": "\u2A40",
+ "caps": "\u2229\uFE00",
+ "caret": "\u2041",
+ "caron": "\u02C7",
+ "ccaps": "\u2A4D",
+ "ccaron": "\u010D",
+ "ccedil": "\u00E7",
+ "ccirc": "\u0109",
+ "ccups": "\u2A4C",
+ "ccupssm": "\u2A50",
+ "cdot": "\u010B",
+ "cedil": "\u00B8",
+ "cemptyv": "\u29B2",
+ "cent": "\u00A2",
+ "centerdot": "\u00B7",
+ "cfr": "\U0001D520",
+ "chcy": "\u0447",
+ "check": "\u2713",
+ "checkmark": "\u2713",
+ "chi": "\u03C7",
+ "cir": "\u25CB",
+ "cirE": "\u29C3",
+ "circ": "\u02C6",
+ "circeq": "\u2257",
+ "circlearrowleft": "\u21BA",
+ "circlearrowright": "\u21BB",
+ "circledR": "\u00AE",
+ "circledS": "\u24C8",
+ "circledast": "\u229B",
+ "circledcirc": "\u229A",
+ "circleddash": "\u229D",
+ "cire": "\u2257",
+ "cirfnint": "\u2A10",
+ "cirmid": "\u2AEF",
+ "cirscir": "\u29C2",
+ "clubs": "\u2663",
+ "clubsuit": "\u2663",
+ "colon": "\u003A",
+ "colone": "\u2254",
+ "coloneq": "\u2254",
+ "comma": "\u002C",
+ "commat": "\u0040",
+ "comp": "\u2201",
+ "compfn": "\u2218",
+ "complement": "\u2201",
+ "complexes": "\u2102",
+ "cong": "\u2245",
+ "congdot": "\u2A6D",
+ "conint": "\u222E",
+ "copf": "\U0001D554",
+ "coprod": "\u2210",
+ "copy": "\u00A9",
+ "copysr": "\u2117",
+ "crarr": "\u21B5",
+ "cross": "\u2717",
+ "cscr": "\U0001D4B8",
+ "csub": "\u2ACF",
+ "csube": "\u2AD1",
+ "csup": "\u2AD0",
+ "csupe": "\u2AD2",
+ "ctdot": "\u22EF",
+ "cudarrl": "\u2938",
+ "cudarrr": "\u2935",
+ "cuepr": "\u22DE",
+ "cuesc": "\u22DF",
+ "cularr": "\u21B6",
+ "cularrp": "\u293D",
+ "cup": "\u222A",
+ "cupbrcap": "\u2A48",
+ "cupcap": "\u2A46",
+ "cupcup": "\u2A4A",
+ "cupdot": "\u228D",
+ "cupor": "\u2A45",
+ "cups": "\u222A\uFE00",
+ "curarr": "\u21B7",
+ "curarrm": "\u293C",
+ "curlyeqprec": "\u22DE",
+ "curlyeqsucc": "\u22DF",
+ "curlyvee": "\u22CE",
+ "curlywedge": "\u22CF",
+ "curren": "\u00A4",
+ "curvearrowleft": "\u21B6",
+ "curvearrowright": "\u21B7",
+ "cuvee": "\u22CE",
+ "cuwed": "\u22CF",
+ "cwconint": "\u2232",
+ "cwint": "\u2231",
+ "cylcty": "\u232D",
+ "dArr": "\u21D3",
+ "dHar": "\u2965",
+ "dagger": "\u2020",
+ "daleth": "\u2138",
+ "darr": "\u2193",
+ "dash": "\u2010",
+ "dashv": "\u22A3",
+ "dbkarow": "\u290F",
+ "dblac": "\u02DD",
+ "dcaron": "\u010F",
+ "dcy": "\u0434",
+ "dd": "\u2146",
+ "ddagger": "\u2021",
+ "ddarr": "\u21CA",
+ "ddotseq": "\u2A77",
+ "deg": "\u00B0",
+ "delta": "\u03B4",
+ "demptyv": "\u29B1",
+ "dfisht": "\u297F",
+ "dfr": "\U0001D521",
+ "dharl": "\u21C3",
+ "dharr": "\u21C2",
+ "diam": "\u22C4",
+ "diamond": "\u22C4",
+ "diamondsuit": "\u2666",
+ "diams": "\u2666",
+ "die": "\u00A8",
+ "digamma": "\u03DD",
+ "disin": "\u22F2",
+ "div": "\u00F7",
+ "divide": "\u00F7",
+ "divideontimes": "\u22C7",
+ "divonx": "\u22C7",
+ "djcy": "\u0452",
+ "dlcorn": "\u231E",
+ "dlcrop": "\u230D",
+ "dollar": "\u0024",
+ "dopf": "\U0001D555",
+ "dot": "\u02D9",
+ "doteq": "\u2250",
+ "doteqdot": "\u2251",
+ "dotminus": "\u2238",
+ "dotplus": "\u2214",
+ "dotsquare": "\u22A1",
+ "doublebarwedge": "\u2306",
+ "downarrow": "\u2193",
+ "downdownarrows": "\u21CA",
+ "downharpoonleft": "\u21C3",
+ "downharpoonright": "\u21C2",
+ "drbkarow": "\u2910",
+ "drcorn": "\u231F",
+ "drcrop": "\u230C",
+ "dscr": "\U0001D4B9",
+ "dscy": "\u0455",
+ "dsol": "\u29F6",
+ "dstrok": "\u0111",
+ "dtdot": "\u22F1",
+ "dtri": "\u25BF",
+ "dtrif": "\u25BE",
+ "duarr": "\u21F5",
+ "duhar": "\u296F",
+ "dwangle": "\u29A6",
+ "dzcy": "\u045F",
+ "dzigrarr": "\u27FF",
+ "eDDot": "\u2A77",
+ "eDot": "\u2251",
+ "eacute": "\u00E9",
+ "easter": "\u2A6E",
+ "ecaron": "\u011B",
+ "ecir": "\u2256",
+ "ecirc": "\u00EA",
+ "ecolon": "\u2255",
+ "ecy": "\u044D",
+ "edot": "\u0117",
+ "ee": "\u2147",
+ "efDot": "\u2252",
+ "efr": "\U0001D522",
+ "eg": "\u2A9A",
+ "egrave": "\u00E8",
+ "egs": "\u2A96",
+ "egsdot": "\u2A98",
+ "el": "\u2A99",
+ "elinters": "\u23E7",
+ "ell": "\u2113",
+ "els": "\u2A95",
+ "elsdot": "\u2A97",
+ "emacr": "\u0113",
+ "empty": "\u2205",
+ "emptyset": "\u2205",
+ "emptyv": "\u2205",
+ "emsp": "\u2003",
+ "emsp13": "\u2004",
+ "emsp14": "\u2005",
+ "eng": "\u014B",
+ "ensp": "\u2002",
+ "eogon": "\u0119",
+ "eopf": "\U0001D556",
+ "epar": "\u22D5",
+ "eparsl": "\u29E3",
+ "eplus": "\u2A71",
+ "epsi": "\u03B5",
+ "epsilon": "\u03B5",
+ "epsiv": "\u03F5",
+ "eqcirc": "\u2256",
+ "eqcolon": "\u2255",
+ "eqsim": "\u2242",
+ "eqslantgtr": "\u2A96",
+ "eqslantless": "\u2A95",
+ "equals": "\u003D",
+ "equest": "\u225F",
+ "equiv": "\u2261",
+ "equivDD": "\u2A78",
+ "eqvparsl": "\u29E5",
+ "erDot": "\u2253",
+ "erarr": "\u2971",
+ "escr": "\u212F",
+ "esdot": "\u2250",
+ "esim": "\u2242",
+ "eta": "\u03B7",
+ "eth": "\u00F0",
+ "euml": "\u00EB",
+ "euro": "\u20AC",
+ "excl": "\u0021",
+ "exist": "\u2203",
+ "expectation": "\u2130",
+ "exponentiale": "\u2147",
+ "fallingdotseq": "\u2252",
+ "fcy": "\u0444",
+ "female": "\u2640",
+ "ffilig": "\uFB03",
+ "fflig": "\uFB00",
+ "ffllig": "\uFB04",
+ "ffr": "\U0001D523",
+ "filig": "\uFB01",
+ "fjlig": "\u0066\u006A",
+ "flat": "\u266D",
+ "fllig": "\uFB02",
+ "fltns": "\u25B1",
+ "fnof": "\u0192",
+ "fopf": "\U0001D557",
+ "forall": "\u2200",
+ "fork": "\u22D4",
+ "forkv": "\u2AD9",
+ "fpartint": "\u2A0D",
+ "frac12": "\u00BD",
+ "frac13": "\u2153",
+ "frac14": "\u00BC",
+ "frac15": "\u2155",
+ "frac16": "\u2159",
+ "frac18": "\u215B",
+ "frac23": "\u2154",
+ "frac25": "\u2156",
+ "frac34": "\u00BE",
+ "frac35": "\u2157",
+ "frac38": "\u215C",
+ "frac45": "\u2158",
+ "frac56": "\u215A",
+ "frac58": "\u215D",
+ "frac78": "\u215E",
+ "frasl": "\u2044",
+ "frown": "\u2322",
+ "fscr": "\U0001D4BB",
+ "gE": "\u2267",
+ "gEl": "\u2A8C",
+ "gacute": "\u01F5",
+ "gamma": "\u03B3",
+ "gammad": "\u03DD",
+ "gap": "\u2A86",
+ "gbreve": "\u011F",
+ "gcirc": "\u011D",
+ "gcy": "\u0433",
+ "gdot": "\u0121",
+ "ge": "\u2265",
+ "gel": "\u22DB",
+ "geq": "\u2265",
+ "geqq": "\u2267",
+ "geqslant": "\u2A7E",
+ "ges": "\u2A7E",
+ "gescc": "\u2AA9",
+ "gesdot": "\u2A80",
+ "gesdoto": "\u2A82",
+ "gesdotol": "\u2A84",
+ "gesl": "\u22DB\uFE00",
+ "gesles": "\u2A94",
+ "gfr": "\U0001D524",
+ "gg": "\u226B",
+ "ggg": "\u22D9",
+ "gimel": "\u2137",
+ "gjcy": "\u0453",
+ "gl": "\u2277",
+ "glE": "\u2A92",
+ "gla": "\u2AA5",
+ "glj": "\u2AA4",
+ "gnE": "\u2269",
+ "gnap": "\u2A8A",
+ "gnapprox": "\u2A8A",
+ "gne": "\u2A88",
+ "gneq": "\u2A88",
+ "gneqq": "\u2269",
+ "gnsim": "\u22E7",
+ "gopf": "\U0001D558",
+ "grave": "\u0060",
+ "gscr": "\u210A",
+ "gsim": "\u2273",
+ "gsime": "\u2A8E",
+ "gsiml": "\u2A90",
+ "gt": "\u003E",
+ "gtcc": "\u2AA7",
+ "gtcir": "\u2A7A",
+ "gtdot": "\u22D7",
+ "gtlPar": "\u2995",
+ "gtquest": "\u2A7C",
+ "gtrapprox": "\u2A86",
+ "gtrarr": "\u2978",
+ "gtrdot": "\u22D7",
+ "gtreqless": "\u22DB",
+ "gtreqqless": "\u2A8C",
+ "gtrless": "\u2277",
+ "gtrsim": "\u2273",
+ "gvertneqq": "\u2269\uFE00",
+ "gvnE": "\u2269\uFE00",
+ "hArr": "\u21D4",
+ "hairsp": "\u200A",
+ "half": "\u00BD",
+ "hamilt": "\u210B",
+ "hardcy": "\u044A",
+ "harr": "\u2194",
+ "harrcir": "\u2948",
+ "harrw": "\u21AD",
+ "hbar": "\u210F",
+ "hcirc": "\u0125",
+ "hearts": "\u2665",
+ "heartsuit": "\u2665",
+ "hellip": "\u2026",
+ "hercon": "\u22B9",
+ "hfr": "\U0001D525",
+ "hksearow": "\u2925",
+ "hkswarow": "\u2926",
+ "hoarr": "\u21FF",
+ "homtht": "\u223B",
+ "hookleftarrow": "\u21A9",
+ "hookrightarrow": "\u21AA",
+ "hopf": "\U0001D559",
+ "horbar": "\u2015",
+ "hscr": "\U0001D4BD",
+ "hslash": "\u210F",
+ "hstrok": "\u0127",
+ "hybull": "\u2043",
+ "hyphen": "\u2010",
+ "iacute": "\u00ED",
+ "ic": "\u2063",
+ "icirc": "\u00EE",
+ "icy": "\u0438",
+ "iecy": "\u0435",
+ "iexcl": "\u00A1",
+ "iff": "\u21D4",
+ "ifr": "\U0001D526",
+ "igrave": "\u00EC",
+ "ii": "\u2148",
+ "iiiint": "\u2A0C",
+ "iiint": "\u222D",
+ "iinfin": "\u29DC",
+ "iiota": "\u2129",
+ "ijlig": "\u0133",
+ "imacr": "\u012B",
+ "image": "\u2111",
+ "imagline": "\u2110",
+ "imagpart": "\u2111",
+ "imath": "\u0131",
+ "imof": "\u22B7",
+ "imped": "\u01B5",
+ "in": "\u2208",
+ "incare": "\u2105",
+ "infin": "\u221E",
+ "infintie": "\u29DD",
+ "inodot": "\u0131",
+ "int": "\u222B",
+ "intcal": "\u22BA",
+ "integers": "\u2124",
+ "intercal": "\u22BA",
+ "intlarhk": "\u2A17",
+ "intprod": "\u2A3C",
+ "iocy": "\u0451",
+ "iogon": "\u012F",
+ "iopf": "\U0001D55A",
+ "iota": "\u03B9",
+ "iprod": "\u2A3C",
+ "iquest": "\u00BF",
+ "iscr": "\U0001D4BE",
+ "isin": "\u2208",
+ "isinE": "\u22F9",
+ "isindot": "\u22F5",
+ "isins": "\u22F4",
+ "isinsv": "\u22F3",
+ "isinv": "\u2208",
+ "it": "\u2062",
+ "itilde": "\u0129",
+ "iukcy": "\u0456",
+ "iuml": "\u00EF",
+ "jcirc": "\u0135",
+ "jcy": "\u0439",
+ "jfr": "\U0001D527",
+ "jmath": "\u0237",
+ "jopf": "\U0001D55B",
+ "jscr": "\U0001D4BF",
+ "jsercy": "\u0458",
+ "jukcy": "\u0454",
+ "kappa": "\u03BA",
+ "kappav": "\u03F0",
+ "kcedil": "\u0137",
+ "kcy": "\u043A",
+ "kfr": "\U0001D528",
+ "kgreen": "\u0138",
+ "khcy": "\u0445",
+ "kjcy": "\u045C",
+ "kopf": "\U0001D55C",
+ "kscr": "\U0001D4C0",
+ "lAarr": "\u21DA",
+ "lArr": "\u21D0",
+ "lAtail": "\u291B",
+ "lBarr": "\u290E",
+ "lE": "\u2266",
+ "lEg": "\u2A8B",
+ "lHar": "\u2962",
+ "lacute": "\u013A",
+ "laemptyv": "\u29B4",
+ "lagran": "\u2112",
+ "lambda": "\u03BB",
+ "lang": "\u27E8",
+ "langd": "\u2991",
+ "langle": "\u27E8",
+ "lap": "\u2A85",
+ "laquo": "\u00AB",
+ "larr": "\u2190",
+ "larrb": "\u21E4",
+ "larrbfs": "\u291F",
+ "larrfs": "\u291D",
+ "larrhk": "\u21A9",
+ "larrlp": "\u21AB",
+ "larrpl": "\u2939",
+ "larrsim": "\u2973",
+ "larrtl": "\u21A2",
+ "lat": "\u2AAB",
+ "latail": "\u2919",
+ "late": "\u2AAD",
+ "lates": "\u2AAD\uFE00",
+ "lbarr": "\u290C",
+ "lbbrk": "\u2772",
+ "lbrace": "\u007B",
+ "lbrack": "\u005B",
+ "lbrke": "\u298B",
+ "lbrksld": "\u298F",
+ "lbrkslu": "\u298D",
+ "lcaron": "\u013E",
+ "lcedil": "\u013C",
+ "lceil": "\u2308",
+ "lcub": "\u007B",
+ "lcy": "\u043B",
+ "ldca": "\u2936",
+ "ldquo": "\u201C",
+ "ldquor": "\u201E",
+ "ldrdhar": "\u2967",
+ "ldrushar": "\u294B",
+ "ldsh": "\u21B2",
+ "le": "\u2264",
+ "leftarrow": "\u2190",
+ "leftarrowtail": "\u21A2",
+ "leftharpoondown": "\u21BD",
+ "leftharpoonup": "\u21BC",
+ "leftleftarrows": "\u21C7",
+ "leftrightarrow": "\u2194",
+ "leftrightarrows": "\u21C6",
+ "leftrightharpoons": "\u21CB",
+ "leftrightsquigarrow": "\u21AD",
+ "leftthreetimes": "\u22CB",
+ "leg": "\u22DA",
+ "leq": "\u2264",
+ "leqq": "\u2266",
+ "leqslant": "\u2A7D",
+ "les": "\u2A7D",
+ "lescc": "\u2AA8",
+ "lesdot": "\u2A7F",
+ "lesdoto": "\u2A81",
+ "lesdotor": "\u2A83",
+ "lesg": "\u22DA\uFE00",
+ "lesges": "\u2A93",
+ "lessapprox": "\u2A85",
+ "lessdot": "\u22D6",
+ "lesseqgtr": "\u22DA",
+ "lesseqqgtr": "\u2A8B",
+ "lessgtr": "\u2276",
+ "lesssim": "\u2272",
+ "lfisht": "\u297C",
+ "lfloor": "\u230A",
+ "lfr": "\U0001D529",
+ "lg": "\u2276",
+ "lgE": "\u2A91",
+ "lhard": "\u21BD",
+ "lharu": "\u21BC",
+ "lharul": "\u296A",
+ "lhblk": "\u2584",
+ "ljcy": "\u0459",
+ "ll": "\u226A",
+ "llarr": "\u21C7",
+ "llcorner": "\u231E",
+ "llhard": "\u296B",
+ "lltri": "\u25FA",
+ "lmidot": "\u0140",
+ "lmoust": "\u23B0",
+ "lmoustache": "\u23B0",
+ "lnE": "\u2268",
+ "lnap": "\u2A89",
+ "lnapprox": "\u2A89",
+ "lne": "\u2A87",
+ "lneq": "\u2A87",
+ "lneqq": "\u2268",
+ "lnsim": "\u22E6",
+ "loang": "\u27EC",
+ "loarr": "\u21FD",
+ "lobrk": "\u27E6",
+ "longleftarrow": "\u27F5",
+ "longleftrightarrow": "\u27F7",
+ "longmapsto": "\u27FC",
+ "longrightarrow": "\u27F6",
+ "looparrowleft": "\u21AB",
+ "looparrowright": "\u21AC",
+ "lopar": "\u2985",
+ "lopf": "\U0001D55D",
+ "loplus": "\u2A2D",
+ "lotimes": "\u2A34",
+ "lowast": "\u2217",
+ "lowbar": "\u005F",
+ "loz": "\u25CA",
+ "lozenge": "\u25CA",
+ "lozf": "\u29EB",
+ "lpar": "\u0028",
+ "lparlt": "\u2993",
+ "lrarr": "\u21C6",
+ "lrcorner": "\u231F",
+ "lrhar": "\u21CB",
+ "lrhard": "\u296D",
+ "lrm": "\u200E",
+ "lrtri": "\u22BF",
+ "lsaquo": "\u2039",
+ "lscr": "\U0001D4C1",
+ "lsh": "\u21B0",
+ "lsim": "\u2272",
+ "lsime": "\u2A8D",
+ "lsimg": "\u2A8F",
+ "lsqb": "\u005B",
+ "lsquo": "\u2018",
+ "lsquor": "\u201A",
+ "lstrok": "\u0142",
+ "lt": "\u003C",
+ "ltcc": "\u2AA6",
+ "ltcir": "\u2A79",
+ "ltdot": "\u22D6",
+ "lthree": "\u22CB",
+ "ltimes": "\u22C9",
+ "ltlarr": "\u2976",
+ "ltquest": "\u2A7B",
+ "ltrPar": "\u2996",
+ "ltri": "\u25C3",
+ "ltrie": "\u22B4",
+ "ltrif": "\u25C2",
+ "lurdshar": "\u294A",
+ "luruhar": "\u2966",
+ "lvertneqq": "\u2268\uFE00",
+ "lvnE": "\u2268\uFE00",
+ "mDDot": "\u223A",
+ "macr": "\u00AF",
+ "male": "\u2642",
+ "malt": "\u2720",
+ "maltese": "\u2720",
+ "map": "\u21A6",
+ "mapsto": "\u21A6",
+ "mapstodown": "\u21A7",
+ "mapstoleft": "\u21A4",
+ "mapstoup": "\u21A5",
+ "marker": "\u25AE",
+ "mcomma": "\u2A29",
+ "mcy": "\u043C",
+ "mdash": "\u2014",
+ "measuredangle": "\u2221",
+ "mfr": "\U0001D52A",
+ "mho": "\u2127",
+ "micro": "\u00B5",
+ "mid": "\u2223",
+ "midast": "\u002A",
+ "midcir": "\u2AF0",
+ "middot": "\u00B7",
+ "minus": "\u2212",
+ "minusb": "\u229F",
+ "minusd": "\u2238",
+ "minusdu": "\u2A2A",
+ "mlcp": "\u2ADB",
+ "mldr": "\u2026",
+ "mnplus": "\u2213",
+ "models": "\u22A7",
+ "mopf": "\U0001D55E",
+ "mp": "\u2213",
+ "mscr": "\U0001D4C2",
+ "mstpos": "\u223E",
+ "mu": "\u03BC",
+ "multimap": "\u22B8",
+ "mumap": "\u22B8",
+ "nGg": "\u22D9\u0338",
+ "nGt": "\u226B\u20D2",
+ "nGtv": "\u226B\u0338",
+ "nLeftarrow": "\u21CD",
+ "nLeftrightarrow": "\u21CE",
+ "nLl": "\u22D8\u0338",
+ "nLt": "\u226A\u20D2",
+ "nLtv": "\u226A\u0338",
+ "nRightarrow": "\u21CF",
+ "nVDash": "\u22AF",
+ "nVdash": "\u22AE",
+ "nabla": "\u2207",
+ "nacute": "\u0144",
+ "nang": "\u2220\u20D2",
+ "nap": "\u2249",
+ "napE": "\u2A70\u0338",
+ "napid": "\u224B\u0338",
+ "napos": "\u0149",
+ "napprox": "\u2249",
+ "natur": "\u266E",
+ "natural": "\u266E",
+ "naturals": "\u2115",
+ "nbsp": "\u00A0",
+ "nbump": "\u224E\u0338",
+ "nbumpe": "\u224F\u0338",
+ "ncap": "\u2A43",
+ "ncaron": "\u0148",
+ "ncedil": "\u0146",
+ "ncong": "\u2247",
+ "ncongdot": "\u2A6D\u0338",
+ "ncup": "\u2A42",
+ "ncy": "\u043D",
+ "ndash": "\u2013",
+ "ne": "\u2260",
+ "neArr": "\u21D7",
+ "nearhk": "\u2924",
+ "nearr": "\u2197",
+ "nearrow": "\u2197",
+ "nedot": "\u2250\u0338",
+ "nequiv": "\u2262",
+ "nesear": "\u2928",
+ "nesim": "\u2242\u0338",
+ "nexist": "\u2204",
+ "nexists": "\u2204",
+ "nfr": "\U0001D52B",
+ "ngE": "\u2267\u0338",
+ "nge": "\u2271",
+ "ngeq": "\u2271",
+ "ngeqq": "\u2267\u0338",
+ "ngeqslant": "\u2A7E\u0338",
+ "nges": "\u2A7E\u0338",
+ "ngsim": "\u2275",
+ "ngt": "\u226F",
+ "ngtr": "\u226F",
+ "nhArr": "\u21CE",
+ "nharr": "\u21AE",
+ "nhpar": "\u2AF2",
+ "ni": "\u220B",
+ "nis": "\u22FC",
+ "nisd": "\u22FA",
+ "niv": "\u220B",
+ "njcy": "\u045A",
+ "nlArr": "\u21CD",
+ "nlE": "\u2266\u0338",
+ "nlarr": "\u219A",
+ "nldr": "\u2025",
+ "nle": "\u2270",
+ "nleftarrow": "\u219A",
+ "nleftrightarrow": "\u21AE",
+ "nleq": "\u2270",
+ "nleqq": "\u2266\u0338",
+ "nleqslant": "\u2A7D\u0338",
+ "nles": "\u2A7D\u0338",
+ "nless": "\u226E",
+ "nlsim": "\u2274",
+ "nlt": "\u226E",
+ "nltri": "\u22EA",
+ "nltrie": "\u22EC",
+ "nmid": "\u2224",
+ "nopf": "\U0001D55F",
+ "not": "\u00AC",
+ "notin": "\u2209",
+ "notinE": "\u22F9\u0338",
+ "notindot": "\u22F5\u0338",
+ "notinva": "\u2209",
+ "notinvb": "\u22F7",
+ "notinvc": "\u22F6",
+ "notni": "\u220C",
+ "notniva": "\u220C",
+ "notnivb": "\u22FE",
+ "notnivc": "\u22FD",
+ "npar": "\u2226",
+ "nparallel": "\u2226",
+ "nparsl": "\u2AFD\u20E5",
+ "npart": "\u2202\u0338",
+ "npolint": "\u2A14",
+ "npr": "\u2280",
+ "nprcue": "\u22E0",
+ "npre": "\u2AAF\u0338",
+ "nprec": "\u2280",
+ "npreceq": "\u2AAF\u0338",
+ "nrArr": "\u21CF",
+ "nrarr": "\u219B",
+ "nrarrc": "\u2933\u0338",
+ "nrarrw": "\u219D\u0338",
+ "nrightarrow": "\u219B",
+ "nrtri": "\u22EB",
+ "nrtrie": "\u22ED",
+ "nsc": "\u2281",
+ "nsccue": "\u22E1",
+ "nsce": "\u2AB0\u0338",
+ "nscr": "\U0001D4C3",
+ "nshortmid": "\u2224",
+ "nshortparallel": "\u2226",
+ "nsim": "\u2241",
+ "nsime": "\u2244",
+ "nsimeq": "\u2244",
+ "nsmid": "\u2224",
+ "nspar": "\u2226",
+ "nsqsube": "\u22E2",
+ "nsqsupe": "\u22E3",
+ "nsub": "\u2284",
+ "nsubE": "\u2AC5\u0338",
+ "nsube": "\u2288",
+ "nsubset": "\u2282\u20D2",
+ "nsubseteq": "\u2288",
+ "nsubseteqq": "\u2AC5\u0338",
+ "nsucc": "\u2281",
+ "nsucceq": "\u2AB0\u0338",
+ "nsup": "\u2285",
+ "nsupE": "\u2AC6\u0338",
+ "nsupe": "\u2289",
+ "nsupset": "\u2283\u20D2",
+ "nsupseteq": "\u2289",
+ "nsupseteqq": "\u2AC6\u0338",
+ "ntgl": "\u2279",
+ "ntilde": "\u00F1",
+ "ntlg": "\u2278",
+ "ntriangleleft": "\u22EA",
+ "ntrianglelefteq": "\u22EC",
+ "ntriangleright": "\u22EB",
+ "ntrianglerighteq": "\u22ED",
+ "nu": "\u03BD",
+ "num": "\u0023",
+ "numero": "\u2116",
+ "numsp": "\u2007",
+ "nvDash": "\u22AD",
+ "nvHarr": "\u2904",
+ "nvap": "\u224D\u20D2",
+ "nvdash": "\u22AC",
+ "nvge": "\u2265\u20D2",
+ "nvgt": "\u003E\u20D2",
+ "nvinfin": "\u29DE",
+ "nvlArr": "\u2902",
+ "nvle": "\u2264\u20D2",
+ "nvlt": "\u003C\u20D2",
+ "nvltrie": "\u22B4\u20D2",
+ "nvrArr": "\u2903",
+ "nvrtrie": "\u22B5\u20D2",
+ "nvsim": "\u223C\u20D2",
+ "nwArr": "\u21D6",
+ "nwarhk": "\u2923",
+ "nwarr": "\u2196",
+ "nwarrow": "\u2196",
+ "nwnear": "\u2927",
+ "oS": "\u24C8",
+ "oacute": "\u00F3",
+ "oast": "\u229B",
+ "ocir": "\u229A",
+ "ocirc": "\u00F4",
+ "ocy": "\u043E",
+ "odash": "\u229D",
+ "odblac": "\u0151",
+ "odiv": "\u2A38",
+ "odot": "\u2299",
+ "odsold": "\u29BC",
+ "oelig": "\u0153",
+ "ofcir": "\u29BF",
+ "ofr": "\U0001D52C",
+ "ogon": "\u02DB",
+ "ograve": "\u00F2",
+ "ogt": "\u29C1",
+ "ohbar": "\u29B5",
+ "ohm": "\u03A9",
+ "oint": "\u222E",
+ "olarr": "\u21BA",
+ "olcir": "\u29BE",
+ "olcross": "\u29BB",
+ "oline": "\u203E",
+ "olt": "\u29C0",
+ "omacr": "\u014D",
+ "omega": "\u03C9",
+ "omicron": "\u03BF",
+ "omid": "\u29B6",
+ "ominus": "\u2296",
+ "oopf": "\U0001D560",
+ "opar": "\u29B7",
+ "operp": "\u29B9",
+ "oplus": "\u2295",
+ "or": "\u2228",
+ "orarr": "\u21BB",
+ "ord": "\u2A5D",
+ "order": "\u2134",
+ "orderof": "\u2134",
+ "ordf": "\u00AA",
+ "ordm": "\u00BA",
+ "origof": "\u22B6",
+ "oror": "\u2A56",
+ "orslope": "\u2A57",
+ "orv": "\u2A5B",
+ "oscr": "\u2134",
+ "oslash": "\u00F8",
+ "osol": "\u2298",
+ "otilde": "\u00F5",
+ "otimes": "\u2297",
+ "otimesas": "\u2A36",
+ "ouml": "\u00F6",
+ "ovbar": "\u233D",
+ "par": "\u2225",
+ "para": "\u00B6",
+ "parallel": "\u2225",
+ "parsim": "\u2AF3",
+ "parsl": "\u2AFD",
+ "part": "\u2202",
+ "pcy": "\u043F",
+ "percnt": "\u0025",
+ "period": "\u002E",
+ "permil": "\u2030",
+ "perp": "\u22A5",
+ "pertenk": "\u2031",
+ "pfr": "\U0001D52D",
+ "phi": "\u03C6",
+ "phiv": "\u03D5",
+ "phmmat": "\u2133",
+ "phone": "\u260E",
+ "pi": "\u03C0",
+ "pitchfork": "\u22D4",
+ "piv": "\u03D6",
+ "planck": "\u210F",
+ "planckh": "\u210E",
+ "plankv": "\u210F",
+ "plus": "\u002B",
+ "plusacir": "\u2A23",
+ "plusb": "\u229E",
+ "pluscir": "\u2A22",
+ "plusdo": "\u2214",
+ "plusdu": "\u2A25",
+ "pluse": "\u2A72",
+ "plusmn": "\u00B1",
+ "plussim": "\u2A26",
+ "plustwo": "\u2A27",
+ "pm": "\u00B1",
+ "pointint": "\u2A15",
+ "popf": "\U0001D561",
+ "pound": "\u00A3",
+ "pr": "\u227A",
+ "prE": "\u2AB3",
+ "prap": "\u2AB7",
+ "prcue": "\u227C",
+ "pre": "\u2AAF",
+ "prec": "\u227A",
+ "precapprox": "\u2AB7",
+ "preccurlyeq": "\u227C",
+ "preceq": "\u2AAF",
+ "precnapprox": "\u2AB9",
+ "precneqq": "\u2AB5",
+ "precnsim": "\u22E8",
+ "precsim": "\u227E",
+ "prime": "\u2032",
+ "primes": "\u2119",
+ "prnE": "\u2AB5",
+ "prnap": "\u2AB9",
+ "prnsim": "\u22E8",
+ "prod": "\u220F",
+ "profalar": "\u232E",
+ "profline": "\u2312",
+ "profsurf": "\u2313",
+ "prop": "\u221D",
+ "propto": "\u221D",
+ "prsim": "\u227E",
+ "prurel": "\u22B0",
+ "pscr": "\U0001D4C5",
+ "psi": "\u03C8",
+ "puncsp": "\u2008",
+ "qfr": "\U0001D52E",
+ "qint": "\u2A0C",
+ "qopf": "\U0001D562",
+ "qprime": "\u2057",
+ "qscr": "\U0001D4C6",
+ "quaternions": "\u210D",
+ "quatint": "\u2A16",
+ "quest": "\u003F",
+ "questeq": "\u225F",
+ "quot": "\u0022",
+ "rAarr": "\u21DB",
+ "rArr": "\u21D2",
+ "rAtail": "\u291C",
+ "rBarr": "\u290F",
+ "rHar": "\u2964",
+ "race": "\u223D\u0331",
+ "racute": "\u0155",
+ "radic": "\u221A",
+ "raemptyv": "\u29B3",
+ "rang": "\u27E9",
+ "rangd": "\u2992",
+ "range": "\u29A5",
+ "rangle": "\u27E9",
+ "raquo": "\u00BB",
+ "rarr": "\u2192",
+ "rarrap": "\u2975",
+ "rarrb": "\u21E5",
+ "rarrbfs": "\u2920",
+ "rarrc": "\u2933",
+ "rarrfs": "\u291E",
+ "rarrhk": "\u21AA",
+ "rarrlp": "\u21AC",
+ "rarrpl": "\u2945",
+ "rarrsim": "\u2974",
+ "rarrtl": "\u21A3",
+ "rarrw": "\u219D",
+ "ratail": "\u291A",
+ "ratio": "\u2236",
+ "rationals": "\u211A",
+ "rbarr": "\u290D",
+ "rbbrk": "\u2773",
+ "rbrace": "\u007D",
+ "rbrack": "\u005D",
+ "rbrke": "\u298C",
+ "rbrksld": "\u298E",
+ "rbrkslu": "\u2990",
+ "rcaron": "\u0159",
+ "rcedil": "\u0157",
+ "rceil": "\u2309",
+ "rcub": "\u007D",
+ "rcy": "\u0440",
+ "rdca": "\u2937",
+ "rdldhar": "\u2969",
+ "rdquo": "\u201D",
+ "rdquor": "\u201D",
+ "rdsh": "\u21B3",
+ "real": "\u211C",
+ "realine": "\u211B",
+ "realpart": "\u211C",
+ "reals": "\u211D",
+ "rect": "\u25AD",
+ "reg": "\u00AE",
+ "rfisht": "\u297D",
+ "rfloor": "\u230B",
+ "rfr": "\U0001D52F",
+ "rhard": "\u21C1",
+ "rharu": "\u21C0",
+ "rharul": "\u296C",
+ "rho": "\u03C1",
+ "rhov": "\u03F1",
+ "rightarrow": "\u2192",
+ "rightarrowtail": "\u21A3",
+ "rightharpoondown": "\u21C1",
+ "rightharpoonup": "\u21C0",
+ "rightleftarrows": "\u21C4",
+ "rightleftharpoons": "\u21CC",
+ "rightrightarrows": "\u21C9",
+ "rightsquigarrow": "\u219D",
+ "rightthreetimes": "\u22CC",
+ "ring": "\u02DA",
+ "risingdotseq": "\u2253",
+ "rlarr": "\u21C4",
+ "rlhar": "\u21CC",
+ "rlm": "\u200F",
+ "rmoust": "\u23B1",
+ "rmoustache": "\u23B1",
+ "rnmid": "\u2AEE",
+ "roang": "\u27ED",
+ "roarr": "\u21FE",
+ "robrk": "\u27E7",
+ "ropar": "\u2986",
+ "ropf": "\U0001D563",
+ "roplus": "\u2A2E",
+ "rotimes": "\u2A35",
+ "rpar": "\u0029",
+ "rpargt": "\u2994",
+ "rppolint": "\u2A12",
+ "rrarr": "\u21C9",
+ "rsaquo": "\u203A",
+ "rscr": "\U0001D4C7",
+ "rsh": "\u21B1",
+ "rsqb": "\u005D",
+ "rsquo": "\u2019",
+ "rsquor": "\u2019",
+ "rthree": "\u22CC",
+ "rtimes": "\u22CA",
+ "rtri": "\u25B9",
+ "rtrie": "\u22B5",
+ "rtrif": "\u25B8",
+ "rtriltri": "\u29CE",
+ "ruluhar": "\u2968",
+ "rx": "\u211E",
+ "sacute": "\u015B",
+ "sbquo": "\u201A",
+ "sc": "\u227B",
+ "scE": "\u2AB4",
+ "scap": "\u2AB8",
+ "scaron": "\u0161",
+ "sccue": "\u227D",
+ "sce": "\u2AB0",
+ "scedil": "\u015F",
+ "scirc": "\u015D",
+ "scnE": "\u2AB6",
+ "scnap": "\u2ABA",
+ "scnsim": "\u22E9",
+ "scpolint": "\u2A13",
+ "scsim": "\u227F",
+ "scy": "\u0441",
+ "sdot": "\u22C5",
+ "sdotb": "\u22A1",
+ "sdote": "\u2A66",
+ "seArr": "\u21D8",
+ "searhk": "\u2925",
+ "searr": "\u2198",
+ "searrow": "\u2198",
+ "sect": "\u00A7",
+ "semi": "\u003B",
+ "seswar": "\u2929",
+ "setminus": "\u2216",
+ "setmn": "\u2216",
+ "sext": "\u2736",
+ "sfr": "\U0001D530",
+ "sfrown": "\u2322",
+ "sharp": "\u266F",
+ "shchcy": "\u0449",
+ "shcy": "\u0448",
+ "shortmid": "\u2223",
+ "shortparallel": "\u2225",
+ "shy": "\u00AD",
+ "sigma": "\u03C3",
+ "sigmaf": "\u03C2",
+ "sigmav": "\u03C2",
+ "sim": "\u223C",
+ "simdot": "\u2A6A",
+ "sime": "\u2243",
+ "simeq": "\u2243",
+ "simg": "\u2A9E",
+ "simgE": "\u2AA0",
+ "siml": "\u2A9D",
+ "simlE": "\u2A9F",
+ "simne": "\u2246",
+ "simplus": "\u2A24",
+ "simrarr": "\u2972",
+ "slarr": "\u2190",
+ "smallsetminus": "\u2216",
+ "smashp": "\u2A33",
+ "smeparsl": "\u29E4",
+ "smid": "\u2223",
+ "smile": "\u2323",
+ "smt": "\u2AAA",
+ "smte": "\u2AAC",
+ "smtes": "\u2AAC\uFE00",
+ "softcy": "\u044C",
+ "sol": "\u002F",
+ "solb": "\u29C4",
+ "solbar": "\u233F",
+ "sopf": "\U0001D564",
+ "spades": "\u2660",
+ "spadesuit": "\u2660",
+ "spar": "\u2225",
+ "sqcap": "\u2293",
+ "sqcaps": "\u2293\uFE00",
+ "sqcup": "\u2294",
+ "sqcups": "\u2294\uFE00",
+ "sqsub": "\u228F",
+ "sqsube": "\u2291",
+ "sqsubset": "\u228F",
+ "sqsubseteq": "\u2291",
+ "sqsup": "\u2290",
+ "sqsupe": "\u2292",
+ "sqsupset": "\u2290",
+ "sqsupseteq": "\u2292",
+ "squ": "\u25A1",
+ "square": "\u25A1",
+ "squarf": "\u25AA",
+ "squf": "\u25AA",
+ "srarr": "\u2192",
+ "sscr": "\U0001D4C8",
+ "ssetmn": "\u2216",
+ "ssmile": "\u2323",
+ "sstarf": "\u22C6",
+ "star": "\u2606",
+ "starf": "\u2605",
+ "straightepsilon": "\u03F5",
+ "straightphi": "\u03D5",
+ "strns": "\u00AF",
+ "sub": "\u2282",
+ "subE": "\u2AC5",
+ "subdot": "\u2ABD",
+ "sube": "\u2286",
+ "subedot": "\u2AC3",
+ "submult": "\u2AC1",
+ "subnE": "\u2ACB",
+ "subne": "\u228A",
+ "subplus": "\u2ABF",
+ "subrarr": "\u2979",
+ "subset": "\u2282",
+ "subseteq": "\u2286",
+ "subseteqq": "\u2AC5",
+ "subsetneq": "\u228A",
+ "subsetneqq": "\u2ACB",
+ "subsim": "\u2AC7",
+ "subsub": "\u2AD5",
+ "subsup": "\u2AD3",
+ "succ": "\u227B",
+ "succapprox": "\u2AB8",
+ "succcurlyeq": "\u227D",
+ "succeq": "\u2AB0",
+ "succnapprox": "\u2ABA",
+ "succneqq": "\u2AB6",
+ "succnsim": "\u22E9",
+ "succsim": "\u227F",
+ "sum": "\u2211",
+ "sung": "\u266A",
+ "sup": "\u2283",
+ "sup1": "\u00B9",
+ "sup2": "\u00B2",
+ "sup3": "\u00B3",
+ "supE": "\u2AC6",
+ "supdot": "\u2ABE",
+ "supdsub": "\u2AD8",
+ "supe": "\u2287",
+ "supedot": "\u2AC4",
+ "suphsol": "\u27C9",
+ "suphsub": "\u2AD7",
+ "suplarr": "\u297B",
+ "supmult": "\u2AC2",
+ "supnE": "\u2ACC",
+ "supne": "\u228B",
+ "supplus": "\u2AC0",
+ "supset": "\u2283",
+ "supseteq": "\u2287",
+ "supseteqq": "\u2AC6",
+ "supsetneq": "\u228B",
+ "supsetneqq": "\u2ACC",
+ "supsim": "\u2AC8",
+ "supsub": "\u2AD4",
+ "supsup": "\u2AD6",
+ "swArr": "\u21D9",
+ "swarhk": "\u2926",
+ "swarr": "\u2199",
+ "swarrow": "\u2199",
+ "swnwar": "\u292A",
+ "szlig": "\u00DF",
+ "target": "\u2316",
+ "tau": "\u03C4",
+ "tbrk": "\u23B4",
+ "tcaron": "\u0165",
+ "tcedil": "\u0163",
+ "tcy": "\u0442",
+ "tdot": "\u20DB",
+ "telrec": "\u2315",
+ "tfr": "\U0001D531",
+ "there4": "\u2234",
+ "therefore": "\u2234",
+ "theta": "\u03B8",
+ "thetasym": "\u03D1",
+ "thetav": "\u03D1",
+ "thickapprox": "\u2248",
+ "thicksim": "\u223C",
+ "thinsp": "\u2009",
+ "thkap": "\u2248",
+ "thksim": "\u223C",
+ "thorn": "\u00FE",
+ "tilde": "\u02DC",
+ "times": "\u00D7",
+ "timesb": "\u22A0",
+ "timesbar": "\u2A31",
+ "timesd": "\u2A30",
+ "tint": "\u222D",
+ "toea": "\u2928",
+ "top": "\u22A4",
+ "topbot": "\u2336",
+ "topcir": "\u2AF1",
+ "topf": "\U0001D565",
+ "topfork": "\u2ADA",
+ "tosa": "\u2929",
+ "tprime": "\u2034",
+ "trade": "\u2122",
+ "triangle": "\u25B5",
+ "triangledown": "\u25BF",
+ "triangleleft": "\u25C3",
+ "trianglelefteq": "\u22B4",
+ "triangleq": "\u225C",
+ "triangleright": "\u25B9",
+ "trianglerighteq": "\u22B5",
+ "tridot": "\u25EC",
+ "trie": "\u225C",
+ "triminus": "\u2A3A",
+ "triplus": "\u2A39",
+ "trisb": "\u29CD",
+ "tritime": "\u2A3B",
+ "trpezium": "\u23E2",
+ "tscr": "\U0001D4C9",
+ "tscy": "\u0446",
+ "tshcy": "\u045B",
+ "tstrok": "\u0167",
+ "twixt": "\u226C",
+ "twoheadleftarrow": "\u219E",
+ "twoheadrightarrow": "\u21A0",
+ "uArr": "\u21D1",
+ "uHar": "\u2963",
+ "uacute": "\u00FA",
+ "uarr": "\u2191",
+ "ubrcy": "\u045E",
+ "ubreve": "\u016D",
+ "ucirc": "\u00FB",
+ "ucy": "\u0443",
+ "udarr": "\u21C5",
+ "udblac": "\u0171",
+ "udhar": "\u296E",
+ "ufisht": "\u297E",
+ "ufr": "\U0001D532",
+ "ugrave": "\u00F9",
+ "uharl": "\u21BF",
+ "uharr": "\u21BE",
+ "uhblk": "\u2580",
+ "ulcorn": "\u231C",
+ "ulcorner": "\u231C",
+ "ulcrop": "\u230F",
+ "ultri": "\u25F8",
+ "umacr": "\u016B",
+ "uml": "\u00A8",
+ "uogon": "\u0173",
+ "uopf": "\U0001D566",
+ "uparrow": "\u2191",
+ "updownarrow": "\u2195",
+ "upharpoonleft": "\u21BF",
+ "upharpoonright": "\u21BE",
+ "uplus": "\u228E",
+ "upsi": "\u03C5",
+ "upsih": "\u03D2",
+ "upsilon": "\u03C5",
+ "upuparrows": "\u21C8",
+ "urcorn": "\u231D",
+ "urcorner": "\u231D",
+ "urcrop": "\u230E",
+ "uring": "\u016F",
+ "urtri": "\u25F9",
+ "uscr": "\U0001D4CA",
+ "utdot": "\u22F0",
+ "utilde": "\u0169",
+ "utri": "\u25B5",
+ "utrif": "\u25B4",
+ "uuarr": "\u21C8",
+ "uuml": "\u00FC",
+ "uwangle": "\u29A7",
+ "vArr": "\u21D5",
+ "vBar": "\u2AE8",
+ "vBarv": "\u2AE9",
+ "vDash": "\u22A8",
+ "vangrt": "\u299C",
+ "varepsilon": "\u03F5",
+ "varkappa": "\u03F0",
+ "varnothing": "\u2205",
+ "varphi": "\u03D5",
+ "varpi": "\u03D6",
+ "varpropto": "\u221D",
+ "varr": "\u2195",
+ "varrho": "\u03F1",
+ "varsigma": "\u03C2",
+ "varsubsetneq": "\u228A\uFE00",
+ "varsubsetneqq": "\u2ACB\uFE00",
+ "varsupsetneq": "\u228B\uFE00",
+ "varsupsetneqq": "\u2ACC\uFE00",
+ "vartheta": "\u03D1",
+ "vartriangleleft": "\u22B2",
+ "vartriangleright": "\u22B3",
+ "vcy": "\u0432",
+ "vdash": "\u22A2",
+ "vee": "\u2228",
+ "veebar": "\u22BB",
+ "veeeq": "\u225A",
+ "vellip": "\u22EE",
+ "verbar": "\u007C",
+ "vert": "\u007C",
+ "vfr": "\U0001D533",
+ "vltri": "\u22B2",
+ "vnsub": "\u2282\u20D2",
+ "vnsup": "\u2283\u20D2",
+ "vopf": "\U0001D567",
+ "vprop": "\u221D",
+ "vrtri": "\u22B3",
+ "vscr": "\U0001D4CB",
+ "vsubnE": "\u2ACB\uFE00",
+ "vsubne": "\u228A\uFE00",
+ "vsupnE": "\u2ACC\uFE00",
+ "vsupne": "\u228B\uFE00",
+ "vzigzag": "\u299A",
+ "wcirc": "\u0175",
+ "wedbar": "\u2A5F",
+ "wedge": "\u2227",
+ "wedgeq": "\u2259",
+ "weierp": "\u2118",
+ "wfr": "\U0001D534",
+ "wopf": "\U0001D568",
+ "wp": "\u2118",
+ "wr": "\u2240",
+ "wreath": "\u2240",
+ "wscr": "\U0001D4CC",
+ "xcap": "\u22C2",
+ "xcirc": "\u25EF",
+ "xcup": "\u22C3",
+ "xdtri": "\u25BD",
+ "xfr": "\U0001D535",
+ "xhArr": "\u27FA",
+ "xharr": "\u27F7",
+ "xi": "\u03BE",
+ "xlArr": "\u27F8",
+ "xlarr": "\u27F5",
+ "xmap": "\u27FC",
+ "xnis": "\u22FB",
+ "xodot": "\u2A00",
+ "xopf": "\U0001D569",
+ "xoplus": "\u2A01",
+ "xotime": "\u2A02",
+ "xrArr": "\u27F9",
+ "xrarr": "\u27F6",
+ "xscr": "\U0001D4CD",
+ "xsqcup": "\u2A06",
+ "xuplus": "\u2A04",
+ "xutri": "\u25B3",
+ "xvee": "\u22C1",
+ "xwedge": "\u22C0",
+ "yacute": "\u00FD",
+ "yacy": "\u044F",
+ "ycirc": "\u0177",
+ "ycy": "\u044B",
+ "yen": "\u00A5",
+ "yfr": "\U0001D536",
+ "yicy": "\u0457",
+ "yopf": "\U0001D56A",
+ "yscr": "\U0001D4CE",
+ "yucy": "\u044E",
+ "yuml": "\u00FF",
+ "zacute": "\u017A",
+ "zcaron": "\u017E",
+ "zcy": "\u0437",
+ "zdot": "\u017C",
+ "zeetrf": "\u2128",
+ "zeta": "\u03B6",
+ "zfr": "\U0001D537",
+ "zhcy": "\u0436",
+ "zigrarr": "\u21DD",
+ "zopf": "\U0001D56B",
+ "zscr": "\U0001D4CF",
+ "zwj": "\u200D",
+ "zwnj": "\u200C",
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/indented_code.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/indented_code.go
new file mode 100644
index 00000000..a89ee6c7
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/indented_code.go
@@ -0,0 +1,98 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type IndentedCodeLine struct {
+ Indentation int
+ Range Range
+}
+
+type IndentedCode struct {
+ blockBase
+ markdown string
+
+ RawCode []IndentedCodeLine
+}
+
+func (b *IndentedCode) Code() (result string) {
+ for _, code := range b.RawCode {
+ result += strings.Repeat(" ", code.Indentation) + b.markdown[code.Range.Position:code.Range.End]
+ }
+ return
+}
+
+func (b *IndentedCode) Continuation(indentation int, r Range) *continuation {
+ if indentation >= 4 {
+ return &continuation{
+ Indentation: indentation - 4,
+ Remaining: r,
+ }
+ }
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return &continuation{
+ Remaining: r,
+ }
+ }
+ return nil
+}
+
+func (b *IndentedCode) AddLine(indentation int, r Range) bool {
+ b.RawCode = append(b.RawCode, IndentedCodeLine{
+ Indentation: indentation,
+ Range: r,
+ })
+ return true
+}
+
+func (b *IndentedCode) Close() {
+ for {
+ last := b.RawCode[len(b.RawCode)-1]
+ s := b.markdown[last.Range.Position:last.Range.End]
+ if strings.TrimRight(s, "\r\n") == "" {
+ b.RawCode = b.RawCode[:len(b.RawCode)-1]
+ } else {
+ break
+ }
+ }
+}
+
+func (b *IndentedCode) AllowsBlockStarts() bool {
+ return false
+}
+
+func indentedCodeStart(markdown string, indentation int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ if len(unmatchedBlocks) > 0 {
+ if _, ok := unmatchedBlocks[len(unmatchedBlocks)-1].(*Paragraph); ok {
+ return nil
+ }
+ } else if len(matchedBlocks) > 0 {
+ if _, ok := matchedBlocks[len(matchedBlocks)-1].(*Paragraph); ok {
+ return nil
+ }
+ }
+
+ if indentation < 4 {
+ return nil
+ }
+
+ s := markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return nil
+ }
+
+ return []Block{
+ &IndentedCode{
+ markdown: markdown,
+ RawCode: []IndentedCodeLine{{
+ Indentation: indentation - 4,
+ Range: r,
+ }},
+ },
+ }
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inlines.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inlines.go
new file mode 100644
index 00000000..43dee3bd
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inlines.go
@@ -0,0 +1,663 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "container/list"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+type Inline interface {
+ IsInline() bool
+}
+
+type inlineBase struct{}
+
+func (inlineBase) IsInline() bool { return true }
+
+type Text struct {
+ inlineBase
+
+ Text string
+ Range Range
+}
+
+type CodeSpan struct {
+ inlineBase
+
+ Code string
+}
+
+type HardLineBreak struct {
+ inlineBase
+}
+
+type SoftLineBreak struct {
+ inlineBase
+}
+
+type InlineLinkOrImage struct {
+ inlineBase
+
+ Children []Inline
+
+ RawDestination Range
+
+ markdown string
+ rawTitle string
+}
+
+func (i *InlineLinkOrImage) Destination() string {
+ return Unescape(i.markdown[i.RawDestination.Position:i.RawDestination.End])
+}
+
+func (i *InlineLinkOrImage) Title() string {
+ return Unescape(i.rawTitle)
+}
+
+type InlineLink struct {
+ InlineLinkOrImage
+}
+
+type InlineImage struct {
+ InlineLinkOrImage
+}
+
+type ReferenceLinkOrImage struct {
+ inlineBase
+ *ReferenceDefinition
+
+ Children []Inline
+}
+
+type ReferenceLink struct {
+ ReferenceLinkOrImage
+}
+
+type ReferenceImage struct {
+ ReferenceLinkOrImage
+}
+
+type Autolink struct {
+ inlineBase
+
+ Children []Inline
+
+ RawDestination Range
+
+ markdown string
+}
+
+func (i *Autolink) Destination() string {
+ destination := Unescape(i.markdown[i.RawDestination.Position:i.RawDestination.End])
+
+ if strings.HasPrefix(destination, "www") {
+ destination = "http://" + destination
+ }
+
+ return destination
+}
+
+type delimiterType int
+
+const (
+ linkOpeningDelimiter delimiterType = iota
+ imageOpeningDelimiter
+)
+
+type delimiter struct {
+ Type delimiterType
+ IsInactive bool
+ TextNode int
+ Range Range
+}
+
+type inlineParser struct {
+ markdown string
+ ranges []Range
+ referenceDefinitions []*ReferenceDefinition
+
+ raw string
+ position int
+ inlines []Inline
+ delimiterStack *list.List
+}
+
+func newInlineParser(markdown string, ranges []Range, referenceDefinitions []*ReferenceDefinition) *inlineParser {
+ return &inlineParser{
+ markdown: markdown,
+ ranges: ranges,
+ referenceDefinitions: referenceDefinitions,
+ delimiterStack: list.New(),
+ }
+}
+
+func (p *inlineParser) parseBackticks() {
+ count := 1
+ for i := p.position + 1; i < len(p.raw) && p.raw[i] == '`'; i++ {
+ count++
+ }
+ opening := p.raw[p.position : p.position+count]
+ search := p.position + count
+ for search < len(p.raw) {
+ end := strings.Index(p.raw[search:], opening)
+ if end == -1 {
+ break
+ }
+ if search+end+count < len(p.raw) && p.raw[search+end+count] == '`' {
+ search += end + count
+ for search < len(p.raw) && p.raw[search] == '`' {
+ search++
+ }
+ continue
+ }
+ code := strings.Join(strings.Fields(p.raw[p.position+count:search+end]), " ")
+ p.position = search + end + count
+ p.inlines = append(p.inlines, &CodeSpan{
+ Code: code,
+ })
+ return
+ }
+ p.position += len(opening)
+ absPos := relativeToAbsolutePosition(p.ranges, p.position-len(opening))
+ p.inlines = append(p.inlines, &Text{
+ Text: opening,
+ Range: Range{absPos, absPos + len(opening)},
+ })
+}
+
+func (p *inlineParser) parseLineEnding() {
+ if p.position >= 1 && p.raw[p.position-1] == '\t' {
+ p.inlines = append(p.inlines, &HardLineBreak{})
+ } else if p.position >= 2 && p.raw[p.position-1] == ' ' && (p.raw[p.position-2] == '\t' || p.raw[p.position-1] == ' ') {
+ p.inlines = append(p.inlines, &HardLineBreak{})
+ } else {
+ p.inlines = append(p.inlines, &SoftLineBreak{})
+ }
+ p.position++
+ if p.position < len(p.raw) && p.raw[p.position] == '\n' {
+ p.position++
+ }
+}
+
+func (p *inlineParser) parseEscapeCharacter() {
+ if p.position+1 < len(p.raw) && isEscapableByte(p.raw[p.position+1]) {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position+1)
+ p.inlines = append(p.inlines, &Text{
+ Text: string(p.raw[p.position+1]),
+ Range: Range{absPos, absPos + len(string(p.raw[p.position+1]))},
+ })
+ p.position += 2
+ } else {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.inlines = append(p.inlines, &Text{
+ Text: `\`,
+ Range: Range{absPos, absPos + 1},
+ })
+ p.position++
+ }
+}
+
+func (p *inlineParser) parseText() {
+ if next := strings.IndexAny(p.raw[p.position:], "\r\n\\`&![]wW:"); next == -1 {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.inlines = append(p.inlines, &Text{
+ Text: strings.TrimRightFunc(p.raw[p.position:], isWhitespace),
+ Range: Range{absPos, absPos + len(p.raw[p.position:])},
+ })
+ p.position = len(p.raw)
+ } else {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ if p.raw[p.position+next] == '\r' || p.raw[p.position+next] == '\n' {
+ s := strings.TrimRightFunc(p.raw[p.position:p.position+next], isWhitespace)
+ p.inlines = append(p.inlines, &Text{
+ Text: s,
+ Range: Range{absPos, absPos + len(s)},
+ })
+ } else {
+ if next == 0 {
+ // Always read at least one character since 'w', 'W', and ':' may not actually match another
+ // type of node
+ next = 1
+ }
+
+ p.inlines = append(p.inlines, &Text{
+ Text: p.raw[p.position : p.position+next],
+ Range: Range{absPos, absPos + next},
+ })
+ }
+ p.position += next
+ }
+}
+
+func (p *inlineParser) parseLinkOrImageDelimiter() {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ if p.raw[p.position] == '[' {
+ p.inlines = append(p.inlines, &Text{
+ Text: "[",
+ Range: Range{absPos, absPos + 1},
+ })
+ p.delimiterStack.PushBack(&delimiter{
+ Type: linkOpeningDelimiter,
+ TextNode: len(p.inlines) - 1,
+ Range: Range{p.position, p.position + 1},
+ })
+ p.position++
+ } else if p.raw[p.position] == '!' && p.position+1 < len(p.raw) && p.raw[p.position+1] == '[' {
+ p.inlines = append(p.inlines, &Text{
+ Text: "![",
+ Range: Range{absPos, absPos + 2},
+ })
+ p.delimiterStack.PushBack(&delimiter{
+ Type: imageOpeningDelimiter,
+ TextNode: len(p.inlines) - 1,
+ Range: Range{p.position, p.position + 2},
+ })
+ p.position += 2
+ } else {
+ p.inlines = append(p.inlines, &Text{
+ Text: "!",
+ Range: Range{absPos, absPos + 1},
+ })
+ p.position++
+ }
+}
+
+func (p *inlineParser) peekAtInlineLinkDestinationAndTitle(position int, isImage bool) (destination, title Range, end int, ok bool) {
+ if position >= len(p.raw) || p.raw[position] != '(' {
+ return
+ }
+ position++
+
+ destinationStart := nextNonWhitespace(p.raw, position)
+ if destinationStart >= len(p.raw) {
+ return
+ } else if p.raw[destinationStart] == ')' {
+ return Range{destinationStart, destinationStart}, Range{destinationStart, destinationStart}, destinationStart + 1, true
+ }
+
+ destination, end, ok = parseLinkDestination(p.raw, destinationStart)
+ if !ok {
+ return
+ }
+ position = end
+
+ if isImage && position < len(p.raw) && isWhitespaceByte(p.raw[position]) {
+ dimensionsStart := nextNonWhitespace(p.raw, position)
+ if dimensionsStart >= len(p.raw) {
+ return
+ }
+
+ if p.raw[dimensionsStart] == '=' {
+ // Read optional image dimensions even if we don't use them
+ _, end, ok = parseImageDimensions(p.raw, dimensionsStart)
+ if !ok {
+ return
+ }
+
+ position = end
+ }
+ }
+
+ if position < len(p.raw) && isWhitespaceByte(p.raw[position]) {
+ titleStart := nextNonWhitespace(p.raw, position)
+ if titleStart >= len(p.raw) {
+ return
+ } else if p.raw[titleStart] == ')' {
+ return destination, Range{titleStart, titleStart}, titleStart + 1, true
+ }
+
+ if p.raw[titleStart] == '"' || p.raw[titleStart] == '\'' || p.raw[titleStart] == '(' {
+ title, end, ok = parseLinkTitle(p.raw, titleStart)
+ if !ok {
+ return
+ }
+ position = end
+ }
+ }
+
+ closingPosition := nextNonWhitespace(p.raw, position)
+ if closingPosition >= len(p.raw) || p.raw[closingPosition] != ')' {
+ return Range{}, Range{}, 0, false
+ }
+
+ return destination, title, closingPosition + 1, true
+}
+
+func (p *inlineParser) referenceDefinition(label string) *ReferenceDefinition {
+ clean := strings.Join(strings.Fields(label), " ")
+ for _, d := range p.referenceDefinitions {
+ if strings.EqualFold(clean, strings.Join(strings.Fields(d.Label()), " ")) {
+ return d
+ }
+ }
+ return nil
+}
+
+func (p *inlineParser) lookForLinkOrImage() {
+ for element := p.delimiterStack.Back(); element != nil; element = element.Prev() {
+ d := element.Value.(*delimiter)
+ if d.Type != imageOpeningDelimiter && d.Type != linkOpeningDelimiter {
+ continue
+ }
+ if d.IsInactive {
+ p.delimiterStack.Remove(element)
+ break
+ }
+
+ isImage := d.Type == imageOpeningDelimiter
+
+ var inline Inline
+
+ if destination, title, next, ok := p.peekAtInlineLinkDestinationAndTitle(p.position+1, isImage); ok {
+ destinationMarkdownPosition := relativeToAbsolutePosition(p.ranges, destination.Position)
+ linkOrImage := InlineLinkOrImage{
+ Children: append([]Inline(nil), p.inlines[d.TextNode+1:]...),
+ RawDestination: Range{destinationMarkdownPosition, destinationMarkdownPosition + destination.End - destination.Position},
+ markdown: p.markdown,
+ rawTitle: p.raw[title.Position:title.End],
+ }
+ if d.Type == imageOpeningDelimiter {
+ inline = &InlineImage{linkOrImage}
+ } else {
+ inline = &InlineLink{linkOrImage}
+ }
+ p.position = next
+ } else {
+ referenceLabel := ""
+ label, next, hasLinkLabel := parseLinkLabel(p.raw, p.position+1)
+ if hasLinkLabel && label.End > label.Position {
+ referenceLabel = p.raw[label.Position:label.End]
+ } else {
+ referenceLabel = p.raw[d.Range.End:p.position]
+ if !hasLinkLabel {
+ next = p.position + 1
+ }
+ }
+ if referenceLabel != "" {
+ if reference := p.referenceDefinition(referenceLabel); reference != nil {
+ linkOrImage := ReferenceLinkOrImage{
+ ReferenceDefinition: reference,
+ Children: append([]Inline(nil), p.inlines[d.TextNode+1:]...),
+ }
+ if d.Type == imageOpeningDelimiter {
+ inline = &ReferenceImage{linkOrImage}
+ } else {
+ inline = &ReferenceLink{linkOrImage}
+ }
+ p.position = next
+ }
+ }
+ }
+
+ if inline != nil {
+ if d.Type == imageOpeningDelimiter {
+ p.inlines = append(p.inlines[:d.TextNode], inline)
+ } else {
+ p.inlines = append(p.inlines[:d.TextNode], inline)
+ for inlineElement := element.Prev(); inlineElement != nil; inlineElement = inlineElement.Prev() {
+ if d := inlineElement.Value.(*delimiter); d.Type == linkOpeningDelimiter {
+ d.IsInactive = true
+ }
+ }
+ }
+ p.delimiterStack.Remove(element)
+ return
+ }
+ p.delimiterStack.Remove(element)
+ break
+ }
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.inlines = append(p.inlines, &Text{
+ Text: "]",
+ Range: Range{absPos, absPos + 1},
+ })
+ p.position++
+}
+
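+// CharacterReference resolves a numeric ("#65", "#x41") or named ("amp") HTML
+// character reference, returning the empty string if the reference is not
+// recognized. For example, CharacterReference("#65") resolves to "A".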
+func CharacterReference(ref string) string {
+ if ref == "" {
+ return ""
+ }
+ if ref[0] == '#' {
+ if len(ref) < 2 {
+ return ""
+ }
+ n := 0
+ if ref[1] == 'X' || ref[1] == 'x' {
+ if len(ref) < 3 {
+ return ""
+ }
+ for i := 2; i < len(ref); i++ {
+ if i > 9 {
+ return ""
+ }
+ d := ref[i]
+ switch {
+ case d >= '0' && d <= '9':
+ n = n*16 + int(d-'0')
+ case d >= 'a' && d <= 'f':
+ n = n*16 + 10 + int(d-'a')
+ case d >= 'A' && d <= 'F':
+ n = n*16 + 10 + int(d-'A')
+ default:
+ return ""
+ }
+ }
+ } else {
+ for i := 1; i < len(ref); i++ {
+ if i > 8 || ref[i] < '0' || ref[i] > '9' {
+ return ""
+ }
+ n = n*10 + int(ref[i]-'0')
+ }
+ }
+ c := rune(n)
+ if c == '\u0000' || !utf8.ValidRune(c) {
+ return string(unicode.ReplacementChar)
+ }
+ return string(c)
+ }
+ if entity, ok := htmlEntities[ref]; ok {
+ return entity
+ }
+ return ""
+}
+
+func (p *inlineParser) parseCharacterReference() {
+ absPos := relativeToAbsolutePosition(p.ranges, p.position)
+ p.position++
+ if semicolon := strings.IndexByte(p.raw[p.position:], ';'); semicolon == -1 {
+ p.inlines = append(p.inlines, &Text{
+ Text: "&",
+ Range: Range{absPos, absPos + 1},
+ })
+ } else if s := CharacterReference(p.raw[p.position : p.position+semicolon]); s != "" {
+ p.position += semicolon + 1
+ p.inlines = append(p.inlines, &Text{
+ Text: s,
+ Range: Range{absPos, absPos + len(s)},
+ })
+ } else {
+ p.inlines = append(p.inlines, &Text{
+ Text: "&",
+ Range: Range{absPos, absPos + 1},
+ })
+ }
+}
+
+func (p *inlineParser) parseAutolink(c rune) bool {
+ for element := p.delimiterStack.Back(); element != nil; element = element.Prev() {
+ d := element.Value.(*delimiter)
+ if !d.IsInactive {
+ return false
+ }
+ }
+
+ var link Range
+ if c == ':' {
+ var ok bool
+ link, ok = parseURLAutolink(p.raw, p.position)
+
+ if !ok {
+ return false
+ }
+
+ // Since the current position is at the colon, we have to rewind the parsing slightly so that
+ // we don't duplicate the URL scheme
+ rewind := strings.Index(p.raw[link.Position:link.End], ":")
+ if rewind != -1 {
+ lastInline := p.inlines[len(p.inlines)-1]
+ lastText, ok := lastInline.(*Text)
+
+ if !ok {
+ // This should never occur, since parseURLAutolink only returns a non-empty value
+ // when the previous text ends in a valid URL protocol, which means that the previous
+ // node is a Text node
+ return false
+ }
+
+ p.inlines = p.inlines[0 : len(p.inlines)-1]
+ p.inlines = append(p.inlines, &Text{
+ Text: lastText.Text[:len(lastText.Text)-rewind],
+ Range: Range{lastText.Range.Position, lastText.Range.End - rewind},
+ })
+ p.position -= rewind
+ }
+ } else if c == 'w' || c == 'W' {
+ var ok bool
+ link, ok = parseWWWAutolink(p.raw, p.position)
+
+ if !ok {
+ return false
+ }
+ }
+
+ linkMarkdownPosition := relativeToAbsolutePosition(p.ranges, link.Position)
+ linkRange := Range{linkMarkdownPosition, linkMarkdownPosition + link.End - link.Position}
+
+ p.inlines = append(p.inlines, &Autolink{
+ Children: []Inline{
+ &Text{
+ Text: p.raw[link.Position:link.End],
+ Range: linkRange,
+ },
+ },
+ RawDestination: linkRange,
+ markdown: p.markdown,
+ })
+ p.position += (link.End - link.Position)
+
+ return true
+}
+
+func (p *inlineParser) Parse() []Inline {
+ for _, r := range p.ranges {
+ p.raw += p.markdown[r.Position:r.End]
+ }
+
+ for p.position < len(p.raw) {
+ c, _ := utf8.DecodeRuneInString(p.raw[p.position:])
+
+ switch c {
+ case '\r', '\n':
+ p.parseLineEnding()
+ case '\\':
+ p.parseEscapeCharacter()
+ case '`':
+ p.parseBackticks()
+ case '&':
+ p.parseCharacterReference()
+ case '!', '[':
+ p.parseLinkOrImageDelimiter()
+ case ']':
+ p.lookForLinkOrImage()
+ case 'w', 'W', ':':
+ matched := p.parseAutolink(c)
+
+ if !matched {
+ p.parseText()
+ }
+ default:
+ p.parseText()
+ }
+ }
+
+ return p.inlines
+}
+
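+// ParseInlines parses the markdown covered by ranges into a slice of Inline nodes,
+// resolving reference links and images against referenceDefinitions.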
+func ParseInlines(markdown string, ranges []Range, referenceDefinitions []*ReferenceDefinition) (inlines []Inline) {
+ return newInlineParser(markdown, ranges, referenceDefinitions).Parse()
+}
+
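+// MergeInlineText merges runs of adjacent Text nodes whose source ranges are
+// contiguous into single Text nodes, leaving all other inlines untouched.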
+func MergeInlineText(inlines []Inline) []Inline {
+ ret := inlines[:0]
+ for i, v := range inlines {
+ // always add first node
+ if i == 0 {
+ ret = append(ret, v)
+ continue
+ }
+ // not a text node? nothing to merge
+ text, ok := v.(*Text)
+ if !ok {
+ ret = append(ret, v)
+ continue
+ }
+ // previous node is not a text node? nothing to merge
+ prevText, ok := ret[len(ret)-1].(*Text)
+ if !ok {
+ ret = append(ret, v)
+ continue
+ }
+ // previous node is not right before this one
+ if prevText.Range.End != text.Range.Position {
+ ret = append(ret, v)
+ continue
+ }
+ // we have two consecutive text nodes
+ ret[len(ret)-1] = &Text{
+ Text: prevText.Text + text.Text,
+ Range: Range{prevText.Range.Position, text.Range.End},
+ }
+ }
+ return ret
+}
+
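+// Unescape resolves backslash escapes and HTML character references in markdown
+// and returns the resulting plain text.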
+func Unescape(markdown string) string {
+ ret := ""
+
+ position := 0
+ for position < len(markdown) {
+ c, cSize := utf8.DecodeRuneInString(markdown[position:])
+
+ switch c {
+ case '\\':
+ if position+1 < len(markdown) && isEscapableByte(markdown[position+1]) {
+ ret += string(markdown[position+1])
+ position += 2
+ } else {
+ ret += `\`
+ position++
+ }
+ case '&':
+ position++
+ if semicolon := strings.IndexByte(markdown[position:], ';'); semicolon == -1 {
+ ret += "&"
+ } else if s := CharacterReference(markdown[position : position+semicolon]); s != "" {
+ position += semicolon + 1
+ ret += s
+ } else {
+ ret += "&"
+ }
+ default:
+ ret += string(c)
+ position += cSize
+ }
+ }
+
+ return ret
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inspect.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inspect.go
new file mode 100644
index 00000000..3c7f2d1c
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/inspect.go
@@ -0,0 +1,78 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+// Inspect traverses the markdown tree in depth-first order. If f returns true, Inspect invokes f
+// recursively for each child of the block or inline, followed by a call of f(nil).
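+//
+// As a rough, illustrative sketch (urls is assumed to be a pre-declared []string;
+// it is not part of this package), inline image destinations could be collected with:
+//
+//	Inspect(markdown, func(node interface{}) bool {
+//		if image, ok := node.(*InlineImage); ok {
+//			urls = append(urls, image.Destination())
+//		}
+//		return true
+//	})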
+func Inspect(markdown string, f func(interface{}) bool) {
+ document, referenceDefinitions := Parse(markdown)
+ InspectBlock(document, func(block Block) bool {
+ if !f(block) {
+ return false
+ }
+ switch v := block.(type) {
+ case *Paragraph:
+ for _, inline := range MergeInlineText(v.ParseInlines(referenceDefinitions)) {
+ InspectInline(inline, func(inline Inline) bool {
+ return f(inline)
+ })
+ }
+ }
+ return true
+ })
+}
+
+// InspectBlock traverses the blocks in depth-first order, starting with block. If f returns true,
+// InspectBlock invokes f recursively for each child of the block, followed by a call of f(nil).
+func InspectBlock(block Block, f func(Block) bool) {
+ if !f(block) {
+ return
+ }
+ switch v := block.(type) {
+ case *Document:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ case *List:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ case *ListItem:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ case *BlockQuote:
+ for _, child := range v.Children {
+ InspectBlock(child, f)
+ }
+ }
+ f(nil)
+}
+
+// InspectInline traverses the inlines in depth-first order, starting with inline. If f returns true,
+// InspectInline invokes f recursively for each child of the inline, followed by a call of f(nil).
+func InspectInline(inline Inline, f func(Inline) bool) {
+ if !f(inline) {
+ return
+ }
+ switch v := inline.(type) {
+ case *InlineImage:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ case *InlineLink:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ case *ReferenceImage:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ case *ReferenceLink:
+ for _, child := range v.Children {
+ InspectInline(child, f)
+ }
+ }
+ f(nil)
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/lines.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/lines.go
new file mode 100644
index 00000000..f59e5afe
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/lines.go
@@ -0,0 +1,32 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type Line struct {
+ Range
+}
+
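+// ParseLines splits markdown into lines, treating "\r", "\n", and "\r\n" as line
+// terminators. Each Line's Range includes its terminator; a trailing line without
+// one is included as well.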
+func ParseLines(markdown string) []Line {
+ lineStartPosition := 0
+ isAfterCarriageReturn := false
+ lines := make([]Line, 0, strings.Count(markdown, "\n"))
+ for position, r := range markdown {
+ if r == '\n' {
+ lines = append(lines, Line{Range{lineStartPosition, position + 1}})
+ lineStartPosition = position + 1
+ } else if isAfterCarriageReturn {
+ lines = append(lines, Line{Range{lineStartPosition, position}})
+ lineStartPosition = position
+ }
+ isAfterCarriageReturn = r == '\r'
+ }
+ if lineStartPosition < len(markdown) {
+ lines = append(lines, Line{Range{lineStartPosition, len(markdown)}})
+ }
+ return lines
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/links.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/links.go
new file mode 100644
index 00000000..6aa56f25
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/links.go
@@ -0,0 +1,184 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "unicode/utf8"
+)
+
+func parseLinkDestination(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) {
+ return
+ }
+
+ if markdown[position] == '<' {
+ isEscaped := false
+
+ for offset, c := range []byte(markdown[position+1:]) {
+ if isEscaped {
+ isEscaped = false
+ if isEscapableByte(c) {
+ continue
+ }
+ }
+
+ if c == '\\' {
+ isEscaped = true
+ } else if c == '<' {
+ break
+ } else if c == '>' {
+ return Range{position + 1, position + 1 + offset}, position + 1 + offset + 1, true
+ } else if isWhitespaceByte(c) {
+ break
+ }
+ }
+ }
+
+ openCount := 0
+ isEscaped := false
+ for offset, c := range []byte(markdown[position:]) {
+ if isEscaped {
+ isEscaped = false
+ if isEscapableByte(c) {
+ continue
+ }
+ }
+
+ switch c {
+ case '\\':
+ isEscaped = true
+ case '(':
+ openCount++
+ case ')':
+ if openCount < 1 {
+ return Range{position, position + offset}, position + offset, true
+ }
+ openCount--
+ default:
+ if isWhitespaceByte(c) {
+ return Range{position, position + offset}, position + offset, true
+ }
+ }
+ }
+ return Range{position, len(markdown)}, len(markdown), true
+}
+
+func parseLinkTitle(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) {
+ return
+ }
+
+ originalPosition := position
+
+ var closer byte
+ switch markdown[position] {
+ case '"', '\'':
+ closer = markdown[position]
+ case '(':
+ closer = ')'
+ default:
+ return
+ }
+ position++
+
+ for position < len(markdown) {
+ switch markdown[position] {
+ case '\\':
+ position++
+ if position < len(markdown) && isEscapableByte(markdown[position]) {
+ position++
+ }
+ case closer:
+ return Range{originalPosition + 1, position}, position + 1, true
+ default:
+ position++
+ }
+ }
+
+ return
+}
+
+func parseLinkLabel(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) || markdown[position] != '[' {
+ return
+ }
+
+ originalPosition := position
+ position++
+
+ for position < len(markdown) {
+ switch markdown[position] {
+ case '\\':
+ position++
+ if position < len(markdown) && isEscapableByte(markdown[position]) {
+ position++
+ }
+ case '[':
+ return
+ case ']':
+ if position-originalPosition >= 1000 && utf8.RuneCountInString(markdown[originalPosition:position]) >= 1000 {
+ return
+ }
+ return Range{originalPosition + 1, position}, position + 1, true
+ default:
+ position++
+ }
+ }
+
+ return
+}
+
+// As a non-standard feature, we allow image links to specify dimensions of the image by adding "=WIDTHxHEIGHT"
+// after the image destination but before the image title like ![alt](http://example.com/image.png =100x200 "title").
+// Both width and height are optional, but at least one of them must be specified.
+func parseImageDimensions(markdown string, position int) (raw Range, next int, ok bool) {
+ if position >= len(markdown) {
+ return
+ }
+
+ originalPosition := position
+
+ // Read =
+ position += 1
+ if position >= len(markdown) {
+ return
+ }
+
+ // Read width
+ hasWidth := false
+ for position < len(markdown)-1 && isNumericByte(markdown[position]) {
+ hasWidth = true
+ position += 1
+ }
+
+ // Look for early end of dimensions
+ if isWhitespaceByte(markdown[position]) || markdown[position] == ')' {
+ return Range{originalPosition, position - 1}, position, true
+ }
+
+ // Read the x
+ if (markdown[position] != 'x' && markdown[position] != 'X') || position == len(markdown)-1 {
+ return
+ }
+ position += 1
+
+ // Read height
+ hasHeight := false
+ for position < len(markdown)-1 && isNumericByte(markdown[position]) {
+ hasHeight = true
+ position += 1
+ }
+
+ // Make sure there are no trailing characters
+ if !isWhitespaceByte(markdown[position]) && markdown[position] != ')' {
+ return
+ }
+
+ if !hasWidth && !hasHeight {
+ // At least one of width or height is required
+ return
+ }
+
+ return Range{originalPosition, position - 1}, position, true
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/list.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/list.go
new file mode 100644
index 00000000..39039295
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/list.go
@@ -0,0 +1,220 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type ListItem struct {
+ blockBase
+ markdown string
+ hasTrailingBlankLine bool
+ hasBlankLineBetweenChildren bool
+
+ Indentation int
+ Children []Block
+}
+
+func (b *ListItem) Continuation(indentation int, r Range) *continuation {
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ if b.Children == nil {
+ return nil
+ }
+ return &continuation{
+ Remaining: r,
+ }
+ }
+ if indentation < b.Indentation {
+ return nil
+ }
+ return &continuation{
+ Indentation: indentation - b.Indentation,
+ Remaining: r,
+ }
+}
+
+func (b *ListItem) AddChild(openBlocks []Block) []Block {
+ b.Children = append(b.Children, openBlocks[0])
+ if b.hasTrailingBlankLine {
+ b.hasBlankLineBetweenChildren = true
+ }
+ b.hasTrailingBlankLine = false
+ return openBlocks
+}
+
+func (b *ListItem) AddLine(indentation int, r Range) bool {
+ isBlank := strings.TrimSpace(b.markdown[r.Position:r.End]) == ""
+ if isBlank {
+ b.hasTrailingBlankLine = true
+ }
+ return false
+}
+
+func (b *ListItem) HasTrailingBlankLine() bool {
+ return b.hasTrailingBlankLine || (len(b.Children) > 0 && b.Children[len(b.Children)-1].HasTrailingBlankLine())
+}
+
+func (b *ListItem) isLoose() bool {
+ if b.hasBlankLineBetweenChildren {
+ return true
+ }
+ for i, child := range b.Children {
+ if i < len(b.Children)-1 && child.HasTrailingBlankLine() {
+ return true
+ }
+ }
+ return false
+}
+
+type List struct {
+ blockBase
+ markdown string
+ hasTrailingBlankLine bool
+ hasBlankLineBetweenChildren bool
+
+ IsLoose bool
+ IsOrdered bool
+ OrderedStart int
+ BulletOrDelimiter byte
+ Children []*ListItem
+}
+
+func (b *List) Continuation(indentation int, r Range) *continuation {
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return &continuation{
+ Remaining: r,
+ }
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *List) AddChild(openBlocks []Block) []Block {
+ if item, ok := openBlocks[0].(*ListItem); ok {
+ b.Children = append(b.Children, item)
+ if b.hasTrailingBlankLine {
+ b.hasBlankLineBetweenChildren = true
+ }
+ b.hasTrailingBlankLine = false
+ return openBlocks
+ } else if list, ok := openBlocks[0].(*List); ok {
+ if len(list.Children) == 1 && list.IsOrdered == b.IsOrdered && list.BulletOrDelimiter == b.BulletOrDelimiter {
+ return b.AddChild(openBlocks[1:])
+ }
+ }
+ return nil
+}
+
+func (b *List) AddLine(indentation int, r Range) bool {
+ isBlank := strings.TrimSpace(b.markdown[r.Position:r.End]) == ""
+ if isBlank {
+ b.hasTrailingBlankLine = true
+ }
+ return false
+}
+
+func (b *List) HasTrailingBlankLine() bool {
+ return b.hasTrailingBlankLine || (len(b.Children) > 0 && b.Children[len(b.Children)-1].HasTrailingBlankLine())
+}
+
+func (b *List) isLoose() bool {
+ if b.hasBlankLineBetweenChildren {
+ return true
+ }
+ for i, child := range b.Children {
+ if child.isLoose() || (i < len(b.Children)-1 && child.HasTrailingBlankLine()) {
+ return true
+ }
+ }
+ return false
+}
+
+func (b *List) Close() {
+ b.IsLoose = b.isLoose()
+}
+
+func parseListMarker(markdown string, r Range) (success, isOrdered bool, orderedStart int, bulletOrDelimiter byte, markerWidth int, remaining Range) {
+ digits := 0
+ n := 0
+ for i := r.Position; i < r.End && markdown[i] >= '0' && markdown[i] <= '9'; i++ {
+ digits++
+ n = n*10 + int(markdown[i]-'0')
+ }
+ if digits > 0 {
+ if digits > 9 || r.Position+digits >= r.End {
+ return
+ }
+ next := markdown[r.Position+digits]
+ if next != '.' && next != ')' {
+ return
+ }
+ return true, true, n, next, digits + 1, Range{r.Position + digits + 1, r.End}
+ }
+ if r.Position >= r.End {
+ return
+ }
+ next := markdown[r.Position]
+ if next != '-' && next != '+' && next != '*' {
+ return
+ }
+ return true, false, 0, next, 1, Range{r.Position + 1, r.End}
+}
+
+func listStart(markdown string, indent int, r Range, matchedBlocks, unmatchedBlocks []Block) []Block {
+ afterList := false
+ if len(matchedBlocks) > 0 {
+ _, afterList = matchedBlocks[len(matchedBlocks)-1].(*List)
+ }
+ if !afterList && indent > 3 {
+ return nil
+ }
+
+ success, isOrdered, orderedStart, bulletOrDelimiter, markerWidth, remaining := parseListMarker(markdown, r)
+ if !success {
+ return nil
+ }
+
+ isBlank := strings.TrimSpace(markdown[remaining.Position:remaining.End]) == ""
+ if len(matchedBlocks) > 0 && len(unmatchedBlocks) == 0 {
+ if _, ok := matchedBlocks[len(matchedBlocks)-1].(*Paragraph); ok {
+ if isBlank || (isOrdered && orderedStart != 1) {
+ return nil
+ }
+ }
+ }
+
+ indentAfterMarker, indentBytesAfterMarker := countIndentation(markdown, remaining)
+ if !isBlank && indentAfterMarker < 1 {
+ return nil
+ }
+
+ remaining = Range{remaining.Position + indentBytesAfterMarker, remaining.End}
+ consumedIndentAfterMarker := indentAfterMarker
+ if isBlank || indentAfterMarker >= 5 {
+ consumedIndentAfterMarker = 1
+ }
+
+ listItem := &ListItem{
+ markdown: markdown,
+ Indentation: indent + markerWidth + consumedIndentAfterMarker,
+ }
+ list := &List{
+ markdown: markdown,
+ IsOrdered: isOrdered,
+ OrderedStart: orderedStart,
+ BulletOrDelimiter: bulletOrDelimiter,
+ Children: []*ListItem{listItem},
+ }
+ ret := []Block{list, listItem}
+ if descendants := blockStartOrParagraph(markdown, indentAfterMarker-consumedIndentAfterMarker, remaining, nil, nil); descendants != nil {
+ listItem.Children = append(listItem.Children, descendants[0])
+ ret = append(ret, descendants...)
+ }
+ return ret
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/markdown.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/markdown.go
new file mode 100644
index 00000000..5ccdad8c
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/markdown.go
@@ -0,0 +1,147 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+// This package implements a parser for the subset of the CommonMark spec necessary for us to do
+// server-side processing. It is not a full implementation and lacks many features. But it is
+// complete enough to efficiently and accurately allow us to do what we need to, such as rewriting
+// image URLs for proxying.
+package markdown
+
+import (
+ "strings"
+)
+
+func isEscapable(c rune) bool {
+ return c > ' ' && (c < '0' || (c > '9' && (c < 'A' || (c > 'Z' && (c < 'a' || (c > 'z' && c <= '~'))))))
+}
+
+func isEscapableByte(c byte) bool {
+ return isEscapable(rune(c))
+}
+
+func isWhitespace(c rune) bool {
+ switch c {
+ case ' ', '\t', '\n', '\u000b', '\u000c', '\r':
+ return true
+ }
+ return false
+}
+
+func isWhitespaceByte(c byte) bool {
+ return isWhitespace(rune(c))
+}
+
+func isNumeric(c rune) bool {
+ return c >= '0' && c <= '9'
+}
+
+func isNumericByte(c byte) bool {
+ return isNumeric(rune(c))
+}
+
+func isHex(c rune) bool {
+ return isNumeric(c) || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')
+}
+
+func isHexByte(c byte) bool {
+ return isHex(rune(c))
+}
+
+func isAlphanumeric(c rune) bool {
+ return isNumeric(c) || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
+}
+
+func isAlphanumericByte(c byte) bool {
+ return isAlphanumeric(rune(c))
+}
+
+func nextNonWhitespace(markdown string, position int) int {
+ for offset, c := range []byte(markdown[position:]) {
+ if !isWhitespaceByte(c) {
+ return position + offset
+ }
+ }
+ return len(markdown)
+}
+
+func nextLine(markdown string, position int) (linePosition int, skippedNonWhitespace bool) {
+ for i := position; i < len(markdown); i++ {
+ c := markdown[i]
+ if c == '\r' {
+ if i+1 < len(markdown) && markdown[i+1] == '\n' {
+ return i + 2, skippedNonWhitespace
+ }
+ return i + 1, skippedNonWhitespace
+ } else if c == '\n' {
+ return i + 1, skippedNonWhitespace
+ } else if !isWhitespaceByte(c) {
+ skippedNonWhitespace = true
+ }
+ }
+ return len(markdown), skippedNonWhitespace
+}
+
+func countIndentation(markdown string, r Range) (spaces, bytes int) {
+ for i := r.Position; i < r.End; i++ {
+ if markdown[i] == ' ' {
+ spaces++
+ bytes++
+ } else if markdown[i] == '\t' {
+ spaces += 4
+ bytes++
+ } else {
+ break
+ }
+ }
+ return
+}
+
+func trimLeftSpace(markdown string, r Range) Range {
+ s := markdown[r.Position:r.End]
+ trimmed := strings.TrimLeftFunc(s, isWhitespace)
+ return Range{r.Position, r.End - (len(s) - len(trimmed))}
+}
+
+func trimRightSpace(markdown string, r Range) Range {
+ s := markdown[r.Position:r.End]
+ trimmed := strings.TrimRightFunc(s, isWhitespace)
+ return Range{r.Position, r.End - (len(s) - len(trimmed))}
+}
+
+func relativeToAbsolutePosition(ranges []Range, position int) int {
+ rem := position
+ for _, r := range ranges {
+ l := r.End - r.Position
+ if rem < l {
+ return r.Position + rem
+ }
+ rem -= l
+ }
+ if len(ranges) == 0 {
+ return 0
+ }
+ return ranges[len(ranges)-1].End
+}
+
+func trimBytesFromRanges(ranges []Range, bytes int) (result []Range) {
+ rem := bytes
+ for _, r := range ranges {
+ if rem == 0 {
+ result = append(result, r)
+ continue
+ }
+ l := r.End - r.Position
+ if rem < l {
+ result = append(result, Range{r.Position + rem, r.End})
+ rem = 0
+ continue
+ }
+ rem -= l
+ }
+ return
+}
+
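+// Parse parses markdown into a Document along with any reference definitions found
+// in it.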
+func Parse(markdown string) (*Document, []*ReferenceDefinition) {
+ lines := ParseLines(markdown)
+ return ParseBlocks(markdown, lines)
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/paragraph.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/paragraph.go
new file mode 100644
index 00000000..aef01b5e
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/paragraph.go
@@ -0,0 +1,71 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+import (
+ "strings"
+)
+
+type Paragraph struct {
+ blockBase
+ markdown string
+
+ Text []Range
+ ReferenceDefinitions []*ReferenceDefinition
+}
+
+func (b *Paragraph) ParseInlines(referenceDefinitions []*ReferenceDefinition) []Inline {
+ return ParseInlines(b.markdown, b.Text, referenceDefinitions)
+}
+
+func (b *Paragraph) Continuation(indentation int, r Range) *continuation {
+ s := b.markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return nil
+ }
+ return &continuation{
+ Indentation: indentation,
+ Remaining: r,
+ }
+}
+
+func (b *Paragraph) Close() {
+ for {
+ for i := 0; i < len(b.Text); i++ {
+ b.Text[i] = trimLeftSpace(b.markdown, b.Text[i])
+ if b.Text[i].Position < b.Text[i].End {
+ break
+ }
+ }
+
+ if len(b.Text) == 0 || b.Text[0].Position < b.Text[0].End && b.markdown[b.Text[0].Position] != '[' {
+ break
+ }
+
+ definition, remaining := parseReferenceDefinition(b.markdown, b.Text)
+ if definition == nil {
+ break
+ }
+ b.ReferenceDefinitions = append(b.ReferenceDefinitions, definition)
+ b.Text = remaining
+ }
+
+ for i := len(b.Text) - 1; i >= 0; i-- {
+ b.Text[i] = trimRightSpace(b.markdown, b.Text[i])
+ if b.Text[i].Position < b.Text[i].End {
+ break
+ }
+ }
+}
+
+func newParagraph(markdown string, r Range) *Paragraph {
+ s := markdown[r.Position:r.End]
+ if strings.TrimSpace(s) == "" {
+ return nil
+ }
+ return &Paragraph{
+ markdown: markdown,
+ Text: []Range{r},
+ }
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/reference_definition.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/reference_definition.go
new file mode 100644
index 00000000..69e8ed94
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/markdown/reference_definition.go
@@ -0,0 +1,75 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package markdown
+
+type ReferenceDefinition struct {
+ RawDestination Range
+
+ markdown string
+ rawLabel string
+ rawTitle string
+}
+
+func (d *ReferenceDefinition) Destination() string {
+ return Unescape(d.markdown[d.RawDestination.Position:d.RawDestination.End])
+}
+
+func (d *ReferenceDefinition) Label() string {
+ return d.rawLabel
+}
+
+func (d *ReferenceDefinition) Title() string {
+ return Unescape(d.rawTitle)
+}
+
+func parseReferenceDefinition(markdown string, ranges []Range) (*ReferenceDefinition, []Range) {
+ raw := ""
+ for _, r := range ranges {
+ raw += markdown[r.Position:r.End]
+ }
+
+ label, next, ok := parseLinkLabel(raw, 0)
+ if !ok {
+ return nil, nil
+ }
+ position := next
+
+ if position >= len(raw) || raw[position] != ':' {
+ return nil, nil
+ }
+ position++
+
+ destination, next, ok := parseLinkDestination(raw, nextNonWhitespace(raw, position))
+ if !ok {
+ return nil, nil
+ }
+ position = next
+
+ absoluteDestination := relativeToAbsolutePosition(ranges, destination.Position)
+ ret := &ReferenceDefinition{
+ RawDestination: Range{absoluteDestination, absoluteDestination + destination.End - destination.Position},
+ markdown: markdown,
+ rawLabel: raw[label.Position:label.End],
+ }
+
+ if position < len(raw) && isWhitespaceByte(raw[position]) {
+ title, next, ok := parseLinkTitle(raw, nextNonWhitespace(raw, position))
+ if !ok {
+ if nextLine, skippedNonWhitespace := nextLine(raw, position); !skippedNonWhitespace {
+ return ret, trimBytesFromRanges(ranges, nextLine)
+ }
+ return nil, nil
+ }
+ if nextLine, skippedNonWhitespace := nextLine(raw, next); !skippedNonWhitespace {
+ ret.rawTitle = raw[title.Position:title.End]
+ return ret, trimBytesFromRanges(ranges, nextLine)
+ }
+ }
+
+ if nextLine, skippedNonWhitespace := nextLine(raw, position); !skippedNonWhitespace {
+ return ret, trimBytesFromRanges(ranges, nextLine)
+ }
+
+ return nil, nil
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/default.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/default.go
new file mode 100644
index 00000000..0567c016
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/default.go
@@ -0,0 +1,63 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "os"
+)
+
+// defaultLog manually encodes the log to STDERR, providing a basic, default logging implementation
+// before mlog is fully configured.
+func defaultLog(level Level, msg string, fields ...Field) {
+ mFields := make(map[string]string)
+ buf := &bytes.Buffer{}
+
+ for _, fld := range fields {
+ buf.Reset()
+ fld.ValueString(buf, shouldQuote)
+ mFields[fld.Key] = buf.String()
+ }
+
+ log := struct {
+ Level string `json:"level"`
+ Message string `json:"msg"`
+ Fields map[string]string `json:"fields,omitempty"`
+ }{
+ level.Name,
+ msg,
+ mFields,
+ }
+
+ if b, err := json.Marshal(log); err != nil {
+ fmt.Fprintf(os.Stderr, `{"level":"error","msg":"failed to encode log message"}%s`, "\n")
+ } else {
+ fmt.Fprintf(os.Stderr, "%s\n", b)
+ }
+}
+
+func defaultIsLevelEnabled(level Level) bool {
+ return true
+}
+
+func defaultCustomMultiLog(lvl []Level, msg string, fields ...Field) {
+ for _, level := range lvl {
+ defaultLog(level, msg, fields...)
+ }
+}
+
+// shouldQuote returns true if val contains any characters that require quotations.
+func shouldQuote(val string) bool {
+ for _, c := range val {
+ if !((c >= '0' && c <= '9') ||
+ (c >= 'a' && c <= 'z') ||
+ (c >= 'A' && c <= 'Z') ||
+ c == '-' || c == '.' || c == '_' || c == '/' || c == '@' || c == '^' || c == '+') {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/global.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/global.go
new file mode 100644
index 00000000..de346123
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/global.go
@@ -0,0 +1,132 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "sync"
+)
+
+var (
+ globalLogger *Logger
+ muxGlobalLogger sync.RWMutex
+)
+
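+// InitGlobalLogger sets the logger used by the package-level logging functions.
+// It is safe for concurrent use with those functions.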
+func InitGlobalLogger(logger *Logger) {
+ muxGlobalLogger.Lock()
+ defer muxGlobalLogger.Unlock()
+
+ globalLogger = logger
+}
+
+func getGlobalLogger() *Logger {
+ muxGlobalLogger.RLock()
+ defer muxGlobalLogger.RUnlock()
+
+ return globalLogger
+}
+
+// IsLevelEnabled returns true only if at least one log target is
+// configured to emit the specified log level. Use this check when
+// gathering the log info may be expensive.
+//
+// Note, transformations and serializations done via fields are already
+// lazily evaluated and don't require this check beforehand.
+func IsLevelEnabled(level Level) bool {
+ logger := getGlobalLogger()
+ if logger == nil {
+ return defaultIsLevelEnabled(level)
+ }
+ return logger.IsLevelEnabled(level)
+}
+
+// Log emits the log record for any targets configured for the specified level.
+func Log(level Level, msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(level, msg, fields...)
+ return
+ }
+ logger.Log(level, msg, fields...)
+}
+
+// LogM emits the log record for any targets configured for the specified levels.
+// Equivalent to calling `Log` once for each level.
+func LogM(levels []Level, msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultCustomMultiLog(levels, msg, fields...)
+ return
+ }
+ logger.LogM(levels, msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Trace` level.
+func Trace(msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(LvlTrace, msg, fields...)
+ return
+ }
+ logger.Trace(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Debug` level.
+func Debug(msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(LvlDebug, msg, fields...)
+ return
+ }
+ logger.Debug(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Info` level.
+func Info(msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(LvlInfo, msg, fields...)
+ return
+ }
+ logger.Info(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Warn` level.
+func Warn(msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(LvlWarn, msg, fields...)
+ return
+ }
+ logger.Warn(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Error` level.
+func Error(msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(LvlError, msg, fields...)
+ return
+ }
+ logger.Error(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Critical` level.
+// DEPRECATED: Either use Error or Fatal.
+func Critical(msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(LvlCritical, msg, fields...)
+ return
+ }
+ logger.Critical(msg, fields...)
+}
+
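+// Convenience method equivalent to calling `Log` with the `Fatal` level, followed
+// by a logger shutdown and `os.Exit(1)` when a global logger has been configured.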
+func Fatal(msg string, fields ...Field) {
+ logger := getGlobalLogger()
+ if logger == nil {
+ defaultLog(LvlFatal, msg, fields...)
+ return
+ }
+ logger.Fatal(msg, fields...)
+}
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/levels.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/levels.go
new file mode 100644
index 00000000..c0b30996
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/levels.go
@@ -0,0 +1,58 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import "github.com/mattermost/logr/v2"
+
+// Standard levels.
+var (
+ LvlPanic = logr.Panic // ID = 0
+ LvlFatal = logr.Fatal // ID = 1
+ LvlError = logr.Error // ID = 2
+ LvlWarn = logr.Warn // ID = 3
+ LvlInfo = logr.Info // ID = 4
+ LvlDebug = logr.Debug // ID = 5
+ LvlTrace = logr.Trace // ID = 6
+ StdAll = []Level{LvlPanic, LvlFatal, LvlError, LvlWarn, LvlInfo, LvlDebug, LvlTrace}
+ // non-standard "critical" level
+ LvlCritical = Level{ID: 7, Name: "critical"}
+ // used by redirected standard logger
+ LvlStdLog = Level{ID: 10, Name: "stdlog"}
+ // used only by the logger
+ LvlLogError = Level{ID: 11, Name: "logerror", Stacktrace: true}
+)
+
+// Register custom (discrete) levels here.
+// !!!!! Custom IDs must be between 20 and 32,768 !!!!!!
+var (
+ // used by the audit system
+ LvlAuditAPI = Level{ID: 100, Name: "audit-api"}
+ LvlAuditContent = Level{ID: 101, Name: "audit-content"}
+ LvlAuditPerms = Level{ID: 102, Name: "audit-permissions"}
+ LvlAuditCLI = Level{ID: 103, Name: "audit-cli"}
+
+ // used by the TCP log target
+ LvlTCPLogTarget = Level{ID: 120, Name: "TcpLogTarget"}
+
+ // used by Remote Cluster Service
+ LvlRemoteClusterServiceDebug = Level{ID: 130, Name: "RemoteClusterServiceDebug"}
+ LvlRemoteClusterServiceError = Level{ID: 131, Name: "RemoteClusterServiceError"}
+ LvlRemoteClusterServiceWarn = Level{ID: 132, Name: "RemoteClusterServiceWarn"}
+
+ // used by Shared Channel Sync Service
+ LvlSharedChannelServiceDebug = Level{ID: 200, Name: "SharedChannelServiceDebug"}
+ LvlSharedChannelServiceError = Level{ID: 201, Name: "SharedChannelServiceError"}
+ LvlSharedChannelServiceWarn = Level{ID: 202, Name: "SharedChannelServiceWarn"}
+ LvlSharedChannelServiceMessagesInbound = Level{ID: 203, Name: "SharedChannelServiceMsgInbound"}
+ LvlSharedChannelServiceMessagesOutbound = Level{ID: 204, Name: "SharedChannelServiceMsgOutbound"}
+
+ // Focalboard
+ LvlFBTelemetry = Level{ID: 9000, Name: "telemetry"}
+ LvlFBMetrics = Level{ID: 9001, Name: "metrics"}
+)
+
+// Combinations for LogM (log multi).
+var (
+ MLvlAuditAll = []Level{LvlAuditAPI, LvlAuditContent, LvlAuditPerms, LvlAuditCLI}
+)
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/mlog.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/mlog.go
new file mode 100644
index 00000000..ac56362c
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/mlog.go
@@ -0,0 +1,419 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+// Package mlog provides a simple wrapper around Logr.
+package mlog
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ "github.com/mattermost/logr/v2"
+ logrcfg "github.com/mattermost/logr/v2/config"
+)
+
+const (
+ ShutdownTimeout = time.Second * 15
+ FlushTimeout = time.Second * 15
+ DefaultMaxQueueSize = 1000
+ DefaultMetricsUpdateFreqMillis = 15000
+)
+
+type LoggerIFace interface {
+ IsLevelEnabled(Level) bool
+ Debug(string, ...Field)
+ Info(string, ...Field)
+ Warn(string, ...Field)
+ Error(string, ...Field)
+ Critical(string, ...Field)
+ Log(Level, string, ...Field)
+ LogM([]Level, string, ...Field)
+}
+
+// Type and function aliases from Logr to limit the spread of dependencies.
+type Field = logr.Field
+type Level = logr.Level
+type Option = logr.Option
+type Target = logr.Target
+type TargetInfo = logr.TargetInfo
+type LogRec = logr.LogRec
+type LogCloner = logr.LogCloner
+type MetricsCollector = logr.MetricsCollector
+type TargetCfg = logrcfg.TargetCfg
+type Sugar = logr.Sugar
+
+// LoggerConfiguration is a map of LogTarget configurations.
+type LoggerConfiguration map[string]TargetCfg
+
+func (lc LoggerConfiguration) Append(cfg LoggerConfiguration) {
+ for k, v := range cfg {
+ lc[k] = v
+ }
+}
+
+func (lc LoggerConfiguration) toTargetCfg() map[string]logrcfg.TargetCfg {
+ tcfg := make(map[string]logrcfg.TargetCfg)
+ for k, v := range lc {
+ tcfg[k] = v
+ }
+ return tcfg
+}
+
+// Any picks the best supported field type based on type of val.
+// For best performance when passing a struct (or struct pointer),
+// implement `logr.LogWriter` on the struct, otherwise reflection
+// will be used to generate a string representation.
+var Any = logr.Any
+
+// Int64 constructs a field containing a key and Int64 value.
+var Int64 = logr.Int64
+
+// Int32 constructs a field containing a key and Int32 value.
+var Int32 = logr.Int32
+
+// Int constructs a field containing a key and Int value.
+var Int = logr.Int
+
+// Uint64 constructs a field containing a key and Uint64 value.
+var Uint64 = logr.Uint64
+
+// Uint32 constructs a field containing a key and Uint32 value.
+var Uint32 = logr.Uint32
+
+// Uint constructs a field containing a key and Uint value.
+var Uint = logr.Uint
+
+// Float64 constructs a field containing a key and Float64 value.
+var Float64 = logr.Float64
+
+// Float32 constructs a field containing a key and Float32 value.
+var Float32 = logr.Float32
+
+// String constructs a field containing a key and String value.
+var String = logr.String
+
+// Stringer constructs a field containing a key and a fmt.Stringer value.
+// The fmt.Stringer's `String` method is called lazily.
+var Stringer = func(key string, s fmt.Stringer) logr.Field {
+ if s == nil {
+ return Field{Key: key, Type: logr.StringType, String: ""}
+ }
+ return Field{Key: key, Type: logr.StringType, String: s.String()}
+}
+
+// Err constructs a field containing a default key ("error") and error value.
+var Err = func(err error) logr.Field {
+ return NamedErr("error", err)
+}
+
+// NamedErr constructs a field containing a key and error value.
+var NamedErr = func(key string, err error) logr.Field {
+ if err == nil {
+ return Field{Key: key, Type: logr.StringType, String: ""}
+ }
+ return Field{Key: key, Type: logr.StringType, String: err.Error()}
+}
+
+// Bool constructs a field containing a key and bool value.
+var Bool = logr.Bool
+
+// Time constructs a field containing a key and time.Time value.
+var Time = logr.Time
+
+// Duration constructs a field containing a key and time.Duration value.
+var Duration = logr.Duration
+
+// Millis constructs a field containing a key and timestamp value.
+// The timestamp is expected to be milliseconds since Jan 1, 1970 UTC.
+var Millis = logr.Millis
+
+// Array constructs a field containing a key and array value.
+var Array = logr.Array
+
+// Map constructs a field containing a key and map value.
+var Map = logr.Map
+
+// Logger provides a thin wrapper around a Logr instance. This is a struct instead of an interface
+// so that there are no allocations on the heap each interface method invocation. Normally not
+// something to be concerned about, but logging calls for disabled levels should have as little CPU
+// and memory impact as possible. Most of these wrapper calls will be inlined as well.
+type Logger struct {
+ log *logr.Logger
+ lockConfig *int32
+}
+
+// NewLogger creates a new Logger instance which can be configured via `(*Logger).Configure`.
+// Some options with invalid values can cause an error to be returned; however, `NewLogger()`
+// using just defaults never errors.
+func NewLogger(options ...Option) (*Logger, error) {
+ options = append(options, logr.StackFilter(logr.GetPackageName("NewLogger")))
+
+ lgr, err := logr.New(options...)
+ if err != nil {
+ return nil, err
+ }
+
+ log := lgr.NewLogger()
+ var lockConfig int32
+
+ return &Logger{
+ log: &log,
+ lockConfig: &lockConfig,
+ }, nil
+}
+
+// Configure provides a new configuration for this logger.
+// Zero or more sources of config can be provided:
+// cfgFile - path to a file containing JSON
+// cfgEscaped - JSON string, typically from an environment variable
+//
+// In each case the JSON describes log targets. Target name collisions are resolved
+// using the following precedence:
+// cfgFile > cfgEscaped
+func (l *Logger) Configure(cfgFile string, cfgEscaped string) error {
+ if atomic.LoadInt32(l.lockConfig) != 0 {
+ return ErrConfigurationLock
+ }
+
+ cfgMap := make(LoggerConfiguration)
+
+ // Add config from file
+ if cfgFile != "" {
+ b, err := ioutil.ReadFile(cfgFile)
+ if err != nil {
+ return fmt.Errorf("error reading logger config file %s: %w", cfgFile, err)
+ }
+
+ var mapCfgFile LoggerConfiguration
+ if err := json.Unmarshal(b, &mapCfgFile); err != nil {
+ return fmt.Errorf("error decoding logger config file %s: %w", cfgFile, err)
+ }
+ cfgMap.Append(mapCfgFile)
+ }
+
+ // Add config from escaped json string
+ if cfgEscaped != "" {
+ var mapCfgEscaped LoggerConfiguration
+ if err := json.Unmarshal([]byte(cfgEscaped), &mapCfgEscaped); err != nil {
+ return fmt.Errorf("error decoding logger config as escaped json: %w", err)
+ }
+ cfgMap.Append(mapCfgEscaped)
+ }
+
+ if len(cfgMap) == 0 {
+ return nil
+ }
+
+ return logrcfg.ConfigureTargets(l.log.Logr(), cfgMap.toTargetCfg(), nil)
+}
+
+// ConfigureTargets provides a new configuration for this logger via a `LoggerConfiguration` map.
+// Typically `mlog.Configure` is used instead, which accepts JSON-formatted configuration.
+func (l *Logger) ConfigureTargets(cfg LoggerConfiguration) error {
+ if atomic.LoadInt32(l.lockConfig) != 0 {
+ return ErrConfigurationLock
+ }
+ return logrcfg.ConfigureTargets(l.log.Logr(), cfg.toTargetCfg(), nil)
+}
+
+// LockConfiguration disallows further configuration changes until `UnlockConfiguration`
+// is called. The previous locked state is returned.
+func (l *Logger) LockConfiguration() bool {
+ old := atomic.SwapInt32(l.lockConfig, 1)
+ return old != 0
+}
+
+// UnlockConfiguration allows configuration changes. The previous locked state is returned.
+func (l *Logger) UnlockConfiguration() bool {
+ old := atomic.SwapInt32(l.lockConfig, 0)
+ return old != 0
+}
+
+// IsConfigurationLocked returns the current state of the configuration lock.
+func (l *Logger) IsConfigurationLocked() bool {
+ return atomic.LoadInt32(l.lockConfig) != 0
+}
+
+// With creates a new Logger with the specified fields. This is a light-weight
+// operation and can be called on demand.
+func (l *Logger) With(fields ...Field) *Logger {
+ logWith := l.log.With(fields...)
+ return &Logger{
+ log: &logWith,
+ lockConfig: l.lockConfig,
+ }
+}
+
+// IsLevelEnabled returns true only if at least one log target is
+// configured to emit the specified log level. Use this check when
+// gathering the log info may be expensive.
+//
+// Note, transformations and serializations done via fields are already
+// lazily evaluated and don't require this check beforehand.
+func (l *Logger) IsLevelEnabled(level Level) bool {
+ return l.log.IsLevelEnabled(level)
+}
+
+// Log emits the log record for any targets configured for the specified level.
+func (l *Logger) Log(level Level, msg string, fields ...Field) {
+ l.log.Log(level, msg, fields...)
+}
+
+// LogM emits the log record for any targets configured for the specified levels.
+// Equivalent to calling `Log` once for each level.
+func (l *Logger) LogM(levels []Level, msg string, fields ...Field) {
+ l.log.LogM(levels, msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Trace` level.
+func (l *Logger) Trace(msg string, fields ...Field) {
+ l.log.Trace(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Debug` level.
+func (l *Logger) Debug(msg string, fields ...Field) {
+ l.log.Debug(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Info` level.
+func (l *Logger) Info(msg string, fields ...Field) {
+ l.log.Info(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Warn` level.
+func (l *Logger) Warn(msg string, fields ...Field) {
+ l.log.Warn(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Error` level.
+func (l *Logger) Error(msg string, fields ...Field) {
+ l.log.Error(msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Critical` level.
+func (l *Logger) Critical(msg string, fields ...Field) {
+ l.log.Log(LvlCritical, msg, fields...)
+}
+
+// Convenience method equivalent to calling `Log` with the `Fatal` level,
+// followed by `os.Exit(1)`.
+func (l *Logger) Fatal(msg string, fields ...Field) {
+ l.log.Log(logr.Fatal, msg, fields...)
+ _ = l.Shutdown()
+ os.Exit(1)
+}
+
+// HasTargets returns true if at least one log target has been added.
+func (l *Logger) HasTargets() bool {
+ return l.log.Logr().HasTargets()
+}
+
+// StdLogger creates a standard logger backed by this logger.
+// All log records are output with the specified level.
+func (l *Logger) StdLogger(level Level) *log.Logger {
+ return l.log.StdLogger(level)
+}
+
+// StdLogWriter returns a writer that can be hooked up to the output of a golang standard logger;
+// anything written will be interpreted as log entries and passed to this logger.
+func (l *Logger) StdLogWriter() io.Writer {
+ return &logWriter{
+ logger: l,
+ }
+}
+
+// RedirectStdLog redirects output from the standard library's package-global logger
+// to this logger at the specified level and with zero or more Field's. Since this logger already
+// handles caller annotations, timestamps, etc., it automatically disables the standard
+// library's annotations and prefixing.
+// A function is returned that restores the original prefix and flags and resets the standard
+// library's output to os.Stdout.
+func (l *Logger) RedirectStdLog(level Level, fields ...Field) func() {
+ return l.log.Logr().RedirectStdLog(level, fields...)
+}
+
+// RemoveTargets safely removes one or more targets based on the filtering method.
+// `f` should return true to delete the target, false to keep it.
+// When removing a target, best effort is made to write any queued log records before
+// closing, with ctx determining how much time can be spent in total.
+// Note, keep the timeout short since this method blocks certain logging operations.
+func (l *Logger) RemoveTargets(ctx context.Context, f func(ti TargetInfo) bool) error {
+ return l.log.Logr().RemoveTargets(ctx, f)
+}
+
+// SetMetricsCollector sets (or resets) the metrics collector to be used for gathering
+// metrics for all targets. Only targets added after this call will use the collector.
+//
+// To ensure all targets use a collector, use the `SetMetricsCollector` option when
+// creating the Logger instead, or configure/reconfigure the Logger after calling this method.
+func (l *Logger) SetMetricsCollector(collector MetricsCollector, updateFrequencyMillis int64) {
+ l.log.Logr().SetMetricsCollector(collector, updateFrequencyMillis)
+}
+
+// Sugar creates a new `Sugar` logger with a less structured API. Any fields are preserved.
+func (l *Logger) Sugar(fields ...Field) Sugar {
+ return l.log.Sugar(fields...)
+}
+
+// Flush forces all targets to write out any queued log records with a default timeout.
+func (l *Logger) Flush() error {
+ ctx, cancel := context.WithTimeout(context.Background(), FlushTimeout)
+ defer cancel()
+ return l.log.Logr().FlushWithTimeout(ctx)
+}
+
+// FlushWithTimeout forces all targets to write out any queued log records, honoring the deadline of the specified context.
+func (l *Logger) FlushWithTimeout(ctx context.Context) error {
+ return l.log.Logr().FlushWithTimeout(ctx)
+}
+
+// Shutdown shuts down the logger after making best efforts to flush any
+// remaining records.
+func (l *Logger) Shutdown() error {
+ ctx, cancel := context.WithTimeout(context.Background(), ShutdownTimeout)
+ defer cancel()
+ return l.log.Logr().ShutdownWithTimeout(ctx)
+}
+
+// ShutdownWithTimeout shuts down the logger after making best efforts to flush any
+// remaining records, honoring the deadline of the specified context.
+func (l *Logger) ShutdownWithTimeout(ctx context.Context) error {
+ return l.log.Logr().ShutdownWithTimeout(ctx)
+}
+
+// GetPackageName reduces a fully qualified function name to the package name
+// By sirupsen: https://github.com/sirupsen/logrus/blob/master/entry.go
+func GetPackageName(f string) string {
+ for {
+ lastPeriod := strings.LastIndex(f, ".")
+ lastSlash := strings.LastIndex(f, "/")
+ if lastPeriod > lastSlash {
+ f = f[:lastPeriod]
+ } else {
+ break
+ }
+ }
+ return f
+}
+
+type logWriter struct {
+ logger *Logger
+}
+
+func (lw *logWriter) Write(p []byte) (int, error) {
+ lw.logger.Info(string(p))
+ return len(p), nil
+}
+
+// ErrConfigurationLock is returned when one of a logger's configuration APIs is called
+// while the configuration is locked.
+var ErrConfigurationLock = errors.New("configuration is locked")
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/options.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/options.go
new file mode 100644
index 00000000..3a98b480
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/options.go
@@ -0,0 +1,55 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import "github.com/mattermost/logr/v2"
+
+// MaxQueueSize is the maximum number of log records that can be queued.
+// If exceeded, `OnQueueFull` is called, which determines whether the log
+// record will be dropped or will block until the add is successful.
+// Defaults to DefaultMaxQueueSize.
+func MaxQueueSize(size int) Option {
+ return logr.MaxQueueSize(size)
+}
+
+// OnLoggerError, when not nil, is called any time an internal
+// logging error occurs. For example, this can happen when a
+// target cannot connect to its data sink.
+func OnLoggerError(f func(error)) Option {
+ return logr.OnLoggerError(f)
+}
+
+// OnQueueFull, when not nil, is called on an attempt to add
+// a log record to a full Logr queue.
+// `MaxQueueSize` can be used to modify the maximum queue size.
+// This function should return quickly, with a bool indicating whether
+// the log record should be dropped (true) or block until the log record
+// is successfully added (false). If nil then blocking (false) is assumed.
+func OnQueueFull(f func(rec *LogRec, maxQueueSize int) bool) Option {
+ return logr.OnQueueFull(f)
+}
+
+// OnTargetQueueFull, when not nil, is called on an attempt to add
+// a log record to a full target queue provided the target supports reporting
+// this condition.
+// This function should return quickly, with a bool indicating whether
+// the log record should be dropped (true) or block until the log record
+// is successfully added (false). If nil then blocking (false) is assumed.
+func OnTargetQueueFull(f func(target Target, rec *LogRec, maxQueueSize int) bool) Option {
+ return logr.OnTargetQueueFull(f)
+}
+
+// SetMetricsCollector enables metrics collection by supplying a MetricsCollector.
+// The MetricsCollector provides counters and gauges that are updated by log targets.
+// `updateFreqMillis` determines how often polled metrics are updated. Defaults to 15000 (15 seconds)
+// and must be at least 250 to avoid excessive CPU use.
+func SetMetricsCollector(collector MetricsCollector, updateFreqMillis int64) Option {
+ return logr.SetMetricsCollector(collector, updateFreqMillis)
+}
+
+// StackFilter provides a list of package names to exclude from the top of
+// stack traces. The Logr packages are automatically filtered.
+func StackFilter(pkg ...string) Option {
+ return logr.StackFilter(pkg...)
+}
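+
+// A hedged usage sketch (not upstream code): options are supplied when the
+// Logger is constructed. The concrete values and the callback body below are
+// illustrative assumptions.
+//
+//   logger, err := NewLogger(
+//       MaxQueueSize(2048),
+//       OnLoggerError(func(err error) {
+//           // e.g. write to stderr or bump an error counter
+//       }),
+//       StackFilter("github.com/example/app/logging"), // hypothetical package
+//   )
+//   if err != nil {
+//       // handle construction failure
+//   }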
diff --git a/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/tlog.go b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/tlog.go
new file mode 100644
index 00000000..ef8f6016
--- /dev/null
+++ b/vendor/github.com/mattermost/mattermost-server/v6/shared/mlog/tlog.go
@@ -0,0 +1,79 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+package mlog
+
+import (
+ "bytes"
+ "io"
+ "os"
+ "sync"
+
+ "github.com/mattermost/logr/v2"
+ "github.com/mattermost/logr/v2/formatters"
+ "github.com/mattermost/logr/v2/targets"
+)
+
+// AddWriterTarget adds a simple io.Writer target to an existing Logger.
+// The `io.Writer` can be a buffer, which is useful for testing.
+// When adding a buffer to collect logs, make sure to use `mlog.Buffer`, which is
+// a thread-safe version of `bytes.Buffer`.
+func AddWriterTarget(logger *Logger, w io.Writer, useJSON bool, levels ...Level) error {
+ filter := logr.NewCustomFilter(levels...)
+
+ var formatter logr.Formatter
+ if useJSON {
+ formatter = &formatters.JSON{EnableCaller: true}
+ } else {
+ formatter = &formatters.Plain{EnableCaller: true}
+ }
+
+ target := targets.NewWriterTarget(w)
+ return logger.log.Logr().AddTarget(target, "_testWriter", filter, formatter, 1000)
+}
+
+// CreateConsoleTestLogger creates a logger for unit tests. Log records are output to `os.Stdout`.
+// Additional output can be captured by adding an `io.Writer` target via `AddWriterTarget`.
+func CreateConsoleTestLogger(useJSON bool, level Level) *Logger {
+ logger, _ := NewLogger()
+
+ filter := logr.StdFilter{
+ Lvl: level,
+ Stacktrace: LvlPanic,
+ }
+
+ var formatter logr.Formatter
+ if useJSON {
+ formatter = &formatters.JSON{EnableCaller: true}
+ } else {
+ formatter = &formatters.Plain{EnableCaller: true}
+ }
+
+ target := targets.NewWriterTarget(os.Stdout)
+ if err := logger.log.Logr().AddTarget(target, "_testcon", filter, formatter, 1000); err != nil {
+ panic(err)
+ }
+ return logger
+}
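+
+// Typical test usage (illustrative): plain-text output at debug level, with a
+// shutdown once the test completes.
+//
+//   logger := CreateConsoleTestLogger(false, LvlDebug)
+//   defer logger.Shutdown()
+//   logger.Info("running test")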
+
+// Buffer provides a thread-safe buffer useful for logging to memory in unit tests.
+type Buffer struct {
+ buf bytes.Buffer
+ mux sync.Mutex
+}
+
+func (b *Buffer) Read(p []byte) (n int, err error) {
+ b.mux.Lock()
+ defer b.mux.Unlock()
+ return b.buf.Read(p)
+}
+
+func (b *Buffer) Write(p []byte) (n int, err error) {
+ b.mux.Lock()
+ defer b.mux.Unlock()
+ return b.buf.Write(p)
+}
+
+func (b *Buffer) String() string {
+ b.mux.Lock()
+ defer b.mux.Unlock()
+ return b.buf.String()
+}
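+
+// An illustrative test sketch (not upstream code) combining Buffer with
+// AddWriterTarget to assert on captured output; `t` is assumed to be a
+// *testing.T and "strings" to be imported by the calling test.
+//
+//   buf := &Buffer{}
+//   logger, _ := NewLogger()
+//   if err := AddWriterTarget(logger, buf, true, LvlInfo); err != nil {
+//       t.Fatal(err)
+//   }
+//   logger.Info("hello world")
+//   _ = logger.Flush()
+//   if !strings.Contains(buf.String(), "hello world") {
+//       t.Fatalf("expected captured output to contain message, got %q", buf.String())
+//   }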