author     Wim <wim@42.be>               2022-11-27 00:42:16 +0100
committer  GitHub <noreply@github.com>   2022-11-27 00:42:16 +0100
commit     4fd0a7672777f0ed15692ae2ba47838208537558 (patch)
tree       b119834a8b9ee78aa8f1b2ad05efa7da50516cbf /vendor/golang.org/x/tools
parent     6da9d567dc9195e9a5211f23a6795a41f56a1bfc (diff)
Update dependencies (#1929)
Diffstat (limited to 'vendor/golang.org/x/tools')
-rw-r--r--  vendor/golang.org/x/tools/AUTHORS                                   |   3
-rw-r--r--  vendor/golang.org/x/tools/CONTRIBUTORS                              |   3
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go           |  49
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/importer.go               |   2
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go         |  20
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go      | 161
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go         |   5
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go         |  70
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go      |  10
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go     |  10
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go      |  19
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go     | 612
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/codes.go              |  77
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go            | 433
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/doc.go                |  32
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go            | 379
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/flags.go              |   9
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go         |  21
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go        |  28
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go              |  42
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/support.go            |  17
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/sync.go               | 113
-rw-r--r--  vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go  |  89
-rw-r--r--  vendor/golang.org/x/tools/go/packages/doc.go                        |   1
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist.go                     |  92
-rw-r--r--  vendor/golang.org/x/tools/go/packages/loadmode_string.go            |   4
-rw-r--r--  vendor/golang.org/x/tools/go/packages/packages.go                   |  73
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/invoke.go              |   6
-rw-r--r--  vendor/golang.org/x/tools/internal/packagesinternal/packages.go     |   2
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/common.go             |  21
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/coretype.go           | 122
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/normalize.go          |  12
-rw-r--r--  vendor/golang.org/x/tools/internal/typeparams/termlist.go           |   9
33 files changed, 2356 insertions(+), 190 deletions(-)
diff --git a/vendor/golang.org/x/tools/AUTHORS b/vendor/golang.org/x/tools/AUTHORS
deleted file mode 100644
index 15167cd7..00000000
--- a/vendor/golang.org/x/tools/AUTHORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code refers to The Go Authors for copyright purposes.
-# The master list of authors is in the main Go distribution,
-# visible at http://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/golang.org/x/tools/CONTRIBUTORS
deleted file mode 100644
index 1c4577e9..00000000
--- a/vendor/golang.org/x/tools/CONTRIBUTORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code was written by the Go contributors.
-# The master list of contributors is in the main Go distribution,
-# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
index cec819d6..2ed25a75 100644
--- a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -17,32 +17,47 @@
// developer tools, which will then be able to consume both Go 1.7 and
// Go 1.8 export data files, so they will work before and after the
// Go update. (See discussion at https://golang.org/issue/15651.)
-//
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
import (
"bufio"
"bytes"
+ "encoding/json"
"fmt"
"go/token"
"go/types"
"io"
"io/ioutil"
+ "os/exec"
"golang.org/x/tools/go/internal/gcimporter"
)
// Find returns the name of an object (.o) or archive (.a) file
// containing type information for the specified import path,
-// using the workspace layout conventions of go/build.
+// using the go command.
// If no file was found, an empty filename is returned.
//
// A relative srcDir is interpreted relative to the current working directory.
//
// Find also returns the package's resolved (canonical) import path,
// reflecting the effects of srcDir and vendoring on importPath.
+//
+// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
+// which is more efficient.
func Find(importPath, srcDir string) (filename, path string) {
- return gcimporter.FindPkg(importPath, srcDir)
+ cmd := exec.Command("go", "list", "-json", "-export", "--", importPath)
+ cmd.Dir = srcDir
+ out, err := cmd.CombinedOutput()
+ if err != nil {
+ return "", ""
+ }
+ var data struct {
+ ImportPath string
+ Export string
+ }
+ json.Unmarshal(out, &data)
+ return data.Export, data.ImportPath
}
// NewReader returns a reader for the export data section of an object
@@ -101,13 +116,29 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
- if len(data) > 0 && data[0] == 'i' {
- _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
- return pkg, err
- }
+ if len(data) > 0 {
+ switch data[0] {
+ case 'i':
+ _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
+ return pkg, err
+
+ case 'v', 'c', 'd':
+ _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
+ return pkg, err
- _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
- return pkg, err
+ case 'u':
+ _, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path)
+ return pkg, err
+
+ default:
+ l := len(data)
+ if l > 10 {
+ l = 10
+ }
+ return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path)
+ }
+ }
+ return nil, fmt.Errorf("empty export data for %s", path)
}
// Write writes encoded type information for the specified package to out.
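
The rewritten Find above now shells out to "go list -json -export", and both Find and NewImporter gain Deprecated notices pointing at golang.org/x/tools/go/packages. A minimal sketch of that recommended replacement, using only the documented packages.Load API with the NeedName and NeedExportFile mode bits (the pattern "fmt" is just an example target):

package main

import (
    "fmt"
    "log"

    "golang.org/x/tools/go/packages"
)

func main() {
    // Ask the go command for each package's name, import path, and the
    // location of its compiled export data, without type-checking anything.
    cfg := &packages.Config{Mode: packages.NeedName | packages.NeedExportFile}
    pkgs, err := packages.Load(cfg, "fmt")
    if err != nil {
        log.Fatal(err)
    }
    for _, p := range pkgs {
        // p.ExportFile plays the role of Find's filename result;
        // p.PkgPath is the resolved (canonical) import path.
        fmt.Println(p.PkgPath, p.ExportFile)
    }
}
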
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
index efe221e7..37a7247e 100644
--- a/vendor/golang.org/x/tools/go/gcexportdata/importer.go
+++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go
@@ -23,6 +23,8 @@ import (
// or to control the FileSet or access the imports map populated during
// package loading.
//
+// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
+// which is more efficient.
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
return importer{fset, imports}
}
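
NewImporter carries the same deprecation notice, but it remains the natural bridge when driving go/types directly. A hedged sketch of that wiring, assuming export data for "fmt" can be located by the local toolchain (the file name and source snippet are illustrative only):

package main

import (
    "go/ast"
    "go/parser"
    "go/token"
    "go/types"
    "log"

    "golang.org/x/tools/go/gcexportdata"
)

func main() {
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "example.go",
        `package example; import "fmt"; var _ = fmt.Sprint`, 0)
    if err != nil {
        log.Fatal(err)
    }

    // The imports map is shared with the importer, so transitively
    // imported packages are only read once per map.
    imports := make(map[string]*types.Package)
    conf := types.Config{Importer: gcexportdata.NewImporter(fset, imports)}
    pkg, err := conf.Check("example", fset, []*ast.File{f}, nil)
    if err != nil {
        log.Fatal(err)
    }
    log.Println("type-checked", pkg.Path())
}
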
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
index 0a3cdb9a..196cb3f9 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
@@ -35,16 +35,18 @@ import (
const debugFormat = false // default: false
// Current export format version. Increase with each format change.
+//
// Note: The latest binary (non-indexed) export format is at version 6.
-// This exporter is still at level 4, but it doesn't matter since
-// the binary importer can handle older versions just fine.
-// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
-// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE
-// 4: type name objects support type aliases, uses aliasTag
-// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
-// 2: removed unused bool in ODCL export (compiler only)
-// 1: header format change (more regular), export package for _ struct fields
-// 0: Go1.7 encoding
+// This exporter is still at level 4, but it doesn't matter since
+// the binary importer can handle older versions just fine.
+//
+// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
+// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
+// 4: type name objects support type aliases, uses aliasTag
+// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
+// 2: removed unused bool in ODCL export (compiler only)
+// 1: header format change (more regular), export package for _ struct fields
+// 0: Go1.7 encoding
const exportVersion = 4
// trackAllTypes enables cycle tracking for all types, not just named
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
index 3ab66830..e96c3960 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
@@ -45,7 +45,6 @@ var pkgExts = [...]string{".a", ".o"}
// the build.Default build.Context). A relative srcDir is interpreted
// relative to the current working directory.
// If no file was found, an empty filename is returned.
-//
func FindPkg(path, srcDir string) (filename, id string) {
if path == "" {
return
@@ -109,7 +108,6 @@ func FindPkg(path, srcDir string) (filename, id string) {
// If packages[id] contains the completely imported package, that package
// can be used directly, and there is no need to call this function (but
// there is also no harm but for extra time used).
-//
func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
// support for parser error handling
defer func() {
@@ -133,7 +131,6 @@ func ImportData(packages map[string]*types.Package, filename, id string, data io
// Import imports a gc-generated package given its import path and srcDir, adds
// the corresponding package object to the packages map, and returns the object.
// The packages map must contain all packages already imported.
-//
func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
var rc io.ReadCloser
var filename, id string
@@ -184,8 +181,9 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
defer rc.Close()
var hdr string
+ var size int64
buf := bufio.NewReader(rc)
- if hdr, _, err = FindExportData(buf); err != nil {
+ if hdr, size, err = FindExportData(buf); err != nil {
return
}
@@ -213,10 +211,27 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
- if len(data) > 0 && data[0] == 'i' {
- _, pkg, err = IImportData(fset, packages, data[1:], id)
- } else {
- _, pkg, err = BImportData(fset, packages, data, id)
+ if len(data) > 0 {
+ switch data[0] {
+ case 'i':
+ _, pkg, err := IImportData(fset, packages, data[1:], id)
+ return pkg, err
+
+ case 'v', 'c', 'd':
+ _, pkg, err := BImportData(fset, packages, data, id)
+ return pkg, err
+
+ case 'u':
+ _, pkg, err := UImportData(fset, packages, data[1:size], id)
+ return pkg, err
+
+ default:
+ l := len(data)
+ if l > 10 {
+ l = 10
+ }
+ return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
+ }
}
default:
@@ -348,8 +363,9 @@ func (p *parser) expectKeyword(keyword string) {
// ----------------------------------------------------------------------------
// Qualified and unqualified names
-// PackageId = string_lit .
+// parsePackageID parses a PackageId:
//
+// PackageId = string_lit .
func (p *parser) parsePackageID() string {
id, err := strconv.Unquote(p.expect(scanner.String))
if err != nil {
@@ -363,13 +379,16 @@ func (p *parser) parsePackageID() string {
return id
}
-// PackageName = ident .
+// parsePackageName parses a PackageName:
//
+// PackageName = ident .
func (p *parser) parsePackageName() string {
return p.expect(scanner.Ident)
}
-// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
+// parseDotIdent parses a dotIdentifier:
+//
+// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
func (p *parser) parseDotIdent() string {
ident := ""
if p.tok != scanner.Int {
@@ -386,8 +405,9 @@ func (p *parser) parseDotIdent() string {
return ident
}
-// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
+// parseQualifiedName parses a QualifiedName:
//
+// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
func (p *parser) parseQualifiedName() (id, name string) {
p.expect('@')
id = p.parsePackageID()
@@ -410,7 +430,6 @@ func (p *parser) parseQualifiedName() (id, name string) {
// id identifies a package, usually by a canonical package path like
// "encoding/json" but possibly by a non-canonical import path like
// "./json".
-//
func (p *parser) getPkg(id, name string) *types.Package {
// package unsafe is not in the packages maps - handle explicitly
if id == "unsafe" {
@@ -446,7 +465,6 @@ func (p *parser) getPkg(id, name string) *types.Package {
// parseExportedName is like parseQualifiedName, but
// the package id is resolved to an imported *types.Package.
-//
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
id, name := p.parseQualifiedName()
pkg = p.getPkg(id, "")
@@ -456,8 +474,9 @@ func (p *parser) parseExportedName() (pkg *types.Package, name string) {
// ----------------------------------------------------------------------------
// Types
-// BasicType = identifier .
+// parseBasicType parses a BasicType:
//
+// BasicType = identifier .
func (p *parser) parseBasicType() types.Type {
id := p.expect(scanner.Ident)
obj := types.Universe.Lookup(id)
@@ -468,8 +487,9 @@ func (p *parser) parseBasicType() types.Type {
return nil
}
-// ArrayType = "[" int_lit "]" Type .
+// parseArrayType parses an ArrayType:
//
+// ArrayType = "[" int_lit "]" Type .
func (p *parser) parseArrayType(parent *types.Package) types.Type {
// "[" already consumed and lookahead known not to be "]"
lit := p.expect(scanner.Int)
@@ -482,8 +502,9 @@ func (p *parser) parseArrayType(parent *types.Package) types.Type {
return types.NewArray(elem, n)
}
-// MapType = "map" "[" Type "]" Type .
+// parseMapType parses a MapType:
//
+// MapType = "map" "[" Type "]" Type .
func (p *parser) parseMapType(parent *types.Package) types.Type {
p.expectKeyword("map")
p.expect('[')
@@ -493,7 +514,9 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
return types.NewMap(key, elem)
}
-// Name = identifier | "?" | QualifiedName .
+// parseName parses a Name:
+//
+// Name = identifier | "?" | QualifiedName .
//
// For unqualified and anonymous names, the returned package is the parent
// package unless parent == nil, in which case the returned package is the
@@ -505,7 +528,6 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
// it doesn't exist yet) unless materializePkg is set (which creates an
// unnamed package with valid package path). In the latter case, a
// subsequent import clause is expected to provide a name for the package.
-//
func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
pkg = parent
if pkg == nil {
@@ -539,8 +561,9 @@ func deref(typ types.Type) types.Type {
return typ
}
-// Field = Name Type [ string_lit ] .
+// parseField parses a Field:
//
+// Field = Name Type [ string_lit ] .
func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
pkg, name := p.parseName(parent, true)
@@ -583,9 +606,10 @@ func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
}
-// StructType = "struct" "{" [ FieldList ] "}" .
-// FieldList = Field { ";" Field } .
+// parseStructType parses a StructType:
//
+// StructType = "struct" "{" [ FieldList ] "}" .
+// FieldList = Field { ";" Field } .
func (p *parser) parseStructType(parent *types.Package) types.Type {
var fields []*types.Var
var tags []string
@@ -610,8 +634,9 @@ func (p *parser) parseStructType(parent *types.Package) types.Type {
return types.NewStruct(fields, tags)
}
-// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
+// parseParameter parses a Parameter:
//
+// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
_, name := p.parseName(nil, false)
// remove gc-specific parameter numbering
@@ -635,9 +660,10 @@ func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
return
}
-// Parameters = "(" [ ParameterList ] ")" .
-// ParameterList = { Parameter "," } Parameter .
+// parseParameters parses a Parameters:
//
+// Parameters = "(" [ ParameterList ] ")" .
+// ParameterList = { Parameter "," } Parameter .
func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
p.expect('(')
for p.tok != ')' && p.tok != scanner.EOF {
@@ -658,9 +684,10 @@ func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
return
}
-// Signature = Parameters [ Result ] .
-// Result = Type | Parameters .
+// parseSignature parses a Signature:
//
+// Signature = Parameters [ Result ] .
+// Result = Type | Parameters .
func (p *parser) parseSignature(recv *types.Var) *types.Signature {
params, isVariadic := p.parseParameters()
@@ -677,14 +704,15 @@ func (p *parser) parseSignature(recv *types.Var) *types.Signature {
return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
}
-// InterfaceType = "interface" "{" [ MethodList ] "}" .
-// MethodList = Method { ";" Method } .
-// Method = Name Signature .
+// parseInterfaceType parses an InterfaceType:
+//
+// InterfaceType = "interface" "{" [ MethodList ] "}" .
+// MethodList = Method { ";" Method } .
+// Method = Name Signature .
//
// The methods of embedded interfaces are always "inlined"
// by the compiler and thus embedded interfaces are never
// visible in the export data.
-//
func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
var methods []*types.Func
@@ -705,8 +733,9 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
return newInterface(methods, nil).Complete()
}
-// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
+// parseChanType parses a ChanType:
//
+// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
func (p *parser) parseChanType(parent *types.Package) types.Type {
dir := types.SendRecv
if p.tok == scanner.Ident {
@@ -724,17 +753,18 @@ func (p *parser) parseChanType(parent *types.Package) types.Type {
return types.NewChan(dir, elem)
}
-// Type =
-// BasicType | TypeName | ArrayType | SliceType | StructType |
-// PointerType | FuncType | InterfaceType | MapType | ChanType |
-// "(" Type ")" .
+// parseType parses a Type:
//
-// BasicType = ident .
-// TypeName = ExportedName .
-// SliceType = "[" "]" Type .
-// PointerType = "*" Type .
-// FuncType = "func" Signature .
+// Type =
+// BasicType | TypeName | ArrayType | SliceType | StructType |
+// PointerType | FuncType | InterfaceType | MapType | ChanType |
+// "(" Type ")" .
//
+// BasicType = ident .
+// TypeName = ExportedName .
+// SliceType = "[" "]" Type .
+// PointerType = "*" Type .
+// FuncType = "func" Signature .
func (p *parser) parseType(parent *types.Package) types.Type {
switch p.tok {
case scanner.Ident:
@@ -786,16 +816,18 @@ func (p *parser) parseType(parent *types.Package) types.Type {
// ----------------------------------------------------------------------------
// Declarations
-// ImportDecl = "import" PackageName PackageId .
+// parseImportDecl parses an ImportDecl:
//
+// ImportDecl = "import" PackageName PackageId .
func (p *parser) parseImportDecl() {
p.expectKeyword("import")
name := p.parsePackageName()
p.getPkg(p.parsePackageID(), name)
}
-// int_lit = [ "+" | "-" ] { "0" ... "9" } .
+// parseInt parses an int_lit:
//
+// int_lit = [ "+" | "-" ] { "0" ... "9" } .
func (p *parser) parseInt() string {
s := ""
switch p.tok {
@@ -808,8 +840,9 @@ func (p *parser) parseInt() string {
return s + p.expect(scanner.Int)
}
-// number = int_lit [ "p" int_lit ] .
+// parseNumber parses a number:
//
+// number = int_lit [ "p" int_lit ] .
func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
// mantissa
mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
@@ -844,13 +877,14 @@ func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
return
}
-// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
-// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
-// bool_lit = "true" | "false" .
-// complex_lit = "(" float_lit "+" float_lit "i" ")" .
-// rune_lit = "(" int_lit "+" int_lit ")" .
-// string_lit = `"` { unicode_char } `"` .
+// parseConstDecl parses a ConstDecl:
//
+// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
+// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
+// bool_lit = "true" | "false" .
+// complex_lit = "(" float_lit "+" float_lit "i" ")" .
+// rune_lit = "(" int_lit "+" int_lit ")" .
+// string_lit = `"` { unicode_char } `"` .
func (p *parser) parseConstDecl() {
p.expectKeyword("const")
pkg, name := p.parseExportedName()
@@ -920,8 +954,9 @@ func (p *parser) parseConstDecl() {
pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
}
-// TypeDecl = "type" ExportedName Type .
+// parseTypeDecl parses a TypeDecl:
//
+// TypeDecl = "type" ExportedName Type .
func (p *parser) parseTypeDecl() {
p.expectKeyword("type")
pkg, name := p.parseExportedName()
@@ -939,8 +974,9 @@ func (p *parser) parseTypeDecl() {
}
}
-// VarDecl = "var" ExportedName Type .
+// parseVarDecl parses a VarDecl:
//
+// VarDecl = "var" ExportedName Type .
func (p *parser) parseVarDecl() {
p.expectKeyword("var")
pkg, name := p.parseExportedName()
@@ -948,9 +984,10 @@ func (p *parser) parseVarDecl() {
pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
}
-// Func = Signature [ Body ] .
-// Body = "{" ... "}" .
+// parseFunc parses a Func:
//
+// Func = Signature [ Body ] .
+// Body = "{" ... "}" .
func (p *parser) parseFunc(recv *types.Var) *types.Signature {
sig := p.parseSignature(recv)
if p.tok == '{' {
@@ -967,9 +1004,10 @@ func (p *parser) parseFunc(recv *types.Var) *types.Signature {
return sig
}
-// MethodDecl = "func" Receiver Name Func .
-// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
+// parseMethodDecl parses a MethodDecl:
//
+// MethodDecl = "func" Receiver Name Func .
+// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
func (p *parser) parseMethodDecl() {
// "func" already consumed
p.expect('(')
@@ -992,8 +1030,9 @@ func (p *parser) parseMethodDecl() {
base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
}
-// FuncDecl = "func" ExportedName Func .
+// parseFuncDecl parses a FuncDecl:
//
+// FuncDecl = "func" ExportedName Func .
func (p *parser) parseFuncDecl() {
// "func" already consumed
pkg, name := p.parseExportedName()
@@ -1001,8 +1040,9 @@ func (p *parser) parseFuncDecl() {
pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
}
-// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
+// parseDecl parses a Decl:
//
+// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
func (p *parser) parseDecl() {
if p.tok == scanner.Ident {
switch p.lit {
@@ -1029,9 +1069,10 @@ func (p *parser) parseDecl() {
// ----------------------------------------------------------------------------
// Export
-// Export = "PackageClause { Decl } "$$" .
-// PackageClause = "package" PackageName [ "safe" ] "\n" .
+// parseExport parses an Export:
//
+// Export = "PackageClause { Decl } "$$" .
+// PackageClause = "package" PackageName [ "safe" ] "\n" .
func (p *parser) parseExport() *types.Package {
p.expectKeyword("package")
name := p.parsePackageName()
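
The Import hunks above add a dispatch on the first byte of the export data: 'i' selects the indexed importer (IImportData), 'v', 'c' and 'd' the legacy binary importer (BImportData), and the new 'u' case the unified IR importer (UImportData). A standalone sketch of that sniffing logic; formatOf is a hypothetical helper, not part of the package:

package main

import "fmt"

// formatOf reports which importer entry point would handle the given
// export data, mirroring the switch added to Import above.
func formatOf(data []byte) (string, error) {
    if len(data) == 0 {
        return "", fmt.Errorf("empty export data")
    }
    switch data[0] {
    case 'i':
        return "indexed (IImportData)", nil
    case 'v', 'c', 'd':
        return "binary (BImportData)", nil
    case 'u':
        return "unified IR (UImportData)", nil
    default:
        n := len(data)
        if n > 10 {
            n = 10
        }
        return "", fmt.Errorf("unexpected export data with prefix %q", data[:n])
    }
}

func main() {
    for _, d := range [][]byte{[]byte("i..."), []byte("u..."), []byte("x...")} {
        kind, err := formatOf(d)
        fmt.Println(kind, err)
    }
}
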
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
index 20955340..9a4ff329 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
@@ -251,7 +251,10 @@ func (p *iexporter) stringOff(s string) uint64 {
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(obj types.Object) {
// Package unsafe is known to the compiler and predeclared.
- assert(obj.Pkg() != types.Unsafe)
+	// Callers should not ask us to export it.
+ if obj.Pkg() == types.Unsafe {
+ panic("cannot export package unsafe")
+ }
if _, ok := p.declIndex[obj]; ok {
return
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
index 84cfb807..4caa0f55 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
@@ -17,6 +17,7 @@ import (
"go/token"
"go/types"
"io"
+ "math/big"
"sort"
"strings"
@@ -53,7 +54,7 @@ const (
)
type ident struct {
- pkg string
+ pkg *types.Package
name string
}
@@ -100,7 +101,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
if !debug {
defer func() {
if e := recover(); e != nil {
- if version > currentVersion {
+ if bundle {
+ err = fmt.Errorf("%v", e)
+ } else if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
@@ -461,7 +464,7 @@ func (r *importReader) obj(name string) {
// To handle recursive references to the typeparam within its
// bound, save the partial type in tparamIndex before reading the bounds.
- id := ident{r.currPkg.Name(), name}
+ id := ident{r.currPkg, name}
r.p.tparamIndex[id] = t
var implicit bool
if r.p.version >= iexportVersionGo1_18 {
@@ -510,7 +513,9 @@ func (r *importReader) value() (typ types.Type, val constant.Value) {
val = constant.MakeString(r.string())
case types.IsInteger:
- val = r.mpint(b)
+ var x big.Int
+ r.mpint(&x, b)
+ val = constant.Make(&x)
case types.IsFloat:
val = r.mpfloat(b)
@@ -559,8 +564,8 @@ func intSize(b *types.Basic) (signed bool, maxBytes uint) {
return
}
-func (r *importReader) mpint(b *types.Basic) constant.Value {
- signed, maxBytes := intSize(b)
+func (r *importReader) mpint(x *big.Int, typ *types.Basic) {
+ signed, maxBytes := intSize(typ)
maxSmall := 256 - maxBytes
if signed {
@@ -579,7 +584,8 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
v = ^v
}
}
- return constant.MakeInt64(v)
+ x.SetInt64(v)
+ return
}
v := -n
@@ -589,47 +595,23 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
if v < 1 || uint(v) > maxBytes {
errorf("weird decoding: %v, %v => %v", n, signed, v)
}
-
- buf := make([]byte, v)
- io.ReadFull(&r.declReader, buf)
-
- // convert to little endian
- // TODO(gri) go/constant should have a more direct conversion function
- // (e.g., once it supports a big.Float based implementation)
- for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
- buf[i], buf[j] = buf[j], buf[i]
- }
-
- x := constant.MakeFromBytes(buf)
+ b := make([]byte, v)
+ io.ReadFull(&r.declReader, b)
+ x.SetBytes(b)
if signed && n&1 != 0 {
- x = constant.UnaryOp(token.SUB, x, 0)
+ x.Neg(x)
}
- return x
}
-func (r *importReader) mpfloat(b *types.Basic) constant.Value {
- x := r.mpint(b)
- if constant.Sign(x) == 0 {
- return x
- }
-
- exp := r.int64()
- switch {
- case exp > 0:
- x = constant.Shift(x, token.SHL, uint(exp))
- // Ensure that the imported Kind is Float, else this constant may run into
- // bitsize limits on overlarge integers. Eventually we can instead adopt
- // the approach of CL 288632, but that CL relies on go/constant APIs that
- // were introduced in go1.13.
- //
- // TODO(rFindley): sync the logic here with tip Go once we no longer
- // support go1.12.
- x = constant.ToFloat(x)
- case exp < 0:
- d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
- x = constant.BinaryOp(x, token.QUO, d)
+func (r *importReader) mpfloat(typ *types.Basic) constant.Value {
+ var mant big.Int
+ r.mpint(&mant, typ)
+ var f big.Float
+ f.SetInt(&mant)
+ if f.Sign() != 0 {
+ f.SetMantExp(&f, int(r.int64()))
}
- return x
+ return constant.Make(&f)
}
func (r *importReader) ident() string {
@@ -777,7 +759,7 @@ func (r *importReader) doType(base *types.Named) (res types.Type) {
errorf("unexpected type param type")
}
pkg, name := r.qualifiedIdent()
- id := ident{pkg.Name(), name}
+ id := ident{pkg, name}
if t, ok := r.p.tparamIndex[id]; ok {
// We're already in the process of importing this typeparam.
return t
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go
new file mode 100644
index 00000000..286bf445
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go
@@ -0,0 +1,10 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !(go1.18 && goexperiment.unified)
+// +build !go1.18 !goexperiment.unified
+
+package gcimporter
+
+const unifiedIR = false
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go
new file mode 100644
index 00000000..b5d69ffb
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go
@@ -0,0 +1,10 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18 && goexperiment.unified
+// +build go1.18,goexperiment.unified
+
+package gcimporter
+
+const unifiedIR = true
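
unified_no.go and unified_yes.go use the standard Go pattern of two files with mutually exclusive build constraints to select a compile-time constant: exactly one of them builds, depending on the Go version and the goexperiment.unified tag (which the go command derives from GOEXPERIMENT). A generic sketch of the same pattern with hypothetical file, package, tag, and constant names:

// feature_on.go (hypothetical)
//go:build go1.18 && mytag
// +build go1.18,mytag

package feature

const enabled = true

// feature_off.go (hypothetical)
//go:build !(go1.18 && mytag)
// +build !go1.18 !mytag

package feature

const enabled = false
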
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go
new file mode 100644
index 00000000..8eb20729
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go
@@ -0,0 +1,19 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package gcimporter
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+)
+
+func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ err = fmt.Errorf("go/tools compiled with a Go version earlier than 1.18 cannot read unified IR export data")
+ return
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go
new file mode 100644
index 00000000..3c1a4375
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go
@@ -0,0 +1,612 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Derived from go/internal/gcimporter/ureader.go
+
+//go:build go1.18
+// +build go1.18
+
+package gcimporter
+
+import (
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/internal/pkgbits"
+)
+
+// A pkgReader holds the shared state for reading a unified IR package
+// description.
+type pkgReader struct {
+ pkgbits.PkgDecoder
+
+ fake fakeFileSet
+
+ ctxt *types.Context
+ imports map[string]*types.Package // previously imported packages, indexed by path
+
+ // lazily initialized arrays corresponding to the unified IR
+ // PosBase, Pkg, and Type sections, respectively.
+ posBases []string // position bases (i.e., file names)
+ pkgs []*types.Package
+ typs []types.Type
+
+ // laterFns holds functions that need to be invoked at the end of
+ // import reading.
+ laterFns []func()
+}
+
+// later adds a function to be invoked at the end of import reading.
+func (pr *pkgReader) later(fn func()) {
+ pr.laterFns = append(pr.laterFns, fn)
+}
+
+// See cmd/compile/internal/noder.derivedInfo.
+type derivedInfo struct {
+ idx pkgbits.Index
+ needed bool
+}
+
+// See cmd/compile/internal/noder.typeInfo.
+type typeInfo struct {
+ idx pkgbits.Index
+ derived bool
+}
+
+func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ s := string(data)
+ s = s[:strings.LastIndex(s, "\n$$\n")]
+ input := pkgbits.NewPkgDecoder(path, s)
+ pkg = readUnifiedPackage(fset, nil, imports, input)
+ return
+}
+
+// readUnifiedPackage reads a package description from the given
+// unified IR export data decoder.
+func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[string]*types.Package, input pkgbits.PkgDecoder) *types.Package {
+ pr := pkgReader{
+ PkgDecoder: input,
+
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*fileInfo),
+ },
+
+ ctxt: ctxt,
+ imports: imports,
+
+ posBases: make([]string, input.NumElems(pkgbits.RelocPosBase)),
+ pkgs: make([]*types.Package, input.NumElems(pkgbits.RelocPkg)),
+ typs: make([]types.Type, input.NumElems(pkgbits.RelocType)),
+ }
+ defer pr.fake.setLines()
+
+ r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic)
+ pkg := r.pkg()
+ r.Bool() // has init
+
+ for i, n := 0, r.Len(); i < n; i++ {
+ // As if r.obj(), but avoiding the Scope.Lookup call,
+ // to avoid eager loading of imports.
+ r.Sync(pkgbits.SyncObject)
+ assert(!r.Bool())
+ r.p.objIdx(r.Reloc(pkgbits.RelocObj))
+ assert(r.Len() == 0)
+ }
+
+ r.Sync(pkgbits.SyncEOF)
+
+ for _, fn := range pr.laterFns {
+ fn()
+ }
+
+ pkg.MarkComplete()
+ return pkg
+}
+
+// A reader holds the state for reading a single unified IR element
+// within a package.
+type reader struct {
+ pkgbits.Decoder
+
+ p *pkgReader
+
+ dict *readerDict
+}
+
+// A readerDict holds the state for type parameters that parameterize
+// the current unified IR element.
+type readerDict struct {
+ // bounds is a slice of typeInfos corresponding to the underlying
+ // bounds of the element's type parameters.
+ bounds []typeInfo
+
+ // tparams is a slice of the constructed TypeParams for the element.
+ tparams []*types.TypeParam
+
+	// derived is a slice of types derived from tparams, which may be
+ // instantiated while reading the current element.
+ derived []derivedInfo
+ derivedTypes []types.Type // lazily instantiated from derived
+}
+
+func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
+ return &reader{
+ Decoder: pr.NewDecoder(k, idx, marker),
+ p: pr,
+ }
+}
+
+// @@@ Positions
+
+func (r *reader) pos() token.Pos {
+ r.Sync(pkgbits.SyncPos)
+ if !r.Bool() {
+ return token.NoPos
+ }
+
+ // TODO(mdempsky): Delta encoding.
+ posBase := r.posBase()
+ line := r.Uint()
+ col := r.Uint()
+ return r.p.fake.pos(posBase, int(line), int(col))
+}
+
+func (r *reader) posBase() string {
+ return r.p.posBaseIdx(r.Reloc(pkgbits.RelocPosBase))
+}
+
+func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string {
+ if b := pr.posBases[idx]; b != "" {
+ return b
+ }
+
+ r := pr.newReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
+
+ // Within types2, position bases have a lot more details (e.g.,
+ // keeping track of where //line directives appeared exactly).
+ //
+ // For go/types, we just track the file name.
+
+ filename := r.String()
+
+ if r.Bool() { // file base
+ // Was: "b = token.NewTrimmedFileBase(filename, true)"
+ } else { // line base
+ pos := r.pos()
+ line := r.Uint()
+ col := r.Uint()
+
+ // Was: "b = token.NewLineBase(pos, filename, true, line, col)"
+ _, _, _ = pos, line, col
+ }
+
+ b := filename
+ pr.posBases[idx] = b
+ return b
+}
+
+// @@@ Packages
+
+func (r *reader) pkg() *types.Package {
+ r.Sync(pkgbits.SyncPkg)
+ return r.p.pkgIdx(r.Reloc(pkgbits.RelocPkg))
+}
+
+func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package {
+ // TODO(mdempsky): Consider using some non-nil pointer to indicate
+ // the universe scope, so we don't need to keep re-reading it.
+ if pkg := pr.pkgs[idx]; pkg != nil {
+ return pkg
+ }
+
+ pkg := pr.newReader(pkgbits.RelocPkg, idx, pkgbits.SyncPkgDef).doPkg()
+ pr.pkgs[idx] = pkg
+ return pkg
+}
+
+func (r *reader) doPkg() *types.Package {
+ path := r.String()
+ switch path {
+ case "":
+ path = r.p.PkgPath()
+ case "builtin":
+ return nil // universe
+ case "unsafe":
+ return types.Unsafe
+ }
+
+ if pkg := r.p.imports[path]; pkg != nil {
+ return pkg
+ }
+
+ name := r.String()
+
+ pkg := types.NewPackage(path, name)
+ r.p.imports[path] = pkg
+
+ imports := make([]*types.Package, r.Len())
+ for i := range imports {
+ imports[i] = r.pkg()
+ }
+ pkg.SetImports(imports)
+
+ return pkg
+}
+
+// @@@ Types
+
+func (r *reader) typ() types.Type {
+ return r.p.typIdx(r.typInfo(), r.dict)
+}
+
+func (r *reader) typInfo() typeInfo {
+ r.Sync(pkgbits.SyncType)
+ if r.Bool() {
+ return typeInfo{idx: pkgbits.Index(r.Len()), derived: true}
+ }
+ return typeInfo{idx: r.Reloc(pkgbits.RelocType), derived: false}
+}
+
+func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type {
+ idx := info.idx
+ var where *types.Type
+ if info.derived {
+ where = &dict.derivedTypes[idx]
+ idx = dict.derived[idx].idx
+ } else {
+ where = &pr.typs[idx]
+ }
+
+ if typ := *where; typ != nil {
+ return typ
+ }
+
+ r := pr.newReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
+ r.dict = dict
+
+ typ := r.doTyp()
+ assert(typ != nil)
+
+ // See comment in pkgReader.typIdx explaining how this happens.
+ if prev := *where; prev != nil {
+ return prev
+ }
+
+ *where = typ
+ return typ
+}
+
+func (r *reader) doTyp() (res types.Type) {
+ switch tag := pkgbits.CodeType(r.Code(pkgbits.SyncType)); tag {
+ default:
+ errorf("unhandled type tag: %v", tag)
+ panic("unreachable")
+
+ case pkgbits.TypeBasic:
+ return types.Typ[r.Len()]
+
+ case pkgbits.TypeNamed:
+ obj, targs := r.obj()
+ name := obj.(*types.TypeName)
+ if len(targs) != 0 {
+ t, _ := types.Instantiate(r.p.ctxt, name.Type(), targs, false)
+ return t
+ }
+ return name.Type()
+
+ case pkgbits.TypeTypeParam:
+ return r.dict.tparams[r.Len()]
+
+ case pkgbits.TypeArray:
+ len := int64(r.Uint64())
+ return types.NewArray(r.typ(), len)
+ case pkgbits.TypeChan:
+ dir := types.ChanDir(r.Len())
+ return types.NewChan(dir, r.typ())
+ case pkgbits.TypeMap:
+ return types.NewMap(r.typ(), r.typ())
+ case pkgbits.TypePointer:
+ return types.NewPointer(r.typ())
+ case pkgbits.TypeSignature:
+ return r.signature(nil, nil, nil)
+ case pkgbits.TypeSlice:
+ return types.NewSlice(r.typ())
+ case pkgbits.TypeStruct:
+ return r.structType()
+ case pkgbits.TypeInterface:
+ return r.interfaceType()
+ case pkgbits.TypeUnion:
+ return r.unionType()
+ }
+}
+
+func (r *reader) structType() *types.Struct {
+ fields := make([]*types.Var, r.Len())
+ var tags []string
+ for i := range fields {
+ pos := r.pos()
+ pkg, name := r.selector()
+ ftyp := r.typ()
+ tag := r.String()
+ embedded := r.Bool()
+
+ fields[i] = types.NewField(pos, pkg, name, ftyp, embedded)
+ if tag != "" {
+ for len(tags) < i {
+ tags = append(tags, "")
+ }
+ tags = append(tags, tag)
+ }
+ }
+ return types.NewStruct(fields, tags)
+}
+
+func (r *reader) unionType() *types.Union {
+ terms := make([]*types.Term, r.Len())
+ for i := range terms {
+ terms[i] = types.NewTerm(r.Bool(), r.typ())
+ }
+ return types.NewUnion(terms)
+}
+
+func (r *reader) interfaceType() *types.Interface {
+ methods := make([]*types.Func, r.Len())
+ embeddeds := make([]types.Type, r.Len())
+ implicit := len(methods) == 0 && len(embeddeds) == 1 && r.Bool()
+
+ for i := range methods {
+ pos := r.pos()
+ pkg, name := r.selector()
+ mtyp := r.signature(nil, nil, nil)
+ methods[i] = types.NewFunc(pos, pkg, name, mtyp)
+ }
+
+ for i := range embeddeds {
+ embeddeds[i] = r.typ()
+ }
+
+ iface := types.NewInterfaceType(methods, embeddeds)
+ if implicit {
+ iface.MarkImplicit()
+ }
+ return iface
+}
+
+func (r *reader) signature(recv *types.Var, rtparams, tparams []*types.TypeParam) *types.Signature {
+ r.Sync(pkgbits.SyncSignature)
+
+ params := r.params()
+ results := r.params()
+ variadic := r.Bool()
+
+ return types.NewSignatureType(recv, rtparams, tparams, params, results, variadic)
+}
+
+func (r *reader) params() *types.Tuple {
+ r.Sync(pkgbits.SyncParams)
+
+ params := make([]*types.Var, r.Len())
+ for i := range params {
+ params[i] = r.param()
+ }
+
+ return types.NewTuple(params...)
+}
+
+func (r *reader) param() *types.Var {
+ r.Sync(pkgbits.SyncParam)
+
+ pos := r.pos()
+ pkg, name := r.localIdent()
+ typ := r.typ()
+
+ return types.NewParam(pos, pkg, name, typ)
+}
+
+// @@@ Objects
+
+func (r *reader) obj() (types.Object, []types.Type) {
+ r.Sync(pkgbits.SyncObject)
+
+ assert(!r.Bool())
+
+ pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj))
+ obj := pkgScope(pkg).Lookup(name)
+
+ targs := make([]types.Type, r.Len())
+ for i := range targs {
+ targs[i] = r.typ()
+ }
+
+ return obj, targs
+}
+
+func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
+ rname := pr.newReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
+
+ objPkg, objName := rname.qualifiedIdent()
+ assert(objName != "")
+
+ tag := pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
+
+ if tag == pkgbits.ObjStub {
+ assert(objPkg == nil || objPkg == types.Unsafe)
+ return objPkg, objName
+ }
+
+ if objPkg.Scope().Lookup(objName) == nil {
+ dict := pr.objDictIdx(idx)
+
+ r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1)
+ r.dict = dict
+
+ declare := func(obj types.Object) {
+ objPkg.Scope().Insert(obj)
+ }
+
+ switch tag {
+ default:
+ panic("weird")
+
+ case pkgbits.ObjAlias:
+ pos := r.pos()
+ typ := r.typ()
+ declare(types.NewTypeName(pos, objPkg, objName, typ))
+
+ case pkgbits.ObjConst:
+ pos := r.pos()
+ typ := r.typ()
+ val := r.Value()
+ declare(types.NewConst(pos, objPkg, objName, typ, val))
+
+ case pkgbits.ObjFunc:
+ pos := r.pos()
+ tparams := r.typeParamNames()
+ sig := r.signature(nil, nil, tparams)
+ declare(types.NewFunc(pos, objPkg, objName, sig))
+
+ case pkgbits.ObjType:
+ pos := r.pos()
+
+ obj := types.NewTypeName(pos, objPkg, objName, nil)
+ named := types.NewNamed(obj, nil, nil)
+ declare(obj)
+
+ named.SetTypeParams(r.typeParamNames())
+
+ // TODO(mdempsky): Rewrite receiver types to underlying is an
+ // Interface? The go/types importer does this (I think because
+ // unit tests expected that), but cmd/compile doesn't care
+ // about it, so maybe we can avoid worrying about that here.
+ rhs := r.typ()
+ r.p.later(func() {
+ underlying := rhs.Underlying()
+ named.SetUnderlying(underlying)
+ })
+
+ for i, n := 0, r.Len(); i < n; i++ {
+ named.AddMethod(r.method())
+ }
+
+ case pkgbits.ObjVar:
+ pos := r.pos()
+ typ := r.typ()
+ declare(types.NewVar(pos, objPkg, objName, typ))
+ }
+ }
+
+ return objPkg, objName
+}
+
+func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
+ r := pr.newReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
+
+ var dict readerDict
+
+ if implicits := r.Len(); implicits != 0 {
+ errorf("unexpected object with %v implicit type parameter(s)", implicits)
+ }
+
+ dict.bounds = make([]typeInfo, r.Len())
+ for i := range dict.bounds {
+ dict.bounds[i] = r.typInfo()
+ }
+
+ dict.derived = make([]derivedInfo, r.Len())
+ dict.derivedTypes = make([]types.Type, len(dict.derived))
+ for i := range dict.derived {
+ dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
+ }
+
+ // function references follow, but reader doesn't need those
+
+ return &dict
+}
+
+func (r *reader) typeParamNames() []*types.TypeParam {
+ r.Sync(pkgbits.SyncTypeParamNames)
+
+ // Note: This code assumes it only processes objects without
+ // implement type parameters. This is currently fine, because
+	// implicit type parameters. This is currently fine, because
+ // always package scoped.
+
+ if len(r.dict.bounds) == 0 {
+ return nil
+ }
+
+ // Careful: Type parameter lists may have cycles. To allow for this,
+ // we construct the type parameter list in two passes: first we
+ // create all the TypeNames and TypeParams, then we construct and
+ // set the bound type.
+
+ r.dict.tparams = make([]*types.TypeParam, len(r.dict.bounds))
+ for i := range r.dict.bounds {
+ pos := r.pos()
+ pkg, name := r.localIdent()
+
+ tname := types.NewTypeName(pos, pkg, name, nil)
+ r.dict.tparams[i] = types.NewTypeParam(tname, nil)
+ }
+
+ typs := make([]types.Type, len(r.dict.bounds))
+ for i, bound := range r.dict.bounds {
+ typs[i] = r.p.typIdx(bound, r.dict)
+ }
+
+ // TODO(mdempsky): This is subtle, elaborate further.
+ //
+ // We have to save tparams outside of the closure, because
+ // typeParamNames() can be called multiple times with the same
+ // dictionary instance.
+ //
+ // Also, this needs to happen later to make sure SetUnderlying has
+ // been called.
+ //
+ // TODO(mdempsky): Is it safe to have a single "later" slice or do
+ // we need to have multiple passes? See comments on CL 386002 and
+ // go.dev/issue/52104.
+ tparams := r.dict.tparams
+ r.p.later(func() {
+ for i, typ := range typs {
+ tparams[i].SetConstraint(typ)
+ }
+ })
+
+ return r.dict.tparams
+}
+
+func (r *reader) method() *types.Func {
+ r.Sync(pkgbits.SyncMethod)
+ pos := r.pos()
+ pkg, name := r.selector()
+
+ rparams := r.typeParamNames()
+ sig := r.signature(r.param(), rparams, nil)
+
+	_ = r.pos() // TODO(mdempsky): Remove; this is a hack for linker.go.
+ return types.NewFunc(pos, pkg, name, sig)
+}
+
+func (r *reader) qualifiedIdent() (*types.Package, string) { return r.ident(pkgbits.SyncSym) }
+func (r *reader) localIdent() (*types.Package, string) { return r.ident(pkgbits.SyncLocalIdent) }
+func (r *reader) selector() (*types.Package, string) { return r.ident(pkgbits.SyncSelector) }
+
+func (r *reader) ident(marker pkgbits.SyncMarker) (*types.Package, string) {
+ r.Sync(marker)
+ return r.pkg(), r.String()
+}
+
+// pkgScope returns pkg.Scope().
+// If pkg is nil, it returns types.Universe instead.
+//
+// TODO(mdempsky): Remove after x/tools can depend on Go 1.19.
+func pkgScope(pkg *types.Package) *types.Scope {
+ if pkg != nil {
+ return pkg.Scope()
+ }
+ return types.Universe
+}
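
typeParamNames above constructs type parameters in two passes, first creating every TypeName/TypeParam with a nil constraint and only then resolving the bounds, because a bound may refer back to the parameters being declared; it also defers SetConstraint (like SetUnderlying for named types) via pr.later until the whole element has been read. A hedged, self-contained sketch of that two-pass idea using only the public go/types API (the package path and the interface bound are invented for illustration):

package main

import (
    "fmt"
    "go/token"
    "go/types"
)

func main() {
    pkg := types.NewPackage("example.com/demo", "demo")

    // Pass 1: create the type parameter with a nil constraint so the
    // bound built below can already mention it (cycles are allowed).
    tname := types.NewTypeName(token.NoPos, pkg, "T", nil)
    tparam := types.NewTypeParam(tname, nil)

    // Pass 2: build the bound, here interface{ m(x T) }, which refers
    // to the type parameter itself, and attach it afterwards.
    m := types.NewFunc(token.NoPos, pkg, "m",
        types.NewSignatureType(nil, nil, nil,
            types.NewTuple(types.NewParam(token.NoPos, pkg, "x", tparam)),
            nil, false))
    bound := types.NewInterfaceType([]*types.Func{m}, nil)
    bound.Complete()
    tparam.SetConstraint(bound)

    fmt.Println(tparam, tparam.Constraint())
}
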
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/codes.go b/vendor/golang.org/x/tools/go/internal/pkgbits/codes.go
new file mode 100644
index 00000000..f0cabde9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/codes.go
@@ -0,0 +1,77 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+// A Code is an enum value that can be encoded into bitstreams.
+//
+// Code types are preferable for enum types, because they allow
+// Decoder to detect desyncs.
+type Code interface {
+ // Marker returns the SyncMarker for the Code's dynamic type.
+ Marker() SyncMarker
+
+ // Value returns the Code's ordinal value.
+ Value() int
+}
+
+// A CodeVal distinguishes among go/constant.Value encodings.
+type CodeVal int
+
+func (c CodeVal) Marker() SyncMarker { return SyncVal }
+func (c CodeVal) Value() int { return int(c) }
+
+// Note: These values are public and cannot be changed without
+// updating the go/types importers.
+
+const (
+ ValBool CodeVal = iota
+ ValString
+ ValInt64
+ ValBigInt
+ ValBigRat
+ ValBigFloat
+)
+
+// A CodeType distinguishes among go/types.Type encodings.
+type CodeType int
+
+func (c CodeType) Marker() SyncMarker { return SyncType }
+func (c CodeType) Value() int { return int(c) }
+
+// Note: These values are public and cannot be changed without
+// updating the go/types importers.
+
+const (
+ TypeBasic CodeType = iota
+ TypeNamed
+ TypePointer
+ TypeSlice
+ TypeArray
+ TypeChan
+ TypeMap
+ TypeSignature
+ TypeStruct
+ TypeInterface
+ TypeUnion
+ TypeTypeParam
+)
+
+// A CodeObj distinguishes among go/types.Object encodings.
+type CodeObj int
+
+func (c CodeObj) Marker() SyncMarker { return SyncCodeObj }
+func (c CodeObj) Value() int { return int(c) }
+
+// Note: These values are public and cannot be changed without
+// updating the go/types importers.
+
+const (
+ ObjAlias CodeObj = iota
+ ObjConst
+ ObjType
+ ObjFunc
+ ObjVar
+ ObjStub
+)
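
The Code interface above lets the encoder and decoder check, via each enum's SyncMarker, that a value is decoded as the same enum type that wrote it. A hedged sketch of the pattern in isolation; SyncColor, CodeColor and the Color constants are hypothetical stand-ins, not part of pkgbits:

package main

import "fmt"

// SyncMarker stands in for pkgbits.SyncMarker in this sketch.
type SyncMarker int

const SyncColor SyncMarker = 1 // hypothetical marker for the enum below

// Code mirrors the pkgbits.Code interface above.
type Code interface {
    Marker() SyncMarker
    Value() int
}

// CodeColor shows how a new enum type plugs into the scheme.
type CodeColor int

func (c CodeColor) Marker() SyncMarker { return SyncColor }
func (c CodeColor) Value() int         { return int(c) }

const (
    ColorRed CodeColor = iota
    ColorGreen
    ColorBlue
)

func main() {
    var c Code = ColorGreen
    fmt.Println(c.Marker(), c.Value()) // 1 1
}
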
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go b/vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go
new file mode 100644
index 00000000..2bc79366
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go
@@ -0,0 +1,433 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import (
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "math/big"
+ "os"
+ "runtime"
+ "strings"
+)
+
+// A PkgDecoder provides methods for decoding a package's Unified IR
+// export data.
+type PkgDecoder struct {
+ // version is the file format version.
+ version uint32
+
+ // sync indicates whether the file uses sync markers.
+ sync bool
+
+ // pkgPath is the package path for the package to be decoded.
+ //
+ // TODO(mdempsky): Remove; unneeded since CL 391014.
+ pkgPath string
+
+ // elemData is the full data payload of the encoded package.
+ // Elements are densely and contiguously packed together.
+ //
+ // The last 8 bytes of elemData are the package fingerprint.
+ elemData string
+
+ // elemEnds stores the byte-offset end positions of element
+ // bitstreams within elemData.
+ //
+ // For example, element I's bitstream data starts at elemEnds[I-1]
+ // (or 0, if I==0) and ends at elemEnds[I].
+ //
+ // Note: elemEnds is indexed by absolute indices, not
+ // section-relative indices.
+ elemEnds []uint32
+
+ // elemEndsEnds stores the index-offset end positions of relocation
+ // sections within elemEnds.
+ //
+ // For example, section K's end positions start at elemEndsEnds[K-1]
+ // (or 0, if K==0) and end at elemEndsEnds[K].
+ elemEndsEnds [numRelocs]uint32
+}
+
+// PkgPath returns the package path for the package being decoded.
+//
+// TODO(mdempsky): Remove; unneeded since CL 391014.
+func (pr *PkgDecoder) PkgPath() string { return pr.pkgPath }
+
+// SyncMarkers reports whether pr uses sync markers.
+func (pr *PkgDecoder) SyncMarkers() bool { return pr.sync }
+
+// NewPkgDecoder returns a PkgDecoder initialized to read the Unified
+// IR export data from input. pkgPath is the package path for the
+// compilation unit that produced the export data.
+//
+// TODO(mdempsky): Remove pkgPath parameter; unneeded since CL 391014.
+func NewPkgDecoder(pkgPath, input string) PkgDecoder {
+ pr := PkgDecoder{
+ pkgPath: pkgPath,
+ }
+
+ // TODO(mdempsky): Implement direct indexing of input string to
+ // avoid copying the position information.
+
+ r := strings.NewReader(input)
+
+ assert(binary.Read(r, binary.LittleEndian, &pr.version) == nil)
+
+ switch pr.version {
+ default:
+ panic(fmt.Errorf("unsupported version: %v", pr.version))
+ case 0:
+ // no flags
+ case 1:
+ var flags uint32
+ assert(binary.Read(r, binary.LittleEndian, &flags) == nil)
+ pr.sync = flags&flagSyncMarkers != 0
+ }
+
+ assert(binary.Read(r, binary.LittleEndian, pr.elemEndsEnds[:]) == nil)
+
+ pr.elemEnds = make([]uint32, pr.elemEndsEnds[len(pr.elemEndsEnds)-1])
+ assert(binary.Read(r, binary.LittleEndian, pr.elemEnds[:]) == nil)
+
+ pos, err := r.Seek(0, os.SEEK_CUR)
+ assert(err == nil)
+
+ pr.elemData = input[pos:]
+ assert(len(pr.elemData)-8 == int(pr.elemEnds[len(pr.elemEnds)-1]))
+
+ return pr
+}
+
+// NumElems returns the number of elements in section k.
+func (pr *PkgDecoder) NumElems(k RelocKind) int {
+ count := int(pr.elemEndsEnds[k])
+ if k > 0 {
+ count -= int(pr.elemEndsEnds[k-1])
+ }
+ return count
+}
+
+// TotalElems returns the total number of elements across all sections.
+func (pr *PkgDecoder) TotalElems() int {
+ return len(pr.elemEnds)
+}
+
+// Fingerprint returns the package fingerprint.
+func (pr *PkgDecoder) Fingerprint() [8]byte {
+ var fp [8]byte
+ copy(fp[:], pr.elemData[len(pr.elemData)-8:])
+ return fp
+}
+
+// AbsIdx returns the absolute index for the given (section, index)
+// pair.
+func (pr *PkgDecoder) AbsIdx(k RelocKind, idx Index) int {
+ absIdx := int(idx)
+ if k > 0 {
+ absIdx += int(pr.elemEndsEnds[k-1])
+ }
+ if absIdx >= int(pr.elemEndsEnds[k]) {
+ errorf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds)
+ }
+ return absIdx
+}
+
+// DataIdx returns the raw element bitstream for the given (section,
+// index) pair.
+func (pr *PkgDecoder) DataIdx(k RelocKind, idx Index) string {
+ absIdx := pr.AbsIdx(k, idx)
+
+ var start uint32
+ if absIdx > 0 {
+ start = pr.elemEnds[absIdx-1]
+ }
+ end := pr.elemEnds[absIdx]
+
+ return pr.elemData[start:end]
+}
+
+// StringIdx returns the string value for the given string index.
+func (pr *PkgDecoder) StringIdx(idx Index) string {
+ return pr.DataIdx(RelocString, idx)
+}
+
+// NewDecoder returns a Decoder for the given (section, index) pair,
+// and decodes the given SyncMarker from the element bitstream.
+func (pr *PkgDecoder) NewDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder {
+ r := pr.NewDecoderRaw(k, idx)
+ r.Sync(marker)
+ return r
+}
+
+// NewDecoderRaw returns a Decoder for the given (section, index) pair.
+//
+// Most callers should use NewDecoder instead.
+func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder {
+ r := Decoder{
+ common: pr,
+ k: k,
+ Idx: idx,
+ }
+
+ // TODO(mdempsky) r.data.Reset(...) after #44505 is resolved.
+ r.Data = *strings.NewReader(pr.DataIdx(k, idx))
+
+ r.Sync(SyncRelocs)
+ r.Relocs = make([]RelocEnt, r.Len())
+ for i := range r.Relocs {
+ r.Sync(SyncReloc)
+ r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())}
+ }
+
+ return r
+}
+
+// A Decoder provides methods for decoding an individual element's
+// bitstream data.
+type Decoder struct {
+ common *PkgDecoder
+
+ Relocs []RelocEnt
+ Data strings.Reader
+
+ k RelocKind
+ Idx Index
+}
+
+func (r *Decoder) checkErr(err error) {
+ if err != nil {
+ errorf("unexpected decoding error: %w", err)
+ }
+}
+
+func (r *Decoder) rawUvarint() uint64 {
+ x, err := binary.ReadUvarint(&r.Data)
+ r.checkErr(err)
+ return x
+}
+
+func (r *Decoder) rawVarint() int64 {
+ ux := r.rawUvarint()
+
+ // Zig-zag decode.
+ x := int64(ux >> 1)
+ if ux&1 != 0 {
+ x = ^x
+ }
+ return x
+}
+
+func (r *Decoder) rawReloc(k RelocKind, idx int) Index {
+ e := r.Relocs[idx]
+ assert(e.Kind == k)
+ return e.Idx
+}
+
+// Sync decodes a sync marker from the element bitstream and asserts
+// that it matches the expected marker.
+//
+// If r.common.sync is false, then Sync is a no-op.
+func (r *Decoder) Sync(mWant SyncMarker) {
+ if !r.common.sync {
+ return
+ }
+
+ pos, _ := r.Data.Seek(0, os.SEEK_CUR) // TODO(mdempsky): io.SeekCurrent after #44505 is resolved
+ mHave := SyncMarker(r.rawUvarint())
+ writerPCs := make([]int, r.rawUvarint())
+ for i := range writerPCs {
+ writerPCs[i] = int(r.rawUvarint())
+ }
+
+ if mHave == mWant {
+ return
+ }
+
+ // There's some tension here between printing:
+ //
+ // (1) full file paths that tools can recognize (e.g., so emacs
+ // hyperlinks the "file:line" text for easy navigation), or
+ //
+ // (2) short file paths that are easier for humans to read (e.g., by
+ // omitting redundant or irrelevant details, so it's easier to
+ // focus on the useful bits that remain).
+ //
+ // The current formatting favors the former, as it seems more
+ // helpful in practice. But perhaps the formatting could be improved
+ // to better address both concerns. For example, use relative file
+ // paths if they would be shorter, or rewrite file paths to contain
+ // "$GOROOT" (like objabi.AbsFile does) if tools can be taught how
+ // to reliably expand that again.
+
+ fmt.Printf("export data desync: package %q, section %v, index %v, offset %v\n", r.common.pkgPath, r.k, r.Idx, pos)
+
+ fmt.Printf("\nfound %v, written at:\n", mHave)
+ if len(writerPCs) == 0 {
+ fmt.Printf("\t[stack trace unavailable; recompile package %q with -d=syncframes]\n", r.common.pkgPath)
+ }
+ for _, pc := range writerPCs {
+ fmt.Printf("\t%s\n", r.common.StringIdx(r.rawReloc(RelocString, pc)))
+ }
+
+ fmt.Printf("\nexpected %v, reading at:\n", mWant)
+ var readerPCs [32]uintptr // TODO(mdempsky): Dynamically size?
+ n := runtime.Callers(2, readerPCs[:])
+ for _, pc := range fmtFrames(readerPCs[:n]...) {
+ fmt.Printf("\t%s\n", pc)
+ }
+
+ // We already printed a stack trace for the reader, so now we can
+ // simply exit. Printing a second one with panic or base.Fatalf
+ // would just be noise.
+ os.Exit(1)
+}
+
+// Bool decodes and returns a bool value from the element bitstream.
+func (r *Decoder) Bool() bool {
+ r.Sync(SyncBool)
+ x, err := r.Data.ReadByte()
+ r.checkErr(err)
+ assert(x < 2)
+ return x != 0
+}
+
+// Int64 decodes and returns an int64 value from the element bitstream.
+func (r *Decoder) Int64() int64 {
+ r.Sync(SyncInt64)
+ return r.rawVarint()
+}
+
+// Uint64 decodes and returns a uint64 value from the element bitstream.
+func (r *Decoder) Uint64() uint64 {
+ r.Sync(SyncUint64)
+ return r.rawUvarint()
+}
+
+// Len decodes and returns a non-negative int value from the element bitstream.
+func (r *Decoder) Len() int { x := r.Uint64(); v := int(x); assert(uint64(v) == x); return v }
+
+// Int decodes and returns an int value from the element bitstream.
+func (r *Decoder) Int() int { x := r.Int64(); v := int(x); assert(int64(v) == x); return v }
+
+// Uint decodes and returns a uint value from the element bitstream.
+func (r *Decoder) Uint() uint { x := r.Uint64(); v := uint(x); assert(uint64(v) == x); return v }
+
+// Code decodes a Code value from the element bitstream and returns
+// its ordinal value. It's the caller's responsibility to convert the
+// result to an appropriate Code type.
+//
+// TODO(mdempsky): Ideally this method would have signature "Code[T
+// Code] T" instead, but we don't allow generic methods and the
+// compiler can't depend on generics yet anyway.
+func (r *Decoder) Code(mark SyncMarker) int {
+ r.Sync(mark)
+ return r.Len()
+}
+
+// Reloc decodes a relocation of expected section k from the element
+// bitstream and returns an index to the referenced element.
+func (r *Decoder) Reloc(k RelocKind) Index {
+ r.Sync(SyncUseReloc)
+ return r.rawReloc(k, r.Len())
+}
+
+// String decodes and returns a string value from the element
+// bitstream.
+func (r *Decoder) String() string {
+ r.Sync(SyncString)
+ return r.common.StringIdx(r.Reloc(RelocString))
+}
+
+// Strings decodes and returns a variable-length slice of strings from
+// the element bitstream.
+func (r *Decoder) Strings() []string {
+ res := make([]string, r.Len())
+ for i := range res {
+ res[i] = r.String()
+ }
+ return res
+}
+
+// Value decodes and returns a constant.Value from the element
+// bitstream.
+func (r *Decoder) Value() constant.Value {
+ r.Sync(SyncValue)
+ isComplex := r.Bool()
+ val := r.scalar()
+ if isComplex {
+ val = constant.BinaryOp(val, token.ADD, constant.MakeImag(r.scalar()))
+ }
+ return val
+}
+
+func (r *Decoder) scalar() constant.Value {
+ switch tag := CodeVal(r.Code(SyncVal)); tag {
+ default:
+ panic(fmt.Errorf("unexpected scalar tag: %v", tag))
+
+ case ValBool:
+ return constant.MakeBool(r.Bool())
+ case ValString:
+ return constant.MakeString(r.String())
+ case ValInt64:
+ return constant.MakeInt64(r.Int64())
+ case ValBigInt:
+ return constant.Make(r.bigInt())
+ case ValBigRat:
+ num := r.bigInt()
+ denom := r.bigInt()
+ return constant.Make(new(big.Rat).SetFrac(num, denom))
+ case ValBigFloat:
+ return constant.Make(r.bigFloat())
+ }
+}
+
+func (r *Decoder) bigInt() *big.Int {
+ v := new(big.Int).SetBytes([]byte(r.String()))
+ if r.Bool() {
+ v.Neg(v)
+ }
+ return v
+}
+
+func (r *Decoder) bigFloat() *big.Float {
+ v := new(big.Float).SetPrec(512)
+ assert(v.UnmarshalText([]byte(r.String())) == nil)
+ return v
+}
+
+// @@@ Helpers
+
+// TODO(mdempsky): These should probably be removed. I think they're a
+// smell that the export data format is not yet quite right.
+
+// PeekPkgPath returns the package path for the specified package
+// index.
+func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
+ r := pr.NewDecoder(RelocPkg, idx, SyncPkgDef)
+ path := r.String()
+ if path == "" {
+ path = pr.pkgPath
+ }
+ return path
+}
+
+// PeekObj returns the package path, object name, and CodeObj for the
+// specified object index.
+func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) {
+ r := pr.NewDecoder(RelocName, idx, SyncObject1)
+ r.Sync(SyncSym)
+ r.Sync(SyncPkg)
+ path := pr.PeekPkgPath(r.Reloc(RelocPkg))
+ name := r.String()
+ assert(name != "")
+
+ tag := CodeObj(r.Code(SyncCodeObj))
+
+ return path, name, tag
+}
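
The zig-zag transform in rawUvarint/rawVarint above keeps small magnitudes small on the wire by moving the sign bit into bit 0 (0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...). Below is a standalone round-trip sketch of the same scheme, using encoding/binary directly rather than the vendored types; it is illustrative only and not part of the patch.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// zigZag encodes a signed value the way Encoder.rawVarint does:
// the sign bit moves into bit 0, so small magnitudes stay small.
func zigZag(x int64) uint64 {
	ux := uint64(x) << 1
	if x < 0 {
		ux = ^ux
	}
	return ux
}

// unZigZag reverses the transform, mirroring Decoder.rawVarint.
func unZigZag(ux uint64) int64 {
	x := int64(ux >> 1)
	if ux&1 != 0 {
		x = ^x
	}
	return x
}

func main() {
	var buf bytes.Buffer
	for _, v := range []int64{0, -1, 1, -64, 63, 1 << 40} {
		var tmp [binary.MaxVarintLen64]byte
		n := binary.PutUvarint(tmp[:], zigZag(v))
		buf.Write(tmp[:n])
	}
	for {
		ux, err := binary.ReadUvarint(&buf)
		if err != nil {
			break // io.EOF once the buffer is drained
		}
		fmt.Println(unZigZag(ux)) // prints the original signed values
	}
}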
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/doc.go b/vendor/golang.org/x/tools/go/internal/pkgbits/doc.go
new file mode 100644
index 00000000..c8a2796b
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/doc.go
@@ -0,0 +1,32 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package pkgbits implements low-level coding abstractions for
+// Unified IR's export data format.
+//
+// At a low-level, a package is a collection of bitstream elements.
+// Each element has a "kind" and a dense, non-negative index.
+// Elements can be randomly accessed given their kind and index.
+//
+// Individual elements are sequences of variable-length values (e.g.,
+// integers, booleans, strings, go/constant values, cross-references
+// to other elements). Package pkgbits provides APIs for encoding and
+// decoding these low-level values, but the details of mapping
+// higher-level Go constructs into elements is left to higher-level
+// abstractions.
+//
+// Elements may cross-reference each other with "relocations." For
+// example, an element representing a pointer type has a relocation
+// referring to the element type.
+//
+// Go constructs may be composed as a constellation of multiple
+// elements. For example, a declared function may have one element to
+// describe the object (e.g., its name, type, position), and a
+// separate element to describe its function body. This allows readers
+// some flexibility in efficiently seeking or re-reading data (e.g.,
+// inlining requires re-reading the function body for each inlined
+// call, without needing to re-read the object-level details).
+//
+// This is a copy of internal/pkgbits in the Go implementation.
+package pkgbits
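
To make the element/relocation model above concrete, here is a hedged sketch that writes one RelocMeta element whose bitstream holds a bool and a string; per Encoder.String later in this diff, the string is stored once in the strings section and the element only records a RelocString relocation to it. The package is internal to x/tools, so the import is shown for illustration only.

package main

import (
	"bytes"
	"fmt"

	"golang.org/x/tools/go/internal/pkgbits" // internal package; illustrative only
)

func main() {
	// Two stack frames per sync marker; a negative value would omit markers.
	pw := pkgbits.NewPkgEncoder(2)

	// One element in the meta section. Its bitstream holds a bool and a
	// string; the string lands in the strings section and the element
	// records a relocation to it.
	w := pw.NewEncoder(pkgbits.RelocMeta, pkgbits.SyncPublic)
	w.Bool(true)
	w.String("example.com/demo")
	idx := w.Flush()

	var buf bytes.Buffer
	fingerprint := pw.DumpTo(&buf)
	fmt.Printf("element %d, %d bytes, fingerprint %x\n", idx, buf.Len(), fingerprint)
}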
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go b/vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go
new file mode 100644
index 00000000..c50c838c
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go
@@ -0,0 +1,379 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import (
+ "bytes"
+ "crypto/md5"
+ "encoding/binary"
+ "go/constant"
+ "io"
+ "math/big"
+ "runtime"
+)
+
+// currentVersion is the current version number.
+//
+// - v0: initial prototype
+//
+// - v1: adds the flags uint32 word
+const currentVersion uint32 = 1
+
+// A PkgEncoder provides methods for encoding a package's Unified IR
+// export data.
+type PkgEncoder struct {
+ // elems holds the bitstream for previously encoded elements.
+ elems [numRelocs][]string
+
+ // stringsIdx maps previously encoded strings to their index within
+ // the RelocString section, to allow deduplication. That is,
+ // elems[RelocString][stringsIdx[s]] == s (if present).
+ stringsIdx map[string]Index
+
+ // syncFrames is the number of frames to write at each sync
+ // marker. A negative value means sync markers are omitted.
+ syncFrames int
+}
+
+// SyncMarkers reports whether pw uses sync markers.
+func (pw *PkgEncoder) SyncMarkers() bool { return pw.syncFrames >= 0 }
+
+// NewPkgEncoder returns an initialized PkgEncoder.
+//
+// syncFrames is the number of caller frames that should be serialized
+// at Sync points. Serializing additional frames results in larger
+// export data files, but can help diagnose desync errors in
+// higher-level Unified IR reader/writer code. If syncFrames is
+// negative, then sync markers are omitted entirely.
+func NewPkgEncoder(syncFrames int) PkgEncoder {
+ return PkgEncoder{
+ stringsIdx: make(map[string]Index),
+ syncFrames: syncFrames,
+ }
+}
+
+// DumpTo writes the package's encoded data to out0 and returns the
+// package fingerprint.
+func (pw *PkgEncoder) DumpTo(out0 io.Writer) (fingerprint [8]byte) {
+ h := md5.New()
+ out := io.MultiWriter(out0, h)
+
+ writeUint32 := func(x uint32) {
+ assert(binary.Write(out, binary.LittleEndian, x) == nil)
+ }
+
+ writeUint32(currentVersion)
+
+ var flags uint32
+ if pw.SyncMarkers() {
+ flags |= flagSyncMarkers
+ }
+ writeUint32(flags)
+
+ // Write elemEndsEnds.
+ var sum uint32
+ for _, elems := range &pw.elems {
+ sum += uint32(len(elems))
+ writeUint32(sum)
+ }
+
+ // Write elemEnds.
+ sum = 0
+ for _, elems := range &pw.elems {
+ for _, elem := range elems {
+ sum += uint32(len(elem))
+ writeUint32(sum)
+ }
+ }
+
+ // Write elemData.
+ for _, elems := range &pw.elems {
+ for _, elem := range elems {
+ _, err := io.WriteString(out, elem)
+ assert(err == nil)
+ }
+ }
+
+ // Write fingerprint.
+ copy(fingerprint[:], h.Sum(nil))
+ _, err := out0.Write(fingerprint[:])
+ assert(err == nil)
+
+ return
+}
+
+// StringIdx adds a string value to the strings section, if not
+// already present, and returns its index.
+func (pw *PkgEncoder) StringIdx(s string) Index {
+ if idx, ok := pw.stringsIdx[s]; ok {
+ assert(pw.elems[RelocString][idx] == s)
+ return idx
+ }
+
+ idx := Index(len(pw.elems[RelocString]))
+ pw.elems[RelocString] = append(pw.elems[RelocString], s)
+ pw.stringsIdx[s] = idx
+ return idx
+}
+
+// NewEncoder returns an Encoder for a new element within the given
+// section, and encodes the given SyncMarker as the start of the
+// element bitstream.
+func (pw *PkgEncoder) NewEncoder(k RelocKind, marker SyncMarker) Encoder {
+ e := pw.NewEncoderRaw(k)
+ e.Sync(marker)
+ return e
+}
+
+// NewEncoderRaw returns an Encoder for a new element within the given
+// section.
+//
+// Most callers should use NewEncoder instead.
+func (pw *PkgEncoder) NewEncoderRaw(k RelocKind) Encoder {
+ idx := Index(len(pw.elems[k]))
+ pw.elems[k] = append(pw.elems[k], "") // placeholder
+
+ return Encoder{
+ p: pw,
+ k: k,
+ Idx: idx,
+ }
+}
+
+// An Encoder provides methods for encoding an individual element's
+// bitstream data.
+type Encoder struct {
+ p *PkgEncoder
+
+ Relocs []RelocEnt
+ Data bytes.Buffer // accumulated element bitstream data
+
+ encodingRelocHeader bool
+
+ k RelocKind
+ Idx Index // index within relocation section
+}
+
+// Flush finalizes the element's bitstream and returns its Index.
+func (w *Encoder) Flush() Index {
+ var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved
+
+ // Back up the data so the relocations can be written at the front.
+ var tmp bytes.Buffer
+ io.Copy(&tmp, &w.Data)
+
+ // TODO(mdempsky): Consider writing these out separately so they're
+ // easier to strip, along with function bodies, so that we can prune
+ // down to just the data that's relevant to go/types.
+ if w.encodingRelocHeader {
+ panic("encodingRelocHeader already true; recursive flush?")
+ }
+ w.encodingRelocHeader = true
+ w.Sync(SyncRelocs)
+ w.Len(len(w.Relocs))
+ for _, rEnt := range w.Relocs {
+ w.Sync(SyncReloc)
+ w.Len(int(rEnt.Kind))
+ w.Len(int(rEnt.Idx))
+ }
+
+ io.Copy(&sb, &w.Data)
+ io.Copy(&sb, &tmp)
+ w.p.elems[w.k][w.Idx] = sb.String()
+
+ return w.Idx
+}
+
+func (w *Encoder) checkErr(err error) {
+ if err != nil {
+ errorf("unexpected encoding error: %v", err)
+ }
+}
+
+func (w *Encoder) rawUvarint(x uint64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutUvarint(buf[:], x)
+ _, err := w.Data.Write(buf[:n])
+ w.checkErr(err)
+}
+
+func (w *Encoder) rawVarint(x int64) {
+ // Zig-zag encode.
+ ux := uint64(x) << 1
+ if x < 0 {
+ ux = ^ux
+ }
+
+ w.rawUvarint(ux)
+}
+
+func (w *Encoder) rawReloc(r RelocKind, idx Index) int {
+ // TODO(mdempsky): Use map for lookup; this takes quadratic time.
+ for i, rEnt := range w.Relocs {
+ if rEnt.Kind == r && rEnt.Idx == idx {
+ return i
+ }
+ }
+
+ i := len(w.Relocs)
+ w.Relocs = append(w.Relocs, RelocEnt{r, idx})
+ return i
+}
+
+func (w *Encoder) Sync(m SyncMarker) {
+ if !w.p.SyncMarkers() {
+ return
+ }
+
+ // Writing out stack frame string references requires working
+ // relocations, but writing out the relocations themselves involves
+ // sync markers. To prevent infinite recursion, we simply trim the
+ // stack frame for sync markers within the relocation header.
+ var frames []string
+ if !w.encodingRelocHeader && w.p.syncFrames > 0 {
+ pcs := make([]uintptr, w.p.syncFrames)
+ n := runtime.Callers(2, pcs)
+ frames = fmtFrames(pcs[:n]...)
+ }
+
+ // TODO(mdempsky): Save space by writing out stack frames as a
+ // linked list so we can share common stack frames.
+ w.rawUvarint(uint64(m))
+ w.rawUvarint(uint64(len(frames)))
+ for _, frame := range frames {
+ w.rawUvarint(uint64(w.rawReloc(RelocString, w.p.StringIdx(frame))))
+ }
+}
+
+// Bool encodes and writes a bool value into the element bitstream,
+// and then returns the bool value.
+//
+// For simple, 2-alternative encodings, the idiomatic way to call Bool
+// is something like:
+//
+// if w.Bool(x != 0) {
+// // alternative #1
+// } else {
+// // alternative #2
+// }
+//
+// For multi-alternative encodings, use Code instead.
+func (w *Encoder) Bool(b bool) bool {
+ w.Sync(SyncBool)
+ var x byte
+ if b {
+ x = 1
+ }
+ err := w.Data.WriteByte(x)
+ w.checkErr(err)
+ return b
+}
+
+// Int64 encodes and writes an int64 value into the element bitstream.
+func (w *Encoder) Int64(x int64) {
+ w.Sync(SyncInt64)
+ w.rawVarint(x)
+}
+
+// Uint64 encodes and writes a uint64 value into the element bitstream.
+func (w *Encoder) Uint64(x uint64) {
+ w.Sync(SyncUint64)
+ w.rawUvarint(x)
+}
+
+// Len encodes and writes a non-negative int value into the element bitstream.
+func (w *Encoder) Len(x int) { assert(x >= 0); w.Uint64(uint64(x)) }
+
+// Int encodes and writes an int value into the element bitstream.
+func (w *Encoder) Int(x int) { w.Int64(int64(x)) }
+
+// Uint encodes and writes a uint value into the element bitstream.
+func (w *Encoder) Uint(x uint) { w.Uint64(uint64(x)) }
+
+// Reloc encodes and writes a relocation for the given (section,
+// index) pair into the element bitstream.
+//
+// Note: Only the index is formally written into the element
+// bitstream, so bitstream decoders must know from context which
+// section an encoded relocation refers to.
+func (w *Encoder) Reloc(r RelocKind, idx Index) {
+ w.Sync(SyncUseReloc)
+ w.Len(w.rawReloc(r, idx))
+}
+
+// Code encodes and writes a Code value into the element bitstream.
+func (w *Encoder) Code(c Code) {
+ w.Sync(c.Marker())
+ w.Len(c.Value())
+}
+
+// String encodes and writes a string value into the element
+// bitstream.
+//
+// Internally, strings are deduplicated by adding them to the strings
+// section (if not already present), and then writing a relocation
+// into the element bitstream.
+func (w *Encoder) String(s string) {
+ w.Sync(SyncString)
+ w.Reloc(RelocString, w.p.StringIdx(s))
+}
+
+// Strings encodes and writes a variable-length slice of strings into
+// the element bitstream.
+func (w *Encoder) Strings(ss []string) {
+ w.Len(len(ss))
+ for _, s := range ss {
+ w.String(s)
+ }
+}
+
+// Value encodes and writes a constant.Value into the element
+// bitstream.
+func (w *Encoder) Value(val constant.Value) {
+ w.Sync(SyncValue)
+ if w.Bool(val.Kind() == constant.Complex) {
+ w.scalar(constant.Real(val))
+ w.scalar(constant.Imag(val))
+ } else {
+ w.scalar(val)
+ }
+}
+
+func (w *Encoder) scalar(val constant.Value) {
+ switch v := constant.Val(val).(type) {
+ default:
+ errorf("unhandled %v (%v)", val, val.Kind())
+ case bool:
+ w.Code(ValBool)
+ w.Bool(v)
+ case string:
+ w.Code(ValString)
+ w.String(v)
+ case int64:
+ w.Code(ValInt64)
+ w.Int64(v)
+ case *big.Int:
+ w.Code(ValBigInt)
+ w.bigInt(v)
+ case *big.Rat:
+ w.Code(ValBigRat)
+ w.bigInt(v.Num())
+ w.bigInt(v.Denom())
+ case *big.Float:
+ w.Code(ValBigFloat)
+ w.bigFloat(v)
+ }
+}
+
+func (w *Encoder) bigInt(v *big.Int) {
+ b := v.Bytes()
+ w.String(string(b)) // TODO: More efficient encoding.
+ w.Bool(v.Sign() < 0)
+}
+
+func (w *Encoder) bigFloat(v *big.Float) {
+ b := v.Append(nil, 'p', -1)
+ w.String(string(b)) // TODO: More efficient encoding.
+}
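
The Bool idiom documented above only works when the reader mirrors the writer call for call; the sync markers exist to report any drift as an export data desync. Below is a sketch of a matched pair for an optional string, written as if it lived inside the pkgbits package (Encoder and Decoder are not importable from outside x/tools), under that assumption only.

package pkgbits

// writeOptString encodes either (true, value) or just (false).
func writeOptString(w *Encoder, s *string) {
	if w.Bool(s != nil) {
		w.String(*s)
	}
}

// readOptString must mirror writeOptString call for call; any mismatch is
// reported by Decoder.Sync as an export data desync.
func readOptString(r *Decoder) *string {
	if r.Bool() {
		s := r.String()
		return &s
	}
	return nil
}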
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/flags.go b/vendor/golang.org/x/tools/go/internal/pkgbits/flags.go
new file mode 100644
index 00000000..65422274
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/flags.go
@@ -0,0 +1,9 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+const (
+ flagSyncMarkers = 1 << iota // file format contains sync markers
+)
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go b/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go
new file mode 100644
index 00000000..5294f6a6
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.7
+// +build !go1.7
+
+// TODO(mdempsky): Remove after #44505 is resolved
+
+package pkgbits
+
+import "runtime"
+
+func walkFrames(pcs []uintptr, visit frameVisitor) {
+ for _, pc := range pcs {
+ fn := runtime.FuncForPC(pc)
+ file, line := fn.FileLine(pc)
+
+ visit(file, line, fn.Name(), pc-fn.Entry())
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go b/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go
new file mode 100644
index 00000000..2324ae7a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go
@@ -0,0 +1,28 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.7
+// +build go1.7
+
+package pkgbits
+
+import "runtime"
+
+// walkFrames calls visit for each call frame represented by pcs.
+//
+// pcs should be a slice of PCs, as returned by runtime.Callers.
+func walkFrames(pcs []uintptr, visit frameVisitor) {
+ if len(pcs) == 0 {
+ return
+ }
+
+ frames := runtime.CallersFrames(pcs)
+ for {
+ frame, more := frames.Next()
+ visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry)
+ if !more {
+ return
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go b/vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go
new file mode 100644
index 00000000..7a8f04ab
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go
@@ -0,0 +1,42 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+// A RelocKind indicates a particular section within a unified IR export.
+type RelocKind int
+
+// An Index represents a bitstream element index within a particular
+// section.
+type Index int
+
+// A RelocEnt (relocation entry) is an entry in an element's local
+// reference table.
+//
+// TODO(mdempsky): Rename this too.
+type RelocEnt struct {
+ Kind RelocKind
+ Idx Index
+}
+
+// Reserved indices within the meta relocation section.
+const (
+ PublicRootIdx Index = 0
+ PrivateRootIdx Index = 1
+)
+
+const (
+ RelocString RelocKind = iota
+ RelocMeta
+ RelocPosBase
+ RelocPkg
+ RelocName
+ RelocType
+ RelocObj
+ RelocObjExt
+ RelocObjDict
+ RelocBody
+
+ numRelocs = iota
+)
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/support.go b/vendor/golang.org/x/tools/go/internal/pkgbits/support.go
new file mode 100644
index 00000000..ad26d3b2
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/support.go
@@ -0,0 +1,17 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import "fmt"
+
+func assert(b bool) {
+ if !b {
+ panic("assertion failed")
+ }
+}
+
+func errorf(format string, args ...interface{}) {
+ panic(fmt.Errorf(format, args...))
+}
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/sync.go b/vendor/golang.org/x/tools/go/internal/pkgbits/sync.go
new file mode 100644
index 00000000..5bd51ef7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/sync.go
@@ -0,0 +1,113 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package pkgbits
+
+import (
+ "fmt"
+ "strings"
+)
+
+// fmtFrames formats a backtrace for reporting reader/writer desyncs.
+func fmtFrames(pcs ...uintptr) []string {
+ res := make([]string, 0, len(pcs))
+ walkFrames(pcs, func(file string, line int, name string, offset uintptr) {
+ // Trim package from function name. It's just redundant noise.
+ name = strings.TrimPrefix(name, "cmd/compile/internal/noder.")
+
+ res = append(res, fmt.Sprintf("%s:%v: %s +0x%v", file, line, name, offset))
+ })
+ return res
+}
+
+type frameVisitor func(file string, line int, name string, offset uintptr)
+
+// SyncMarker is an enum type that represents markers that may be
+// written to export data to ensure the reader and writer stay
+// synchronized.
+type SyncMarker int
+
+//go:generate stringer -type=SyncMarker -trimprefix=Sync
+
+const (
+ _ SyncMarker = iota
+
+ // Public markers (known to go/types importers).
+
+ // Low-level coding markers.
+ SyncEOF
+ SyncBool
+ SyncInt64
+ SyncUint64
+ SyncString
+ SyncValue
+ SyncVal
+ SyncRelocs
+ SyncReloc
+ SyncUseReloc
+
+ // Higher-level object and type markers.
+ SyncPublic
+ SyncPos
+ SyncPosBase
+ SyncObject
+ SyncObject1
+ SyncPkg
+ SyncPkgDef
+ SyncMethod
+ SyncType
+ SyncTypeIdx
+ SyncTypeParamNames
+ SyncSignature
+ SyncParams
+ SyncParam
+ SyncCodeObj
+ SyncSym
+ SyncLocalIdent
+ SyncSelector
+
+ // Private markers (only known to cmd/compile).
+ SyncPrivate
+
+ SyncFuncExt
+ SyncVarExt
+ SyncTypeExt
+ SyncPragma
+
+ SyncExprList
+ SyncExprs
+ SyncExpr
+ SyncExprType
+ SyncAssign
+ SyncOp
+ SyncFuncLit
+ SyncCompLit
+
+ SyncDecl
+ SyncFuncBody
+ SyncOpenScope
+ SyncCloseScope
+ SyncCloseAnotherScope
+ SyncDeclNames
+ SyncDeclName
+
+ SyncStmts
+ SyncBlockStmt
+ SyncIfStmt
+ SyncForStmt
+ SyncSwitchStmt
+ SyncRangeStmt
+ SyncCaseClause
+ SyncCommClause
+ SyncSelectStmt
+ SyncDecls
+ SyncLabeledStmt
+ SyncUseObjLocal
+ SyncAddLocal
+ SyncLinkname
+ SyncStmt1
+ SyncStmtsEnd
+ SyncLabel
+ SyncOptLabel
+)
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go b/vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go
new file mode 100644
index 00000000..4a5b0ca5
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go
@@ -0,0 +1,89 @@
+// Code generated by "stringer -type=SyncMarker -trimprefix=Sync"; DO NOT EDIT.
+
+package pkgbits
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[SyncEOF-1]
+ _ = x[SyncBool-2]
+ _ = x[SyncInt64-3]
+ _ = x[SyncUint64-4]
+ _ = x[SyncString-5]
+ _ = x[SyncValue-6]
+ _ = x[SyncVal-7]
+ _ = x[SyncRelocs-8]
+ _ = x[SyncReloc-9]
+ _ = x[SyncUseReloc-10]
+ _ = x[SyncPublic-11]
+ _ = x[SyncPos-12]
+ _ = x[SyncPosBase-13]
+ _ = x[SyncObject-14]
+ _ = x[SyncObject1-15]
+ _ = x[SyncPkg-16]
+ _ = x[SyncPkgDef-17]
+ _ = x[SyncMethod-18]
+ _ = x[SyncType-19]
+ _ = x[SyncTypeIdx-20]
+ _ = x[SyncTypeParamNames-21]
+ _ = x[SyncSignature-22]
+ _ = x[SyncParams-23]
+ _ = x[SyncParam-24]
+ _ = x[SyncCodeObj-25]
+ _ = x[SyncSym-26]
+ _ = x[SyncLocalIdent-27]
+ _ = x[SyncSelector-28]
+ _ = x[SyncPrivate-29]
+ _ = x[SyncFuncExt-30]
+ _ = x[SyncVarExt-31]
+ _ = x[SyncTypeExt-32]
+ _ = x[SyncPragma-33]
+ _ = x[SyncExprList-34]
+ _ = x[SyncExprs-35]
+ _ = x[SyncExpr-36]
+ _ = x[SyncExprType-37]
+ _ = x[SyncAssign-38]
+ _ = x[SyncOp-39]
+ _ = x[SyncFuncLit-40]
+ _ = x[SyncCompLit-41]
+ _ = x[SyncDecl-42]
+ _ = x[SyncFuncBody-43]
+ _ = x[SyncOpenScope-44]
+ _ = x[SyncCloseScope-45]
+ _ = x[SyncCloseAnotherScope-46]
+ _ = x[SyncDeclNames-47]
+ _ = x[SyncDeclName-48]
+ _ = x[SyncStmts-49]
+ _ = x[SyncBlockStmt-50]
+ _ = x[SyncIfStmt-51]
+ _ = x[SyncForStmt-52]
+ _ = x[SyncSwitchStmt-53]
+ _ = x[SyncRangeStmt-54]
+ _ = x[SyncCaseClause-55]
+ _ = x[SyncCommClause-56]
+ _ = x[SyncSelectStmt-57]
+ _ = x[SyncDecls-58]
+ _ = x[SyncLabeledStmt-59]
+ _ = x[SyncUseObjLocal-60]
+ _ = x[SyncAddLocal-61]
+ _ = x[SyncLinkname-62]
+ _ = x[SyncStmt1-63]
+ _ = x[SyncStmtsEnd-64]
+ _ = x[SyncLabel-65]
+ _ = x[SyncOptLabel-66]
+}
+
+const _SyncMarker_name = "EOFBoolInt64Uint64StringValueValRelocsRelocUseRelocPublicPosPosBaseObjectObject1PkgPkgDefMethodTypeTypeIdxTypeParamNamesSignatureParamsParamCodeObjSymLocalIdentSelectorPrivateFuncExtVarExtTypeExtPragmaExprListExprsExprExprTypeAssignOpFuncLitCompLitDeclFuncBodyOpenScopeCloseScopeCloseAnotherScopeDeclNamesDeclNameStmtsBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtUseObjLocalAddLocalLinknameStmt1StmtsEndLabelOptLabel"
+
+var _SyncMarker_index = [...]uint16{0, 3, 7, 12, 18, 24, 29, 32, 38, 43, 51, 57, 60, 67, 73, 80, 83, 89, 95, 99, 106, 120, 129, 135, 140, 147, 150, 160, 168, 175, 182, 188, 195, 201, 209, 214, 218, 226, 232, 234, 241, 248, 252, 260, 269, 279, 296, 305, 313, 318, 327, 333, 340, 350, 359, 369, 379, 389, 394, 405, 416, 424, 432, 437, 445, 450, 458}
+
+func (i SyncMarker) String() string {
+ i -= 1
+ if i < 0 || i >= SyncMarker(len(_SyncMarker_index)-1) {
+ return "SyncMarker(" + strconv.FormatInt(int64(i+1), 10) + ")"
+ }
+ return _SyncMarker_name[_SyncMarker_index[i]:_SyncMarker_index[i+1]]
+}
diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go
index 4bfe28a5..da4ab89f 100644
--- a/vendor/golang.org/x/tools/go/packages/doc.go
+++ b/vendor/golang.org/x/tools/go/packages/doc.go
@@ -67,7 +67,6 @@ Most tools should pass their command-line arguments (after any flags)
uninterpreted to the loader, so that the loader can interpret them
according to the conventions of the underlying build system.
See the Example function for typical usage.
-
*/
package packages // import "golang.org/x/tools/go/packages"
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
index 0e1e7f11..de881562 100644
--- a/vendor/golang.org/x/tools/go/packages/golist.go
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -26,7 +26,6 @@ import (
"golang.org/x/tools/go/internal/packagesdriver"
"golang.org/x/tools/internal/gocommand"
"golang.org/x/tools/internal/packagesinternal"
- "golang.org/x/xerrors"
)
// debug controls verbose logging.
@@ -303,11 +302,12 @@ func (state *golistState) runContainsQueries(response *responseDeduper, queries
}
dirResponse, err := state.createDriverResponse(pattern)
- // If there was an error loading the package, or the package is returned
- // with errors, try to load the file as an ad-hoc package.
+ // If there was an error loading the package, or no packages are returned,
+ // or the package is returned with errors, try to load the file as an
+ // ad-hoc package.
// Usually the error will appear in a returned package, but may not if we're
// in module mode and the ad-hoc is located outside a module.
- if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
+ if err != nil || len(dirResponse.Packages) == 0 || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error
if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
@@ -393,6 +393,8 @@ type jsonPackage struct {
CompiledGoFiles []string
IgnoredGoFiles []string
IgnoredOtherFiles []string
+ EmbedPatterns []string
+ EmbedFiles []string
CFiles []string
CgoFiles []string
CXXFiles []string
@@ -444,7 +446,11 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
// Run "go list" for complete
// information on the specified packages.
- buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
+ goVersion, err := state.getGoVersion()
+ if err != nil {
+ return nil, err
+ }
+ buf, err := state.invokeGo("list", golistargs(state.cfg, words, goVersion)...)
if err != nil {
return nil, err
}
@@ -565,6 +571,8 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ EmbedFiles: absJoin(p.Dir, p.EmbedFiles),
+ EmbedPatterns: absJoin(p.Dir, p.EmbedPatterns),
IgnoredFiles: absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles),
forTest: p.ForTest,
depsErrors: p.DepsErrors,
@@ -805,17 +813,83 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
return res
}
-func golistargs(cfg *Config, words []string) []string {
+func jsonFlag(cfg *Config, goVersion int) string {
+ if goVersion < 19 {
+ return "-json"
+ }
+ var fields []string
+ added := make(map[string]bool)
+ addFields := func(fs ...string) {
+ for _, f := range fs {
+ if !added[f] {
+ added[f] = true
+ fields = append(fields, f)
+ }
+ }
+ }
+ addFields("Name", "ImportPath", "Error") // These fields are always needed
+ if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 {
+ addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles",
+ "CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles",
+ "SwigFiles", "SwigCXXFiles", "SysoFiles")
+ if cfg.Tests {
+ addFields("TestGoFiles", "XTestGoFiles")
+ }
+ }
+ if cfg.Mode&NeedTypes != 0 {
+ // CompiledGoFiles seems to be required for the test case TestCgoNoSyntax,
+ // even when -compiled isn't passed in.
+ // TODO(#52435): Should we make the test ask for -compiled, or automatically
+ // request CompiledGoFiles in certain circumstances?
+ addFields("Dir", "CompiledGoFiles")
+ }
+ if cfg.Mode&NeedCompiledGoFiles != 0 {
+ addFields("Dir", "CompiledGoFiles", "Export")
+ }
+ if cfg.Mode&NeedImports != 0 {
+ // When imports are requested, DepOnly is used to distinguish between packages
+ // explicitly requested and transitive imports of those packages.
+ addFields("DepOnly", "Imports", "ImportMap")
+ if cfg.Tests {
+ addFields("TestImports", "XTestImports")
+ }
+ }
+ if cfg.Mode&NeedDeps != 0 {
+ addFields("DepOnly")
+ }
+ if usesExportData(cfg) {
+ // Request Dir in the unlikely case Export is not absolute.
+ addFields("Dir", "Export")
+ }
+ if cfg.Mode&needInternalForTest != 0 {
+ addFields("ForTest")
+ }
+ if cfg.Mode&needInternalDepsErrors != 0 {
+ addFields("DepsErrors")
+ }
+ if cfg.Mode&NeedModule != 0 {
+ addFields("Module")
+ }
+ if cfg.Mode&NeedEmbedFiles != 0 {
+ addFields("EmbedFiles")
+ }
+ if cfg.Mode&NeedEmbedPatterns != 0 {
+ addFields("EmbedPatterns")
+ }
+ return "-json=" + strings.Join(fields, ",")
+}
+
+func golistargs(cfg *Config, words []string, goVersion int) []string {
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
fullargs := []string{
- "-e", "-json",
+ "-e", jsonFlag(cfg, goVersion),
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
// go list doesn't let you pass -test and -find together,
// probably because you'd just get the TestMain.
- fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0),
+ fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0 && !usesExportData(cfg)),
}
fullargs = append(fullargs, cfg.BuildFlags...)
fullargs = append(fullargs, "--")
@@ -879,7 +953,7 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer,
if !ok {
// Catastrophic error:
// - context cancellation
- return nil, xerrors.Errorf("couldn't run 'go': %w", err)
+ return nil, fmt.Errorf("couldn't run 'go': %w", err)
}
// Old go version?
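
jsonFlag above trims `go list -json` output to just the fields the requested LoadMode needs, using the `-json=Field1,Field2,...` form available only on Go 1.19+ (older toolchains fall back to plain `-json`). Below is a standalone sketch of consuming such a field-limited stream, independent of go/packages; the field list and the "./..." pattern are examples only.

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"log"
	"os/exec"
)

// listPackage mirrors only the fields we ask `go list` to emit.
type listPackage struct {
	Name       string
	ImportPath string
	GoFiles    []string
}

func main() {
	// -json=<fields> requires Go 1.19+; older toolchains accept only plain -json.
	cmd := exec.Command("go", "list", "-e", "-json=Name,ImportPath,GoFiles", "./...")
	out, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}
	// go list prints a stream of JSON objects, one per matched package.
	dec := json.NewDecoder(out)
	for {
		var p listPackage
		if err := dec.Decode(&p); err == io.EOF {
			break
		} else if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s (%s): %d Go files\n", p.ImportPath, p.Name, len(p.GoFiles))
	}
	if err := cmd.Wait(); err != nil {
		log.Fatal(err)
	}
}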
diff --git a/vendor/golang.org/x/tools/go/packages/loadmode_string.go b/vendor/golang.org/x/tools/go/packages/loadmode_string.go
index 7ea37e7e..5c080d21 100644
--- a/vendor/golang.org/x/tools/go/packages/loadmode_string.go
+++ b/vendor/golang.org/x/tools/go/packages/loadmode_string.go
@@ -15,7 +15,7 @@ var allModes = []LoadMode{
NeedCompiledGoFiles,
NeedImports,
NeedDeps,
- NeedExportsFile,
+ NeedExportFile,
NeedTypes,
NeedSyntax,
NeedTypesInfo,
@@ -28,7 +28,7 @@ var modeStrings = []string{
"NeedCompiledGoFiles",
"NeedImports",
"NeedDeps",
- "NeedExportsFile",
+ "NeedExportFile",
"NeedTypes",
"NeedSyntax",
"NeedTypesInfo",
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
index 1b5424e7..a93dc6ad 100644
--- a/vendor/golang.org/x/tools/go/packages/packages.go
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -39,9 +39,6 @@ import (
// Load may return more information than requested.
type LoadMode int
-// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
-// NeedExportFile to make it consistent with the Package field it's adding.
-
const (
// NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota
@@ -59,8 +56,8 @@ const (
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
NeedDeps
- // NeedExportsFile adds ExportFile.
- NeedExportsFile
+ // NeedExportFile adds ExportFile.
+ NeedExportFile
// NeedTypes adds Types, Fset, and IllTyped.
NeedTypes
@@ -74,12 +71,25 @@ const (
// NeedTypesSizes adds TypesSizes.
NeedTypesSizes
+ // needInternalDepsErrors adds the internal deps errors field for use by gopls.
+ needInternalDepsErrors
+
+ // needInternalForTest adds the internal forTest field.
+ // Tests must also be set on the context for this field to be populated.
+ needInternalForTest
+
// typecheckCgo enables full support for type checking cgo. Requires Go 1.15+.
// Modifies CompiledGoFiles and Types, and has no effect on its own.
typecheckCgo
// NeedModule adds Module.
NeedModule
+
+ // NeedEmbedFiles adds EmbedFiles.
+ NeedEmbedFiles
+
+ // NeedEmbedPatterns adds EmbedPatterns.
+ NeedEmbedPatterns
)
const (
@@ -102,6 +112,9 @@ const (
// Deprecated: LoadAllSyntax exists for historical compatibility
// and should not be used. Please directly specify the needed fields using the Need values.
LoadAllSyntax = LoadSyntax | NeedDeps
+
+ // Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile.
+ NeedExportsFile = NeedExportFile
)
// A Config specifies details about how packages should be loaded.
@@ -296,6 +309,14 @@ type Package struct {
// including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
OtherFiles []string
+ // EmbedFiles lists the absolute file paths of the package's files
+ // embedded with go:embed.
+ EmbedFiles []string
+
+ // EmbedPatterns lists the absolute file patterns of the package's
+ // files embedded with go:embed.
+ EmbedPatterns []string
+
// IgnoredFiles lists source files that are not part of the package
// using the current build configuration but that might be part of
// the package using other build configurations.
@@ -389,6 +410,8 @@ func init() {
config.(*Config).modFlag = value
}
packagesinternal.TypecheckCgo = int(typecheckCgo)
+ packagesinternal.DepsErrors = int(needInternalDepsErrors)
+ packagesinternal.ForTest = int(needInternalForTest)
}
// An Error describes a problem with a package's metadata, syntax, or types.
@@ -431,6 +454,8 @@ type flatPackage struct {
GoFiles []string `json:",omitempty"`
CompiledGoFiles []string `json:",omitempty"`
OtherFiles []string `json:",omitempty"`
+ EmbedFiles []string `json:",omitempty"`
+ EmbedPatterns []string `json:",omitempty"`
IgnoredFiles []string `json:",omitempty"`
ExportFile string `json:",omitempty"`
Imports map[string]string `json:",omitempty"`
@@ -454,6 +479,8 @@ func (p *Package) MarshalJSON() ([]byte, error) {
GoFiles: p.GoFiles,
CompiledGoFiles: p.CompiledGoFiles,
OtherFiles: p.OtherFiles,
+ EmbedFiles: p.EmbedFiles,
+ EmbedPatterns: p.EmbedPatterns,
IgnoredFiles: p.IgnoredFiles,
ExportFile: p.ExportFile,
}
@@ -481,6 +508,8 @@ func (p *Package) UnmarshalJSON(b []byte) error {
GoFiles: flat.GoFiles,
CompiledGoFiles: flat.CompiledGoFiles,
OtherFiles: flat.OtherFiles,
+ EmbedFiles: flat.EmbedFiles,
+ EmbedPatterns: flat.EmbedPatterns,
ExportFile: flat.ExportFile,
}
if len(flat.Imports) > 0 {
@@ -614,7 +643,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
// ... or if we need types and the exportData is invalid. We fall back to (incompletely)
// typechecking packages from source if they fail to compile.
- (ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
+ (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
lpkg := &loaderPackage{
Package: pkg,
needtypes: needtypes,
@@ -752,13 +781,19 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
ld.pkgs[i].OtherFiles = nil
ld.pkgs[i].IgnoredFiles = nil
}
+ if ld.requestedMode&NeedEmbedFiles == 0 {
+ ld.pkgs[i].EmbedFiles = nil
+ }
+ if ld.requestedMode&NeedEmbedPatterns == 0 {
+ ld.pkgs[i].EmbedPatterns = nil
+ }
if ld.requestedMode&NeedCompiledGoFiles == 0 {
ld.pkgs[i].CompiledGoFiles = nil
}
if ld.requestedMode&NeedImports == 0 {
ld.pkgs[i].Imports = nil
}
- if ld.requestedMode&NeedExportsFile == 0 {
+ if ld.requestedMode&NeedExportFile == 0 {
ld.pkgs[i].ExportFile = ""
}
if ld.requestedMode&NeedTypes == 0 {
@@ -1053,7 +1088,6 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) {
//
// Because files are scanned in parallel, the token.Pos
// positions of the resulting ast.Files are not ordered.
-//
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
var wg sync.WaitGroup
n := len(filenames)
@@ -1097,7 +1131,6 @@ func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
// sameFile returns true if x and y have the same basename and denote
// the same file.
-//
func sameFile(x, y string) bool {
if x == y {
// It could be the case that y doesn't exist.
@@ -1210,8 +1243,13 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
if err != nil {
return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
}
+ if _, ok := view["go.shape"]; ok {
+ // Account for the pseudopackage "go.shape" that gets
+ // created by generic code.
+ viewLen++
+ }
if viewLen != len(view) {
- log.Fatalf("Unexpected package creation during export data loading")
+ log.Panicf("golang.org/x/tools/go/packages: unexpected new packages during load of %s", lpkg.PkgPath)
}
lpkg.Types = tpkg
@@ -1222,17 +1260,8 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// impliedLoadMode returns loadMode with its dependencies.
func impliedLoadMode(loadMode LoadMode) LoadMode {
- if loadMode&NeedTypesInfo != 0 && loadMode&NeedImports == 0 {
- // If NeedTypesInfo, go/packages needs to do typechecking itself so it can
- // associate type info with the AST. To do so, we need the export data
- // for dependencies, which means we need to ask for the direct dependencies.
- // NeedImports is used to ask for the direct dependencies.
- loadMode |= NeedImports
- }
-
- if loadMode&NeedDeps != 0 && loadMode&NeedImports == 0 {
- // With NeedDeps we need to load at least direct dependencies.
- // NeedImports is used to ask for the direct dependencies.
+ if loadMode&(NeedDeps|NeedTypes|NeedTypesInfo) != 0 {
+ // All these things require knowing the import graph.
loadMode |= NeedImports
}
@@ -1240,5 +1269,5 @@ func impliedLoadMode(loadMode LoadMode) LoadMode {
}
func usesExportData(cfg *Config) bool {
- return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
+ return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
}
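
The new NeedEmbedFiles and NeedEmbedPatterns mode bits expose go:embed information through the public go/packages API. Here is a minimal sketch of requesting them; the "./..." pattern is an example only.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		// NeedEmbedFiles/NeedEmbedPatterns populate the new fields;
		// NeedName gives us PkgPath for printing.
		Mode: packages.NeedName | packages.NeedEmbedFiles | packages.NeedEmbedPatterns,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}
	for _, pkg := range pkgs {
		for _, pat := range pkg.EmbedPatterns {
			fmt.Printf("%s embeds pattern %q\n", pkg.PkgPath, pat)
		}
		for _, f := range pkg.EmbedFiles {
			fmt.Printf("%s embeds file %s\n", pkg.PkgPath, f)
		}
	}
}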
diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
index f7533683..67256dc3 100644
--- a/vendor/golang.org/x/tools/internal/gocommand/invoke.go
+++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
@@ -264,8 +264,10 @@ func cmdDebugStr(cmd *exec.Cmd) string {
env := make(map[string]string)
for _, kv := range cmd.Env {
split := strings.SplitN(kv, "=", 2)
- k, v := split[0], split[1]
- env[k] = v
+ if len(split) == 2 {
+ k, v := split[0], split[1]
+ env[k] = v
+ }
}
var args []string
diff --git a/vendor/golang.org/x/tools/internal/packagesinternal/packages.go b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
index 9702094c..d9950b1f 100644
--- a/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
+++ b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
@@ -23,6 +23,8 @@ var GetGoCmdRunner = func(config interface{}) *gocommand.Runner { return nil }
var SetGoCmdRunner = func(config interface{}, runner *gocommand.Runner) {}
var TypecheckCgo int
+var DepsErrors int // must be set as a LoadMode to call GetDepsErrors
+var ForTest int // must be set as a LoadMode to call GetForTest
var SetModFlag = func(config interface{}, value string) {}
var SetModFile = func(config interface{}, value string) {}
diff --git a/vendor/golang.org/x/tools/internal/typeparams/common.go b/vendor/golang.org/x/tools/internal/typeparams/common.go
index ab6b30b8..25a1426d 100644
--- a/vendor/golang.org/x/tools/internal/typeparams/common.go
+++ b/vendor/golang.org/x/tools/internal/typeparams/common.go
@@ -16,11 +16,10 @@
// Additionally, this package contains common utilities for working with the
// new generic constructs, to supplement the standard library APIs. Notably,
// the StructuralTerms API computes a minimal representation of the structural
-// restrictions on a type parameter. In the future, this API may be available
-// from go/types.
+// restrictions on a type parameter.
//
-// See the example/README.md for a more detailed guide on how to update tools
-// to support generics.
+// An external version of these APIs is available in the
+// golang.org/x/exp/typeparams module.
package typeparams
import (
@@ -121,15 +120,15 @@ func OriginMethod(fn *types.Func) *types.Func {
//
// For example, consider the following type declarations:
//
-// type Interface[T any] interface {
-// Accept(T)
-// }
+// type Interface[T any] interface {
+// Accept(T)
+// }
//
-// type Container[T any] struct {
-// Element T
-// }
+// type Container[T any] struct {
+// Element T
+// }
//
-// func (c Container[T]) Accept(t T) { c.Element = t }
+// func (c Container[T]) Accept(t T) { c.Element = t }
//
// In this case, GenericAssignableTo reports that instantiations of Container
// are assignable to the corresponding instantiation of Interface.
diff --git a/vendor/golang.org/x/tools/internal/typeparams/coretype.go b/vendor/golang.org/x/tools/internal/typeparams/coretype.go
new file mode 100644
index 00000000..993135ec
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/coretype.go
@@ -0,0 +1,122 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+ "go/types"
+)
+
+// CoreType returns the core type of T or nil if T does not have a core type.
+//
+// See https://go.dev/ref/spec#Core_types for the definition of a core type.
+func CoreType(T types.Type) types.Type {
+ U := T.Underlying()
+ if _, ok := U.(*types.Interface); !ok {
+ return U // for non-interface types, the core type is just the underlying type.
+ }
+
+ terms, err := _NormalTerms(U)
+ if len(terms) == 0 || err != nil {
+ // len(terms) == 0 => the interface has an empty type set.
+ // err != nil => U is invalid, exceeds complexity bounds, or has an empty type set.
+ return nil // no core type.
+ }
+
+ U = terms[0].Type().Underlying()
+ var identical int // i in [0,identical) => Identical(U, terms[i].Type().Underlying())
+ for identical = 1; identical < len(terms); identical++ {
+ if !types.Identical(U, terms[identical].Type().Underlying()) {
+ break
+ }
+ }
+
+ if identical == len(terms) {
+ // https://go.dev/ref/spec#Core_types
+ // "There is a single type U which is the underlying type of all types in the type set of T"
+ return U
+ }
+ ch, ok := U.(*types.Chan)
+ if !ok {
+ return nil // no core type as identical < len(terms) and U is not a channel.
+ }
+ // https://go.dev/ref/spec#Core_types
+ // "the type chan E if T contains only bidirectional channels, or the type chan<- E or
+ // <-chan E depending on the direction of the directional channels present."
+ for chans := identical; chans < len(terms); chans++ {
+ curr, ok := terms[chans].Type().Underlying().(*types.Chan)
+ if !ok {
+ return nil
+ }
+ if !types.Identical(ch.Elem(), curr.Elem()) {
+ return nil // channel elements are not identical.
+ }
+ if ch.Dir() == types.SendRecv {
+ // ch is bidirectional. We can safely always use curr's direction.
+ ch = curr
+ } else if curr.Dir() != types.SendRecv && ch.Dir() != curr.Dir() {
+ // ch and curr are not bidirectional and not the same direction.
+ return nil
+ }
+ }
+ return ch
+}
+
+// _NormalTerms returns a slice of terms representing the normalized structural
+// type restrictions of a type, if any.
+//
+// For all types other than *types.TypeParam, *types.Interface, and
+// *types.Union, this is just a single term with Tilde() == false and
+// Type() == typ. For *types.TypeParam, *types.Interface, and *types.Union, see
+// below.
+//
+// Structural type restrictions of a type parameter are created via
+// non-interface types embedded in its constraint interface (directly, or via a
+// chain of interface embeddings). For example, in the declaration
+//
+//	type T[P interface{~int; m()}] int
+//
+// the structural restriction of the type parameter P is ~int.
+//
+// With interface embedding and unions, the specification of structural type
+// restrictions may be arbitrarily complex. For example, consider the
+// following:
+//
+// type A interface{ ~string|~[]byte }
+//
+// type B interface{ int|string }
+//
+// type C interface { ~string|~int }
+//
+// type T[P interface{ A|B; C }] int
+//
+// In this example, the structural type restriction of P is ~string|int: A|B
+// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
+// which when intersected with C (~string|~int) yields ~string|int.
+//
+// _NormalTerms computes these expansions and reductions, producing a
+// "normalized" form of the embeddings. A structural restriction is normalized
+// if it is a single union containing no interface terms, and is minimal in the
+// sense that removing any term changes the set of types satisfying the
+// constraint. It is left to the reader to verify that, modulo sorting, there
+// is exactly one such normalized form.
+//
+// Because the minimal representation always takes this form, _NormalTerms
+// returns a slice of tilde terms corresponding to the terms of the union in
+// the normalized structural restriction. An error is returned if the type is
+// invalid, exceeds complexity bounds, or has an empty type set. In the latter
+// case, _NormalTerms returns ErrEmptyTypeSet.
+//
+// _NormalTerms makes no guarantees about the order of terms, except that it
+// is deterministic.
+func _NormalTerms(typ types.Type) ([]*Term, error) {
+ switch typ := typ.(type) {
+ case *TypeParam:
+ return StructuralTerms(typ)
+ case *Union:
+ return UnionTermSet(typ)
+ case *types.Interface:
+ return InterfaceTermSet(typ)
+ default:
+ return []*Term{NewTerm(false, typ)}, nil
+ }
+}
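
CoreType's channel case merges channel directions across the terms of the type set: a bidirectional channel defers to any directional one, and two conflicting directions mean there is no core type. Below is a standalone sketch of just that rule, where mergeChanDirs is a hypothetical helper mirroring the loop above.

package main

import (
	"fmt"
	"go/types"
)

// mergeChanDirs mirrors the channel loop in CoreType above: it returns the
// single channel type shared by all terms, or nil if the element types or
// directions conflict.
func mergeChanDirs(chans []*types.Chan) *types.Chan {
	if len(chans) == 0 {
		return nil
	}
	ch := chans[0]
	for _, curr := range chans[1:] {
		if !types.Identical(ch.Elem(), curr.Elem()) {
			return nil // element types differ
		}
		if ch.Dir() == types.SendRecv {
			ch = curr // bidirectional so far: adopt curr's direction
		} else if curr.Dir() != types.SendRecv && ch.Dir() != curr.Dir() {
			return nil // two different directions: no core type
		}
	}
	return ch
}

func main() {
	elem := types.Typ[types.Int]
	bidi := types.NewChan(types.SendRecv, elem)
	send := types.NewChan(types.SendOnly, elem)
	recv := types.NewChan(types.RecvOnly, elem)

	fmt.Println(mergeChanDirs([]*types.Chan{bidi, send})) // chan<- int
	fmt.Println(mergeChanDirs([]*types.Chan{send, recv})) // <nil>
}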
diff --git a/vendor/golang.org/x/tools/internal/typeparams/normalize.go b/vendor/golang.org/x/tools/internal/typeparams/normalize.go
index 090f142a..9c631b65 100644
--- a/vendor/golang.org/x/tools/internal/typeparams/normalize.go
+++ b/vendor/golang.org/x/tools/internal/typeparams/normalize.go
@@ -24,20 +24,22 @@ var ErrEmptyTypeSet = errors.New("empty type set")
// Structural type restrictions of a type parameter are created via
// non-interface types embedded in its constraint interface (directly, or via a
// chain of interface embeddings). For example, in the declaration
-// type T[P interface{~int; m()}] int
+//
+// type T[P interface{~int; m()}] int
+//
// the structural restriction of the type parameter P is ~int.
//
// With interface embedding and unions, the specification of structural type
// restrictions may be arbitrarily complex. For example, consider the
// following:
//
-// type A interface{ ~string|~[]byte }
+// type A interface{ ~string|~[]byte }
//
-// type B interface{ int|string }
+// type B interface{ int|string }
//
-// type C interface { ~string|~int }
+// type C interface { ~string|~int }
//
-// type T[P interface{ A|B; C }] int
+// type T[P interface{ A|B; C }] int
//
// In this example, the structural type restriction of P is ~string|int: A|B
// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
diff --git a/vendor/golang.org/x/tools/internal/typeparams/termlist.go b/vendor/golang.org/x/tools/internal/typeparams/termlist.go
index 10857d50..933106a2 100644
--- a/vendor/golang.org/x/tools/internal/typeparams/termlist.go
+++ b/vendor/golang.org/x/tools/internal/typeparams/termlist.go
@@ -97,15 +97,6 @@ func (xl termlist) norm() termlist {
return rl
}
-// If the type set represented by xl is specified by a single (non-𝓤) term,
-// structuralType returns that type. Otherwise it returns nil.
-func (xl termlist) structuralType() types.Type {
- if nl := xl.norm(); len(nl) == 1 {
- return nl[0].typ // if nl.isAll() then typ is nil, which is ok
- }
- return nil
-}
-
// union returns the union xl ∪ yl.
func (xl termlist) union(yl termlist) termlist {
return append(xl, yl...).norm()