author     Wim <wim@42.be>               2022-11-27 00:42:16 +0100
committer  GitHub <noreply@github.com>   2022-11-27 00:42:16 +0100
commit     4fd0a7672777f0ed15692ae2ba47838208537558 (patch)
tree       b119834a8b9ee78aa8f1b2ad05efa7da50516cbf /vendor/golang.org/x/tools/go/internal/gcimporter
parent     6da9d567dc9195e9a5211f23a6795a41f56a1bfc (diff)
Update dependencies (#1929)
Diffstat (limited to 'vendor/golang.org/x/tools/go/internal/gcimporter')
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go       20
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go   161
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go        5
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go       70
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go    10
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go   10
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go    19
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go   612
8 files changed, 793 insertions, 114 deletions
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
index 0a3cdb9a..196cb3f9 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
@@ -35,16 +35,18 @@ import (
const debugFormat = false // default: false
// Current export format version. Increase with each format change.
+//
// Note: The latest binary (non-indexed) export format is at version 6.
-// This exporter is still at level 4, but it doesn't matter since
-// the binary importer can handle older versions just fine.
-// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
-// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE
-// 4: type name objects support type aliases, uses aliasTag
-// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
-// 2: removed unused bool in ODCL export (compiler only)
-// 1: header format change (more regular), export package for _ struct fields
-// 0: Go1.7 encoding
+// This exporter is still at level 4, but it doesn't matter since
+// the binary importer can handle older versions just fine.
+//
+// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
+// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
+// 4: type name objects support type aliases, uses aliasTag
+// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
+// 2: removed unused bool in ODCL export (compiler only)
+// 1: header format change (more regular), export package for _ struct fields
+// 0: Go1.7 encoding
const exportVersion = 4
// trackAllTypes enables cycle tracking for all types, not just named
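The bexport.go hunk above is purely a comment reflow: since Go 1.19 gofmt normalizes doc comments, and list-like material goes on indented lines so godoc keeps it verbatim (the hunk also corrects the IMPLEMEMTED typo). A minimal sketch of that convention, with an illustrative constant that is not part of this package:

// exampleVersion documents a format history in the style gofmt expects
// from Go 1.19 on: a bare "//" separator, then indented entries that are
// rendered verbatim by godoc. (Illustrative only.)
//
//	2: second revision of the hypothetical format
//	1: first revision
//	0: initial encoding
const exampleVersion = 2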
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
index 3ab66830..e96c3960 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
@@ -45,7 +45,6 @@ var pkgExts = [...]string{".a", ".o"}
// the build.Default build.Context). A relative srcDir is interpreted
// relative to the current working directory.
// If no file was found, an empty filename is returned.
-//
func FindPkg(path, srcDir string) (filename, id string) {
if path == "" {
return
@@ -109,7 +108,6 @@ func FindPkg(path, srcDir string) (filename, id string) {
// If packages[id] contains the completely imported package, that package
// can be used directly, and there is no need to call this function (but
// there is also no harm but for extra time used).
-//
func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
// support for parser error handling
defer func() {
@@ -133,7 +131,6 @@ func ImportData(packages map[string]*types.Package, filename, id string, data io
// Import imports a gc-generated package given its import path and srcDir, adds
// the corresponding package object to the packages map, and returns the object.
// The packages map must contain all packages already imported.
-//
func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
var rc io.ReadCloser
var filename, id string
@@ -184,8 +181,9 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
defer rc.Close()
var hdr string
+ var size int64
buf := bufio.NewReader(rc)
- if hdr, _, err = FindExportData(buf); err != nil {
+ if hdr, size, err = FindExportData(buf); err != nil {
return
}
@@ -213,10 +211,27 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
- if len(data) > 0 && data[0] == 'i' {
- _, pkg, err = IImportData(fset, packages, data[1:], id)
- } else {
- _, pkg, err = BImportData(fset, packages, data, id)
+ if len(data) > 0 {
+ switch data[0] {
+ case 'i':
+ _, pkg, err := IImportData(fset, packages, data[1:], id)
+ return pkg, err
+
+ case 'v', 'c', 'd':
+ _, pkg, err := BImportData(fset, packages, data, id)
+ return pkg, err
+
+ case 'u':
+ _, pkg, err := UImportData(fset, packages, data[1:size], id)
+ return pkg, err
+
+ default:
+ l := len(data)
+ if l > 10 {
+ l = 10
+ }
+ return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
+ }
}
default:
@@ -348,8 +363,9 @@ func (p *parser) expectKeyword(keyword string) {
// ----------------------------------------------------------------------------
// Qualified and unqualified names
-// PackageId = string_lit .
+// parsePackageID parses a PackageId:
//
+// PackageId = string_lit .
func (p *parser) parsePackageID() string {
id, err := strconv.Unquote(p.expect(scanner.String))
if err != nil {
@@ -363,13 +379,16 @@ func (p *parser) parsePackageID() string {
return id
}
-// PackageName = ident .
+// parsePackageName parses a PackageName:
//
+// PackageName = ident .
func (p *parser) parsePackageName() string {
return p.expect(scanner.Ident)
}
-// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
+// parseDotIdent parses a dotIdentifier:
+//
+// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
func (p *parser) parseDotIdent() string {
ident := ""
if p.tok != scanner.Int {
@@ -386,8 +405,9 @@ func (p *parser) parseDotIdent() string {
return ident
}
-// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
+// parseQualifiedName parses a QualifiedName:
//
+// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
func (p *parser) parseQualifiedName() (id, name string) {
p.expect('@')
id = p.parsePackageID()
@@ -410,7 +430,6 @@ func (p *parser) parseQualifiedName() (id, name string) {
// id identifies a package, usually by a canonical package path like
// "encoding/json" but possibly by a non-canonical import path like
// "./json".
-//
func (p *parser) getPkg(id, name string) *types.Package {
// package unsafe is not in the packages maps - handle explicitly
if id == "unsafe" {
@@ -446,7 +465,6 @@ func (p *parser) getPkg(id, name string) *types.Package {
// parseExportedName is like parseQualifiedName, but
// the package id is resolved to an imported *types.Package.
-//
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
id, name := p.parseQualifiedName()
pkg = p.getPkg(id, "")
@@ -456,8 +474,9 @@ func (p *parser) parseExportedName() (pkg *types.Package, name string) {
// ----------------------------------------------------------------------------
// Types
-// BasicType = identifier .
+// parseBasicType parses a BasicType:
//
+// BasicType = identifier .
func (p *parser) parseBasicType() types.Type {
id := p.expect(scanner.Ident)
obj := types.Universe.Lookup(id)
@@ -468,8 +487,9 @@ func (p *parser) parseBasicType() types.Type {
return nil
}
-// ArrayType = "[" int_lit "]" Type .
+// parseArrayType parses an ArrayType:
//
+// ArrayType = "[" int_lit "]" Type .
func (p *parser) parseArrayType(parent *types.Package) types.Type {
// "[" already consumed and lookahead known not to be "]"
lit := p.expect(scanner.Int)
@@ -482,8 +502,9 @@ func (p *parser) parseArrayType(parent *types.Package) types.Type {
return types.NewArray(elem, n)
}
-// MapType = "map" "[" Type "]" Type .
+// parseMapType parses a MapType:
//
+// MapType = "map" "[" Type "]" Type .
func (p *parser) parseMapType(parent *types.Package) types.Type {
p.expectKeyword("map")
p.expect('[')
@@ -493,7 +514,9 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
return types.NewMap(key, elem)
}
-// Name = identifier | "?" | QualifiedName .
+// parseName parses a Name:
+//
+// Name = identifier | "?" | QualifiedName .
//
// For unqualified and anonymous names, the returned package is the parent
// package unless parent == nil, in which case the returned package is the
@@ -505,7 +528,6 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
// it doesn't exist yet) unless materializePkg is set (which creates an
// unnamed package with valid package path). In the latter case, a
// subsequent import clause is expected to provide a name for the package.
-//
func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
pkg = parent
if pkg == nil {
@@ -539,8 +561,9 @@ func deref(typ types.Type) types.Type {
return typ
}
-// Field = Name Type [ string_lit ] .
+// parseField parses a Field:
//
+// Field = Name Type [ string_lit ] .
func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
pkg, name := p.parseName(parent, true)
@@ -583,9 +606,10 @@ func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
}
-// StructType = "struct" "{" [ FieldList ] "}" .
-// FieldList = Field { ";" Field } .
+// parseStructType parses a StructType:
//
+// StructType = "struct" "{" [ FieldList ] "}" .
+// FieldList = Field { ";" Field } .
func (p *parser) parseStructType(parent *types.Package) types.Type {
var fields []*types.Var
var tags []string
@@ -610,8 +634,9 @@ func (p *parser) parseStructType(parent *types.Package) types.Type {
return types.NewStruct(fields, tags)
}
-// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
+// parseParameter parses a Parameter:
//
+// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
_, name := p.parseName(nil, false)
// remove gc-specific parameter numbering
@@ -635,9 +660,10 @@ func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
return
}
-// Parameters = "(" [ ParameterList ] ")" .
-// ParameterList = { Parameter "," } Parameter .
+// parseParameters parses a Parameters:
//
+// Parameters = "(" [ ParameterList ] ")" .
+// ParameterList = { Parameter "," } Parameter .
func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
p.expect('(')
for p.tok != ')' && p.tok != scanner.EOF {
@@ -658,9 +684,10 @@ func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
return
}
-// Signature = Parameters [ Result ] .
-// Result = Type | Parameters .
+// parseSignature parses a Signature:
//
+// Signature = Parameters [ Result ] .
+// Result = Type | Parameters .
func (p *parser) parseSignature(recv *types.Var) *types.Signature {
params, isVariadic := p.parseParameters()
@@ -677,14 +704,15 @@ func (p *parser) parseSignature(recv *types.Var) *types.Signature {
return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
}
-// InterfaceType = "interface" "{" [ MethodList ] "}" .
-// MethodList = Method { ";" Method } .
-// Method = Name Signature .
+// parseInterfaceType parses an InterfaceType:
+//
+// InterfaceType = "interface" "{" [ MethodList ] "}" .
+// MethodList = Method { ";" Method } .
+// Method = Name Signature .
//
// The methods of embedded interfaces are always "inlined"
// by the compiler and thus embedded interfaces are never
// visible in the export data.
-//
func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
var methods []*types.Func
@@ -705,8 +733,9 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
return newInterface(methods, nil).Complete()
}
-// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
+// parseChanType parses a ChanType:
//
+// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
func (p *parser) parseChanType(parent *types.Package) types.Type {
dir := types.SendRecv
if p.tok == scanner.Ident {
@@ -724,17 +753,18 @@ func (p *parser) parseChanType(parent *types.Package) types.Type {
return types.NewChan(dir, elem)
}
-// Type =
-// BasicType | TypeName | ArrayType | SliceType | StructType |
-// PointerType | FuncType | InterfaceType | MapType | ChanType |
-// "(" Type ")" .
+// parseType parses a Type:
//
-// BasicType = ident .
-// TypeName = ExportedName .
-// SliceType = "[" "]" Type .
-// PointerType = "*" Type .
-// FuncType = "func" Signature .
+// Type =
+// BasicType | TypeName | ArrayType | SliceType | StructType |
+// PointerType | FuncType | InterfaceType | MapType | ChanType |
+// "(" Type ")" .
//
+// BasicType = ident .
+// TypeName = ExportedName .
+// SliceType = "[" "]" Type .
+// PointerType = "*" Type .
+// FuncType = "func" Signature .
func (p *parser) parseType(parent *types.Package) types.Type {
switch p.tok {
case scanner.Ident:
@@ -786,16 +816,18 @@ func (p *parser) parseType(parent *types.Package) types.Type {
// ----------------------------------------------------------------------------
// Declarations
-// ImportDecl = "import" PackageName PackageId .
+// parseImportDecl parses an ImportDecl:
//
+// ImportDecl = "import" PackageName PackageId .
func (p *parser) parseImportDecl() {
p.expectKeyword("import")
name := p.parsePackageName()
p.getPkg(p.parsePackageID(), name)
}
-// int_lit = [ "+" | "-" ] { "0" ... "9" } .
+// parseInt parses an int_lit:
//
+// int_lit = [ "+" | "-" ] { "0" ... "9" } .
func (p *parser) parseInt() string {
s := ""
switch p.tok {
@@ -808,8 +840,9 @@ func (p *parser) parseInt() string {
return s + p.expect(scanner.Int)
}
-// number = int_lit [ "p" int_lit ] .
+// parseNumber parses a number:
//
+// number = int_lit [ "p" int_lit ] .
func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
// mantissa
mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
@@ -844,13 +877,14 @@ func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
return
}
-// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
-// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
-// bool_lit = "true" | "false" .
-// complex_lit = "(" float_lit "+" float_lit "i" ")" .
-// rune_lit = "(" int_lit "+" int_lit ")" .
-// string_lit = `"` { unicode_char } `"` .
+// parseConstDecl parses a ConstDecl:
//
+// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
+// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
+// bool_lit = "true" | "false" .
+// complex_lit = "(" float_lit "+" float_lit "i" ")" .
+// rune_lit = "(" int_lit "+" int_lit ")" .
+// string_lit = `"` { unicode_char } `"` .
func (p *parser) parseConstDecl() {
p.expectKeyword("const")
pkg, name := p.parseExportedName()
@@ -920,8 +954,9 @@ func (p *parser) parseConstDecl() {
pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
}
-// TypeDecl = "type" ExportedName Type .
+// parseTypeDecl parses a TypeDecl:
//
+// TypeDecl = "type" ExportedName Type .
func (p *parser) parseTypeDecl() {
p.expectKeyword("type")
pkg, name := p.parseExportedName()
@@ -939,8 +974,9 @@ func (p *parser) parseTypeDecl() {
}
}
-// VarDecl = "var" ExportedName Type .
+// parseVarDecl parses a VarDecl:
//
+// VarDecl = "var" ExportedName Type .
func (p *parser) parseVarDecl() {
p.expectKeyword("var")
pkg, name := p.parseExportedName()
@@ -948,9 +984,10 @@ func (p *parser) parseVarDecl() {
pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
}
-// Func = Signature [ Body ] .
-// Body = "{" ... "}" .
+// parseFunc parses a Func:
//
+// Func = Signature [ Body ] .
+// Body = "{" ... "}" .
func (p *parser) parseFunc(recv *types.Var) *types.Signature {
sig := p.parseSignature(recv)
if p.tok == '{' {
@@ -967,9 +1004,10 @@ func (p *parser) parseFunc(recv *types.Var) *types.Signature {
return sig
}
-// MethodDecl = "func" Receiver Name Func .
-// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
+// parseMethodDecl parses a MethodDecl:
//
+// MethodDecl = "func" Receiver Name Func .
+// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
func (p *parser) parseMethodDecl() {
// "func" already consumed
p.expect('(')
@@ -992,8 +1030,9 @@ func (p *parser) parseMethodDecl() {
base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
}
-// FuncDecl = "func" ExportedName Func .
+// parseFuncDecl parses a FuncDecl:
//
+// FuncDecl = "func" ExportedName Func .
func (p *parser) parseFuncDecl() {
// "func" already consumed
pkg, name := p.parseExportedName()
@@ -1001,8 +1040,9 @@ func (p *parser) parseFuncDecl() {
pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
}
-// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
+// parseDecl parses a Decl:
//
+// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
func (p *parser) parseDecl() {
if p.tok == scanner.Ident {
switch p.lit {
@@ -1029,9 +1069,10 @@ func (p *parser) parseDecl() {
// ----------------------------------------------------------------------------
// Export
-// Export = "PackageClause { Decl } "$$" .
-// PackageClause = "package" PackageName [ "safe" ] "\n" .
+// parseExport parses an Export:
//
+// Export = "PackageClause { Decl } "$$" .
+// PackageClause = "package" PackageName [ "safe" ] "\n" .
func (p *parser) parseExport() *types.Package {
p.expectKeyword("package")
name := p.parsePackageName()
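The functional change to Import above is the new dispatch on the first byte of the export data: 'i' selects the indexed importer, 'v'/'c'/'d' the older binary importer, 'u' the new unified IR importer, and anything else is rejected with a descriptive error. A minimal sketch of the same classification outside the real importer; exportDataKind is a hypothetical helper, not part of this package:

package main

import "fmt"

// exportDataKind classifies export data by its first byte, mirroring the
// switch added to Import above.
func exportDataKind(data []byte) (string, error) {
	if len(data) == 0 {
		return "", fmt.Errorf("empty export data")
	}
	switch data[0] {
	case 'i':
		return "indexed", nil // handled by IImportData
	case 'v', 'c', 'd':
		return "binary", nil // handled by BImportData
	case 'u':
		return "unified", nil // handled by UImportData
	default:
		n := len(data)
		if n > 10 {
			n = 10
		}
		return "", fmt.Errorf("unexpected export data with prefix %q", string(data[:n]))
	}
}

func main() {
	kind, _ := exportDataKind([]byte("i...payload..."))
	fmt.Println(kind) // indexed
}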
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
index 20955340..9a4ff329 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
@@ -251,7 +251,10 @@ func (p *iexporter) stringOff(s string) uint64 {
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(obj types.Object) {
// Package unsafe is known to the compiler and predeclared.
- assert(obj.Pkg() != types.Unsafe)
+ // Caller should not ask us to export it.
+ if obj.Pkg() == types.Unsafe {
+ panic("cannot export package unsafe")
+ }
if _, ok := p.declIndex[obj]; ok {
return
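The iexport.go hunk swaps an assert for an explicit panic when pushDecl is handed an object from package unsafe. A small sketch, assuming a caller that prefers to filter such objects out before export; maybeExport is an illustrative helper, not part of this package:

package main

import (
	"fmt"
	"go/types"
)

// maybeExport rejects objects declared in package unsafe, which the exporter
// refuses to handle (it panics), and forwards everything else to push.
func maybeExport(obj types.Object, push func(types.Object)) error {
	if obj.Pkg() == types.Unsafe {
		return fmt.Errorf("cannot export %s: package unsafe is predeclared", obj.Name())
	}
	push(obj)
	return nil
}

func main() {
	ptr := types.Unsafe.Scope().Lookup("Pointer")
	fmt.Println(maybeExport(ptr, func(types.Object) {}))
}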
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
index 84cfb807..4caa0f55 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
@@ -17,6 +17,7 @@ import (
"go/token"
"go/types"
"io"
+ "math/big"
"sort"
"strings"
@@ -53,7 +54,7 @@ const (
)
type ident struct {
- pkg string
+ pkg *types.Package
name string
}
@@ -100,7 +101,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
if !debug {
defer func() {
if e := recover(); e != nil {
- if version > currentVersion {
+ if bundle {
+ err = fmt.Errorf("%v", e)
+ } else if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
@@ -461,7 +464,7 @@ func (r *importReader) obj(name string) {
// To handle recursive references to the typeparam within its
// bound, save the partial type in tparamIndex before reading the bounds.
- id := ident{r.currPkg.Name(), name}
+ id := ident{r.currPkg, name}
r.p.tparamIndex[id] = t
var implicit bool
if r.p.version >= iexportVersionGo1_18 {
@@ -510,7 +513,9 @@ func (r *importReader) value() (typ types.Type, val constant.Value) {
val = constant.MakeString(r.string())
case types.IsInteger:
- val = r.mpint(b)
+ var x big.Int
+ r.mpint(&x, b)
+ val = constant.Make(&x)
case types.IsFloat:
val = r.mpfloat(b)
@@ -559,8 +564,8 @@ func intSize(b *types.Basic) (signed bool, maxBytes uint) {
return
}
-func (r *importReader) mpint(b *types.Basic) constant.Value {
- signed, maxBytes := intSize(b)
+func (r *importReader) mpint(x *big.Int, typ *types.Basic) {
+ signed, maxBytes := intSize(typ)
maxSmall := 256 - maxBytes
if signed {
@@ -579,7 +584,8 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
v = ^v
}
}
- return constant.MakeInt64(v)
+ x.SetInt64(v)
+ return
}
v := -n
@@ -589,47 +595,23 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
if v < 1 || uint(v) > maxBytes {
errorf("weird decoding: %v, %v => %v", n, signed, v)
}
-
- buf := make([]byte, v)
- io.ReadFull(&r.declReader, buf)
-
- // convert to little endian
- // TODO(gri) go/constant should have a more direct conversion function
- // (e.g., once it supports a big.Float based implementation)
- for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
- buf[i], buf[j] = buf[j], buf[i]
- }
-
- x := constant.MakeFromBytes(buf)
+ b := make([]byte, v)
+ io.ReadFull(&r.declReader, b)
+ x.SetBytes(b)
if signed && n&1 != 0 {
- x = constant.UnaryOp(token.SUB, x, 0)
+ x.Neg(x)
}
- return x
}
-func (r *importReader) mpfloat(b *types.Basic) constant.Value {
- x := r.mpint(b)
- if constant.Sign(x) == 0 {
- return x
- }
-
- exp := r.int64()
- switch {
- case exp > 0:
- x = constant.Shift(x, token.SHL, uint(exp))
- // Ensure that the imported Kind is Float, else this constant may run into
- // bitsize limits on overlarge integers. Eventually we can instead adopt
- // the approach of CL 288632, but that CL relies on go/constant APIs that
- // were introduced in go1.13.
- //
- // TODO(rFindley): sync the logic here with tip Go once we no longer
- // support go1.12.
- x = constant.ToFloat(x)
- case exp < 0:
- d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
- x = constant.BinaryOp(x, token.QUO, d)
+func (r *importReader) mpfloat(typ *types.Basic) constant.Value {
+ var mant big.Int
+ r.mpint(&mant, typ)
+ var f big.Float
+ f.SetInt(&mant)
+ if f.Sign() != 0 {
+ f.SetMantExp(&f, int(r.int64()))
}
- return x
+ return constant.Make(&f)
}
func (r *importReader) ident() string {
@@ -777,7 +759,7 @@ func (r *importReader) doType(base *types.Named) (res types.Type) {
errorf("unexpected type param type")
}
pkg, name := r.qualifiedIdent()
- id := ident{pkg.Name(), name}
+ id := ident{pkg, name}
if t, ok := r.p.tparamIndex[id]; ok {
// We're already in the process of importing this typeparam.
return t
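The iimport.go changes route integer decoding through math/big: mpint now fills a caller-supplied *big.Int, and mpfloat rebuilds a float constant from an integer mantissa plus a base-2 exponent via big.Float.SetMantExp. A self-contained sketch of that mantissa/exponent step; floatConst is an illustrative name:

package main

import (
	"fmt"
	"go/constant"
	"math/big"
)

// floatConst builds a go/constant value equal to mant * 2**exp, the same
// shape of computation the new mpfloat performs.
func floatConst(mant *big.Int, exp int) constant.Value {
	var f big.Float
	f.SetInt(mant)
	if f.Sign() != 0 {
		f.SetMantExp(&f, exp)
	}
	return constant.Make(&f)
}

func main() {
	fmt.Println(floatConst(big.NewInt(3), -1)) // 3 * 2**-1 = 1.5
}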
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go
new file mode 100644
index 00000000..286bf445
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go
@@ -0,0 +1,10 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !(go1.18 && goexperiment.unified)
+// +build !go1.18 !goexperiment.unified
+
+package gcimporter
+
+const unifiedIR = false
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go
new file mode 100644
index 00000000..b5d69ffb
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go
@@ -0,0 +1,10 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18 && goexperiment.unified
+// +build go1.18,goexperiment.unified
+
+package gcimporter
+
+const unifiedIR = true
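unified_no.go and unified_yes.go are the usual pair of build-tagged files that turn a toolchain property (Go 1.18+ with GOEXPERIMENT=unified) into a compile-time constant. A hypothetical sketch of how such a constant is typically consumed; here unifiedIR is hard-coded only so the example is self-contained:

package main

import "fmt"

// In the vendored package this constant is chosen by build tags; it is
// fixed here purely for illustration.
const unifiedIR = false

func main() {
	// Because unifiedIR is a constant, the dead branch is removed at
	// compile time; no runtime flag or init-time registration is needed.
	if unifiedIR {
		fmt.Println("writing unified IR export data")
	} else {
		fmt.Println("writing indexed export data")
	}
}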
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go
new file mode 100644
index 00000000..8eb20729
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go
@@ -0,0 +1,19 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.18
+// +build !go1.18
+
+package gcimporter
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+)
+
+func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ err = fmt.Errorf("go/tools compiled with a Go version earlier than 1.18 cannot read unified IR export data")
+ return
+}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go
new file mode 100644
index 00000000..3c1a4375
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go
@@ -0,0 +1,612 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Derived from go/internal/gcimporter/ureader.go
+
+//go:build go1.18
+// +build go1.18
+
+package gcimporter
+
+import (
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/internal/pkgbits"
+)
+
+// A pkgReader holds the shared state for reading a unified IR package
+// description.
+type pkgReader struct {
+ pkgbits.PkgDecoder
+
+ fake fakeFileSet
+
+ ctxt *types.Context
+ imports map[string]*types.Package // previously imported packages, indexed by path
+
+ // lazily initialized arrays corresponding to the unified IR
+ // PosBase, Pkg, and Type sections, respectively.
+ posBases []string // position bases (i.e., file names)
+ pkgs []*types.Package
+ typs []types.Type
+
+ // laterFns holds functions that need to be invoked at the end of
+ // import reading.
+ laterFns []func()
+}
+
+// later adds a function to be invoked at the end of import reading.
+func (pr *pkgReader) later(fn func()) {
+ pr.laterFns = append(pr.laterFns, fn)
+}
+
+// See cmd/compile/internal/noder.derivedInfo.
+type derivedInfo struct {
+ idx pkgbits.Index
+ needed bool
+}
+
+// See cmd/compile/internal/noder.typeInfo.
+type typeInfo struct {
+ idx pkgbits.Index
+ derived bool
+}
+
+func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
+ s := string(data)
+ s = s[:strings.LastIndex(s, "\n$$\n")]
+ input := pkgbits.NewPkgDecoder(path, s)
+ pkg = readUnifiedPackage(fset, nil, imports, input)
+ return
+}
+
+// readUnifiedPackage reads a package description from the given
+// unified IR export data decoder.
+func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[string]*types.Package, input pkgbits.PkgDecoder) *types.Package {
+ pr := pkgReader{
+ PkgDecoder: input,
+
+ fake: fakeFileSet{
+ fset: fset,
+ files: make(map[string]*fileInfo),
+ },
+
+ ctxt: ctxt,
+ imports: imports,
+
+ posBases: make([]string, input.NumElems(pkgbits.RelocPosBase)),
+ pkgs: make([]*types.Package, input.NumElems(pkgbits.RelocPkg)),
+ typs: make([]types.Type, input.NumElems(pkgbits.RelocType)),
+ }
+ defer pr.fake.setLines()
+
+ r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic)
+ pkg := r.pkg()
+ r.Bool() // has init
+
+ for i, n := 0, r.Len(); i < n; i++ {
+ // As if r.obj(), but avoiding the Scope.Lookup call,
+ // to avoid eager loading of imports.
+ r.Sync(pkgbits.SyncObject)
+ assert(!r.Bool())
+ r.p.objIdx(r.Reloc(pkgbits.RelocObj))
+ assert(r.Len() == 0)
+ }
+
+ r.Sync(pkgbits.SyncEOF)
+
+ for _, fn := range pr.laterFns {
+ fn()
+ }
+
+ pkg.MarkComplete()
+ return pkg
+}
+
+// A reader holds the state for reading a single unified IR element
+// within a package.
+type reader struct {
+ pkgbits.Decoder
+
+ p *pkgReader
+
+ dict *readerDict
+}
+
+// A readerDict holds the state for type parameters that parameterize
+// the current unified IR element.
+type readerDict struct {
+ // bounds is a slice of typeInfos corresponding to the underlying
+ // bounds of the element's type parameters.
+ bounds []typeInfo
+
+ // tparams is a slice of the constructed TypeParams for the element.
+ tparams []*types.TypeParam
+
+ // derived is a slice of types derived from tparams, which may be
+ // instantiated while reading the current element.
+ derived []derivedInfo
+ derivedTypes []types.Type // lazily instantiated from derived
+}
+
+func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
+ return &reader{
+ Decoder: pr.NewDecoder(k, idx, marker),
+ p: pr,
+ }
+}
+
+// @@@ Positions
+
+func (r *reader) pos() token.Pos {
+ r.Sync(pkgbits.SyncPos)
+ if !r.Bool() {
+ return token.NoPos
+ }
+
+ // TODO(mdempsky): Delta encoding.
+ posBase := r.posBase()
+ line := r.Uint()
+ col := r.Uint()
+ return r.p.fake.pos(posBase, int(line), int(col))
+}
+
+func (r *reader) posBase() string {
+ return r.p.posBaseIdx(r.Reloc(pkgbits.RelocPosBase))
+}
+
+func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string {
+ if b := pr.posBases[idx]; b != "" {
+ return b
+ }
+
+ r := pr.newReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
+
+ // Within types2, position bases have a lot more details (e.g.,
+ // keeping track of where //line directives appeared exactly).
+ //
+ // For go/types, we just track the file name.
+
+ filename := r.String()
+
+ if r.Bool() { // file base
+ // Was: "b = token.NewTrimmedFileBase(filename, true)"
+ } else { // line base
+ pos := r.pos()
+ line := r.Uint()
+ col := r.Uint()
+
+ // Was: "b = token.NewLineBase(pos, filename, true, line, col)"
+ _, _, _ = pos, line, col
+ }
+
+ b := filename
+ pr.posBases[idx] = b
+ return b
+}
+
+// @@@ Packages
+
+func (r *reader) pkg() *types.Package {
+ r.Sync(pkgbits.SyncPkg)
+ return r.p.pkgIdx(r.Reloc(pkgbits.RelocPkg))
+}
+
+func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package {
+ // TODO(mdempsky): Consider using some non-nil pointer to indicate
+ // the universe scope, so we don't need to keep re-reading it.
+ if pkg := pr.pkgs[idx]; pkg != nil {
+ return pkg
+ }
+
+ pkg := pr.newReader(pkgbits.RelocPkg, idx, pkgbits.SyncPkgDef).doPkg()
+ pr.pkgs[idx] = pkg
+ return pkg
+}
+
+func (r *reader) doPkg() *types.Package {
+ path := r.String()
+ switch path {
+ case "":
+ path = r.p.PkgPath()
+ case "builtin":
+ return nil // universe
+ case "unsafe":
+ return types.Unsafe
+ }
+
+ if pkg := r.p.imports[path]; pkg != nil {
+ return pkg
+ }
+
+ name := r.String()
+
+ pkg := types.NewPackage(path, name)
+ r.p.imports[path] = pkg
+
+ imports := make([]*types.Package, r.Len())
+ for i := range imports {
+ imports[i] = r.pkg()
+ }
+ pkg.SetImports(imports)
+
+ return pkg
+}
+
+// @@@ Types
+
+func (r *reader) typ() types.Type {
+ return r.p.typIdx(r.typInfo(), r.dict)
+}
+
+func (r *reader) typInfo() typeInfo {
+ r.Sync(pkgbits.SyncType)
+ if r.Bool() {
+ return typeInfo{idx: pkgbits.Index(r.Len()), derived: true}
+ }
+ return typeInfo{idx: r.Reloc(pkgbits.RelocType), derived: false}
+}
+
+func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type {
+ idx := info.idx
+ var where *types.Type
+ if info.derived {
+ where = &dict.derivedTypes[idx]
+ idx = dict.derived[idx].idx
+ } else {
+ where = &pr.typs[idx]
+ }
+
+ if typ := *where; typ != nil {
+ return typ
+ }
+
+ r := pr.newReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
+ r.dict = dict
+
+ typ := r.doTyp()
+ assert(typ != nil)
+
+ // See comment in pkgReader.typIdx explaining how this happens.
+ if prev := *where; prev != nil {
+ return prev
+ }
+
+ *where = typ
+ return typ
+}
+
+func (r *reader) doTyp() (res types.Type) {
+ switch tag := pkgbits.CodeType(r.Code(pkgbits.SyncType)); tag {
+ default:
+ errorf("unhandled type tag: %v", tag)
+ panic("unreachable")
+
+ case pkgbits.TypeBasic:
+ return types.Typ[r.Len()]
+
+ case pkgbits.TypeNamed:
+ obj, targs := r.obj()
+ name := obj.(*types.TypeName)
+ if len(targs) != 0 {
+ t, _ := types.Instantiate(r.p.ctxt, name.Type(), targs, false)
+ return t
+ }
+ return name.Type()
+
+ case pkgbits.TypeTypeParam:
+ return r.dict.tparams[r.Len()]
+
+ case pkgbits.TypeArray:
+ len := int64(r.Uint64())
+ return types.NewArray(r.typ(), len)
+ case pkgbits.TypeChan:
+ dir := types.ChanDir(r.Len())
+ return types.NewChan(dir, r.typ())
+ case pkgbits.TypeMap:
+ return types.NewMap(r.typ(), r.typ())
+ case pkgbits.TypePointer:
+ return types.NewPointer(r.typ())
+ case pkgbits.TypeSignature:
+ return r.signature(nil, nil, nil)
+ case pkgbits.TypeSlice:
+ return types.NewSlice(r.typ())
+ case pkgbits.TypeStruct:
+ return r.structType()
+ case pkgbits.TypeInterface:
+ return r.interfaceType()
+ case pkgbits.TypeUnion:
+ return r.unionType()
+ }
+}
+
+func (r *reader) structType() *types.Struct {
+ fields := make([]*types.Var, r.Len())
+ var tags []string
+ for i := range fields {
+ pos := r.pos()
+ pkg, name := r.selector()
+ ftyp := r.typ()
+ tag := r.String()
+ embedded := r.Bool()
+
+ fields[i] = types.NewField(pos, pkg, name, ftyp, embedded)
+ if tag != "" {
+ for len(tags) < i {
+ tags = append(tags, "")
+ }
+ tags = append(tags, tag)
+ }
+ }
+ return types.NewStruct(fields, tags)
+}
+
+func (r *reader) unionType() *types.Union {
+ terms := make([]*types.Term, r.Len())
+ for i := range terms {
+ terms[i] = types.NewTerm(r.Bool(), r.typ())
+ }
+ return types.NewUnion(terms)
+}
+
+func (r *reader) interfaceType() *types.Interface {
+ methods := make([]*types.Func, r.Len())
+ embeddeds := make([]types.Type, r.Len())
+ implicit := len(methods) == 0 && len(embeddeds) == 1 && r.Bool()
+
+ for i := range methods {
+ pos := r.pos()
+ pkg, name := r.selector()
+ mtyp := r.signature(nil, nil, nil)
+ methods[i] = types.NewFunc(pos, pkg, name, mtyp)
+ }
+
+ for i := range embeddeds {
+ embeddeds[i] = r.typ()
+ }
+
+ iface := types.NewInterfaceType(methods, embeddeds)
+ if implicit {
+ iface.MarkImplicit()
+ }
+ return iface
+}
+
+func (r *reader) signature(recv *types.Var, rtparams, tparams []*types.TypeParam) *types.Signature {
+ r.Sync(pkgbits.SyncSignature)
+
+ params := r.params()
+ results := r.params()
+ variadic := r.Bool()
+
+ return types.NewSignatureType(recv, rtparams, tparams, params, results, variadic)
+}
+
+func (r *reader) params() *types.Tuple {
+ r.Sync(pkgbits.SyncParams)
+
+ params := make([]*types.Var, r.Len())
+ for i := range params {
+ params[i] = r.param()
+ }
+
+ return types.NewTuple(params...)
+}
+
+func (r *reader) param() *types.Var {
+ r.Sync(pkgbits.SyncParam)
+
+ pos := r.pos()
+ pkg, name := r.localIdent()
+ typ := r.typ()
+
+ return types.NewParam(pos, pkg, name, typ)
+}
+
+// @@@ Objects
+
+func (r *reader) obj() (types.Object, []types.Type) {
+ r.Sync(pkgbits.SyncObject)
+
+ assert(!r.Bool())
+
+ pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj))
+ obj := pkgScope(pkg).Lookup(name)
+
+ targs := make([]types.Type, r.Len())
+ for i := range targs {
+ targs[i] = r.typ()
+ }
+
+ return obj, targs
+}
+
+func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
+ rname := pr.newReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
+
+ objPkg, objName := rname.qualifiedIdent()
+ assert(objName != "")
+
+ tag := pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
+
+ if tag == pkgbits.ObjStub {
+ assert(objPkg == nil || objPkg == types.Unsafe)
+ return objPkg, objName
+ }
+
+ if objPkg.Scope().Lookup(objName) == nil {
+ dict := pr.objDictIdx(idx)
+
+ r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1)
+ r.dict = dict
+
+ declare := func(obj types.Object) {
+ objPkg.Scope().Insert(obj)
+ }
+
+ switch tag {
+ default:
+ panic("weird")
+
+ case pkgbits.ObjAlias:
+ pos := r.pos()
+ typ := r.typ()
+ declare(types.NewTypeName(pos, objPkg, objName, typ))
+
+ case pkgbits.ObjConst:
+ pos := r.pos()
+ typ := r.typ()
+ val := r.Value()
+ declare(types.NewConst(pos, objPkg, objName, typ, val))
+
+ case pkgbits.ObjFunc:
+ pos := r.pos()
+ tparams := r.typeParamNames()
+ sig := r.signature(nil, nil, tparams)
+ declare(types.NewFunc(pos, objPkg, objName, sig))
+
+ case pkgbits.ObjType:
+ pos := r.pos()
+
+ obj := types.NewTypeName(pos, objPkg, objName, nil)
+ named := types.NewNamed(obj, nil, nil)
+ declare(obj)
+
+ named.SetTypeParams(r.typeParamNames())
+
+ // TODO(mdempsky): Rewrite receiver types to underlying is an
+ // Interface? The go/types importer does this (I think because
+ // unit tests expected that), but cmd/compile doesn't care
+ // about it, so maybe we can avoid worrying about that here.
+ rhs := r.typ()
+ r.p.later(func() {
+ underlying := rhs.Underlying()
+ named.SetUnderlying(underlying)
+ })
+
+ for i, n := 0, r.Len(); i < n; i++ {
+ named.AddMethod(r.method())
+ }
+
+ case pkgbits.ObjVar:
+ pos := r.pos()
+ typ := r.typ()
+ declare(types.NewVar(pos, objPkg, objName, typ))
+ }
+ }
+
+ return objPkg, objName
+}
+
+func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
+ r := pr.newReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
+
+ var dict readerDict
+
+ if implicits := r.Len(); implicits != 0 {
+ errorf("unexpected object with %v implicit type parameter(s)", implicits)
+ }
+
+ dict.bounds = make([]typeInfo, r.Len())
+ for i := range dict.bounds {
+ dict.bounds[i] = r.typInfo()
+ }
+
+ dict.derived = make([]derivedInfo, r.Len())
+ dict.derivedTypes = make([]types.Type, len(dict.derived))
+ for i := range dict.derived {
+ dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
+ }
+
+ // function references follow, but reader doesn't need those
+
+ return &dict
+}
+
+func (r *reader) typeParamNames() []*types.TypeParam {
+ r.Sync(pkgbits.SyncTypeParamNames)
+
+ // Note: This code assumes it only processes objects without
+ // implicit type parameters. This is currently fine, because
+ // reader is only used to read in exported declarations, which are
+ // always package scoped.
+
+ if len(r.dict.bounds) == 0 {
+ return nil
+ }
+
+ // Careful: Type parameter lists may have cycles. To allow for this,
+ // we construct the type parameter list in two passes: first we
+ // create all the TypeNames and TypeParams, then we construct and
+ // set the bound type.
+
+ r.dict.tparams = make([]*types.TypeParam, len(r.dict.bounds))
+ for i := range r.dict.bounds {
+ pos := r.pos()
+ pkg, name := r.localIdent()
+
+ tname := types.NewTypeName(pos, pkg, name, nil)
+ r.dict.tparams[i] = types.NewTypeParam(tname, nil)
+ }
+
+ typs := make([]types.Type, len(r.dict.bounds))
+ for i, bound := range r.dict.bounds {
+ typs[i] = r.p.typIdx(bound, r.dict)
+ }
+
+ // TODO(mdempsky): This is subtle, elaborate further.
+ //
+ // We have to save tparams outside of the closure, because
+ // typeParamNames() can be called multiple times with the same
+ // dictionary instance.
+ //
+ // Also, this needs to happen later to make sure SetUnderlying has
+ // been called.
+ //
+ // TODO(mdempsky): Is it safe to have a single "later" slice or do
+ // we need to have multiple passes? See comments on CL 386002 and
+ // go.dev/issue/52104.
+ tparams := r.dict.tparams
+ r.p.later(func() {
+ for i, typ := range typs {
+ tparams[i].SetConstraint(typ)
+ }
+ })
+
+ return r.dict.tparams
+}
+
+func (r *reader) method() *types.Func {
+ r.Sync(pkgbits.SyncMethod)
+ pos := r.pos()
+ pkg, name := r.selector()
+
+ rparams := r.typeParamNames()
+ sig := r.signature(r.param(), rparams, nil)
+
+ _ = r.pos() // TODO(mdempsky): Remove; this is a hack for linker.go.
+ return types.NewFunc(pos, pkg, name, sig)
+}
+
+func (r *reader) qualifiedIdent() (*types.Package, string) { return r.ident(pkgbits.SyncSym) }
+func (r *reader) localIdent() (*types.Package, string) { return r.ident(pkgbits.SyncLocalIdent) }
+func (r *reader) selector() (*types.Package, string) { return r.ident(pkgbits.SyncSelector) }
+
+func (r *reader) ident(marker pkgbits.SyncMarker) (*types.Package, string) {
+ r.Sync(marker)
+ return r.pkg(), r.String()
+}
+
+// pkgScope returns pkg.Scope().
+// If pkg is nil, it returns types.Universe instead.
+//
+// TODO(mdempsky): Remove after x/tools can depend on Go 1.19.
+func pkgScope(pkg *types.Package) *types.Scope {
+ if pkg != nil {
+ return pkg.Scope()
+ }
+ return types.Universe
+}