Diffstat (limited to 'vendor/golang.org/x/tools')
-rw-r--r--  vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go | 32
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go | 1125
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist.go | 22
-rw-r--r--  vendor/golang.org/x/tools/go/packages/packages.go | 87
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/bexport.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go) | 9
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/bimport.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/exportdata.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go | 265
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/iexport.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go) | 198
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/iimport.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go) | 112
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/support_go117.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/support_go117.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/support_go118.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/support_go118.go) | 14
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/unified_no.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/ureader_no.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go (renamed from vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go) | 220
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/invoke.go | 83
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/version.go | 36
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/codes.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/codes.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/decoder.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go) | 108
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/doc.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/doc.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/encoder.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go) | 20
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/flags.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/flags.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/reloc.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go) | 4
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/support.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/support.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/sync.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/sync.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go (renamed from vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go) | 0
-rw-r--r--  vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go | 59
-rw-r--r--  vendor/golang.org/x/tools/internal/typesinternal/errorcode.go | 38
-rw-r--r--  vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go | 26
34 files changed, 1182 insertions(+), 1276 deletions(-)
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
index 2ed25a75..165ede0f 100644
--- a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -27,10 +27,9 @@ import (
"go/token"
"go/types"
"io"
- "io/ioutil"
"os/exec"
- "golang.org/x/tools/go/internal/gcimporter"
+ "golang.org/x/tools/internal/gcimporter"
)
// Find returns the name of an object (.o) or archive (.a) file
@@ -85,9 +84,26 @@ func NewReader(r io.Reader) (io.Reader, error) {
}
}
+// readAll works the same way as io.ReadAll, but avoids allocations and copies
+// by preallocating a byte slice of the necessary size if the size is known up
+// front. This is always possible when the input is an archive. In that case,
+// NewReader will return the known size using an io.LimitedReader.
+func readAll(r io.Reader) ([]byte, error) {
+ if lr, ok := r.(*io.LimitedReader); ok {
+ data := make([]byte, lr.N)
+ _, err := io.ReadFull(lr, data)
+ return data, err
+ }
+ return io.ReadAll(r)
+}
+
// Read reads export data from in, decodes it, and returns type
// information for the package.
-// The package name is specified by path.
+//
+// The package path (effectively its linker symbol prefix) is
+// specified by path, since unlike the package name, this information
+// may not be recorded in the export data.
+//
// File position information is added to fset.
//
// Read may inspect and add to the imports map to ensure that references
@@ -98,7 +114,7 @@ func NewReader(r io.Reader) (io.Reader, error) {
//
// On return, the state of the reader is undefined.
func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
- data, err := ioutil.ReadAll(in)
+ data, err := readAll(in)
if err != nil {
return nil, fmt.Errorf("reading export data for %q: %v", path, err)
}
@@ -107,12 +123,6 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
}
- // The App Engine Go runtime v1.6 uses the old export data format.
- // TODO(adonovan): delete once v1.7 has been around for a while.
- if bytes.HasPrefix(data, []byte("package ")) {
- return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
- }
-
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
@@ -161,7 +171,7 @@ func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
//
// Experimental: This API is experimental and may change in the future.
func ReadBundle(in io.Reader, fset *token.FileSet, imports map[string]*types.Package) ([]*types.Package, error) {
- data, err := ioutil.ReadAll(in)
+ data, err := readAll(in)
if err != nil {
return nil, fmt.Errorf("reading export bundle: %v", err)
}
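
For illustration (not part of the patch), a minimal standalone sketch of the preallocating readAll pattern added above; the payload is made up, everything else is the standard library:

package main

import (
	"bytes"
	"fmt"
	"io"
)

// readAll mirrors the vendored helper: when the reader is an *io.LimitedReader
// the remaining byte count is known up front, so the buffer is preallocated
// and filled with io.ReadFull instead of being grown repeatedly by io.ReadAll.
func readAll(r io.Reader) ([]byte, error) {
	if lr, ok := r.(*io.LimitedReader); ok {
		data := make([]byte, lr.N)
		_, err := io.ReadFull(lr, data)
		return data, err
	}
	return io.ReadAll(r)
}

func main() {
	payload := []byte("example export data") // made-up payload
	lr := &io.LimitedReader{R: bytes.NewReader(payload), N: int64(len(payload))}
	data, err := readAll(lr)
	fmt.Println(string(data), err)
}
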
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
deleted file mode 100644
index e96c3960..00000000
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
+++ /dev/null
@@ -1,1125 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go,
-// but it also contains the original source-based importer code for Go1.6.
-// Once we stop supporting 1.6, we can remove that code.
-
-// Package gcimporter provides various functions for reading
-// gc-generated object files that can be used to implement the
-// Importer interface defined by the Go 1.5 standard library package.
-package gcimporter // import "golang.org/x/tools/go/internal/gcimporter"
-
-import (
- "bufio"
- "errors"
- "fmt"
- "go/build"
- "go/constant"
- "go/token"
- "go/types"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "sort"
- "strconv"
- "strings"
- "text/scanner"
-)
-
-const (
- // Enable debug during development: it adds some additional checks, and
- // prevents errors from being recovered.
- debug = false
-
- // If trace is set, debugging output is printed to std out.
- trace = false
-)
-
-var pkgExts = [...]string{".a", ".o"}
-
-// FindPkg returns the filename and unique package id for an import
-// path based on package information provided by build.Import (using
-// the build.Default build.Context). A relative srcDir is interpreted
-// relative to the current working directory.
-// If no file was found, an empty filename is returned.
-func FindPkg(path, srcDir string) (filename, id string) {
- if path == "" {
- return
- }
-
- var noext string
- switch {
- default:
- // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
- // Don't require the source files to be present.
- if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
- srcDir = abs
- }
- bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
- if bp.PkgObj == "" {
- id = path // make sure we have an id to print in error message
- return
- }
- noext = strings.TrimSuffix(bp.PkgObj, ".a")
- id = bp.ImportPath
-
- case build.IsLocalImport(path):
- // "./x" -> "/this/directory/x.ext", "/this/directory/x"
- noext = filepath.Join(srcDir, path)
- id = noext
-
- case filepath.IsAbs(path):
- // for completeness only - go/build.Import
- // does not support absolute imports
- // "/x" -> "/x.ext", "/x"
- noext = path
- id = path
- }
-
- if false { // for debugging
- if path != id {
- fmt.Printf("%s -> %s\n", path, id)
- }
- }
-
- // try extensions
- for _, ext := range pkgExts {
- filename = noext + ext
- if f, err := os.Stat(filename); err == nil && !f.IsDir() {
- return
- }
- }
-
- filename = "" // not found
- return
-}
-
-// ImportData imports a package by reading the gc-generated export data,
-// adds the corresponding package object to the packages map indexed by id,
-// and returns the object.
-//
-// The packages map must contains all packages already imported. The data
-// reader position must be the beginning of the export data section. The
-// filename is only used in error messages.
-//
-// If packages[id] contains the completely imported package, that package
-// can be used directly, and there is no need to call this function (but
-// there is also no harm but for extra time used).
-func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
- // support for parser error handling
- defer func() {
- switch r := recover().(type) {
- case nil:
- // nothing to do
- case importError:
- err = r
- default:
- panic(r) // internal error
- }
- }()
-
- var p parser
- p.init(filename, id, data, packages)
- pkg = p.parseExport()
-
- return
-}
-
-// Import imports a gc-generated package given its import path and srcDir, adds
-// the corresponding package object to the packages map, and returns the object.
-// The packages map must contain all packages already imported.
-func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
- var rc io.ReadCloser
- var filename, id string
- if lookup != nil {
- // With custom lookup specified, assume that caller has
- // converted path to a canonical import path for use in the map.
- if path == "unsafe" {
- return types.Unsafe, nil
- }
- id = path
-
- // No need to re-import if the package was imported completely before.
- if pkg = packages[id]; pkg != nil && pkg.Complete() {
- return
- }
- f, err := lookup(path)
- if err != nil {
- return nil, err
- }
- rc = f
- } else {
- filename, id = FindPkg(path, srcDir)
- if filename == "" {
- if path == "unsafe" {
- return types.Unsafe, nil
- }
- return nil, fmt.Errorf("can't find import: %q", id)
- }
-
- // no need to re-import if the package was imported completely before
- if pkg = packages[id]; pkg != nil && pkg.Complete() {
- return
- }
-
- // open file
- f, err := os.Open(filename)
- if err != nil {
- return nil, err
- }
- defer func() {
- if err != nil {
- // add file name to error
- err = fmt.Errorf("%s: %v", filename, err)
- }
- }()
- rc = f
- }
- defer rc.Close()
-
- var hdr string
- var size int64
- buf := bufio.NewReader(rc)
- if hdr, size, err = FindExportData(buf); err != nil {
- return
- }
-
- switch hdr {
- case "$$\n":
- // Work-around if we don't have a filename; happens only if lookup != nil.
- // Either way, the filename is only needed for importer error messages, so
- // this is fine.
- if filename == "" {
- filename = path
- }
- return ImportData(packages, filename, id, buf)
-
- case "$$B\n":
- var data []byte
- data, err = ioutil.ReadAll(buf)
- if err != nil {
- break
- }
-
- // TODO(gri): allow clients of go/importer to provide a FileSet.
- // Or, define a new standard go/types/gcexportdata package.
- fset := token.NewFileSet()
-
- // The indexed export format starts with an 'i'; the older
- // binary export format starts with a 'c', 'd', or 'v'
- // (from "version"). Select appropriate importer.
- if len(data) > 0 {
- switch data[0] {
- case 'i':
- _, pkg, err := IImportData(fset, packages, data[1:], id)
- return pkg, err
-
- case 'v', 'c', 'd':
- _, pkg, err := BImportData(fset, packages, data, id)
- return pkg, err
-
- case 'u':
- _, pkg, err := UImportData(fset, packages, data[1:size], id)
- return pkg, err
-
- default:
- l := len(data)
- if l > 10 {
- l = 10
- }
- return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
- }
- }
-
- default:
- err = fmt.Errorf("unknown export data header: %q", hdr)
- }
-
- return
-}
-
-// ----------------------------------------------------------------------------
-// Parser
-
-// TODO(gri) Imported objects don't have position information.
-// Ideally use the debug table line info; alternatively
-// create some fake position (or the position of the
-// import). That way error messages referring to imported
-// objects can print meaningful information.
-
-// parser parses the exports inside a gc compiler-produced
-// object/archive file and populates its scope with the results.
-type parser struct {
- scanner scanner.Scanner
- tok rune // current token
- lit string // literal string; only valid for Ident, Int, String tokens
- id string // package id of imported package
- sharedPkgs map[string]*types.Package // package id -> package object (across importer)
- localPkgs map[string]*types.Package // package id -> package object (just this package)
-}
-
-func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
- p.scanner.Init(src)
- p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
- p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
- p.scanner.Whitespace = 1<<'\t' | 1<<' '
- p.scanner.Filename = filename // for good error messages
- p.next()
- p.id = id
- p.sharedPkgs = packages
- if debug {
- // check consistency of packages map
- for _, pkg := range packages {
- if pkg.Name() == "" {
- fmt.Printf("no package name for %s\n", pkg.Path())
- }
- }
- }
-}
-
-func (p *parser) next() {
- p.tok = p.scanner.Scan()
- switch p.tok {
- case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
- p.lit = p.scanner.TokenText()
- default:
- p.lit = ""
- }
- if debug {
- fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
- }
-}
-
-func declTypeName(pkg *types.Package, name string) *types.TypeName {
- scope := pkg.Scope()
- if obj := scope.Lookup(name); obj != nil {
- return obj.(*types.TypeName)
- }
- obj := types.NewTypeName(token.NoPos, pkg, name, nil)
- // a named type may be referred to before the underlying type
- // is known - set it up
- types.NewNamed(obj, nil, nil)
- scope.Insert(obj)
- return obj
-}
-
-// ----------------------------------------------------------------------------
-// Error handling
-
-// Internal errors are boxed as importErrors.
-type importError struct {
- pos scanner.Position
- err error
-}
-
-func (e importError) Error() string {
- return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
-}
-
-func (p *parser) error(err interface{}) {
- if s, ok := err.(string); ok {
- err = errors.New(s)
- }
- // panic with a runtime.Error if err is not an error
- panic(importError{p.scanner.Pos(), err.(error)})
-}
-
-func (p *parser) errorf(format string, args ...interface{}) {
- p.error(fmt.Sprintf(format, args...))
-}
-
-func (p *parser) expect(tok rune) string {
- lit := p.lit
- if p.tok != tok {
- p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
- }
- p.next()
- return lit
-}
-
-func (p *parser) expectSpecial(tok string) {
- sep := 'x' // not white space
- i := 0
- for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
- sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
- p.next()
- i++
- }
- if i < len(tok) {
- p.errorf("expected %q, got %q", tok, tok[0:i])
- }
-}
-
-func (p *parser) expectKeyword(keyword string) {
- lit := p.expect(scanner.Ident)
- if lit != keyword {
- p.errorf("expected keyword %s, got %q", keyword, lit)
- }
-}
-
-// ----------------------------------------------------------------------------
-// Qualified and unqualified names
-
-// parsePackageID parses a PackageId:
-//
-// PackageId = string_lit .
-func (p *parser) parsePackageID() string {
- id, err := strconv.Unquote(p.expect(scanner.String))
- if err != nil {
- p.error(err)
- }
- // id == "" stands for the imported package id
- // (only known at time of package installation)
- if id == "" {
- id = p.id
- }
- return id
-}
-
-// parsePackageName parse a PackageName:
-//
-// PackageName = ident .
-func (p *parser) parsePackageName() string {
- return p.expect(scanner.Ident)
-}
-
-// parseDotIdent parses a dotIdentifier:
-//
-// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
-func (p *parser) parseDotIdent() string {
- ident := ""
- if p.tok != scanner.Int {
- sep := 'x' // not white space
- for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
- ident += p.lit
- sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
- p.next()
- }
- }
- if ident == "" {
- p.expect(scanner.Ident) // use expect() for error handling
- }
- return ident
-}
-
-// parseQualifiedName parses a QualifiedName:
-//
-// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
-func (p *parser) parseQualifiedName() (id, name string) {
- p.expect('@')
- id = p.parsePackageID()
- p.expect('.')
- // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
- if p.tok == '?' {
- p.next()
- } else {
- name = p.parseDotIdent()
- }
- return
-}
-
-// getPkg returns the package for a given id. If the package is
-// not found, create the package and add it to the p.localPkgs
-// and p.sharedPkgs maps. name is the (expected) name of the
-// package. If name == "", the package name is expected to be
-// set later via an import clause in the export data.
-//
-// id identifies a package, usually by a canonical package path like
-// "encoding/json" but possibly by a non-canonical import path like
-// "./json".
-func (p *parser) getPkg(id, name string) *types.Package {
- // package unsafe is not in the packages maps - handle explicitly
- if id == "unsafe" {
- return types.Unsafe
- }
-
- pkg := p.localPkgs[id]
- if pkg == nil {
- // first import of id from this package
- pkg = p.sharedPkgs[id]
- if pkg == nil {
- // first import of id by this importer;
- // add (possibly unnamed) pkg to shared packages
- pkg = types.NewPackage(id, name)
- p.sharedPkgs[id] = pkg
- }
- // add (possibly unnamed) pkg to local packages
- if p.localPkgs == nil {
- p.localPkgs = make(map[string]*types.Package)
- }
- p.localPkgs[id] = pkg
- } else if name != "" {
- // package exists already and we have an expected package name;
- // make sure names match or set package name if necessary
- if pname := pkg.Name(); pname == "" {
- pkg.SetName(name)
- } else if pname != name {
- p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name)
- }
- }
- return pkg
-}
-
-// parseExportedName is like parseQualifiedName, but
-// the package id is resolved to an imported *types.Package.
-func (p *parser) parseExportedName() (pkg *types.Package, name string) {
- id, name := p.parseQualifiedName()
- pkg = p.getPkg(id, "")
- return
-}
-
-// ----------------------------------------------------------------------------
-// Types
-
-// parseBasicType parses a BasicType:
-//
-// BasicType = identifier .
-func (p *parser) parseBasicType() types.Type {
- id := p.expect(scanner.Ident)
- obj := types.Universe.Lookup(id)
- if obj, ok := obj.(*types.TypeName); ok {
- return obj.Type()
- }
- p.errorf("not a basic type: %s", id)
- return nil
-}
-
-// parseArrayType parses an ArrayType:
-//
-// ArrayType = "[" int_lit "]" Type .
-func (p *parser) parseArrayType(parent *types.Package) types.Type {
- // "[" already consumed and lookahead known not to be "]"
- lit := p.expect(scanner.Int)
- p.expect(']')
- elem := p.parseType(parent)
- n, err := strconv.ParseInt(lit, 10, 64)
- if err != nil {
- p.error(err)
- }
- return types.NewArray(elem, n)
-}
-
-// parseMapType parses a MapType:
-//
-// MapType = "map" "[" Type "]" Type .
-func (p *parser) parseMapType(parent *types.Package) types.Type {
- p.expectKeyword("map")
- p.expect('[')
- key := p.parseType(parent)
- p.expect(']')
- elem := p.parseType(parent)
- return types.NewMap(key, elem)
-}
-
-// parseName parses a Name:
-//
-// Name = identifier | "?" | QualifiedName .
-//
-// For unqualified and anonymous names, the returned package is the parent
-// package unless parent == nil, in which case the returned package is the
-// package being imported. (The parent package is not nil if the name
-// is an unqualified struct field or interface method name belonging to a
-// type declared in another package.)
-//
-// For qualified names, the returned package is nil (and not created if
-// it doesn't exist yet) unless materializePkg is set (which creates an
-// unnamed package with valid package path). In the latter case, a
-// subsequent import clause is expected to provide a name for the package.
-func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
- pkg = parent
- if pkg == nil {
- pkg = p.sharedPkgs[p.id]
- }
- switch p.tok {
- case scanner.Ident:
- name = p.lit
- p.next()
- case '?':
- // anonymous
- p.next()
- case '@':
- // exported name prefixed with package path
- pkg = nil
- var id string
- id, name = p.parseQualifiedName()
- if materializePkg {
- pkg = p.getPkg(id, "")
- }
- default:
- p.error("name expected")
- }
- return
-}
-
-func deref(typ types.Type) types.Type {
- if p, _ := typ.(*types.Pointer); p != nil {
- return p.Elem()
- }
- return typ
-}
-
-// parseField parses a Field:
-//
-// Field = Name Type [ string_lit ] .
-func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
- pkg, name := p.parseName(parent, true)
-
- if name == "_" {
- // Blank fields should be package-qualified because they
- // are unexported identifiers, but gc does not qualify them.
- // Assuming that the ident belongs to the current package
- // causes types to change during re-exporting, leading
- // to spurious "can't assign A to B" errors from go/types.
- // As a workaround, pretend all blank fields belong
- // to the same unique dummy package.
- const blankpkg = "<_>"
- pkg = p.getPkg(blankpkg, blankpkg)
- }
-
- typ := p.parseType(parent)
- anonymous := false
- if name == "" {
- // anonymous field - typ must be T or *T and T must be a type name
- switch typ := deref(typ).(type) {
- case *types.Basic: // basic types are named types
- pkg = nil // objects defined in Universe scope have no package
- name = typ.Name()
- case *types.Named:
- name = typ.Obj().Name()
- default:
- p.errorf("anonymous field expected")
- }
- anonymous = true
- }
- tag := ""
- if p.tok == scanner.String {
- s := p.expect(scanner.String)
- var err error
- tag, err = strconv.Unquote(s)
- if err != nil {
- p.errorf("invalid struct tag %s: %s", s, err)
- }
- }
- return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
-}
-
-// parseStructType parses a StructType:
-//
-// StructType = "struct" "{" [ FieldList ] "}" .
-// FieldList = Field { ";" Field } .
-func (p *parser) parseStructType(parent *types.Package) types.Type {
- var fields []*types.Var
- var tags []string
-
- p.expectKeyword("struct")
- p.expect('{')
- for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
- if i > 0 {
- p.expect(';')
- }
- fld, tag := p.parseField(parent)
- if tag != "" && tags == nil {
- tags = make([]string, i)
- }
- if tags != nil {
- tags = append(tags, tag)
- }
- fields = append(fields, fld)
- }
- p.expect('}')
-
- return types.NewStruct(fields, tags)
-}
-
-// parseParameter parses a Parameter:
-//
-// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
-func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
- _, name := p.parseName(nil, false)
- // remove gc-specific parameter numbering
- if i := strings.Index(name, "·"); i >= 0 {
- name = name[:i]
- }
- if p.tok == '.' {
- p.expectSpecial("...")
- isVariadic = true
- }
- typ := p.parseType(nil)
- if isVariadic {
- typ = types.NewSlice(typ)
- }
- // ignore argument tag (e.g. "noescape")
- if p.tok == scanner.String {
- p.next()
- }
- // TODO(gri) should we provide a package?
- par = types.NewVar(token.NoPos, nil, name, typ)
- return
-}
-
-// parseParameters parses a Parameters:
-//
-// Parameters = "(" [ ParameterList ] ")" .
-// ParameterList = { Parameter "," } Parameter .
-func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
- p.expect('(')
- for p.tok != ')' && p.tok != scanner.EOF {
- if len(list) > 0 {
- p.expect(',')
- }
- par, variadic := p.parseParameter()
- list = append(list, par)
- if variadic {
- if isVariadic {
- p.error("... not on final argument")
- }
- isVariadic = true
- }
- }
- p.expect(')')
-
- return
-}
-
-// parseSignature parses a Signature:
-//
-// Signature = Parameters [ Result ] .
-// Result = Type | Parameters .
-func (p *parser) parseSignature(recv *types.Var) *types.Signature {
- params, isVariadic := p.parseParameters()
-
- // optional result type
- var results []*types.Var
- if p.tok == '(' {
- var variadic bool
- results, variadic = p.parseParameters()
- if variadic {
- p.error("... not permitted on result type")
- }
- }
-
- return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
-}
-
-// parseInterfaceType parses an InterfaceType:
-//
-// InterfaceType = "interface" "{" [ MethodList ] "}" .
-// MethodList = Method { ";" Method } .
-// Method = Name Signature .
-//
-// The methods of embedded interfaces are always "inlined"
-// by the compiler and thus embedded interfaces are never
-// visible in the export data.
-func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
- var methods []*types.Func
-
- p.expectKeyword("interface")
- p.expect('{')
- for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
- if i > 0 {
- p.expect(';')
- }
- pkg, name := p.parseName(parent, true)
- sig := p.parseSignature(nil)
- methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
- }
- p.expect('}')
-
- // Complete requires the type's embedded interfaces to be fully defined,
- // but we do not define any
- return newInterface(methods, nil).Complete()
-}
-
-// parseChanType parses a ChanType:
-//
-// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
-func (p *parser) parseChanType(parent *types.Package) types.Type {
- dir := types.SendRecv
- if p.tok == scanner.Ident {
- p.expectKeyword("chan")
- if p.tok == '<' {
- p.expectSpecial("<-")
- dir = types.SendOnly
- }
- } else {
- p.expectSpecial("<-")
- p.expectKeyword("chan")
- dir = types.RecvOnly
- }
- elem := p.parseType(parent)
- return types.NewChan(dir, elem)
-}
-
-// parseType parses a Type:
-//
-// Type =
-// BasicType | TypeName | ArrayType | SliceType | StructType |
-// PointerType | FuncType | InterfaceType | MapType | ChanType |
-// "(" Type ")" .
-//
-// BasicType = ident .
-// TypeName = ExportedName .
-// SliceType = "[" "]" Type .
-// PointerType = "*" Type .
-// FuncType = "func" Signature .
-func (p *parser) parseType(parent *types.Package) types.Type {
- switch p.tok {
- case scanner.Ident:
- switch p.lit {
- default:
- return p.parseBasicType()
- case "struct":
- return p.parseStructType(parent)
- case "func":
- // FuncType
- p.next()
- return p.parseSignature(nil)
- case "interface":
- return p.parseInterfaceType(parent)
- case "map":
- return p.parseMapType(parent)
- case "chan":
- return p.parseChanType(parent)
- }
- case '@':
- // TypeName
- pkg, name := p.parseExportedName()
- return declTypeName(pkg, name).Type()
- case '[':
- p.next() // look ahead
- if p.tok == ']' {
- // SliceType
- p.next()
- return types.NewSlice(p.parseType(parent))
- }
- return p.parseArrayType(parent)
- case '*':
- // PointerType
- p.next()
- return types.NewPointer(p.parseType(parent))
- case '<':
- return p.parseChanType(parent)
- case '(':
- // "(" Type ")"
- p.next()
- typ := p.parseType(parent)
- p.expect(')')
- return typ
- }
- p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
- return nil
-}
-
-// ----------------------------------------------------------------------------
-// Declarations
-
-// parseImportDecl parses an ImportDecl:
-//
-// ImportDecl = "import" PackageName PackageId .
-func (p *parser) parseImportDecl() {
- p.expectKeyword("import")
- name := p.parsePackageName()
- p.getPkg(p.parsePackageID(), name)
-}
-
-// parseInt parses an int_lit:
-//
-// int_lit = [ "+" | "-" ] { "0" ... "9" } .
-func (p *parser) parseInt() string {
- s := ""
- switch p.tok {
- case '-':
- s = "-"
- p.next()
- case '+':
- p.next()
- }
- return s + p.expect(scanner.Int)
-}
-
-// parseNumber parses a number:
-//
-// number = int_lit [ "p" int_lit ] .
-func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
- // mantissa
- mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
- if mant == nil {
- panic("invalid mantissa")
- }
-
- if p.lit == "p" {
- // exponent (base 2)
- p.next()
- exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
- if err != nil {
- p.error(err)
- }
- if exp < 0 {
- denom := constant.MakeInt64(1)
- denom = constant.Shift(denom, token.SHL, uint(-exp))
- typ = types.Typ[types.UntypedFloat]
- val = constant.BinaryOp(mant, token.QUO, denom)
- return
- }
- if exp > 0 {
- mant = constant.Shift(mant, token.SHL, uint(exp))
- }
- typ = types.Typ[types.UntypedFloat]
- val = mant
- return
- }
-
- typ = types.Typ[types.UntypedInt]
- val = mant
- return
-}
-
-// parseConstDecl parses a ConstDecl:
-//
-// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
-// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
-// bool_lit = "true" | "false" .
-// complex_lit = "(" float_lit "+" float_lit "i" ")" .
-// rune_lit = "(" int_lit "+" int_lit ")" .
-// string_lit = `"` { unicode_char } `"` .
-func (p *parser) parseConstDecl() {
- p.expectKeyword("const")
- pkg, name := p.parseExportedName()
-
- var typ0 types.Type
- if p.tok != '=' {
- // constant types are never structured - no need for parent type
- typ0 = p.parseType(nil)
- }
-
- p.expect('=')
- var typ types.Type
- var val constant.Value
- switch p.tok {
- case scanner.Ident:
- // bool_lit
- if p.lit != "true" && p.lit != "false" {
- p.error("expected true or false")
- }
- typ = types.Typ[types.UntypedBool]
- val = constant.MakeBool(p.lit == "true")
- p.next()
-
- case '-', scanner.Int:
- // int_lit
- typ, val = p.parseNumber()
-
- case '(':
- // complex_lit or rune_lit
- p.next()
- if p.tok == scanner.Char {
- p.next()
- p.expect('+')
- typ = types.Typ[types.UntypedRune]
- _, val = p.parseNumber()
- p.expect(')')
- break
- }
- _, re := p.parseNumber()
- p.expect('+')
- _, im := p.parseNumber()
- p.expectKeyword("i")
- p.expect(')')
- typ = types.Typ[types.UntypedComplex]
- val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
-
- case scanner.Char:
- // rune_lit
- typ = types.Typ[types.UntypedRune]
- val = constant.MakeFromLiteral(p.lit, token.CHAR, 0)
- p.next()
-
- case scanner.String:
- // string_lit
- typ = types.Typ[types.UntypedString]
- val = constant.MakeFromLiteral(p.lit, token.STRING, 0)
- p.next()
-
- default:
- p.errorf("expected literal got %s", scanner.TokenString(p.tok))
- }
-
- if typ0 == nil {
- typ0 = typ
- }
-
- pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
-}
-
-// parseTypeDecl parses a TypeDecl:
-//
-// TypeDecl = "type" ExportedName Type .
-func (p *parser) parseTypeDecl() {
- p.expectKeyword("type")
- pkg, name := p.parseExportedName()
- obj := declTypeName(pkg, name)
-
- // The type object may have been imported before and thus already
- // have a type associated with it. We still need to parse the type
- // structure, but throw it away if the object already has a type.
- // This ensures that all imports refer to the same type object for
- // a given type declaration.
- typ := p.parseType(pkg)
-
- if name := obj.Type().(*types.Named); name.Underlying() == nil {
- name.SetUnderlying(typ)
- }
-}
-
-// parseVarDecl parses a VarDecl:
-//
-// VarDecl = "var" ExportedName Type .
-func (p *parser) parseVarDecl() {
- p.expectKeyword("var")
- pkg, name := p.parseExportedName()
- typ := p.parseType(pkg)
- pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
-}
-
-// parseFunc parses a Func:
-//
-// Func = Signature [ Body ] .
-// Body = "{" ... "}" .
-func (p *parser) parseFunc(recv *types.Var) *types.Signature {
- sig := p.parseSignature(recv)
- if p.tok == '{' {
- p.next()
- for i := 1; i > 0; p.next() {
- switch p.tok {
- case '{':
- i++
- case '}':
- i--
- }
- }
- }
- return sig
-}
-
-// parseMethodDecl parses a MethodDecl:
-//
-// MethodDecl = "func" Receiver Name Func .
-// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
-func (p *parser) parseMethodDecl() {
- // "func" already consumed
- p.expect('(')
- recv, _ := p.parseParameter() // receiver
- p.expect(')')
-
- // determine receiver base type object
- base := deref(recv.Type()).(*types.Named)
-
- // parse method name, signature, and possibly inlined body
- _, name := p.parseName(nil, false)
- sig := p.parseFunc(recv)
-
- // methods always belong to the same package as the base type object
- pkg := base.Obj().Pkg()
-
- // add method to type unless type was imported before
- // and method exists already
- // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
- base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
-}
-
-// parseFuncDecl parses a FuncDecl:
-//
-// FuncDecl = "func" ExportedName Func .
-func (p *parser) parseFuncDecl() {
- // "func" already consumed
- pkg, name := p.parseExportedName()
- typ := p.parseFunc(nil)
- pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
-}
-
-// parseDecl parses a Decl:
-//
-// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
-func (p *parser) parseDecl() {
- if p.tok == scanner.Ident {
- switch p.lit {
- case "import":
- p.parseImportDecl()
- case "const":
- p.parseConstDecl()
- case "type":
- p.parseTypeDecl()
- case "var":
- p.parseVarDecl()
- case "func":
- p.next() // look ahead
- if p.tok == '(' {
- p.parseMethodDecl()
- } else {
- p.parseFuncDecl()
- }
- }
- }
- p.expect('\n')
-}
-
-// ----------------------------------------------------------------------------
-// Export
-
-// parseExport parses an Export:
-//
-// Export = "PackageClause { Decl } "$$" .
-// PackageClause = "package" PackageName [ "safe" ] "\n" .
-func (p *parser) parseExport() *types.Package {
- p.expectKeyword("package")
- name := p.parsePackageName()
- if p.tok == scanner.Ident && p.lit == "safe" {
- // package was compiled with -u option - ignore
- p.next()
- }
- p.expect('\n')
-
- pkg := p.getPkg(p.id, name)
-
- for p.tok != '$' && p.tok != scanner.EOF {
- p.parseDecl()
- }
-
- if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
- // don't call next()/expect() since reading past the
- // export data may cause scanner errors (e.g. NUL chars)
- p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
- }
-
- if n := p.scanner.ErrorCount; n != 0 {
- p.errorf("expected no scanner errors, got %d", n)
- }
-
- // Record all locally referenced packages as imports.
- var imports []*types.Package
- for id, pkg2 := range p.localPkgs {
- if pkg2.Name() == "" {
- p.errorf("%s package has no name", id)
- }
- if id == p.id {
- continue // avoid self-edge
- }
- imports = append(imports, pkg2)
- }
- sort.Sort(byPath(imports))
- pkg.SetImports(imports)
-
- // package was imported completely and without errors
- pkg.MarkComplete()
-
- return pkg
-}
-
-type byPath []*types.Package
-
-func (a byPath) Len() int { return len(a) }
-func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
index de881562..6bb7168d 100644
--- a/vendor/golang.org/x/tools/go/packages/golist.go
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -60,6 +60,7 @@ func (r *responseDeduper) addAll(dr *driverResponse) {
for _, root := range dr.Roots {
r.addRoot(root)
}
+ r.dr.GoVersion = dr.GoVersion
}
func (r *responseDeduper) addPackage(p *Package) {
@@ -454,11 +455,14 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
if err != nil {
return nil, err
}
+
seen := make(map[string]*jsonPackage)
pkgs := make(map[string]*Package)
additionalErrors := make(map[string][]Error)
// Decode the JSON and convert it to Package form.
- var response driverResponse
+ response := &driverResponse{
+ GoVersion: goVersion,
+ }
for dec := json.NewDecoder(buf); dec.More(); {
p := new(jsonPackage)
if err := dec.Decode(p); err != nil {
@@ -600,17 +604,12 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
// Work around https://golang.org/issue/28749:
// cmd/go puts assembly, C, and C++ files in CompiledGoFiles.
- // Filter out any elements of CompiledGoFiles that are also in OtherFiles.
- // We have to keep this workaround in place until go1.12 is a distant memory.
- if len(pkg.OtherFiles) > 0 {
- other := make(map[string]bool, len(pkg.OtherFiles))
- for _, f := range pkg.OtherFiles {
- other[f] = true
- }
-
+ // Remove files from CompiledGoFiles that are non-go files
+ // (or are not files that look like they are from the cache).
+ if len(pkg.CompiledGoFiles) > 0 {
out := pkg.CompiledGoFiles[:0]
for _, f := range pkg.CompiledGoFiles {
- if other[f] {
+ if ext := filepath.Ext(f); ext != ".go" && ext != "" { // ext == "" means the file is from the cache, so probably cgo-processed file
continue
}
out = append(out, f)
@@ -730,7 +729,7 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
}
sort.Slice(response.Packages, func(i, j int) bool { return response.Packages[i].ID < response.Packages[j].ID })
- return &response, nil
+ return response, nil
}
func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool {
@@ -756,6 +755,7 @@ func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool {
return len(p.Error.ImportStack) == 0 || p.Error.ImportStack[len(p.Error.ImportStack)-1] == p.ImportPath
}
+// getGoVersion returns the effective minor version of the go command.
func (state *golistState) getGoVersion() (int, error) {
state.goVersionOnce.Do(func() {
state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner)
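
For illustration (hypothetical helper and paths, not part of the patch), a standalone sketch of the CompiledGoFiles filtering rule introduced above: keep entries whose extension is ".go" or empty (extension-less files come from the build cache, so they are probably cgo-processed Go sources), drop everything else:

package main

import (
	"fmt"
	"path/filepath"
)

// filterCompiledGoFiles applies the same rule as the hunk above.
func filterCompiledGoFiles(files []string) []string {
	out := files[:0]
	for _, f := range files {
		if ext := filepath.Ext(f); ext != ".go" && ext != "" {
			continue // non-Go input such as assembly, C, or C++
		}
		out = append(out, f)
	}
	return out
}

func main() {
	files := []string{"a.go", "asm_amd64.s", "/cache/ab/cdef", "glue.c"} // made-up paths
	fmt.Println(filterCompiledGoFiles(files))                            // [a.go /cache/ab/cdef]
}
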
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
index a93dc6ad..0f1505b8 100644
--- a/vendor/golang.org/x/tools/go/packages/packages.go
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -15,10 +15,12 @@ import (
"go/scanner"
"go/token"
"go/types"
+ "io"
"io/ioutil"
"log"
"os"
"path/filepath"
+ "runtime"
"strings"
"sync"
"time"
@@ -233,6 +235,11 @@ type driverResponse struct {
// Imports will be connected and then type and syntax information added in a
// later pass (see refine).
Packages []*Package
+
+ // GoVersion is the minor version number used by the driver
+ // (e.g. the go command on the PATH) when selecting .go files.
+ // Zero means unknown.
+ GoVersion int
}
// Load loads and returns the Go packages named by the given patterns.
@@ -256,7 +263,7 @@ func Load(cfg *Config, patterns ...string) ([]*Package, error) {
return nil, err
}
l.sizes = response.Sizes
- return l.refine(response.Roots, response.Packages...)
+ return l.refine(response)
}
// defaultDriver is a driver that implements go/packages' fallback behavior.
@@ -297,6 +304,9 @@ type Package struct {
// of the package, or while parsing or type-checking its files.
Errors []Error
+ // TypeErrors contains the subset of errors produced during type checking.
+ TypeErrors []types.Error
+
// GoFiles lists the absolute file paths of the package's Go source files.
GoFiles []string
@@ -532,6 +542,7 @@ type loaderPackage struct {
needsrc bool // load from source (Mode >= LoadTypes)
needtypes bool // type information is either requested or depended on
initial bool // package was matched by a pattern
+ goVersion int // minor version number of go command on PATH
}
// loader holds the working state of a single call to load.
@@ -618,7 +629,8 @@ func newLoader(cfg *Config) *loader {
// refine connects the supplied packages into a graph and then adds type and
// and syntax information as requested by the LoadMode.
-func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
+func (ld *loader) refine(response *driverResponse) ([]*Package, error) {
+ roots := response.Roots
rootMap := make(map[string]int, len(roots))
for i, root := range roots {
rootMap[root] = i
@@ -626,7 +638,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
ld.pkgs = make(map[string]*loaderPackage)
// first pass, fixup and build the map and roots
var initial = make([]*loaderPackage, len(roots))
- for _, pkg := range list {
+ for _, pkg := range response.Packages {
rootIndex := -1
if i, found := rootMap[pkg.ID]; found {
rootIndex = i
@@ -648,6 +660,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
Package: pkg,
needtypes: needtypes,
needsrc: needsrc,
+ goVersion: response.GoVersion,
}
ld.pkgs[lpkg.ID] = lpkg
if rootIndex >= 0 {
@@ -865,12 +878,19 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
// never has to create a types.Package for an indirect dependency,
// which would then require that such created packages be explicitly
// inserted back into the Import graph as a final step after export data loading.
+ // (Hence this return is after the Types assignment.)
// The Diamond test exercises this case.
if !lpkg.needtypes && !lpkg.needsrc {
return
}
if !lpkg.needsrc {
- ld.loadFromExportData(lpkg)
+ if err := ld.loadFromExportData(lpkg); err != nil {
+ lpkg.Errors = append(lpkg.Errors, Error{
+ Pos: "-",
+ Msg: err.Error(),
+ Kind: UnknownError, // e.g. can't find/open/parse export data
+ })
+ }
return // not a source package, don't get syntax trees
}
@@ -902,6 +922,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
case types.Error:
// from type checker
+ lpkg.TypeErrors = append(lpkg.TypeErrors, err)
errs = append(errs, Error{
Pos: err.Fset.Position(err.Pos).String(),
Msg: err.Msg,
@@ -923,11 +944,41 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
lpkg.Errors = append(lpkg.Errors, errs...)
}
+ // If the go command on the PATH is newer than the runtime,
+ // then the go/{scanner,ast,parser,types} packages from the
+ // standard library may be unable to process the files
+ // selected by go list.
+ //
+ // There is currently no way to downgrade the effective
+ // version of the go command (see issue 52078), so we proceed
+ // with the newer go command but, in case of parse or type
+ // errors, we emit an additional diagnostic.
+ //
+ // See:
+ // - golang.org/issue/52078 (flag to set release tags)
+ // - golang.org/issue/50825 (gopls legacy version support)
+ // - golang.org/issue/55883 (go/packages confusing error)
+ //
+ // Should we assert a hard minimum of (currently) go1.16 here?
+ var runtimeVersion int
+ if _, err := fmt.Sscanf(runtime.Version(), "go1.%d", &runtimeVersion); err == nil && runtimeVersion < lpkg.goVersion {
+ defer func() {
+ if len(lpkg.Errors) > 0 {
+ appendError(Error{
+ Pos: "-",
+ Msg: fmt.Sprintf("This application uses version go1.%d of the source-processing packages but runs version go1.%d of 'go list'. It may fail to process source files that rely on newer language features. If so, rebuild the application using a newer version of Go.", runtimeVersion, lpkg.goVersion),
+ Kind: UnknownError,
+ })
+ }
+ }()
+ }
+
if ld.Config.Mode&NeedTypes != 0 && len(lpkg.CompiledGoFiles) == 0 && lpkg.ExportFile != "" {
// The config requested loading sources and types, but sources are missing.
// Add an error to the package and fall back to loading from export data.
appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError})
- ld.loadFromExportData(lpkg)
+ _ = ld.loadFromExportData(lpkg) // ignore any secondary errors
+
return // can't get syntax trees for this package
}
@@ -981,7 +1032,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
tc := &types.Config{
Importer: importer,
- // Type-check bodies of functions only in non-initial packages.
+ // Type-check bodies of functions only in initial packages.
// Example: for import graph A->B->C and initial packages {A,C},
// we can ignore function bodies in B.
IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial,
@@ -1151,9 +1202,10 @@ func sameFile(x, y string) bool {
return false
}
-// loadFromExportData returns type information for the specified
+// loadFromExportData ensures that type information is present for the specified
// package, loading it from an export data file on the first request.
-func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) {
+// On success it sets lpkg.Types to a new Package.
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) error {
if lpkg.PkgPath == "" {
log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
}
@@ -1164,8 +1216,8 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// must be sequential. (Finer-grained locking would require
// changes to the gcexportdata API.)
//
- // The exportMu lock guards the Package.Pkg field and the
- // types.Package it points to, for each Package in the graph.
+ // The exportMu lock guards the lpkg.Types field and the
+ // types.Package it points to, for each loaderPackage in the graph.
//
// Not all accesses to Package.Pkg need to be protected by exportMu:
// graph ordering ensures that direct dependencies of source
@@ -1174,18 +1226,18 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
defer ld.exportMu.Unlock()
if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
- return tpkg, nil // cache hit
+ return nil // cache hit
}
lpkg.IllTyped = true // fail safe
if lpkg.ExportFile == "" {
// Errors while building export data will have been printed to stderr.
- return nil, fmt.Errorf("no export data file")
+ return fmt.Errorf("no export data file")
}
f, err := os.Open(lpkg.ExportFile)
if err != nil {
- return nil, err
+ return err
}
defer f.Close()
@@ -1197,7 +1249,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// queries.)
r, err := gcexportdata.NewReader(f)
if err != nil {
- return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
}
// Build the view.
@@ -1241,7 +1293,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// (May modify incomplete packages in view but not create new ones.)
tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
if err != nil {
- return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
}
if _, ok := view["go.shape"]; ok {
// Account for the pseudopackage "go.shape" that gets
@@ -1254,8 +1306,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
lpkg.Types = tpkg
lpkg.IllTyped = false
-
- return tpkg, nil
+ return nil
}
// impliedLoadMode returns loadMode with its dependencies.
@@ -1271,3 +1322,5 @@ func impliedLoadMode(loadMode LoadMode) LoadMode {
func usesExportData(cfg *Config) bool {
return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
}
+
+var _ interface{} = io.Discard // assert build toolchain is go1.16 or later
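
For illustration (hypothetical version numbers, not part of the patch), a standalone sketch of the version-skew check above: parse the minor version of the runtime that built the binary and compare it with the version the driver reports for the go command:

package main

import (
	"fmt"
	"runtime"
)

func main() {
	// Parse "go1.19.4" (or "go1.19") into 19; development versions such as
	// "devel go1.20-abcdef" do not match, and the check is skipped, exactly
	// as in the hunk above.
	var runtimeMinor int
	if _, err := fmt.Sscanf(runtime.Version(), "go1.%d", &runtimeMinor); err != nil {
		return
	}
	goListMinor := 21 // hypothetical GoVersion reported by the driver
	if runtimeMinor < goListMinor {
		fmt.Printf("built with go1.%d but 'go list' is go1.%d; newer language features may not parse\n",
			runtimeMinor, goListMinor)
	}
}
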
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/internal/gcimporter/bexport.go
index 196cb3f9..30582ed6 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/bexport.go
@@ -12,7 +12,6 @@ import (
"bytes"
"encoding/binary"
"fmt"
- "go/ast"
"go/constant"
"go/token"
"go/types"
@@ -145,7 +144,7 @@ func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error)
objcount := 0
scope := pkg.Scope()
for _, name := range scope.Names() {
- if !ast.IsExported(name) {
+ if !token.IsExported(name) {
continue
}
if trace {
@@ -482,7 +481,7 @@ func (p *exporter) method(m *types.Func) {
p.pos(m)
p.string(m.Name())
- if m.Name() != "_" && !ast.IsExported(m.Name()) {
+ if m.Name() != "_" && !token.IsExported(m.Name()) {
p.pkg(m.Pkg(), false)
}
@@ -501,7 +500,7 @@ func (p *exporter) fieldName(f *types.Var) {
// 3) field name doesn't match base type name (alias name)
bname := basetypeName(f.Type())
if name == bname {
- if ast.IsExported(name) {
+ if token.IsExported(name) {
name = "" // 1) we don't need to know the field name or package
} else {
name = "?" // 2) use unexported name "?" to force package export
@@ -514,7 +513,7 @@ func (p *exporter) fieldName(f *types.Var) {
}
p.string(name)
- if name != "" && !ast.IsExported(name) {
+ if name != "" && !token.IsExported(name) {
p.pkg(f.Pkg(), false)
}
}
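
For reference, token.IsExported (substituted above for ast.IsExported, dropping the go/ast dependency) reports whether a name begins with an upper-case letter; a minimal illustration:

package main

import (
	"fmt"
	"go/token"
)

func main() {
	for _, name := range []string{"Buffer", "buffer", "_", "Π"} {
		fmt.Println(name, token.IsExported(name)) // true, false, false, true
	}
}
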
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/internal/gcimporter/bimport.go
index b85de014..b85de014 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/bimport.go
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go
index f6437feb..f6437feb 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go
new file mode 100644
index 00000000..0372fb3a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go
@@ -0,0 +1,265 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a reduced copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go.
+
+// Package gcimporter provides various functions for reading
+// gc-generated object files that can be used to implement the
+// Importer interface defined by the Go 1.5 standard library package.
+package gcimporter // import "golang.org/x/tools/internal/gcimporter"
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/build"
+ "go/token"
+ "go/types"
+ "io"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+ "sync"
+)
+
+const (
+ // Enable debug during development: it adds some additional checks, and
+ // prevents errors from being recovered.
+ debug = false
+
+ // If trace is set, debugging output is printed to std out.
+ trace = false
+)
+
+var exportMap sync.Map // package dir → func() (string, bool)
+
+// lookupGorootExport returns the location of the export data
+// (normally found in the build cache, but located in GOROOT/pkg
+// in prior Go releases) for the package located in pkgDir.
+//
+// (We use the package's directory instead of its import path
+// mainly to simplify handling of the packages in src/vendor
+// and cmd/vendor.)
+func lookupGorootExport(pkgDir string) (string, bool) {
+ f, ok := exportMap.Load(pkgDir)
+ if !ok {
+ var (
+ listOnce sync.Once
+ exportPath string
+ )
+ f, _ = exportMap.LoadOrStore(pkgDir, func() (string, bool) {
+ listOnce.Do(func() {
+ cmd := exec.Command("go", "list", "-export", "-f", "{{.Export}}", pkgDir)
+ cmd.Dir = build.Default.GOROOT
+ var output []byte
+ output, err := cmd.Output()
+ if err != nil {
+ return
+ }
+
+ exports := strings.Split(string(bytes.TrimSpace(output)), "\n")
+ if len(exports) != 1 {
+ return
+ }
+
+ exportPath = exports[0]
+ })
+
+ return exportPath, exportPath != ""
+ })
+ }
+
+ return f.(func() (string, bool))()
+}
+
+var pkgExts = [...]string{".a", ".o"}
+
+// FindPkg returns the filename and unique package id for an import
+// path based on package information provided by build.Import (using
+// the build.Default build.Context). A relative srcDir is interpreted
+// relative to the current working directory.
+// If no file was found, an empty filename is returned.
+func FindPkg(path, srcDir string) (filename, id string) {
+ if path == "" {
+ return
+ }
+
+ var noext string
+ switch {
+ default:
+ // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
+ // Don't require the source files to be present.
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
+ srcDir = abs
+ }
+ bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
+ if bp.PkgObj == "" {
+ var ok bool
+ if bp.Goroot && bp.Dir != "" {
+ filename, ok = lookupGorootExport(bp.Dir)
+ }
+ if !ok {
+ id = path // make sure we have an id to print in error message
+ return
+ }
+ } else {
+ noext = strings.TrimSuffix(bp.PkgObj, ".a")
+ id = bp.ImportPath
+ }
+
+ case build.IsLocalImport(path):
+ // "./x" -> "/this/directory/x.ext", "/this/directory/x"
+ noext = filepath.Join(srcDir, path)
+ id = noext
+
+ case filepath.IsAbs(path):
+ // for completeness only - go/build.Import
+ // does not support absolute imports
+ // "/x" -> "/x.ext", "/x"
+ noext = path
+ id = path
+ }
+
+ if false { // for debugging
+ if path != id {
+ fmt.Printf("%s -> %s\n", path, id)
+ }
+ }
+
+ if filename != "" {
+ if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+ return
+ }
+ }
+
+ // try extensions
+ for _, ext := range pkgExts {
+ filename = noext + ext
+ if f, err := os.Stat(filename); err == nil && !f.IsDir() {
+ return
+ }
+ }
+
+ filename = "" // not found
+ return
+}
+
+// Import imports a gc-generated package given its import path and srcDir, adds
+// the corresponding package object to the packages map, and returns the object.
+// The packages map must contain all packages already imported.
+func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
+ var rc io.ReadCloser
+ var filename, id string
+ if lookup != nil {
+ // With custom lookup specified, assume that caller has
+ // converted path to a canonical import path for use in the map.
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ id = path
+
+ // No need to re-import if the package was imported completely before.
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+ f, err := lookup(path)
+ if err != nil {
+ return nil, err
+ }
+ rc = f
+ } else {
+ filename, id = FindPkg(path, srcDir)
+ if filename == "" {
+ if path == "unsafe" {
+ return types.Unsafe, nil
+ }
+ return nil, fmt.Errorf("can't find import: %q", id)
+ }
+
+ // no need to re-import if the package was imported completely before
+ if pkg = packages[id]; pkg != nil && pkg.Complete() {
+ return
+ }
+
+ // open file
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() {
+ if err != nil {
+ // add file name to error
+ err = fmt.Errorf("%s: %v", filename, err)
+ }
+ }()
+ rc = f
+ }
+ defer rc.Close()
+
+ var hdr string
+ var size int64
+ buf := bufio.NewReader(rc)
+ if hdr, size, err = FindExportData(buf); err != nil {
+ return
+ }
+
+ switch hdr {
+ case "$$B\n":
+ var data []byte
+ data, err = ioutil.ReadAll(buf)
+ if err != nil {
+ break
+ }
+
+ // TODO(gri): allow clients of go/importer to provide a FileSet.
+ // Or, define a new standard go/types/gcexportdata package.
+ fset := token.NewFileSet()
+
+ // The unified export format starts with a 'u'; the indexed
+ // export format starts with an 'i'; and the older binary
+ // export format starts with a 'c', 'd', or 'v' (from
+ // "version"). Select the appropriate importer.
+ if len(data) > 0 {
+ switch data[0] {
+ case 'i':
+ _, pkg, err := IImportData(fset, packages, data[1:], id)
+ return pkg, err
+
+ case 'v', 'c', 'd':
+ _, pkg, err := BImportData(fset, packages, data, id)
+ return pkg, err
+
+ case 'u':
+ _, pkg, err := UImportData(fset, packages, data[1:size], id)
+ return pkg, err
+
+ default:
+ l := len(data)
+ if l > 10 {
+ l = 10
+ }
+ return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
+ }
+ }
+
+ default:
+ err = fmt.Errorf("unknown export data header: %q", hdr)
+ }
+
+ return
+}
+
+func deref(typ types.Type) types.Type {
+ if p, _ := typ.(*types.Pointer); p != nil {
+ return p.Elem()
+ }
+ return typ
+}
+
+type byPath []*types.Package
+
+func (a byPath) Len() int { return len(a) }
+func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
index 9a4ff329..ba53cdcd 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
@@ -12,7 +12,6 @@ import (
"bytes"
"encoding/binary"
"fmt"
- "go/ast"
"go/constant"
"go/token"
"go/types"
@@ -23,9 +22,45 @@ import (
"strconv"
"strings"
+ "golang.org/x/tools/internal/tokeninternal"
"golang.org/x/tools/internal/typeparams"
)
+// IExportShallow encodes "shallow" export data for the specified package.
+//
+// No promises are made about the encoding other than that it can be
+// decoded by the same version of IImportShallow. If you plan to save
+// export data in the file system, be sure to include a cryptographic
+// digest of the executable in the key to avoid version skew.
+func IExportShallow(fset *token.FileSet, pkg *types.Package) ([]byte, error) {
+ // In principle this operation can only fail if out.Write fails,
+ // but that's impossible for bytes.Buffer---and as a matter of
+ // fact iexportCommon doesn't even check for I/O errors.
+ // TODO(adonovan): handle I/O errors properly.
+ // TODO(adonovan): use byte slices throughout, avoiding copying.
+ const bundle, shallow = false, true
+ var out bytes.Buffer
+ err := iexportCommon(&out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg})
+ return out.Bytes(), err
+}
+
+// IImportShallow decodes "shallow" types.Package data encoded by IExportShallow
+// in the same executable. This function cannot import data from
+// cmd/compile or gcexportdata.Write.
+func IImportShallow(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string, insert InsertType) (*types.Package, error) {
+ const bundle = false
+ pkgs, err := iimportCommon(fset, imports, data, bundle, path, insert)
+ if err != nil {
+ return nil, err
+ }
+ return pkgs[0], nil
+}
+
+// InsertType is the type of a function that creates a types.TypeName
+// object for a named type and inserts it into the scope of the
+// specified Package.
+type InsertType = func(pkg *types.Package, name string)
+
// Current bundled export format version. Increase with each format change.
// 0: initial implementation
const bundleVersion = 0
@@ -36,15 +71,17 @@ const bundleVersion = 0
// The package path of the top-level package will not be recorded,
// so that calls to IImportData can override with a provided package path.
func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
- return iexportCommon(out, fset, false, iexportVersion, []*types.Package{pkg})
+ const bundle, shallow = false, false
+ return iexportCommon(out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg})
}
// IExportBundle writes an indexed export bundle for pkgs to out.
func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error {
- return iexportCommon(out, fset, true, iexportVersion, pkgs)
+ const bundle, shallow = true, false
+ return iexportCommon(out, fset, bundle, shallow, iexportVersion, pkgs)
}
-func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int, pkgs []*types.Package) (err error) {
+func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, version int, pkgs []*types.Package) (err error) {
if !debug {
defer func() {
if e := recover(); e != nil {
@@ -61,6 +98,7 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int,
p := iexporter{
fset: fset,
version: version,
+ shallow: shallow,
allPkgs: map[*types.Package]bool{},
stringIndex: map[string]uint64{},
declIndex: map[types.Object]uint64{},
@@ -82,7 +120,7 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int,
for _, pkg := range pkgs {
scope := pkg.Scope()
for _, name := range scope.Names() {
- if ast.IsExported(name) {
+ if token.IsExported(name) {
p.pushDecl(scope.Lookup(name))
}
}
@@ -101,6 +139,17 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int,
p.doDecl(p.declTodo.popHead())
}
+ // Produce index of offset of each file record in files.
+ var files intWriter
+ var fileOffset []uint64 // fileOffset[i] is offset in files of file encoded as i
+ if p.shallow {
+ fileOffset = make([]uint64, len(p.fileInfos))
+ for i, info := range p.fileInfos {
+ fileOffset[i] = uint64(files.Len())
+ p.encodeFile(&files, info.file, info.needed)
+ }
+ }
+
// Append indices to data0 section.
dataLen := uint64(p.data0.Len())
w := p.newWriter()
@@ -126,16 +175,75 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int,
}
hdr.uint64(uint64(p.version))
hdr.uint64(uint64(p.strings.Len()))
+ if p.shallow {
+ hdr.uint64(uint64(files.Len()))
+ hdr.uint64(uint64(len(fileOffset)))
+ for _, offset := range fileOffset {
+ hdr.uint64(offset)
+ }
+ }
hdr.uint64(dataLen)
// Flush output.
io.Copy(out, &hdr)
io.Copy(out, &p.strings)
+ if p.shallow {
+ io.Copy(out, &files)
+ }
io.Copy(out, &p.data0)
return nil
}
+// encodeFile writes to w a representation of the file sufficient to
+// faithfully restore position information about all needed offsets.
+// Mutates the needed array.
+func (p *iexporter) encodeFile(w *intWriter, file *token.File, needed []uint64) {
+ _ = needed[0] // precondition: needed is non-empty
+
+ w.uint64(p.stringOff(file.Name()))
+
+ size := uint64(file.Size())
+ w.uint64(size)
+
+ // Sort the set of needed offsets. Duplicates are harmless.
+ sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] })
+
+ lines := tokeninternal.GetLines(file) // byte offset of each line start
+ w.uint64(uint64(len(lines)))
+
+ // Rather than record the entire array of line start offsets,
+ // we save only a sparse list of (index, offset) pairs for
+ // the start of each line that contains a needed position.
+ var sparse [][2]int // (index, offset) pairs
+outer:
+ for i, lineStart := range lines {
+ lineEnd := size
+ if i < len(lines)-1 {
+ lineEnd = uint64(lines[i+1])
+ }
+ // Does this line contain a needed offset?
+ if needed[0] < lineEnd {
+ sparse = append(sparse, [2]int{i, lineStart})
+ for needed[0] < lineEnd {
+ needed = needed[1:]
+ if len(needed) == 0 {
+ break outer
+ }
+ }
+ }
+ }
+
+ // Delta-encode the (index, offset) pairs.
+ w.uint64(uint64(len(sparse)))
+ var prev [2]int
+ for _, pair := range sparse {
+ w.uint64(uint64(pair[0] - prev[0]))
+ w.uint64(uint64(pair[1] - prev[1]))
+ prev = pair
+ }
+}
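
For reference, a minimal sketch (illustrative names only) of the sparse delta encoding used by encodeFile above: the sorted (line index, line start offset) pairs are written as successive differences so the uvarints stay small, and decoding simply re-accumulates them.

package main

import "fmt"

// deltaEncode turns sorted (line index, line start) pairs into
// successive differences, as encodeFile does before writing uvarints.
func deltaEncode(pairs [][2]int) [][2]int {
	out := make([][2]int, 0, len(pairs))
	var prev [2]int
	for _, p := range pairs {
		out = append(out, [2]int{p[0] - prev[0], p[1] - prev[1]})
		prev = p
	}
	return out
}

// deltaDecode re-accumulates the differences, reversing deltaEncode.
func deltaDecode(deltas [][2]int) [][2]int {
	out := make([][2]int, 0, len(deltas))
	var cur [2]int
	for _, d := range deltas {
		cur[0] += d[0]
		cur[1] += d[1]
		out = append(out, cur)
	}
	return out
}

func main() {
	sparse := [][2]int{{0, 0}, {3, 57}, {10, 240}} // (line index, byte offset)
	fmt.Println(deltaEncode(sparse))              // [[0 0] [3 57] [7 183]]
	fmt.Println(deltaDecode(deltaEncode(sparse))) // [[0 0] [3 57] [10 240]]
}
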
+
// writeIndex writes out an object index. mainIndex indicates whether
// we're writing out the main index, which is also read by
// non-compiler tools and includes a complete package description
@@ -205,7 +313,8 @@ type iexporter struct {
out *bytes.Buffer
version int
- localpkg *types.Package
+ shallow bool // don't put types from other packages in the index
+ localpkg *types.Package // (nil in bundle mode)
// allPkgs tracks all packages that have been referenced by
// the export data, so we can ensure to include them in the
@@ -217,6 +326,12 @@ type iexporter struct {
strings intWriter
stringIndex map[string]uint64
+ // In shallow mode, object positions are encoded as (file, offset).
+ // Each file is recorded as a line-number table.
+ // Only the lines of needed positions are saved faithfully.
+ fileInfo map[*token.File]uint64 // value is index in fileInfos
+ fileInfos []*filePositions
+
data0 intWriter
declIndex map[types.Object]uint64
tparamNames map[types.Object]string // typeparam->exported name
@@ -225,6 +340,11 @@ type iexporter struct {
indent int // for tracing support
}
+type filePositions struct {
+ file *token.File
+ needed []uint64 // unordered list of needed file offsets
+}
+
func (p *iexporter) trace(format string, args ...interface{}) {
if !trace {
// Call sites should also be guarded, but having this check here allows
@@ -248,6 +368,25 @@ func (p *iexporter) stringOff(s string) uint64 {
return off
}
+// fileIndexAndOffset returns the index of the token.File and the byte offset of pos within it.
+func (p *iexporter) fileIndexAndOffset(file *token.File, pos token.Pos) (uint64, uint64) {
+ index, ok := p.fileInfo[file]
+ if !ok {
+ index = uint64(len(p.fileInfo))
+ p.fileInfos = append(p.fileInfos, &filePositions{file: file})
+ if p.fileInfo == nil {
+ p.fileInfo = make(map[*token.File]uint64)
+ }
+ p.fileInfo[file] = index
+ }
+ // Record each needed offset.
+ info := p.fileInfos[index]
+ offset := uint64(file.Offset(pos))
+ info.needed = append(info.needed, offset)
+
+ return index, offset
+}
+
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(obj types.Object) {
// Package unsafe is known to the compiler and predeclared.
@@ -256,6 +395,11 @@ func (p *iexporter) pushDecl(obj types.Object) {
panic("cannot export package unsafe")
}
+ // Shallow export data: don't index decls from other packages.
+ if p.shallow && obj.Pkg() != p.localpkg {
+ return
+ }
+
if _, ok := p.declIndex[obj]; ok {
return
}
@@ -303,7 +447,13 @@ func (p *iexporter) doDecl(obj types.Object) {
case *types.Func:
sig, _ := obj.Type().(*types.Signature)
if sig.Recv() != nil {
- panic(internalErrorf("unexpected method: %v", sig))
+ // We shouldn't see methods in the package scope,
+ // but the type checker may repair "func () F() {}"
+ // to "func (Invalid) F()" and then treat it like "func F()",
+ // so allow that. See golang/go#57729.
+ if sig.Recv().Type() != types.Typ[types.Invalid] {
+ panic(internalErrorf("unexpected method: %v", sig))
+ }
}
// Function.
@@ -415,13 +565,30 @@ func (w *exportWriter) tag(tag byte) {
}
func (w *exportWriter) pos(pos token.Pos) {
- if w.p.version >= iexportVersionPosCol {
+ if w.p.shallow {
+ w.posV2(pos)
+ } else if w.p.version >= iexportVersionPosCol {
w.posV1(pos)
} else {
w.posV0(pos)
}
}
+// posV2 encoding (used only in shallow mode) records positions as
+// (file, offset), where file is the index in the token.File table
+// (which records the file name and newline offsets) and offset is a
+// byte offset. It effectively ignores //line directives.
+func (w *exportWriter) posV2(pos token.Pos) {
+ if pos == token.NoPos {
+ w.uint64(0)
+ return
+ }
+ file := w.p.fset.File(pos) // fset must be non-nil
+ index, offset := w.p.fileIndexAndOffset(file, pos)
+ w.uint64(1 + index)
+ w.uint64(offset)
+}
+
func (w *exportWriter) posV1(pos token.Pos) {
if w.p.fset == nil {
w.int64(0)
@@ -497,7 +664,7 @@ func (w *exportWriter) pkg(pkg *types.Package) {
w.string(w.exportPath(pkg))
}
-func (w *exportWriter) qualifiedIdent(obj types.Object) {
+func (w *exportWriter) qualifiedType(obj *types.TypeName) {
name := w.p.exportName(obj)
// Ensure any referenced declarations are written out too.
@@ -556,11 +723,11 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
return
}
w.startType(definedType)
- w.qualifiedIdent(t.Obj())
+ w.qualifiedType(t.Obj())
case *typeparams.TypeParam:
w.startType(typeParamType)
- w.qualifiedIdent(t.Obj())
+ w.qualifiedType(t.Obj())
case *types.Pointer:
w.startType(pointerType)
@@ -602,14 +769,17 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
case *types.Struct:
w.startType(structType)
- w.setPkg(pkg, true)
-
n := t.NumFields()
+ if n > 0 {
+ w.setPkg(t.Field(0).Pkg(), true) // qualifying package for field objects
+ } else {
+ w.setPkg(pkg, true)
+ }
w.uint64(uint64(n))
for i := 0; i < n; i++ {
f := t.Field(i)
w.pos(f.Pos())
- w.string(f.Name())
+ w.string(f.Name()) // unexported fields implicitly qualified by prior setPkg
w.typ(f.Type(), pkg)
w.bool(f.Anonymous())
w.string(t.Tag(i)) // note (or tag)
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
index 4caa0f55..448f903e 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
@@ -51,6 +51,8 @@ const (
iexportVersionPosCol = 1
iexportVersionGo1_18 = 2
iexportVersionGenerics = 2
+
+ iexportVersionCurrent = 2
)
type ident struct {
@@ -83,7 +85,7 @@ const (
// If the export data version is not recognized or the format is otherwise
// compromised, an error is returned.
func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (int, *types.Package, error) {
- pkgs, err := iimportCommon(fset, imports, data, false, path)
+ pkgs, err := iimportCommon(fset, imports, data, false, path, nil)
if err != nil {
return 0, nil, err
}
@@ -92,11 +94,11 @@ func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []
// IImportBundle imports a set of packages from the serialized package bundle.
func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data []byte) ([]*types.Package, error) {
- return iimportCommon(fset, imports, data, true, "")
+ return iimportCommon(fset, imports, data, true, "", nil)
}
-func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string) (pkgs []*types.Package, err error) {
- const currentVersion = 1
+func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string, insert InsertType) (pkgs []*types.Package, err error) {
+ const currentVersion = iexportVersionCurrent
version := int64(-1)
if !debug {
defer func() {
@@ -135,19 +137,34 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
}
sLen := int64(r.uint64())
+ var fLen int64
+ var fileOffset []uint64
+ if insert != nil {
+ // Shallow mode uses a different position encoding.
+ fLen = int64(r.uint64())
+ fileOffset = make([]uint64, r.uint64())
+ for i := range fileOffset {
+ fileOffset[i] = r.uint64()
+ }
+ }
dLen := int64(r.uint64())
whence, _ := r.Seek(0, io.SeekCurrent)
stringData := data[whence : whence+sLen]
- declData := data[whence+sLen : whence+sLen+dLen]
- r.Seek(sLen+dLen, io.SeekCurrent)
+ fileData := data[whence+sLen : whence+sLen+fLen]
+ declData := data[whence+sLen+fLen : whence+sLen+fLen+dLen]
+ r.Seek(sLen+fLen+dLen, io.SeekCurrent)
p := iimporter{
version: int(version),
ipath: path,
+ insert: insert,
stringData: stringData,
stringCache: make(map[uint64]string),
+ fileOffset: fileOffset,
+ fileData: fileData,
+ fileCache: make([]*token.File, len(fileOffset)),
pkgCache: make(map[uint64]*types.Package),
declData: declData,
@@ -185,11 +202,18 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
} else if pkg.Name() != pkgName {
errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
}
+ if i == 0 && !bundle {
+ p.localpkg = pkg
+ }
p.pkgCache[pkgPathOff] = pkg
+ // Read index for package.
nameIndex := make(map[string]uint64)
- for nSyms := r.uint64(); nSyms > 0; nSyms-- {
+ nSyms := r.uint64()
+ // In shallow mode we don't expect an index for other packages.
+ assert(nSyms == 0 || p.localpkg == pkg || p.insert == nil)
+ for ; nSyms > 0; nSyms-- {
name := p.stringAt(r.uint64())
nameIndex[name] = r.uint64()
}
@@ -265,8 +289,14 @@ type iimporter struct {
version int
ipath string
+ localpkg *types.Package
+ insert func(pkg *types.Package, name string) // "shallow" mode only
+
stringData []byte
stringCache map[uint64]string
+ fileOffset []uint64 // fileOffset[i] is offset in fileData for info about file encoded as i
+ fileData []byte
+ fileCache []*token.File // memoized decoding of file encoded as i
pkgCache map[uint64]*types.Package
declData []byte
@@ -308,6 +338,13 @@ func (p *iimporter) doDecl(pkg *types.Package, name string) {
off, ok := p.pkgIndex[pkg][name]
if !ok {
+ // In "shallow" mode, call back to the application to
+ // find the object and insert it into the package scope.
+ if p.insert != nil {
+ assert(pkg != p.localpkg)
+ p.insert(pkg, name) // "can't fail"
+ return
+ }
errorf("%v.%v not in index", pkg, name)
}
@@ -332,6 +369,55 @@ func (p *iimporter) stringAt(off uint64) string {
return s
}
+func (p *iimporter) fileAt(index uint64) *token.File {
+ file := p.fileCache[index]
+ if file == nil {
+ off := p.fileOffset[index]
+ file = p.decodeFile(intReader{bytes.NewReader(p.fileData[off:]), p.ipath})
+ p.fileCache[index] = file
+ }
+ return file
+}
+
+func (p *iimporter) decodeFile(rd intReader) *token.File {
+ filename := p.stringAt(rd.uint64())
+ size := int(rd.uint64())
+ file := p.fake.fset.AddFile(filename, -1, size)
+
+ // SetLines requires a strictly increasing sequence.
+ // Because it is common for clients to derive the interval
+ // [start, start+len(name)] from a start position, and we
+ // want to ensure that the end offset is on the same line,
+ // we fill in the gaps of the sparse encoding with values
+ // that strictly increase by the largest possible amount.
+ // This allows us to avoid having to record the actual end
+ // offset of each needed line.
+
+ lines := make([]int, int(rd.uint64()))
+ var index, offset int
+ for i, n := 0, int(rd.uint64()); i < n; i++ {
+ index += int(rd.uint64())
+ offset += int(rd.uint64())
+ lines[index] = offset
+
+ // Ensure monotonicity between points.
+ for j := index - 1; j > 0 && lines[j] == 0; j-- {
+ lines[j] = lines[j+1] - 1
+ }
+ }
+
+ // Ensure monotonicity after last point.
+ for j := len(lines) - 1; j > 0 && lines[j] == 0; j-- {
+ size--
+ lines[j] = size
+ }
+
+ if !file.SetLines(lines) {
+ errorf("SetLines failed: %d", lines) // can't happen
+ }
+ return file
+}
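
A tiny illustration, with made-up numbers, of the gap-filling strategy described in the comments of decodeFile above: only a few line starts are recorded, and the remaining entries are filled so the slice stays strictly increasing, as token.File.SetLines requires.

package main

import "fmt"

func main() {
	size := 100
	lines := make([]int, 6) // pretend only lines 0 and 3 were "needed"
	lines[0], lines[3] = 0, 40

	// Fill the gap before the recorded point at index 3,
	// counting down from its offset.
	for j := 3 - 1; j > 0 && lines[j] == 0; j-- {
		lines[j] = lines[j+1] - 1
	}
	// Fill the tail, counting down from the file size.
	for j := len(lines) - 1; j > 0 && lines[j] == 0; j-- {
		size--
		lines[j] = size
	}
	fmt.Println(lines) // [0 38 39 40 98 99], strictly increasing
}
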
+
func (p *iimporter) pkgAt(off uint64) *types.Package {
if pkg, ok := p.pkgCache[off]; ok {
return pkg
@@ -625,6 +711,9 @@ func (r *importReader) qualifiedIdent() (*types.Package, string) {
}
func (r *importReader) pos() token.Pos {
+ if r.p.insert != nil { // shallow mode
+ return r.posv2()
+ }
if r.p.version >= iexportVersionPosCol {
r.posv1()
} else {
@@ -661,6 +750,15 @@ func (r *importReader) posv1() {
}
}
+func (r *importReader) posv2() token.Pos {
+ file := r.uint64()
+ if file == 0 {
+ return token.NoPos
+ }
+ tf := r.p.fileAt(file - 1)
+ return tf.Pos(int(r.uint64()))
+}
+
func (r *importReader) typ() types.Type {
return r.p.typAt(r.uint64(), nil)
}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go
index 8b163e3d..8b163e3d 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go
index 49984f40..49984f40 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go117.go b/vendor/golang.org/x/tools/internal/gcimporter/support_go117.go
index d892273e..d892273e 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go117.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/support_go117.go
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go118.go b/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go
index a9938432..edbe6ea7 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/support_go118.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go
@@ -21,3 +21,17 @@ func additionalPredeclared() []types.Type {
types.Universe.Lookup("any").Type(),
}
}
+
+// See cmd/compile/internal/types.SplitVargenSuffix.
+func splitVargenSuffix(name string) (base, suffix string) {
+ i := len(name)
+ for i > 0 && name[i-1] >= '0' && name[i-1] <= '9' {
+ i--
+ }
+ const dot = "·"
+ if i >= len(dot) && name[i-len(dot):i] == dot {
+ i -= len(dot)
+ return name[:i], name[i:]
+ }
+ return name, ""
+}
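
A hypothetical usage sketch of splitVargenSuffix: the function is re-declared here verbatim so the snippet runs standalone; it peels a trailing "·N" vargen suffix off a compiler-generated name and leaves ordinary names untouched.

package main

import "fmt"

// splitVargenSuffix is re-declared exactly as above for a runnable demo.
func splitVargenSuffix(name string) (base, suffix string) {
	i := len(name)
	for i > 0 && name[i-1] >= '0' && name[i-1] <= '9' {
		i--
	}
	const dot = "·"
	if i >= len(dot) && name[i-len(dot):i] == dot {
		i -= len(dot)
		return name[:i], name[i:]
	}
	return name, ""
}

func main() {
	fmt.Println(splitVargenSuffix("T·3")) // T ·3  (vargen suffix split off)
	fmt.Println(splitVargenSuffix("T"))   // T     (no suffix)
}
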
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go b/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go
index 286bf445..286bf445 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go b/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go
index b5d69ffb..b5d69ffb 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go b/vendor/golang.org/x/tools/internal/gcimporter/ureader_no.go
index 8eb20729..8eb20729 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/ureader_no.go
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go
index 3c1a4375..b285a11c 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go
@@ -14,7 +14,7 @@ import (
"go/types"
"strings"
- "golang.org/x/tools/go/internal/pkgbits"
+ "golang.org/x/tools/internal/pkgbits"
)
// A pkgReader holds the shared state for reading a unified IR package
@@ -36,6 +36,12 @@ type pkgReader struct {
// laterFns holds functions that need to be invoked at the end of
// import reading.
laterFns []func()
+ // laterFors is used in case of 'type A B' to ensure that B is processed before A.
+ laterFors map[types.Type]int
+
+ // ifaces holds a list of constructed Interfaces, which need to have
+ // Complete called after importing is done.
+ ifaces []*types.Interface
}
// later adds a function to be invoked at the end of import reading.
@@ -63,6 +69,15 @@ func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []
return
}
+// laterFor adds a function to be invoked at the end of import reading, and records the type that the function is finishing.
+func (pr *pkgReader) laterFor(t types.Type, fn func()) {
+ if pr.laterFors == nil {
+ pr.laterFors = make(map[types.Type]int)
+ }
+ pr.laterFors[t] = len(pr.laterFns)
+ pr.laterFns = append(pr.laterFns, fn)
+}
+
// readUnifiedPackage reads a package description from the given
// unified IR export data decoder.
func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[string]*types.Package, input pkgbits.PkgDecoder) *types.Package {
@@ -102,6 +117,10 @@ func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[st
fn()
}
+ for _, iface := range pr.ifaces {
+ iface.Complete()
+ }
+
pkg.MarkComplete()
return pkg
}
@@ -139,6 +158,17 @@ func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pk
}
}
+func (pr *pkgReader) tempReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
+ return &reader{
+ Decoder: pr.TempDecoder(k, idx, marker),
+ p: pr,
+ }
+}
+
+func (pr *pkgReader) retireReader(r *reader) {
+ pr.RetireDecoder(&r.Decoder)
+}
+
// @@@ Positions
func (r *reader) pos() token.Pos {
@@ -163,26 +193,29 @@ func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string {
return b
}
- r := pr.newReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
+ var filename string
+ {
+ r := pr.tempReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
- // Within types2, position bases have a lot more details (e.g.,
- // keeping track of where //line directives appeared exactly).
- //
- // For go/types, we just track the file name.
+ // Within types2, position bases have a lot more details (e.g.,
+ // keeping track of where //line directives appeared exactly).
+ //
+ // For go/types, we just track the file name.
- filename := r.String()
+ filename = r.String()
- if r.Bool() { // file base
- // Was: "b = token.NewTrimmedFileBase(filename, true)"
- } else { // line base
- pos := r.pos()
- line := r.Uint()
- col := r.Uint()
+ if r.Bool() { // file base
+ // Was: "b = token.NewTrimmedFileBase(filename, true)"
+ } else { // line base
+ pos := r.pos()
+ line := r.Uint()
+ col := r.Uint()
- // Was: "b = token.NewLineBase(pos, filename, true, line, col)"
- _, _, _ = pos, line, col
+ // Was: "b = token.NewLineBase(pos, filename, true, line, col)"
+ _, _, _ = pos, line, col
+ }
+ pr.retireReader(r)
}
-
b := filename
pr.posBases[idx] = b
return b
@@ -231,11 +264,35 @@ func (r *reader) doPkg() *types.Package {
for i := range imports {
imports[i] = r.pkg()
}
- pkg.SetImports(imports)
+ pkg.SetImports(flattenImports(imports))
return pkg
}
+// flattenImports returns the transitive closure of all imported
+// packages rooted from pkgs.
+func flattenImports(pkgs []*types.Package) []*types.Package {
+ var res []*types.Package
+ seen := make(map[*types.Package]struct{})
+ for _, pkg := range pkgs {
+ if _, ok := seen[pkg]; ok {
+ continue
+ }
+ seen[pkg] = struct{}{}
+ res = append(res, pkg)
+
+ // pkg.Imports() is already flattened.
+ for _, pkg := range pkg.Imports() {
+ if _, ok := seen[pkg]; ok {
+ continue
+ }
+ seen[pkg] = struct{}{}
+ res = append(res, pkg)
+ }
+ }
+ return res
+}
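
A standalone sketch of the transitive flattening performed by flattenImports above, using hand-built go/types packages (the package paths are made up). Because each imported package's Imports() list is itself already flattened, one level of expansion is enough.

package main

import (
	"fmt"
	"go/types"
)

func main() {
	// Hand-built packages: a imports b, and b imports c.
	c := types.NewPackage("example.com/c", "c")
	b := types.NewPackage("example.com/b", "b")
	b.SetImports([]*types.Package{c})
	a := types.NewPackage("example.com/a", "a")

	// Flatten a's direct imports the same way flattenImports does.
	direct := []*types.Package{b}
	seen := make(map[*types.Package]bool)
	var flat []*types.Package
	for _, p := range direct {
		if !seen[p] {
			seen[p] = true
			flat = append(flat, p)
		}
		for _, q := range p.Imports() { // already flattened
			if !seen[q] {
				seen[q] = true
				flat = append(flat, q)
			}
		}
	}
	a.SetImports(flat)

	for _, p := range a.Imports() {
		fmt.Println(p.Path()) // example.com/b, then example.com/c
	}
}
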
+
// @@@ Types
func (r *reader) typ() types.Type {
@@ -264,12 +321,15 @@ func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type {
return typ
}
- r := pr.newReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
- r.dict = dict
-
- typ := r.doTyp()
- assert(typ != nil)
+ var typ types.Type
+ {
+ r := pr.tempReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
+ r.dict = dict
+ typ = r.doTyp()
+ assert(typ != nil)
+ pr.retireReader(r)
+ }
// See comment in pkgReader.typIdx explaining how this happens.
if prev := *where; prev != nil {
return prev
@@ -372,6 +432,16 @@ func (r *reader) interfaceType() *types.Interface {
if implicit {
iface.MarkImplicit()
}
+
+ // We need to call iface.Complete(), but if there are any embedded
+ // defined types, then we may not have set their underlying
+ // interface type yet. So we need to defer calling Complete until
+ // after we've called SetUnderlying everywhere.
+ //
+ // TODO(mdempsky): After CL 424876 lands, it should be safe to call
+ // iface.Complete() immediately.
+ r.p.ifaces = append(r.p.ifaces, iface)
+
return iface
}
@@ -425,18 +495,30 @@ func (r *reader) obj() (types.Object, []types.Type) {
}
func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
- rname := pr.newReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
- objPkg, objName := rname.qualifiedIdent()
- assert(objName != "")
+ var objPkg *types.Package
+ var objName string
+ var tag pkgbits.CodeObj
+ {
+ rname := pr.tempReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
- tag := pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
+ objPkg, objName = rname.qualifiedIdent()
+ assert(objName != "")
+
+ tag = pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
+ pr.retireReader(rname)
+ }
if tag == pkgbits.ObjStub {
assert(objPkg == nil || objPkg == types.Unsafe)
return objPkg, objName
}
+ // Ignore local types promoted to global scope (#55110).
+ if _, suffix := splitVargenSuffix(objName); suffix != "" {
+ return objPkg, objName
+ }
+
if objPkg.Scope().Lookup(objName) == nil {
dict := pr.objDictIdx(idx)
@@ -477,15 +559,56 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
named.SetTypeParams(r.typeParamNames())
- // TODO(mdempsky): Rewrite receiver types to underlying is an
- // Interface? The go/types importer does this (I think because
- // unit tests expected that), but cmd/compile doesn't care
- // about it, so maybe we can avoid worrying about that here.
- rhs := r.typ()
- r.p.later(func() {
- underlying := rhs.Underlying()
+ setUnderlying := func(underlying types.Type) {
+ // If the underlying type is an interface, we need to
+ // duplicate its methods so we can replace the receiver
+ // parameter's type (#49906).
+ if iface, ok := underlying.(*types.Interface); ok && iface.NumExplicitMethods() != 0 {
+ methods := make([]*types.Func, iface.NumExplicitMethods())
+ for i := range methods {
+ fn := iface.ExplicitMethod(i)
+ sig := fn.Type().(*types.Signature)
+
+ recv := types.NewVar(fn.Pos(), fn.Pkg(), "", named)
+ methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignature(recv, sig.Params(), sig.Results(), sig.Variadic()))
+ }
+
+ embeds := make([]types.Type, iface.NumEmbeddeds())
+ for i := range embeds {
+ embeds[i] = iface.EmbeddedType(i)
+ }
+
+ newIface := types.NewInterfaceType(methods, embeds)
+ r.p.ifaces = append(r.p.ifaces, newIface)
+ underlying = newIface
+ }
+
named.SetUnderlying(underlying)
- })
+ }
+
+ // Since go.dev/cl/455279, we can assume rhs.Underlying() will
+ // always be non-nil. However, to temporarily support users of
+ // older snapshot releases, we continue to fallback to the old
+ // behavior for now.
+ //
+ // TODO(mdempsky): Remove fallback code and simplify after
+ // allowing time for snapshot users to upgrade.
+ rhs := r.typ()
+ if underlying := rhs.Underlying(); underlying != nil {
+ setUnderlying(underlying)
+ } else {
+ pk := r.p
+ pk.laterFor(named, func() {
+ // First be sure that the rhs is initialized, if it needs to be initialized.
+ delete(pk.laterFors, named) // prevent cycles
+ if i, ok := pk.laterFors[rhs]; ok {
+ f := pk.laterFns[i]
+ pk.laterFns[i] = func() {} // function is running now, so replace it with a no-op
+ f() // initialize RHS
+ }
+ setUnderlying(rhs.Underlying())
+ })
+ }
for i, n := 0, r.Len(); i < n; i++ {
named.AddMethod(r.method())
@@ -502,25 +625,28 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
}
func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
- r := pr.newReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
var dict readerDict
- if implicits := r.Len(); implicits != 0 {
- errorf("unexpected object with %v implicit type parameter(s)", implicits)
- }
+ {
+ r := pr.tempReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
+ if implicits := r.Len(); implicits != 0 {
+ errorf("unexpected object with %v implicit type parameter(s)", implicits)
+ }
- dict.bounds = make([]typeInfo, r.Len())
- for i := range dict.bounds {
- dict.bounds[i] = r.typInfo()
- }
+ dict.bounds = make([]typeInfo, r.Len())
+ for i := range dict.bounds {
+ dict.bounds[i] = r.typInfo()
+ }
- dict.derived = make([]derivedInfo, r.Len())
- dict.derivedTypes = make([]types.Type, len(dict.derived))
- for i := range dict.derived {
- dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
- }
+ dict.derived = make([]derivedInfo, r.Len())
+ dict.derivedTypes = make([]types.Type, len(dict.derived))
+ for i := range dict.derived {
+ dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
+ }
+ pr.retireReader(r)
+ }
// function references follow, but reader doesn't need those
return &dict
diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
index 67256dc3..d5055169 100644
--- a/vendor/golang.org/x/tools/internal/gocommand/invoke.go
+++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
@@ -10,8 +10,10 @@ import (
"context"
"fmt"
"io"
+ "log"
"os"
"regexp"
+ "runtime"
"strconv"
"strings"
"sync"
@@ -232,6 +234,12 @@ func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error {
return runCmdContext(ctx, cmd)
}
+// DebugHangingGoCommands may be set by tests to enable additional
+// instrumentation (including panics) for debugging hanging Go commands.
+//
+// See golang/go#54461 for details.
+var DebugHangingGoCommands = false
+
// runCmdContext is like exec.CommandContext except it sends os.Interrupt
// before os.Kill.
func runCmdContext(ctx context.Context, cmd *exec.Cmd) error {
@@ -243,11 +251,24 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) error {
resChan <- cmd.Wait()
}()
- select {
- case err := <-resChan:
- return err
- case <-ctx.Done():
+ // If we're interested in debugging hanging Go commands, stop waiting after a
+ // minute and panic with interesting information.
+ if DebugHangingGoCommands {
+ select {
+ case err := <-resChan:
+ return err
+ case <-time.After(1 * time.Minute):
+ HandleHangingGoCommand(cmd.Process)
+ case <-ctx.Done():
+ }
+ } else {
+ select {
+ case err := <-resChan:
+ return err
+ case <-ctx.Done():
+ }
}
+
// Cancelled. Interrupt and see if it ends voluntarily.
cmd.Process.Signal(os.Interrupt)
select {
@@ -255,11 +276,63 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) error {
return err
case <-time.After(time.Second):
}
+
// Didn't shut down in response to interrupt. Kill it hard.
- cmd.Process.Kill()
+ // TODO(rfindley): per advice from bcmills@, it may be better to send SIGQUIT
+ // on certain platforms, such as unix.
+ if err := cmd.Process.Kill(); err != nil && DebugHangingGoCommands {
+ // Don't panic here as this reliably fails on windows with EINVAL.
+ log.Printf("error killing the Go command: %v", err)
+ }
+
+ // See above: don't wait indefinitely if we're debugging hanging Go commands.
+ if DebugHangingGoCommands {
+ select {
+ case err := <-resChan:
+ return err
+ case <-time.After(10 * time.Second): // a shorter wait as resChan should return quickly following Kill
+ HandleHangingGoCommand(cmd.Process)
+ }
+ }
return <-resChan
}
+func HandleHangingGoCommand(proc *os.Process) {
+ switch runtime.GOOS {
+ case "linux", "darwin", "freebsd", "netbsd":
+ fmt.Fprintln(os.Stderr, `DETECTED A HANGING GO COMMAND
+
+The gopls test runner has detected a hanging go command. In order to debug
+this, the output of ps and lsof/fstat is printed below.
+
+See golang/go#54461 for more details.`)
+
+ fmt.Fprintln(os.Stderr, "\nps axo ppid,pid,command:")
+ fmt.Fprintln(os.Stderr, "-------------------------")
+ psCmd := exec.Command("ps", "axo", "ppid,pid,command")
+ psCmd.Stdout = os.Stderr
+ psCmd.Stderr = os.Stderr
+ if err := psCmd.Run(); err != nil {
+ panic(fmt.Sprintf("running ps: %v", err))
+ }
+
+ listFiles := "lsof"
+ if runtime.GOOS == "freebsd" || runtime.GOOS == "netbsd" {
+ listFiles = "fstat"
+ }
+
+ fmt.Fprintln(os.Stderr, "\n"+listFiles+":")
+ fmt.Fprintln(os.Stderr, "-----")
+ listFilesCmd := exec.Command(listFiles)
+ listFilesCmd.Stdout = os.Stderr
+ listFilesCmd.Stderr = os.Stderr
+ if err := listFilesCmd.Run(); err != nil {
+ panic(fmt.Sprintf("running %s: %v", listFiles, err))
+ }
+ }
+ panic(fmt.Sprintf("detected hanging go command (pid %d): see golang/go#54461 for more details", proc.Pid))
+}
+
func cmdDebugStr(cmd *exec.Cmd) string {
env := make(map[string]string)
for _, kv := range cmd.Env {
diff --git a/vendor/golang.org/x/tools/internal/gocommand/version.go b/vendor/golang.org/x/tools/internal/gocommand/version.go
index 71304368..307a76d4 100644
--- a/vendor/golang.org/x/tools/internal/gocommand/version.go
+++ b/vendor/golang.org/x/tools/internal/gocommand/version.go
@@ -7,11 +7,19 @@ package gocommand
import (
"context"
"fmt"
+ "regexp"
"strings"
)
-// GoVersion checks the go version by running "go list" with modules off.
-// It returns the X in Go 1.X.
+// GoVersion reports the minor version number of the highest release
+// tag built into the go command on the PATH.
+//
+// Note that this may be higher than the version of the go tool used
+// to build this application, and thus the versions of the standard
+// go/{scanner,parser,ast,types} packages that are linked into it.
+// In that case, callers should either downgrade to the version of
+// go used to build the application, or report an error that the
+// application is too old to use the go command on the PATH.
func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) {
inv.Verb = "list"
inv.Args = []string{"-e", "-f", `{{context.ReleaseTags}}`, `--`, `unsafe`}
@@ -38,7 +46,7 @@ func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) {
if len(stdout) < 3 {
return 0, fmt.Errorf("bad ReleaseTags output: %q", stdout)
}
- // Split up "[go1.1 go1.15]"
+ // Split up "[go1.1 go1.15]" and return highest go1.X value.
tags := strings.Fields(stdout[1 : len(stdout)-2])
for i := len(tags) - 1; i >= 0; i-- {
var version int
@@ -49,3 +57,25 @@ func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) {
}
return 0, fmt.Errorf("no parseable ReleaseTags in %v", tags)
}
+
+// GoVersionOutput returns the complete output of the go version command.
+func GoVersionOutput(ctx context.Context, inv Invocation, r *Runner) (string, error) {
+ inv.Verb = "version"
+ goVersion, err := r.Run(ctx, inv)
+ if err != nil {
+ return "", err
+ }
+ return goVersion.String(), nil
+}
+
+// ParseGoVersionOutput extracts the Go version string
+// from the output of the "go version" command.
+// Given an unrecognized form, it returns an empty string.
+func ParseGoVersionOutput(data string) string {
+ re := regexp.MustCompile(`^go version (go\S+|devel \S+)`)
+ m := re.FindStringSubmatch(data)
+ if len(m) != 2 {
+ return "" // unrecognized version
+ }
+ return m[1]
+}
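
A quick standalone check of the "go version" parsing shown above; the regexp is re-declared locally and the sample output strings are invented for illustration.

package main

import (
	"fmt"
	"regexp"
)

// parseGoVersionOutput mirrors the regexp used above.
func parseGoVersionOutput(data string) string {
	re := regexp.MustCompile(`^go version (go\S+|devel \S+)`)
	m := re.FindStringSubmatch(data)
	if len(m) != 2 {
		return "" // unrecognized version
	}
	return m[1]
}

func main() {
	fmt.Println(parseGoVersionOutput("go version go1.20.3 linux/amd64"))      // go1.20.3
	fmt.Println(parseGoVersionOutput("go version devel +abc123 linux/amd64")) // devel +abc123
	fmt.Println(parseGoVersionOutput("not go output"))                        // (empty)
}
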
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/codes.go b/vendor/golang.org/x/tools/internal/pkgbits/codes.go
index f0cabde9..f0cabde9 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/codes.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/codes.go
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go
index 2bc79366..b92e8e6e 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go
@@ -6,9 +6,11 @@ package pkgbits
import (
"encoding/binary"
+ "errors"
"fmt"
"go/constant"
"go/token"
+ "io"
"math/big"
"os"
"runtime"
@@ -51,6 +53,8 @@ type PkgDecoder struct {
// For example, section K's end positions start at elemEndsEnds[K-1]
// (or 0, if K==0) and end at elemEndsEnds[K].
elemEndsEnds [numRelocs]uint32
+
+ scratchRelocEnt []RelocEnt
}
// PkgPath returns the package path for the package
@@ -94,7 +98,7 @@ func NewPkgDecoder(pkgPath, input string) PkgDecoder {
pr.elemEnds = make([]uint32, pr.elemEndsEnds[len(pr.elemEndsEnds)-1])
assert(binary.Read(r, binary.LittleEndian, pr.elemEnds[:]) == nil)
- pos, err := r.Seek(0, os.SEEK_CUR)
+ pos, err := r.Seek(0, io.SeekCurrent)
assert(err == nil)
pr.elemData = input[pos:]
@@ -164,6 +168,21 @@ func (pr *PkgDecoder) NewDecoder(k RelocKind, idx Index, marker SyncMarker) Deco
return r
}
+// TempDecoder returns a Decoder for the given (section, index) pair,
+// and decodes the given SyncMarker from the element bitstream.
+// If possible, the Decoder should be RetireDecoder'd when it is no longer
+// needed; this avoids heap allocations.
+func (pr *PkgDecoder) TempDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder {
+ r := pr.TempDecoderRaw(k, idx)
+ r.Sync(marker)
+ return r
+}
+
+func (pr *PkgDecoder) RetireDecoder(d *Decoder) {
+ pr.scratchRelocEnt = d.Relocs
+ d.Relocs = nil
+}
+
// NewDecoderRaw returns a Decoder for the given (section, index) pair.
//
// Most callers should use NewDecoder instead.
@@ -187,6 +206,30 @@ func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder {
return r
}
+func (pr *PkgDecoder) TempDecoderRaw(k RelocKind, idx Index) Decoder {
+ r := Decoder{
+ common: pr,
+ k: k,
+ Idx: idx,
+ }
+
+ r.Data.Reset(pr.DataIdx(k, idx))
+ r.Sync(SyncRelocs)
+ l := r.Len()
+ if cap(pr.scratchRelocEnt) >= l {
+ r.Relocs = pr.scratchRelocEnt[:l]
+ pr.scratchRelocEnt = nil
+ } else {
+ r.Relocs = make([]RelocEnt, l)
+ }
+ for i := range r.Relocs {
+ r.Sync(SyncReloc)
+ r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())}
+ }
+
+ return r
+}
+
// A Decoder provides methods for decoding an individual element's
// bitstream data.
type Decoder struct {
@@ -206,11 +249,39 @@ func (r *Decoder) checkErr(err error) {
}
func (r *Decoder) rawUvarint() uint64 {
- x, err := binary.ReadUvarint(&r.Data)
+ x, err := readUvarint(&r.Data)
r.checkErr(err)
return x
}
+// readUvarint is a type-specialized copy of encoding/binary.ReadUvarint.
+// This avoids the interface conversion and thus has better escape properties,
+// which flows up the stack.
+func readUvarint(r *strings.Reader) (uint64, error) {
+ var x uint64
+ var s uint
+ for i := 0; i < binary.MaxVarintLen64; i++ {
+ b, err := r.ReadByte()
+ if err != nil {
+ if i > 0 && err == io.EOF {
+ err = io.ErrUnexpectedEOF
+ }
+ return x, err
+ }
+ if b < 0x80 {
+ if i == binary.MaxVarintLen64-1 && b > 1 {
+ return x, overflow
+ }
+ return x | uint64(b)<<s, nil
+ }
+ x |= uint64(b&0x7f) << s
+ s += 7
+ }
+ return x, overflow
+}
+
+var overflow = errors.New("pkgbits: readUvarint overflows a 64-bit integer")
+
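
For comparison, a tiny demonstration of the uvarint (LEB128) decoding that readUvarint above specializes, using the encoding/binary equivalent; the byte values are just an example.

package main

import (
	"encoding/binary"
	"fmt"
	"strings"
)

func main() {
	// 300 encodes as the two bytes 0xAC 0x02:
	//   0xAC = continuation bit set + low seven bits 0101100
	//   0x02 = high bits 0000010
	r := strings.NewReader("\xac\x02")
	x, err := binary.ReadUvarint(r) // *strings.Reader implements io.ByteReader
	fmt.Println(x, err)             // 300 <nil>
}
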
func (r *Decoder) rawVarint() int64 {
ux := r.rawUvarint()
@@ -237,7 +308,7 @@ func (r *Decoder) Sync(mWant SyncMarker) {
return
}
- pos, _ := r.Data.Seek(0, os.SEEK_CUR) // TODO(mdempsky): io.SeekCurrent after #44505 is resolved
+ pos, _ := r.Data.Seek(0, io.SeekCurrent)
mHave := SyncMarker(r.rawUvarint())
writerPCs := make([]int, r.rawUvarint())
for i := range writerPCs {
@@ -302,7 +373,7 @@ func (r *Decoder) Int64() int64 {
return r.rawVarint()
}
-// Int64 decodes and returns a uint64 value from the element bitstream.
+// Uint64 decodes and returns a uint64 value from the element bitstream.
func (r *Decoder) Uint64() uint64 {
r.Sync(SyncUint64)
return r.rawUvarint()
@@ -409,8 +480,12 @@ func (r *Decoder) bigFloat() *big.Float {
// PeekPkgPath returns the package path for the specified package
// index.
func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
- r := pr.NewDecoder(RelocPkg, idx, SyncPkgDef)
- path := r.String()
+ var path string
+ {
+ r := pr.TempDecoder(RelocPkg, idx, SyncPkgDef)
+ path = r.String()
+ pr.RetireDecoder(&r)
+ }
if path == "" {
path = pr.pkgPath
}
@@ -420,14 +495,23 @@ func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
// PeekObj returns the package path, object name, and CodeObj for the
// specified object index.
func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) {
- r := pr.NewDecoder(RelocName, idx, SyncObject1)
- r.Sync(SyncSym)
- r.Sync(SyncPkg)
- path := pr.PeekPkgPath(r.Reloc(RelocPkg))
- name := r.String()
+ var ridx Index
+ var name string
+ var rcode int
+ {
+ r := pr.TempDecoder(RelocName, idx, SyncObject1)
+ r.Sync(SyncSym)
+ r.Sync(SyncPkg)
+ ridx = r.Reloc(RelocPkg)
+ name = r.String()
+ rcode = r.Code(SyncCodeObj)
+ pr.RetireDecoder(&r)
+ }
+
+ path := pr.PeekPkgPath(ridx)
assert(name != "")
- tag := CodeObj(r.Code(SyncCodeObj))
+ tag := CodeObj(rcode)
return path, name, tag
}
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/doc.go b/vendor/golang.org/x/tools/internal/pkgbits/doc.go
index c8a2796b..c8a2796b 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/doc.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/doc.go
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go b/vendor/golang.org/x/tools/internal/pkgbits/encoder.go
index c50c838c..6482617a 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/encoder.go
@@ -147,8 +147,9 @@ func (pw *PkgEncoder) NewEncoderRaw(k RelocKind) Encoder {
type Encoder struct {
p *PkgEncoder
- Relocs []RelocEnt
- Data bytes.Buffer // accumulated element bitstream data
+ Relocs []RelocEnt
+ RelocMap map[RelocEnt]uint32
+ Data bytes.Buffer // accumulated element bitstream data
encodingRelocHeader bool
@@ -210,15 +211,18 @@ func (w *Encoder) rawVarint(x int64) {
}
func (w *Encoder) rawReloc(r RelocKind, idx Index) int {
- // TODO(mdempsky): Use map for lookup; this takes quadratic time.
- for i, rEnt := range w.Relocs {
- if rEnt.Kind == r && rEnt.Idx == idx {
- return i
+ e := RelocEnt{r, idx}
+ if w.RelocMap != nil {
+ if i, ok := w.RelocMap[e]; ok {
+ return int(i)
}
+ } else {
+ w.RelocMap = make(map[RelocEnt]uint32)
}
i := len(w.Relocs)
- w.Relocs = append(w.Relocs, RelocEnt{r, idx})
+ w.RelocMap[e] = uint32(i)
+ w.Relocs = append(w.Relocs, e)
return i
}
@@ -289,7 +293,7 @@ func (w *Encoder) Len(x int) { assert(x >= 0); w.Uint64(uint64(x)) }
// Int encodes and writes an int value into the element bitstream.
func (w *Encoder) Int(x int) { w.Int64(int64(x)) }
-// Len encodes and writes a uint value into the element bitstream.
+// Uint encodes and writes a uint value into the element bitstream.
func (w *Encoder) Uint(x uint) { w.Uint64(uint64(x)) }
// Reloc encodes and writes a relocation for the given (section,
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/flags.go b/vendor/golang.org/x/tools/internal/pkgbits/flags.go
index 65422274..65422274 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/flags.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/flags.go
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go b/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go
index 5294f6a6..5294f6a6 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go b/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go
index 2324ae7a..2324ae7a 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go b/vendor/golang.org/x/tools/internal/pkgbits/reloc.go
index 7a8f04ab..fcdfb97c 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/reloc.go
@@ -5,11 +5,11 @@
package pkgbits
// A RelocKind indicates a particular section within a unified IR export.
-type RelocKind int
+type RelocKind int32
// An Index represents a bitstream element index within a particular
// section.
-type Index int
+type Index int32
// A relocEnt (relocation entry) is an entry in an element's local
// reference table.
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/support.go b/vendor/golang.org/x/tools/internal/pkgbits/support.go
index ad26d3b2..ad26d3b2 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/support.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/support.go
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/sync.go b/vendor/golang.org/x/tools/internal/pkgbits/sync.go
index 5bd51ef7..5bd51ef7 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/sync.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/sync.go
diff --git a/vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go b/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go
index 4a5b0ca5..4a5b0ca5 100644
--- a/vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go
+++ b/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go
diff --git a/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go b/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go
new file mode 100644
index 00000000..a3fb2d4f
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go
@@ -0,0 +1,59 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package tokeninternal provides access to some internal features of the token
+// package.
+package tokeninternal
+
+import (
+ "go/token"
+ "sync"
+ "unsafe"
+)
+
+// GetLines returns the table of line-start offsets from a token.File.
+func GetLines(file *token.File) []int {
+ // token.File has a Lines method on Go 1.21 and later.
+ if file, ok := (interface{})(file).(interface{ Lines() []int }); ok {
+ return file.Lines()
+ }
+
+ // This declaration must match that of token.File.
+ // This creates a risk of dependency skew.
+ // For now we check that the size of the two
+ // declarations is the same, on the (fragile) assumption
+ // that future changes would add fields.
+ type tokenFile119 struct {
+ _ string
+ _ int
+ _ int
+ mu sync.Mutex // we're not complete monsters
+ lines []int
+ _ []struct{}
+ }
+ type tokenFile118 struct {
+ _ *token.FileSet // deleted in go1.19
+ tokenFile119
+ }
+
+ type uP = unsafe.Pointer
+ switch unsafe.Sizeof(*file) {
+ case unsafe.Sizeof(tokenFile118{}):
+ var ptr *tokenFile118
+ *(*uP)(uP(&ptr)) = uP(file)
+ ptr.mu.Lock()
+ defer ptr.mu.Unlock()
+ return ptr.lines
+
+ case unsafe.Sizeof(tokenFile119{}):
+ var ptr *tokenFile119
+ *(*uP)(uP(&ptr)) = uP(file)
+ ptr.mu.Lock()
+ defer ptr.mu.Unlock()
+ return ptr.lines
+
+ default:
+ panic("unexpected token.File size")
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
index d38ee3c2..07484073 100644
--- a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
+++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go
@@ -31,6 +31,12 @@ type ErrorCode int
// problem with types.
const (
+ // InvalidSyntaxTree occurs if an invalid syntax tree is provided
+ // to the type checker. It should never happen.
+ InvalidSyntaxTree ErrorCode = -1
+)
+
+const (
_ ErrorCode = iota
// Test is reserved for errors that only apply while in self-test mode.
@@ -153,12 +159,12 @@ const (
/* decls > var (+ other variable assignment codes) */
- // UntypedNil occurs when the predeclared (untyped) value nil is used to
+ // UntypedNilUse occurs when the predeclared (untyped) value nil is used to
// initialize a variable declared without an explicit type.
//
// Example:
// var x = nil
- UntypedNil
+ UntypedNilUse
// WrongAssignCount occurs when the number of values on the right-hand side
// of an assignment or or initialization expression does not match the number
@@ -1523,4 +1529,32 @@ const (
// Example:
// type T[P any] struct{ *P }
MisplacedTypeParam
+
+ // InvalidUnsafeSliceData occurs when unsafe.SliceData is called with
+ // an argument that is not of slice type. It also occurs if it is used
+ // in a package compiled for a language version before go1.20.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var x int
+ // var _ = unsafe.SliceData(x)
+ InvalidUnsafeSliceData
+
+ // InvalidUnsafeString occurs when unsafe.String is called with
+ // a length argument that is not of integer type, negative, or
+ // out of bounds. It also occurs if it is used in a package
+ // compiled for a language version before go1.20.
+ //
+ // Example:
+ // import "unsafe"
+ //
+ // var b [10]byte
+ // var _ = unsafe.String(&b[0], -1)
+ InvalidUnsafeString
+
+ // InvalidUnsafeStringData occurs if it is used in a package
+ // compiled for a language version before go1.20.
+ _ // not used anymore
+
)
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go
index de90e951..15ecf7c5 100644
--- a/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go
+++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go
@@ -8,6 +8,7 @@ func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
+ _ = x[InvalidSyntaxTree - -1]
_ = x[Test-1]
_ = x[BlankPkgName-2]
_ = x[MismatchedPkgName-3]
@@ -23,7 +24,7 @@ func _() {
_ = x[InvalidConstInit-13]
_ = x[InvalidConstVal-14]
_ = x[InvalidConstType-15]
- _ = x[UntypedNil-16]
+ _ = x[UntypedNilUse-16]
_ = x[WrongAssignCount-17]
_ = x[UnassignableOperand-18]
_ = x[NoNewVar-19]
@@ -152,16 +153,27 @@ func _() {
_ = x[MisplacedConstraintIface-142]
_ = x[InvalidMethodTypeParams-143]
_ = x[MisplacedTypeParam-144]
+ _ = x[InvalidUnsafeSliceData-145]
+ _ = x[InvalidUnsafeString-146]
}
-const _ErrorCode_name = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParam"
+const (
+ _ErrorCode_name_0 = "InvalidSyntaxTree"
+ _ErrorCode_name_1 = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilUseWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParamInvalidUnsafeSliceDataInvalidUnsafeString"
+)
-var _ErrorCode_index = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 215, 231, 250, 258, 274, 292, 309, 327, 351, 359, 374, 390, 408, 425, 440, 447, 458, 481, 496, 508, 519, 534, 548, 563, 578, 591, 600, 614, 629, 640, 655, 664, 680, 700, 718, 737, 749, 768, 787, 803, 820, 839, 853, 864, 879, 892, 907, 923, 937, 953, 968, 985, 1003, 1018, 1028, 1038, 1055, 1077, 1091, 1105, 1125, 1143, 1163, 1181, 1204, 1220, 1235, 1248, 1258, 1270, 1281, 1295, 1308, 1319, 1329, 1344, 1355, 1366, 1379, 1395, 1412, 1436, 1453, 1468, 1478, 1487, 1500, 1516, 1532, 1543, 1558, 1574, 1588, 1604, 1618, 1635, 1655, 1668, 1684, 1698, 1715, 1732, 1749, 1764, 1778, 1792, 1803, 1815, 1828, 1845, 1858, 1869, 1882, 1894, 1903, 1910, 1922, 1938, 1956, 1974, 1989, 2006, 2025, 2039, 2059, 2071, 2095, 2118, 2136}
+var (
+ _ErrorCode_index_1 = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 218, 234, 253, 261, 277, 295, 312, 330, 354, 362, 377, 393, 411, 428, 443, 450, 461, 484, 499, 511, 522, 537, 551, 566, 581, 594, 603, 617, 632, 643, 658, 667, 683, 703, 721, 740, 752, 771, 790, 806, 823, 842, 856, 867, 882, 895, 910, 926, 940, 956, 971, 988, 1006, 1021, 1031, 1041, 1058, 1080, 1094, 1108, 1128, 1146, 1166, 1184, 1207, 1223, 1238, 1251, 1261, 1273, 1284, 1298, 1311, 1322, 1332, 1347, 1358, 1369, 1382, 1398, 1415, 1439, 1456, 1471, 1481, 1490, 1503, 1519, 1535, 1546, 1561, 1577, 1591, 1607, 1621, 1638, 1658, 1671, 1687, 1701, 1718, 1735, 1752, 1767, 1781, 1795, 1806, 1818, 1831, 1848, 1861, 1872, 1885, 1897, 1906, 1913, 1925, 1941, 1959, 1977, 1992, 2009, 2028, 2042, 2062, 2074, 2098, 2121, 2139, 2161, 2180}
+)
func (i ErrorCode) String() string {
- i -= 1
- if i < 0 || i >= ErrorCode(len(_ErrorCode_index)-1) {
- return "ErrorCode(" + strconv.FormatInt(int64(i+1), 10) + ")"
+ switch {
+ case i == -1:
+ return _ErrorCode_name_0
+ case 1 <= i && i <= 146:
+ i -= 1
+ return _ErrorCode_name_1[_ErrorCode_index_1[i]:_ErrorCode_index_1[i+1]]
+ default:
+ return "ErrorCode(" + strconv.FormatInt(int64(i), 10) + ")"
}
- return _ErrorCode_name[_ErrorCode_index[i]:_ErrorCode_index[i+1]]
}