[chore]: Bump golang.org/x/text from 0.7.0 to 0.8.0 (#1594)
Bumps [golang.org/x/text](https://github.com/golang/text) from 0.7.0 to 0.8.0.
- [Release notes](https://github.com/golang/text/releases)
- [Commits](https://github.com/golang/text/compare/v0.7.0...v0.8.0)

---
updated-dependencies:
- dependency-name: golang.org/x/text
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
parent 65aeeb4442
commit 06524ac259
go.mod (6 changed lines)
@@ -57,7 +57,7 @@ require (
 	golang.org/x/image v0.5.0
 	golang.org/x/net v0.7.0
 	golang.org/x/oauth2 v0.5.0
-	golang.org/x/text v0.7.0
+	golang.org/x/text v0.8.0
 	gopkg.in/mcuadros/go-syslog.v2 v2.3.0
 	gopkg.in/yaml.v3 v3.0.1
 	modernc.org/sqlite v1.20.4
@@ -148,9 +148,9 @@ require (
 	github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect
 	github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
 	golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
-	golang.org/x/mod v0.7.0 // indirect
+	golang.org/x/mod v0.8.0 // indirect
 	golang.org/x/sys v0.5.0 // indirect
-	golang.org/x/tools v0.3.0 // indirect
+	golang.org/x/tools v0.6.0 // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 	google.golang.org/protobuf v1.28.1 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
go.sum (9 changed lines)
@@ -704,8 +704,9 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
 golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -854,8 +855,9 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
 golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
+golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -915,8 +917,9 @@ golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4f
 golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
 golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.3.0 h1:SrNbZl6ECOS1qFzgTdQfWXZM9XBkiA6tkFrH9YSTPHM=
 golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
+golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
 golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -13,7 +13,7 @@ import "encoding/binary"
 // a rune to a uint16. The values take two forms. For v >= 0x8000:
 //   bits
 //   15:    1 (inverse of NFD_QC bit of qcInfo)
-//   13..7: qcInfo (see below). isYesD is always true (no decompostion).
+//   13..7: qcInfo (see below). isYesD is always true (no decomposition).
 //    6..0: ccc (compressed CCC value).
 // For v < 0x8000, the respective rune has a decomposition and v is an index
 // into a byte array of UTF-8 decomposition sequences and additional info and
@@ -27,7 +27,6 @@ import (
 	"go/token"
 	"go/types"
 	"io"
-	"io/ioutil"
 	"os/exec"
 
 	"golang.org/x/tools/internal/gcimporter"
@@ -85,6 +84,19 @@ func NewReader(r io.Reader) (io.Reader, error) {
 	}
 }
 
+// readAll works the same way as io.ReadAll, but avoids allocations and copies
+// by preallocating a byte slice of the necessary size if the size is known up
+// front. This is always possible when the input is an archive. In that case,
+// NewReader will return the known size using an io.LimitedReader.
+func readAll(r io.Reader) ([]byte, error) {
+	if lr, ok := r.(*io.LimitedReader); ok {
+		data := make([]byte, lr.N)
+		_, err := io.ReadFull(lr, data)
+		return data, err
+	}
+	return io.ReadAll(r)
+}
+
 // Read reads export data from in, decodes it, and returns type
 // information for the package.
 //
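Aside (not part of the commit): the readAll helper above relies on NewReader handing back an io.LimitedReader when the input is an archive, so the payload size is known and a single allocation suffices. A minimal standalone sketch of the same idea, with an assumed helper name readAllSketch and synthetic input, is:

package main

import (
	"bytes"
	"fmt"
	"io"
)

// readAllSketch mirrors the preallocation trick: if the reader advertises its
// remaining size via *io.LimitedReader, allocate the buffer once and fill it.
func readAllSketch(r io.Reader) ([]byte, error) {
	if lr, ok := r.(*io.LimitedReader); ok {
		data := make([]byte, lr.N) // size known up front: one allocation
		_, err := io.ReadFull(lr, data)
		return data, err
	}
	return io.ReadAll(r) // unknown size: fall back to the growing buffer
}

func main() {
	src := bytes.NewReader([]byte("export data payload"))
	lr := &io.LimitedReader{R: src, N: int64(src.Len())}
	data, err := readAllSketch(lr)
	fmt.Println(string(data), err)
}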
@@ -102,7 +114,7 @@ func NewReader(r io.Reader) (io.Reader, error) {
 //
 // On return, the state of the reader is undefined.
 func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
-	data, err := ioutil.ReadAll(in)
+	data, err := readAll(in)
 	if err != nil {
 		return nil, fmt.Errorf("reading export data for %q: %v", path, err)
 	}
@@ -111,12 +123,6 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
 		return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
 	}
 
-	// The App Engine Go runtime v1.6 uses the old export data format.
-	// TODO(adonovan): delete once v1.7 has been around for a while.
-	if bytes.HasPrefix(data, []byte("package ")) {
-		return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
-	}
-
 	// The indexed export format starts with an 'i'; the older
 	// binary export format starts with a 'c', 'd', or 'v'
 	// (from "version"). Select appropriate importer.
@@ -165,7 +171,7 @@ func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
 //
 // Experimental: This API is experimental and may change in the future.
 func ReadBundle(in io.Reader, fset *token.FileSet, imports map[string]*types.Package) ([]*types.Package, error) {
-	data, err := ioutil.ReadAll(in)
+	data, err := readAll(in)
 	if err != nil {
 		return nil, fmt.Errorf("reading export bundle: %v", err)
 	}
@@ -604,17 +604,12 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
 
 		// Work around https://golang.org/issue/28749:
 		// cmd/go puts assembly, C, and C++ files in CompiledGoFiles.
-		// Filter out any elements of CompiledGoFiles that are also in OtherFiles.
-		// We have to keep this workaround in place until go1.12 is a distant memory.
-		if len(pkg.OtherFiles) > 0 {
-			other := make(map[string]bool, len(pkg.OtherFiles))
-			for _, f := range pkg.OtherFiles {
-				other[f] = true
-			}
-
+		// Remove files from CompiledGoFiles that are non-go files
+		// (or are not files that look like they are from the cache).
+		if len(pkg.CompiledGoFiles) > 0 {
 			out := pkg.CompiledGoFiles[:0]
 			for _, f := range pkg.CompiledGoFiles {
-				if other[f] {
+				if ext := filepath.Ext(f); ext != ".go" && ext != "" { // ext == "" means the file is from the cache, so probably cgo-processed file
 					continue
 				}
 				out = append(out, f)
@@ -15,6 +15,7 @@ import (
 	"go/scanner"
 	"go/token"
 	"go/types"
+	"io"
 	"io/ioutil"
 	"log"
 	"os"
@@ -303,6 +304,9 @@ type Package struct {
 	// of the package, or while parsing or type-checking its files.
 	Errors []Error
 
+	// TypeErrors contains the subset of errors produced during type checking.
+	TypeErrors []types.Error
+
 	// GoFiles lists the absolute file paths of the package's Go source files.
 	GoFiles []string
 
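Aside (not part of the commit): the new TypeErrors field keeps type-checker errors in their structured types.Error form alongside the flattened Errors slice. A hypothetical client sketch, assuming a "./..." load pattern and these Mode bits, might consume it like this:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}
	for _, pkg := range pkgs {
		// TypeErrors preserves positions and messages as machine-readable
		// values instead of pre-rendered strings.
		for _, terr := range pkg.TypeErrors {
			fmt.Printf("%s: %s\n", terr.Fset.Position(terr.Pos), terr.Msg)
		}
	}
}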
@@ -874,12 +878,19 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 	// never has to create a types.Package for an indirect dependency,
 	// which would then require that such created packages be explicitly
 	// inserted back into the Import graph as a final step after export data loading.
+	// (Hence this return is after the Types assignment.)
 	// The Diamond test exercises this case.
 	if !lpkg.needtypes && !lpkg.needsrc {
 		return
 	}
 	if !lpkg.needsrc {
-		ld.loadFromExportData(lpkg)
+		if err := ld.loadFromExportData(lpkg); err != nil {
+			lpkg.Errors = append(lpkg.Errors, Error{
+				Pos:  "-",
+				Msg:  err.Error(),
+				Kind: UnknownError, // e.g. can't find/open/parse export data
+			})
+		}
 		return // not a source package, don't get syntax trees
 	}
 
@@ -911,6 +922,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 
 		case types.Error:
 			// from type checker
+			lpkg.TypeErrors = append(lpkg.TypeErrors, err)
 			errs = append(errs, Error{
 				Pos: err.Fset.Position(err.Pos).String(),
 				Msg: err.Msg,
@@ -946,6 +958,8 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 	// - golang.org/issue/52078 (flag to set release tags)
 	// - golang.org/issue/50825 (gopls legacy version support)
 	// - golang.org/issue/55883 (go/packages confusing error)
+	//
+	// Should we assert a hard minimum of (currently) go1.16 here?
 	var runtimeVersion int
 	if _, err := fmt.Sscanf(runtime.Version(), "go1.%d", &runtimeVersion); err == nil && runtimeVersion < lpkg.goVersion {
 		defer func() {
@@ -963,7 +977,8 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 		// The config requested loading sources and types, but sources are missing.
 		// Add an error to the package and fall back to loading from export data.
 		appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError})
-		ld.loadFromExportData(lpkg)
+		_ = ld.loadFromExportData(lpkg) // ignore any secondary errors
+
 		return // can't get syntax trees for this package
 	}
 
@@ -1017,7 +1032,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
 	tc := &types.Config{
 		Importer: importer,
 
-		// Type-check bodies of functions only in non-initial packages.
+		// Type-check bodies of functions only in initial packages.
 		// Example: for import graph A->B->C and initial packages {A,C},
 		// we can ignore function bodies in B.
 		IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial,
@@ -1187,9 +1202,10 @@ func sameFile(x, y string) bool {
 	return false
 }
 
-// loadFromExportData returns type information for the specified
+// loadFromExportData ensures that type information is present for the specified
 // package, loading it from an export data file on the first request.
-func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) {
+// On success it sets lpkg.Types to a new Package.
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) error {
 	if lpkg.PkgPath == "" {
 		log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
 	}
@@ -1200,8 +1216,8 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
 	// must be sequential. (Finer-grained locking would require
 	// changes to the gcexportdata API.)
 	//
-	// The exportMu lock guards the Package.Pkg field and the
-	// types.Package it points to, for each Package in the graph.
+	// The exportMu lock guards the lpkg.Types field and the
+	// types.Package it points to, for each loaderPackage in the graph.
 	//
 	// Not all accesses to Package.Pkg need to be protected by exportMu:
 	// graph ordering ensures that direct dependencies of source
@@ -1210,18 +1226,18 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
 	defer ld.exportMu.Unlock()
 
 	if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
-		return tpkg, nil // cache hit
+		return nil // cache hit
 	}
 
 	lpkg.IllTyped = true // fail safe
 
 	if lpkg.ExportFile == "" {
 		// Errors while building export data will have been printed to stderr.
-		return nil, fmt.Errorf("no export data file")
+		return fmt.Errorf("no export data file")
 	}
 	f, err := os.Open(lpkg.ExportFile)
 	if err != nil {
-		return nil, err
+		return err
 	}
 	defer f.Close()
 
@@ -1233,7 +1249,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
 	// queries.)
 	r, err := gcexportdata.NewReader(f)
 	if err != nil {
-		return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+		return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
 	}
 
 	// Build the view.
@@ -1277,7 +1293,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
 	// (May modify incomplete packages in view but not create new ones.)
 	tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
 	if err != nil {
-		return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+		return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
 	}
 	if _, ok := view["go.shape"]; ok {
 		// Account for the pseudopackage "go.shape" that gets
@@ -1290,8 +1306,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
 
 	lpkg.Types = tpkg
 	lpkg.IllTyped = false
-
-	return tpkg, nil
+	return nil
 }
 
 // impliedLoadMode returns loadMode with its dependencies.
@@ -1307,3 +1322,5 @@ func impliedLoadMode(loadMode LoadMode) LoadMode {
 func usesExportData(cfg *Config) bool {
 	return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
 }
+
+var _ interface{} = io.Discard // assert build toolchain is go1.16 or later

(File diff suppressed because it is too large)
@@ -22,6 +22,7 @@ import (
 	"strconv"
 	"strings"
 
+	"golang.org/x/tools/internal/tokeninternal"
 	"golang.org/x/tools/internal/typeparams"
 )
 
@@ -138,6 +139,17 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, ver
 		p.doDecl(p.declTodo.popHead())
 	}
 
+	// Produce index of offset of each file record in files.
+	var files intWriter
+	var fileOffset []uint64 // fileOffset[i] is offset in files of file encoded as i
+	if p.shallow {
+		fileOffset = make([]uint64, len(p.fileInfos))
+		for i, info := range p.fileInfos {
+			fileOffset[i] = uint64(files.Len())
+			p.encodeFile(&files, info.file, info.needed)
+		}
+	}
+
 	// Append indices to data0 section.
 	dataLen := uint64(p.data0.Len())
 	w := p.newWriter()
@@ -163,16 +175,75 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, ver
 	}
 	hdr.uint64(uint64(p.version))
 	hdr.uint64(uint64(p.strings.Len()))
+	if p.shallow {
+		hdr.uint64(uint64(files.Len()))
+		hdr.uint64(uint64(len(fileOffset)))
+		for _, offset := range fileOffset {
+			hdr.uint64(offset)
+		}
+	}
 	hdr.uint64(dataLen)
 
 	// Flush output.
 	io.Copy(out, &hdr)
 	io.Copy(out, &p.strings)
+	if p.shallow {
+		io.Copy(out, &files)
+	}
 	io.Copy(out, &p.data0)
 
 	return nil
 }
 
+// encodeFile writes to w a representation of the file sufficient to
+// faithfully restore position information about all needed offsets.
+// Mutates the needed array.
+func (p *iexporter) encodeFile(w *intWriter, file *token.File, needed []uint64) {
+	_ = needed[0] // precondition: needed is non-empty
+
+	w.uint64(p.stringOff(file.Name()))
+
+	size := uint64(file.Size())
+	w.uint64(size)
+
+	// Sort the set of needed offsets. Duplicates are harmless.
+	sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] })
+
+	lines := tokeninternal.GetLines(file) // byte offset of each line start
+	w.uint64(uint64(len(lines)))
+
+	// Rather than record the entire array of line start offsets,
+	// we save only a sparse list of (index, offset) pairs for
+	// the start of each line that contains a needed position.
+	var sparse [][2]int // (index, offset) pairs
+outer:
+	for i, lineStart := range lines {
+		lineEnd := size
+		if i < len(lines)-1 {
+			lineEnd = uint64(lines[i+1])
+		}
+		// Does this line contains a needed offset?
+		if needed[0] < lineEnd {
+			sparse = append(sparse, [2]int{i, lineStart})
+			for needed[0] < lineEnd {
+				needed = needed[1:]
+				if len(needed) == 0 {
+					break outer
+				}
+			}
+		}
+	}
+
+	// Delta-encode the columns.
+	w.uint64(uint64(len(sparse)))
+	var prev [2]int
+	for _, pair := range sparse {
+		w.uint64(uint64(pair[0] - prev[0]))
+		w.uint64(uint64(pair[1] - prev[1]))
+		prev = pair
+	}
+}
+
 // writeIndex writes out an object index. mainIndex indicates whether
 // we're writing out the main index, which is also read by
 // non-compiler tools and includes a complete package description
@@ -255,6 +326,12 @@ type iexporter struct {
 	strings     intWriter
 	stringIndex map[string]uint64
 
+	// In shallow mode, object positions are encoded as (file, offset).
+	// Each file is recorded as a line-number table.
+	// Only the lines of needed positions are saved faithfully.
+	fileInfo  map[*token.File]uint64 // value is index in fileInfos
+	fileInfos []*filePositions
+
 	data0       intWriter
 	declIndex   map[types.Object]uint64
 	tparamNames map[types.Object]string // typeparam->exported name
@@ -263,6 +340,11 @@ type iexporter struct {
 	indent int // for tracing support
 }
 
+type filePositions struct {
+	file   *token.File
+	needed []uint64 // unordered list of needed file offsets
+}
+
 func (p *iexporter) trace(format string, args ...interface{}) {
 	if !trace {
 		// Call sites should also be guarded, but having this check here allows
@@ -286,6 +368,25 @@ func (p *iexporter) stringOff(s string) uint64 {
 	return off
 }
 
+// fileIndexAndOffset returns the index of the token.File and the byte offset of pos within it.
+func (p *iexporter) fileIndexAndOffset(file *token.File, pos token.Pos) (uint64, uint64) {
+	index, ok := p.fileInfo[file]
+	if !ok {
+		index = uint64(len(p.fileInfo))
+		p.fileInfos = append(p.fileInfos, &filePositions{file: file})
+		if p.fileInfo == nil {
+			p.fileInfo = make(map[*token.File]uint64)
+		}
+		p.fileInfo[file] = index
+	}
+	// Record each needed offset.
+	info := p.fileInfos[index]
+	offset := uint64(file.Offset(pos))
+	info.needed = append(info.needed, offset)
+
+	return index, offset
+}
+
 // pushDecl adds n to the declaration work queue, if not already present.
 func (p *iexporter) pushDecl(obj types.Object) {
 	// Package unsafe is known to the compiler and predeclared.
@@ -346,7 +447,13 @@ func (p *iexporter) doDecl(obj types.Object) {
 	case *types.Func:
 		sig, _ := obj.Type().(*types.Signature)
 		if sig.Recv() != nil {
-			panic(internalErrorf("unexpected method: %v", sig))
+			// We shouldn't see methods in the package scope,
+			// but the type checker may repair "func () F() {}"
+			// to "func (Invalid) F()" and then treat it like "func F()",
+			// so allow that. See golang/go#57729.
+			if sig.Recv().Type() != types.Typ[types.Invalid] {
+				panic(internalErrorf("unexpected method: %v", sig))
+			}
 		}
 
 		// Function.
@@ -458,13 +565,30 @@ func (w *exportWriter) tag(tag byte) {
 }
 
 func (w *exportWriter) pos(pos token.Pos) {
-	if w.p.version >= iexportVersionPosCol {
+	if w.p.shallow {
+		w.posV2(pos)
+	} else if w.p.version >= iexportVersionPosCol {
 		w.posV1(pos)
 	} else {
 		w.posV0(pos)
 	}
 }
 
+// posV2 encoding (used only in shallow mode) records positions as
+// (file, offset), where file is the index in the token.File table
+// (which records the file name and newline offsets) and offset is a
+// byte offset. It effectively ignores //line directives.
+func (w *exportWriter) posV2(pos token.Pos) {
+	if pos == token.NoPos {
+		w.uint64(0)
+		return
+	}
+	file := w.p.fset.File(pos) // fset must be non-nil
+	index, offset := w.p.fileIndexAndOffset(file, pos)
+	w.uint64(1 + index)
+	w.uint64(offset)
+}
+
 func (w *exportWriter) posV1(pos token.Pos) {
 	if w.p.fset == nil {
 		w.int64(0)
@@ -137,12 +137,23 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
 	}
 
 	sLen := int64(r.uint64())
+	var fLen int64
+	var fileOffset []uint64
+	if insert != nil {
+		// Shallow mode uses a different position encoding.
+		fLen = int64(r.uint64())
+		fileOffset = make([]uint64, r.uint64())
+		for i := range fileOffset {
+			fileOffset[i] = r.uint64()
+		}
+	}
 	dLen := int64(r.uint64())
 
 	whence, _ := r.Seek(0, io.SeekCurrent)
 	stringData := data[whence : whence+sLen]
-	declData := data[whence+sLen : whence+sLen+dLen]
-	r.Seek(sLen+dLen, io.SeekCurrent)
+	fileData := data[whence+sLen : whence+sLen+fLen]
+	declData := data[whence+sLen+fLen : whence+sLen+fLen+dLen]
+	r.Seek(sLen+fLen+dLen, io.SeekCurrent)
 
 	p := iimporter{
 		version: int(version),
@@ -151,6 +162,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
 
 		stringData:  stringData,
 		stringCache: make(map[uint64]string),
+		fileOffset:  fileOffset,
+		fileData:    fileData,
+		fileCache:   make([]*token.File, len(fileOffset)),
 		pkgCache:    make(map[uint64]*types.Package),
 
 		declData: declData,
@@ -280,6 +294,9 @@ type iimporter struct {
 
 	stringData  []byte
 	stringCache map[uint64]string
+	fileOffset  []uint64 // fileOffset[i] is offset in fileData for info about file encoded as i
+	fileData    []byte
+	fileCache   []*token.File // memoized decoding of file encoded as i
 	pkgCache    map[uint64]*types.Package
 
 	declData []byte
@@ -352,6 +369,55 @@ func (p *iimporter) stringAt(off uint64) string {
 	return s
 }
 
+func (p *iimporter) fileAt(index uint64) *token.File {
+	file := p.fileCache[index]
+	if file == nil {
+		off := p.fileOffset[index]
+		file = p.decodeFile(intReader{bytes.NewReader(p.fileData[off:]), p.ipath})
+		p.fileCache[index] = file
+	}
+	return file
+}
+
+func (p *iimporter) decodeFile(rd intReader) *token.File {
+	filename := p.stringAt(rd.uint64())
+	size := int(rd.uint64())
+	file := p.fake.fset.AddFile(filename, -1, size)
+
+	// SetLines requires a nondecreasing sequence.
+	// Because it is common for clients to derive the interval
+	// [start, start+len(name)] from a start position, and we
+	// want to ensure that the end offset is on the same line,
+	// we fill in the gaps of the sparse encoding with values
+	// that strictly increase by the largest possible amount.
+	// This allows us to avoid having to record the actual end
+	// offset of each needed line.
+
+	lines := make([]int, int(rd.uint64()))
+	var index, offset int
+	for i, n := 0, int(rd.uint64()); i < n; i++ {
+		index += int(rd.uint64())
+		offset += int(rd.uint64())
+		lines[index] = offset
+
+		// Ensure monotonicity between points.
+		for j := index - 1; j > 0 && lines[j] == 0; j-- {
+			lines[j] = lines[j+1] - 1
+		}
+	}
+
+	// Ensure monotonicity after last point.
+	for j := len(lines) - 1; j > 0 && lines[j] == 0; j-- {
+		size--
+		lines[j] = size
+	}
+
+	if !file.SetLines(lines) {
+		errorf("SetLines failed: %d", lines) // can't happen
+	}
+	return file
+}
+
 func (p *iimporter) pkgAt(off uint64) *types.Package {
 	if pkg, ok := p.pkgCache[off]; ok {
 		return pkg
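Aside (not part of the commit): decodeFile above rebuilds a full line table from a sparse set of (index, offset) pairs, filling the gaps with strictly increasing placeholder offsets so that token.File.SetLines accepts the result. A standalone sketch of just that fill logic, with assumed example inputs, is:

package main

import "fmt"

// decodeSparseLines mimics the gap-filling step: deltas carries the
// delta-encoded (index, offset) pairs for the "needed" lines only.
func decodeSparseLines(numLines, size int, deltas [][2]int) []int {
	lines := make([]int, numLines)
	var index, offset int
	for _, d := range deltas {
		index += d[0]
		offset += d[1]
		lines[index] = offset
		// Ensure monotonicity between recorded points.
		for j := index - 1; j > 0 && lines[j] == 0; j-- {
			lines[j] = lines[j+1] - 1
		}
	}
	// Ensure monotonicity after the last recorded point.
	for j := numLines - 1; j > 0 && lines[j] == 0; j-- {
		size--
		lines[j] = size
	}
	return lines
}

func main() {
	// 6 lines in a 100-byte file; only lines 0 and 3 were "needed".
	fmt.Println(decodeSparseLines(6, 100, [][2]int{{0, 0}, {3, 40}}))
	// Prints [0 38 39 40 98 99]: nondecreasing, so SetLines would accept it.
}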
@@ -645,6 +711,9 @@ func (r *importReader) qualifiedIdent() (*types.Package, string) {
 }
 
 func (r *importReader) pos() token.Pos {
+	if r.p.insert != nil { // shallow mode
+		return r.posv2()
+	}
 	if r.p.version >= iexportVersionPosCol {
 		r.posv1()
 	} else {
@@ -681,6 +750,15 @@ func (r *importReader) posv1() {
 	}
 }
 
+func (r *importReader) posv2() token.Pos {
+	file := r.uint64()
+	if file == 0 {
+		return token.NoPos
+	}
+	tf := r.p.fileAt(file - 1)
+	return tf.Pos(int(r.uint64()))
+}
+
 func (r *importReader) typ() types.Type {
 	return r.p.typAt(r.uint64(), nil)
 }
@@ -158,6 +158,17 @@ func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pk
 	}
 }
 
+func (pr *pkgReader) tempReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
+	return &reader{
+		Decoder: pr.TempDecoder(k, idx, marker),
+		p:       pr,
+	}
+}
+
+func (pr *pkgReader) retireReader(r *reader) {
+	pr.RetireDecoder(&r.Decoder)
+}
+
 // @@@ Positions
 
 func (r *reader) pos() token.Pos {
@@ -182,26 +193,29 @@ func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string {
 		return b
 	}
 
-	r := pr.newReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
+	var filename string
+	{
+		r := pr.tempReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
 
 	// Within types2, position bases have a lot more details (e.g.,
 	// keeping track of where //line directives appeared exactly).
 	//
 	// For go/types, we just track the file name.
 
-	filename := r.String()
+	filename = r.String()
 
 	if r.Bool() { // file base
 		// Was: "b = token.NewTrimmedFileBase(filename, true)"
 	} else { // line base
 		pos := r.pos()
 		line := r.Uint()
 		col := r.Uint()
 
 		// Was: "b = token.NewLineBase(pos, filename, true, line, col)"
 		_, _, _ = pos, line, col
 	}
+		pr.retireReader(r)
+	}
 
 	b := filename
 	pr.posBases[idx] = b
 	return b
@@ -259,22 +273,22 @@ func (r *reader) doPkg() *types.Package {
 // packages rooted from pkgs.
 func flattenImports(pkgs []*types.Package) []*types.Package {
 	var res []*types.Package
-	seen := make(map[*types.Package]bool)
-	var add func(pkg *types.Package)
-	add = func(pkg *types.Package) {
-		if seen[pkg] {
-			return
-		}
-		seen[pkg] = true
-		res = append(res, pkg)
-		for _, imp := range pkg.Imports() {
-			add(imp)
-		}
-	}
-
+	seen := make(map[*types.Package]struct{})
 	for _, pkg := range pkgs {
-		add(pkg)
+		if _, ok := seen[pkg]; ok {
+			continue
+		}
+		seen[pkg] = struct{}{}
+		res = append(res, pkg)
+
+		// pkg.Imports() is already flattened.
+		for _, pkg := range pkg.Imports() {
+			if _, ok := seen[pkg]; ok {
+				continue
+			}
+			seen[pkg] = struct{}{}
+			res = append(res, pkg)
+		}
 	}
 	return res
 }
@@ -307,12 +321,15 @@ func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type {
 		return typ
 	}
 
-	r := pr.newReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
-	r.dict = dict
-
-	typ := r.doTyp()
-	assert(typ != nil)
-
+	var typ types.Type
+	{
+		r := pr.tempReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
+		r.dict = dict
+
+		typ = r.doTyp()
+		assert(typ != nil)
+		pr.retireReader(r)
+	}
 	// See comment in pkgReader.typIdx explaining how this happens.
 	if prev := *where; prev != nil {
 		return prev
@@ -478,12 +495,19 @@ func (r *reader) obj() (types.Object, []types.Type) {
 }
 
 func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
-	rname := pr.newReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
-
-	objPkg, objName := rname.qualifiedIdent()
-	assert(objName != "")
-
-	tag := pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
+	var objPkg *types.Package
+	var objName string
+	var tag pkgbits.CodeObj
+	{
+		rname := pr.tempReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
+
+		objPkg, objName = rname.qualifiedIdent()
+		assert(objName != "")
+
+		tag = pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
+		pr.retireReader(rname)
+	}
 
 	if tag == pkgbits.ObjStub {
 		assert(objPkg == nil || objPkg == types.Unsafe)
@@ -535,18 +559,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
 
 		named.SetTypeParams(r.typeParamNames())
 
-		rhs := r.typ()
-		pk := r.p
-		pk.laterFor(named, func() {
-			// First be sure that the rhs is initialized, if it needs to be initialized.
-			delete(pk.laterFors, named) // prevent cycles
-			if i, ok := pk.laterFors[rhs]; ok {
-				f := pk.laterFns[i]
-				pk.laterFns[i] = func() {} // function is running now, so replace it with a no-op
-				f() // initialize RHS
-			}
-			underlying := rhs.Underlying()
-
+		setUnderlying := func(underlying types.Type) {
 			// If the underlying type is an interface, we need to
 			// duplicate its methods so we can replace the receiver
 			// parameter's type (#49906).
@@ -571,7 +584,31 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
 			}
 
 			named.SetUnderlying(underlying)
-		})
+		}
 
+		// Since go.dev/cl/455279, we can assume rhs.Underlying() will
+		// always be non-nil. However, to temporarily support users of
+		// older snapshot releases, we continue to fallback to the old
+		// behavior for now.
+		//
+		// TODO(mdempsky): Remove fallback code and simplify after
+		// allowing time for snapshot users to upgrade.
+		rhs := r.typ()
+		if underlying := rhs.Underlying(); underlying != nil {
+			setUnderlying(underlying)
+		} else {
+			pk := r.p
+			pk.laterFor(named, func() {
+				// First be sure that the rhs is initialized, if it needs to be initialized.
+				delete(pk.laterFors, named) // prevent cycles
+				if i, ok := pk.laterFors[rhs]; ok {
+					f := pk.laterFns[i]
+					pk.laterFns[i] = func() {} // function is running now, so replace it with a no-op
+					f() // initialize RHS
+				}
+				setUnderlying(rhs.Underlying())
+			})
+		}
+
 		for i, n := 0, r.Len(); i < n; i++ {
 			named.AddMethod(r.method())
@@ -588,25 +625,28 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
 }
 
 func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
-	r := pr.newReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
-
 	var dict readerDict
 
-	if implicits := r.Len(); implicits != 0 {
-		errorf("unexpected object with %v implicit type parameter(s)", implicits)
-	}
+	{
+		r := pr.tempReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
+		if implicits := r.Len(); implicits != 0 {
+			errorf("unexpected object with %v implicit type parameter(s)", implicits)
+		}
 
 	dict.bounds = make([]typeInfo, r.Len())
 	for i := range dict.bounds {
 		dict.bounds[i] = r.typInfo()
 	}
 
 	dict.derived = make([]derivedInfo, r.Len())
 	dict.derivedTypes = make([]types.Type, len(dict.derived))
 	for i := range dict.derived {
 		dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
 	}
 
+		pr.retireReader(r)
+	}
 	// function references follow, but reader doesn't need those
 
 	return &dict
@@ -7,6 +7,7 @@ package gocommand
 import (
 	"context"
 	"fmt"
+	"regexp"
 	"strings"
 )
 
@@ -56,3 +57,25 @@ func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) {
 	}
 	return 0, fmt.Errorf("no parseable ReleaseTags in %v", tags)
 }
+
+// GoVersionOutput returns the complete output of the go version command.
+func GoVersionOutput(ctx context.Context, inv Invocation, r *Runner) (string, error) {
+	inv.Verb = "version"
+	goVersion, err := r.Run(ctx, inv)
+	if err != nil {
+		return "", err
+	}
+	return goVersion.String(), nil
+}
+
+// ParseGoVersionOutput extracts the Go version string
+// from the output of the "go version" command.
+// Given an unrecognized form, it returns an empty string.
+func ParseGoVersionOutput(data string) string {
+	re := regexp.MustCompile(`^go version (go\S+|devel \S+)`)
+	m := re.FindStringSubmatch(data)
+	if len(m) != 2 {
+		return "" // unrecognized version
+	}
+	return m[1]
+}
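Aside (not part of the commit): ParseGoVersionOutput simply anchors a regular expression on the leading "go version" banner. A standalone sketch with assumed sample outputs shows the three cases (release, devel, unrecognized):

package main

import (
	"fmt"
	"regexp"
)

// parseGoVersionOutput reproduces the same regexp as the helper added above.
func parseGoVersionOutput(data string) string {
	re := regexp.MustCompile(`^go version (go\S+|devel \S+)`)
	m := re.FindStringSubmatch(data)
	if len(m) != 2 {
		return "" // unrecognized form
	}
	return m[1]
}

func main() {
	fmt.Println(parseGoVersionOutput("go version go1.20.2 linux/amd64"))       // go1.20.2
	fmt.Println(parseGoVersionOutput("go version devel +abcdef linux/amd64"))  // devel +abcdef
	fmt.Println(parseGoVersionOutput("unexpected output"))                     // empty string
}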
@@ -1,71 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package goroot is a copy of package internal/goroot
-// in the main GO repot. It provides a utility to produce
-// an importcfg and import path to package file map mapping
-// standard library packages to the locations of their export
-// data files.
-package goroot
-
-import (
-	"bytes"
-	"fmt"
-	"os/exec"
-	"strings"
-	"sync"
-)
-
-// Importcfg returns an importcfg file to be passed to the
-// Go compiler that contains the cached paths for the .a files for the
-// standard library.
-func Importcfg() (string, error) {
-	var icfg bytes.Buffer
-
-	m, err := PkgfileMap()
-	if err != nil {
-		return "", err
-	}
-	fmt.Fprintf(&icfg, "# import config")
-	for importPath, export := range m {
-		if importPath != "unsafe" && export != "" { // unsafe
-			fmt.Fprintf(&icfg, "\npackagefile %s=%s", importPath, export)
-		}
-	}
-	s := icfg.String()
-	return s, nil
-}
-
-var (
-	stdlibPkgfileMap map[string]string
-	stdlibPkgfileErr error
-	once             sync.Once
-)
-
-// PkgfileMap returns a map of package paths to the location on disk
-// of the .a file for the package.
-// The caller must not modify the map.
-func PkgfileMap() (map[string]string, error) {
-	once.Do(func() {
-		m := make(map[string]string)
-		output, err := exec.Command("go", "list", "-export", "-e", "-f", "{{.ImportPath}} {{.Export}}", "std", "cmd").Output()
-		if err != nil {
-			stdlibPkgfileErr = err
-		}
-		for _, line := range strings.Split(string(output), "\n") {
-			if line == "" {
-				continue
-			}
-			sp := strings.SplitN(line, " ", 2)
-			if len(sp) != 2 {
-				err = fmt.Errorf("determining pkgfile map: invalid line in go list output: %q", line)
-				return
-			}
-			importPath, export := sp[0], sp[1]
-			m[importPath] = export
-		}
-		stdlibPkgfileMap = m
-	})
-	return stdlibPkgfileMap, stdlibPkgfileErr
-}
@@ -6,6 +6,7 @@ package pkgbits
 
 import (
 	"encoding/binary"
+	"errors"
 	"fmt"
 	"go/constant"
 	"go/token"
@@ -52,6 +53,8 @@ type PkgDecoder struct {
 	// For example, section K's end positions start at elemEndsEnds[K-1]
 	// (or 0, if K==0) and end at elemEndsEnds[K].
 	elemEndsEnds [numRelocs]uint32
+
+	scratchRelocEnt []RelocEnt
 }
 
 // PkgPath returns the package path for the package
@@ -165,6 +168,21 @@ func (pr *PkgDecoder) NewDecoder(k RelocKind, idx Index, marker SyncMarker) Deco
 	return r
 }
 
+// TempDecoder returns a Decoder for the given (section, index) pair,
+// and decodes the given SyncMarker from the element bitstream.
+// If possible the Decoder should be RetireDecoder'd when it is no longer
+// needed, this will avoid heap allocations.
+func (pr *PkgDecoder) TempDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder {
+	r := pr.TempDecoderRaw(k, idx)
+	r.Sync(marker)
+	return r
+}
+
+func (pr *PkgDecoder) RetireDecoder(d *Decoder) {
+	pr.scratchRelocEnt = d.Relocs
+	d.Relocs = nil
+}
+
 // NewDecoderRaw returns a Decoder for the given (section, index) pair.
 //
 // Most callers should use NewDecoder instead.
@@ -188,6 +206,30 @@ func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder {
 	return r
 }
 
+func (pr *PkgDecoder) TempDecoderRaw(k RelocKind, idx Index) Decoder {
+	r := Decoder{
+		common: pr,
+		k:      k,
+		Idx:    idx,
+	}
+
+	r.Data.Reset(pr.DataIdx(k, idx))
+	r.Sync(SyncRelocs)
+	l := r.Len()
+	if cap(pr.scratchRelocEnt) >= l {
+		r.Relocs = pr.scratchRelocEnt[:l]
+		pr.scratchRelocEnt = nil
+	} else {
+		r.Relocs = make([]RelocEnt, l)
+	}
+	for i := range r.Relocs {
+		r.Sync(SyncReloc)
+		r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())}
+	}
+
+	return r
+}
+
 // A Decoder provides methods for decoding an individual element's
 // bitstream data.
 type Decoder struct {
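Aside (not part of the commit): TempDecoderRaw and RetireDecoder implement a one-slot scratch-slice reuse pattern: the caller borrows the Relocs slice and hands it back, so the next temporary decoder can reuse its capacity instead of allocating. A generic sketch of that pattern, with assumed names, is:

package main

import "fmt"

// pool keeps one reusable backing array, mirroring scratchRelocEnt above.
type pool struct{ scratch []int }

func (p *pool) borrow(n int) []int {
	if cap(p.scratch) >= n {
		s := p.scratch[:n]
		p.scratch = nil // prevent aliasing while borrowed
		return s
	}
	return make([]int, n)
}

func (p *pool) retire(s []int) { p.scratch = s }

func main() {
	var p pool
	a := p.borrow(4)
	fmt.Println(len(a), cap(a) >= 4)
	p.retire(a)
	b := p.borrow(3) // reuses a's backing array, no new allocation
	fmt.Println(len(b), cap(b))
}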
@@ -207,11 +249,39 @@ func (r *Decoder) checkErr(err error) {
 }
 
 func (r *Decoder) rawUvarint() uint64 {
-	x, err := binary.ReadUvarint(&r.Data)
+	x, err := readUvarint(&r.Data)
 	r.checkErr(err)
 	return x
 }
 
+// readUvarint is a type-specialized copy of encoding/binary.ReadUvarint.
+// This avoids the interface conversion and thus has better escape properties,
+// which flows up the stack.
+func readUvarint(r *strings.Reader) (uint64, error) {
+	var x uint64
+	var s uint
+	for i := 0; i < binary.MaxVarintLen64; i++ {
+		b, err := r.ReadByte()
+		if err != nil {
+			if i > 0 && err == io.EOF {
+				err = io.ErrUnexpectedEOF
+			}
+			return x, err
+		}
+		if b < 0x80 {
+			if i == binary.MaxVarintLen64-1 && b > 1 {
+				return x, overflow
+			}
+			return x | uint64(b)<<s, nil
+		}
+		x |= uint64(b&0x7f) << s
+		s += 7
+	}
+	return x, overflow
+}
+
+var overflow = errors.New("pkgbits: readUvarint overflows a 64-bit integer")
+
 func (r *Decoder) rawVarint() int64 {
 	ux := r.rawUvarint()
 
@@ -303,7 +373,7 @@ func (r *Decoder) Int64() int64 {
 	return r.rawVarint()
 }
 
-// Int64 decodes and returns a uint64 value from the element bitstream.
+// Uint64 decodes and returns a uint64 value from the element bitstream.
 func (r *Decoder) Uint64() uint64 {
 	r.Sync(SyncUint64)
 	return r.rawUvarint()
@@ -410,8 +480,12 @@ func (r *Decoder) bigFloat() *big.Float {
 // PeekPkgPath returns the package path for the specified package
 // index.
 func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
-	r := pr.NewDecoder(RelocPkg, idx, SyncPkgDef)
-	path := r.String()
+	var path string
+	{
+		r := pr.TempDecoder(RelocPkg, idx, SyncPkgDef)
+		path = r.String()
+		pr.RetireDecoder(&r)
+	}
 	if path == "" {
 		path = pr.pkgPath
 	}
@@ -421,14 +495,23 @@ func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
 // PeekObj returns the package path, object name, and CodeObj for the
 // specified object index.
 func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) {
-	r := pr.NewDecoder(RelocName, idx, SyncObject1)
-	r.Sync(SyncSym)
-	r.Sync(SyncPkg)
-	path := pr.PeekPkgPath(r.Reloc(RelocPkg))
-	name := r.String()
+	var ridx Index
+	var name string
+	var rcode int
+	{
+		r := pr.TempDecoder(RelocName, idx, SyncObject1)
+		r.Sync(SyncSym)
+		r.Sync(SyncPkg)
+		ridx = r.Reloc(RelocPkg)
+		name = r.String()
+		rcode = r.Code(SyncCodeObj)
+		pr.RetireDecoder(&r)
+	}
+
+	path := pr.PeekPkgPath(ridx)
 	assert(name != "")

-	tag := CodeObj(r.Code(SyncCodeObj))
+	tag := CodeObj(rcode)

 	return path, name, tag
 }
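Note: a hypothetical caller-side sketch of the reworked PeekObj (PeekObj itself is from this diff; the helper, numObjs, and the assumption that RelocName indices are dense are not). Because the temporary decoder is retired inside the block, peeking many objects no longer allocates a Relocs slice per element:

func peekObjNames(pr *PkgDecoder, numObjs int) []string {
	names := make([]string, 0, numObjs)
	for i := 0; i < numObjs; i++ {
		path, name, _ := pr.PeekObj(Index(i))
		names = append(names, path+"."+name)
	}
	return names
}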
@@ -293,7 +293,7 @@ func (w *Encoder) Len(x int) { assert(x >= 0); w.Uint64(uint64(x)) }
 // Int encodes and writes an int value into the element bitstream.
 func (w *Encoder) Int(x int) { w.Int64(int64(x)) }

-// Len encodes and writes a uint value into the element bitstream.
+// Uint encodes and writes a uint value into the element bitstream.
 func (w *Encoder) Uint(x uint) { w.Uint64(uint64(x)) }

 // Reloc encodes and writes a relocation for the given (section,
@@ -0,0 +1,59 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// package tokeninternal provides access to some internal features of the token
+// package.
+package tokeninternal
+
+import (
+	"go/token"
+	"sync"
+	"unsafe"
+)
+
+// GetLines returns the table of line-start offsets from a token.File.
+func GetLines(file *token.File) []int {
+	// token.File has a Lines method on Go 1.21 and later.
+	if file, ok := (interface{})(file).(interface{ Lines() []int }); ok {
+		return file.Lines()
+	}
+
+	// This declaration must match that of token.File.
+	// This creates a risk of dependency skew.
+	// For now we check that the size of the two
+	// declarations is the same, on the (fragile) assumption
+	// that future changes would add fields.
+	type tokenFile119 struct {
+		_     string
+		_     int
+		_     int
+		mu    sync.Mutex // we're not complete monsters
+		lines []int
+		_     []struct{}
+	}
+	type tokenFile118 struct {
+		_ *token.FileSet // deleted in go1.19
+		tokenFile119
+	}
+
+	type uP = unsafe.Pointer
+	switch unsafe.Sizeof(*file) {
+	case unsafe.Sizeof(tokenFile118{}):
+		var ptr *tokenFile118
+		*(*uP)(uP(&ptr)) = uP(file)
+		ptr.mu.Lock()
+		defer ptr.mu.Unlock()
+		return ptr.lines
+
+	case unsafe.Sizeof(tokenFile119{}):
+		var ptr *tokenFile119
+		*(*uP)(uP(&ptr)) = uP(file)
+		ptr.mu.Lock()
+		defer ptr.mu.Unlock()
+		return ptr.lines
+
+	default:
+		panic("unexpected token.File size")
+	}
+}
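Note: a self-contained sketch (assumed example, not part of this commit) of the information the new GetLines helper exposes. Vendored code would call tokeninternal.GetLines(file); the snippet below shows the equivalent data through the public go/token API so it runs on any supported Go version:

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	src := "package p\n\nvar x = 1\n"
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	file := fset.File(f.Pos())
	// With the new helper this would be: lines := tokeninternal.GetLines(file)
	// Here the same line-start information is read via the public API.
	for i := 1; i <= file.LineCount(); i++ {
		fmt.Println(i, file.LineStart(i)) // 1-based line -> token.Pos of line start
	}
}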
@@ -30,6 +30,12 @@ type ErrorCode int
 // convention that "bad" implies a problem with syntax, and "invalid" implies a
 // problem with types.

+const (
+	// InvalidSyntaxTree occurs if an invalid syntax tree is provided
+	// to the type checker. It should never happen.
+	InvalidSyntaxTree ErrorCode = -1
+)
+
 const (
 	_ ErrorCode = iota

@@ -153,12 +159,12 @@ const (

 	/* decls > var (+ other variable assignment codes) */

-	// UntypedNil occurs when the predeclared (untyped) value nil is used to
+	// UntypedNilUse occurs when the predeclared (untyped) value nil is used to
 	// initialize a variable declared without an explicit type.
 	//
 	// Example:
 	//  var x = nil
-	UntypedNil
+	UntypedNilUse

 	// WrongAssignCount occurs when the number of values on the right-hand side
 	// of an assignment or or initialization expression does not match the number

@@ -1523,4 +1529,32 @@ const (
 	// Example:
 	//  type T[P any] struct{ *P }
 	MisplacedTypeParam
+
+	// InvalidUnsafeSliceData occurs when unsafe.SliceData is called with
+	// an argument that is not of slice type. It also occurs if it is used
+	// in a package compiled for a language version before go1.20.
+	//
+	// Example:
+	//  import "unsafe"
+	//
+	//  var x int
+	//  var _ = unsafe.SliceData(x)
+	InvalidUnsafeSliceData
+
+	// InvalidUnsafeString occurs when unsafe.String is called with
+	// a length argument that is not of integer type, negative, or
+	// out of bounds. It also occurs if it is used in a package
+	// compiled for a language version before go1.20.
+	//
+	// Example:
+	//  import "unsafe"
+	//
+	//  var b [10]byte
+	//  var _ = unsafe.String(&b[0], -1)
+	InvalidUnsafeString
+
+	// InvalidUnsafeStringData occurs if it is used in a package
+	// compiled for a language version before go1.20.
+	_ // not used anymore
+
 )
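Note: the new InvalidUnsafeSliceData and InvalidUnsafeString codes cover misuse of two go1.20 builtins; the doc comments above show the invalid calls. For contrast, a minimal valid-use sketch (assumed example, requires Go 1.20+, not part of this diff):

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	b := []byte("hello")

	// Valid: unsafe.SliceData expects a slice argument.
	p := unsafe.SliceData(b) // *byte pointing at b's backing array

	// Valid: unsafe.String expects a *byte and a non-negative, in-bounds length.
	s := unsafe.String(p, len(b))
	fmt.Println(s) // "hello"
}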
@@ -8,6 +8,7 @@ func _() {
 	// An "invalid array index" compiler error signifies that the constant values have changed.
 	// Re-run the stringer command to generate them again.
 	var x [1]struct{}
+	_ = x[InvalidSyntaxTree - -1]
 	_ = x[Test-1]
 	_ = x[BlankPkgName-2]
 	_ = x[MismatchedPkgName-3]
@@ -23,7 +24,7 @@ func _() {
 	_ = x[InvalidConstInit-13]
 	_ = x[InvalidConstVal-14]
 	_ = x[InvalidConstType-15]
-	_ = x[UntypedNil-16]
+	_ = x[UntypedNilUse-16]
 	_ = x[WrongAssignCount-17]
 	_ = x[UnassignableOperand-18]
 	_ = x[NoNewVar-19]
@@ -152,16 +153,27 @@ func _() {
 	_ = x[MisplacedConstraintIface-142]
 	_ = x[InvalidMethodTypeParams-143]
 	_ = x[MisplacedTypeParam-144]
+	_ = x[InvalidUnsafeSliceData-145]
+	_ = x[InvalidUnsafeString-146]
 }

-const _ErrorCode_name = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParam"
+const (
+	_ErrorCode_name_0 = "InvalidSyntaxTree"
+	_ErrorCode_name_1 = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilUseWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParamInvalidUnsafeSliceDataInvalidUnsafeString"
+)

-var _ErrorCode_index = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 215, 231, 250, 258, 274, 292, 309, 327, 351, 359, 374, 390, 408, 425, 440, 447, 458, 481, 496, 508, 519, 534, 548, 563, 578, 591, 600, 614, 629, 640, 655, 664, 680, 700, 718, 737, 749, 768, 787, 803, 820, 839, 853, 864, 879, 892, 907, 923, 937, 953, 968, 985, 1003, 1018, 1028, 1038, 1055, 1077, 1091, 1105, 1125, 1143, 1163, 1181, 1204, 1220, 1235, 1248, 1258, 1270, 1281, 1295, 1308, 1319, 1329, 1344, 1355, 1366, 1379, 1395, 1412, 1436, 1453, 1468, 1478, 1487, 1500, 1516, 1532, 1543, 1558, 1574, 1588, 1604, 1618, 1635, 1655, 1668, 1684, 1698, 1715, 1732, 1749, 1764, 1778, 1792, 1803, 1815, 1828, 1845, 1858, 1869, 1882, 1894, 1903, 1910, 1922, 1938, 1956, 1974, 1989, 2006, 2025, 2039, 2059, 2071, 2095, 2118, 2136}
+var (
+	_ErrorCode_index_1 = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 218, 234, 253, 261, 277, 295, 312, 330, 354, 362, 377, 393, 411, 428, 443, 450, 461, 484, 499, 511, 522, 537, 551, 566, 581, 594, 603, 617, 632, 643, 658, 667, 683, 703, 721, 740, 752, 771, 790, 806, 823, 842, 856, 867, 882, 895, 910, 926, 940, 956, 971, 988, 1006, 1021, 1031, 1041, 1058, 1080, 1094, 1108, 1128, 1146, 1166, 1184, 1207, 1223, 1238, 1251, 1261, 1273, 1284, 1298, 1311, 1322, 1332, 1347, 1358, 1369, 1382, 1398, 1415, 1439, 1456, 1471, 1481, 1490, 1503, 1519, 1535, 1546, 1561, 1577, 1591, 1607, 1621, 1638, 1658, 1671, 1687, 1701, 1718, 1735, 1752, 1767, 1781, 1795, 1806, 1818, 1831, 1848, 1861, 1872, 1885, 1897, 1906, 1913, 1925, 1941, 1959, 1977, 1992, 2009, 2028, 2042, 2062, 2074, 2098, 2121, 2139, 2161, 2180}
+)

 func (i ErrorCode) String() string {
-	i -= 1
-	if i < 0 || i >= ErrorCode(len(_ErrorCode_index)-1) {
-		return "ErrorCode(" + strconv.FormatInt(int64(i+1), 10) + ")"
+	switch {
+	case i == -1:
+		return _ErrorCode_name_0
+	case 1 <= i && i <= 146:
+		i -= 1
+		return _ErrorCode_name_1[_ErrorCode_index_1[i]:_ErrorCode_index_1[i+1]]
+	default:
+		return "ErrorCode(" + strconv.FormatInt(int64(i), 10) + ")"
 	}
-	return _ErrorCode_name[_ErrorCode_index[i]:_ErrorCode_index[i+1]]
 }
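Note: the regenerated stringer output above switches from a single name table to one table per contiguous run because InvalidSyntaxTree sits at -1, outside the run that starts at Test. A standalone sketch of that generated-code shape, using a hypothetical Level enum rather than anything from this repository:

package main

import (
	"fmt"
	"strconv"
)

type Level int

const Unknown Level = -1

const (
	_ Level = iota
	Low
	Medium
	High
)

// Hand-written equivalent of what `stringer` emits for two runs: one
// name/index table per contiguous run, dispatched by a switch in String().
const (
	_Level_name_0 = "Unknown"
	_Level_name_1 = "LowMediumHigh"
)

var _Level_index_1 = [...]uint8{0, 3, 9, 13}

func (i Level) String() string {
	switch {
	case i == -1:
		return _Level_name_0
	case 1 <= i && i <= 3:
		i -= 1
		return _Level_name_1[_Level_index_1[i]:_Level_index_1[i+1]]
	default:
		return "Level(" + strconv.FormatInt(int64(i), 10) + ")"
	}
}

func main() {
	fmt.Println(Unknown, Low, High, Level(7)) // Unknown Low High Level(7)
}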
@@ -769,7 +769,7 @@ golang.org/x/image/tiff/lzw
 golang.org/x/image/vp8
 golang.org/x/image/vp8l
 golang.org/x/image/webp
-# golang.org/x/mod v0.7.0
+# golang.org/x/mod v0.8.0
 ## explicit; go 1.17
 golang.org/x/mod/semver
 # golang.org/x/net v0.7.0
@@ -799,7 +799,7 @@ golang.org/x/sys/execabs
 golang.org/x/sys/internal/unsafeheader
 golang.org/x/sys/unix
 golang.org/x/sys/windows
-# golang.org/x/text v0.7.0
+# golang.org/x/text v0.8.0
 ## explicit; go 1.17
 golang.org/x/text/cases
 golang.org/x/text/internal
@@ -814,7 +814,7 @@ golang.org/x/text/transform
 golang.org/x/text/unicode/bidi
 golang.org/x/text/unicode/norm
 golang.org/x/text/width
-# golang.org/x/tools v0.3.0
+# golang.org/x/tools v0.6.0
 ## explicit; go 1.18
 golang.org/x/tools/go/gcexportdata
 golang.org/x/tools/go/internal/packagesdriver
@@ -825,9 +825,9 @@ golang.org/x/tools/internal/event/keys
 golang.org/x/tools/internal/event/label
 golang.org/x/tools/internal/gcimporter
 golang.org/x/tools/internal/gocommand
-golang.org/x/tools/internal/goroot
 golang.org/x/tools/internal/packagesinternal
 golang.org/x/tools/internal/pkgbits
+golang.org/x/tools/internal/tokeninternal
 golang.org/x/tools/internal/typeparams
 golang.org/x/tools/internal/typesinternal
 # google.golang.org/appengine v1.6.7