[chore]: Bump github.com/miekg/dns from 1.1.63 to 1.1.64 (#3936)
Bumps [github.com/miekg/dns](https://github.com/miekg/dns) from 1.1.63 to 1.1.64.
- [Changelog](https://github.com/miekg/dns/blob/master/Makefile.release)
- [Commits](https://github.com/miekg/dns/compare/v1.1.63...v1.1.64)

---
updated-dependencies:
- dependency-name: github.com/miekg/dns
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
parent 18c8f85a30
commit a844f322ee
@@ -50,7 +50,7 @@ require (
 	github.com/jackc/pgx/v5 v5.7.3
 	github.com/k3a/html2text v1.2.1
 	github.com/microcosm-cc/bluemonday v1.0.27
-	github.com/miekg/dns v1.1.63
+	github.com/miekg/dns v1.1.64
 	github.com/minio/minio-go/v7 v7.0.85
 	github.com/mitchellh/mapstructure v1.5.0
 	github.com/ncruces/go-sqlite3 v0.24.0
@@ -214,9 +214,9 @@ require (
 	go.uber.org/multierr v1.11.0 // indirect
 	golang.org/x/arch v0.13.0 // indirect
 	golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 // indirect
-	golang.org/x/mod v0.22.0 // indirect
+	golang.org/x/mod v0.23.0 // indirect
 	golang.org/x/sync v0.12.0 // indirect
-	golang.org/x/tools v0.28.0 // indirect
+	golang.org/x/tools v0.30.0 // indirect
 	google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a // indirect
 	google.golang.org/grpc v1.71.0 // indirect
@@ -298,8 +298,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
 github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
 github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
 github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
-github.com/miekg/dns v1.1.63 h1:8M5aAw6OMZfFXTT7K5V0Eu5YiiL8l7nUAkyN6C9YwaY=
-github.com/miekg/dns v1.1.63/go.mod h1:6NGHfjhpmr5lt3XPLuyfDJi5AXbNIPM9PY6H6sF1Nfs=
+github.com/miekg/dns v1.1.64 h1:wuZgD9wwCE6XMT05UU/mlSko71eRSXEAm2EbjQXLKnQ=
+github.com/miekg/dns v1.1.64/go.mod h1:Dzw9769uoKVaLuODMDZz9M6ynFU6Em65csPuoi8G0ck=
 github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
 github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
 github.com/minio/minio-go/v7 v7.0.85 h1:9psTLS/NTvC3MWoyjhjXpwcKoNbkongaCSF3PNpSuXo=
@@ -540,8 +540,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
 golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
-golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
-golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
+golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM=
+golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
 golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@@ -620,8 +620,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
 golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
 golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
 golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
-golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
-golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
+golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
+golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a h1:nwKuGPlUAt+aR+pcrkfFRrTU1BVrSmYyYMxYbUIVHr0=
 google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a/go.mod h1:3kWAYMk1I75K4vykHtKt2ycnOgpA6974V7bREqbsenU=
@@ -86,7 +86,7 @@ A not-so-up-to-date-list-that-may-be-actually-current:
 * https://linuxcontainers.org/incus/
 * https://ifconfig.es
 * https://github.com/zmap/zdns
+* https://framagit.org/bortzmeyer/check-soa
 
 Send pull request if you want to be listed here.
 
@@ -193,6 +193,9 @@ Example programs can be found in the `github.com/miekg/exdns` repository.
 * 9460 - Service Binding and Parameter Specification via the DNS
 * 9461 - Service Binding Mapping for DNS Servers
 * 9462 - Discovery of Designated Resolvers
+* 9460 - SVCB and HTTPS Records
+* 9606 - DNS Resolver Information
+* Draft - Compact Denial of Existence in DNSSEC
 
 ## Loosely Based Upon
 
@@ -27,6 +27,7 @@ const (
 	EDNS0LOCALSTART = 0xFDE9 // Beginning of range reserved for local/experimental use (See RFC 6891)
 	EDNS0LOCALEND   = 0xFFFE // End of range reserved for local/experimental use (See RFC 6891)
 	_DO             = 1 << 15 // DNSSEC OK
+	_CO             = 1 << 14 // Compact Answers OK
 )
 
 // makeDataOpt is used to unpack the EDNS0 option(s) from a message.
@@ -75,7 +76,11 @@ type OPT struct {
 func (rr *OPT) String() string {
 	s := "\n;; OPT PSEUDOSECTION:\n; EDNS: version " + strconv.Itoa(int(rr.Version())) + "; "
 	if rr.Do() {
+		if rr.Co() {
+			s += "flags: do, co; "
+		} else {
 			s += "flags: do; "
+		}
 	} else {
 		s += "flags:; "
 	}
@@ -195,14 +200,34 @@ func (rr *OPT) SetDo(do ...bool) {
 	}
 }
 
-// Z returns the Z part of the OPT RR as a uint16 with only the 15 least significant bits used.
-func (rr *OPT) Z() uint16 {
-	return uint16(rr.Hdr.Ttl & 0x7FFF)
+// Co returns the value of the CO (Compact Answers OK) bit.
+func (rr *OPT) Co() bool {
+	return rr.Hdr.Ttl&_CO == _CO
 }
 
-// SetZ sets the Z part of the OPT RR, note only the 15 least significant bits of z are used.
+// SetCo sets the CO (Compact Answers OK) bit.
+// If we pass an argument, set the CO bit to that value.
+// It is possible to pass 2 or more arguments, but they will be ignored.
+func (rr *OPT) SetCo(co ...bool) {
+	if len(co) == 1 {
+		if co[0] {
+			rr.Hdr.Ttl |= _CO
+		} else {
+			rr.Hdr.Ttl &^= _CO
+		}
+	} else {
+		rr.Hdr.Ttl |= _CO
+	}
+}
+
+// Z returns the Z part of the OPT RR as a uint16 with only the 14 least significant bits used.
+func (rr *OPT) Z() uint16 {
+	return uint16(rr.Hdr.Ttl & 0x3FFF)
+}
+
+// SetZ sets the Z part of the OPT RR, note only the 14 least significant bits of z are used.
 func (rr *OPT) SetZ(z uint16) {
-	rr.Hdr.Ttl = rr.Hdr.Ttl&^0x7FFF | uint32(z&0x7FFF)
+	rr.Hdr.Ttl = rr.Hdr.Ttl&^0x3FFF | uint32(z&0x3FFF)
 }
 
 // EDNS0 defines an EDNS0 Option. An OPT RR can have multiple options appended to it.
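For context, a minimal usage sketch of the new CO (Compact Answers OK) accessors shown above — not part of this diff. Msg.SetQuestion, Msg.SetEdns0, Msg.IsEdns0 and TypeA are existing miekg/dns APIs; Co and SetCo are the methods added in 1.1.64, and the query name is a made-up example.

	package main

	import (
		"fmt"

		"github.com/miekg/dns"
	)

	func main() {
		m := new(dns.Msg)
		m.SetQuestion("example.org.", dns.TypeA)
		m.SetEdns0(4096, true) // attach an OPT RR with the DO bit set

		if opt := m.IsEdns0(); opt != nil {
			opt.SetCo(true) // request compact answers (the new 1<<14 bit in the OPT TTL)
			fmt.Println(opt.Do(), opt.Co(), opt.Z()) // true true 0
		}
	}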
@@ -1620,6 +1620,16 @@ func (rr *NINFO) parse(c *zlexer, o string) *ParseError {
 	return nil
 }
 
+// Uses the same format as TXT
+func (rr *RESINFO) parse(c *zlexer, o string) *ParseError {
+	s, e := endingToTxtSlice(c, "bad RESINFO Resinfo")
+	if e != nil {
+		return e
+	}
+	rr.Txt = s
+	return nil
+}
+
 func (rr *URI) parse(c *zlexer, o string) *ParseError {
 	l, _ := c.Next()
 	i, e := strconv.ParseUint(l.token, 10, 16)
@@ -214,11 +214,7 @@ func makeSVCBKeyValue(key SVCBKey) SVCBKeyValue {
 	}
 }
 
-// SVCB RR. See RFC xxxx (https://tools.ietf.org/html/draft-ietf-dnsop-svcb-https-08).
-//
-// NOTE: The HTTPS/SVCB RFCs are in the draft stage.
-// The API, including constants and types related to SVCBKeyValues, may
-// change in future versions in accordance with the latest drafts.
+// SVCB RR. See RFC 9460.
 type SVCB struct {
 	Hdr      RR_Header
 	Priority uint16 // If zero, Value must be empty or discarded by the user of this library
@@ -226,12 +222,8 @@ type SVCB struct {
 	Value    []SVCBKeyValue `dns:"pairs"`
 }
 
-// HTTPS RR. Everything valid for SVCB applies to HTTPS as well.
+// HTTPS RR. See RFC 9460. Everything valid for SVCB applies to HTTPS as well.
 // Except that the HTTPS record is intended for use with the HTTP and HTTPS protocols.
-//
-// NOTE: The HTTPS/SVCB RFCs are in the draft stage.
-// The API, including constants and types related to SVCBKeyValues, may
-// change in future versions in accordance with the latest drafts.
 type HTTPS struct {
 	SVCB
 }
@@ -101,6 +101,7 @@ const (
 	TypeCAA      uint16 = 257
 	TypeAVC      uint16 = 258
 	TypeAMTRELAY uint16 = 260
+	TypeRESINFO  uint16 = 261
 
 	TypeTKEY uint16 = 249
 	TypeTSIG uint16 = 250
@@ -1508,6 +1509,15 @@ func (rr *ZONEMD) String() string {
 		" " + rr.Digest
 }
 
+// RESINFO RR. See RFC 9606.
+
+type RESINFO struct {
+	Hdr RR_Header
+	Txt []string `dns:"txt"`
+}
+
+func (rr *RESINFO) String() string { return rr.Hdr.String() + sprintTxt(rr.Txt) }
+
 // APL RR. See RFC 3123.
 type APL struct {
 	Hdr RR_Header
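An illustrative sketch of the new RESINFO RR (RFC 9606) added above — not part of this diff. dns.NewRR is the library's existing zone-format parser; RESINFO RDATA uses the same presentation format as TXT, and the key/value strings below are hypothetical resolver properties for demonstration only.

	package main

	import (
		"fmt"

		"github.com/miekg/dns"
	)

	func main() {
		rr, err := dns.NewRR(`resolver.example. 3600 IN RESINFO "qnamemin" "exterr=15-17"`)
		if err != nil {
			panic(err)
		}
		fmt.Println(rr.String()) // printed with TXT-style quoted strings
	}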
@@ -3,7 +3,7 @@ package dns
 import "fmt"
 
 // Version is current version of this library.
-var Version = v{1, 1, 63}
+var Version = v{1, 1, 64}
 
 // v holds the version of this library.
 type v struct {
@@ -957,6 +957,23 @@ func (r1 *PX) isDuplicate(_r2 RR) bool {
 	return true
 }
 
+func (r1 *RESINFO) isDuplicate(_r2 RR) bool {
+	r2, ok := _r2.(*RESINFO)
+	if !ok {
+		return false
+	}
+	_ = r2
+	if len(r1.Txt) != len(r2.Txt) {
+		return false
+	}
+	for i := 0; i < len(r1.Txt); i++ {
+		if r1.Txt[i] != r2.Txt[i] {
+			return false
+		}
+	}
+	return true
+}
+
 func (r1 *RFC3597) isDuplicate(_r2 RR) bool {
 	r2, ok := _r2.(*RFC3597)
 	if !ok {
@@ -762,6 +762,14 @@ func (rr *PX) pack(msg []byte, off int, compression compressionMap, compress bool) (off1 int, err error) {
 	return off, nil
 }
 
+func (rr *RESINFO) pack(msg []byte, off int, compression compressionMap, compress bool) (off1 int, err error) {
+	off, err = packStringTxt(rr.Txt, msg, off)
+	if err != nil {
+		return off, err
+	}
+	return off, nil
+}
+
 func (rr *RFC3597) pack(msg []byte, off int, compression compressionMap, compress bool) (off1 int, err error) {
 	off, err = packStringHex(rr.Rdata, msg, off)
 	if err != nil {
@@ -2353,6 +2361,17 @@ func (rr *PX) unpack(msg []byte, off int) (off1 int, err error) {
 	return off, nil
 }
 
+func (rr *RESINFO) unpack(msg []byte, off int) (off1 int, err error) {
+	rdStart := off
+	_ = rdStart
+
+	rr.Txt, off, err = unpackStringTxt(msg, off)
+	if err != nil {
+		return off, err
+	}
+	return off, nil
+}
+
 func (rr *RFC3597) unpack(msg []byte, off int) (off1 int, err error) {
 	rdStart := off
 	_ = rdStart
@@ -66,6 +66,7 @@ var TypeToRR = map[uint16]func() RR{
 	TypeOPT:     func() RR { return new(OPT) },
 	TypePTR:     func() RR { return new(PTR) },
 	TypePX:      func() RR { return new(PX) },
+	TypeRESINFO: func() RR { return new(RESINFO) },
 	TypeRKEY:    func() RR { return new(RKEY) },
 	TypeRP:      func() RR { return new(RP) },
 	TypeRRSIG:   func() RR { return new(RRSIG) },
@@ -154,6 +155,7 @@ var TypeToString = map[uint16]string{
 	TypeOPT:     "OPT",
 	TypePTR:     "PTR",
 	TypePX:      "PX",
+	TypeRESINFO: "RESINFO",
 	TypeRKEY:    "RKEY",
 	TypeRP:      "RP",
 	TypeRRSIG:   "RRSIG",
@@ -238,6 +240,7 @@ func (rr *OPENPGPKEY) Header() *RR_Header { return &rr.Hdr }
 func (rr *OPT) Header() *RR_Header     { return &rr.Hdr }
 func (rr *PTR) Header() *RR_Header     { return &rr.Hdr }
 func (rr *PX) Header() *RR_Header      { return &rr.Hdr }
+func (rr *RESINFO) Header() *RR_Header { return &rr.Hdr }
 func (rr *RFC3597) Header() *RR_Header { return &rr.Hdr }
 func (rr *RKEY) Header() *RR_Header    { return &rr.Hdr }
 func (rr *RP) Header() *RR_Header      { return &rr.Hdr }
@@ -622,6 +625,14 @@ func (rr *PX) len(off int, compression map[string]struct{}) int {
 	return l
 }
 
+func (rr *RESINFO) len(off int, compression map[string]struct{}) int {
+	l := rr.Hdr.len(off, compression)
+	for _, x := range rr.Txt {
+		l += len(x) + 1
+	}
+	return l
+}
+
 func (rr *RFC3597) len(off int, compression map[string]struct{}) int {
 	l := rr.Hdr.len(off, compression)
 	l += len(rr.Rdata) / 2
@@ -1148,6 +1159,10 @@ func (rr *PX) copy() RR {
 	}
 }
 
+func (rr *RESINFO) copy() RR {
+	return &RESINFO{rr.Hdr, cloneSlice(rr.Txt)}
+}
+
 func (rr *RFC3597) copy() RR {
 	return &RFC3597{rr.Hdr, rr.Rdata}
 }
@@ -322,6 +322,7 @@ type jsonPackage struct {
 	ImportPath      string
 	Dir             string
 	Name            string
+	Target          string
 	Export          string
 	GoFiles         []string
 	CompiledGoFiles []string
@@ -506,6 +507,7 @@ func (state *golistState) createDriverResponse(words ...string) (*DriverResponse
 			Name:            p.Name,
 			ID:              p.ImportPath,
 			Dir:             p.Dir,
+			Target:          p.Target,
 			GoFiles:         absJoin(p.Dir, p.GoFiles, p.CgoFiles),
 			CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
 			OtherFiles:      absJoin(p.Dir, otherFiles(p)...),
@@ -811,6 +813,9 @@ func jsonFlag(cfg *Config, goVersion int) string {
 	if cfg.Mode&NeedEmbedPatterns != 0 {
 		addFields("EmbedPatterns")
 	}
+	if cfg.Mode&NeedTarget != 0 {
+		addFields("Target")
+	}
 	return "-json=" + strings.Join(fields, ",")
 }
 
@@ -27,6 +27,7 @@ var modes = [...]struct {
 	{NeedModule, "NeedModule"},
 	{NeedEmbedFiles, "NeedEmbedFiles"},
 	{NeedEmbedPatterns, "NeedEmbedPatterns"},
+	{NeedTarget, "NeedTarget"},
 }
 
 func (mode LoadMode) String() string {
@@ -59,10 +59,10 @@ import (
 //
 // Unfortunately there are a number of open bugs related to
 // interactions among the LoadMode bits:
-//   - https://github.com/golang/go/issues/56633
-//   - https://github.com/golang/go/issues/56677
-//   - https://github.com/golang/go/issues/58726
-//   - https://github.com/golang/go/issues/63517
+//   - https://go.dev/issue/56633
+//   - https://go.dev/issue/56677
+//   - https://go.dev/issue/58726
+//   - https://go.dev/issue/63517
 type LoadMode int
 
 const (
@@ -118,6 +118,9 @@ const (
 	// NeedEmbedPatterns adds EmbedPatterns.
 	NeedEmbedPatterns
 
+	// NeedTarget adds Target.
+	NeedTarget
+
 	// Be sure to update loadmode_string.go when adding new items!
 )
 
@@ -479,6 +482,10 @@ type Package struct {
 	// information for the package as provided by the build system.
 	ExportFile string
 
+	// Target is the absolute install path of the .a file, for libraries,
+	// and of the executable file, for binaries.
+	Target string
+
 	// Imports maps import paths appearing in the package's Go source files
 	// to corresponding loaded Packages.
 	Imports map[string]*Package
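Below, a small sketch of how a consumer of golang.org/x/tools/go/packages could request the new Target field via the NeedTarget load-mode bit pulled in by this vendored update — illustrative only, not part of the diff. packages.Load, packages.Config, and NeedName are existing APIs; loading "fmt" is just an arbitrary example pattern.

	package main

	import (
		"fmt"
		"log"

		"golang.org/x/tools/go/packages"
	)

	func main() {
		cfg := &packages.Config{Mode: packages.NeedName | packages.NeedTarget}
		pkgs, err := packages.Load(cfg, "fmt")
		if err != nil {
			log.Fatal(err)
		}
		for _, p := range pkgs {
			// Target is the install path of the compiled .a file (or executable);
			// it may be empty when the build system does not report one.
			fmt.Println(p.PkgPath, p.Target)
		}
	}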
@ -2,30 +2,35 @@
|
||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// Package typeutil defines various utilities for types, such as Map,
|
// Package typeutil defines various utilities for types, such as [Map],
|
||||||
// a mapping from types.Type to any values.
|
// a hash table that maps [types.Type] to any value.
|
||||||
package typeutil // import "golang.org/x/tools/go/types/typeutil"
|
package typeutil
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/types"
|
"go/types"
|
||||||
"reflect"
|
"hash/maphash"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
"golang.org/x/tools/internal/typeparams"
|
"golang.org/x/tools/internal/typeparams"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Map is a hash-table-based mapping from types (types.Type) to
|
// Map is a hash-table-based mapping from types (types.Type) to
|
||||||
// arbitrary any values. The concrete types that implement
|
// arbitrary values. The concrete types that implement
|
||||||
// the Type interface are pointers. Since they are not canonicalized,
|
// the Type interface are pointers. Since they are not canonicalized,
|
||||||
// == cannot be used to check for equivalence, and thus we cannot
|
// == cannot be used to check for equivalence, and thus we cannot
|
||||||
// simply use a Go map.
|
// simply use a Go map.
|
||||||
//
|
//
|
||||||
// Just as with map[K]V, a nil *Map is a valid empty map.
|
// Just as with map[K]V, a nil *Map is a valid empty map.
|
||||||
//
|
//
|
||||||
// Not thread-safe.
|
// Read-only map operations ([Map.At], [Map.Len], and so on) may
|
||||||
|
// safely be called concurrently.
|
||||||
|
//
|
||||||
|
// TODO(adonovan): deprecate in favor of https://go.dev/issues/69420
|
||||||
|
// and 69559, if the latter proposals for a generic hash-map type and
|
||||||
|
// a types.Hash function are accepted.
|
||||||
type Map struct {
|
type Map struct {
|
||||||
hasher Hasher // shared by many Maps
|
|
||||||
table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
|
table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
|
||||||
length int // number of map entries
|
length int // number of map entries
|
||||||
}
|
}
|
||||||
|
@ -36,35 +41,17 @@ type entry struct {
|
||||||
value any
|
value any
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetHasher sets the hasher used by Map.
|
// SetHasher has no effect.
|
||||||
//
|
//
|
||||||
// All Hashers are functionally equivalent but contain internal state
|
// It is a relic of an optimization that is no longer profitable. Do
|
||||||
// used to cache the results of hashing previously seen types.
|
// not use [Hasher], [MakeHasher], or [SetHasher] in new code.
|
||||||
//
|
func (m *Map) SetHasher(Hasher) {}
|
||||||
// A single Hasher created by MakeHasher() may be shared among many
|
|
||||||
// Maps. This is recommended if the instances have many keys in
|
|
||||||
// common, as it will amortize the cost of hash computation.
|
|
||||||
//
|
|
||||||
// A Hasher may grow without bound as new types are seen. Even when a
|
|
||||||
// type is deleted from the map, the Hasher never shrinks, since other
|
|
||||||
// types in the map may reference the deleted type indirectly.
|
|
||||||
//
|
|
||||||
// Hashers are not thread-safe, and read-only operations such as
|
|
||||||
// Map.Lookup require updates to the hasher, so a full Mutex lock (not a
|
|
||||||
// read-lock) is require around all Map operations if a shared
|
|
||||||
// hasher is accessed from multiple threads.
|
|
||||||
//
|
|
||||||
// If SetHasher is not called, the Map will create a private hasher at
|
|
||||||
// the first call to Insert.
|
|
||||||
func (m *Map) SetHasher(hasher Hasher) {
|
|
||||||
m.hasher = hasher
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delete removes the entry with the given key, if any.
|
// Delete removes the entry with the given key, if any.
|
||||||
// It returns true if the entry was found.
|
// It returns true if the entry was found.
|
||||||
func (m *Map) Delete(key types.Type) bool {
|
func (m *Map) Delete(key types.Type) bool {
|
||||||
if m != nil && m.table != nil {
|
if m != nil && m.table != nil {
|
||||||
hash := m.hasher.Hash(key)
|
hash := hash(key)
|
||||||
bucket := m.table[hash]
|
bucket := m.table[hash]
|
||||||
for i, e := range bucket {
|
for i, e := range bucket {
|
||||||
if e.key != nil && types.Identical(key, e.key) {
|
if e.key != nil && types.Identical(key, e.key) {
|
||||||
|
@ -83,7 +70,7 @@ func (m *Map) Delete(key types.Type) bool {
|
||||||
// The result is nil if the entry is not present.
|
// The result is nil if the entry is not present.
|
||||||
func (m *Map) At(key types.Type) any {
|
func (m *Map) At(key types.Type) any {
|
||||||
if m != nil && m.table != nil {
|
if m != nil && m.table != nil {
|
||||||
for _, e := range m.table[m.hasher.Hash(key)] {
|
for _, e := range m.table[hash(key)] {
|
||||||
if e.key != nil && types.Identical(key, e.key) {
|
if e.key != nil && types.Identical(key, e.key) {
|
||||||
return e.value
|
return e.value
|
||||||
}
|
}
|
||||||
|
@ -96,7 +83,7 @@ func (m *Map) At(key types.Type) any {
|
||||||
// and returns the previous entry, if any.
|
// and returns the previous entry, if any.
|
||||||
func (m *Map) Set(key types.Type, value any) (prev any) {
|
func (m *Map) Set(key types.Type, value any) (prev any) {
|
||||||
if m.table != nil {
|
if m.table != nil {
|
||||||
hash := m.hasher.Hash(key)
|
hash := hash(key)
|
||||||
bucket := m.table[hash]
|
bucket := m.table[hash]
|
||||||
var hole *entry
|
var hole *entry
|
||||||
for i, e := range bucket {
|
for i, e := range bucket {
|
||||||
|
@ -115,10 +102,7 @@ func (m *Map) Set(key types.Type, value any) (prev any) {
|
||||||
m.table[hash] = append(bucket, entry{key, value})
|
m.table[hash] = append(bucket, entry{key, value})
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if m.hasher.memo == nil {
|
hash := hash(key)
|
||||||
m.hasher = MakeHasher()
|
|
||||||
}
|
|
||||||
hash := m.hasher.Hash(key)
|
|
||||||
m.table = map[uint32][]entry{hash: {entry{key, value}}}
|
m.table = map[uint32][]entry{hash: {entry{key, value}}}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -195,53 +179,35 @@ func (m *Map) KeysString() string {
|
||||||
return m.toString(false)
|
return m.toString(false)
|
||||||
}
|
}
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////
|
// -- Hasher --
|
||||||
// Hasher
|
|
||||||
|
|
||||||
// A Hasher maps each type to its hash value.
|
// hash returns the hash of type t.
|
||||||
// For efficiency, a hasher uses memoization; thus its memory
|
// TODO(adonovan): replace by types.Hash when Go proposal #69420 is accepted.
|
||||||
// footprint grows monotonically over time.
|
func hash(t types.Type) uint32 {
|
||||||
// Hashers are not thread-safe.
|
return theHasher.Hash(t)
|
||||||
// Hashers have reference semantics.
|
|
||||||
// Call MakeHasher to create a Hasher.
|
|
||||||
type Hasher struct {
|
|
||||||
memo map[types.Type]uint32
|
|
||||||
|
|
||||||
// ptrMap records pointer identity.
|
|
||||||
ptrMap map[any]uint32
|
|
||||||
|
|
||||||
// sigTParams holds type parameters from the signature being hashed.
|
|
||||||
// Signatures are considered identical modulo renaming of type parameters, so
|
|
||||||
// within the scope of a signature type the identity of the signature's type
|
|
||||||
// parameters is just their index.
|
|
||||||
//
|
|
||||||
// Since the language does not currently support referring to uninstantiated
|
|
||||||
// generic types or functions, and instantiated signatures do not have type
|
|
||||||
// parameter lists, we should never encounter a second non-empty type
|
|
||||||
// parameter list when hashing a generic signature.
|
|
||||||
sigTParams *types.TypeParamList
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// MakeHasher returns a new Hasher instance.
|
// A Hasher provides a [Hasher.Hash] method to map a type to its hash value.
|
||||||
func MakeHasher() Hasher {
|
// Hashers are stateless, and all are equivalent.
|
||||||
return Hasher{
|
type Hasher struct{}
|
||||||
memo: make(map[types.Type]uint32),
|
|
||||||
ptrMap: make(map[any]uint32),
|
var theHasher Hasher
|
||||||
sigTParams: nil,
|
|
||||||
}
|
// MakeHasher returns Hasher{}.
|
||||||
}
|
// Hashers are stateless; all are equivalent.
|
||||||
|
func MakeHasher() Hasher { return theHasher }
|
||||||
|
|
||||||
// Hash computes a hash value for the given type t such that
|
// Hash computes a hash value for the given type t such that
|
||||||
// Identical(t, t') => Hash(t) == Hash(t').
|
// Identical(t, t') => Hash(t) == Hash(t').
|
||||||
func (h Hasher) Hash(t types.Type) uint32 {
|
func (h Hasher) Hash(t types.Type) uint32 {
|
||||||
hash, ok := h.memo[t]
|
return hasher{inGenericSig: false}.hash(t)
|
||||||
if !ok {
|
|
||||||
hash = h.hashFor(t)
|
|
||||||
h.memo[t] = hash
|
|
||||||
}
|
|
||||||
return hash
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// hasher holds the state of a single Hash traversal: whether we are
|
||||||
|
// inside the signature of a generic function; this is used to
|
||||||
|
// optimize [hasher.hashTypeParam].
|
||||||
|
type hasher struct{ inGenericSig bool }
|
||||||
|
|
||||||
// hashString computes the Fowler–Noll–Vo hash of s.
|
// hashString computes the Fowler–Noll–Vo hash of s.
|
||||||
func hashString(s string) uint32 {
|
func hashString(s string) uint32 {
|
||||||
var h uint32
|
var h uint32
|
||||||
|
@ -252,21 +218,21 @@ func hashString(s string) uint32 {
|
||||||
return h
|
return h
|
||||||
}
|
}
|
||||||
|
|
||||||
// hashFor computes the hash of t.
|
// hash computes the hash of t.
|
||||||
func (h Hasher) hashFor(t types.Type) uint32 {
|
func (h hasher) hash(t types.Type) uint32 {
|
||||||
// See Identical for rationale.
|
// See Identical for rationale.
|
||||||
switch t := t.(type) {
|
switch t := t.(type) {
|
||||||
case *types.Basic:
|
case *types.Basic:
|
||||||
return uint32(t.Kind())
|
return uint32(t.Kind())
|
||||||
|
|
||||||
case *types.Alias:
|
case *types.Alias:
|
||||||
return h.Hash(types.Unalias(t))
|
return h.hash(types.Unalias(t))
|
||||||
|
|
||||||
case *types.Array:
|
case *types.Array:
|
||||||
return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
|
return 9043 + 2*uint32(t.Len()) + 3*h.hash(t.Elem())
|
||||||
|
|
||||||
case *types.Slice:
|
case *types.Slice:
|
||||||
return 9049 + 2*h.Hash(t.Elem())
|
return 9049 + 2*h.hash(t.Elem())
|
||||||
|
|
||||||
case *types.Struct:
|
case *types.Struct:
|
||||||
var hash uint32 = 9059
|
var hash uint32 = 9059
|
||||||
|
@ -277,12 +243,12 @@ func (h Hasher) hashFor(t types.Type) uint32 {
|
||||||
}
|
}
|
||||||
hash += hashString(t.Tag(i))
|
hash += hashString(t.Tag(i))
|
||||||
hash += hashString(f.Name()) // (ignore f.Pkg)
|
hash += hashString(f.Name()) // (ignore f.Pkg)
|
||||||
hash += h.Hash(f.Type())
|
hash += h.hash(f.Type())
|
||||||
}
|
}
|
||||||
return hash
|
return hash
|
||||||
|
|
||||||
case *types.Pointer:
|
case *types.Pointer:
|
||||||
return 9067 + 2*h.Hash(t.Elem())
|
return 9067 + 2*h.hash(t.Elem())
|
||||||
|
|
||||||
case *types.Signature:
|
case *types.Signature:
|
||||||
var hash uint32 = 9091
|
var hash uint32 = 9091
|
||||||
|
@ -290,33 +256,14 @@ func (h Hasher) hashFor(t types.Type) uint32 {
|
||||||
hash *= 8863
|
hash *= 8863
|
||||||
}
|
}
|
||||||
|
|
||||||
// Use a separate hasher for types inside of the signature, where type
|
|
||||||
// parameter identity is modified to be (index, constraint). We must use a
|
|
||||||
// new memo for this hasher as type identity may be affected by this
|
|
||||||
// masking. For example, in func[T any](*T), the identity of *T depends on
|
|
||||||
// whether we are mapping the argument in isolation, or recursively as part
|
|
||||||
// of hashing the signature.
|
|
||||||
//
|
|
||||||
// We should never encounter a generic signature while hashing another
|
|
||||||
// generic signature, but defensively set sigTParams only if h.mask is
|
|
||||||
// unset.
|
|
||||||
tparams := t.TypeParams()
|
tparams := t.TypeParams()
|
||||||
if h.sigTParams == nil && tparams.Len() != 0 {
|
if n := tparams.Len(); n > 0 {
|
||||||
h = Hasher{
|
h.inGenericSig = true // affects constraints, params, and results
|
||||||
// There may be something more efficient than discarding the existing
|
|
||||||
// memo, but it would require detecting whether types are 'tainted' by
|
|
||||||
// references to type parameters.
|
|
||||||
memo: make(map[types.Type]uint32),
|
|
||||||
// Re-using ptrMap ensures that pointer identity is preserved in this
|
|
||||||
// hasher.
|
|
||||||
ptrMap: h.ptrMap,
|
|
||||||
sigTParams: tparams,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i := 0; i < tparams.Len(); i++ {
|
for i := range n {
|
||||||
tparam := tparams.At(i)
|
tparam := tparams.At(i)
|
||||||
hash += 7 * h.Hash(tparam.Constraint())
|
hash += 7 * h.hash(tparam.Constraint())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
|
return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
|
||||||
|
@ -350,17 +297,17 @@ func (h Hasher) hashFor(t types.Type) uint32 {
|
||||||
return hash
|
return hash
|
||||||
|
|
||||||
case *types.Map:
|
case *types.Map:
|
||||||
return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
|
return 9109 + 2*h.hash(t.Key()) + 3*h.hash(t.Elem())
|
||||||
|
|
||||||
case *types.Chan:
|
case *types.Chan:
|
||||||
return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
|
return 9127 + 2*uint32(t.Dir()) + 3*h.hash(t.Elem())
|
||||||
|
|
||||||
case *types.Named:
|
case *types.Named:
|
||||||
hash := h.hashPtr(t.Obj())
|
hash := h.hashTypeName(t.Obj())
|
||||||
targs := t.TypeArgs()
|
targs := t.TypeArgs()
|
||||||
for i := 0; i < targs.Len(); i++ {
|
for i := 0; i < targs.Len(); i++ {
|
||||||
targ := targs.At(i)
|
targ := targs.At(i)
|
||||||
hash += 2 * h.Hash(targ)
|
hash += 2 * h.hash(targ)
|
||||||
}
|
}
|
||||||
return hash
|
return hash
|
||||||
|
|
||||||
|
@ -374,17 +321,17 @@ func (h Hasher) hashFor(t types.Type) uint32 {
|
||||||
panic(fmt.Sprintf("%T: %v", t, t))
|
panic(fmt.Sprintf("%T: %v", t, t))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
|
func (h hasher) hashTuple(tuple *types.Tuple) uint32 {
|
||||||
// See go/types.identicalTypes for rationale.
|
// See go/types.identicalTypes for rationale.
|
||||||
n := tuple.Len()
|
n := tuple.Len()
|
||||||
hash := 9137 + 2*uint32(n)
|
hash := 9137 + 2*uint32(n)
|
||||||
for i := 0; i < n; i++ {
|
for i := range n {
|
||||||
hash += 3 * h.Hash(tuple.At(i).Type())
|
hash += 3 * h.hash(tuple.At(i).Type())
|
||||||
}
|
}
|
||||||
return hash
|
return hash
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h Hasher) hashUnion(t *types.Union) uint32 {
|
func (h hasher) hashUnion(t *types.Union) uint32 {
|
||||||
// Hash type restrictions.
|
// Hash type restrictions.
|
||||||
terms, err := typeparams.UnionTermSet(t)
|
terms, err := typeparams.UnionTermSet(t)
|
||||||
// if err != nil t has invalid type restrictions. Fall back on a non-zero
|
// if err != nil t has invalid type restrictions. Fall back on a non-zero
|
||||||
|
@ -395,11 +342,11 @@ func (h Hasher) hashUnion(t *types.Union) uint32 {
|
||||||
return h.hashTermSet(terms)
|
return h.hashTermSet(terms)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h Hasher) hashTermSet(terms []*types.Term) uint32 {
|
func (h hasher) hashTermSet(terms []*types.Term) uint32 {
|
||||||
hash := 9157 + 2*uint32(len(terms))
|
hash := 9157 + 2*uint32(len(terms))
|
||||||
for _, term := range terms {
|
for _, term := range terms {
|
||||||
// term order is not significant.
|
// term order is not significant.
|
||||||
termHash := h.Hash(term.Type())
|
termHash := h.hash(term.Type())
|
||||||
if term.Tilde() {
|
if term.Tilde() {
|
||||||
termHash *= 9161
|
termHash *= 9161
|
||||||
}
|
}
|
||||||
|
@ -408,36 +355,42 @@ func (h Hasher) hashTermSet(terms []*types.Term) uint32 {
|
||||||
return hash
|
return hash
|
||||||
}
|
}
|
||||||
|
|
||||||
// hashTypeParam returns a hash of the type parameter t, with a hash value
|
// hashTypeParam returns the hash of a type parameter.
|
||||||
// depending on whether t is contained in h.sigTParams.
|
func (h hasher) hashTypeParam(t *types.TypeParam) uint32 {
|
||||||
|
// Within the signature of a generic function, TypeParams are
|
||||||
|
// identical if they have the same index and constraint, so we
|
||||||
|
// hash them based on index.
|
||||||
//
|
//
|
||||||
// If h.sigTParams is set and contains t, then we are in the process of hashing
|
// When we are outside a generic function, free TypeParams are
|
||||||
// a signature, and the hash value of t must depend only on t's index and
|
// identical iff they are the same object, so we can use a
|
||||||
// constraint: signatures are considered identical modulo type parameter
|
// more discriminating hash consistent with object identity.
|
||||||
// renaming. To avoid infinite recursion, we only hash the type parameter
|
// This optimization saves [Map] about 4% when hashing all the
|
||||||
// index, and rely on types.Identical to handle signatures where constraints
|
// types.Info.Types in the forward closure of net/http.
|
||||||
// are not identical.
|
if !h.inGenericSig {
|
||||||
//
|
// Optimization: outside a generic function signature,
|
||||||
// Otherwise the hash of t depends only on t's pointer identity.
|
// use a more discrimating hash consistent with object identity.
|
||||||
func (h Hasher) hashTypeParam(t *types.TypeParam) uint32 {
|
return h.hashTypeName(t.Obj())
|
||||||
if h.sigTParams != nil {
|
|
||||||
i := t.Index()
|
|
||||||
if i >= 0 && i < h.sigTParams.Len() && t == h.sigTParams.At(i) {
|
|
||||||
return 9173 + 3*uint32(i)
|
|
||||||
}
|
}
|
||||||
}
|
return 9173 + 3*uint32(t.Index())
|
||||||
return h.hashPtr(t.Obj())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// hashPtr hashes the pointer identity of ptr. It uses h.ptrMap to ensure that
|
var theSeed = maphash.MakeSeed()
|
||||||
// pointers values are not dependent on the GC.
|
|
||||||
func (h Hasher) hashPtr(ptr any) uint32 {
|
// hashTypeName hashes the pointer of tname.
|
||||||
if hash, ok := h.ptrMap[ptr]; ok {
|
func (hasher) hashTypeName(tname *types.TypeName) uint32 {
|
||||||
return hash
|
// Since types.Identical uses == to compare TypeNames,
|
||||||
}
|
// the Hash function uses maphash.Comparable.
|
||||||
hash := uint32(reflect.ValueOf(ptr).Pointer())
|
// TODO(adonovan): or will, when it becomes available in go1.24.
|
||||||
h.ptrMap[ptr] = hash
|
// In the meantime we use the pointer's numeric value.
|
||||||
return hash
|
//
|
||||||
|
// hash := maphash.Comparable(theSeed, tname)
|
||||||
|
//
|
||||||
|
// (Another approach would be to hash the name and package
|
||||||
|
// path, and whether or not it is a package-level typename. It
|
||||||
|
// is rare for a package to define multiple local types with
|
||||||
|
// the same name.)
|
||||||
|
hash := uintptr(unsafe.Pointer(tname))
|
||||||
|
return uint32(hash ^ (hash >> 32))
|
||||||
}
|
}
|
||||||
|
|
||||||
// shallowHash computes a hash of t without looking at any of its
|
// shallowHash computes a hash of t without looking at any of its
|
||||||
|
@ -454,7 +407,7 @@ func (h Hasher) hashPtr(ptr any) uint32 {
|
||||||
// include m itself; there is no mention of the named type X that
|
// include m itself; there is no mention of the named type X that
|
||||||
// might help us break the cycle.
|
// might help us break the cycle.
|
||||||
// (See comment in go/types.identical, case *Interface, for more.)
|
// (See comment in go/types.identical, case *Interface, for more.)
|
||||||
func (h Hasher) shallowHash(t types.Type) uint32 {
|
func (h hasher) shallowHash(t types.Type) uint32 {
|
||||||
// t is the type of an interface method (Signature),
|
// t is the type of an interface method (Signature),
|
||||||
// its params or results (Tuples), or their immediate
|
// its params or results (Tuples), or their immediate
|
||||||
// elements (mostly Slice, Pointer, Basic, Named),
|
// elements (mostly Slice, Pointer, Basic, Named),
|
||||||
|
@ -475,7 +428,7 @@ func (h Hasher) shallowHash(t types.Type) uint32 {
|
||||||
case *types.Tuple:
|
case *types.Tuple:
|
||||||
n := t.Len()
|
n := t.Len()
|
||||||
hash := 9137 + 2*uint32(n)
|
hash := 9137 + 2*uint32(n)
|
||||||
for i := 0; i < n; i++ {
|
for i := range n {
|
||||||
hash += 53471161 * h.shallowHash(t.At(i).Type())
|
hash += 53471161 * h.shallowHash(t.At(i).Type())
|
||||||
}
|
}
|
||||||
return hash
|
return hash
|
||||||
|
@ -508,10 +461,10 @@ func (h Hasher) shallowHash(t types.Type) uint32 {
|
||||||
return 9127
|
return 9127
|
||||||
|
|
||||||
case *types.Named:
|
case *types.Named:
|
||||||
return h.hashPtr(t.Obj())
|
return h.hashTypeName(t.Obj())
|
||||||
|
|
||||||
case *types.TypeParam:
|
case *types.TypeParam:
|
||||||
return h.hashPtr(t.Obj())
|
return h.hashTypeParam(t)
|
||||||
}
|
}
|
||||||
panic(fmt.Sprintf("shallowHash: %T: %v", t, t))
|
panic(fmt.Sprintf("shallowHash: %T: %v", t, t))
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,52 +2,183 @@
|
||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
|
// This file should be kept in sync with $GOROOT/src/internal/exportdata/exportdata.go.
|
||||||
|
// This file also additionally implements FindExportData for gcexportdata.NewReader.
|
||||||
// This file implements FindExportData.
|
|
||||||
|
|
||||||
package gcimporter
|
package gcimporter
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"go/build"
|
||||||
"io"
|
"io"
|
||||||
"strconv"
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync"
|
||||||
)
|
)
|
||||||
|
|
||||||
func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) {
|
|
||||||
// See $GOROOT/include/ar.h.
|
|
||||||
hdr := make([]byte, 16+12+6+6+8+10+2)
|
|
||||||
_, err = io.ReadFull(r, hdr)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// leave for debugging
|
|
||||||
if false {
|
|
||||||
fmt.Printf("header: %s", hdr)
|
|
||||||
}
|
|
||||||
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
|
|
||||||
length, err := strconv.Atoi(s)
|
|
||||||
size = int64(length)
|
|
||||||
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
|
|
||||||
err = fmt.Errorf("invalid archive header")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
name = strings.TrimSpace(string(hdr[:16]))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// FindExportData positions the reader r at the beginning of the
|
// FindExportData positions the reader r at the beginning of the
|
||||||
// export data section of an underlying cmd/compile created archive
|
// export data section of an underlying cmd/compile created archive
|
||||||
// file by reading from it. The reader must be positioned at the
|
// file by reading from it. The reader must be positioned at the
|
||||||
// start of the file before calling this function.
|
// start of the file before calling this function.
|
||||||
// The size result is the length of the export data in bytes.
|
// This returns the length of the export data in bytes.
|
||||||
//
|
//
|
||||||
// This function is needed by [gcexportdata.Read], which must
|
// This function is needed by [gcexportdata.Read], which must
|
||||||
// accept inputs produced by the last two releases of cmd/compile,
|
// accept inputs produced by the last two releases of cmd/compile,
|
||||||
// plus tip.
|
// plus tip.
|
||||||
func FindExportData(r *bufio.Reader) (size int64, err error) {
|
func FindExportData(r *bufio.Reader) (size int64, err error) {
|
||||||
|
arsize, err := FindPackageDefinition(r)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
size = int64(arsize)
|
||||||
|
|
||||||
|
objapi, headers, err := ReadObjectHeaders(r)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
size -= int64(len(objapi))
|
||||||
|
for _, h := range headers {
|
||||||
|
size -= int64(len(h))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for the binary export data section header "$$B\n".
|
||||||
|
// TODO(taking): Unify with ReadExportDataHeader so that it stops at the 'u' instead of reading
|
||||||
|
line, err := r.ReadSlice('\n')
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
hdr := string(line)
|
||||||
|
if hdr != "$$B\n" {
|
||||||
|
err = fmt.Errorf("unknown export data header: %q", hdr)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
size -= int64(len(hdr))
|
||||||
|
|
||||||
|
// For files with a binary export data header "$$B\n",
|
||||||
|
// these are always terminated by an end-of-section marker "\n$$\n".
|
||||||
|
// So the last bytes must always be this constant.
|
||||||
|
//
|
||||||
|
// The end-of-section marker is not a part of the export data itself.
|
||||||
|
// Do not include these in size.
|
||||||
|
//
|
||||||
|
// It would be nice to have sanity check that the final bytes after
|
||||||
|
// the export data are indeed the end-of-section marker. The split
|
||||||
|
// of gcexportdata.NewReader and gcexportdata.Read make checking this
|
||||||
|
// ugly so gcimporter gives up enforcing this. The compiler and go/types
|
||||||
|
// importer do enforce this, which seems good enough.
|
||||||
|
const endofsection = "\n$$\n"
|
||||||
|
size -= int64(len(endofsection))
|
||||||
|
|
||||||
|
if size < 0 {
|
||||||
|
err = fmt.Errorf("invalid size (%d) in the archive file: %d bytes remain without section headers (recompile package)", arsize, size)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadUnified reads the contents of the unified export data from a reader r
|
||||||
|
// that contains the contents of a GC-created archive file.
|
||||||
|
//
|
||||||
|
// On success, the reader will be positioned after the end-of-section marker "\n$$\n".
|
||||||
|
//
|
||||||
|
// Supported GC-created archive files have 4 layers of nesting:
|
||||||
|
// - An archive file containing a package definition file.
|
||||||
|
// - The package definition file contains headers followed by a data section.
|
||||||
|
// Headers are lines (≤ 4kb) that do not start with "$$".
|
||||||
|
// - The data section starts with "$$B\n" followed by export data followed
|
||||||
|
// by an end of section marker "\n$$\n". (The section start "$$\n" is no
|
||||||
|
// longer supported.)
|
||||||
|
// - The export data starts with a format byte ('u') followed by the <data> in
|
||||||
|
// the given format. (See ReadExportDataHeader for older formats.)
|
||||||
|
//
|
||||||
|
// Putting this together, the bytes in a GC-created archive files are expected
|
||||||
|
// to look like the following.
|
||||||
|
// See cmd/internal/archive for more details on ar file headers.
|
||||||
|
//
|
||||||
|
// | <!arch>\n | ar file signature
|
||||||
|
// | __.PKGDEF...size...\n | ar header for __.PKGDEF including size.
|
||||||
|
// | go object <...>\n | objabi header
|
||||||
|
// | <optional headers>\n | other headers such as build id
|
||||||
|
// | $$B\n | binary format marker
|
||||||
|
// | u<data>\n | unified export <data>
|
||||||
|
// | $$\n | end-of-section marker
|
||||||
|
// | [optional padding] | padding byte (0x0A) if size is odd
|
||||||
|
// | [ar file header] | other ar files
|
||||||
|
// | [ar file data] |
|
||||||
|
func ReadUnified(r *bufio.Reader) (data []byte, err error) {
|
||||||
|
// We historically guaranteed headers at the default buffer size (4096) work.
|
||||||
|
// This ensures we can use ReadSlice throughout.
|
||||||
|
const minBufferSize = 4096
|
||||||
|
r = bufio.NewReaderSize(r, minBufferSize)
|
||||||
|
|
||||||
|
size, err := FindPackageDefinition(r)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
n := size
|
||||||
|
|
||||||
|
objapi, headers, err := ReadObjectHeaders(r)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
n -= len(objapi)
|
||||||
|
for _, h := range headers {
|
||||||
|
n -= len(h)
|
||||||
|
}
|
||||||
|
|
||||||
|
hdrlen, err := ReadExportDataHeader(r)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
n -= hdrlen
|
||||||
|
|
||||||
|
// size also includes the end of section marker. Remove that many bytes from the end.
|
||||||
|
const marker = "\n$$\n"
|
||||||
|
n -= len(marker)
|
||||||
|
|
||||||
|
if n < 0 {
|
||||||
|
err = fmt.Errorf("invalid size (%d) in the archive file: %d bytes remain without section headers (recompile package)", size, n)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read n bytes from buf.
|
||||||
|
data = make([]byte, n)
|
||||||
|
_, err = io.ReadFull(r, data)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for marker at the end.
|
||||||
|
var suffix [len(marker)]byte
|
||||||
|
_, err = io.ReadFull(r, suffix[:])
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if s := string(suffix[:]); s != marker {
|
||||||
|
err = fmt.Errorf("read %q instead of end-of-section marker (%q)", s, marker)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// FindPackageDefinition positions the reader r at the beginning of a package
|
||||||
|
// definition file ("__.PKGDEF") within a GC-created archive by reading
|
||||||
|
// from it, and returns the size of the package definition file in the archive.
|
||||||
|
//
|
||||||
|
// The reader must be positioned at the start of the archive file before calling
|
||||||
|
// this function, and "__.PKGDEF" is assumed to be the first file in the archive.
|
||||||
|
//
|
||||||
|
// See cmd/internal/archive for details on the archive format.
|
||||||
|
func FindPackageDefinition(r *bufio.Reader) (size int, err error) {
|
||||||
|
// Uses ReadSlice to limit risk of malformed inputs.
|
||||||
|
|
||||||
// Read first line to make sure this is an object file.
|
// Read first line to make sure this is an object file.
|
||||||
line, err := r.ReadSlice('\n')
|
line, err := r.ReadSlice('\n')
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@@ -61,56 +192,230 @@ func FindExportData(r *bufio.Reader) (size int64, err error) {
         return
     }
 
-    // Archive file. Scan to __.PKGDEF.
-    var name string
-    if name, size, err = readGopackHeader(r); err != nil {
-        return
-    }
-    arsize := size
-
-    // First entry should be __.PKGDEF.
-    if name != "__.PKGDEF" {
-        err = fmt.Errorf("go archive is missing __.PKGDEF")
+    // package export block should be first
+    size = readArchiveHeader(r, "__.PKGDEF")
+    if size <= 0 {
+        err = fmt.Errorf("not a package file")
         return
     }
 
-    // Read first line of __.PKGDEF data, so that line
-    // is once again the first line of the input.
+    return
+}
 
+// ReadObjectHeaders reads object headers from the reader. Object headers are
+// lines that do not start with an end-of-section marker "$$". The first header
+// is the objabi header. On success, the reader will be positioned at the beginning
+// of the end-of-section marker.
+//
+// It returns an error if any header does not fit in r.Size() bytes.
+func ReadObjectHeaders(r *bufio.Reader) (objapi string, headers []string, err error) {
+    // line is a temporary buffer for headers.
+    // Use bounded reads (ReadSlice, Peek) to limit risk of malformed inputs.
+    var line []byte
+
+    // objapi header should be the first line
     if line, err = r.ReadSlice('\n'); err != nil {
         err = fmt.Errorf("can't find export data (%v)", err)
         return
     }
-    size -= int64(len(line))
+    objapi = string(line)
 
-    // Now at __.PKGDEF in archive or still at beginning of file.
-    // Either way, line should begin with "go object ".
-    if !strings.HasPrefix(string(line), "go object ") {
-        err = fmt.Errorf("not a Go object file")
+    // objapi header begins with "go object ".
+    if !strings.HasPrefix(objapi, "go object ") {
+        err = fmt.Errorf("not a go object file: %s", objapi)
         return
     }
 
-    // Skip over object headers to get to the export data section header "$$B\n".
-    // Object headers are lines that do not start with '$'.
-    for line[0] != '$' {
-        if line, err = r.ReadSlice('\n'); err != nil {
-            err = fmt.Errorf("can't find export data (%v)", err)
+    // process remaining object header lines
+    for {
+        // check for an end of section marker "$$"
+        line, err = r.Peek(2)
+        if err != nil {
             return
         }
-        size -= int64(len(line))
+        if string(line) == "$$" {
+            return // stop
+        }
+
+        // read next header
+        line, err = r.ReadSlice('\n')
+        if err != nil {
+            return
+        }
+        headers = append(headers, string(line))
+    }
+}
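A note on the objabi header checked above: in practice the first line of a Go object file looks something like "go object linux amd64 go1.24.0 X:none" (GOOS, GOARCH, toolchain version and enabled experiments). That exact layout is an assumption for illustration only; the code relies solely on the "go object " prefix and records the rest of the line verbatim in objapi.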
+
+// ReadExportDataHeader reads the export data header and format from r.
+// It returns the number of bytes read, or an error if the format is no longer
+// supported or it failed to read.
+//
+// The only currently supported format is binary export data in the
+// unified export format.
+func ReadExportDataHeader(r *bufio.Reader) (n int, err error) {
+    // Read export data header.
+    line, err := r.ReadSlice('\n')
+    if err != nil {
+        return
     }
 
-    // Check for the binary export data section header "$$B\n".
     hdr := string(line)
-    if hdr != "$$B\n" {
+    switch hdr {
+    case "$$\n":
+        err = fmt.Errorf("old textual export format no longer supported (recompile package)")
+        return
+
+    case "$$B\n":
+        var format byte
+        format, err = r.ReadByte()
+        if err != nil {
+            return
+        }
+        // The unified export format starts with a 'u'.
+        switch format {
+        case 'u':
+        default:
+            // Older no longer supported export formats include:
+            // indexed export format which started with an 'i'; and
+            // the older binary export format which started with a 'c',
+            // 'd', or 'v' (from "version").
+            err = fmt.Errorf("binary export format %q is no longer supported (recompile package)", format)
+            return
+        }
+
+    default:
         err = fmt.Errorf("unknown export data header: %q", hdr)
         return
     }
-    // TODO(taking): Remove end-of-section marker "\n$$\n" from size.
 
-    if size < 0 {
-        err = fmt.Errorf("invalid size (%d) in the archive file: %d bytes remain without section headers (recompile package)", arsize, size)
+    n = len(hdr) + 1 // + 1 is for 'u'
     return
 }
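The three helpers above (FindPackageDefinition, ReadObjectHeaders, ReadExportDataHeader) are building blocks; the caller that stitches them together is not visible in this excerpt. A rough sketch of how they might be composed, written as if it lived in the same package (needs bufio, io, os); the function name, the example path, and the size bookkeeping are illustrative assumptions, not part of the change:

// readExportPayload is a hypothetical driver showing one way to pull the
// unified export payload out of a gc-created package archive.
func readExportPayload(path string) ([]byte, error) {
    f, err := os.Open(path) // e.g. "example.a", a compiled package archive
    if err != nil {
        return nil, err
    }
    defer f.Close()
    r := bufio.NewReader(f)

    // Position r at the start of __.PKGDEF and learn its size in the archive.
    size, err := FindPackageDefinition(r)
    if err != nil {
        return nil, err
    }

    // Skip the "go object ..." line and any other object headers.
    objapi, headers, err := ReadObjectHeaders(r)
    if err != nil {
        return nil, err
    }

    // Consume the "$$B" marker and the 'u' format byte.
    n, err := ReadExportDataHeader(r)
    if err != nil {
        return nil, err
    }

    // Roughly: what is left of __.PKGDEF after the headers is the payload.
    // A real caller would also account for the trailing "\n$$\n" marker.
    size -= len(objapi) + n
    for _, h := range headers {
        size -= len(h)
    }

    data := make([]byte, size)
    _, err = io.ReadFull(r, data)
    return data, err
}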
|
|
||||||
|
// FindPkg returns the filename and unique package id for an import
|
||||||
|
// path based on package information provided by build.Import (using
|
||||||
|
// the build.Default build.Context). A relative srcDir is interpreted
|
||||||
|
// relative to the current working directory.
|
||||||
|
//
|
||||||
|
// FindPkg is only used in tests within x/tools.
|
||||||
|
func FindPkg(path, srcDir string) (filename, id string, err error) {
|
||||||
|
// TODO(taking): Move internal/exportdata.FindPkg into its own file,
|
||||||
|
// and then this copy into a _test package.
|
||||||
|
if path == "" {
|
||||||
|
return "", "", errors.New("path is empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
var noext string
|
||||||
|
switch {
|
||||||
|
default:
|
||||||
|
// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
|
||||||
|
// Don't require the source files to be present.
|
||||||
|
if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
|
||||||
|
srcDir = abs
|
||||||
|
}
|
||||||
|
var bp *build.Package
|
||||||
|
bp, err = build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
|
||||||
|
if bp.PkgObj == "" {
|
||||||
|
if bp.Goroot && bp.Dir != "" {
|
||||||
|
filename, err = lookupGorootExport(bp.Dir)
|
||||||
|
if err == nil {
|
||||||
|
_, err = os.Stat(filename)
|
||||||
|
}
|
||||||
|
if err == nil {
|
||||||
|
return filename, bp.ImportPath, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
goto notfound
|
||||||
|
} else {
|
||||||
|
noext = strings.TrimSuffix(bp.PkgObj, ".a")
|
||||||
|
}
|
||||||
|
id = bp.ImportPath
|
||||||
|
|
||||||
|
case build.IsLocalImport(path):
|
||||||
|
// "./x" -> "/this/directory/x.ext", "/this/directory/x"
|
||||||
|
noext = filepath.Join(srcDir, path)
|
||||||
|
id = noext
|
||||||
|
|
||||||
|
case filepath.IsAbs(path):
|
||||||
|
// for completeness only - go/build.Import
|
||||||
|
// does not support absolute imports
|
||||||
|
// "/x" -> "/x.ext", "/x"
|
||||||
|
noext = path
|
||||||
|
id = path
|
||||||
|
}
|
||||||
|
|
||||||
|
if false { // for debugging
|
||||||
|
if path != id {
|
||||||
|
fmt.Printf("%s -> %s\n", path, id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// try extensions
|
||||||
|
for _, ext := range pkgExts {
|
||||||
|
filename = noext + ext
|
||||||
|
f, statErr := os.Stat(filename)
|
||||||
|
if statErr == nil && !f.IsDir() {
|
||||||
|
return filename, id, nil
|
||||||
|
}
|
||||||
|
if err == nil {
|
||||||
|
err = statErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
notfound:
|
||||||
|
if err == nil {
|
||||||
|
return "", path, fmt.Errorf("can't find import: %q", path)
|
||||||
|
}
|
||||||
|
return "", path, fmt.Errorf("can't find import: %q: %w", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var pkgExts = [...]string{".a", ".o"} // a file from the build cache will have no extension
|
||||||
|
|
||||||
|
var exportMap sync.Map // package dir → func() (string, error)
|
||||||
|
|
||||||
|
// lookupGorootExport returns the location of the export data
|
||||||
|
// (normally found in the build cache, but located in GOROOT/pkg
|
||||||
|
// in prior Go releases) for the package located in pkgDir.
|
||||||
|
//
|
||||||
|
// (We use the package's directory instead of its import path
|
||||||
|
// mainly to simplify handling of the packages in src/vendor
|
||||||
|
// and cmd/vendor.)
|
||||||
|
//
|
||||||
|
// lookupGorootExport is only used in tests within x/tools.
|
||||||
|
func lookupGorootExport(pkgDir string) (string, error) {
|
||||||
|
f, ok := exportMap.Load(pkgDir)
|
||||||
|
if !ok {
|
||||||
|
var (
|
||||||
|
listOnce sync.Once
|
||||||
|
exportPath string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
f, _ = exportMap.LoadOrStore(pkgDir, func() (string, error) {
|
||||||
|
listOnce.Do(func() {
|
||||||
|
cmd := exec.Command(filepath.Join(build.Default.GOROOT, "bin", "go"), "list", "-export", "-f", "{{.Export}}", pkgDir)
|
||||||
|
cmd.Dir = build.Default.GOROOT
|
||||||
|
cmd.Env = append(os.Environ(), "PWD="+cmd.Dir, "GOROOT="+build.Default.GOROOT)
|
||||||
|
var output []byte
|
||||||
|
output, err = cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
if ee, ok := err.(*exec.ExitError); ok && len(ee.Stderr) > 0 {
|
||||||
|
err = errors.New(string(ee.Stderr))
|
||||||
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
exports := strings.Split(string(bytes.TrimSpace(output)), "\n")
|
||||||
|
if len(exports) != 1 {
|
||||||
|
err = fmt.Errorf("go list reported %d exports; expected 1", len(exports))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
exportPath = exports[0]
|
||||||
|
})
|
||||||
|
|
||||||
|
return exportPath, err
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return f.(func() (string, error))()
|
||||||
|
}
|
||||||
|
|
|
@ -23,17 +23,11 @@ package gcimporter // import "golang.org/x/tools/internal/gcimporter"
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"bytes"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/build"
|
|
||||||
"go/token"
|
"go/token"
|
||||||
"go/types"
|
"go/types"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
@ -45,127 +39,14 @@ const (
|
||||||
trace = false
|
trace = false
|
||||||
)
|
)
|
||||||
|
|
||||||
var exportMap sync.Map // package dir → func() (string, bool)
|
|
||||||
|
|
||||||
// lookupGorootExport returns the location of the export data
|
|
||||||
// (normally found in the build cache, but located in GOROOT/pkg
|
|
||||||
// in prior Go releases) for the package located in pkgDir.
|
|
||||||
//
|
|
||||||
// (We use the package's directory instead of its import path
|
|
||||||
// mainly to simplify handling of the packages in src/vendor
|
|
||||||
// and cmd/vendor.)
|
|
||||||
func lookupGorootExport(pkgDir string) (string, bool) {
|
|
||||||
f, ok := exportMap.Load(pkgDir)
|
|
||||||
if !ok {
|
|
||||||
var (
|
|
||||||
listOnce sync.Once
|
|
||||||
exportPath string
|
|
||||||
)
|
|
||||||
f, _ = exportMap.LoadOrStore(pkgDir, func() (string, bool) {
|
|
||||||
listOnce.Do(func() {
|
|
||||||
cmd := exec.Command("go", "list", "-export", "-f", "{{.Export}}", pkgDir)
|
|
||||||
cmd.Dir = build.Default.GOROOT
|
|
||||||
var output []byte
|
|
||||||
output, err := cmd.Output()
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
exports := strings.Split(string(bytes.TrimSpace(output)), "\n")
|
|
||||||
if len(exports) != 1 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
exportPath = exports[0]
|
|
||||||
})
|
|
||||||
|
|
||||||
return exportPath, exportPath != ""
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return f.(func() (string, bool))()
|
|
||||||
}
|
|
||||||
|
|
||||||
var pkgExts = [...]string{".a", ".o"}
|
|
||||||
|
|
||||||
// FindPkg returns the filename and unique package id for an import
|
|
||||||
// path based on package information provided by build.Import (using
|
|
||||||
// the build.Default build.Context). A relative srcDir is interpreted
|
|
||||||
// relative to the current working directory.
|
|
||||||
// If no file was found, an empty filename is returned.
|
|
||||||
func FindPkg(path, srcDir string) (filename, id string) {
|
|
||||||
if path == "" {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var noext string
|
|
||||||
switch {
|
|
||||||
default:
|
|
||||||
// "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
|
|
||||||
// Don't require the source files to be present.
|
|
||||||
if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
|
|
||||||
srcDir = abs
|
|
||||||
}
|
|
||||||
bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
|
|
||||||
if bp.PkgObj == "" {
|
|
||||||
var ok bool
|
|
||||||
if bp.Goroot && bp.Dir != "" {
|
|
||||||
filename, ok = lookupGorootExport(bp.Dir)
|
|
||||||
}
|
|
||||||
if !ok {
|
|
||||||
id = path // make sure we have an id to print in error message
|
|
||||||
return
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
noext = strings.TrimSuffix(bp.PkgObj, ".a")
|
|
||||||
id = bp.ImportPath
|
|
||||||
}
|
|
||||||
|
|
||||||
case build.IsLocalImport(path):
|
|
||||||
// "./x" -> "/this/directory/x.ext", "/this/directory/x"
|
|
||||||
noext = filepath.Join(srcDir, path)
|
|
||||||
id = noext
|
|
||||||
|
|
||||||
case filepath.IsAbs(path):
|
|
||||||
// for completeness only - go/build.Import
|
|
||||||
// does not support absolute imports
|
|
||||||
// "/x" -> "/x.ext", "/x"
|
|
||||||
noext = path
|
|
||||||
id = path
|
|
||||||
}
|
|
||||||
|
|
||||||
if false { // for debugging
|
|
||||||
if path != id {
|
|
||||||
fmt.Printf("%s -> %s\n", path, id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if filename != "" {
|
|
||||||
if f, err := os.Stat(filename); err == nil && !f.IsDir() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// try extensions
|
|
||||||
for _, ext := range pkgExts {
|
|
||||||
filename = noext + ext
|
|
||||||
if f, err := os.Stat(filename); err == nil && !f.IsDir() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
filename = "" // not found
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Import imports a gc-generated package given its import path and srcDir, adds
|
// Import imports a gc-generated package given its import path and srcDir, adds
|
||||||
// the corresponding package object to the packages map, and returns the object.
|
// the corresponding package object to the packages map, and returns the object.
|
||||||
// The packages map must contain all packages already imported.
|
// The packages map must contain all packages already imported.
|
||||||
//
|
//
|
||||||
// TODO(taking): Import is only used in tests. Move to gcimporter_test.
|
// Import is only used in tests.
|
||||||
func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
|
func Import(fset *token.FileSet, packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
|
||||||
var rc io.ReadCloser
|
var rc io.ReadCloser
|
||||||
var filename, id string
|
var id string
|
||||||
if lookup != nil {
|
if lookup != nil {
|
||||||
// With custom lookup specified, assume that caller has
|
// With custom lookup specified, assume that caller has
|
||||||
// converted path to a canonical import path for use in the map.
|
// converted path to a canonical import path for use in the map.
|
||||||
|
@ -184,12 +65,13 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
|
||||||
}
|
}
|
||||||
rc = f
|
rc = f
|
||||||
} else {
|
} else {
|
||||||
filename, id = FindPkg(path, srcDir)
|
var filename string
|
||||||
|
filename, id, err = FindPkg(path, srcDir)
|
||||||
if filename == "" {
|
if filename == "" {
|
||||||
if path == "unsafe" {
|
if path == "unsafe" {
|
||||||
return types.Unsafe, nil
|
return types.Unsafe, nil
|
||||||
}
|
}
|
||||||
return nil, fmt.Errorf("can't find import: %q", id)
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// no need to re-import if the package was imported completely before
|
// no need to re-import if the package was imported completely before
|
||||||
|
@ -212,54 +94,15 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
|
||||||
}
|
}
|
||||||
defer rc.Close()
|
defer rc.Close()
|
||||||
|
|
||||||
var size int64
|
|
||||||
buf := bufio.NewReader(rc)
|
buf := bufio.NewReader(rc)
|
||||||
if size, err = FindExportData(buf); err != nil {
|
data, err := ReadUnified(buf)
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var data []byte
|
|
||||||
data, err = io.ReadAll(buf)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
err = fmt.Errorf("import %q: %v", path, err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if len(data) == 0 {
|
|
||||||
return nil, fmt.Errorf("no data to load a package from for path %s", id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO(gri): allow clients of go/importer to provide a FileSet.
|
|
||||||
// Or, define a new standard go/types/gcexportdata package.
|
|
||||||
fset := token.NewFileSet()
|
|
||||||
|
|
||||||
// Select appropriate importer.
|
|
||||||
switch data[0] {
|
|
||||||
case 'v', 'c', 'd':
|
|
||||||
// binary: emitted by cmd/compile till go1.10; obsolete.
|
|
||||||
return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0])
|
|
||||||
|
|
||||||
case 'i':
|
|
||||||
// indexed: emitted by cmd/compile till go1.19;
|
|
||||||
// now used only for serializing go/types.
|
|
||||||
// See https://github.com/golang/go/issues/69491.
|
|
||||||
_, pkg, err := IImportData(fset, packages, data[1:], id)
|
|
||||||
return pkg, err
|
|
||||||
|
|
||||||
case 'u':
|
|
||||||
// unified: emitted by cmd/compile since go1.20.
|
// unified: emitted by cmd/compile since go1.20.
|
||||||
_, pkg, err := UImportData(fset, packages, data[1:size], id)
|
_, pkg, err = UImportData(fset, packages, data, id)
|
||||||
return pkg, err
|
|
||||||
|
|
||||||
default:
|
return
|
||||||
l := len(data)
|
|
||||||
if l > 10 {
|
|
||||||
l = 10
|
|
||||||
}
|
}
|
||||||
return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type byPath []*types.Package
|
|
||||||
|
|
||||||
func (a byPath) Len() int { return len(a) }
|
|
||||||
func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
|
||||||
func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
|
|
||||||
|
|
|
@ -5,8 +5,6 @@
|
||||||
// Indexed package import.
|
// Indexed package import.
|
||||||
// See iexport.go for the export data format.
|
// See iexport.go for the export data format.
|
||||||
|
|
||||||
// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
|
|
||||||
|
|
||||||
package gcimporter
|
package gcimporter
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
@ -673,7 +671,9 @@ func (r *importReader) obj(name string) {
|
||||||
case varTag:
|
case varTag:
|
||||||
typ := r.typ()
|
typ := r.typ()
|
||||||
|
|
||||||
r.declare(types.NewVar(pos, r.currPkg, name, typ))
|
v := types.NewVar(pos, r.currPkg, name, typ)
|
||||||
|
typesinternal.SetVarKind(v, typesinternal.PackageVar)
|
||||||
|
r.declare(v)
|
||||||
|
|
||||||
default:
|
default:
|
||||||
errorf("unexpected tag: %v", tag)
|
errorf("unexpected tag: %v", tag)
|
||||||
|
@ -1111,3 +1111,9 @@ func (r *importReader) byte() byte {
|
||||||
}
|
}
|
||||||
return x
|
return x
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type byPath []*types.Package
|
||||||
|
|
||||||
|
func (a byPath) Len() int { return len(a) }
|
||||||
|
func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||||
|
func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
|
||||||
|
|
|
@@ -0,0 +1,30 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gcimporter
+
+import (
+    "bufio"
+    "io"
+    "strconv"
+    "strings"
+)
+
+// Copy of $GOROOT/src/cmd/internal/archive.ReadHeader.
+func readArchiveHeader(b *bufio.Reader, name string) int {
+    // architecture-independent object file output
+    const HeaderSize = 60
+
+    var buf [HeaderSize]byte
+    if _, err := io.ReadFull(b, buf[:]); err != nil {
+        return -1
+    }
+    aname := strings.Trim(string(buf[0:16]), " ")
+    if !strings.HasPrefix(aname, name) {
+        return -1
+    }
+    asize := strings.Trim(string(buf[48:58]), " ")
+    i, _ := strconv.Atoi(asize)
+    return i
+}
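readArchiveHeader above parses the classic 60-byte Unix ar entry header: 16 bytes of name, 12 of mtime, 6 each of uid and gid, 8 of mode, 10 of decimal size, then the "`\n" terminator, which is why the name is taken from buf[0:16] and the size from buf[48:58]. A small self-contained sketch (it would have to live in the same package, since the function is unexported; needs bufio, fmt, strings; the payload size 1234 is made up):

// exampleReadArchiveHeader builds a fake ar entry header for "__.PKGDEF"
// and parses it back.
func exampleReadArchiveHeader() {
    hdr := fmt.Sprintf("%-16s%-12s%-6s%-6s%-8s%-10s`\n",
        "__.PKGDEF", "0", "0", "0", "644", "1234") // name, mtime, uid, gid, mode, size
    r := bufio.NewReader(strings.NewReader(hdr))
    fmt.Println(readArchiveHeader(r, "__.PKGDEF")) // prints 1234
}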
@ -11,10 +11,10 @@ import (
|
||||||
"go/token"
|
"go/token"
|
||||||
"go/types"
|
"go/types"
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
|
||||||
|
|
||||||
"golang.org/x/tools/internal/aliases"
|
"golang.org/x/tools/internal/aliases"
|
||||||
"golang.org/x/tools/internal/pkgbits"
|
"golang.org/x/tools/internal/pkgbits"
|
||||||
|
"golang.org/x/tools/internal/typesinternal"
|
||||||
)
|
)
|
||||||
|
|
||||||
// A pkgReader holds the shared state for reading a unified IR package
|
// A pkgReader holds the shared state for reading a unified IR package
|
||||||
|
@ -71,7 +71,6 @@ func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []
|
||||||
}
|
}
|
||||||
|
|
||||||
s := string(data)
|
s := string(data)
|
||||||
s = s[:strings.LastIndex(s, "\n$$\n")]
|
|
||||||
input := pkgbits.NewPkgDecoder(path, s)
|
input := pkgbits.NewPkgDecoder(path, s)
|
||||||
pkg = readUnifiedPackage(fset, nil, imports, input)
|
pkg = readUnifiedPackage(fset, nil, imports, input)
|
||||||
return
|
return
|
||||||
|
@ -266,7 +265,12 @@ func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package {
|
||||||
func (r *reader) doPkg() *types.Package {
|
func (r *reader) doPkg() *types.Package {
|
||||||
path := r.String()
|
path := r.String()
|
||||||
switch path {
|
switch path {
|
||||||
case "":
|
// cmd/compile emits path="main" for main packages because
|
||||||
|
// that's the linker symbol prefix it used; but we need
|
||||||
|
// the package's path as it would be reported by go list,
|
||||||
|
// hence "main" below.
|
||||||
|
// See test at go/packages.TestMainPackagePathInModeTypes.
|
||||||
|
case "", "main":
|
||||||
path = r.p.PkgPath()
|
path = r.p.PkgPath()
|
||||||
case "builtin":
|
case "builtin":
|
||||||
return nil // universe
|
return nil // universe
|
||||||
|
@ -569,6 +573,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
|
||||||
sig := fn.Type().(*types.Signature)
|
sig := fn.Type().(*types.Signature)
|
||||||
|
|
||||||
recv := types.NewVar(fn.Pos(), fn.Pkg(), "", named)
|
recv := types.NewVar(fn.Pos(), fn.Pkg(), "", named)
|
||||||
|
typesinternal.SetVarKind(recv, typesinternal.RecvVar)
|
||||||
methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignature(recv, sig.Params(), sig.Results(), sig.Variadic()))
|
methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignature(recv, sig.Params(), sig.Results(), sig.Variadic()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -616,7 +621,9 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
|
||||||
case pkgbits.ObjVar:
|
case pkgbits.ObjVar:
|
||||||
pos := r.pos()
|
pos := r.pos()
|
||||||
typ := r.typ()
|
typ := r.typ()
|
||||||
declare(types.NewVar(pos, objPkg, objName, typ))
|
v := types.NewVar(pos, objPkg, objName, typ)
|
||||||
|
typesinternal.SetVarKind(v, typesinternal.PackageVar)
|
||||||
|
declare(v)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -28,7 +28,7 @@ import (
|
||||||
"golang.org/x/tools/internal/event/label"
|
"golang.org/x/tools/internal/event/label"
|
||||||
)
|
)
|
||||||
|
|
||||||
// An Runner will run go command invocations and serialize
|
// A Runner will run go command invocations and serialize
|
||||||
// them if it sees a concurrency error.
|
// them if it sees a concurrency error.
|
||||||
type Runner struct {
|
type Runner struct {
|
||||||
// once guards the runner initialization.
|
// once guards the runner initialization.
|
||||||
|
@ -179,7 +179,7 @@ type Invocation struct {
|
||||||
CleanEnv bool
|
CleanEnv bool
|
||||||
Env []string
|
Env []string
|
||||||
WorkingDir string
|
WorkingDir string
|
||||||
Logf func(format string, args ...interface{})
|
Logf func(format string, args ...any)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Postcondition: both error results have same nilness.
|
// Postcondition: both error results have same nilness.
|
||||||
|
@ -388,7 +388,9 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) {
|
||||||
case err := <-resChan:
|
case err := <-resChan:
|
||||||
return err
|
return err
|
||||||
case <-timer.C:
|
case <-timer.C:
|
||||||
HandleHangingGoCommand(startTime, cmd)
|
// HandleHangingGoCommand terminates this process.
|
||||||
|
// Pass off resChan in case we can collect the command error.
|
||||||
|
handleHangingGoCommand(startTime, cmd, resChan)
|
||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
@ -413,8 +415,6 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Didn't shut down in response to interrupt. Kill it hard.
|
// Didn't shut down in response to interrupt. Kill it hard.
|
||||||
// TODO(rfindley): per advice from bcmills@, it may be better to send SIGQUIT
|
|
||||||
// on certain platforms, such as unix.
|
|
||||||
if err := cmd.Process.Kill(); err != nil && !errors.Is(err, os.ErrProcessDone) && debug {
|
if err := cmd.Process.Kill(); err != nil && !errors.Is(err, os.ErrProcessDone) && debug {
|
||||||
log.Printf("error killing the Go command: %v", err)
|
log.Printf("error killing the Go command: %v", err)
|
||||||
}
|
}
|
||||||
|
@ -422,9 +422,11 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) {
|
||||||
return <-resChan
|
return <-resChan
|
||||||
}
|
}
|
||||||
|
|
||||||
func HandleHangingGoCommand(start time.Time, cmd *exec.Cmd) {
|
// handleHangingGoCommand outputs debugging information to help diagnose the
|
||||||
|
// cause of a hanging Go command, and then exits with log.Fatalf.
|
||||||
|
func handleHangingGoCommand(start time.Time, cmd *exec.Cmd, resChan chan error) {
|
||||||
switch runtime.GOOS {
|
switch runtime.GOOS {
|
||||||
case "linux", "darwin", "freebsd", "netbsd":
|
case "linux", "darwin", "freebsd", "netbsd", "openbsd":
|
||||||
fmt.Fprintln(os.Stderr, `DETECTED A HANGING GO COMMAND
|
fmt.Fprintln(os.Stderr, `DETECTED A HANGING GO COMMAND
|
||||||
|
|
||||||
The gopls test runner has detected a hanging go command. In order to debug
|
The gopls test runner has detected a hanging go command. In order to debug
|
||||||
|
@ -438,7 +440,7 @@ See golang/go#54461 for more details.`)
|
||||||
psCmd.Stdout = os.Stderr
|
psCmd.Stdout = os.Stderr
|
||||||
psCmd.Stderr = os.Stderr
|
psCmd.Stderr = os.Stderr
|
||||||
if err := psCmd.Run(); err != nil {
|
if err := psCmd.Run(); err != nil {
|
||||||
panic(fmt.Sprintf("running ps: %v", err))
|
log.Printf("Handling hanging Go command: running ps: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
listFiles := "lsof"
|
listFiles := "lsof"
|
||||||
|
@ -452,10 +454,24 @@ See golang/go#54461 for more details.`)
|
||||||
listFilesCmd.Stdout = os.Stderr
|
listFilesCmd.Stdout = os.Stderr
|
||||||
listFilesCmd.Stderr = os.Stderr
|
listFilesCmd.Stderr = os.Stderr
|
||||||
if err := listFilesCmd.Run(); err != nil {
|
if err := listFilesCmd.Run(); err != nil {
|
||||||
panic(fmt.Sprintf("running %s: %v", listFiles, err))
|
log.Printf("Handling hanging Go command: running %s: %v", listFiles, err)
|
||||||
|
}
|
||||||
|
// Try to extract information about the slow go process by issuing a SIGQUIT.
|
||||||
|
if err := cmd.Process.Signal(sigStuckProcess); err == nil {
|
||||||
|
select {
|
||||||
|
case err := <-resChan:
|
||||||
|
stderr := "not a bytes.Buffer"
|
||||||
|
if buf, _ := cmd.Stderr.(*bytes.Buffer); buf != nil {
|
||||||
|
stderr = buf.String()
|
||||||
|
}
|
||||||
|
log.Printf("Quit hanging go command:\n\terr:%v\n\tstderr:\n%v\n\n", err, stderr)
|
||||||
|
case <-time.After(5 * time.Second):
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
log.Printf("Sending signal %d to hanging go command: %v", sigStuckProcess, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
panic(fmt.Sprintf("detected hanging go command (golang/go#54461); waited %s\n\tcommand:%s\n\tpid:%d", time.Since(start), cmd, cmd.Process.Pid))
|
log.Fatalf("detected hanging go command (golang/go#54461); waited %s\n\tcommand:%s\n\tpid:%d", time.Since(start), cmd, cmd.Process.Pid)
|
||||||
}
|
}
|
||||||
|
|
||||||
func cmdDebugStr(cmd *exec.Cmd) string {
|
func cmdDebugStr(cmd *exec.Cmd) string {
|
||||||
|
|
|
@@ -0,0 +1,13 @@
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !unix
+
+package gocommand
+
+import "os"
+
+// sigStuckProcess is the signal to send to kill a hanging subprocess.
+// On Unix we send SIGQUIT, but on non-Unix we only have os.Kill.
+var sigStuckProcess = os.Kill
@@ -0,0 +1,13 @@
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build unix
+
+package gocommand
+
+import "syscall"
+
+// Sigstuckprocess is the signal to send to kill a hanging subprocess.
+// Send SIGQUIT to get a stack trace.
+var sigStuckProcess = syscall.SIGQUIT
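Design note on the two files above: on Unix the hung subprocess is sent SIGQUIT because the Go runtime's default SIGQUIT handler dumps the stacks of all goroutines before exiting, which is exactly the diagnostic handleHangingGoCommand then tries to read back from the command's stderr buffer. On non-Unix platforms there is no portable equivalent, so os.Kill is used and no stack trace is available there.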
@ -780,7 +780,7 @@ func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix
|
||||||
return true
|
return true
|
||||||
},
|
},
|
||||||
dirFound: func(pkg *pkg) bool {
|
dirFound: func(pkg *pkg) bool {
|
||||||
if !canUse(filename, pkg.dir) {
|
if !CanUse(filename, pkg.dir) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
// Try the assumed package name first, then a simpler path match
|
// Try the assumed package name first, then a simpler path match
|
||||||
|
@ -815,7 +815,7 @@ func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix,
|
||||||
return true
|
return true
|
||||||
},
|
},
|
||||||
dirFound: func(pkg *pkg) bool {
|
dirFound: func(pkg *pkg) bool {
|
||||||
if !canUse(filename, pkg.dir) {
|
if !CanUse(filename, pkg.dir) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
return strings.HasPrefix(pkg.importPathShort, searchPrefix)
|
return strings.HasPrefix(pkg.importPathShort, searchPrefix)
|
||||||
|
@ -927,7 +927,7 @@ type ProcessEnv struct {
|
||||||
WorkingDir string
|
WorkingDir string
|
||||||
|
|
||||||
// If Logf is non-nil, debug logging is enabled through this function.
|
// If Logf is non-nil, debug logging is enabled through this function.
|
||||||
Logf func(format string, args ...interface{})
|
Logf func(format string, args ...any)
|
||||||
|
|
||||||
// If set, ModCache holds a shared cache of directory info to use across
|
// If set, ModCache holds a shared cache of directory info to use across
|
||||||
// multiple ProcessEnvs.
|
// multiple ProcessEnvs.
|
||||||
|
@ -1132,6 +1132,9 @@ func addStdlibCandidates(pass *pass, refs References) error {
|
||||||
// but we have no way of figuring out what the user is using
|
// but we have no way of figuring out what the user is using
|
||||||
// TODO: investigate using the toolchain version to disambiguate in the stdlib
|
// TODO: investigate using the toolchain version to disambiguate in the stdlib
|
||||||
add("math/rand/v2")
|
add("math/rand/v2")
|
||||||
|
// math/rand has an overlapping API
|
||||||
|
// TestIssue66407 fails without this
|
||||||
|
add("math/rand")
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
for importPath := range stdlib.PackageSymbols {
|
for importPath := range stdlib.PackageSymbols {
|
||||||
|
@ -1736,7 +1739,7 @@ func (s *symbolSearcher) searchOne(ctx context.Context, c pkgDistance, symbols m
|
||||||
// searching for "client.New")
|
// searching for "client.New")
|
||||||
func pkgIsCandidate(filename string, refs References, pkg *pkg) bool {
|
func pkgIsCandidate(filename string, refs References, pkg *pkg) bool {
|
||||||
// Check "internal" and "vendor" visibility:
|
// Check "internal" and "vendor" visibility:
|
||||||
if !canUse(filename, pkg.dir) {
|
if !CanUse(filename, pkg.dir) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1759,9 +1762,9 @@ func pkgIsCandidate(filename string, refs References, pkg *pkg) bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// canUse reports whether the package in dir is usable from filename,
|
// CanUse reports whether the package in dir is usable from filename,
|
||||||
// respecting the Go "internal" and "vendor" visibility rules.
|
// respecting the Go "internal" and "vendor" visibility rules.
|
||||||
func canUse(filename, dir string) bool {
|
func CanUse(filename, dir string) bool {
|
||||||
// Fast path check, before any allocations. If it doesn't contain vendor
|
// Fast path check, before any allocations. If it doesn't contain vendor
|
||||||
// or internal, it's not tricky:
|
// or internal, it's not tricky:
|
||||||
// Note that this can false-negative on directories like "notinternal",
|
// Note that this can false-negative on directories like "notinternal",
|
||||||
|
|
|
@ -67,7 +67,7 @@ func (s *ProcessEnvSource) ResolveReferences(ctx context.Context, filename strin
|
||||||
// same package name. Don't try to import ourselves.
|
// same package name. Don't try to import ourselves.
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
if !canUse(filename, pkg.dir) {
|
if !CanUse(filename, pkg.dir) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
mu.Lock()
|
mu.Lock()
|
||||||
|
|
|
@ -17,6 +17,7 @@ import (
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -85,6 +86,28 @@ type Entry struct {
|
||||||
Names []string // exported names and information
|
Names []string // exported names and information
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// IndexDir is where the module index is stored.
|
||||||
|
var IndexDir string
|
||||||
|
|
||||||
|
// Set IndexDir
|
||||||
|
func init() {
|
||||||
|
var dir string
|
||||||
|
var err error
|
||||||
|
if testing.Testing() {
|
||||||
|
dir = os.TempDir()
|
||||||
|
} else {
|
||||||
|
dir, err = os.UserCacheDir()
|
||||||
|
// shouldn't happen, but TempDir is better than
|
||||||
|
// creating ./go/imports
|
||||||
|
if err != nil {
|
||||||
|
dir = os.TempDir()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
dir = filepath.Join(dir, "go", "imports")
|
||||||
|
os.MkdirAll(dir, 0777)
|
||||||
|
IndexDir = dir
|
||||||
|
}
|
||||||
|
|
||||||
// ReadIndex reads the latest version of the on-disk index
|
// ReadIndex reads the latest version of the on-disk index
|
||||||
// for the cache directory cd.
|
// for the cache directory cd.
|
||||||
// It returns (nil, nil) if there is no index, but returns
|
// It returns (nil, nil) if there is no index, but returns
|
||||||
|
@ -95,10 +118,7 @@ func ReadIndex(cachedir string) (*Index, error) {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
cd := Abspath(cachedir)
|
cd := Abspath(cachedir)
|
||||||
dir, err := IndexDir()
|
dir := IndexDir
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
base := indexNameBase(cd)
|
base := indexNameBase(cd)
|
||||||
iname := filepath.Join(dir, base)
|
iname := filepath.Join(dir, base)
|
||||||
buf, err := os.ReadFile(iname)
|
buf, err := os.ReadFile(iname)
|
||||||
|
@ -185,12 +205,8 @@ func readIndexFrom(cd Abspath, bx io.Reader) (*Index, error) {
|
||||||
|
|
||||||
// write the index as a text file
|
// write the index as a text file
|
||||||
func writeIndex(cachedir Abspath, ix *Index) error {
|
func writeIndex(cachedir Abspath, ix *Index) error {
|
||||||
dir, err := IndexDir()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
ipat := fmt.Sprintf("index-%d-*", CurrentVersion)
|
ipat := fmt.Sprintf("index-%d-*", CurrentVersion)
|
||||||
fd, err := os.CreateTemp(dir, ipat)
|
fd, err := os.CreateTemp(IndexDir, ipat)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err // can this happen?
|
return err // can this happen?
|
||||||
}
|
}
|
||||||
|
@ -201,7 +217,7 @@ func writeIndex(cachedir Abspath, ix *Index) error {
|
||||||
content := fd.Name()
|
content := fd.Name()
|
||||||
content = filepath.Base(content)
|
content = filepath.Base(content)
|
||||||
base := indexNameBase(cachedir)
|
base := indexNameBase(cachedir)
|
||||||
nm := filepath.Join(dir, base)
|
nm := filepath.Join(IndexDir, base)
|
||||||
err = os.WriteFile(nm, []byte(content), 0666)
|
err = os.WriteFile(nm, []byte(content), 0666)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
@ -241,18 +257,6 @@ func writeIndexToFile(x *Index, fd *os.File) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// tests can override this
|
|
||||||
var IndexDir = indexDir
|
|
||||||
|
|
||||||
// IndexDir computes the directory containing the index
|
|
||||||
func indexDir() (string, error) {
|
|
||||||
dir, err := os.UserCacheDir()
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("cannot open UserCacheDir, %w", err)
|
|
||||||
}
|
|
||||||
return filepath.Join(dir, "go", "imports"), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// return the base name of the file containing the name of the current index
|
// return the base name of the file containing the name of the current index
|
||||||
func indexNameBase(cachedir Abspath) string {
|
func indexNameBase(cachedir Abspath) string {
|
||||||
// crc64 is a way to convert path names into 16 hex digits.
|
// crc64 is a way to convert path names into 16 hex digits.
|
||||||
|
|
|
@@ -16,6 +16,7 @@ type Candidate struct {
     Dir        string
     ImportPath string
     Type       LexType
+    Deprecated bool
     // information for Funcs
     Results int16   // how many results
     Sig     []Field // arg names and types
@@ -34,6 +35,36 @@
     Func
 )
 
+// LookupAll only returns those Candidates whose import path
+// finds all the nms.
+func (ix *Index) LookupAll(pkg string, names ...string) map[string][]Candidate {
+    // this can be made faster when benchmarks show that it needs to be
+    names = uniquify(names)
+    byImpPath := make(map[string][]Candidate)
+    for _, nm := range names {
+        cands := ix.Lookup(pkg, nm, false)
+        for _, c := range cands {
+            byImpPath[c.ImportPath] = append(byImpPath[c.ImportPath], c)
+        }
+    }
+    for k, v := range byImpPath {
+        if len(v) != len(names) {
+            delete(byImpPath, k)
+        }
+    }
+    return byImpPath
+}
+
+// remove duplicates
+func uniquify(in []string) []string {
+    if len(in) == 0 {
+        return in
+    }
+    in = slices.Clone(in)
+    slices.Sort(in)
+    return slices.Compact(in)
+}
+
 // Lookup finds all the symbols in the index with the given PkgName and name.
 // If prefix is true, it finds all of these with name as a prefix.
 func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate {
@@ -79,8 +110,9 @@ func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate {
             Dir:        string(e.Dir),
             ImportPath: e.ImportPath,
             Type:       asLexType(flds[1][0]),
+            Deprecated: len(flds[1]) > 1 && flds[1][1] == 'D',
         }
-        if flds[1] == "F" {
+        if px.Type == Func {
             n, err := strconv.Atoi(flds[2])
             if err != nil {
                 continue // should never happen
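The new LookupAll is a thin intersection over Lookup: it keeps only the import paths that provide every requested symbol. A minimal usage sketch (hypothetical caller inside x/tools, since modindex is internal; it assumes ix was loaded earlier, for example via ReadIndex, and uses only "fmt" beyond this package; the package name and symbol names are examples):

// randProviders prints the import paths whose package "rand" exports
// both Int and Shuffle, using the Candidate fields shown in the diff above.
func randProviders(ix *Index) {
    byPath := ix.LookupAll("rand", "Int", "Shuffle")
    for importPath, cands := range byPath {
        fmt.Printf("%s (%d matching symbols, first deprecated=%v)\n",
            importPath, len(cands), cands[0].Deprecated)
    }
}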
@ -111,6 +143,7 @@ func toFields(sig []string) []Field {
|
||||||
}
|
}
|
||||||
|
|
||||||
// benchmarks show this is measurably better than strings.Split
|
// benchmarks show this is measurably better than strings.Split
|
||||||
|
// split into first 4 fields separated by single space
|
||||||
func fastSplit(x string) []string {
|
func fastSplit(x string) []string {
|
||||||
ans := make([]string, 0, 4)
|
ans := make([]string, 0, 4)
|
||||||
nxt := 0
|
nxt := 0
|
||||||
|
|
|
@ -12,6 +12,7 @@ import (
|
||||||
"go/types"
|
"go/types"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
@ -19,29 +20,30 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
// The name of a symbol contains information about the symbol:
|
// The name of a symbol contains information about the symbol:
|
||||||
// <name> T for types
|
// <name> T for types, TD if the type is deprecated
|
||||||
// <name> C for consts
|
// <name> C for consts, CD if the const is deprecated
|
||||||
// <name> V for vars
|
// <name> V for vars, VD if the var is deprecated
|
||||||
// and for funcs: <name> F <num of return values> (<arg-name> <arg-type>)*
|
// and for funcs: <name> F <num of return values> (<arg-name> <arg-type>)*
|
||||||
// any spaces in <arg-type> are replaced by $s so that the fields
|
// any spaces in <arg-type> are replaced by $s so that the fields
|
||||||
// of the name are space separated
|
// of the name are space separated. F is replaced by FD if the func
|
||||||
|
// is deprecated.
|
||||||
type symbol struct {
|
type symbol struct {
|
||||||
pkg string // name of the symbols's package
|
pkg string // name of the symbols's package
|
||||||
name string // declared name
|
name string // declared name
|
||||||
kind string // T, C, V, or F
|
kind string // T, C, V, or F, follwed by D if deprecated
|
||||||
sig string // signature information, for F
|
sig string // signature information, for F
|
||||||
}
|
}
|
||||||
|
|
||||||
// find the symbols for the best directories
|
// find the symbols for the best directories
|
||||||
func getSymbols(cd Abspath, dirs map[string][]*directory) {
|
func getSymbols(cd Abspath, dirs map[string][]*directory) {
|
||||||
var g errgroup.Group
|
var g errgroup.Group
|
||||||
g.SetLimit(-1) // maybe throttle this some day
|
g.SetLimit(max(2, runtime.GOMAXPROCS(0)/2))
|
||||||
for _, vv := range dirs {
|
for _, vv := range dirs {
|
||||||
// throttling some day?
|
// throttling some day?
|
||||||
d := vv[0]
|
d := vv[0]
|
||||||
g.Go(func() error {
|
g.Go(func() error {
|
||||||
thedir := filepath.Join(string(cd), string(d.path))
|
thedir := filepath.Join(string(cd), string(d.path))
|
||||||
mode := parser.SkipObjectResolution
|
mode := parser.SkipObjectResolution | parser.ParseComments
|
||||||
|
|
||||||
fi, err := os.ReadDir(thedir)
|
fi, err := os.ReadDir(thedir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -84,6 +86,9 @@ func getFileExports(f *ast.File) []symbol {
|
||||||
// generic functions just like non-generic ones.
|
// generic functions just like non-generic ones.
|
||||||
sig := dtype.Params
|
sig := dtype.Params
|
||||||
kind := "F"
|
kind := "F"
|
||||||
|
if isDeprecated(decl.Doc) {
|
||||||
|
kind += "D"
|
||||||
|
}
|
||||||
result := []string{fmt.Sprintf("%d", dtype.Results.NumFields())}
|
result := []string{fmt.Sprintf("%d", dtype.Results.NumFields())}
|
||||||
for _, x := range sig.List {
|
for _, x := range sig.List {
|
||||||
// This code creates a string representing the type.
|
// This code creates a string representing the type.
|
||||||
|
@ -107,7 +112,7 @@ func getFileExports(f *ast.File) []symbol {
|
||||||
// print struct tags. So for this to happen the type of a formal parameter
|
// print struct tags. So for this to happen the type of a formal parameter
|
||||||
// has to be a explict struct, e.g. foo(x struct{a int "$"}) and ExprString
|
// has to be a explict struct, e.g. foo(x struct{a int "$"}) and ExprString
|
||||||
// would have to show the struct tag. Even testing for this case seems
|
// would have to show the struct tag. Even testing for this case seems
|
||||||
// a waste of effort, but let's not ignore such pathologies
|
// a waste of effort, but let's remember the possibility
|
||||||
if strings.Contains(tp, "$") {
|
if strings.Contains(tp, "$") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
@ -127,12 +132,16 @@ func getFileExports(f *ast.File) []symbol {
|
||||||
ans = append(ans, *s)
|
ans = append(ans, *s)
|
||||||
}
|
}
|
||||||
case *ast.GenDecl:
|
case *ast.GenDecl:
|
||||||
|
depr := isDeprecated(decl.Doc)
|
||||||
switch decl.Tok {
|
switch decl.Tok {
|
||||||
case token.CONST, token.VAR:
|
case token.CONST, token.VAR:
|
||||||
tp := "V"
|
tp := "V"
|
||||||
if decl.Tok == token.CONST {
|
if decl.Tok == token.CONST {
|
||||||
tp = "C"
|
tp = "C"
|
||||||
}
|
}
|
||||||
|
if depr {
|
||||||
|
tp += "D"
|
||||||
|
}
|
||||||
for _, sp := range decl.Specs {
|
for _, sp := range decl.Specs {
|
||||||
for _, x := range sp.(*ast.ValueSpec).Names {
|
for _, x := range sp.(*ast.ValueSpec).Names {
|
||||||
if s := newsym(pkg, x.Name, tp, ""); s != nil {
|
if s := newsym(pkg, x.Name, tp, ""); s != nil {
|
||||||
|
@ -141,8 +150,12 @@ func getFileExports(f *ast.File) []symbol {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case token.TYPE:
|
case token.TYPE:
|
||||||
|
tp := "T"
|
||||||
|
if depr {
|
||||||
|
tp += "D"
|
||||||
|
}
|
||||||
for _, sp := range decl.Specs {
|
for _, sp := range decl.Specs {
|
||||||
if s := newsym(pkg, sp.(*ast.TypeSpec).Name.Name, "T", ""); s != nil {
|
if s := newsym(pkg, sp.(*ast.TypeSpec).Name.Name, tp, ""); s != nil {
|
||||||
ans = append(ans, *s)
|
ans = append(ans, *s)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -160,6 +173,22 @@ func newsym(pkg, name, kind, sig string) *symbol {
     return &sym
 }
+
+func isDeprecated(doc *ast.CommentGroup) bool {
+    if doc == nil {
+        return false
+    }
+    // go.dev/wiki/Deprecated Paragraph starting 'Deprecated:'
+    // This code fails for /* Deprecated: */, but it's the code from
+    // gopls/internal/analysis/deprecated
+    lines := strings.Split(doc.Text(), "\n\n")
+    for _, line := range lines {
+        if strings.HasPrefix(line, "Deprecated:") {
+            return true
+        }
+    }
+    return false
+}
 
 // return the package name and the value for the symbols.
 // if there are multiple packages, choose one arbitrarily
 // the returned slice is sorted lexicographically
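For context on the new deprecation marking (the "TD", "CD", "VD", "FD" kind strings documented earlier in this file's diff): isDeprecated follows the go.dev/wiki/Deprecated convention, so a symbol is flagged only when a paragraph of its doc comment begins with "Deprecated:". A minimal, made-up example of a symbol that would be flagged:

// Frobnicate is a made-up symbol used only to show the doc convention
// that isDeprecated recognizes.
//
// Deprecated: use Frob instead.
func Frobnicate() {}

Because doc.Text() is split on blank lines, a "Deprecated:" that merely appears in the middle of a paragraph is not detected.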
@@ -5,7 +5,7 @@
 // Package packagesinternal exposes internal-only fields from go/packages.
 package packagesinternal
 
-var GetDepsErrors = func(p interface{}) []*PackageError { return nil }
+var GetDepsErrors = func(p any) []*PackageError { return nil }
 
 type PackageError struct {
     ImportStack []string // shortest path from package named on command line to this one
@@ -16,5 +16,5 @@ type PackageError struct {
 var TypecheckCgo int
 var DepsErrors int // must be set as a LoadMode to call GetDepsErrors
 
-var SetModFlag = func(config interface{}, value string) {}
+var SetModFlag = func(config any, value string) {}
 var SetModFile = func(config interface{}, value string) {}
@ -268,6 +268,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"ErrTooLarge", Var, 0},
|
{"ErrTooLarge", Var, 0},
|
||||||
{"Fields", Func, 0},
|
{"Fields", Func, 0},
|
||||||
{"FieldsFunc", Func, 0},
|
{"FieldsFunc", Func, 0},
|
||||||
|
{"FieldsFuncSeq", Func, 24},
|
||||||
|
{"FieldsSeq", Func, 24},
|
||||||
{"HasPrefix", Func, 0},
|
{"HasPrefix", Func, 0},
|
||||||
{"HasSuffix", Func, 0},
|
{"HasSuffix", Func, 0},
|
||||||
{"Index", Func, 0},
|
{"Index", Func, 0},
|
||||||
|
@ -280,6 +282,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"LastIndexAny", Func, 0},
|
{"LastIndexAny", Func, 0},
|
||||||
{"LastIndexByte", Func, 5},
|
{"LastIndexByte", Func, 5},
|
||||||
{"LastIndexFunc", Func, 0},
|
{"LastIndexFunc", Func, 0},
|
||||||
|
{"Lines", Func, 24},
|
||||||
{"Map", Func, 0},
|
{"Map", Func, 0},
|
||||||
{"MinRead", Const, 0},
|
{"MinRead", Const, 0},
|
||||||
{"NewBuffer", Func, 0},
|
{"NewBuffer", Func, 0},
|
||||||
|
@ -293,7 +296,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Split", Func, 0},
|
{"Split", Func, 0},
|
||||||
{"SplitAfter", Func, 0},
|
{"SplitAfter", Func, 0},
|
||||||
{"SplitAfterN", Func, 0},
|
{"SplitAfterN", Func, 0},
|
||||||
|
{"SplitAfterSeq", Func, 24},
|
||||||
{"SplitN", Func, 0},
|
{"SplitN", Func, 0},
|
||||||
|
{"SplitSeq", Func, 24},
|
||||||
{"Title", Func, 0},
|
{"Title", Func, 0},
|
||||||
{"ToLower", Func, 0},
|
{"ToLower", Func, 0},
|
||||||
{"ToLowerSpecial", Func, 0},
|
{"ToLowerSpecial", Func, 0},
|
||||||
|
@ -535,6 +540,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"NewCTR", Func, 0},
|
{"NewCTR", Func, 0},
|
||||||
{"NewGCM", Func, 2},
|
{"NewGCM", Func, 2},
|
||||||
{"NewGCMWithNonceSize", Func, 5},
|
{"NewGCMWithNonceSize", Func, 5},
|
||||||
|
{"NewGCMWithRandomNonce", Func, 24},
|
||||||
{"NewGCMWithTagSize", Func, 11},
|
{"NewGCMWithTagSize", Func, 11},
|
||||||
{"NewOFB", Func, 0},
|
{"NewOFB", Func, 0},
|
||||||
{"Stream", Type, 0},
|
{"Stream", Type, 0},
|
||||||
|
@ -673,6 +679,14 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Unmarshal", Func, 0},
|
{"Unmarshal", Func, 0},
|
||||||
{"UnmarshalCompressed", Func, 15},
|
{"UnmarshalCompressed", Func, 15},
|
||||||
},
|
},
|
||||||
|
"crypto/fips140": {
|
||||||
|
{"Enabled", Func, 24},
|
||||||
|
},
|
||||||
|
"crypto/hkdf": {
|
||||||
|
{"Expand", Func, 24},
|
||||||
|
{"Extract", Func, 24},
|
||||||
|
{"Key", Func, 24},
|
||||||
|
},
|
||||||
"crypto/hmac": {
|
"crypto/hmac": {
|
||||||
{"Equal", Func, 1},
|
{"Equal", Func, 1},
|
||||||
{"New", Func, 0},
|
{"New", Func, 0},
|
||||||
|
@ -683,11 +697,43 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Size", Const, 0},
|
{"Size", Const, 0},
|
||||||
{"Sum", Func, 2},
|
{"Sum", Func, 2},
|
||||||
},
|
},
|
||||||
|
"crypto/mlkem": {
|
||||||
|
{"(*DecapsulationKey1024).Bytes", Method, 24},
|
||||||
|
{"(*DecapsulationKey1024).Decapsulate", Method, 24},
|
||||||
|
{"(*DecapsulationKey1024).EncapsulationKey", Method, 24},
|
||||||
|
{"(*DecapsulationKey768).Bytes", Method, 24},
|
||||||
|
{"(*DecapsulationKey768).Decapsulate", Method, 24},
|
||||||
|
{"(*DecapsulationKey768).EncapsulationKey", Method, 24},
|
||||||
|
{"(*EncapsulationKey1024).Bytes", Method, 24},
|
||||||
|
{"(*EncapsulationKey1024).Encapsulate", Method, 24},
|
||||||
|
{"(*EncapsulationKey768).Bytes", Method, 24},
|
||||||
|
{"(*EncapsulationKey768).Encapsulate", Method, 24},
|
||||||
|
{"CiphertextSize1024", Const, 24},
|
||||||
|
{"CiphertextSize768", Const, 24},
|
||||||
|
{"DecapsulationKey1024", Type, 24},
|
||||||
|
{"DecapsulationKey768", Type, 24},
|
||||||
|
{"EncapsulationKey1024", Type, 24},
|
||||||
|
{"EncapsulationKey768", Type, 24},
|
||||||
|
{"EncapsulationKeySize1024", Const, 24},
|
||||||
|
{"EncapsulationKeySize768", Const, 24},
|
||||||
|
{"GenerateKey1024", Func, 24},
|
||||||
|
{"GenerateKey768", Func, 24},
|
||||||
|
{"NewDecapsulationKey1024", Func, 24},
|
||||||
|
{"NewDecapsulationKey768", Func, 24},
|
||||||
|
{"NewEncapsulationKey1024", Func, 24},
|
||||||
|
{"NewEncapsulationKey768", Func, 24},
|
||||||
|
{"SeedSize", Const, 24},
|
||||||
|
{"SharedKeySize", Const, 24},
|
||||||
|
},
|
||||||
|
"crypto/pbkdf2": {
|
||||||
|
{"Key", Func, 24},
|
||||||
|
},
|
||||||
"crypto/rand": {
|
"crypto/rand": {
|
||||||
{"Int", Func, 0},
|
{"Int", Func, 0},
|
||||||
{"Prime", Func, 0},
|
{"Prime", Func, 0},
|
||||||
{"Read", Func, 0},
|
{"Read", Func, 0},
|
||||||
{"Reader", Var, 0},
|
{"Reader", Var, 0},
|
||||||
|
{"Text", Func, 24},
|
||||||
},
|
},
|
||||||
"crypto/rc4": {
|
"crypto/rc4": {
|
||||||
{"(*Cipher).Reset", Method, 0},
|
{"(*Cipher).Reset", Method, 0},
|
||||||
|
@ -766,6 +812,39 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Sum224", Func, 2},
|
{"Sum224", Func, 2},
|
||||||
{"Sum256", Func, 2},
|
{"Sum256", Func, 2},
|
||||||
},
|
},
|
||||||
|
"crypto/sha3": {
|
||||||
|
{"(*SHA3).AppendBinary", Method, 24},
|
||||||
|
{"(*SHA3).BlockSize", Method, 24},
|
||||||
|
{"(*SHA3).MarshalBinary", Method, 24},
|
||||||
|
{"(*SHA3).Reset", Method, 24},
|
||||||
|
{"(*SHA3).Size", Method, 24},
|
||||||
|
{"(*SHA3).Sum", Method, 24},
|
||||||
|
{"(*SHA3).UnmarshalBinary", Method, 24},
|
||||||
|
{"(*SHA3).Write", Method, 24},
|
||||||
|
{"(*SHAKE).AppendBinary", Method, 24},
|
||||||
|
{"(*SHAKE).BlockSize", Method, 24},
|
||||||
|
{"(*SHAKE).MarshalBinary", Method, 24},
|
||||||
|
{"(*SHAKE).Read", Method, 24},
|
||||||
|
{"(*SHAKE).Reset", Method, 24},
|
||||||
|
{"(*SHAKE).UnmarshalBinary", Method, 24},
|
||||||
|
{"(*SHAKE).Write", Method, 24},
|
||||||
|
{"New224", Func, 24},
|
||||||
|
{"New256", Func, 24},
|
||||||
|
{"New384", Func, 24},
|
||||||
|
{"New512", Func, 24},
|
||||||
|
{"NewCSHAKE128", Func, 24},
|
||||||
|
{"NewCSHAKE256", Func, 24},
|
||||||
|
{"NewSHAKE128", Func, 24},
|
||||||
|
{"NewSHAKE256", Func, 24},
|
||||||
|
{"SHA3", Type, 24},
|
||||||
|
{"SHAKE", Type, 24},
|
||||||
|
{"Sum224", Func, 24},
|
||||||
|
{"Sum256", Func, 24},
|
||||||
|
{"Sum384", Func, 24},
|
||||||
|
{"Sum512", Func, 24},
|
||||||
|
{"SumSHAKE128", Func, 24},
|
||||||
|
{"SumSHAKE256", Func, 24},
|
||||||
|
},
|
||||||
"crypto/sha512": {
|
"crypto/sha512": {
|
||||||
{"BlockSize", Const, 0},
|
{"BlockSize", Const, 0},
|
||||||
{"New", Func, 0},
|
{"New", Func, 0},
|
||||||
|
@ -788,6 +867,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"ConstantTimeEq", Func, 0},
|
{"ConstantTimeEq", Func, 0},
|
||||||
{"ConstantTimeLessOrEq", Func, 2},
|
{"ConstantTimeLessOrEq", Func, 2},
|
||||||
{"ConstantTimeSelect", Func, 0},
|
{"ConstantTimeSelect", Func, 0},
|
||||||
|
{"WithDataIndependentTiming", Func, 24},
|
||||||
{"XORBytes", Func, 20},
|
{"XORBytes", Func, 20},
|
||||||
},
|
},
|
||||||
"crypto/tls": {
|
"crypto/tls": {
|
||||||
|
@ -864,6 +944,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"ClientHelloInfo", Type, 4},
|
{"ClientHelloInfo", Type, 4},
|
||||||
{"ClientHelloInfo.CipherSuites", Field, 4},
|
{"ClientHelloInfo.CipherSuites", Field, 4},
|
||||||
{"ClientHelloInfo.Conn", Field, 8},
|
{"ClientHelloInfo.Conn", Field, 8},
|
||||||
|
{"ClientHelloInfo.Extensions", Field, 24},
|
||||||
{"ClientHelloInfo.ServerName", Field, 4},
|
{"ClientHelloInfo.ServerName", Field, 4},
|
||||||
{"ClientHelloInfo.SignatureSchemes", Field, 8},
|
{"ClientHelloInfo.SignatureSchemes", Field, 8},
|
||||||
{"ClientHelloInfo.SupportedCurves", Field, 4},
|
{"ClientHelloInfo.SupportedCurves", Field, 4},
|
||||||
|
@ -881,6 +962,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Config.CurvePreferences", Field, 3},
|
{"Config.CurvePreferences", Field, 3},
|
||||||
{"Config.DynamicRecordSizingDisabled", Field, 7},
|
{"Config.DynamicRecordSizingDisabled", Field, 7},
|
||||||
{"Config.EncryptedClientHelloConfigList", Field, 23},
|
{"Config.EncryptedClientHelloConfigList", Field, 23},
|
||||||
|
{"Config.EncryptedClientHelloKeys", Field, 24},
|
||||||
{"Config.EncryptedClientHelloRejectionVerify", Field, 23},
|
{"Config.EncryptedClientHelloRejectionVerify", Field, 23},
|
||||||
{"Config.GetCertificate", Field, 4},
|
{"Config.GetCertificate", Field, 4},
|
||||||
{"Config.GetClientCertificate", Field, 8},
|
{"Config.GetClientCertificate", Field, 8},
|
||||||
|
@ -934,6 +1016,10 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"ECHRejectionError", Type, 23},
|
{"ECHRejectionError", Type, 23},
|
||||||
{"ECHRejectionError.RetryConfigList", Field, 23},
|
{"ECHRejectionError.RetryConfigList", Field, 23},
|
||||||
{"Ed25519", Const, 13},
|
{"Ed25519", Const, 13},
|
||||||
|
{"EncryptedClientHelloKey", Type, 24},
|
||||||
|
{"EncryptedClientHelloKey.Config", Field, 24},
|
||||||
|
{"EncryptedClientHelloKey.PrivateKey", Field, 24},
|
||||||
|
{"EncryptedClientHelloKey.SendAsRetry", Field, 24},
|
||||||
{"InsecureCipherSuites", Func, 14},
|
{"InsecureCipherSuites", Func, 14},
|
||||||
{"Listen", Func, 0},
|
{"Listen", Func, 0},
|
||||||
{"LoadX509KeyPair", Func, 0},
|
{"LoadX509KeyPair", Func, 0},
|
||||||
|
@ -1032,6 +1118,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"VersionTLS12", Const, 2},
|
{"VersionTLS12", Const, 2},
|
||||||
{"VersionTLS13", Const, 12},
|
{"VersionTLS13", Const, 12},
|
||||||
{"X25519", Const, 8},
|
{"X25519", Const, 8},
|
||||||
|
{"X25519MLKEM768", Const, 24},
|
||||||
{"X509KeyPair", Func, 0},
|
{"X509KeyPair", Func, 0},
|
||||||
},
|
},
|
||||||
"crypto/x509": {
|
"crypto/x509": {
|
||||||
|
@ -1056,6 +1143,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(ConstraintViolationError).Error", Method, 0},
|
{"(ConstraintViolationError).Error", Method, 0},
|
||||||
{"(HostnameError).Error", Method, 0},
|
{"(HostnameError).Error", Method, 0},
|
||||||
{"(InsecureAlgorithmError).Error", Method, 6},
|
{"(InsecureAlgorithmError).Error", Method, 6},
|
||||||
|
{"(OID).AppendBinary", Method, 24},
|
||||||
|
{"(OID).AppendText", Method, 24},
|
||||||
{"(OID).Equal", Method, 22},
|
{"(OID).Equal", Method, 22},
|
||||||
{"(OID).EqualASN1OID", Method, 22},
|
{"(OID).EqualASN1OID", Method, 22},
|
||||||
{"(OID).MarshalBinary", Method, 23},
|
{"(OID).MarshalBinary", Method, 23},
|
||||||
|
@ -1084,6 +1173,10 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Certificate.Extensions", Field, 2},
|
{"Certificate.Extensions", Field, 2},
|
||||||
{"Certificate.ExtraExtensions", Field, 2},
|
{"Certificate.ExtraExtensions", Field, 2},
|
||||||
{"Certificate.IPAddresses", Field, 1},
|
{"Certificate.IPAddresses", Field, 1},
|
||||||
|
{"Certificate.InhibitAnyPolicy", Field, 24},
|
||||||
|
{"Certificate.InhibitAnyPolicyZero", Field, 24},
|
||||||
|
{"Certificate.InhibitPolicyMapping", Field, 24},
|
||||||
|
{"Certificate.InhibitPolicyMappingZero", Field, 24},
|
||||||
{"Certificate.IsCA", Field, 0},
|
{"Certificate.IsCA", Field, 0},
|
||||||
{"Certificate.Issuer", Field, 0},
|
{"Certificate.Issuer", Field, 0},
|
||||||
{"Certificate.IssuingCertificateURL", Field, 2},
|
{"Certificate.IssuingCertificateURL", Field, 2},
|
||||||
|
@ -1100,6 +1193,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Certificate.PermittedURIDomains", Field, 10},
|
{"Certificate.PermittedURIDomains", Field, 10},
|
||||||
{"Certificate.Policies", Field, 22},
|
{"Certificate.Policies", Field, 22},
|
||||||
{"Certificate.PolicyIdentifiers", Field, 0},
|
{"Certificate.PolicyIdentifiers", Field, 0},
|
||||||
|
{"Certificate.PolicyMappings", Field, 24},
|
||||||
{"Certificate.PublicKey", Field, 0},
|
{"Certificate.PublicKey", Field, 0},
|
||||||
{"Certificate.PublicKeyAlgorithm", Field, 0},
|
{"Certificate.PublicKeyAlgorithm", Field, 0},
|
||||||
{"Certificate.Raw", Field, 0},
|
{"Certificate.Raw", Field, 0},
|
||||||
|
@ -1107,6 +1201,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Certificate.RawSubject", Field, 0},
|
{"Certificate.RawSubject", Field, 0},
|
||||||
{"Certificate.RawSubjectPublicKeyInfo", Field, 0},
|
{"Certificate.RawSubjectPublicKeyInfo", Field, 0},
|
||||||
{"Certificate.RawTBSCertificate", Field, 0},
|
{"Certificate.RawTBSCertificate", Field, 0},
|
||||||
|
{"Certificate.RequireExplicitPolicy", Field, 24},
|
||||||
|
{"Certificate.RequireExplicitPolicyZero", Field, 24},
|
||||||
{"Certificate.SerialNumber", Field, 0},
|
{"Certificate.SerialNumber", Field, 0},
|
||||||
{"Certificate.Signature", Field, 0},
|
{"Certificate.Signature", Field, 0},
|
||||||
{"Certificate.SignatureAlgorithm", Field, 0},
|
{"Certificate.SignatureAlgorithm", Field, 0},
|
||||||
|
@ -1198,6 +1294,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"NameConstraintsWithoutSANs", Const, 10},
|
{"NameConstraintsWithoutSANs", Const, 10},
|
||||||
{"NameMismatch", Const, 8},
|
{"NameMismatch", Const, 8},
|
||||||
{"NewCertPool", Func, 0},
|
{"NewCertPool", Func, 0},
|
||||||
|
{"NoValidChains", Const, 24},
|
||||||
{"NotAuthorizedToSign", Const, 0},
|
{"NotAuthorizedToSign", Const, 0},
|
||||||
{"OID", Type, 22},
|
{"OID", Type, 22},
|
||||||
{"OIDFromInts", Func, 22},
|
{"OIDFromInts", Func, 22},
|
||||||
|
@ -1219,6 +1316,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"ParsePKCS8PrivateKey", Func, 0},
|
{"ParsePKCS8PrivateKey", Func, 0},
|
||||||
{"ParsePKIXPublicKey", Func, 0},
|
{"ParsePKIXPublicKey", Func, 0},
|
||||||
{"ParseRevocationList", Func, 19},
|
{"ParseRevocationList", Func, 19},
|
||||||
|
{"PolicyMapping", Type, 24},
|
||||||
|
{"PolicyMapping.IssuerDomainPolicy", Field, 24},
|
||||||
|
{"PolicyMapping.SubjectDomainPolicy", Field, 24},
|
||||||
{"PublicKeyAlgorithm", Type, 0},
|
{"PublicKeyAlgorithm", Type, 0},
|
||||||
{"PureEd25519", Const, 13},
|
{"PureEd25519", Const, 13},
|
||||||
{"RSA", Const, 0},
|
{"RSA", Const, 0},
|
||||||
|
@ -1265,6 +1365,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"UnknownPublicKeyAlgorithm", Const, 0},
|
{"UnknownPublicKeyAlgorithm", Const, 0},
|
||||||
{"UnknownSignatureAlgorithm", Const, 0},
|
{"UnknownSignatureAlgorithm", Const, 0},
|
||||||
{"VerifyOptions", Type, 0},
|
{"VerifyOptions", Type, 0},
|
||||||
|
{"VerifyOptions.CertificatePolicies", Field, 24},
|
||||||
{"VerifyOptions.CurrentTime", Field, 0},
|
{"VerifyOptions.CurrentTime", Field, 0},
|
||||||
{"VerifyOptions.DNSName", Field, 0},
|
{"VerifyOptions.DNSName", Field, 0},
|
||||||
{"VerifyOptions.Intermediates", Field, 0},
|
{"VerifyOptions.Intermediates", Field, 0},
|
||||||
|
@ -1975,6 +2076,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*File).DynString", Method, 1},
|
{"(*File).DynString", Method, 1},
|
||||||
{"(*File).DynValue", Method, 21},
|
{"(*File).DynValue", Method, 21},
|
||||||
{"(*File).DynamicSymbols", Method, 4},
|
{"(*File).DynamicSymbols", Method, 4},
|
||||||
|
{"(*File).DynamicVersionNeeds", Method, 24},
|
||||||
|
{"(*File).DynamicVersions", Method, 24},
|
||||||
{"(*File).ImportedLibraries", Method, 0},
|
{"(*File).ImportedLibraries", Method, 0},
|
||||||
{"(*File).ImportedSymbols", Method, 0},
|
{"(*File).ImportedSymbols", Method, 0},
|
||||||
{"(*File).Section", Method, 0},
|
{"(*File).Section", Method, 0},
|
||||||
|
@ -2240,6 +2343,19 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"DynFlag", Type, 0},
|
{"DynFlag", Type, 0},
|
||||||
{"DynFlag1", Type, 21},
|
{"DynFlag1", Type, 21},
|
||||||
{"DynTag", Type, 0},
|
{"DynTag", Type, 0},
|
||||||
|
{"DynamicVersion", Type, 24},
|
||||||
|
{"DynamicVersion.Deps", Field, 24},
|
||||||
|
{"DynamicVersion.Flags", Field, 24},
|
||||||
|
{"DynamicVersion.Index", Field, 24},
|
||||||
|
{"DynamicVersion.Name", Field, 24},
|
||||||
|
{"DynamicVersionDep", Type, 24},
|
||||||
|
{"DynamicVersionDep.Dep", Field, 24},
|
||||||
|
{"DynamicVersionDep.Flags", Field, 24},
|
||||||
|
{"DynamicVersionDep.Index", Field, 24},
|
||||||
|
{"DynamicVersionFlag", Type, 24},
|
||||||
|
{"DynamicVersionNeed", Type, 24},
|
||||||
|
{"DynamicVersionNeed.Name", Field, 24},
|
||||||
|
{"DynamicVersionNeed.Needs", Field, 24},
|
||||||
{"EI_ABIVERSION", Const, 0},
|
{"EI_ABIVERSION", Const, 0},
|
||||||
{"EI_CLASS", Const, 0},
|
{"EI_CLASS", Const, 0},
|
||||||
{"EI_DATA", Const, 0},
|
{"EI_DATA", Const, 0},
|
||||||
|
@ -3726,8 +3842,19 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Symbol.Size", Field, 0},
|
{"Symbol.Size", Field, 0},
|
||||||
{"Symbol.Value", Field, 0},
|
{"Symbol.Value", Field, 0},
|
||||||
{"Symbol.Version", Field, 13},
|
{"Symbol.Version", Field, 13},
|
||||||
|
{"Symbol.VersionIndex", Field, 24},
|
||||||
|
{"Symbol.VersionScope", Field, 24},
|
||||||
|
{"SymbolVersionScope", Type, 24},
|
||||||
{"Type", Type, 0},
|
{"Type", Type, 0},
|
||||||
|
{"VER_FLG_BASE", Const, 24},
|
||||||
|
{"VER_FLG_INFO", Const, 24},
|
||||||
|
{"VER_FLG_WEAK", Const, 24},
|
||||||
{"Version", Type, 0},
|
{"Version", Type, 0},
|
||||||
|
{"VersionScopeGlobal", Const, 24},
|
||||||
|
{"VersionScopeHidden", Const, 24},
|
||||||
|
{"VersionScopeLocal", Const, 24},
|
||||||
|
{"VersionScopeNone", Const, 24},
|
||||||
|
{"VersionScopeSpecific", Const, 24},
|
||||||
},
|
},
|
||||||
"debug/gosym": {
|
"debug/gosym": {
|
||||||
{"(*DecodingError).Error", Method, 0},
|
{"(*DecodingError).Error", Method, 0},
|
||||||
|
@ -4453,8 +4580,10 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"FS", Type, 16},
|
{"FS", Type, 16},
|
||||||
},
|
},
|
||||||
"encoding": {
|
"encoding": {
|
||||||
|
{"BinaryAppender", Type, 24},
|
||||||
{"BinaryMarshaler", Type, 2},
|
{"BinaryMarshaler", Type, 2},
|
||||||
{"BinaryUnmarshaler", Type, 2},
|
{"BinaryUnmarshaler", Type, 2},
|
||||||
|
{"TextAppender", Type, 24},
|
||||||
{"TextMarshaler", Type, 2},
|
{"TextMarshaler", Type, 2},
|
||||||
{"TextUnmarshaler", Type, 2},
|
{"TextUnmarshaler", Type, 2},
|
||||||
},
|
},
|
||||||
|
@ -5984,13 +6113,16 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Interface).Complete", Method, 5},
|
{"(*Interface).Complete", Method, 5},
|
||||||
{"(*Interface).Embedded", Method, 5},
|
{"(*Interface).Embedded", Method, 5},
|
||||||
{"(*Interface).EmbeddedType", Method, 11},
|
{"(*Interface).EmbeddedType", Method, 11},
|
||||||
|
{"(*Interface).EmbeddedTypes", Method, 24},
|
||||||
{"(*Interface).Empty", Method, 5},
|
{"(*Interface).Empty", Method, 5},
|
||||||
{"(*Interface).ExplicitMethod", Method, 5},
|
{"(*Interface).ExplicitMethod", Method, 5},
|
||||||
|
{"(*Interface).ExplicitMethods", Method, 24},
|
||||||
{"(*Interface).IsComparable", Method, 18},
|
{"(*Interface).IsComparable", Method, 18},
|
||||||
{"(*Interface).IsImplicit", Method, 18},
|
{"(*Interface).IsImplicit", Method, 18},
|
||||||
{"(*Interface).IsMethodSet", Method, 18},
|
{"(*Interface).IsMethodSet", Method, 18},
|
||||||
{"(*Interface).MarkImplicit", Method, 18},
|
{"(*Interface).MarkImplicit", Method, 18},
|
||||||
{"(*Interface).Method", Method, 5},
|
{"(*Interface).Method", Method, 5},
|
||||||
|
{"(*Interface).Methods", Method, 24},
|
||||||
{"(*Interface).NumEmbeddeds", Method, 5},
|
{"(*Interface).NumEmbeddeds", Method, 5},
|
||||||
{"(*Interface).NumExplicitMethods", Method, 5},
|
{"(*Interface).NumExplicitMethods", Method, 5},
|
||||||
{"(*Interface).NumMethods", Method, 5},
|
{"(*Interface).NumMethods", Method, 5},
|
||||||
|
@ -6011,9 +6143,11 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*MethodSet).At", Method, 5},
|
{"(*MethodSet).At", Method, 5},
|
||||||
{"(*MethodSet).Len", Method, 5},
|
{"(*MethodSet).Len", Method, 5},
|
||||||
{"(*MethodSet).Lookup", Method, 5},
|
{"(*MethodSet).Lookup", Method, 5},
|
||||||
|
{"(*MethodSet).Methods", Method, 24},
|
||||||
{"(*MethodSet).String", Method, 5},
|
{"(*MethodSet).String", Method, 5},
|
||||||
{"(*Named).AddMethod", Method, 5},
|
{"(*Named).AddMethod", Method, 5},
|
||||||
{"(*Named).Method", Method, 5},
|
{"(*Named).Method", Method, 5},
|
||||||
|
{"(*Named).Methods", Method, 24},
|
||||||
{"(*Named).NumMethods", Method, 5},
|
{"(*Named).NumMethods", Method, 5},
|
||||||
{"(*Named).Obj", Method, 5},
|
{"(*Named).Obj", Method, 5},
|
||||||
{"(*Named).Origin", Method, 18},
|
{"(*Named).Origin", Method, 18},
|
||||||
|
@ -6054,6 +6188,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Pointer).String", Method, 5},
|
{"(*Pointer).String", Method, 5},
|
||||||
{"(*Pointer).Underlying", Method, 5},
|
{"(*Pointer).Underlying", Method, 5},
|
||||||
{"(*Scope).Child", Method, 5},
|
{"(*Scope).Child", Method, 5},
|
||||||
|
{"(*Scope).Children", Method, 24},
|
||||||
{"(*Scope).Contains", Method, 5},
|
{"(*Scope).Contains", Method, 5},
|
||||||
{"(*Scope).End", Method, 5},
|
{"(*Scope).End", Method, 5},
|
||||||
{"(*Scope).Innermost", Method, 5},
|
{"(*Scope).Innermost", Method, 5},
|
||||||
|
@ -6089,6 +6224,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*StdSizes).Offsetsof", Method, 5},
|
{"(*StdSizes).Offsetsof", Method, 5},
|
||||||
{"(*StdSizes).Sizeof", Method, 5},
|
{"(*StdSizes).Sizeof", Method, 5},
|
||||||
{"(*Struct).Field", Method, 5},
|
{"(*Struct).Field", Method, 5},
|
||||||
|
{"(*Struct).Fields", Method, 24},
|
||||||
{"(*Struct).NumFields", Method, 5},
|
{"(*Struct).NumFields", Method, 5},
|
||||||
{"(*Struct).String", Method, 5},
|
{"(*Struct).String", Method, 5},
|
||||||
{"(*Struct).Tag", Method, 5},
|
{"(*Struct).Tag", Method, 5},
|
||||||
|
@ -6100,8 +6236,10 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Tuple).Len", Method, 5},
|
{"(*Tuple).Len", Method, 5},
|
||||||
{"(*Tuple).String", Method, 5},
|
{"(*Tuple).String", Method, 5},
|
||||||
{"(*Tuple).Underlying", Method, 5},
|
{"(*Tuple).Underlying", Method, 5},
|
||||||
|
{"(*Tuple).Variables", Method, 24},
|
||||||
{"(*TypeList).At", Method, 18},
|
{"(*TypeList).At", Method, 18},
|
||||||
{"(*TypeList).Len", Method, 18},
|
{"(*TypeList).Len", Method, 18},
|
||||||
|
{"(*TypeList).Types", Method, 24},
|
||||||
{"(*TypeName).Exported", Method, 5},
|
{"(*TypeName).Exported", Method, 5},
|
||||||
{"(*TypeName).Id", Method, 5},
|
{"(*TypeName).Id", Method, 5},
|
||||||
{"(*TypeName).IsAlias", Method, 9},
|
{"(*TypeName).IsAlias", Method, 9},
|
||||||
|
@ -6119,9 +6257,11 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*TypeParam).Underlying", Method, 18},
|
{"(*TypeParam).Underlying", Method, 18},
|
||||||
{"(*TypeParamList).At", Method, 18},
|
{"(*TypeParamList).At", Method, 18},
|
||||||
{"(*TypeParamList).Len", Method, 18},
|
{"(*TypeParamList).Len", Method, 18},
|
||||||
|
{"(*TypeParamList).TypeParams", Method, 24},
|
||||||
{"(*Union).Len", Method, 18},
|
{"(*Union).Len", Method, 18},
|
||||||
{"(*Union).String", Method, 18},
|
{"(*Union).String", Method, 18},
|
||||||
{"(*Union).Term", Method, 18},
|
{"(*Union).Term", Method, 18},
|
||||||
|
{"(*Union).Terms", Method, 24},
|
||||||
{"(*Union).Underlying", Method, 18},
|
{"(*Union).Underlying", Method, 18},
|
||||||
{"(*Var).Anonymous", Method, 5},
|
{"(*Var).Anonymous", Method, 5},
|
||||||
{"(*Var).Embedded", Method, 11},
|
{"(*Var).Embedded", Method, 11},
|
||||||
|
@ -6392,10 +6532,12 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Hash).WriteByte", Method, 14},
|
{"(*Hash).WriteByte", Method, 14},
|
||||||
{"(*Hash).WriteString", Method, 14},
|
{"(*Hash).WriteString", Method, 14},
|
||||||
{"Bytes", Func, 19},
|
{"Bytes", Func, 19},
|
||||||
|
{"Comparable", Func, 24},
|
||||||
{"Hash", Type, 14},
|
{"Hash", Type, 14},
|
||||||
{"MakeSeed", Func, 14},
|
{"MakeSeed", Func, 14},
|
||||||
{"Seed", Type, 14},
|
{"Seed", Type, 14},
|
||||||
{"String", Func, 19},
|
{"String", Func, 19},
|
||||||
|
{"WriteComparable", Func, 24},
|
||||||
},
|
},
|
||||||
"html": {
|
"html": {
|
||||||
{"EscapeString", Func, 0},
|
{"EscapeString", Func, 0},
|
||||||
|
@ -7082,6 +7224,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*JSONHandler).WithGroup", Method, 21},
|
{"(*JSONHandler).WithGroup", Method, 21},
|
||||||
{"(*Level).UnmarshalJSON", Method, 21},
|
{"(*Level).UnmarshalJSON", Method, 21},
|
||||||
{"(*Level).UnmarshalText", Method, 21},
|
{"(*Level).UnmarshalText", Method, 21},
|
||||||
|
{"(*LevelVar).AppendText", Method, 24},
|
||||||
{"(*LevelVar).Level", Method, 21},
|
{"(*LevelVar).Level", Method, 21},
|
||||||
{"(*LevelVar).MarshalText", Method, 21},
|
{"(*LevelVar).MarshalText", Method, 21},
|
||||||
{"(*LevelVar).Set", Method, 21},
|
{"(*LevelVar).Set", Method, 21},
|
||||||
|
@ -7110,6 +7253,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(Attr).Equal", Method, 21},
|
{"(Attr).Equal", Method, 21},
|
||||||
{"(Attr).String", Method, 21},
|
{"(Attr).String", Method, 21},
|
||||||
{"(Kind).String", Method, 21},
|
{"(Kind).String", Method, 21},
|
||||||
|
{"(Level).AppendText", Method, 24},
|
||||||
{"(Level).Level", Method, 21},
|
{"(Level).Level", Method, 21},
|
||||||
{"(Level).MarshalJSON", Method, 21},
|
{"(Level).MarshalJSON", Method, 21},
|
||||||
{"(Level).MarshalText", Method, 21},
|
{"(Level).MarshalText", Method, 21},
|
||||||
|
@ -7140,6 +7284,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Debug", Func, 21},
|
{"Debug", Func, 21},
|
||||||
{"DebugContext", Func, 21},
|
{"DebugContext", Func, 21},
|
||||||
{"Default", Func, 21},
|
{"Default", Func, 21},
|
||||||
|
{"DiscardHandler", Var, 24},
|
||||||
{"Duration", Func, 21},
|
{"Duration", Func, 21},
|
||||||
{"DurationValue", Func, 21},
|
{"DurationValue", Func, 21},
|
||||||
{"Error", Func, 21},
|
{"Error", Func, 21},
|
||||||
|
@ -7375,6 +7520,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Float).Acc", Method, 5},
|
{"(*Float).Acc", Method, 5},
|
||||||
{"(*Float).Add", Method, 5},
|
{"(*Float).Add", Method, 5},
|
||||||
{"(*Float).Append", Method, 5},
|
{"(*Float).Append", Method, 5},
|
||||||
|
{"(*Float).AppendText", Method, 24},
|
||||||
{"(*Float).Cmp", Method, 5},
|
{"(*Float).Cmp", Method, 5},
|
||||||
{"(*Float).Copy", Method, 5},
|
{"(*Float).Copy", Method, 5},
|
||||||
{"(*Float).Float32", Method, 5},
|
{"(*Float).Float32", Method, 5},
|
||||||
|
@ -7421,6 +7567,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Int).And", Method, 0},
|
{"(*Int).And", Method, 0},
|
||||||
{"(*Int).AndNot", Method, 0},
|
{"(*Int).AndNot", Method, 0},
|
||||||
{"(*Int).Append", Method, 6},
|
{"(*Int).Append", Method, 6},
|
||||||
|
{"(*Int).AppendText", Method, 24},
|
||||||
{"(*Int).Binomial", Method, 0},
|
{"(*Int).Binomial", Method, 0},
|
||||||
{"(*Int).Bit", Method, 0},
|
{"(*Int).Bit", Method, 0},
|
||||||
{"(*Int).BitLen", Method, 0},
|
{"(*Int).BitLen", Method, 0},
|
||||||
|
@ -7477,6 +7624,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Int).Xor", Method, 0},
|
{"(*Int).Xor", Method, 0},
|
||||||
{"(*Rat).Abs", Method, 0},
|
{"(*Rat).Abs", Method, 0},
|
||||||
{"(*Rat).Add", Method, 0},
|
{"(*Rat).Add", Method, 0},
|
||||||
|
{"(*Rat).AppendText", Method, 24},
|
||||||
{"(*Rat).Cmp", Method, 0},
|
{"(*Rat).Cmp", Method, 0},
|
||||||
{"(*Rat).Denom", Method, 0},
|
{"(*Rat).Denom", Method, 0},
|
||||||
{"(*Rat).Float32", Method, 4},
|
{"(*Rat).Float32", Method, 4},
|
||||||
|
@ -7659,11 +7807,13 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Zipf", Type, 0},
|
{"Zipf", Type, 0},
|
||||||
},
|
},
|
||||||
"math/rand/v2": {
|
"math/rand/v2": {
|
||||||
|
{"(*ChaCha8).AppendBinary", Method, 24},
|
||||||
{"(*ChaCha8).MarshalBinary", Method, 22},
|
{"(*ChaCha8).MarshalBinary", Method, 22},
|
||||||
{"(*ChaCha8).Read", Method, 23},
|
{"(*ChaCha8).Read", Method, 23},
|
||||||
{"(*ChaCha8).Seed", Method, 22},
|
{"(*ChaCha8).Seed", Method, 22},
|
||||||
{"(*ChaCha8).Uint64", Method, 22},
|
{"(*ChaCha8).Uint64", Method, 22},
|
||||||
{"(*ChaCha8).UnmarshalBinary", Method, 22},
|
{"(*ChaCha8).UnmarshalBinary", Method, 22},
|
||||||
|
{"(*PCG).AppendBinary", Method, 24},
|
||||||
{"(*PCG).MarshalBinary", Method, 22},
|
{"(*PCG).MarshalBinary", Method, 22},
|
||||||
{"(*PCG).Seed", Method, 22},
|
{"(*PCG).Seed", Method, 22},
|
||||||
{"(*PCG).Uint64", Method, 22},
|
{"(*PCG).Uint64", Method, 22},
|
||||||
|
@ -7931,6 +8081,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*UnixListener).SyscallConn", Method, 10},
|
{"(*UnixListener).SyscallConn", Method, 10},
|
||||||
{"(Flags).String", Method, 0},
|
{"(Flags).String", Method, 0},
|
||||||
{"(HardwareAddr).String", Method, 0},
|
{"(HardwareAddr).String", Method, 0},
|
||||||
|
{"(IP).AppendText", Method, 24},
|
||||||
{"(IP).DefaultMask", Method, 0},
|
{"(IP).DefaultMask", Method, 0},
|
||||||
{"(IP).Equal", Method, 0},
|
{"(IP).Equal", Method, 0},
|
||||||
{"(IP).IsGlobalUnicast", Method, 0},
|
{"(IP).IsGlobalUnicast", Method, 0},
|
||||||
|
@ -8131,6 +8282,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*MaxBytesError).Error", Method, 19},
|
{"(*MaxBytesError).Error", Method, 19},
|
||||||
{"(*ProtocolError).Error", Method, 0},
|
{"(*ProtocolError).Error", Method, 0},
|
||||||
{"(*ProtocolError).Is", Method, 21},
|
{"(*ProtocolError).Is", Method, 21},
|
||||||
|
{"(*Protocols).SetHTTP1", Method, 24},
|
||||||
|
{"(*Protocols).SetHTTP2", Method, 24},
|
||||||
|
{"(*Protocols).SetUnencryptedHTTP2", Method, 24},
|
||||||
{"(*Request).AddCookie", Method, 0},
|
{"(*Request).AddCookie", Method, 0},
|
||||||
{"(*Request).BasicAuth", Method, 4},
|
{"(*Request).BasicAuth", Method, 4},
|
||||||
{"(*Request).Clone", Method, 13},
|
{"(*Request).Clone", Method, 13},
|
||||||
|
@ -8190,6 +8344,10 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(Header).Values", Method, 14},
|
{"(Header).Values", Method, 14},
|
||||||
{"(Header).Write", Method, 0},
|
{"(Header).Write", Method, 0},
|
||||||
{"(Header).WriteSubset", Method, 0},
|
{"(Header).WriteSubset", Method, 0},
|
||||||
|
{"(Protocols).HTTP1", Method, 24},
|
||||||
|
{"(Protocols).HTTP2", Method, 24},
|
||||||
|
{"(Protocols).String", Method, 24},
|
||||||
|
{"(Protocols).UnencryptedHTTP2", Method, 24},
|
||||||
{"AllowQuerySemicolons", Func, 17},
|
{"AllowQuerySemicolons", Func, 17},
|
||||||
{"CanonicalHeaderKey", Func, 0},
|
{"CanonicalHeaderKey", Func, 0},
|
||||||
{"Client", Type, 0},
|
{"Client", Type, 0},
|
||||||
|
@ -8252,6 +8410,18 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"FileSystem", Type, 0},
|
{"FileSystem", Type, 0},
|
||||||
{"Flusher", Type, 0},
|
{"Flusher", Type, 0},
|
||||||
{"Get", Func, 0},
|
{"Get", Func, 0},
|
||||||
|
{"HTTP2Config", Type, 24},
|
||||||
|
{"HTTP2Config.CountError", Field, 24},
|
||||||
|
{"HTTP2Config.MaxConcurrentStreams", Field, 24},
|
||||||
|
{"HTTP2Config.MaxDecoderHeaderTableSize", Field, 24},
|
||||||
|
{"HTTP2Config.MaxEncoderHeaderTableSize", Field, 24},
|
||||||
|
{"HTTP2Config.MaxReadFrameSize", Field, 24},
|
||||||
|
{"HTTP2Config.MaxReceiveBufferPerConnection", Field, 24},
|
||||||
|
{"HTTP2Config.MaxReceiveBufferPerStream", Field, 24},
|
||||||
|
{"HTTP2Config.PermitProhibitedCipherSuites", Field, 24},
|
||||||
|
{"HTTP2Config.PingTimeout", Field, 24},
|
||||||
|
{"HTTP2Config.SendPingTimeout", Field, 24},
|
||||||
|
{"HTTP2Config.WriteByteTimeout", Field, 24},
|
||||||
{"Handle", Func, 0},
|
{"Handle", Func, 0},
|
||||||
{"HandleFunc", Func, 0},
|
{"HandleFunc", Func, 0},
|
||||||
{"Handler", Type, 0},
|
{"Handler", Type, 0},
|
||||||
|
@ -8292,6 +8462,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"PostForm", Func, 0},
|
{"PostForm", Func, 0},
|
||||||
{"ProtocolError", Type, 0},
|
{"ProtocolError", Type, 0},
|
||||||
{"ProtocolError.ErrorString", Field, 0},
|
{"ProtocolError.ErrorString", Field, 0},
|
||||||
|
{"Protocols", Type, 24},
|
||||||
{"ProxyFromEnvironment", Func, 0},
|
{"ProxyFromEnvironment", Func, 0},
|
||||||
{"ProxyURL", Func, 0},
|
{"ProxyURL", Func, 0},
|
||||||
{"PushOptions", Type, 8},
|
{"PushOptions", Type, 8},
|
||||||
|
@ -8361,9 +8532,11 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Server.ConnState", Field, 3},
|
{"Server.ConnState", Field, 3},
|
||||||
{"Server.DisableGeneralOptionsHandler", Field, 20},
|
{"Server.DisableGeneralOptionsHandler", Field, 20},
|
||||||
{"Server.ErrorLog", Field, 3},
|
{"Server.ErrorLog", Field, 3},
|
||||||
|
{"Server.HTTP2", Field, 24},
|
||||||
{"Server.Handler", Field, 0},
|
{"Server.Handler", Field, 0},
|
||||||
{"Server.IdleTimeout", Field, 8},
|
{"Server.IdleTimeout", Field, 8},
|
||||||
{"Server.MaxHeaderBytes", Field, 0},
|
{"Server.MaxHeaderBytes", Field, 0},
|
||||||
|
{"Server.Protocols", Field, 24},
|
||||||
{"Server.ReadHeaderTimeout", Field, 8},
|
{"Server.ReadHeaderTimeout", Field, 8},
|
||||||
{"Server.ReadTimeout", Field, 0},
|
{"Server.ReadTimeout", Field, 0},
|
||||||
{"Server.TLSConfig", Field, 0},
|
{"Server.TLSConfig", Field, 0},
|
||||||
|
@ -8453,12 +8626,14 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Transport.ExpectContinueTimeout", Field, 6},
|
{"Transport.ExpectContinueTimeout", Field, 6},
|
||||||
{"Transport.ForceAttemptHTTP2", Field, 13},
|
{"Transport.ForceAttemptHTTP2", Field, 13},
|
||||||
{"Transport.GetProxyConnectHeader", Field, 16},
|
{"Transport.GetProxyConnectHeader", Field, 16},
|
||||||
|
{"Transport.HTTP2", Field, 24},
|
||||||
{"Transport.IdleConnTimeout", Field, 7},
|
{"Transport.IdleConnTimeout", Field, 7},
|
||||||
{"Transport.MaxConnsPerHost", Field, 11},
|
{"Transport.MaxConnsPerHost", Field, 11},
|
||||||
{"Transport.MaxIdleConns", Field, 7},
|
{"Transport.MaxIdleConns", Field, 7},
|
||||||
{"Transport.MaxIdleConnsPerHost", Field, 0},
|
{"Transport.MaxIdleConnsPerHost", Field, 0},
|
||||||
{"Transport.MaxResponseHeaderBytes", Field, 7},
|
{"Transport.MaxResponseHeaderBytes", Field, 7},
|
||||||
{"Transport.OnProxyConnectResponse", Field, 20},
|
{"Transport.OnProxyConnectResponse", Field, 20},
|
||||||
|
{"Transport.Protocols", Field, 24},
|
||||||
{"Transport.Proxy", Field, 0},
|
{"Transport.Proxy", Field, 0},
|
||||||
{"Transport.ProxyConnectHeader", Field, 8},
|
{"Transport.ProxyConnectHeader", Field, 8},
|
||||||
{"Transport.ReadBufferSize", Field, 13},
|
{"Transport.ReadBufferSize", Field, 13},
|
||||||
|
@ -8646,6 +8821,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*AddrPort).UnmarshalText", Method, 18},
|
{"(*AddrPort).UnmarshalText", Method, 18},
|
||||||
{"(*Prefix).UnmarshalBinary", Method, 18},
|
{"(*Prefix).UnmarshalBinary", Method, 18},
|
||||||
{"(*Prefix).UnmarshalText", Method, 18},
|
{"(*Prefix).UnmarshalText", Method, 18},
|
||||||
|
{"(Addr).AppendBinary", Method, 24},
|
||||||
|
{"(Addr).AppendText", Method, 24},
|
||||||
{"(Addr).AppendTo", Method, 18},
|
{"(Addr).AppendTo", Method, 18},
|
||||||
{"(Addr).As16", Method, 18},
|
{"(Addr).As16", Method, 18},
|
||||||
{"(Addr).As4", Method, 18},
|
{"(Addr).As4", Method, 18},
|
||||||
|
@ -8676,6 +8853,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(Addr).WithZone", Method, 18},
|
{"(Addr).WithZone", Method, 18},
|
||||||
{"(Addr).Zone", Method, 18},
|
{"(Addr).Zone", Method, 18},
|
||||||
{"(AddrPort).Addr", Method, 18},
|
{"(AddrPort).Addr", Method, 18},
|
||||||
|
{"(AddrPort).AppendBinary", Method, 24},
|
||||||
|
{"(AddrPort).AppendText", Method, 24},
|
||||||
{"(AddrPort).AppendTo", Method, 18},
|
{"(AddrPort).AppendTo", Method, 18},
|
||||||
{"(AddrPort).Compare", Method, 22},
|
{"(AddrPort).Compare", Method, 22},
|
||||||
{"(AddrPort).IsValid", Method, 18},
|
{"(AddrPort).IsValid", Method, 18},
|
||||||
|
@ -8684,6 +8863,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(AddrPort).Port", Method, 18},
|
{"(AddrPort).Port", Method, 18},
|
||||||
{"(AddrPort).String", Method, 18},
|
{"(AddrPort).String", Method, 18},
|
||||||
{"(Prefix).Addr", Method, 18},
|
{"(Prefix).Addr", Method, 18},
|
||||||
|
{"(Prefix).AppendBinary", Method, 24},
|
||||||
|
{"(Prefix).AppendText", Method, 24},
|
||||||
{"(Prefix).AppendTo", Method, 18},
|
{"(Prefix).AppendTo", Method, 18},
|
||||||
{"(Prefix).Bits", Method, 18},
|
{"(Prefix).Bits", Method, 18},
|
||||||
{"(Prefix).Contains", Method, 18},
|
{"(Prefix).Contains", Method, 18},
|
||||||
|
@ -8868,6 +9049,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*Error).Temporary", Method, 6},
|
{"(*Error).Temporary", Method, 6},
|
||||||
{"(*Error).Timeout", Method, 6},
|
{"(*Error).Timeout", Method, 6},
|
||||||
{"(*Error).Unwrap", Method, 13},
|
{"(*Error).Unwrap", Method, 13},
|
||||||
|
{"(*URL).AppendBinary", Method, 24},
|
||||||
{"(*URL).EscapedFragment", Method, 15},
|
{"(*URL).EscapedFragment", Method, 15},
|
||||||
{"(*URL).EscapedPath", Method, 5},
|
{"(*URL).EscapedPath", Method, 5},
|
||||||
{"(*URL).Hostname", Method, 8},
|
{"(*URL).Hostname", Method, 8},
|
||||||
|
@ -8967,6 +9149,17 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*ProcessState).SysUsage", Method, 0},
|
{"(*ProcessState).SysUsage", Method, 0},
|
||||||
{"(*ProcessState).SystemTime", Method, 0},
|
{"(*ProcessState).SystemTime", Method, 0},
|
||||||
{"(*ProcessState).UserTime", Method, 0},
|
{"(*ProcessState).UserTime", Method, 0},
|
||||||
|
{"(*Root).Close", Method, 24},
|
||||||
|
{"(*Root).Create", Method, 24},
|
||||||
|
{"(*Root).FS", Method, 24},
|
||||||
|
{"(*Root).Lstat", Method, 24},
|
||||||
|
{"(*Root).Mkdir", Method, 24},
|
||||||
|
{"(*Root).Name", Method, 24},
|
||||||
|
{"(*Root).Open", Method, 24},
|
||||||
|
{"(*Root).OpenFile", Method, 24},
|
||||||
|
{"(*Root).OpenRoot", Method, 24},
|
||||||
|
{"(*Root).Remove", Method, 24},
|
||||||
|
{"(*Root).Stat", Method, 24},
|
||||||
{"(*SyscallError).Error", Method, 0},
|
{"(*SyscallError).Error", Method, 0},
|
||||||
{"(*SyscallError).Timeout", Method, 10},
|
{"(*SyscallError).Timeout", Method, 10},
|
||||||
{"(*SyscallError).Unwrap", Method, 13},
|
{"(*SyscallError).Unwrap", Method, 13},
|
||||||
|
@ -9060,6 +9253,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"O_WRONLY", Const, 0},
|
{"O_WRONLY", Const, 0},
|
||||||
{"Open", Func, 0},
|
{"Open", Func, 0},
|
||||||
{"OpenFile", Func, 0},
|
{"OpenFile", Func, 0},
|
||||||
|
{"OpenInRoot", Func, 24},
|
||||||
|
{"OpenRoot", Func, 24},
|
||||||
{"PathError", Type, 0},
|
{"PathError", Type, 0},
|
||||||
{"PathError.Err", Field, 0},
|
{"PathError.Err", Field, 0},
|
||||||
{"PathError.Op", Field, 0},
|
{"PathError.Op", Field, 0},
|
||||||
|
@ -9081,6 +9276,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Remove", Func, 0},
|
{"Remove", Func, 0},
|
||||||
{"RemoveAll", Func, 0},
|
{"RemoveAll", Func, 0},
|
||||||
{"Rename", Func, 0},
|
{"Rename", Func, 0},
|
||||||
|
{"Root", Type, 24},
|
||||||
{"SEEK_CUR", Const, 0},
|
{"SEEK_CUR", Const, 0},
|
||||||
{"SEEK_END", Const, 0},
|
{"SEEK_END", Const, 0},
|
||||||
{"SEEK_SET", Const, 0},
|
{"SEEK_SET", Const, 0},
|
||||||
|
@ -9422,6 +9618,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Zero", Func, 0},
|
{"Zero", Func, 0},
|
||||||
},
|
},
|
||||||
"regexp": {
|
"regexp": {
|
||||||
|
{"(*Regexp).AppendText", Method, 24},
|
||||||
{"(*Regexp).Copy", Method, 6},
|
{"(*Regexp).Copy", Method, 6},
|
||||||
{"(*Regexp).Expand", Method, 0},
|
{"(*Regexp).Expand", Method, 0},
|
||||||
{"(*Regexp).ExpandString", Method, 0},
|
{"(*Regexp).ExpandString", Method, 0},
|
||||||
|
@ -9602,6 +9799,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*StackRecord).Stack", Method, 0},
|
{"(*StackRecord).Stack", Method, 0},
|
||||||
{"(*TypeAssertionError).Error", Method, 0},
|
{"(*TypeAssertionError).Error", Method, 0},
|
||||||
{"(*TypeAssertionError).RuntimeError", Method, 0},
|
{"(*TypeAssertionError).RuntimeError", Method, 0},
|
||||||
|
{"(Cleanup).Stop", Method, 24},
|
||||||
|
{"AddCleanup", Func, 24},
|
||||||
{"BlockProfile", Func, 1},
|
{"BlockProfile", Func, 1},
|
||||||
{"BlockProfileRecord", Type, 1},
|
{"BlockProfileRecord", Type, 1},
|
||||||
{"BlockProfileRecord.Count", Field, 1},
|
{"BlockProfileRecord.Count", Field, 1},
|
||||||
|
@ -9612,6 +9811,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Caller", Func, 0},
|
{"Caller", Func, 0},
|
||||||
{"Callers", Func, 0},
|
{"Callers", Func, 0},
|
||||||
{"CallersFrames", Func, 7},
|
{"CallersFrames", Func, 7},
|
||||||
|
{"Cleanup", Type, 24},
|
||||||
{"Compiler", Const, 0},
|
{"Compiler", Const, 0},
|
||||||
{"Error", Type, 0},
|
{"Error", Type, 0},
|
||||||
{"Frame", Type, 7},
|
{"Frame", Type, 7},
|
||||||
|
@ -9974,6 +10174,8 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"EqualFold", Func, 0},
|
{"EqualFold", Func, 0},
|
||||||
{"Fields", Func, 0},
|
{"Fields", Func, 0},
|
||||||
{"FieldsFunc", Func, 0},
|
{"FieldsFunc", Func, 0},
|
||||||
|
{"FieldsFuncSeq", Func, 24},
|
||||||
|
{"FieldsSeq", Func, 24},
|
||||||
{"HasPrefix", Func, 0},
|
{"HasPrefix", Func, 0},
|
||||||
{"HasSuffix", Func, 0},
|
{"HasSuffix", Func, 0},
|
||||||
{"Index", Func, 0},
|
{"Index", Func, 0},
|
||||||
|
@ -9986,6 +10188,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"LastIndexAny", Func, 0},
|
{"LastIndexAny", Func, 0},
|
||||||
{"LastIndexByte", Func, 5},
|
{"LastIndexByte", Func, 5},
|
||||||
{"LastIndexFunc", Func, 0},
|
{"LastIndexFunc", Func, 0},
|
||||||
|
{"Lines", Func, 24},
|
||||||
{"Map", Func, 0},
|
{"Map", Func, 0},
|
||||||
{"NewReader", Func, 0},
|
{"NewReader", Func, 0},
|
||||||
{"NewReplacer", Func, 0},
|
{"NewReplacer", Func, 0},
|
||||||
|
@ -9997,7 +10200,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"Split", Func, 0},
|
{"Split", Func, 0},
|
||||||
{"SplitAfter", Func, 0},
|
{"SplitAfter", Func, 0},
|
||||||
{"SplitAfterN", Func, 0},
|
{"SplitAfterN", Func, 0},
|
||||||
|
{"SplitAfterSeq", Func, 24},
|
||||||
{"SplitN", Func, 0},
|
{"SplitN", Func, 0},
|
||||||
|
{"SplitSeq", Func, 24},
|
||||||
{"Title", Func, 0},
|
{"Title", Func, 0},
|
||||||
{"ToLower", Func, 0},
|
{"ToLower", Func, 0},
|
||||||
{"ToLowerSpecial", Func, 0},
|
{"ToLowerSpecial", Func, 0},
|
||||||
|
@ -16413,7 +16618,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"ValueOf", Func, 0},
|
{"ValueOf", Func, 0},
|
||||||
},
|
},
|
||||||
"testing": {
|
"testing": {
|
||||||
|
{"(*B).Chdir", Method, 24},
|
||||||
{"(*B).Cleanup", Method, 14},
|
{"(*B).Cleanup", Method, 14},
|
||||||
|
{"(*B).Context", Method, 24},
|
||||||
{"(*B).Elapsed", Method, 20},
|
{"(*B).Elapsed", Method, 20},
|
||||||
{"(*B).Error", Method, 0},
|
{"(*B).Error", Method, 0},
|
||||||
{"(*B).Errorf", Method, 0},
|
{"(*B).Errorf", Method, 0},
|
||||||
|
@ -16425,6 +16632,7 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*B).Helper", Method, 9},
|
{"(*B).Helper", Method, 9},
|
||||||
{"(*B).Log", Method, 0},
|
{"(*B).Log", Method, 0},
|
||||||
{"(*B).Logf", Method, 0},
|
{"(*B).Logf", Method, 0},
|
||||||
|
{"(*B).Loop", Method, 24},
|
||||||
{"(*B).Name", Method, 8},
|
{"(*B).Name", Method, 8},
|
||||||
{"(*B).ReportAllocs", Method, 1},
|
{"(*B).ReportAllocs", Method, 1},
|
||||||
{"(*B).ReportMetric", Method, 13},
|
{"(*B).ReportMetric", Method, 13},
|
||||||
|
@ -16442,7 +16650,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*B).StopTimer", Method, 0},
|
{"(*B).StopTimer", Method, 0},
|
||||||
{"(*B).TempDir", Method, 15},
|
{"(*B).TempDir", Method, 15},
|
||||||
{"(*F).Add", Method, 18},
|
{"(*F).Add", Method, 18},
|
||||||
|
{"(*F).Chdir", Method, 24},
|
||||||
{"(*F).Cleanup", Method, 18},
|
{"(*F).Cleanup", Method, 18},
|
||||||
|
{"(*F).Context", Method, 24},
|
||||||
{"(*F).Error", Method, 18},
|
{"(*F).Error", Method, 18},
|
||||||
{"(*F).Errorf", Method, 18},
|
{"(*F).Errorf", Method, 18},
|
||||||
{"(*F).Fail", Method, 18},
|
{"(*F).Fail", Method, 18},
|
||||||
|
@ -16463,7 +16673,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(*F).TempDir", Method, 18},
|
{"(*F).TempDir", Method, 18},
|
||||||
{"(*M).Run", Method, 4},
|
{"(*M).Run", Method, 4},
|
||||||
{"(*PB).Next", Method, 3},
|
{"(*PB).Next", Method, 3},
|
||||||
|
{"(*T).Chdir", Method, 24},
|
||||||
{"(*T).Cleanup", Method, 14},
|
{"(*T).Cleanup", Method, 14},
|
||||||
|
{"(*T).Context", Method, 24},
|
||||||
{"(*T).Deadline", Method, 15},
|
{"(*T).Deadline", Method, 15},
|
||||||
{"(*T).Error", Method, 0},
|
{"(*T).Error", Method, 0},
|
||||||
{"(*T).Errorf", Method, 0},
|
{"(*T).Errorf", Method, 0},
|
||||||
|
@ -16954,7 +17166,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"(Time).Add", Method, 0},
|
{"(Time).Add", Method, 0},
|
||||||
{"(Time).AddDate", Method, 0},
|
{"(Time).AddDate", Method, 0},
|
||||||
{"(Time).After", Method, 0},
|
{"(Time).After", Method, 0},
|
||||||
|
{"(Time).AppendBinary", Method, 24},
|
||||||
{"(Time).AppendFormat", Method, 5},
|
{"(Time).AppendFormat", Method, 5},
|
||||||
|
{"(Time).AppendText", Method, 24},
|
||||||
{"(Time).Before", Method, 0},
|
{"(Time).Before", Method, 0},
|
||||||
{"(Time).Clock", Method, 0},
|
{"(Time).Clock", Method, 0},
|
||||||
{"(Time).Compare", Method, 20},
|
{"(Time).Compare", Method, 20},
|
||||||
|
@ -17428,4 +17642,9 @@ var PackageSymbols = map[string][]Symbol{
|
||||||
{"String", Func, 0},
|
{"String", Func, 0},
|
||||||
{"StringData", Func, 0},
|
{"StringData", Func, 0},
|
||||||
},
|
},
|
||||||
|
"weak": {
|
||||||
|
{"(Pointer).Value", Method, 24},
|
||||||
|
{"Make", Func, 24},
|
||||||
|
{"Pointer", Type, 24},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@@ -66,75 +66,3 @@ func IsTypeParam(t types.Type) bool {
 	_, ok := types.Unalias(t).(*types.TypeParam)
 	return ok
 }
-
-// GenericAssignableTo is a generalization of types.AssignableTo that
-// implements the following rule for uninstantiated generic types:
-//
-// If V and T are generic named types, then V is considered assignable to T if,
-// for every possible instantiation of V[A_1, ..., A_N], the instantiation
-// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N].
-//
-// If T has structural constraints, they must be satisfied by V.
-//
-// For example, consider the following type declarations:
-//
-//	type Interface[T any] interface {
-//		Accept(T)
-//	}
-//
-//	type Container[T any] struct {
-//		Element T
-//	}
-//
-//	func (c Container[T]) Accept(t T) { c.Element = t }
-//
-// In this case, GenericAssignableTo reports that instantiations of Container
-// are assignable to the corresponding instantiation of Interface.
-func GenericAssignableTo(ctxt *types.Context, V, T types.Type) bool {
-	V = types.Unalias(V)
-	T = types.Unalias(T)
-
-	// If V and T are not both named, or do not have matching non-empty type
-	// parameter lists, fall back on types.AssignableTo.
-
-	VN, Vnamed := V.(*types.Named)
-	TN, Tnamed := T.(*types.Named)
-	if !Vnamed || !Tnamed {
-		return types.AssignableTo(V, T)
-	}
-
-	vtparams := VN.TypeParams()
-	ttparams := TN.TypeParams()
-	if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || VN.TypeArgs().Len() != 0 || TN.TypeArgs().Len() != 0 {
-		return types.AssignableTo(V, T)
-	}
-
-	// V and T have the same (non-zero) number of type params. Instantiate both
-	// with the type parameters of V. This must always succeed for V, and will
-	// succeed for T if and only if the type set of each type parameter of V is a
-	// subset of the type set of the corresponding type parameter of T, meaning
-	// that every instantiation of V corresponds to a valid instantiation of T.
-
-	// Minor optimization: ensure we share a context across the two
-	// instantiations below.
-	if ctxt == nil {
-		ctxt = types.NewContext()
-	}
-
-	var targs []types.Type
-	for i := 0; i < vtparams.Len(); i++ {
-		targs = append(targs, vtparams.At(i))
-	}
-
-	vinst, err := types.Instantiate(ctxt, V, targs, true)
-	if err != nil {
-		panic("type parameters should satisfy their own constraints")
-	}
-
-	tinst, err := types.Instantiate(ctxt, T, targs, true)
-	if err != nil {
-		return false
-	}
-
-	return types.AssignableTo(vinst, tinst)
-}
@@ -109,8 +109,13 @@ func CoreType(T types.Type) types.Type {
 //
 // NormalTerms makes no guarantees about the order of terms, except that it
 // is deterministic.
-func NormalTerms(typ types.Type) ([]*types.Term, error) {
-	switch typ := typ.Underlying().(type) {
+func NormalTerms(T types.Type) ([]*types.Term, error) {
+	// typeSetOf(T) == typeSetOf(Unalias(T))
+	typ := types.Unalias(T)
+	if named, ok := typ.(*types.Named); ok {
+		typ = named.Underlying()
+	}
+	switch typ := typ.(type) {
 	case *types.TypeParam:
 		return StructuralTerms(typ)
 	case *types.Union:
@@ -118,7 +123,7 @@ func NormalTerms(typ types.Type) ([]*types.Term, error) {
 	case *types.Interface:
 		return InterfaceTermSet(typ)
 	default:
-		return []*types.Term{types.NewTerm(false, typ)}, nil
+		return []*types.Term{types.NewTerm(false, T)}, nil
 	}
 }
@@ -966,7 +966,7 @@ const (
 	//  var _ = string(x)
 	InvalidConversion

-	// InvalidUntypedConversion occurs when an there is no valid implicit
+	// InvalidUntypedConversion occurs when there is no valid implicit
 	// conversion from an untyped value satisfying the type constraints of the
 	// context in which it is used.
 	//
@@ -0,0 +1,46 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typesinternal
+
+import (
+	"go/ast"
+	"go/types"
+	"strconv"
+)
+
+// FileQualifier returns a [types.Qualifier] function that qualifies
+// imported symbols appropriately based on the import environment of a given
+// file.
+// If the same package is imported multiple times, the last appearance is
+// recorded.
+func FileQualifier(f *ast.File, pkg *types.Package) types.Qualifier {
+	// Construct mapping of import paths to their defined names.
+	// It is only necessary to look at renaming imports.
+	imports := make(map[string]string)
+	for _, imp := range f.Imports {
+		if imp.Name != nil && imp.Name.Name != "_" {
+			path, _ := strconv.Unquote(imp.Path.Value)
+			imports[path] = imp.Name.Name
+		}
+	}
+
+	// Define qualifier to replace full package paths with names of the imports.
+	return func(p *types.Package) string {
+		if p == nil || p == pkg {
+			return ""
+		}
+
+		if name, ok := imports[p.Path()]; ok {
+			if name == "." {
+				return ""
+			} else {
+				return name
+			}
+		}
+
+		// If there is no local renaming, fall back to the package name.
+		return p.Name()
+	}
+}
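The new FileQualifier helper above builds a types.Qualifier from a file's import environment. A minimal usage sketch, not part of this diff: it assumes code living inside golang.org/x/tools (where the internal typesinternal package is importable) and an already type-checked file f, its package pkg, and some type t.

	// Print t the way it would be spelled inside file f,
	// honoring any renaming imports declared in f.
	qual := typesinternal.FileQualifier(f, pkg)
	fmt.Println(types.TypeString(t, qual))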
@@ -11,6 +11,9 @@ import (
 // ReceiverNamed returns the named type (if any) associated with the
 // type of recv, which may be of the form N or *N, or aliases thereof.
 // It also reports whether a Pointer was present.
+//
+// The named result may be nil if recv is from a method on an
+// anonymous interface or struct types or in ill-typed code.
 func ReceiverNamed(recv *types.Var) (isPtr bool, named *types.Named) {
 	t := recv.Type()
 	if ptr, ok := types.Unalias(t).(*types.Pointer); ok {
@@ -82,6 +82,7 @@ func NameRelativeTo(pkg *types.Package) types.Qualifier {
 type NamedOrAlias interface {
 	types.Type
 	Obj() *types.TypeName
+	// TODO(hxjiang): add method TypeArgs() *types.TypeList after stop supporting go1.22.
 }

 // TypeParams is a light shim around t.TypeParams().
@@ -119,3 +120,8 @@ func Origin(t NamedOrAlias) NamedOrAlias {
 	}
 	return t
 }
+
+// IsPackageLevel reports whether obj is a package-level symbol.
+func IsPackageLevel(obj types.Object) bool {
+	return obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope()
+}
@@ -0,0 +1,40 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typesinternal
+
+// TODO(adonovan): when CL 645115 lands, define the go1.25 version of
+// this API that actually does something.
+
+import "go/types"
+
+type VarKind uint8
+
+const (
+	_          VarKind = iota // (not meaningful)
+	PackageVar                // a package-level variable
+	LocalVar                  // a local variable
+	RecvVar                   // a method receiver variable
+	ParamVar                  // a function parameter variable
+	ResultVar                 // a function result variable
+	FieldVar                  // a struct field
+)
+
+func (kind VarKind) String() string {
+	return [...]string{
+		0:          "VarKind(0)",
+		PackageVar: "PackageVar",
+		LocalVar:   "LocalVar",
+		RecvVar:    "RecvVar",
+		ParamVar:   "ParamVar",
+		ResultVar:  "ResultVar",
+		FieldVar:   "FieldVar",
+	}[kind]
+}
+
+// GetVarKind returns an invalid VarKind.
+func GetVarKind(v *types.Var) VarKind { return 0 }
+
+// SetVarKind has no effect.
+func SetVarKind(v *types.Var, kind VarKind) {}
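The VarKind file above is only a stub until the go1.25 API lands. A small sketch, not part of this diff, of what the stub reports today; describeVar is a hypothetical helper and assumes typesinternal is importable from within x/tools.

	// describeVar illustrates the current behavior: GetVarKind always
	// yields the zero VarKind, so this returns "VarKind(0)" for any v,
	// while the named constants already print their own names, e.g.
	// typesinternal.ParamVar.String() == "ParamVar".
	func describeVar(v *types.Var) string {
		return typesinternal.GetVarKind(v).String()
	}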
@@ -9,62 +9,97 @@ import (
 	"go/ast"
 	"go/token"
 	"go/types"
-	"strconv"
 	"strings"
 )

-// ZeroString returns the string representation of the "zero" value of the type t.
-// This string can be used on the right-hand side of an assignment where the
-// left-hand side has that explicit type.
-// Exception: This does not apply to tuples. Their string representation is
-// informational only and cannot be used in an assignment.
+// ZeroString returns the string representation of the zero value for any type t.
+// The boolean result indicates whether the type is or contains an invalid type
+// or a non-basic (constraint) interface type.
+//
+// Even for invalid input types, ZeroString may return a partially correct
+// string representation. The caller should use the returned isValid boolean
+// to determine the validity of the expression.
+//
 // When assigning to a wider type (such as 'any'), it's the caller's
 // responsibility to handle any necessary type conversions.
+//
+// This string can be used on the right-hand side of an assignment where the
+// left-hand side has that explicit type.
+// References to named types are qualified by an appropriate (optional)
+// qualifier function.
+// Exception: This does not apply to tuples. Their string representation is
+// informational only and cannot be used in an assignment.
+//
 // See [ZeroExpr] for a variant that returns an [ast.Expr].
-func ZeroString(t types.Type, qf types.Qualifier) string {
+func ZeroString(t types.Type, qual types.Qualifier) (_ string, isValid bool) {
 	switch t := t.(type) {
 	case *types.Basic:
 		switch {
 		case t.Info()&types.IsBoolean != 0:
-			return "false"
+			return "false", true
 		case t.Info()&types.IsNumeric != 0:
-			return "0"
+			return "0", true
 		case t.Info()&types.IsString != 0:
-			return `""`
+			return `""`, true
 		case t.Kind() == types.UnsafePointer:
 			fallthrough
 		case t.Kind() == types.UntypedNil:
-			return "nil"
+			return "nil", true
+		case t.Kind() == types.Invalid:
+			return "invalid", false
 		default:
-			panic(fmt.Sprint("ZeroString for unexpected type:", t))
+			panic(fmt.Sprintf("ZeroString for unexpected type %v", t))
 		}

-	case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
-		return "nil"
+	case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature:
+		return "nil", true

-	case *types.Named, *types.Alias:
+	case *types.Interface:
+		if !t.IsMethodSet() {
+			return "invalid", false
+		}
+		return "nil", true
+
+	case *types.Named:
 		switch under := t.Underlying().(type) {
 		case *types.Struct, *types.Array:
-			return types.TypeString(t, qf) + "{}"
+			return types.TypeString(t, qual) + "{}", true
 		default:
-			return ZeroString(under, qf)
+			return ZeroString(under, qual)
+		}
+
+	case *types.Alias:
+		switch t.Underlying().(type) {
+		case *types.Struct, *types.Array:
+			return types.TypeString(t, qual) + "{}", true
+		default:
+			// A type parameter can have alias but alias type's underlying type
+			// can never be a type parameter.
+			// Use types.Unalias to preserve the info of type parameter instead
+			// of call Underlying() going right through and get the underlying
+			// type of the type parameter which is always an interface.
+			return ZeroString(types.Unalias(t), qual)
 		}

 	case *types.Array, *types.Struct:
-		return types.TypeString(t, qf) + "{}"
+		return types.TypeString(t, qual) + "{}", true

 	case *types.TypeParam:
 		// Assumes func new is not shadowed.
-		return "*new(" + types.TypeString(t, qf) + ")"
+		return "*new(" + types.TypeString(t, qual) + ")", true

 	case *types.Tuple:
 		// Tuples are not normal values.
 		// We are currently format as "(t[0], ..., t[n])". Could be something else.
+		isValid := true
 		components := make([]string, t.Len())
 		for i := 0; i < t.Len(); i++ {
-			components[i] = ZeroString(t.At(i).Type(), qf)
+			comp, ok := ZeroString(t.At(i).Type(), qual)
+
+			components[i] = comp
+			isValid = isValid && ok
 		}
-		return "(" + strings.Join(components, ", ") + ")"
+		return "(" + strings.Join(components, ", ") + ")", isValid

 	case *types.Union:
 		// Variables of these types cannot be created, so it makes
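ZeroString now returns a second result. A hedged sketch, not part of this diff, of how a caller inside x/tools might adapt: t is an assumed types.Type and qual an assumed qualifier (for example the FileQualifier shown earlier); the boolean must be checked before splicing the string into generated code.

	if zero, ok := typesinternal.ZeroString(t, qual); ok {
		fmt.Printf("var x %s = %s\n", types.TypeString(t, qual), zero)
	} else {
		// t is or contains an invalid or constraint-interface type,
		// so no expressible zero value exists for it.
	}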
@ -76,45 +111,72 @@ func ZeroString(t types.Type, qf types.Qualifier) string {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ZeroExpr returns the ast.Expr representation of the "zero" value of the type t.
|
// ZeroExpr returns the ast.Expr representation of the zero value for any type t.
|
||||||
// ZeroExpr is defined for types that are suitable for variables.
|
// The boolean result indicates whether the type is or contains an invalid type
|
||||||
// It may panic for other types such as Tuple or Union.
|
// or a non-basic (constraint) interface type.
|
||||||
|
//
|
||||||
|
// Even for invalid input types, ZeroExpr may return a partially correct ast.Expr
|
||||||
|
// representation. The caller should use the returned isValid boolean to determine
|
||||||
|
// the validity of the expression.
|
||||||
|
//
|
||||||
|
// This function is designed for types suitable for variables and should not be
|
||||||
|
// used with Tuple or Union types.References to named types are qualified by an
|
||||||
|
// appropriate (optional) qualifier function.
|
||||||
|
//
|
||||||
// See [ZeroString] for a variant that returns a string.
|
// See [ZeroString] for a variant that returns a string.
|
||||||
func ZeroExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
|
func ZeroExpr(t types.Type, qual types.Qualifier) (_ ast.Expr, isValid bool) {
|
||||||
switch t := typ.(type) {
|
switch t := t.(type) {
|
||||||
case *types.Basic:
|
case *types.Basic:
|
||||||
switch {
|
switch {
|
||||||
case t.Info()&types.IsBoolean != 0:
|
case t.Info()&types.IsBoolean != 0:
|
||||||
return &ast.Ident{Name: "false"}
|
return &ast.Ident{Name: "false"}, true
|
||||||
case t.Info()&types.IsNumeric != 0:
|
case t.Info()&types.IsNumeric != 0:
|
||||||
return &ast.BasicLit{Kind: token.INT, Value: "0"}
|
return &ast.BasicLit{Kind: token.INT, Value: "0"}, true
|
||||||
case t.Info()&types.IsString != 0:
|
case t.Info()&types.IsString != 0:
|
||||||
return &ast.BasicLit{Kind: token.STRING, Value: `""`}
|
return &ast.BasicLit{Kind: token.STRING, Value: `""`}, true
|
||||||
case t.Kind() == types.UnsafePointer:
|
case t.Kind() == types.UnsafePointer:
|
||||||
fallthrough
|
fallthrough
|
||||||
case t.Kind() == types.UntypedNil:
|
case t.Kind() == types.UntypedNil:
|
||||||
return ast.NewIdent("nil")
|
return ast.NewIdent("nil"), true
|
||||||
|
case t.Kind() == types.Invalid:
|
||||||
|
return &ast.BasicLit{Kind: token.STRING, Value: `"invalid"`}, false
|
||||||
default:
|
default:
|
||||||
panic(fmt.Sprint("ZeroExpr for unexpected type:", t))
|
panic(fmt.Sprintf("ZeroExpr for unexpected type %v", t))
|
||||||
}
|
}
|
||||||
|
|
||||||
case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
|
case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature:
|
||||||
return ast.NewIdent("nil")
|
return ast.NewIdent("nil"), true
|
||||||
|
|
||||||
case *types.Named, *types.Alias:
|
case *types.Interface:
|
||||||
|
if !t.IsMethodSet() {
|
||||||
|
return &ast.BasicLit{Kind: token.STRING, Value: `"invalid"`}, false
|
||||||
|
}
|
||||||
|
return ast.NewIdent("nil"), true
|
||||||
|
|
||||||
|
case *types.Named:
|
||||||
switch under := t.Underlying().(type) {
|
switch under := t.Underlying().(type) {
|
||||||
case *types.Struct, *types.Array:
|
case *types.Struct, *types.Array:
|
||||||
return &ast.CompositeLit{
|
return &ast.CompositeLit{
|
||||||
Type: TypeExpr(f, pkg, typ),
|
Type: TypeExpr(t, qual),
|
||||||
}
|
}, true
|
||||||
default:
|
default:
|
||||||
return ZeroExpr(f, pkg, under)
|
return ZeroExpr(under, qual)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *types.Alias:
|
||||||
|
switch t.Underlying().(type) {
|
||||||
|
case *types.Struct, *types.Array:
|
||||||
|
return &ast.CompositeLit{
|
||||||
|
Type: TypeExpr(t, qual),
|
||||||
|
}, true
|
||||||
|
default:
|
||||||
|
return ZeroExpr(types.Unalias(t), qual)
|
||||||
}
|
}
|
||||||
|
|
||||||
case *types.Array, *types.Struct:
|
case *types.Array, *types.Struct:
|
||||||
return &ast.CompositeLit{
|
return &ast.CompositeLit{
|
||||||
Type: TypeExpr(f, pkg, typ),
|
Type: TypeExpr(t, qual),
|
||||||
}
|
}, true
|
||||||
|
|
||||||
case *types.TypeParam:
|
case *types.TypeParam:
|
||||||
return &ast.StarExpr{ // *new(T)
|
return &ast.StarExpr{ // *new(T)
|
||||||
|
@ -125,7 +187,7 @@ func ZeroExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
|
||||||
ast.NewIdent(t.Obj().Name()),
|
ast.NewIdent(t.Obj().Name()),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}, true
|
||||||
|
|
||||||
case *types.Tuple:
|
case *types.Tuple:
|
||||||
// Unlike ZeroString, there is no ast.Expr can express tuple by
|
// Unlike ZeroString, there is no ast.Expr can express tuple by
|
||||||
@@ -157,16 +219,14 @@ func IsZeroExpr(expr ast.Expr) bool {
 }

 // TypeExpr returns syntax for the specified type. References to named types
-// from packages other than pkg are qualified by an appropriate package name, as
-// defined by the import environment of file.
+// are qualified by an appropriate (optional) qualifier function.
 // It may panic for types such as Tuple or Union.
-func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
-	switch t := typ.(type) {
+func TypeExpr(t types.Type, qual types.Qualifier) ast.Expr {
+	switch t := t.(type) {
 	case *types.Basic:
 		switch t.Kind() {
 		case types.UnsafePointer:
-			// TODO(hxjiang): replace the implementation with types.Qualifier.
-			return &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")}
+			return &ast.SelectorExpr{X: ast.NewIdent(qual(types.NewPackage("unsafe", "unsafe"))), Sel: ast.NewIdent("Pointer")}
 		default:
 			return ast.NewIdent(t.Name())
 		}
@@ -174,7 +234,7 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 	case *types.Pointer:
 		return &ast.UnaryExpr{
 			Op: token.MUL,
-			X:  TypeExpr(f, pkg, t.Elem()),
+			X:  TypeExpr(t.Elem(), qual),
 		}

 	case *types.Array:
@@ -183,18 +243,18 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 				Kind:  token.INT,
 				Value: fmt.Sprintf("%d", t.Len()),
 			},
-			Elt: TypeExpr(f, pkg, t.Elem()),
+			Elt: TypeExpr(t.Elem(), qual),
 		}

 	case *types.Slice:
 		return &ast.ArrayType{
-			Elt: TypeExpr(f, pkg, t.Elem()),
+			Elt: TypeExpr(t.Elem(), qual),
 		}

 	case *types.Map:
 		return &ast.MapType{
-			Key:   TypeExpr(f, pkg, t.Key()),
-			Value: TypeExpr(f, pkg, t.Elem()),
+			Key:   TypeExpr(t.Key(), qual),
+			Value: TypeExpr(t.Elem(), qual),
 		}

 	case *types.Chan:
@@ -204,14 +264,14 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 		}
 		return &ast.ChanType{
 			Dir:   dir,
-			Value: TypeExpr(f, pkg, t.Elem()),
+			Value: TypeExpr(t.Elem(), qual),
 		}

 	case *types.Signature:
 		var params []*ast.Field
 		for i := 0; i < t.Params().Len(); i++ {
 			params = append(params, &ast.Field{
-				Type: TypeExpr(f, pkg, t.Params().At(i).Type()),
+				Type: TypeExpr(t.Params().At(i).Type(), qual),
 				Names: []*ast.Ident{
 					{
 						Name: t.Params().At(i).Name(),
@@ -226,7 +286,7 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 		var returns []*ast.Field
 		for i := 0; i < t.Results().Len(); i++ {
 			returns = append(returns, &ast.Field{
-				Type: TypeExpr(f, pkg, t.Results().At(i).Type()),
+				Type: TypeExpr(t.Results().At(i).Type(), qual),
 			})
 		}
 		return &ast.FuncType{
@@ -238,23 +298,9 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 			},
 		}

-	case interface{ Obj() *types.TypeName }: // *types.{Alias,Named,TypeParam}
-		switch t.Obj().Pkg() {
-		case pkg, nil:
-			return ast.NewIdent(t.Obj().Name())
-		}
-		pkgName := t.Obj().Pkg().Name()
-
-		// TODO(hxjiang): replace the implementation with types.Qualifier.
-		// If the file already imports the package under another name, use that.
-		for _, cand := range f.Imports {
-			if path, _ := strconv.Unquote(cand.Path.Value); path == t.Obj().Pkg().Path() {
-				if cand.Name != nil && cand.Name.Name != "" {
-					pkgName = cand.Name.Name
-				}
-			}
-		}
-		if pkgName == "." {
+	case *types.TypeParam:
+		pkgName := qual(t.Obj().Pkg())
+		if pkgName == "" || t.Obj().Pkg() == nil {
 			return ast.NewIdent(t.Obj().Name())
 		}
 		return &ast.SelectorExpr{
@@ -262,6 +308,36 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 			Sel: ast.NewIdent(t.Obj().Name()),
 		}

+	// types.TypeParam also implements interface NamedOrAlias. To differentiate,
+	// case TypeParam need to be present before case NamedOrAlias.
+	// TODO(hxjiang): remove this comment once TypeArgs() is added to interface
+	// NamedOrAlias.
+	case NamedOrAlias:
+		var expr ast.Expr = ast.NewIdent(t.Obj().Name())
+		if pkgName := qual(t.Obj().Pkg()); pkgName != "." && pkgName != "" {
+			expr = &ast.SelectorExpr{
+				X:   ast.NewIdent(pkgName),
+				Sel: expr.(*ast.Ident),
+			}
+		}
+
+		// TODO(hxjiang): call t.TypeArgs after adding method TypeArgs() to
+		// typesinternal.NamedOrAlias.
+		if hasTypeArgs, ok := t.(interface{ TypeArgs() *types.TypeList }); ok {
+			if typeArgs := hasTypeArgs.TypeArgs(); typeArgs != nil && typeArgs.Len() > 0 {
+				var indices []ast.Expr
+				for i := range typeArgs.Len() {
+					indices = append(indices, TypeExpr(typeArgs.At(i), qual))
+				}
+				expr = &ast.IndexListExpr{
+					X:       expr,
+					Indices: indices,
+				}
+			}
+		}
+
+		return expr
+
 	case *types.Struct:
 		return ast.NewIdent(t.String())

@@ -269,9 +345,43 @@ func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 		return ast.NewIdent(t.String())

 	case *types.Union:
-		// TODO(hxjiang): handle the union through syntax (~A | ... | ~Z).
-		// Remove nil check when calling typesinternal.TypeExpr.
-		return nil
+		if t.Len() == 0 {
+			panic("Union type should have at least one term")
+		}
+		// Same as go/ast, the return expression will put last term in the
+		// Y field at topmost level of BinaryExpr.
+		// For union of type "float32 | float64 | int64", the structure looks
+		// similar to:
+		// {
+		// 	X: {
+		// 		X:  float32,
+		// 		Op: |
+		// 		Y:  float64,
+		// 	}
+		// 	Op: |,
+		// 	Y:  int64,
+		// }
+		var union ast.Expr
+		for i := range t.Len() {
+			term := t.Term(i)
+			termExpr := TypeExpr(term.Type(), qual)
+			if term.Tilde() {
+				termExpr = &ast.UnaryExpr{
+					Op: token.TILDE,
+					X:  termExpr,
+				}
+			}
+			if i == 0 {
+				union = termExpr
+			} else {
+				union = &ast.BinaryExpr{
+					X:  union,
+					Op: token.OR,
+					Y:  termExpr,
+				}
+			}
+		}
+		return union

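The BinaryExpr nesting described in the added comment can be sanity-checked by building the same shape directly with go/ast and printing it. This is only an illustration of the loop's output (a tilde is added on the first term to exercise the Tilde branch), not code from the diff:

package main

import (
	"go/ast"
	"go/printer"
	"go/token"
	"os"
)

func main() {
	// ~float32 | float64 | int64: the last term sits in the Y field of the
	// outermost BinaryExpr, matching the comment in the diff.
	union := &ast.BinaryExpr{
		X: &ast.BinaryExpr{
			X:  &ast.UnaryExpr{Op: token.TILDE, X: ast.NewIdent("float32")},
			Op: token.OR,
			Y:  ast.NewIdent("float64"),
		},
		Op: token.OR,
		Y:  ast.NewIdent("int64"),
	}
	printer.Fprint(os.Stdout, token.NewFileSet(), union) // prints: ~float32 | float64 | int64
}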
 	case *types.Tuple:
 		panic("invalid input type types.Tuple")
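The refactor above replaces the (*ast.File, *types.Package) pair with a go/types.Qualifier. As a rough standalone illustration of what a qualifier does (the package path and type below are made up, and types.TypeString stands in for the vendored helpers, which live in an internal package):

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	// Hypothetical external package "example.com/util" declaring: type Buf []byte
	utilPkg := types.NewPackage("example.com/util", "util")
	buf := types.NewNamed(
		types.NewTypeName(token.NoPos, utilPkg, "Buf", nil),
		types.NewSlice(types.Typ[types.Byte]),
		nil,
	)

	// A types.Qualifier decides how a package is rendered at a use site:
	// returning "" omits the qualifier, any other string is used as a prefix.
	byName := func(p *types.Package) string { return p.Name() }

	fmt.Println(types.TypeString(buf, byName))                    // util.Buf
	fmt.Println(types.TypeString(buf, types.RelativeTo(utilPkg))) // Buf
}

The same qualifier value is what the new TypeExpr and ZeroExpr signatures consume when deciding whether to emit a qualified pkg.T or a bare T.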
@@ -660,8 +660,8 @@ github.com/mattn/go-isatty
 ## explicit; go 1.19
 github.com/microcosm-cc/bluemonday
 github.com/microcosm-cc/bluemonday/css
-# github.com/miekg/dns v1.1.63
-## explicit; go 1.19
+# github.com/miekg/dns v1.1.64
+## explicit; go 1.22.0
 github.com/miekg/dns
 # github.com/minio/md5-simd v1.1.2
 ## explicit; go 1.14
@@ -1092,7 +1092,7 @@ golang.org/x/image/riff
 golang.org/x/image/vp8
 golang.org/x/image/vp8l
 golang.org/x/image/webp
-# golang.org/x/mod v0.22.0
+# golang.org/x/mod v0.23.0
 ## explicit; go 1.22.0
 golang.org/x/mod/internal/lazyregexp
 golang.org/x/mod/module
@@ -1152,7 +1152,7 @@ golang.org/x/text/transform
 golang.org/x/text/unicode/bidi
 golang.org/x/text/unicode/norm
 golang.org/x/text/width
-# golang.org/x/tools v0.28.0
+# golang.org/x/tools v0.30.0
 ## explicit; go 1.22.0
 golang.org/x/tools/go/ast/astutil
 golang.org/x/tools/go/gcexportdata