staticcheck (#313)

* CI: use staticcheck for linting

This commit switches the linter for Go code from golint to staticcheck.
Golint has been deprecated since 2021, and staticcheck is a
recommended replacement.

Signed-off-by: Lucas Servén Marín <lserven@gmail.com>

* revendor

Signed-off-by: Lucas Servén Marín <lserven@gmail.com>

* cmd,pkg: fix lint warnings

Signed-off-by: Lucas Servén Marín <lserven@gmail.com>
Author: Lucas Servén Marín
Committed: 2022-05-19 19:45:43 +02:00 (by GitHub)
Parent: 93f46e03ea
Commit: 50fbc2eec2
227 changed files with 55458 additions and 2689 deletions

vendor/golang.org/x/tools/go/internal/cgo/cgo.go (new file, 222 lines; generated, vendored)

@@ -0,0 +1,222 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package cgo handles cgo preprocessing of files containing `import "C"`.
//
// DESIGN
//
// The approach taken is to run the cgo processor on the package's
// CgoFiles and parse the output, faking the filenames of the
// resulting ASTs so that the synthetic file containing the C types is
// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
// not the names of the actual temporary files.
//
// The advantage of this approach is its fidelity to 'go build'. The
// downside is that the token.Position.Offset for each AST node is
// incorrect, being an offset within the temporary file. Line numbers
// should still be correct because of the //line comments.
//
// The logic of this file is mostly plundered from the 'go build'
// tool, which also invokes the cgo preprocessor.
//
//
// REJECTED ALTERNATIVE
//
// An alternative approach that we explored is to extend go/types'
// Importer mechanism to provide the identity of the importing package
// so that each time `import "C"` appears it resolves to a different
// synthetic package containing just the objects needed in that case.
// The loader would invoke cgo but parse only the cgo_types.go file
// defining the package-level objects, discarding the other files
// resulting from preprocessing.
//
// The benefit of this approach would have been that source-level
// syntax information would correspond exactly to the original cgo
// file, with no preprocessing involved, making source tools like
// godoc, guru, and eg happy. However, the approach was rejected
// due to the additional complexity it would impose on go/types. (It
// made for a beautiful demo, though.)
//
// cgo files, despite their *.go extension, are not legal Go source
// files per the specification since they may refer to unexported
// members of package "C" such as C.int. Also, a function such as
// C.getpwent has in effect two types, one matching its C type and one
// which additionally returns (errno C.int). The cgo preprocessor
// uses name mangling to distinguish these two functions in the
// processed code, but go/types would need to duplicate this logic in
// its handling of function calls, analogous to the treatment of map
// lookups in which y=m[k] and y,ok=m[k] are both legal.
package cgo
import (
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"path/filepath"
"regexp"
"strings"
exec "golang.org/x/sys/execabs"
)
// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
// the output and returns the resulting ASTs.
//
func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
if err != nil {
return nil, err
}
defer os.RemoveAll(tmpdir)
pkgdir := bp.Dir
if DisplayPath != nil {
pkgdir = DisplayPath(pkgdir)
}
cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
if err != nil {
return nil, err
}
var files []*ast.File
for i := range cgoFiles {
rd, err := os.Open(cgoFiles[i])
if err != nil {
return nil, err
}
display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
f, err := parser.ParseFile(fset, display, rd, mode)
rd.Close()
if err != nil {
return nil, err
}
files = append(files, f)
}
return files, nil
}
var cgoRe = regexp.MustCompile(`[/\\:]`)
// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
// lists of files: the resulting processed files (in temporary
// directory tmpdir) and the corresponding names of the unprocessed files.
//
// Run is adapted from (*builder).cgo in
// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
// Objective C, CGOPKGPATH, CGO_FLAGS.
//
// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
// to the cgo preprocessor. This in turn will set the // line comments
// referring to those files to use absolute paths. This is needed for
// go/packages using the legacy go list support so it is able to find
// the original files.
func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
cgoCPPFLAGS, _, _, _ := cflags(bp, true)
_, cgoexeCFLAGS, _, _ := cflags(bp, false)
if len(bp.CgoPkgConfig) > 0 {
pcCFLAGS, err := pkgConfigFlags(bp)
if err != nil {
return nil, nil, err
}
cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
}
// Allows including _cgo_export.h from .[ch] files in the package.
cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
// _cgo_gotypes.go (displayed "C") contains the type definitions.
files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
displayFiles = append(displayFiles, "C")
for _, fn := range bp.CgoFiles {
// "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
displayFiles = append(displayFiles, fn)
}
var cgoflags []string
if bp.Goroot && bp.ImportPath == "runtime/cgo" {
cgoflags = append(cgoflags, "-import_runtime_cgo=false")
}
if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
cgoflags = append(cgoflags, "-import_syscall=false")
}
var cgoFiles []string = bp.CgoFiles
if useabs {
cgoFiles = make([]string, len(bp.CgoFiles))
for i := range cgoFiles {
cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
}
}
args := stringList(
"go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
)
if false {
log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
}
cmd := exec.Command(args[0], args[1:]...)
cmd.Dir = pkgdir
cmd.Env = append(os.Environ(), "PWD="+pkgdir)
cmd.Stdout = os.Stderr
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
}
return files, displayFiles, nil
}
// -- unmodified from 'go build' ---------------------------------------
// Return the flags to use when invoking the C or C++ compilers, or cgo.
func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
var defaults string
if def {
defaults = "-g -O2"
}
cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
return
}
// envList returns the value of the given environment variable broken
// into fields, using the default value when the variable is empty.
func envList(key, def string) []string {
v := os.Getenv(key)
if v == "" {
v = def
}
return strings.Fields(v)
}
// stringList's arguments should be a sequence of string or []string values.
// stringList flattens them into a single []string.
func stringList(args ...interface{}) []string {
var x []string
for _, arg := range args {
switch arg := arg.(type) {
case []string:
x = append(x, arg...)
case string:
x = append(x, arg)
default:
panic("stringList: invalid argument")
}
}
return x
}
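
The package comment above describes how ProcessFiles runs the cgo preprocessor and parses its output with display names faked back to the original file paths. The following is only an illustrative sketch of how a caller would drive it: the package is internal to golang.org/x/tools, so the import is valid only from within that module, and "os/user" is just an arbitrary cgo-using package chosen for the example.

package main

import (
	"fmt"
	"go/build"
	"go/parser"
	"go/token"
	"log"

	"golang.org/x/tools/go/internal/cgo" // internal: importable only from inside x/tools
)

func main() {
	// Locate a package that contains `import "C"` files; "os/user" is just an example.
	bp, err := build.Import("os/user", "", 0)
	if err != nil {
		log.Fatal(err)
	}
	fset := token.NewFileSet()
	// Run cgo and parse the preprocessed output; the ASTs carry the faked
	// display names ("C" plus the original *.go file names).
	files, err := cgo.ProcessFiles(bp, fset, nil, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range files {
		fmt.Println(fset.File(f.Pos()).Name())
	}
}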


@@ -0,0 +1,39 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cgo
import (
"errors"
"fmt"
"go/build"
exec "golang.org/x/sys/execabs"
"strings"
)
// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
out, err := cmd.CombinedOutput()
if err != nil {
s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
if len(out) > 0 {
s = fmt.Sprintf("%s: %s", s, out)
}
return nil, errors.New(s)
}
if len(out) > 0 {
flags = strings.Fields(string(out))
}
return
}
// pkgConfigFlags calls pkg-config if needed and returns the cflags
// needed to build the package.
func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
if len(p.CgoPkgConfig) == 0 {
return nil, nil
}
return pkgConfig("--cflags", p.CgoPkgConfig)
}
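
For reference, a minimal standalone sketch of what pkgConfig above does: run pkg-config and split its stdout into flags. The vendored code shells out via golang.org/x/sys/execabs; this sketch uses plain os/exec, and the package name "libpng" is an invented example.

package main

import (
	"fmt"
	"log"
	"os/exec"
	"strings"
)

func main() {
	// Ask pkg-config for the C compiler flags of an example package.
	out, err := exec.Command("pkg-config", "--cflags", "libpng").CombinedOutput()
	if err != nil {
		log.Fatalf("pkg-config --cflags libpng failed: %v: %s", err, out)
	}
	// Whitespace-split the output into individual flags, as pkgConfig does.
	flags := strings.Fields(string(out))
	fmt.Println(flags)
}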


@@ -34,9 +34,6 @@ import (
// (suspected) format errors, and whenever a change is made to the format.
const debugFormat = false // default: false
// If trace is set, debugging output is printed to std out.
const trace = false // default: false
// Current export format version. Increase with each format change.
// Note: The latest binary (non-indexed) export format is at version 6.
// This exporter is still at level 4, but it doesn't matter since
@@ -92,16 +89,18 @@ func internalErrorf(format string, args ...interface{}) error {
// BExportData returns binary export data for pkg.
// If no file set is provided, position info will be missing.
func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
if !debug {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
}
// Not an internal error; panic again.
panic(e)
}
// Not an internal error; panic again.
panic(e)
}
}()
}()
}
p := exporter{
fset: fset,


@@ -74,9 +74,10 @@ func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []
pathList: []string{""}, // empty string is mapped to 0
fake: fakeFileSet{
fset: fset,
files: make(map[string]*token.File),
files: make(map[string]*fileInfo),
},
}
defer p.fake.setLines() // set lines for files in fset
// read version info
var versionstr string
@@ -338,37 +339,49 @@ func (p *importer) pos() token.Pos {
// Synthesize a token.Pos
type fakeFileSet struct {
fset *token.FileSet
files map[string]*token.File
files map[string]*fileInfo
}
type fileInfo struct {
file *token.File
lastline int
}
const maxlines = 64 * 1024
func (s *fakeFileSet) pos(file string, line, column int) token.Pos {
// TODO(mdempsky): Make use of column.
// Since we don't know the set of needed file positions, we
// reserve maxlines positions per file.
const maxlines = 64 * 1024
// Since we don't know the set of needed file positions, we reserve maxlines
// positions per file. We delay calling token.File.SetLines until all
// positions have been calculated (by way of fakeFileSet.setLines), so that
// we can avoid setting unnecessary lines. See also golang/go#46586.
f := s.files[file]
if f == nil {
f = s.fset.AddFile(file, -1, maxlines)
f = &fileInfo{file: s.fset.AddFile(file, -1, maxlines)}
s.files[file] = f
// Allocate the fake linebreak indices on first use.
// TODO(adonovan): opt: save ~512KB using a more complex scheme?
fakeLinesOnce.Do(func() {
fakeLines = make([]int, maxlines)
for i := range fakeLines {
fakeLines[i] = i
}
})
f.SetLines(fakeLines)
}
if line > maxlines {
line = 1
}
if line > f.lastline {
f.lastline = line
}
// Treat the file as if it contained only newlines
// and column=1: use the line number as the offset.
return f.Pos(line - 1)
// Return a fake position assuming that f.file consists only of newlines.
return token.Pos(f.file.Base() + line - 1)
}
func (s *fakeFileSet) setLines() {
fakeLinesOnce.Do(func() {
fakeLines = make([]int, maxlines)
for i := range fakeLines {
fakeLines[i] = i
}
})
for _, f := range s.files {
f.file.SetLines(fakeLines[:f.lastline])
}
}
var (
@@ -1029,6 +1042,7 @@ func predeclared() []types.Type {
// used internally by gc; never used by this package or in .a files
anyType{},
}
predecl = append(predecl, additionalPredeclared()...)
})
return predecl
}
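
The fakeFileSet change above defers token.File.SetLines until all fake positions are known (see golang/go#46586). A self-contained sketch of the underlying trick, using only the standard go/token package, might look like the following; the file name and line number are arbitrary.

package main

import (
	"fmt"
	"go/token"
)

func main() {
	const maxlines = 64 * 1024
	fset := token.NewFileSet()
	f := fset.AddFile("fake.go", -1, maxlines) // base chosen by the FileSet

	// Pretend the file consists only of newlines: line i+1 starts at offset i.
	lines := make([]int, maxlines)
	for i := range lines {
		lines[i] = i
	}
	f.SetLines(lines)

	// A fake position for "line 42" is then simply base + line - 1.
	pos := token.Pos(f.Base() + 42 - 1)
	fmt.Println(fset.Position(pos)) // fake.go:42:1
}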


@@ -16,7 +16,7 @@ import (
"strings"
)
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) {
// See $GOROOT/include/ar.h.
hdr := make([]byte, 16+12+6+6+8+10+2)
_, err = io.ReadFull(r, hdr)
@@ -28,7 +28,8 @@ func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
fmt.Printf("header: %s", hdr)
}
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
size, err = strconv.Atoi(s)
length, err := strconv.Atoi(s)
size = int64(length)
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
err = fmt.Errorf("invalid archive header")
return
@@ -42,8 +43,8 @@ func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
// file by reading from it. The reader must be positioned at the
// start of the file before calling this function. The hdr result
// is the string before the export data, either "$$" or "$$B".
//
func FindExportData(r *bufio.Reader) (hdr string, err error) {
// The size result is the length of the export data in bytes, or -1 if not known.
func FindExportData(r *bufio.Reader) (hdr string, size int64, err error) {
// Read first line to make sure this is an object file.
line, err := r.ReadSlice('\n')
if err != nil {
@@ -54,7 +55,7 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) {
if string(line) == "!<arch>\n" {
// Archive file. Scan to __.PKGDEF.
var name string
if name, _, err = readGopackHeader(r); err != nil {
if name, size, err = readGopackHeader(r); err != nil {
return
}
@@ -70,6 +71,7 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
size -= int64(len(line))
}
// Now at __.PKGDEF in archive or still at beginning of file.
@@ -86,8 +88,12 @@ func FindExportData(r *bufio.Reader) (hdr string, err error) {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
size -= int64(len(line))
}
hdr = string(line)
if size < 0 {
size = -1
}
return
}
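
FindExportData above now also reports the export data size, which it derives from the archive entry header parsed by readGopackHeader. As a reference, here is a standalone sketch of that fixed-width header layout; the field widths are taken from the code above, and the entry values are made up.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseHeader decodes one 60-byte archive entry header:
// name[16] mtime[12] uid[6] gid[6] mode[8] size[10] "`\n".
func parseHeader(hdr []byte) (name string, size int64, err error) {
	if len(hdr) != 60 || hdr[58] != '`' || hdr[59] != '\n' {
		return "", 0, fmt.Errorf("invalid archive header")
	}
	name = strings.TrimSpace(string(hdr[:16]))
	n, err := strconv.Atoi(strings.TrimSpace(string(hdr[48:58])))
	if err != nil {
		return "", 0, err
	}
	return name, int64(n), nil
}

func main() {
	// A synthetic __.PKGDEF header describing a 1234-byte payload.
	hdr := []byte(fmt.Sprintf("%-16s%-12s%-6s%-6s%-8s%-10d`\n", "__.PKGDEF", "0", "0", "0", "644", 1234))
	name, size, err := parseHeader(hdr)
	fmt.Println(name, size, err) // __.PKGDEF 1234 <nil>
}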


@@ -29,8 +29,14 @@ import (
"text/scanner"
)
// debugging/development support
const debug = false
const (
// Enable debug during development: it adds some additional checks, and
// prevents errors from being recovered.
debug = false
// If trace is set, debugging output is printed to std out.
trace = false
)
var pkgExts = [...]string{".a", ".o"}
@@ -179,7 +185,7 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
var hdr string
buf := bufio.NewReader(rc)
if hdr, err = FindExportData(buf); err != nil {
if hdr, _, err = FindExportData(buf); err != nil {
return
}


@@ -11,6 +11,7 @@ package gcimporter
import (
"bytes"
"encoding/binary"
"fmt"
"go/ast"
"go/constant"
"go/token"
@@ -19,11 +20,11 @@ import (
"math/big"
"reflect"
"sort"
)
"strconv"
"strings"
// Current indexed export format version. Increase with each format change.
// 0: Go1.11 encoding
const iexportVersion = 0
"golang.org/x/tools/internal/typeparams"
)
// Current bundled export format version. Increase with each format change.
// 0: initial implementation
@@ -35,31 +36,35 @@ const bundleVersion = 0
// The package path of the top-level package will not be recorded,
// so that calls to IImportData can override with a provided package path.
func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
return iexportCommon(out, fset, false, []*types.Package{pkg})
return iexportCommon(out, fset, false, iexportVersion, []*types.Package{pkg})
}
// IExportBundle writes an indexed export bundle for pkgs to out.
func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error {
return iexportCommon(out, fset, true, pkgs)
return iexportCommon(out, fset, true, iexportVersion, pkgs)
}
func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*types.Package) (err error) {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int, pkgs []*types.Package) (err error) {
if !debug {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
}
// Not an internal error; panic again.
panic(e)
}
// Not an internal error; panic again.
panic(e)
}
}()
}()
}
p := iexporter{
fset: fset,
version: version,
allPkgs: map[*types.Package]bool{},
stringIndex: map[string]uint64{},
declIndex: map[types.Object]uint64{},
tparamNames: map[types.Object]string{},
typIndex: map[types.Type]uint64{},
}
if !bundle {
@@ -119,7 +124,7 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*type
if bundle {
hdr.uint64(bundleVersion)
}
hdr.uint64(iexportVersion)
hdr.uint64(uint64(p.version))
hdr.uint64(uint64(p.strings.Len()))
hdr.uint64(dataLen)
@@ -136,8 +141,12 @@ func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*type
// non-compiler tools and includes a complete package description
// (i.e., name and height).
func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
type pkgObj struct {
obj types.Object
name string // qualified name; differs from obj.Name for type params
}
// Build a map from packages to objects from that package.
pkgObjs := map[*types.Package][]types.Object{}
pkgObjs := map[*types.Package][]pkgObj{}
// For the main index, make sure to include every package that
// we reference, even if we're not exporting (or reexporting)
@@ -150,7 +159,8 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
}
for obj := range index {
pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj)
name := w.p.exportName(obj)
pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], pkgObj{obj, name})
}
var pkgs []*types.Package
@@ -158,7 +168,7 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
pkgs = append(pkgs, pkg)
sort.Slice(objs, func(i, j int) bool {
return objs[i].Name() < objs[j].Name()
return objs[i].name < objs[j].name
})
}
@@ -175,15 +185,25 @@ func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
objs := pkgObjs[pkg]
w.uint64(uint64(len(objs)))
for _, obj := range objs {
w.string(obj.Name())
w.uint64(index[obj])
w.string(obj.name)
w.uint64(index[obj.obj])
}
}
}
// exportName returns the 'exported' name of an object. It differs from
// obj.Name() only for type parameters (see tparamExportName for details).
func (p *iexporter) exportName(obj types.Object) (res string) {
if name := p.tparamNames[obj]; name != "" {
return name
}
return obj.Name()
}
type iexporter struct {
fset *token.FileSet
out *bytes.Buffer
fset *token.FileSet
out *bytes.Buffer
version int
localpkg *types.Package
@@ -197,9 +217,21 @@ type iexporter struct {
strings intWriter
stringIndex map[string]uint64
data0 intWriter
declIndex map[types.Object]uint64
typIndex map[types.Type]uint64
data0 intWriter
declIndex map[types.Object]uint64
tparamNames map[types.Object]string // typeparam->exported name
typIndex map[types.Type]uint64
indent int // for tracing support
}
func (p *iexporter) trace(format string, args ...interface{}) {
if !trace {
// Call sites should also be guarded, but having this check here allows
// easily enabling/disabling debug trace statements.
return
}
fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
}
// stringOff returns the offset of s within the string section.
@@ -225,7 +257,7 @@ func (p *iexporter) pushDecl(obj types.Object) {
return
}
p.declIndex[obj] = ^uint64(0) // mark n present in work queue
p.declIndex[obj] = ^uint64(0) // mark obj present in work queue
p.declTodo.pushTail(obj)
}
@@ -233,10 +265,11 @@ func (p *iexporter) pushDecl(obj types.Object) {
type exportWriter struct {
p *iexporter
data intWriter
currPkg *types.Package
prevFile string
prevLine int64
data intWriter
currPkg *types.Package
prevFile string
prevLine int64
prevColumn int64
}
func (w *exportWriter) exportPath(pkg *types.Package) string {
@@ -247,6 +280,14 @@ func (w *exportWriter) exportPath(pkg *types.Package) string {
}
func (p *iexporter) doDecl(obj types.Object) {
if trace {
p.trace("exporting decl %v (%T)", obj, obj)
p.indent++
defer func() {
p.indent--
p.trace("=> %s", obj)
}()
}
w := p.newWriter()
w.setPkg(obj.Pkg(), false)
@@ -261,8 +302,24 @@ func (p *iexporter) doDecl(obj types.Object) {
if sig.Recv() != nil {
panic(internalErrorf("unexpected method: %v", sig))
}
w.tag('F')
// Function.
if typeparams.ForSignature(sig).Len() == 0 {
w.tag('F')
} else {
w.tag('G')
}
w.pos(obj.Pos())
// The tparam list of the function type is the declaration of the type
// params. So, write out the type params right now. Then those type params
// will be referenced via their type offset (via typOff) in all other
// places in the signature and function where they are used.
//
// While importing the type parameters, tparamList computes and records
// their export name, so that it can be later used when writing the index.
if tparams := typeparams.ForSignature(sig); tparams.Len() > 0 {
w.tparamList(obj.Name(), tparams, obj.Pkg())
}
w.signature(sig)
case *types.Const:
@@ -271,30 +328,56 @@ func (p *iexporter) doDecl(obj types.Object) {
w.value(obj.Type(), obj.Val())
case *types.TypeName:
t := obj.Type()
if tparam, ok := t.(*typeparams.TypeParam); ok {
w.tag('P')
w.pos(obj.Pos())
constraint := tparam.Constraint()
if p.version >= iexportVersionGo1_18 {
implicit := false
if iface, _ := constraint.(*types.Interface); iface != nil {
implicit = typeparams.IsImplicit(iface)
}
w.bool(implicit)
}
w.typ(constraint, obj.Pkg())
break
}
if obj.IsAlias() {
w.tag('A')
w.pos(obj.Pos())
w.typ(obj.Type(), obj.Pkg())
w.typ(t, obj.Pkg())
break
}
// Defined type.
w.tag('T')
named, ok := t.(*types.Named)
if !ok {
panic(internalErrorf("%s is not a defined type", t))
}
if typeparams.ForNamed(named).Len() == 0 {
w.tag('T')
} else {
w.tag('U')
}
w.pos(obj.Pos())
if typeparams.ForNamed(named).Len() > 0 {
// While importing the type parameters, tparamList computes and records
// their export name, so that it can be later used when writing the index.
w.tparamList(obj.Name(), typeparams.ForNamed(named), obj.Pkg())
}
underlying := obj.Type().Underlying()
w.typ(underlying, obj.Pkg())
t := obj.Type()
if types.IsInterface(t) {
break
}
named, ok := t.(*types.Named)
if !ok {
panic(internalErrorf("%s is not a defined type", t))
}
n := named.NumMethods()
w.uint64(uint64(n))
for i := 0; i < n; i++ {
@@ -302,6 +385,17 @@ func (p *iexporter) doDecl(obj types.Object) {
w.pos(m.Pos())
w.string(m.Name())
sig, _ := m.Type().(*types.Signature)
// Receiver type parameters are type arguments of the receiver type, so
// their name must be qualified before exporting recv.
if rparams := typeparams.RecvTypeParams(sig); rparams.Len() > 0 {
prefix := obj.Name() + "." + m.Name()
for i := 0; i < rparams.Len(); i++ {
rparam := rparams.At(i)
name := tparamExportName(prefix, rparam)
w.p.tparamNames[rparam.Obj()] = name
}
}
w.param(sig.Recv())
w.signature(sig)
}
@@ -318,6 +412,48 @@ func (w *exportWriter) tag(tag byte) {
}
func (w *exportWriter) pos(pos token.Pos) {
if w.p.version >= iexportVersionPosCol {
w.posV1(pos)
} else {
w.posV0(pos)
}
}
func (w *exportWriter) posV1(pos token.Pos) {
if w.p.fset == nil {
w.int64(0)
return
}
p := w.p.fset.Position(pos)
file := p.Filename
line := int64(p.Line)
column := int64(p.Column)
deltaColumn := (column - w.prevColumn) << 1
deltaLine := (line - w.prevLine) << 1
if file != w.prevFile {
deltaLine |= 1
}
if deltaLine != 0 {
deltaColumn |= 1
}
w.int64(deltaColumn)
if deltaColumn&1 != 0 {
w.int64(deltaLine)
if deltaLine&1 != 0 {
w.string(file)
}
}
w.prevFile = file
w.prevLine = line
w.prevColumn = column
}
func (w *exportWriter) posV0(pos token.Pos) {
if w.p.fset == nil {
w.int64(0)
return
@@ -359,10 +495,11 @@ func (w *exportWriter) pkg(pkg *types.Package) {
}
func (w *exportWriter) qualifiedIdent(obj types.Object) {
name := w.p.exportName(obj)
// Ensure any referenced declarations are written out too.
w.p.pushDecl(obj)
w.string(obj.Name())
w.string(name)
w.pkg(obj.Pkg())
}
@@ -396,11 +533,32 @@ func (w *exportWriter) startType(k itag) {
}
func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
if trace {
w.p.trace("exporting type %s (%T)", t, t)
w.p.indent++
defer func() {
w.p.indent--
w.p.trace("=> %s", t)
}()
}
switch t := t.(type) {
case *types.Named:
if targs := typeparams.NamedTypeArgs(t); targs.Len() > 0 {
w.startType(instanceType)
// TODO(rfindley): investigate if this position is correct, and if it
// matters.
w.pos(t.Obj().Pos())
w.typeList(targs, pkg)
w.typ(typeparams.NamedTypeOrigin(t), pkg)
return
}
w.startType(definedType)
w.qualifiedIdent(t.Obj())
case *typeparams.TypeParam:
w.startType(typeParamType)
w.qualifiedIdent(t.Obj())
case *types.Pointer:
w.startType(pointerType)
w.typ(t.Elem(), pkg)
@@ -461,9 +619,14 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
n := t.NumEmbeddeds()
w.uint64(uint64(n))
for i := 0; i < n; i++ {
f := t.Embedded(i)
w.pos(f.Obj().Pos())
w.typ(f.Obj().Type(), f.Obj().Pkg())
ft := t.EmbeddedType(i)
tPkg := pkg
if named, _ := ft.(*types.Named); named != nil {
w.pos(named.Obj().Pos())
} else {
w.pos(token.NoPos)
}
w.typ(ft, tPkg)
}
n = t.NumExplicitMethods()
@@ -476,6 +639,16 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
w.signature(sig)
}
case *typeparams.Union:
w.startType(unionType)
nt := t.Len()
w.uint64(uint64(nt))
for i := 0; i < nt; i++ {
term := t.Term(i)
w.bool(term.Tilde())
w.typ(term.Type(), pkg)
}
default:
panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
}
@@ -497,6 +670,56 @@ func (w *exportWriter) signature(sig *types.Signature) {
}
}
func (w *exportWriter) typeList(ts *typeparams.TypeList, pkg *types.Package) {
w.uint64(uint64(ts.Len()))
for i := 0; i < ts.Len(); i++ {
w.typ(ts.At(i), pkg)
}
}
func (w *exportWriter) tparamList(prefix string, list *typeparams.TypeParamList, pkg *types.Package) {
ll := uint64(list.Len())
w.uint64(ll)
for i := 0; i < list.Len(); i++ {
tparam := list.At(i)
// Set the type parameter exportName before exporting its type.
exportName := tparamExportName(prefix, tparam)
w.p.tparamNames[tparam.Obj()] = exportName
w.typ(list.At(i), pkg)
}
}
const blankMarker = "$"
// tparamExportName returns the 'exported' name of a type parameter, which
// differs from its actual object name: it is prefixed with a qualifier, and
// blank type parameter names are disambiguated by their index in the type
// parameter list.
func tparamExportName(prefix string, tparam *typeparams.TypeParam) string {
assert(prefix != "")
name := tparam.Obj().Name()
if name == "_" {
name = blankMarker + strconv.Itoa(tparam.Index())
}
return prefix + "." + name
}
// tparamName returns the real name of a type parameter, after stripping its
// qualifying prefix and reverting blank-name encoding. See tparamExportName
// for details.
func tparamName(exportName string) string {
// Remove the "path" from the type param name that makes it unique.
ix := strings.LastIndex(exportName, ".")
if ix < 0 {
errorf("malformed type parameter export name %s: missing prefix", exportName)
}
name := exportName[ix+1:]
if strings.HasPrefix(name, blankMarker) {
return "_"
}
return name
}
func (w *exportWriter) paramList(tup *types.Tuple) {
n := tup.Len()
w.uint64(uint64(n))
@@ -513,6 +736,9 @@ func (w *exportWriter) param(obj types.Object) {
func (w *exportWriter) value(typ types.Type, v constant.Value) {
w.typ(typ, nil)
if w.p.version >= iexportVersionGo1_18 {
w.int64(int64(v.Kind()))
}
switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
case types.IsBoolean:
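
tparamExportName and tparamName above encode type-parameter names with a qualifying prefix and a "$<index>" marker for blank names. A minimal round-trip sketch of just that naming scheme follows; the prefix "Pair.Get" is an invented example.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

const blankMarker = "$"

// exportName qualifies a type-parameter name with its prefix and
// disambiguates blank names by index, as tparamExportName does.
func exportName(prefix, name string, index int) string {
	if name == "_" {
		name = blankMarker + strconv.Itoa(index)
	}
	return prefix + "." + name
}

// importName strips the qualifier and reverts blank-name encoding,
// as tparamName does.
func importName(s string) string {
	name := s[strings.LastIndex(s, ".")+1:]
	if strings.HasPrefix(name, blankMarker) {
		return "_"
	}
	return name
}

func main() {
	en := exportName("Pair.Get", "_", 1)
	fmt.Println(en, "->", importName(en)) // Pair.Get.$1 -> _
}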


@@ -18,6 +18,9 @@ import (
"go/types"
"io"
"sort"
"strings"
"golang.org/x/tools/internal/typeparams"
)
type intReader struct {
@@ -41,6 +44,19 @@ func (r *intReader) uint64() uint64 {
return i
}
// Keep this in sync with constants in iexport.go.
const (
iexportVersionGo1_11 = 0
iexportVersionPosCol = 1
iexportVersionGo1_18 = 2
iexportVersionGenerics = 2
)
type ident struct {
pkg string
name string
}
const predeclReserved = 32
type itag uint64
@@ -56,6 +72,9 @@ const (
signatureType
structType
interfaceType
typeParamType
instanceType
unionType
)
// IImportData imports a package from the serialized package data
@@ -78,15 +97,17 @@ func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data
func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string) (pkgs []*types.Package, err error) {
const currentVersion = 1
version := int64(-1)
defer func() {
if e := recover(); e != nil {
if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
if !debug {
defer func() {
if e := recover(); e != nil {
if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
}
}
}
}()
}()
}
r := &intReader{bytes.NewReader(data), path}
@@ -101,9 +122,13 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
version = int64(r.uint64())
switch version {
case currentVersion, 0:
case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11:
default:
errorf("unknown iexport format version %d", version)
if version > iexportVersionGo1_18 {
errorf("unstable iexport format version %d, just rebuild compiler and std library", version)
} else {
errorf("unknown iexport format version %d", version)
}
}
sLen := int64(r.uint64())
@@ -115,8 +140,8 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
r.Seek(sLen+dLen, io.SeekCurrent)
p := iimporter{
ipath: path,
version: int(version),
ipath: path,
stringData: stringData,
stringCache: make(map[uint64]string),
@@ -125,12 +150,16 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
declData: declData,
pkgIndex: make(map[*types.Package]map[string]uint64),
typCache: make(map[uint64]types.Type),
// Separate map for typeparams, keyed by their package and unique
// name.
tparamIndex: make(map[ident]types.Type),
fake: fakeFileSet{
fset: fset,
files: make(map[string]*token.File),
files: make(map[string]*fileInfo),
},
}
defer p.fake.setLines() // set lines for files in fset
for i, pt := range predeclared() {
p.typCache[uint64(i)] = pt
@@ -208,6 +237,15 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
pkg.MarkComplete()
}
// SetConstraint can't be called if the constraint type is not yet complete.
// When type params are created in the 'P' case of (*importReader).obj(),
// the associated constraint type may not be complete due to recursion.
// Therefore, we defer calling SetConstraint there, and call it here instead
// after all types are complete.
for _, d := range p.later {
typeparams.SetTypeParamConstraint(d.t, d.constraint)
}
for _, typ := range p.interfaceList {
typ.Complete()
}
@@ -215,23 +253,51 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
return pkgs, nil
}
type setConstraintArgs struct {
t *typeparams.TypeParam
constraint types.Type
}
type iimporter struct {
ipath string
version int
ipath string
stringData []byte
stringCache map[uint64]string
pkgCache map[uint64]*types.Package
declData []byte
pkgIndex map[*types.Package]map[string]uint64
typCache map[uint64]types.Type
declData []byte
pkgIndex map[*types.Package]map[string]uint64
typCache map[uint64]types.Type
tparamIndex map[ident]types.Type
fake fakeFileSet
interfaceList []*types.Interface
// Arguments for calls to SetConstraint that are deferred due to recursive types
later []setConstraintArgs
indent int // for tracing support
}
func (p *iimporter) trace(format string, args ...interface{}) {
if !trace {
// Call sites should also be guarded, but having this check here allows
// easily enabling/disabling debug trace statements.
return
}
fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
}
func (p *iimporter) doDecl(pkg *types.Package, name string) {
if debug {
p.trace("import decl %s", name)
p.indent++
defer func() {
p.indent--
p.trace("=> %s", name)
}()
}
// See if we've already imported this declaration.
if obj := pkg.Scope().Lookup(name); obj != nil {
return
@@ -273,7 +339,7 @@ func (p *iimporter) pkgAt(off uint64) *types.Package {
}
func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
if t, ok := p.typCache[off]; ok && canReuse(base, t) {
return t
}
@@ -285,12 +351,30 @@ func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
r.declReader.Reset(p.declData[off-predeclReserved:])
t := r.doType(base)
if base == nil || !isInterface(t) {
if canReuse(base, t) {
p.typCache[off] = t
}
return t
}
// canReuse reports whether the type rhs on the RHS of the declaration for def
// may be re-used.
//
// Specifically, if def is non-nil and rhs is an interface type with methods, it
// may not be re-used because we have a convention of setting the receiver type
// for interface methods to def.
func canReuse(def *types.Named, rhs types.Type) bool {
if def == nil {
return true
}
iface, _ := rhs.(*types.Interface)
if iface == nil {
return true
}
// Don't use iface.Empty() here as iface may not be complete.
return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0
}
type importReader struct {
p *iimporter
declReader bytes.Reader
@@ -315,17 +399,26 @@ func (r *importReader) obj(name string) {
r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
case 'F':
sig := r.signature(nil)
case 'F', 'G':
var tparams []*typeparams.TypeParam
if tag == 'G' {
tparams = r.tparamList()
}
sig := r.signature(nil, nil, tparams)
r.declare(types.NewFunc(pos, r.currPkg, name, sig))
case 'T':
case 'T', 'U':
// Types can be recursive. We need to setup a stub
// declaration before recursing.
obj := types.NewTypeName(pos, r.currPkg, name, nil)
named := types.NewNamed(obj, nil, nil)
// Declare obj before calling r.tparamList, so the new type name is recognized
// if used in the constraint of one of its own typeparams (see #48280).
r.declare(obj)
if tag == 'U' {
tparams := r.tparamList()
typeparams.SetForNamed(named, tparams)
}
underlying := r.p.typAt(r.uint64(), named).Underlying()
named.SetUnderlying(underlying)
@@ -335,12 +428,59 @@ func (r *importReader) obj(name string) {
mpos := r.pos()
mname := r.ident()
recv := r.param()
msig := r.signature(recv)
// If the receiver has any targs, set those as the
// rparams of the method (since those are the
// typeparams being used in the method sig/body).
base := baseType(recv.Type())
assert(base != nil)
targs := typeparams.NamedTypeArgs(base)
var rparams []*typeparams.TypeParam
if targs.Len() > 0 {
rparams = make([]*typeparams.TypeParam, targs.Len())
for i := range rparams {
rparams[i] = targs.At(i).(*typeparams.TypeParam)
}
}
msig := r.signature(recv, rparams, nil)
named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
}
}
case 'P':
// We need to "declare" a typeparam in order to have a name that
// can be referenced recursively (if needed) in the type param's
// bound.
if r.p.version < iexportVersionGenerics {
errorf("unexpected type param type")
}
name0 := tparamName(name)
tn := types.NewTypeName(pos, r.currPkg, name0, nil)
t := typeparams.NewTypeParam(tn, nil)
// To handle recursive references to the typeparam within its
// bound, save the partial type in tparamIndex before reading the bounds.
id := ident{r.currPkg.Name(), name}
r.p.tparamIndex[id] = t
var implicit bool
if r.p.version >= iexportVersionGo1_18 {
implicit = r.bool()
}
constraint := r.typ()
if implicit {
iface, _ := constraint.(*types.Interface)
if iface == nil {
errorf("non-interface constraint marked implicit")
}
typeparams.MarkImplicit(iface)
}
// The constraint type may not be complete, if we
// are in the middle of a type recursion involving type
// constraints. So, we defer SetConstraint until we have
// completely set up all types in ImportData.
r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint})
case 'V':
typ := r.typ()
@@ -357,6 +497,10 @@ func (r *importReader) declare(obj types.Object) {
func (r *importReader) value() (typ types.Type, val constant.Value) {
typ = r.typ()
if r.p.version >= iexportVersionGo1_18 {
// TODO: add support for using the kind.
_ = constant.Kind(r.int64())
}
switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
case types.IsBoolean:
@@ -499,7 +643,7 @@ func (r *importReader) qualifiedIdent() (*types.Package, string) {
}
func (r *importReader) pos() token.Pos {
if r.p.version >= 1 {
if r.p.version >= iexportVersionPosCol {
r.posv1()
} else {
r.posv0()
@@ -547,8 +691,17 @@ func isInterface(t types.Type) bool {
func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
func (r *importReader) doType(base *types.Named) types.Type {
switch k := r.kind(); k {
func (r *importReader) doType(base *types.Named) (res types.Type) {
k := r.kind()
if debug {
r.p.trace("importing type %d (base: %s)", k, base)
r.p.indent++
defer func() {
r.p.indent--
r.p.trace("=> %s", res)
}()
}
switch k {
default:
errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
return nil
@@ -571,7 +724,7 @@ func (r *importReader) doType(base *types.Named) types.Type {
return types.NewMap(r.typ(), r.typ())
case signatureType:
r.currPkg = r.pkg()
return r.signature(nil)
return r.signature(nil, nil, nil)
case structType:
r.currPkg = r.pkg()
@@ -611,13 +764,56 @@ func (r *importReader) doType(base *types.Named) types.Type {
recv = types.NewVar(token.NoPos, r.currPkg, "", base)
}
msig := r.signature(recv)
msig := r.signature(recv, nil, nil)
methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
}
typ := newInterface(methods, embeddeds)
r.p.interfaceList = append(r.p.interfaceList, typ)
return typ
case typeParamType:
if r.p.version < iexportVersionGenerics {
errorf("unexpected type param type")
}
pkg, name := r.qualifiedIdent()
id := ident{pkg.Name(), name}
if t, ok := r.p.tparamIndex[id]; ok {
// We're already in the process of importing this typeparam.
return t
}
// Otherwise, import the definition of the typeparam now.
r.p.doDecl(pkg, name)
return r.p.tparamIndex[id]
case instanceType:
if r.p.version < iexportVersionGenerics {
errorf("unexpected instantiation type")
}
// pos does not matter for instances: they are positioned on the original
// type.
_ = r.pos()
len := r.uint64()
targs := make([]types.Type, len)
for i := range targs {
targs[i] = r.typ()
}
baseType := r.typ()
// The imported instantiated type doesn't include any methods, so
// we must always use the methods of the base (orig) type.
// TODO provide a non-nil *Environment
t, _ := typeparams.Instantiate(nil, baseType, targs, false)
return t
case unionType:
if r.p.version < iexportVersionGenerics {
errorf("unexpected instantiation type")
}
terms := make([]*typeparams.Term, r.uint64())
for i := range terms {
terms[i] = typeparams.NewTerm(r.bool(), r.typ())
}
return typeparams.NewUnion(terms)
}
}
@@ -625,11 +821,25 @@ func (r *importReader) kind() itag {
return itag(r.uint64())
}
func (r *importReader) signature(recv *types.Var) *types.Signature {
func (r *importReader) signature(recv *types.Var, rparams []*typeparams.TypeParam, tparams []*typeparams.TypeParam) *types.Signature {
params := r.paramList()
results := r.paramList()
variadic := params.Len() > 0 && r.bool()
return types.NewSignature(recv, params, results, variadic)
return typeparams.NewSignatureType(recv, rparams, tparams, params, results, variadic)
}
func (r *importReader) tparamList() []*typeparams.TypeParam {
n := r.uint64()
if n == 0 {
return nil
}
xs := make([]*typeparams.TypeParam, n)
for i := range xs {
// Note: the standard library importer is tolerant of nil types here,
// though would panic in SetTypeParams.
xs[i] = r.typ().(*typeparams.TypeParam)
}
return xs
}
func (r *importReader) paramList() *types.Tuple {
@@ -674,3 +884,13 @@ func (r *importReader) byte() byte {
}
return x
}
func baseType(typ types.Type) *types.Named {
// pointer receivers are never types.Named types
if p, _ := typ.(*types.Pointer); p != nil {
typ = p.Elem()
}
// receiver base types are always (possibly generic) types.Named types
n, _ := typ.(*types.Named)
return n
}
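
The importer above defers SetConstraint until all types are complete, because a type parameter's constraint may reference types that are still being imported. A minimal sketch of that ordering with the standard go/types API (Go 1.18+), rather than the internal typeparams shim used here, could look like this; the package path and names are invented.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.com/p", "p")

	// Create the type parameter first with a nil constraint...
	tn := types.NewTypeName(token.NoPos, pkg, "T", nil)
	tp := types.NewTypeParam(tn, nil)

	// ...build and complete the constraint interface separately...
	iface := types.NewInterfaceType(nil, []types.Type{types.Universe.Lookup("comparable").Type()})
	iface.Complete()

	// ...and only then attach it, mirroring the deferred
	// SetTypeParamConstraint calls above.
	tp.SetConstraint(iface)

	fmt.Println(tp.Obj().Name(), tp.Constraint())
}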


@@ -0,0 +1,16 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.18
// +build !go1.18
package gcimporter
import "go/types"
const iexportVersion = iexportVersionGo1_11
func additionalPredeclared() []types.Type {
return nil
}


@@ -0,0 +1,23 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.18
// +build go1.18
package gcimporter
import "go/types"
const iexportVersion = iexportVersionGenerics
// additionalPredeclared returns additional predeclared types in go.1.18.
func additionalPredeclared() []types.Type {
return []types.Type{
// comparable
types.Universe.Lookup("comparable").Type(),
// any
types.Universe.Lookup("any").Type(),
}
}