staticcheck (#313)

* CI: use staticcheck for linting

This commit switches the linter for Go code from golint to staticcheck.
Golint has been deprecated since last year and staticcheck is a
recommended replacement.

Signed-off-by: Lucas Servén Marín <lserven@gmail.com>

* revendor

Signed-off-by: Lucas Servén Marín <lserven@gmail.com>

* cmd,pkg: fix lint warnings

Signed-off-by: Lucas Servén Marín <lserven@gmail.com>
Authored by Lucas Servén Marín on 2022-05-19 19:45:43 +02:00, committed by GitHub
parent 93f46e03ea, commit 50fbc2eec2
227 changed files with 55458 additions and 2689 deletions
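
To illustrate the switch, here is a minimal, hypothetical example (not code from this commit) of the kind of warning staticcheck reports and golint does not; S1002 is staticcheck's "omit comparison with boolean constant" check:

package main

func start() {}

func main() {
    enabled := true
    if enabled == true { // flagged by staticcheck S1002: should omit comparison to bool constant
        start()
    }
    if enabled { // the simplified form staticcheck suggests
        start()
    }
}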


@@ -0,0 +1,342 @@
// Package code answers structural and type questions about Go code.
package code
import (
"flag"
"fmt"
"go/ast"
"go/constant"
"go/token"
"go/types"
"strings"
"honnef.co/go/tools/analysis/facts"
"honnef.co/go/tools/go/ast/astutil"
"honnef.co/go/tools/go/types/typeutil"
"honnef.co/go/tools/pattern"
"golang.org/x/exp/typeparams"
"golang.org/x/tools/go/analysis"
)
type Positioner interface {
Pos() token.Pos
}
func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool {
return typeutil.IsType(pass.TypesInfo.TypeOf(expr), name)
}
func IsInTest(pass *analysis.Pass, node Positioner) bool {
// FIXME(dh): this doesn't work for global variables with
// initializers
f := pass.Fset.File(node.Pos())
return f != nil && strings.HasSuffix(f.Name(), "_test.go")
}
// IsMain reports whether the package being processed is a package
// main.
func IsMain(pass *analysis.Pass) bool {
return pass.Pkg.Name() == "main"
}
// IsMainLike reports whether the package being processed is a
// main-like package. A main-like package is a package that is
// package main, or that is intended to be used by a tool framework
// such as cobra to implement a command.
//
// Note that this function errs on the side of false positives; it may
// return true for packages that aren't main-like. IsMainLike is
// intended for analyses that wish to suppress diagnostics for
// main-like packages to avoid false positives.
func IsMainLike(pass *analysis.Pass) bool {
if pass.Pkg.Name() == "main" {
return true
}
for _, imp := range pass.Pkg.Imports() {
if imp.Path() == "github.com/spf13/cobra" {
return true
}
}
return false
}
func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string {
info := pass.TypesInfo
sel := info.Selections[expr]
if sel == nil {
if x, ok := expr.X.(*ast.Ident); ok {
pkg, ok := info.ObjectOf(x).(*types.PkgName)
if !ok {
// This shouldn't happen
return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
}
return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
}
panic(fmt.Sprintf("unsupported selector: %v", expr))
}
if v, ok := sel.Obj().(*types.Var); ok && v.IsField() {
return fmt.Sprintf("(%s).%s", typeutil.DereferenceR(sel.Recv()), sel.Obj().Name())
} else {
return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
}
}
func IsNil(pass *analysis.Pass, expr ast.Expr) bool {
return pass.TypesInfo.Types[expr].IsNil()
}
func BoolConst(pass *analysis.Pass, expr ast.Expr) bool {
val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
return constant.BoolVal(val)
}
func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool {
// We explicitly don't support typed bools because more often than
// not, custom bool types are used as binary enums and the
// explicit comparison is desired.
ident, ok := expr.(*ast.Ident)
if !ok {
return false
}
obj := pass.TypesInfo.ObjectOf(ident)
c, ok := obj.(*types.Const)
if !ok {
return false
}
basic, ok := c.Type().(*types.Basic)
if !ok {
return false
}
if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool {
return false
}
return true
}
func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) {
tv := pass.TypesInfo.Types[expr]
if tv.Value == nil {
return 0, false
}
if tv.Value.Kind() != constant.Int {
return 0, false
}
return constant.Int64Val(tv.Value)
}
func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) {
val := pass.TypesInfo.Types[expr].Value
if val == nil {
return "", false
}
if val.Kind() != constant.String {
return "", false
}
return constant.StringVal(val), true
}
func CallName(pass *analysis.Pass, call *ast.CallExpr) string {
fun := astutil.Unparen(call.Fun)
// Instantiating a function cannot return another generic function, so doing this once is enough
switch idx := fun.(type) {
case *ast.IndexExpr:
fun = idx.X
case *typeparams.IndexListExpr:
fun = idx.X
}
// (foo)[T] is not a valid instantiation, so no need to unparen again.
switch fun := fun.(type) {
case *ast.SelectorExpr:
fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func)
if !ok {
return ""
}
return typeutil.FuncName(fn)
case *ast.Ident:
obj := pass.TypesInfo.ObjectOf(fun)
switch obj := obj.(type) {
case *types.Func:
return typeutil.FuncName(obj)
case *types.Builtin:
return obj.Name()
default:
return ""
}
default:
return ""
}
}
func IsCallTo(pass *analysis.Pass, node ast.Node, name string) bool {
call, ok := node.(*ast.CallExpr)
if !ok {
return false
}
return CallName(pass, call) == name
}
func IsCallToAny(pass *analysis.Pass, node ast.Node, names ...string) bool {
call, ok := node.(*ast.CallExpr)
if !ok {
return false
}
q := CallName(pass, call)
for _, name := range names {
if q == name {
return true
}
}
return false
}
func File(pass *analysis.Pass, node Positioner) *ast.File {
m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File)
return m[pass.Fset.File(node.Pos())]
}
// IsGenerated reports whether pos is in a generated file. It ignores
// //line directives.
func IsGenerated(pass *analysis.Pass, pos token.Pos) bool {
_, ok := Generator(pass, pos)
return ok
}
// Generator returns the generator that generated the file containing
// pos. It ignores //line directives.
func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) {
file := pass.Fset.PositionFor(pos, false).Filename
m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
g, ok := m[file]
return g, ok
}
// MayHaveSideEffects reports whether expr may have side effects. If
// the purity argument is nil, this function implements a purely
// syntactic check, meaning that any function call may have side
// effects, regardless of the called function's body. Otherwise,
// purity will be consulted to determine the purity of function calls.
func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity facts.PurityResult) bool {
switch expr := expr.(type) {
case *ast.BadExpr:
return true
case *ast.Ellipsis:
return MayHaveSideEffects(pass, expr.Elt, purity)
case *ast.FuncLit:
// the literal itself cannot have side effects, only calling it
// might, which is handled by CallExpr.
return false
case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
// types cannot have side effects
return false
case *ast.BasicLit:
return false
case *ast.BinaryExpr:
return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Y, purity)
case *ast.CallExpr:
if purity == nil {
return true
}
switch obj := typeutil.Callee(pass.TypesInfo, expr).(type) {
case *types.Func:
if _, ok := purity[obj]; !ok {
return true
}
case *types.Builtin:
switch obj.Name() {
case "len", "cap":
default:
return true
}
default:
return true
}
for _, arg := range expr.Args {
if MayHaveSideEffects(pass, arg, purity) {
return true
}
}
return false
case *ast.CompositeLit:
if MayHaveSideEffects(pass, expr.Type, purity) {
return true
}
for _, elt := range expr.Elts {
if MayHaveSideEffects(pass, elt, purity) {
return true
}
}
return false
case *ast.Ident:
return false
case *ast.IndexExpr:
return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Index, purity)
case *typeparams.IndexListExpr:
// In theory, none of the checks are necessary, as IndexListExpr only involves types. But there is no harm in
// being safe.
if MayHaveSideEffects(pass, expr.X, purity) {
return true
}
for _, idx := range expr.Indices {
if MayHaveSideEffects(pass, idx, purity) {
return true
}
}
return false
case *ast.KeyValueExpr:
return MayHaveSideEffects(pass, expr.Key, purity) || MayHaveSideEffects(pass, expr.Value, purity)
case *ast.SelectorExpr:
return MayHaveSideEffects(pass, expr.X, purity)
case *ast.SliceExpr:
return MayHaveSideEffects(pass, expr.X, purity) ||
MayHaveSideEffects(pass, expr.Low, purity) ||
MayHaveSideEffects(pass, expr.High, purity) ||
MayHaveSideEffects(pass, expr.Max, purity)
case *ast.StarExpr:
return MayHaveSideEffects(pass, expr.X, purity)
case *ast.TypeAssertExpr:
return MayHaveSideEffects(pass, expr.X, purity)
case *ast.UnaryExpr:
if MayHaveSideEffects(pass, expr.X, purity) {
return true
}
return expr.Op == token.ARROW || expr.Op == token.AND
case *ast.ParenExpr:
return MayHaveSideEffects(pass, expr.X, purity)
case nil:
return false
default:
panic(fmt.Sprintf("internal error: unhandled type %T", expr))
}
}
func IsGoVersion(pass *analysis.Pass, minor int) bool {
f, ok := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter)
if !ok {
panic("requested Go version, but analyzer has no version flag")
}
version := f.Get().(int)
return version >= minor
}
var integerLiteralQ = pattern.MustParse(`(IntegerLiteral tv)`)
func IntegerLiteral(pass *analysis.Pass, node ast.Node) (types.TypeAndValue, bool) {
m, ok := Match(pass, integerLiteralQ, node)
if !ok {
return types.TypeAndValue{}, false
}
return m.State["tv"].(types.TypeAndValue), true
}
func IsIntegerLiteral(pass *analysis.Pass, node ast.Node, value constant.Value) bool {
tv, ok := IntegerLiteral(pass, node)
if !ok {
return false
}
return constant.Compare(tv.Value, token.EQL, value)
}
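
A minimal sketch (not part of this commit) of how a check can use the helpers above from an analyzer's run function. The import path honnef.co/go/tools/analysis/code and the example check itself are assumptions:

package example

import (
    "go/ast"

    "golang.org/x/tools/go/analysis"

    "honnef.co/go/tools/analysis/code"
)

// run flags calls to errors.New with an empty message, using IsCallTo to
// resolve the callee and ExprToString to read the constant argument.
func run(pass *analysis.Pass) (interface{}, error) {
    for _, f := range pass.Files {
        ast.Inspect(f, func(n ast.Node) bool {
            call, ok := n.(*ast.CallExpr)
            if !ok || !code.IsCallTo(pass, call, "errors.New") {
                return true
            }
            if s, ok := code.ExprToString(pass, call.Args[0]); ok && s == "" {
                pass.Reportf(call.Pos(), "errors.New called with an empty message")
            }
            return true
        })
    }
    return nil, nil
}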


@@ -0,0 +1,51 @@
package code
import (
"bytes"
"go/ast"
"go/format"
"honnef.co/go/tools/pattern"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
)
func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) {
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn)
}
func PreorderStack(pass *analysis.Pass, fn func(ast.Node, []ast.Node), types ...ast.Node) {
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).WithStack(types, func(n ast.Node, push bool, stack []ast.Node) (proceed bool) {
if push {
fn(n, stack)
}
return true
})
}
func Match(pass *analysis.Pass, q pattern.Pattern, node ast.Node) (*pattern.Matcher, bool) {
// Note that we ignore q.Relevant; callers of Match usually use
// AST inspectors that already filter on nodes we're interested
// in.
m := &pattern.Matcher{TypesInfo: pass.TypesInfo}
ok := m.Match(q.Root, node)
return m, ok
}
func MatchAndEdit(pass *analysis.Pass, before, after pattern.Pattern, node ast.Node) (*pattern.Matcher, []analysis.TextEdit, bool) {
m, ok := Match(pass, before, node)
if !ok {
return m, nil, false
}
r := pattern.NodeToAST(after.Root, m.State)
buf := &bytes.Buffer{}
format.Node(buf, pass.Fset, r)
edit := []analysis.TextEdit{{
Pos: node.Pos(),
End: node.End(),
NewText: buf.Bytes(),
}}
return m, edit, true
}
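
The same traversal can be driven by Preorder, which reuses the pre-built inspector of the inspect analyzer instead of walking each file with ast.Inspect; a sketch under the same import-path assumption (the analyzer must list inspect.Analyzer in Requires, because Preorder reads its result):

package example

import (
    "go/ast"

    "golang.org/x/tools/go/analysis"
    "golang.org/x/tools/go/analysis/passes/inspect"

    "honnef.co/go/tools/analysis/code"
)

var Analyzer = &analysis.Analyzer{
    Name:     "exitcheck",
    Doc:      "reports calls to os.Exit",
    Requires: []*analysis.Analyzer{inspect.Analyzer},
    Run: func(pass *analysis.Pass) (interface{}, error) {
        // Only *ast.CallExpr nodes are visited, so no type switch is needed.
        code.Preorder(pass, func(n ast.Node) {
            if code.IsCallTo(pass, n, "os.Exit") {
                pass.Reportf(n.Pos(), "call to os.Exit")
            }
        }, (*ast.CallExpr)(nil))
        return nil, nil
    },
}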


@@ -0,0 +1,83 @@
// Package edit contains helpers for creating suggested fixes.
package edit
import (
"bytes"
"go/ast"
"go/format"
"go/token"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/pattern"
)
// Ranger describes values that have a start and end position.
// In most cases these are either ast.Node or manually constructed ranges.
type Ranger interface {
Pos() token.Pos
End() token.Pos
}
// Range implements the Ranger interface.
type Range [2]token.Pos
func (r Range) Pos() token.Pos { return r[0] }
func (r Range) End() token.Pos { return r[1] }
// ReplaceWithString replaces a range with a string.
func ReplaceWithString(old Ranger, new string) analysis.TextEdit {
return analysis.TextEdit{
Pos: old.Pos(),
End: old.End(),
NewText: []byte(new),
}
}
// ReplaceWithNode replaces a range with an AST node.
func ReplaceWithNode(fset *token.FileSet, old Ranger, new ast.Node) analysis.TextEdit {
buf := &bytes.Buffer{}
if err := format.Node(buf, fset, new); err != nil {
panic("internal error: " + err.Error())
}
return analysis.TextEdit{
Pos: old.Pos(),
End: old.End(),
NewText: buf.Bytes(),
}
}
// ReplaceWithPattern replaces a range with the result of executing a pattern.
func ReplaceWithPattern(fset *token.FileSet, old Ranger, new pattern.Pattern, state pattern.State) analysis.TextEdit {
r := pattern.NodeToAST(new.Root, state)
buf := &bytes.Buffer{}
format.Node(buf, fset, r)
return analysis.TextEdit{
Pos: old.Pos(),
End: old.End(),
NewText: buf.Bytes(),
}
}
// Delete deletes a range of code.
func Delete(old Ranger) analysis.TextEdit {
return analysis.TextEdit{
Pos: old.Pos(),
End: old.End(),
NewText: nil,
}
}
func Fix(msg string, edits ...analysis.TextEdit) analysis.SuggestedFix {
return analysis.SuggestedFix{
Message: msg,
TextEdits: edits,
}
}
// Selector creates a new selector expression.
func Selector(x, sel string) *ast.SelectorExpr {
return &ast.SelectorExpr{
X: &ast.Ident{Name: x},
Sel: &ast.Ident{Name: sel},
}
}
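
A sketch of how these helpers compose into an analysis.SuggestedFix attached to a diagnostic. The surrounding check, the expression being rewritten, and the import path honnef.co/go/tools/analysis/edit are assumptions:

package example

import (
    "go/ast"

    "golang.org/x/tools/go/analysis"

    "honnef.co/go/tools/analysis/edit"
)

// reportBoolCompare reports a comparison like `x == true` and offers to
// replace the whole comparison with just the operand.
func reportBoolCompare(pass *analysis.Pass, cmp *ast.BinaryExpr, operand string) {
    fix := edit.Fix("omit comparison with boolean constant",
        edit.ReplaceWithString(cmp, operand))
    pass.Report(analysis.Diagnostic{
        Pos:            cmp.Pos(),
        End:            cmp.End(),
        Message:        "should omit comparison to bool constant",
        SuggestedFixes: []analysis.SuggestedFix{fix},
    })
}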


@@ -0,0 +1,145 @@
package facts
import (
"go/ast"
"go/token"
"go/types"
"reflect"
"strings"
"golang.org/x/tools/go/analysis"
)
type IsDeprecated struct{ Msg string }
func (*IsDeprecated) AFact() {}
func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg }
type DeprecatedResult struct {
Objects map[types.Object]*IsDeprecated
Packages map[*types.Package]*IsDeprecated
}
var Deprecated = &analysis.Analyzer{
Name: "fact_deprecated",
Doc: "Mark deprecated objects",
Run: deprecated,
FactTypes: []analysis.Fact{(*IsDeprecated)(nil)},
ResultType: reflect.TypeOf(DeprecatedResult{}),
}
func deprecated(pass *analysis.Pass) (interface{}, error) {
var names []*ast.Ident
extractDeprecatedMessage := func(docs []*ast.CommentGroup) string {
for _, doc := range docs {
if doc == nil {
continue
}
parts := strings.Split(doc.Text(), "\n\n")
for _, part := range parts {
if !strings.HasPrefix(part, "Deprecated: ") {
continue
}
alt := part[len("Deprecated: "):]
alt = strings.Replace(alt, "\n", " ", -1)
return alt
}
}
return ""
}
doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) {
alt := extractDeprecatedMessage(docs)
if alt == "" {
return
}
for _, name := range names {
obj := pass.TypesInfo.ObjectOf(name)
pass.ExportObjectFact(obj, &IsDeprecated{alt})
}
}
var docs []*ast.CommentGroup
for _, f := range pass.Files {
docs = append(docs, f.Doc)
}
if alt := extractDeprecatedMessage(docs); alt != "" {
// Don't mark package syscall as deprecated, even though
// it is. A lot of people still use it for simple
// constants like SIGKILL, and I am not comfortable
// telling them to use x/sys for that.
if pass.Pkg.Path() != "syscall" {
pass.ExportPackageFact(&IsDeprecated{alt})
}
}
docs = docs[:0]
for _, f := range pass.Files {
fn := func(node ast.Node) bool {
if node == nil {
return true
}
var ret bool
switch node := node.(type) {
case *ast.GenDecl:
switch node.Tok {
case token.TYPE, token.CONST, token.VAR:
docs = append(docs, node.Doc)
return true
default:
return false
}
case *ast.FuncDecl:
docs = append(docs, node.Doc)
names = []*ast.Ident{node.Name}
ret = false
case *ast.TypeSpec:
docs = append(docs, node.Doc)
names = []*ast.Ident{node.Name}
ret = true
case *ast.ValueSpec:
docs = append(docs, node.Doc)
names = node.Names
ret = false
case *ast.File:
return true
case *ast.StructType:
for _, field := range node.Fields.List {
doDocs(field.Names, []*ast.CommentGroup{field.Doc})
}
return false
case *ast.InterfaceType:
for _, field := range node.Methods.List {
doDocs(field.Names, []*ast.CommentGroup{field.Doc})
}
return false
default:
return false
}
if len(names) == 0 || len(docs) == 0 {
return ret
}
doDocs(names, docs)
docs = docs[:0]
names = nil
return ret
}
ast.Inspect(f, fn)
}
out := DeprecatedResult{
Objects: map[types.Object]*IsDeprecated{},
Packages: map[*types.Package]*IsDeprecated{},
}
for _, fact := range pass.AllObjectFacts() {
out.Objects[fact.Object] = fact.Fact.(*IsDeprecated)
}
for _, fact := range pass.AllPackageFacts() {
out.Packages[fact.Package] = fact.Fact.(*IsDeprecated)
}
return out, nil
}
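
The marker this analyzer extracts is the conventional "Deprecated:" paragraph in a doc comment. A hypothetical declaration it would annotate with an IsDeprecated fact:

package widget

// Frobnicate prepares the widget for use.
//
// Deprecated: use FrobnicateContext instead, which accepts a context.Context.
func Frobnicate() {}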


@@ -0,0 +1,20 @@
package facts
import (
"reflect"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/analysis/lint"
)
func directives(pass *analysis.Pass) (interface{}, error) {
return lint.ParseDirectives(pass.Files, pass.Fset), nil
}
var Directives = &analysis.Analyzer{
Name: "directives",
Doc: "extracts linter directives",
Run: directives,
RunDespiteErrors: true,
ResultType: reflect.TypeOf([]lint.Directive{}),
}


@@ -0,0 +1,97 @@
package facts
import (
"bufio"
"bytes"
"io"
"os"
"reflect"
"strings"
"golang.org/x/tools/go/analysis"
)
type Generator int
// A list of known generators we can detect
const (
Unknown Generator = iota
Goyacc
Cgo
Stringer
ProtocGenGo
)
var (
// used by cgo before Go 1.11
oldCgo = []byte("// Created by cgo - DO NOT EDIT")
prefix = []byte("// Code generated ")
suffix = []byte(" DO NOT EDIT.")
nl = []byte("\n")
crnl = []byte("\r\n")
)
func isGenerated(path string) (Generator, bool) {
f, err := os.Open(path)
if err != nil {
return 0, false
}
defer f.Close()
br := bufio.NewReader(f)
for {
s, err := br.ReadBytes('\n')
if err != nil && err != io.EOF {
return 0, false
}
s = bytes.TrimSuffix(s, crnl)
s = bytes.TrimSuffix(s, nl)
if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) {
if len(s)-len(suffix) < len(prefix) {
return Unknown, true
}
text := string(s[len(prefix) : len(s)-len(suffix)])
switch text {
case "by goyacc.":
return Goyacc, true
case "by cmd/cgo;":
return Cgo, true
case "by protoc-gen-go.":
return ProtocGenGo, true
}
if strings.HasPrefix(text, `by "stringer `) {
return Stringer, true
}
if strings.HasPrefix(text, `by goyacc `) {
return Goyacc, true
}
return Unknown, true
}
if bytes.Equal(s, oldCgo) {
return Cgo, true
}
if err == io.EOF {
break
}
}
return 0, false
}
var Generated = &analysis.Analyzer{
Name: "isgenerated",
Doc: "annotate file names that have been code generated",
Run: func(pass *analysis.Pass) (interface{}, error) {
m := map[string]Generator{}
for _, f := range pass.Files {
path := pass.Fset.PositionFor(f.Pos(), false).Filename
g, ok := isGenerated(path)
if ok {
m[path] = g
}
}
return m, nil
},
RunDespiteErrors: true,
ResultType: reflect.TypeOf(map[string]Generator{}),
}
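
The header this analyzer matches is the standard generated-code marker: the prefix "// Code generated " and the suffix " DO NOT EDIT." on a single line. For example, a hypothetical file beginning with the line below would be recorded as ProtocGenGo:

// Code generated by protoc-gen-go. DO NOT EDIT.

package examplepb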


@@ -0,0 +1,251 @@
package nilness
import (
"fmt"
"go/token"
"go/types"
"reflect"
"honnef.co/go/tools/go/ir"
"honnef.co/go/tools/go/types/typeutil"
"honnef.co/go/tools/internal/passes/buildir"
"golang.org/x/tools/go/analysis"
)
// neverReturnsNilFact denotes that a function's return value will never
// be nil (typed or untyped). The analysis errs on the side of false
// negatives.
type neverReturnsNilFact struct {
Rets []neverNilness
}
func (*neverReturnsNilFact) AFact() {}
func (fact *neverReturnsNilFact) String() string {
return fmt.Sprintf("never returns nil: %v", fact.Rets)
}
type Result struct {
m map[*types.Func][]neverNilness
}
var Analysis = &analysis.Analyzer{
Name: "nilness",
Doc: "Annotates return values that will never be nil (typed or untyped)",
Run: run,
Requires: []*analysis.Analyzer{buildir.Analyzer},
FactTypes: []analysis.Fact{(*neverReturnsNilFact)(nil)},
ResultType: reflect.TypeOf((*Result)(nil)),
}
// MayReturnNil reports whether fn's ret-th return value might be
// a typed or untyped nil value. The value of ret is zero-based. When
// globalOnly is true, the only possible nil values are global
// variables.
//
// The analysis has false positives: MayReturnNil can incorrectly
// report true, but never incorrectly reports false.
func (r *Result) MayReturnNil(fn *types.Func, ret int) (yes bool, globalOnly bool) {
if !typeutil.IsPointerLike(fn.Type().(*types.Signature).Results().At(ret).Type()) {
return false, false
}
if len(r.m[fn]) == 0 {
return true, false
}
v := r.m[fn][ret]
return v != neverNil, v == onlyGlobal
}
func run(pass *analysis.Pass) (interface{}, error) {
seen := map[*ir.Function]struct{}{}
out := &Result{
m: map[*types.Func][]neverNilness{},
}
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
impl(pass, fn, seen)
}
for _, fact := range pass.AllObjectFacts() {
out.m[fact.Object.(*types.Func)] = fact.Fact.(*neverReturnsNilFact).Rets
}
return out, nil
}
type neverNilness uint8
const (
neverNil neverNilness = 1
onlyGlobal neverNilness = 2
nilly neverNilness = 3
)
func (n neverNilness) String() string {
switch n {
case neverNil:
return "never"
case onlyGlobal:
return "global"
case nilly:
return "nil"
default:
return "BUG"
}
}
func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) []neverNilness {
if fn.Object() == nil {
// TODO(dh): support closures
return nil
}
if fact := new(neverReturnsNilFact); pass.ImportObjectFact(fn.Object(), fact) {
return fact.Rets
}
if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg {
return nil
}
if fn.Blocks == nil {
return nil
}
if _, ok := seenFns[fn]; ok {
// break recursion
return nil
}
seenFns[fn] = struct{}{}
seen := map[ir.Value]struct{}{}
var mightReturnNil func(v ir.Value) neverNilness
mightReturnNil = func(v ir.Value) neverNilness {
if _, ok := seen[v]; ok {
// break cycle
return nilly
}
if !typeutil.IsPointerLike(v.Type()) {
return neverNil
}
seen[v] = struct{}{}
switch v := v.(type) {
case *ir.MakeInterface:
return mightReturnNil(v.X)
case *ir.Convert:
return mightReturnNil(v.X)
case *ir.SliceToArrayPointer:
if typeutil.CoreType(v.Type()).(*types.Pointer).Elem().Underlying().(*types.Array).Len() == 0 {
return mightReturnNil(v.X)
} else {
// converting a slice to an array pointer of length > 0 panics if the slice is nil
return neverNil
}
case *ir.Slice:
return mightReturnNil(v.X)
case *ir.Phi:
ret := neverNil
for _, e := range v.Edges {
if n := mightReturnNil(e); n > ret {
ret = n
}
}
return ret
case *ir.Extract:
switch d := v.Tuple.(type) {
case *ir.Call:
if callee := d.Call.StaticCallee(); callee != nil {
ret := impl(pass, callee, seenFns)
if len(ret) == 0 {
return nilly
}
return ret[v.Index]
} else {
return nilly
}
case *ir.TypeAssert, *ir.Next, *ir.Select, *ir.MapLookup, *ir.TypeSwitch, *ir.Recv, *ir.Sigma:
// we don't need to look at the Extract's index
// because we've already checked its type.
return nilly
default:
panic(fmt.Sprintf("internal error: unhandled type %T", d))
}
case *ir.Call:
if callee := v.Call.StaticCallee(); callee != nil {
ret := impl(pass, callee, seenFns)
if len(ret) == 0 {
return nilly
}
return ret[0]
} else {
return nilly
}
case *ir.BinOp, *ir.UnOp, *ir.Alloc, *ir.FieldAddr, *ir.IndexAddr, *ir.Global, *ir.MakeSlice, *ir.MakeClosure, *ir.Function, *ir.MakeMap, *ir.MakeChan:
return neverNil
case *ir.Sigma:
iff, ok := v.From.Control().(*ir.If)
if !ok {
return nilly
}
binop, ok := iff.Cond.(*ir.BinOp)
if !ok {
return nilly
}
isNil := func(v ir.Value) bool {
k, ok := v.(*ir.Const)
if !ok {
return false
}
return k.Value == nil
}
if binop.X == v.X && isNil(binop.Y) || binop.Y == v.X && isNil(binop.X) {
op := binop.Op
if v.From.Succs[0] != v.Block() {
// we're in the false branch, negate op
switch op {
case token.EQL:
op = token.NEQ
case token.NEQ:
op = token.EQL
default:
panic(fmt.Sprintf("internal error: unhandled token %v", op))
}
}
switch op {
case token.EQL:
return nilly
case token.NEQ:
return neverNil
default:
panic(fmt.Sprintf("internal error: unhandled token %v", op))
}
}
return nilly
case *ir.ChangeType:
return mightReturnNil(v.X)
case *ir.Load:
if _, ok := v.X.(*ir.Global); ok {
return onlyGlobal
}
return nilly
case *ir.AggregateConst:
return neverNil
case *ir.TypeAssert, *ir.ChangeInterface, *ir.Field, *ir.Const, *ir.GenericConst, *ir.Index, *ir.MapLookup, *ir.Parameter, *ir.Recv, *ir.TypeSwitch:
return nilly
default:
panic(fmt.Sprintf("internal error: unhandled type %T", v))
}
}
ret := fn.Exit.Control().(*ir.Return)
out := make([]neverNilness, len(ret.Results))
export := false
for i, v := range ret.Results {
v := mightReturnNil(v)
out[i] = v
if v != nilly && typeutil.IsPointerLike(fn.Signature.Results().At(i).Type()) {
export = true
}
}
if export {
pass.ExportObjectFact(fn.Object(), &neverReturnsNilFact{out})
}
return out
}
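
Checks consume this analyzer through its Result. A sketch, assuming the import path honnef.co/go/tools/analysis/facts/nilness and a calling analyzer that lists nilness.Analysis in its Requires:

package example

import (
    "go/types"

    "golang.org/x/tools/go/analysis"

    "honnef.co/go/tools/analysis/facts/nilness"
)

// returnsProvablyNonNil reports whether fn's first return value is proven to
// never be nil. It assumes fn has at least one return value.
func returnsProvablyNonNil(pass *analysis.Pass, fn *types.Func) bool {
    res := pass.ResultOf[nilness.Analysis].(*nilness.Result)
    mayBeNil, _ := res.MayReturnNil(fn, 0)
    return !mayBeNil
}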


@@ -0,0 +1,178 @@
package facts
import (
"go/types"
"reflect"
"honnef.co/go/tools/go/ir"
"honnef.co/go/tools/go/ir/irutil"
"honnef.co/go/tools/internal/passes/buildir"
"golang.org/x/tools/go/analysis"
)
type IsPure struct{}
func (*IsPure) AFact() {}
func (d *IsPure) String() string { return "is pure" }
type PurityResult map[*types.Func]*IsPure
var Purity = &analysis.Analyzer{
Name: "fact_purity",
Doc: "Mark pure functions",
Run: purity,
Requires: []*analysis.Analyzer{buildir.Analyzer},
FactTypes: []analysis.Fact{(*IsPure)(nil)},
ResultType: reflect.TypeOf(PurityResult{}),
}
var pureStdlib = map[string]struct{}{
"errors.New": {},
"fmt.Errorf": {},
"fmt.Sprintf": {},
"fmt.Sprint": {},
"sort.Reverse": {},
"strings.Map": {},
"strings.Repeat": {},
"strings.Replace": {},
"strings.Title": {},
"strings.ToLower": {},
"strings.ToLowerSpecial": {},
"strings.ToTitle": {},
"strings.ToTitleSpecial": {},
"strings.ToUpper": {},
"strings.ToUpperSpecial": {},
"strings.Trim": {},
"strings.TrimFunc": {},
"strings.TrimLeft": {},
"strings.TrimLeftFunc": {},
"strings.TrimPrefix": {},
"strings.TrimRight": {},
"strings.TrimRightFunc": {},
"strings.TrimSpace": {},
"strings.TrimSuffix": {},
"(*net/http.Request).WithContext": {},
}
func purity(pass *analysis.Pass) (interface{}, error) {
seen := map[*ir.Function]struct{}{}
irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
var check func(fn *ir.Function) (ret bool)
check = func(fn *ir.Function) (ret bool) {
if fn.Object() == nil {
// TODO(dh): support closures
return false
}
if pass.ImportObjectFact(fn.Object(), new(IsPure)) {
return true
}
if fn.Pkg != irpkg {
// Function is in another package but wasn't marked as
// pure, ergo it isn't pure
return false
}
// Break recursion
if _, ok := seen[fn]; ok {
return false
}
seen[fn] = struct{}{}
defer func() {
if ret {
pass.ExportObjectFact(fn.Object(), &IsPure{})
}
}()
if irutil.IsStub(fn) {
return false
}
if _, ok := pureStdlib[fn.Object().(*types.Func).FullName()]; ok {
return true
}
if fn.Signature.Results().Len() == 0 {
// A function with no return values is empty or is doing some
// work we cannot see (for example because of build tags);
// don't consider it pure.
return false
}
for _, param := range fn.Params {
// TODO(dh): this may not be strictly correct. pure code
// can, to an extent, operate on non-basic types.
if _, ok := param.Type().Underlying().(*types.Basic); !ok {
return false
}
}
// Don't consider external functions pure.
if fn.Blocks == nil {
return false
}
checkCall := func(common *ir.CallCommon) bool {
if common.IsInvoke() {
return false
}
builtin, ok := common.Value.(*ir.Builtin)
if !ok {
if common.StaticCallee() != fn {
if common.StaticCallee() == nil {
return false
}
if !check(common.StaticCallee()) {
return false
}
}
} else {
switch builtin.Name() {
case "len", "cap":
default:
return false
}
}
return true
}
for _, b := range fn.Blocks {
for _, ins := range b.Instrs {
switch ins := ins.(type) {
case *ir.Call:
if !checkCall(ins.Common()) {
return false
}
case *ir.Defer:
if !checkCall(&ins.Call) {
return false
}
case *ir.Select:
return false
case *ir.Send:
return false
case *ir.Go:
return false
case *ir.Panic:
return false
case *ir.Store:
return false
case *ir.FieldAddr:
return false
case *ir.Alloc:
return false
case *ir.Load:
return false
}
}
}
return true
}
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
check(fn)
}
out := PurityResult{}
for _, fact := range pass.AllObjectFacts() {
out[fact.Object.(*types.Func)] = fact.Fact.(*IsPure)
}
return out, nil
}


@@ -0,0 +1,24 @@
package facts
import (
"go/ast"
"go/token"
"reflect"
"golang.org/x/tools/go/analysis"
)
var TokenFile = &analysis.Analyzer{
Name: "tokenfileanalyzer",
Doc: "creates a mapping of *token.File to *ast.File",
Run: func(pass *analysis.Pass) (interface{}, error) {
m := map[*token.File]*ast.File{}
for _, af := range pass.Files {
tf := pass.Fset.File(af.Pos())
m[tf] = af
}
return m, nil
},
RunDespiteErrors: true,
ResultType: reflect.TypeOf(map[*token.File]*ast.File{}),
}


@@ -0,0 +1,253 @@
package typedness
import (
"fmt"
"go/token"
"go/types"
"reflect"
"honnef.co/go/tools/go/ir"
"honnef.co/go/tools/go/ir/irutil"
"honnef.co/go/tools/internal/passes/buildir"
"golang.org/x/exp/typeparams"
"golang.org/x/tools/go/analysis"
)
// alwaysTypedFact denotes that a function's return value will never
// be untyped nil. The analysis errs on the side of false negatives.
type alwaysTypedFact struct {
Rets uint8
}
func (*alwaysTypedFact) AFact() {}
func (fact *alwaysTypedFact) String() string {
return fmt.Sprintf("always typed: %08b", fact.Rets)
}
type Result struct {
m map[*types.Func]uint8
}
var Analysis = &analysis.Analyzer{
Name: "typedness",
Doc: "Annotates return values that are always typed values",
Run: run,
Requires: []*analysis.Analyzer{buildir.Analyzer},
FactTypes: []analysis.Fact{(*alwaysTypedFact)(nil)},
ResultType: reflect.TypeOf((*Result)(nil)),
}
// MustReturnTyped reports whether fn's ret-th return value must
// be a typed value, i.e. an interface value containing a concrete
// type or trivially a concrete type. The value of ret is zero-based.
//
// The analysis has false negatives: MustReturnTyped may incorrectly
// report false, but never incorrectly reports true.
func (r *Result) MustReturnTyped(fn *types.Func, ret int) bool {
if _, ok := fn.Type().(*types.Signature).Results().At(ret).Type().Underlying().(*types.Interface); !ok {
return true
}
return (r.m[fn] & (1 << ret)) != 0
}
func run(pass *analysis.Pass) (interface{}, error) {
seen := map[*ir.Function]struct{}{}
out := &Result{
m: map[*types.Func]uint8{},
}
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
impl(pass, fn, seen)
}
for _, fact := range pass.AllObjectFacts() {
out.m[fact.Object.(*types.Func)] = fact.Fact.(*alwaysTypedFact).Rets
}
return out, nil
}
func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) (out uint8) {
if fn.Signature.Results().Len() > 8 {
return 0
}
if fn.Object() == nil {
// TODO(dh): support closures
return 0
}
if fact := new(alwaysTypedFact); pass.ImportObjectFact(fn.Object(), fact) {
return fact.Rets
}
if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg {
return 0
}
if fn.Blocks == nil {
return 0
}
if irutil.IsStub(fn) {
return 0
}
if _, ok := seenFns[fn]; ok {
// break recursion
return 0
}
seenFns[fn] = struct{}{}
defer func() {
for i := 0; i < fn.Signature.Results().Len(); i++ {
if _, ok := fn.Signature.Results().At(i).Type().Underlying().(*types.Interface); !ok {
// we don't need facts to know that non-interface
// types can't be untyped nil. zeroing out those bits
// may result in all bits being zero, in which case we
// don't have to save any fact.
out &= ^(1 << i)
}
}
if out > 0 {
pass.ExportObjectFact(fn.Object(), &alwaysTypedFact{out})
}
}()
isUntypedNil := func(v ir.Value) bool {
k, ok := v.(*ir.Const)
if !ok {
return false
}
if _, ok := k.Type().Underlying().(*types.Interface); !ok {
return false
}
return k.Value == nil
}
var do func(v ir.Value, seen map[ir.Value]struct{}) bool
do = func(v ir.Value, seen map[ir.Value]struct{}) bool {
if _, ok := seen[v]; ok {
// break cycle
return false
}
seen[v] = struct{}{}
switch v := v.(type) {
case *ir.Const:
// can't be a typed nil, because then we'd be returning the
// result of MakeInterface.
return false
case *ir.ChangeInterface:
return do(v.X, seen)
case *ir.Extract:
call, ok := v.Tuple.(*ir.Call)
if !ok {
// We only care about extracts of function results. For
// everything else (e.g. channel receives and map
// lookups), we can either not deduce any information, or
// will see a MakeInterface.
return false
}
if callee := call.Call.StaticCallee(); callee != nil {
return impl(pass, callee, seenFns)&(1<<v.Index) != 0
} else {
// we don't know what function we're calling. no need
// to look at the signature, though. if it weren't an
// interface, we'd be seeing a MakeInterface
// instruction.
return false
}
case *ir.Call:
if callee := v.Call.StaticCallee(); callee != nil {
return impl(pass, callee, seenFns)&1 != 0
} else {
// we don't know what function we're calling. no need
// to look at the signature, though. if it weren't an
// interface, we'd be seeing a MakeInterface
// instruction.
return false
}
case *ir.Sigma:
iff, ok := v.From.Control().(*ir.If)
if !ok {
// give up
return false
}
binop, ok := iff.Cond.(*ir.BinOp)
if !ok {
// give up
return false
}
if (binop.X == v.X && isUntypedNil(binop.Y)) || (isUntypedNil(binop.X) && binop.Y == v.X) {
op := binop.Op
if v.From.Succs[0] != v.Block() {
// we're in the false branch, negate op
switch op {
case token.EQL:
op = token.NEQ
case token.NEQ:
op = token.EQL
default:
panic(fmt.Sprintf("internal error: unhandled token %v", op))
}
}
switch op {
case token.EQL:
// returned value equals untyped nil
return false
case token.NEQ:
// returned value does not equal untyped nil
return true
default:
panic(fmt.Sprintf("internal error: unhandled token %v", op))
}
}
// TODO(dh): handle comparison with typed nil
// give up
return false
case *ir.Phi:
for _, pv := range v.Edges {
if !do(pv, seen) {
return false
}
}
return true
case *ir.MakeInterface:
terms, err := typeparams.NormalTerms(v.X.Type())
if len(terms) == 0 || err != nil {
// Type is a type parameter with no type terms (or we couldn't determine the terms). Such a type
// _can_ be nil when put in an interface value.
//
// There is no instruction that can create a guaranteed non-nil instance of a type parameter without
// type constraints, so we return false right away, without checking v.X's typedness.
return false
}
return true
case *ir.TypeAssert:
// type assertions fail for untyped nils. Either we have a
// single lhs and the type assertion succeeds or panics,
// or we have two lhs and we'll return Extract instead.
return true
case *ir.ChangeType:
// we'll only see interface->interface conversions, which
// don't tell us anything about the nilness.
return false
case *ir.MapLookup, *ir.Index, *ir.Recv, *ir.Parameter, *ir.Load, *ir.Field:
// All other instructions tell us nothing about the
// typedness of interface values.
return false
default:
panic(fmt.Sprintf("internal error: unhandled type %T", v))
}
}
ret := fn.Exit.Control().(*ir.Return)
for i, v := range ret.Results {
typ := fn.Signature.Results().At(i).Type()
if _, ok := typ.Underlying().(*types.Interface); ok && !typeparams.IsTypeParam(typ) {
if do(v, map[ir.Value]struct{}{}) {
out |= 1 << i
}
}
}
return out
}


@@ -0,0 +1,283 @@
// Package lint provides abstractions on top of go/analysis.
// These abstractions add extra information to analyses, such as structured documentation and severities.
package lint
import (
"flag"
"fmt"
"go/ast"
"go/build"
"go/token"
"strconv"
"strings"
"golang.org/x/tools/go/analysis"
)
// Analyzer wraps a go/analysis.Analyzer and provides structured documentation.
type Analyzer struct {
// The analyzer's documentation. Unlike go/analysis.Analyzer.Doc,
// this field is structured, providing access to severity, options
// etc.
Doc *Documentation
Analyzer *analysis.Analyzer
}
func (a *Analyzer) initialize() {
a.Analyzer.Doc = a.Doc.String()
if a.Analyzer.Flags.Usage == nil {
fs := flag.NewFlagSet("", flag.PanicOnError)
fs.Var(newVersionFlag(), "go", "Target Go version")
a.Analyzer.Flags = *fs
}
}
// InitializeAnalyzers takes a map of documentation and a map of go/analysis.Analyzers and returns a slice of Analyzers.
// The map keys are the analyzer names.
func InitializeAnalyzers(docs map[string]*Documentation, analyzers map[string]*analysis.Analyzer) []*Analyzer {
out := make([]*Analyzer, 0, len(analyzers))
for k, v := range analyzers {
v.Name = k
a := &Analyzer{
Doc: docs[k],
Analyzer: v,
}
a.initialize()
out = append(out, a)
}
return out
}
// Severity describes the severity of diagnostics reported by an analyzer.
type Severity int
const (
SeverityNone Severity = iota
SeverityError
SeverityDeprecated
SeverityWarning
SeverityInfo
SeverityHint
)
// MergeStrategy sets how merge mode should behave for diagnostics of an analyzer.
type MergeStrategy int
const (
MergeIfAny MergeStrategy = iota
MergeIfAll
)
type RawDocumentation struct {
Title string
Text string
Before string
After string
Since string
NonDefault bool
Options []string
Severity Severity
MergeIf MergeStrategy
}
type Documentation struct {
Title string
Text string
TitleMarkdown string
TextMarkdown string
Before string
After string
Since string
NonDefault bool
Options []string
Severity Severity
MergeIf MergeStrategy
}
func Markdownify(m map[string]*RawDocumentation) map[string]*Documentation {
out := make(map[string]*Documentation, len(m))
for k, v := range m {
out[k] = &Documentation{
Title: strings.TrimSpace(stripMarkdown(v.Title)),
Text: strings.TrimSpace(stripMarkdown(v.Text)),
TitleMarkdown: strings.TrimSpace(toMarkdown(v.Title)),
TextMarkdown: strings.TrimSpace(toMarkdown(v.Text)),
Before: strings.TrimSpace(v.Before),
After: strings.TrimSpace(v.After),
Since: v.Since,
NonDefault: v.NonDefault,
Options: v.Options,
Severity: v.Severity,
MergeIf: v.MergeIf,
}
}
return out
}
func toMarkdown(s string) string {
return strings.NewReplacer(`\'`, "`", `\"`, "`").Replace(s)
}
func stripMarkdown(s string) string {
return strings.NewReplacer(`\'`, "", `\"`, "'").Replace(s)
}
func (doc *Documentation) Format(metadata bool) string {
return doc.format(false, metadata)
}
func (doc *Documentation) FormatMarkdown(metadata bool) string {
return doc.format(true, metadata)
}
func (doc *Documentation) format(markdown bool, metadata bool) string {
b := &strings.Builder{}
if markdown {
fmt.Fprintf(b, "%s\n\n", doc.TitleMarkdown)
if doc.Text != "" {
fmt.Fprintf(b, "%s\n\n", doc.TextMarkdown)
}
} else {
fmt.Fprintf(b, "%s\n\n", doc.Title)
if doc.Text != "" {
fmt.Fprintf(b, "%s\n\n", doc.Text)
}
}
if doc.Before != "" {
fmt.Fprintln(b, "Before:")
fmt.Fprintln(b, "")
for _, line := range strings.Split(doc.Before, "\n") {
fmt.Fprint(b, " ", line, "\n")
}
fmt.Fprintln(b, "")
fmt.Fprintln(b, "After:")
fmt.Fprintln(b, "")
for _, line := range strings.Split(doc.After, "\n") {
fmt.Fprint(b, " ", line, "\n")
}
fmt.Fprintln(b, "")
}
if metadata {
fmt.Fprint(b, "Available since\n ")
if doc.Since == "" {
fmt.Fprint(b, "unreleased")
} else {
fmt.Fprintf(b, "%s", doc.Since)
}
if doc.NonDefault {
fmt.Fprint(b, ", non-default")
}
fmt.Fprint(b, "\n")
if len(doc.Options) > 0 {
fmt.Fprintf(b, "\nOptions\n")
for _, opt := range doc.Options {
fmt.Fprintf(b, " %s", opt)
}
fmt.Fprint(b, "\n")
}
}
return b.String()
}
func (doc *Documentation) String() string {
return doc.Format(true)
}
func newVersionFlag() flag.Getter {
tags := build.Default.ReleaseTags
v := tags[len(tags)-1][2:]
version := new(VersionFlag)
if err := version.Set(v); err != nil {
panic(fmt.Sprintf("internal error: %s", err))
}
return version
}
type VersionFlag int
func (v *VersionFlag) String() string {
return fmt.Sprintf("1.%d", *v)
}
func (v *VersionFlag) Set(s string) error {
if len(s) < 3 {
return fmt.Errorf("invalid Go version: %q", s)
}
if s[0] != '1' {
return fmt.Errorf("invalid Go version: %q", s)
}
if s[1] != '.' {
return fmt.Errorf("invalid Go version: %q", s)
}
i, err := strconv.Atoi(s[2:])
if err != nil {
return fmt.Errorf("invalid Go version: %q", s)
}
*v = VersionFlag(i)
return nil
}
func (v *VersionFlag) Get() interface{} {
return int(*v)
}
// ExhaustiveTypeSwitch panics when called. It can be used to ensure
// that type switches are exhaustive.
func ExhaustiveTypeSwitch(v interface{}) {
panic(fmt.Sprintf("internal error: unhandled case %T", v))
}
// A directive is a comment of the form '//lint:<command>
// [arguments...]'. It represents instructions to the static analysis
// tool.
type Directive struct {
Command string
Arguments []string
Directive *ast.Comment
Node ast.Node
}
func parseDirective(s string) (cmd string, args []string) {
if !strings.HasPrefix(s, "//lint:") {
return "", nil
}
s = strings.TrimPrefix(s, "//lint:")
fields := strings.Split(s, " ")
return fields[0], fields[1:]
}
// ParseDirectives extracts all directives from a list of Go files.
func ParseDirectives(files []*ast.File, fset *token.FileSet) []Directive {
var dirs []Directive
for _, f := range files {
// OPT(dh): in our old code, we skipped all the comment map work if we
// couldn't find any directives; benchmark whether that's actually
// worth doing.
cm := ast.NewCommentMap(fset, f, f.Comments)
for node, cgs := range cm {
for _, cg := range cgs {
for _, c := range cg.List {
if !strings.HasPrefix(c.Text, "//lint:") {
continue
}
cmd, args := parseDirective(c.Text)
d := Directive{
Command: cmd,
Arguments: args,
Directive: c,
Node: node,
}
dirs = append(dirs, d)
}
}
}
}
return dirs
}
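
A sketch of how a check registers its structured documentation through this package; the check name EX1000, its analyzer, and the import path honnef.co/go/tools/analysis/lint are hypothetical:

package example

import (
    "golang.org/x/tools/go/analysis"

    "honnef.co/go/tools/analysis/lint"
)

var rawDocs = map[string]*lint.RawDocumentation{
    "EX1000": {
        Title:    "Flags calls to os.Exit outside of func main",
        Since:    "2022.1",
        Severity: lint.SeverityWarning,
    },
}

var rawAnalyzers = map[string]*analysis.Analyzer{
    "EX1000": {Run: func(*analysis.Pass) (interface{}, error) { return nil, nil }},
}

// Analyzers pairs each go/analysis analyzer with its structured documentation;
// InitializeAnalyzers also installs the "go" version flag consulted by code.IsGoVersion.
var Analyzers = lint.InitializeAnalyzers(lint.Markdownify(rawDocs), rawAnalyzers)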


@@ -0,0 +1,247 @@
package report
import (
"bytes"
"fmt"
"go/ast"
"go/format"
"go/token"
"path/filepath"
"strconv"
"strings"
"honnef.co/go/tools/analysis/facts"
"honnef.co/go/tools/go/ast/astutil"
"golang.org/x/tools/go/analysis"
)
type Options struct {
ShortRange bool
FilterGenerated bool
Fixes []analysis.SuggestedFix
Related []analysis.RelatedInformation
}
type Option func(*Options)
func ShortRange() Option {
return func(opts *Options) {
opts.ShortRange = true
}
}
func FilterGenerated() Option {
return func(opts *Options) {
opts.FilterGenerated = true
}
}
func Fixes(fixes ...analysis.SuggestedFix) Option {
return func(opts *Options) {
opts.Fixes = append(opts.Fixes, fixes...)
}
}
func Related(node Positioner, message string) Option {
return func(opts *Options) {
pos, end, ok := getRange(node, opts.ShortRange)
if !ok {
return
}
r := analysis.RelatedInformation{
Pos: pos,
End: end,
Message: message,
}
opts.Related = append(opts.Related, r)
}
}
type Positioner interface {
Pos() token.Pos
}
type fullPositioner interface {
Pos() token.Pos
End() token.Pos
}
type sourcer interface {
Source() ast.Node
}
// shortRange returns the position and end of the main component of an
// AST node. For nodes that have no body, the short range is identical
// to the node's Pos and End. For nodes that do have a body, the short
// range excludes the body.
func shortRange(node ast.Node) (pos, end token.Pos) {
switch node := node.(type) {
case *ast.File:
return node.Pos(), node.Name.End()
case *ast.CaseClause:
return node.Pos(), node.Colon + 1
case *ast.CommClause:
return node.Pos(), node.Colon + 1
case *ast.DeferStmt:
return node.Pos(), node.Defer + token.Pos(len("defer"))
case *ast.ExprStmt:
return shortRange(node.X)
case *ast.ForStmt:
if node.Post != nil {
return node.For, node.Post.End()
} else if node.Cond != nil {
return node.For, node.Cond.End()
} else if node.Init != nil {
// +1 to catch the semicolon, for gofmt'ed code
return node.Pos(), node.Init.End() + 1
} else {
return node.Pos(), node.For + token.Pos(len("for"))
}
case *ast.FuncDecl:
return node.Pos(), node.Type.End()
case *ast.FuncLit:
return node.Pos(), node.Type.End()
case *ast.GoStmt:
if _, ok := astutil.Unparen(node.Call.Fun).(*ast.FuncLit); ok {
return node.Pos(), node.Go + token.Pos(len("go"))
} else {
return node.Pos(), node.End()
}
case *ast.IfStmt:
return node.Pos(), node.Cond.End()
case *ast.RangeStmt:
return node.Pos(), node.X.End()
case *ast.SelectStmt:
return node.Pos(), node.Pos() + token.Pos(len("select"))
case *ast.SwitchStmt:
if node.Tag != nil {
return node.Pos(), node.Tag.End()
} else if node.Init != nil {
// +1 to catch the semicolon, for gofmt'ed code
return node.Pos(), node.Init.End() + 1
} else {
return node.Pos(), node.Pos() + token.Pos(len("switch"))
}
case *ast.TypeSwitchStmt:
return node.Pos(), node.Assign.End()
default:
return node.Pos(), node.End()
}
}
func HasRange(node Positioner) bool {
// we don't know if getRange will be called with shortRange set to
// true, so make sure that both work.
_, _, ok := getRange(node, false)
if !ok {
return false
}
_, _, ok = getRange(node, true)
return ok
}
func getRange(node Positioner, short bool) (pos, end token.Pos, ok bool) {
switch n := node.(type) {
case sourcer:
s := n.Source()
if s == nil {
return 0, 0, false
}
if short {
p, e := shortRange(s)
return p, e, true
}
return s.Pos(), s.End(), true
case fullPositioner:
if short {
p, e := shortRange(n)
return p, e, true
}
return n.Pos(), n.End(), true
default:
return n.Pos(), token.NoPos, true
}
}
func Report(pass *analysis.Pass, node Positioner, message string, opts ...Option) {
cfg := &Options{}
for _, opt := range opts {
opt(cfg)
}
file := DisplayPosition(pass.Fset, node.Pos()).Filename
if cfg.FilterGenerated {
m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
if _, ok := m[file]; ok {
return
}
}
pos, end, ok := getRange(node, cfg.ShortRange)
if !ok {
panic(fmt.Sprintf("no valid position for reporting node %v", node))
}
d := analysis.Diagnostic{
Pos: pos,
End: end,
Message: message,
SuggestedFixes: cfg.Fixes,
Related: cfg.Related,
}
pass.Report(d)
}
func Render(pass *analysis.Pass, x interface{}) string {
var buf bytes.Buffer
if err := format.Node(&buf, pass.Fset, x); err != nil {
panic(err)
}
return buf.String()
}
func RenderArgs(pass *analysis.Pass, args []ast.Expr) string {
var ss []string
for _, arg := range args {
ss = append(ss, Render(pass, arg))
}
return strings.Join(ss, ", ")
}
func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position {
if p == token.NoPos {
return token.Position{}
}
// Only use the adjusted position if it points to another Go file.
// This means we'll point to the original file for cgo files, but
// we won't point to a YACC grammar file.
pos := fset.PositionFor(p, false)
adjPos := fset.PositionFor(p, true)
if filepath.Ext(adjPos.Filename) == ".go" {
return adjPos
}
return pos
}
func Ordinal(n int) string {
suffix := "th"
if n < 10 || n > 20 {
switch n % 10 {
case 0:
suffix = "th"
case 1:
suffix = "st"
case 2:
suffix = "nd"
case 3:
suffix = "rd"
default:
suffix = "th"
}
}
return strconv.Itoa(n) + suffix
}
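
A sketch that ties the report and edit helpers together: the reportBoolCompare example from the edit package above, rewritten to go through report.Report. FilterGenerated requires the calling analyzer to depend on facts.Generated, and the import paths are assumptions as before:

package example

import (
    "go/ast"

    "golang.org/x/tools/go/analysis"

    "honnef.co/go/tools/analysis/edit"
    "honnef.co/go/tools/analysis/report"
)

// reportBoolCompare emits the diagnostic through report.Report, so findings in
// generated files are dropped and the suggested fix is attached as an option.
func reportBoolCompare(pass *analysis.Pass, cmp *ast.BinaryExpr, operand string) {
    report.Report(pass, cmp, "should omit comparison to bool constant",
        report.FilterGenerated(),
        report.Fixes(edit.Fix("omit comparison with boolean constant",
            edit.ReplaceWithString(cmp, operand))))
}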