Update gometalinter config (#331)

* Update gometalinter config

gometalinter now uses `maligned` instead of `aligncheck`
(https://github.com/alecthomas/gometalinter/pull/367), so we need to update our
config accordingly.

* Update gometalinter

* Disable gotype linter

gotype does not seem to play nicely with the gb vendor directory. In
particular, it wants each of our dependencies to be built and installed (see
https://github.com/golang/go/issues/10969), but (empirically) it will not
accept them being installed in `pkg` but insists on them being in `vendor/pkg`.

This presents a problem because `gb build` builds the packages into `pkg`
(which doesn't seem entirely unreasonable since `.` comes before `vendor` in
`$GOPATH`). `go install github.com/x/y` does install in `vendor/pkg` but
requires us to know the name of each package.

The general conclusion of https://github.com/alecthomas/gometalinter/issues/91
seems to have been that the easiest thing to do is to disable `gotype` for now.

* Fix `unparam` lint

* Fix goshadow lint
main
Richard van der Hoff 2017-11-15 10:25:48 +00:00 committed by Erik Johnston
parent dc782ec399
commit 8fff0e887c
95 changed files with 8927 additions and 1176 deletions

View File

@ -4,7 +4,6 @@
"Deadline": "5m", "Deadline": "5m",
"Enable": [ "Enable": [
"vetshadow", "vetshadow",
"gotype",
"deadcode", "deadcode",
"gocyclo", "gocyclo",
"ineffassign", "ineffassign",

View File

@ -4,13 +4,12 @@
"Deadline": "5m", "Deadline": "5m",
"Enable": [ "Enable": [
"vetshadow", "vetshadow",
"gotype",
"deadcode", "deadcode",
"gocyclo", "gocyclo",
"golint", "golint",
"varcheck", "varcheck",
"structcheck", "structcheck",
"aligncheck", "maligned",
"ineffassign", "ineffassign",
"gas", "gas",
"misspell", "misspell",

View File

@ -80,7 +80,7 @@ func GetDevicesByLocalpart(
} }
ctx := req.Context() ctx := req.Context()
devices, err := deviceDB.GetDevicesByLocalpart(ctx, localpart) deviceList, err := deviceDB.GetDevicesByLocalpart(ctx, localpart)
if err != nil { if err != nil {
return httputil.LogThenError(req, err) return httputil.LogThenError(req, err)
@ -88,7 +88,7 @@ func GetDevicesByLocalpart(
res := devicesJSON{} res := devicesJSON{}
for _, dev := range devices { for _, dev := range deviceList {
res.Devices = append(res.Devices, deviceJSON{ res.Devices = append(res.Devices, deviceJSON{
DeviceID: dev.ID, DeviceID: dev.ID,
UserID: dev.UserID, UserID: dev.UserID,

View File

@ -33,7 +33,7 @@ type response struct {
// GetMemberships implements GET /rooms/{roomId}/members // GetMemberships implements GET /rooms/{roomId}/members
func GetMemberships( func GetMemberships(
req *http.Request, device *authtypes.Device, roomID string, joinedOnly bool, req *http.Request, device *authtypes.Device, roomID string, joinedOnly bool,
cfg config.Dendrite, _ config.Dendrite,
queryAPI api.RoomserverQueryAPI, queryAPI api.RoomserverQueryAPI,
) util.JSONResponse { ) util.JSONResponse {
queryReq := api.QueryMembershipsForRoomRequest{ queryReq := api.QueryMembershipsForRoomRequest{

View File

@ -75,7 +75,7 @@ var timeout time.Duration
var port = 10000 var port = 10000
func startMediaAPI(suffix string, dynamicThumbnails bool) (*exec.Cmd, chan error, string, *exec.Cmd, chan error, string, string) { func startMediaAPI(suffix string, dynamicThumbnails bool) (*exec.Cmd, chan error, *exec.Cmd, string, string) {
dir, err := ioutil.TempDir("", serverType+"-server-test"+suffix) dir, err := ioutil.TempDir("", serverType+"-server-test"+suffix)
if err != nil { if err != nil {
panic(err) panic(err)
@ -107,7 +107,7 @@ func startMediaAPI(suffix string, dynamicThumbnails bool) (*exec.Cmd, chan error
testDatabaseName + suffix, testDatabaseName + suffix,
} }
proxyCmd, proxyCmdChan := test.StartProxy(proxyAddr, cfg) proxyCmd, _ := test.StartProxy(proxyAddr, cfg)
test.InitDatabase( test.InitDatabase(
postgresDatabase, postgresDatabase,
@ -121,7 +121,7 @@ func startMediaAPI(suffix string, dynamicThumbnails bool) (*exec.Cmd, chan error
) )
fmt.Printf("==TESTSERVER== STARTED %v -> %v : %v\n", proxyAddr, cfg.Listen.MediaAPI, dir) fmt.Printf("==TESTSERVER== STARTED %v -> %v : %v\n", proxyAddr, cfg.Listen.MediaAPI, dir)
return cmd, cmdChan, string(cfg.Listen.MediaAPI), proxyCmd, proxyCmdChan, proxyAddr, dir return cmd, cmdChan, proxyCmd, proxyAddr, dir
} }
func cleanUpServer(cmd *exec.Cmd, dir string) { func cleanUpServer(cmd *exec.Cmd, dir string) {
@ -145,7 +145,7 @@ func main() {
} }
// create server1 with only pre-generated thumbnails allowed // create server1 with only pre-generated thumbnails allowed
server1Cmd, server1CmdChan, _, server1ProxyCmd, _, server1ProxyAddr, server1Dir := startMediaAPI("1", false) server1Cmd, server1CmdChan, server1ProxyCmd, server1ProxyAddr, server1Dir := startMediaAPI("1", false)
defer cleanUpServer(server1Cmd, server1Dir) defer cleanUpServer(server1Cmd, server1Dir)
defer server1ProxyCmd.Process.Kill() // nolint: errcheck defer server1ProxyCmd.Process.Kill() // nolint: errcheck
testDownload(server1ProxyAddr, server1ProxyAddr, "doesnotexist", 404, server1CmdChan) testDownload(server1ProxyAddr, server1ProxyAddr, "doesnotexist", 404, server1CmdChan)
@ -162,7 +162,7 @@ func main() {
testThumbnail(64, 64, "crop", server1ProxyAddr, server1CmdChan) testThumbnail(64, 64, "crop", server1ProxyAddr, server1CmdChan)
// create server2 with dynamic thumbnail generation // create server2 with dynamic thumbnail generation
server2Cmd, server2CmdChan, _, server2ProxyCmd, _, server2ProxyAddr, server2Dir := startMediaAPI("2", true) server2Cmd, server2CmdChan, server2ProxyCmd, server2ProxyAddr, server2Dir := startMediaAPI("2", true)
defer cleanUpServer(server2Cmd, server2Dir) defer cleanUpServer(server2Cmd, server2Dir)
defer server2ProxyCmd.Process.Kill() // nolint: errcheck defer server2ProxyCmd.Process.Kill() // nolint: errcheck
testDownload(server2ProxyAddr, server2ProxyAddr, "doesnotexist", 404, server2CmdChan) testDownload(server2ProxyAddr, server2ProxyAddr, "doesnotexist", 404, server2CmdChan)

View File

@ -28,10 +28,10 @@ import (
func GetEvent( func GetEvent(
ctx context.Context, ctx context.Context,
request *gomatrixserverlib.FederationRequest, request *gomatrixserverlib.FederationRequest,
cfg config.Dendrite, _ config.Dendrite,
query api.RoomserverQueryAPI, query api.RoomserverQueryAPI,
now time.Time, _ time.Time,
keys gomatrixserverlib.KeyRing, _ gomatrixserverlib.KeyRing,
eventID string, eventID string,
) util.JSONResponse { ) util.JSONResponse {
var authResponse api.QueryServerAllowedToSeeEventResponse var authResponse api.QueryServerAllowedToSeeEventResponse

View File

@ -287,7 +287,7 @@ func buildMembershipEvent(
// them responded with an error. // them responded with an error.
func sendToRemoteServer( func sendToRemoteServer(
ctx context.Context, inv invite, ctx context.Context, inv invite,
federation *gomatrixserverlib.FederationClient, cfg config.Dendrite, federation *gomatrixserverlib.FederationClient, _ config.Dendrite,
builder gomatrixserverlib.EventBuilder, builder gomatrixserverlib.EventBuilder,
) (err error) { ) (err error) {
remoteServers := make([]gomatrixserverlib.ServerName, 2) remoteServers := make([]gomatrixserverlib.ServerName, 2)

View File

@ -89,7 +89,7 @@ func SelectThumbnail(desired types.ThumbnailSize, thumbnails []*types.ThumbnailM
} }
// getActiveThumbnailGeneration checks for active thumbnail generation // getActiveThumbnailGeneration checks for active thumbnail generation
func getActiveThumbnailGeneration(dst types.Path, config types.ThumbnailSize, activeThumbnailGeneration *types.ActiveThumbnailGeneration, maxThumbnailGenerators int, logger *log.Entry) (isActive bool, busy bool, errorReturn error) { func getActiveThumbnailGeneration(dst types.Path, _ types.ThumbnailSize, activeThumbnailGeneration *types.ActiveThumbnailGeneration, maxThumbnailGenerators int, logger *log.Entry) (isActive bool, busy bool, errorReturn error) {
// Check if there is active thumbnail generation. // Check if there is active thumbnail generation.
activeThumbnailGeneration.Lock() activeThumbnailGeneration.Lock()
defer activeThumbnailGeneration.Unlock() defer activeThumbnailGeneration.Unlock()
@ -119,7 +119,7 @@ func getActiveThumbnailGeneration(dst types.Path, config types.ThumbnailSize, ac
// broadcastGeneration broadcasts that thumbnail generation completed and the error to all waiting goroutines // broadcastGeneration broadcasts that thumbnail generation completed and the error to all waiting goroutines
// Note: This should only be called by the owner of the activeThumbnailGenerationResult // Note: This should only be called by the owner of the activeThumbnailGenerationResult
func broadcastGeneration(dst types.Path, activeThumbnailGeneration *types.ActiveThumbnailGeneration, config types.ThumbnailSize, errorReturn error, logger *log.Entry) { func broadcastGeneration(dst types.Path, activeThumbnailGeneration *types.ActiveThumbnailGeneration, _ types.ThumbnailSize, errorReturn error, logger *log.Entry) {
activeThumbnailGeneration.Lock() activeThumbnailGeneration.Lock()
defer activeThumbnailGeneration.Unlock() defer activeThumbnailGeneration.Unlock()
if activeThumbnailGenerationResult, ok := activeThumbnailGeneration.PathToResult[string(dst)]; ok { if activeThumbnailGenerationResult, ok := activeThumbnailGeneration.PathToResult[string(dst)]; ok {

2
vendor/manifest vendored
View File

@ -10,7 +10,7 @@
{ {
"importpath": "github.com/alecthomas/gometalinter", "importpath": "github.com/alecthomas/gometalinter",
"repository": "https://github.com/alecthomas/gometalinter", "repository": "https://github.com/alecthomas/gometalinter",
"revision": "5507b26af3204e949ffe50ec08ee73e5847938e1", "revision": "0262fb20957a4c2d3bb7c834a6a125ae3884a2c6",
"branch": "master" "branch": "master"
}, },
{ {

View File

@ -19,7 +19,6 @@
- [2. Analyse the debug output](#2-analyse-the-debug-output) - [2. Analyse the debug output](#2-analyse-the-debug-output)
- [3. Report an issue.](#3-report-an-issue) - [3. Report an issue.](#3-report-an-issue)
- [How do I filter issues between two git refs?](#how-do-i-filter-issues-between-two-git-refs) - [How do I filter issues between two git refs?](#how-do-i-filter-issues-between-two-git-refs)
- [Details](#details)
- [Checkstyle XML format](#checkstyle-xml-format) - [Checkstyle XML format](#checkstyle-xml-format)
<!-- /MarkdownTOC --> <!-- /MarkdownTOC -->
@ -57,12 +56,13 @@ It is intended for use with editor/IDE integration.
- [go vet](https://golang.org/cmd/vet/) - Reports potential errors that otherwise compile. - [go vet](https://golang.org/cmd/vet/) - Reports potential errors that otherwise compile.
- [go tool vet --shadow](https://golang.org/cmd/vet/#hdr-Shadowed_variables) - Reports variables that may have been unintentionally shadowed. - [go tool vet --shadow](https://golang.org/cmd/vet/#hdr-Shadowed_variables) - Reports variables that may have been unintentionally shadowed.
- [gotype](https://golang.org/x/tools/cmd/gotype) - Syntactic and semantic analysis similar to the Go compiler. - [gotype](https://golang.org/x/tools/cmd/gotype) - Syntactic and semantic analysis similar to the Go compiler.
- [gotype -x](https://golang.org/x/tools/cmd/gotype) - Syntactic and semantic analysis in external test packages (similar to the Go compiler).
- [deadcode](https://github.com/tsenart/deadcode) - Finds unused code. - [deadcode](https://github.com/tsenart/deadcode) - Finds unused code.
- [gocyclo](https://github.com/alecthomas/gocyclo) - Computes the cyclomatic complexity of functions. - [gocyclo](https://github.com/alecthomas/gocyclo) - Computes the cyclomatic complexity of functions.
- [golint](https://github.com/golang/lint) - Google's (mostly stylistic) linter. - [golint](https://github.com/golang/lint) - Google's (mostly stylistic) linter.
- [varcheck](https://github.com/opennota/check) - Find unused global variables and constants. - [varcheck](https://github.com/opennota/check) - Find unused global variables and constants.
- [structcheck](https://github.com/opennota/check) - Find unused struct fields. - [structcheck](https://github.com/opennota/check) - Find unused struct fields.
- [aligncheck](https://github.com/opennota/check) - Warn about un-optimally aligned structures. - [maligned](https://github.com/mdempsky/maligned) - Detect structs that would take less memory if their fields were sorted.
- [errcheck](https://github.com/kisielk/errcheck) - Check that error return values are used. - [errcheck](https://github.com/kisielk/errcheck) - Check that error return values are used.
- [megacheck](https://github.com/dominikh/go-tools/tree/master/cmd/megacheck) - Run staticcheck, gosimple and unused, sharing work. - [megacheck](https://github.com/dominikh/go-tools/tree/master/cmd/megacheck) - Run staticcheck, gosimple and unused, sharing work.
- [dupl](https://github.com/mibk/dupl) - Reports potentially duplicated code. - [dupl](https://github.com/mibk/dupl) - Reports potentially duplicated code.
@ -81,6 +81,7 @@ Disabled by default (enable with `--enable=<linter>`):
- [gosimple](https://github.com/dominikh/go-tools/tree/master/cmd/gosimple) - Report simplifications in code. - [gosimple](https://github.com/dominikh/go-tools/tree/master/cmd/gosimple) - Report simplifications in code.
- [lll](https://github.com/walle/lll) - Report long lines (see `--line-length=N`). - [lll](https://github.com/walle/lll) - Report long lines (see `--line-length=N`).
- [misspell](https://github.com/client9/misspell) - Finds commonly misspelled English words. - [misspell](https://github.com/client9/misspell) - Finds commonly misspelled English words.
- [nakedret](https://github.com/alexkohler/nakedret) - Finds naked returns.
- [unparam](https://github.com/mvdan/unparam) - Find unused function parameters. - [unparam](https://github.com/mvdan/unparam) - Find unused function parameters.
- [unused](https://github.com/dominikh/go-tools/tree/master/cmd/unused) - Find unused variables. - [unused](https://github.com/dominikh/go-tools/tree/master/cmd/unused) - Find unused variables.
- [safesql](https://github.com/stripe/safesql) - Finds potential SQL injection vulnerabilities. - [safesql](https://github.com/stripe/safesql) - Finds potential SQL injection vulnerabilities.
@ -91,14 +92,15 @@ Additional linters can be added through the command line with `--linter=NAME:COM
## Configuration file ## Configuration file
gometalinter now supports a JSON configuration file which can be loaded via gometalinter now supports a JSON configuration file which can be loaded via
`--config=<file>`. The format of this file is determined by the Config struct `--config=<file>`. The format of this file is determined by the `Config` struct
in `config.go`. in [config.go](https://github.com/alecthomas/gometalinter/blob/master/config.go).
The configuration file mostly corresponds to command-line flags, with the following exceptions: The configuration file mostly corresponds to command-line flags, with the following exceptions:
- Linters defined in the configuration file will overlay existing definitions, not replace them. - Linters defined in the configuration file will overlay existing definitions, not replace them.
- "Enable" defines the exact set of linters that will be enabled (default - "Enable" defines the exact set of linters that will be enabled (default
linters are disabled). linters are disabled). `--help` displays the list of default linters with the exact names
you must use.
Here is an example configuration file: Here is an example configuration file:
@ -108,6 +110,34 @@ Here is an example configuration file:
} }
``` ```
### Adding Custom linters
Linters can be added and customized from the config file using the `Linters` field.
Linters supports the following fields:
* `Command` - the path to the linter binary and any default arguments
* `Pattern` - a regular expression used to parse the linter output
* `IsFast` - if the linter should be run when the `--fast` flag is used
* `PartitionStrategy` - how paths args should be passed to the linter command:
* `directories` - call the linter once with a list of all the directories
* `files` - call the linter once with a list of all the files
* `packages` - call the linter once with a list of all the package paths
* `files-by-package` - call the linter once per package with a list of the
files in the package.
* `single-directory` - call the linter once per directory
The config for default linters can be overridden by using the name of the
linter.
Additional linters can be configured via the command line using the format
`NAME:COMMAND:PATTERN`.
Example:
```
$ gometalinter --linter='vet:go tool vet -printfuncs=Infof,Debugf,Warningf,Errorf:PATH:LINE:MESSAGE' .
```
## Installing ## Installing
There are two options for installing gometalinter. There are two options for installing gometalinter.
@ -171,7 +201,8 @@ Install all known linters:
$ gometalinter --install $ gometalinter --install
Installing: Installing:
structcheck structcheck
aligncheck maligned
nakedret
deadcode deadcode
gocyclo gocyclo
ineffassign ineffassign
@ -308,21 +339,6 @@ gometalinter |& revgrep master # Show issues between master and HEAD (or
gometalinter |& revgrep origin/master # Show issues that haven't been pushed. gometalinter |& revgrep origin/master # Show issues that haven't been pushed.
``` ```
## Details
Additional linters can be configured via the command line:
```
$ gometalinter --linter='vet:go tool vet -printfuncs=Infof,Debugf,Warningf,Errorf:PATH:LINE:MESSAGE' .
stutter.go:21:15:warning: error return value not checked (defer a.Close()) (errcheck)
stutter.go:22:15:warning: error return value not checked (defer a.Close()) (errcheck)
stutter.go:27:6:warning: error return value not checked (doit() // test for errcheck) (errcheck)
stutter.go:9::warning: unused global variable unusedGlobal (varcheck)
stutter.go:13::warning: unused struct field MyStruct.Unused (structcheck)
stutter.go:12:6:warning: exported type MyStruct should have comment or be unexported (golint)
stutter.go:16:6:warning: exported type PublicUndocumented should have comment or be unexported (deadcode)
```
## Checkstyle XML format ## Checkstyle XML format
`gometalinter` supports [checkstyle](http://checkstyle.sourceforge.net/) `gometalinter` supports [checkstyle](http://checkstyle.sourceforge.net/)

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Alex Kohler
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,310 @@
package main
/*
This file holds a direct copy of the import path matching code of
https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be
replaced when https://golang.org/issue/8768 is resolved.
It has been updated to follow upstream changes in a few ways.
*/
import (
"fmt"
"go/build"
"log"
"os"
"path"
"path/filepath"
"regexp"
"runtime"
"strings"
)
// buildContext is the build.Context used to resolve and import
// packages throughout this file.
var buildContext = build.Default

var (
	// goroot is the cleaned root of the Go installation.
	goroot = filepath.Clean(runtime.GOROOT())
	// gorootSrc is the standard-library source directory under goroot.
	gorootSrc = filepath.Join(goroot, "src")
)
// importPathsNoDotExpansion returns the import paths to use for the given
// command line, but it does no ... expansion.
func importPathsNoDotExpansion(args []string) []string {
	if len(args) == 0 {
		return []string{"."}
	}
	var out []string
	for _, arg := range args {
		// Arguments are supposed to be import paths, but
		// as a courtesy to Windows developers, rewrite \ to /
		// in command-line arguments. Handles .\... and so on.
		if filepath.Separator == '\\' {
			arg = strings.Replace(arg, `\`, `/`, -1)
		}

		// Put the argument in canonical form, preserving a leading ./.
		if strings.HasPrefix(arg, "./") {
			arg = "./" + path.Clean(arg)
			if arg == "./." {
				arg = "."
			}
		} else {
			arg = path.Clean(arg)
		}

		// "all" and "std" are meta-patterns that expand to many packages.
		if arg == "all" || arg == "std" {
			out = append(out, allPackages(arg)...)
			continue
		}
		out = append(out, arg)
	}
	return out
}
// importPaths returns the import paths to use for the given command line.
func importPaths(args []string) []string {
	var result []string
	for _, arg := range importPathsNoDotExpansion(args) {
		if !strings.Contains(arg, "...") {
			result = append(result, arg)
			continue
		}
		// Expand the ... wildcard: relative to the filesystem for
		// local imports, and against $GOROOT/$GOPATH otherwise.
		if build.IsLocalImport(arg) {
			result = append(result, allPackagesInFS(arg)...)
		} else {
			result = append(result, allPackages(arg)...)
		}
	}
	return result
}
// matchPattern(pattern)(name) reports whether name matches pattern.
// Pattern is a limited glob pattern in which '...' means 'any string'
// and there is no other special syntax.
func matchPattern(pattern string) func(name string) bool {
	quoted := regexp.QuoteMeta(pattern)
	quoted = strings.Replace(quoted, `\.\.\.`, `.*`, -1)
	// Special case: foo/... matches foo too.
	if strings.HasSuffix(quoted, `/.*`) {
		quoted = quoted[:len(quoted)-len(`/.*`)] + `(/.*)?`
	}
	matcher := regexp.MustCompile(`^` + quoted + `$`)
	return matcher.MatchString
}
// hasPathPrefix reports whether the path s begins with the
// elements in prefix.
func hasPathPrefix(s, prefix string) bool {
	if len(s) < len(prefix) {
		return false
	}
	if len(s) == len(prefix) {
		return s == prefix
	}
	// s is strictly longer: the prefix must end at a path-element
	// boundary, unless it already ends in a separator.
	if prefix != "" && prefix[len(prefix)-1] == '/' {
		return strings.HasPrefix(s, prefix)
	}
	return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
}
// treeCanMatchPattern(pattern)(name) reports whether
// name or children of name can possibly match pattern.
// Pattern is the same limited glob accepted by matchPattern.
func treeCanMatchPattern(pattern string) func(name string) bool {
	prefix := pattern
	wildCard := false
	if i := strings.Index(prefix, "..."); i >= 0 {
		wildCard = true
		prefix = prefix[:i]
	}
	return func(name string) bool {
		if len(name) <= len(prefix) && hasPathPrefix(prefix, name) {
			return true
		}
		return wildCard && strings.HasPrefix(name, prefix)
	}
}
// allPackages returns all the packages that can be found
// under the $GOPATH directories and $GOROOT matching pattern.
// The pattern is either "all" (all packages), "std" (standard packages)
// or a path including "...".
func allPackages(pattern string) []string {
	matched := matchPackages(pattern)
	if len(matched) == 0 {
		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
	}
	return matched
}
// matchPackages returns the import paths under $GOROOT and the
// $GOPATH directories that match pattern. The patterns "all" and
// "std" match every package and every standard-library package
// respectively; anything else goes through matchPattern.
func matchPackages(pattern string) []string {
	// For "all"/"std" every name matches; otherwise build real matchers.
	match := func(string) bool { return true }
	treeCanMatch := func(string) bool { return true }
	if pattern != "all" && pattern != "std" {
		match = matchPattern(pattern)
		treeCanMatch = treeCanMatchPattern(pattern)
	}

	// have records names already emitted (or deliberately skipped) so
	// each package appears at most once.
	have := map[string]bool{
		"builtin": true, // ignore pseudo-package that exists only for documentation
	}
	if !buildContext.CgoEnabled {
		have["runtime/cgo"] = true // ignore during walk
	}
	var pkgs []string

	// Commands
	cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator)
	filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error {
		if err != nil || !fi.IsDir() || path == cmd {
			return nil
		}
		name := path[len(cmd):]
		if !treeCanMatch(name) {
			return filepath.SkipDir
		}
		// Commands are all in cmd/, not in subdirectories.
		if strings.Contains(name, string(filepath.Separator)) {
			return filepath.SkipDir
		}

		// We use, e.g., cmd/gofmt as the pseudo import path for gofmt.
		name = "cmd/" + name
		if have[name] {
			return nil
		}
		have[name] = true
		if !match(name) {
			return nil
		}
		// Only keep directories that actually contain Go source; other
		// import errors are logged and the directory skipped.
		_, err = buildContext.ImportDir(path, 0)
		if err != nil {
			if _, noGo := err.(*build.NoGoError); !noGo {
				log.Print(err)
			}
			return nil
		}
		pkgs = append(pkgs, name)
		return nil
	})

	// Regular packages under each source directory.
	for _, src := range buildContext.SrcDirs() {
		// "std" and "cmd" only make sense relative to GOROOT.
		if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
			continue
		}
		src = filepath.Clean(src) + string(filepath.Separator)
		root := src
		if pattern == "cmd" {
			root += "cmd" + string(filepath.Separator)
		}
		filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
			if err != nil || !fi.IsDir() || path == src {
				return nil
			}

			// Avoid .foo, _foo, testdata and vendor directory trees.
			_, elem := filepath.Split(path)
			if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" {
				return filepath.SkipDir
			}

			name := filepath.ToSlash(path[len(src):])
			if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") {
				// The name "std" is only the standard library.
				// If the name is cmd, it's the root of the command tree.
				return filepath.SkipDir
			}
			if !treeCanMatch(name) {
				return filepath.SkipDir
			}
			if have[name] {
				return nil
			}
			have[name] = true
			if !match(name) {
				return nil
			}
			// Unlike the command walk above, non-NoGoError import
			// failures here are NOT logged; the directory is still added.
			_, err = buildContext.ImportDir(path, 0)
			if err != nil {
				if _, noGo := err.(*build.NoGoError); noGo {
					return nil
				}
			}
			pkgs = append(pkgs, name)
			return nil
		})
	}
	return pkgs
}
// allPackagesInFS is like allPackages but is passed a pattern
// beginning ./ or ../, meaning it should scan the tree rooted
// at the given directory. There are ... in the pattern too.
func allPackagesInFS(pattern string) []string {
	found := matchPackagesInFS(pattern)
	if len(found) == 0 {
		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
	}
	return found
}
// matchPackagesInFS scans the filesystem tree rooted at the directory
// prefix of pattern (which begins "./" or "../" and contains "...")
// and returns the package directories whose paths match the pattern.
func matchPackagesInFS(pattern string) []string {
	// Find directory to begin the scan.
	// Could be smarter but this one optimization
	// is enough for now, since ... is usually at the
	// end of a path.
	i := strings.Index(pattern, "...")
	dir, _ := path.Split(pattern[:i])

	// pattern begins with ./ or ../.
	// path.Clean will discard the ./ but not the ../.
	// We need to preserve the ./ for pattern matching
	// and in the returned import paths.
	prefix := ""
	if strings.HasPrefix(pattern, "./") {
		prefix = "./"
	}
	match := matchPattern(pattern)

	var pkgs []string
	filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
		if err != nil || !fi.IsDir() {
			return nil
		}
		if path == dir {
			// filepath.Walk starts at dir and recurses. For the recursive case,
			// the path is the result of filepath.Join, which calls filepath.Clean.
			// The initial case is not Cleaned, though, so we do this explicitly.
			//
			// This converts a path like "./io/" to "io". Without this step, running
			// "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io
			// package, because prepending the prefix "./" to the unclean path would
			// result in "././io", and match("././io") returns false.
			path = filepath.Clean(path)
		}

		// Avoid .foo, _foo, testdata and vendor directory trees, but do not avoid "." or "..".
		_, elem := filepath.Split(path)
		dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
		if dot || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" {
			return filepath.SkipDir
		}

		name := prefix + filepath.ToSlash(path)
		if !match(name) {
			return nil
		}
		// Only record directories that actually contain Go source;
		// other import errors are logged but do not stop the walk.
		if _, err = build.ImportDir(path, 0); err != nil {
			if _, noGo := err.(*build.NoGoError); !noGo {
				log.Print(err)
			}
			return nil
		}
		pkgs = append(pkgs, name)
		return nil
	})
	return pkgs
}

View File

@ -0,0 +1,213 @@
package main
import (
"errors"
"flag"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"log"
"os"
"path/filepath"
"strings"
)
const (
	// pwd is the default directory linted when no arguments are supplied.
	pwd = "./"
)
// init configures go/build to consider every Go file in a directory,
// regardless of build constraints.
func init() {
	// TODO allow build tags
	build.Default.UseAllFiles = true
}
// usage prints command-line help for nakedret via the log package.
func usage() {
	log.Printf("Usage of %s:\n", os.Args[0])
	log.Print("\nnakedret [flags] # runs on package in current directory\n")
	log.Print("\nnakedret [flags] [packages]\n")
	log.Print("Flags:\n")
	flag.PrintDefaults()
}
// returnsVisitor walks an AST and reports naked returns found in
// functions longer than maxLength lines.
type returnsVisitor struct {
	// f holds position information for the parsed files.
	f *token.FileSet
	// maxLength is the largest function length (in lines) for which
	// naked returns are still tolerated.
	maxLength uint
}
// main parses flags and runs the naked-return check over the packages
// or files named on the command line, logging any parse error.
func main() {
	// Remove log timestamp
	log.SetFlags(0)

	maxLength := flag.Uint("l", 5, "maximum number of lines for a naked return function")

	flag.Usage = usage
	flag.Parse()

	if err := checkNakedReturns(flag.Args(), maxLength); err != nil {
		log.Println(err)
	}
}
// checkNakedReturns parses the files/packages named by args and walks
// every function, logging naked returns in functions longer than
// *maxLength lines. A nil maxLength is reported as an error.
func checkNakedReturns(args []string, maxLength *uint) error {
	fset := token.NewFileSet()
	files, err := parseInput(args, fset)
	if err != nil {
		return fmt.Errorf("could not parse input %v", err)
	}
	if maxLength == nil {
		return errors.New("max length nil")
	}

	visitor := &returnsVisitor{
		f:         fset,
		maxLength: *maxLength,
	}
	for _, file := range files {
		ast.Walk(visitor, file)
	}
	return nil
}
// parseInput resolves args into parsed *ast.Files, recording positions
// in fset. Each arg may be a recursive "dir/..." pattern, a directory,
// a .go file, or an import path; with no args the current directory is
// parsed. Directory args are collected and parsed as whole packages at
// the end unless any arg put us in file mode.
func parseInput(args []string, fset *token.FileSet) ([]*ast.File, error) {
	var directoryList []string
	var fileMode bool
	files := make([]*ast.File, 0)

	if len(args) == 0 {
		directoryList = append(directoryList, pwd)
	} else {
		for _, arg := range args {
			if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) {
				// Recursive pattern: expand to every package directory below it.
				for _, dirname := range allPackagesInFS(arg) {
					directoryList = append(directoryList, dirname)
				}
			} else if isDir(arg) {
				directoryList = append(directoryList, arg)
			} else if exists(arg) {
				// A plain file must be a Go source file.
				if strings.HasSuffix(arg, ".go") {
					fileMode = true
					f, err := parser.ParseFile(fset, arg, nil, 0)
					if err != nil {
						return nil, err
					}
					files = append(files, f)
				} else {
					return nil, fmt.Errorf("invalid file %v specified", arg)
				}
			} else {
				//TODO clean this up a bit
				// Not on disk: treat the arg as an import path and locate
				// its source (and test) files via go/build.
				imPaths := importPaths([]string{arg})
				for _, importPath := range imPaths {
					pkg, err := build.Import(importPath, ".", 0)
					if err != nil {
						return nil, err
					}
					var stringFiles []string
					stringFiles = append(stringFiles, pkg.GoFiles...)
					// files = append(files, pkg.CgoFiles...)
					stringFiles = append(stringFiles, pkg.TestGoFiles...)
					// Make file names absolute relative to the package dir.
					if pkg.Dir != "." {
						for i, f := range stringFiles {
							stringFiles[i] = filepath.Join(pkg.Dir, f)
						}
					}

					fileMode = true
					for _, stringFile := range stringFiles {
						f, err := parser.ParseFile(fset, stringFile, nil, 0)
						if err != nil {
							return nil, err
						}
						files = append(files, f)
					}
				}
			}
		}
	}

	// if we're not in file mode, then we need to grab each and every package in each directory
	// we can to grab all the files
	if !fileMode {
		for _, fpath := range directoryList {
			pkgs, err := parser.ParseDir(fset, fpath, nil, 0)
			if err != nil {
				return nil, err
			}

			for _, pkg := range pkgs {
				for _, f := range pkg.Files {
					files = append(files, f)
				}
			}
		}
	}

	return files, nil
}
// isDir reports whether filename names an existing directory.
func isDir(filename string) bool {
	info, err := os.Stat(filename)
	if err != nil {
		return false
	}
	return info.IsDir()
}
// exists reports whether filename names an existing file or directory.
func exists(filename string) bool {
	if _, err := os.Stat(filename); err != nil {
		return false
	}
	return true
}
// Visit implements ast.Visitor. For each function declaration it
// collects the named result parameters and, if the function is longer
// than v.maxLength lines, logs every bare "return" statement found at
// the top level of the function body.
//
// NOTE(review): only funcDecl.Body.List (top-level statements) is
// scanned here; naked returns nested inside if/for/switch blocks are
// not reported by this switch — confirm whether that is intended.
func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
	var namedReturns []*ast.Ident

	funcDecl, ok := node.(*ast.FuncDecl)
	if !ok {
		return v
	}
	var functionLineLength int
	// We've found a function
	if funcDecl.Type != nil && funcDecl.Type.Results != nil {
		// Collect the named result identifiers, if any.
		for _, field := range funcDecl.Type.Results.List {
			for _, ident := range field.Names {
				if ident != nil {
					namedReturns = append(namedReturns, ident)
				}
			}
		}
		// Function length in lines, from the declaration to its end.
		file := v.f.File(funcDecl.Pos())
		functionLineLength = file.Position(funcDecl.End()).Line - file.Position(funcDecl.Pos()).Line
	}

	// Only functions with named results can have naked returns.
	if len(namedReturns) > 0 && funcDecl.Body != nil {
		// Scan the body for usage of the named returns
		for _, stmt := range funcDecl.Body.List {
			switch s := stmt.(type) {
			case *ast.ReturnStmt:
				// A return with no results is a naked return; report it
				// only when the function exceeds the configured length.
				if len(s.Results) == 0 {
					file := v.f.File(s.Pos())
					if file != nil && uint(functionLineLength) > v.maxLength {
						if funcDecl.Name != nil {
							log.Printf("%v:%v %v naked returns on %v line function \n", file.Name(), file.Position(s.Pos()).Line, funcDecl.Name.Name, functionLineLength)
						}
					}
					continue
				}
			default:
			}
		}
	}
	return v
}

View File

@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,682 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Identify mismatches between assembly files and Go func declarations.
package main
import (
"bytes"
"fmt"
"go/ast"
"go/token"
"regexp"
"strconv"
"strings"
)
// 'kind' is a kind of assembly variable.
// The kinds 1, 2, 4, 8 stand for values of that size.
type asmKind int

// These special kinds are not valid sizes.
const (
	asmString asmKind = 100 + iota
	asmSlice
	asmInterface
	asmEmptyInterface
)

// An asmArch describes assembly parameters for an architecture
type asmArch struct {
	name      string // architecture name as used in file suffixes and +build tags
	ptrSize   int    // pointer size in bytes
	intSize   int    // int/uint size in bytes
	maxAlign  int    // maximum alignment of a value in bytes
	bigEndian bool   // byte order
	stack     string // name of the stack-pointer register as written in assembly
	lr        bool   // architecture uses a link register (frame includes caller's saved LR)
}

// An asmFunc describes the expected variables for a function on a given architecture.
type asmFunc struct {
	arch        *asmArch
	size        int // size of all arguments
	vars        map[string]*asmVar
	varByOffset map[int]*asmVar // every byte offset of each variable maps back to it
}

// An asmVar describes a single assembly variable.
type asmVar struct {
	name  string
	kind  asmKind
	typ   string // Go source form of the type, for diagnostics
	off   int    // byte offset within the argument frame
	size  int    // size in bytes
	inner []*asmVar // sub-words (e.g. string base/len) nested under this variable
}
// Per-architecture assembly parameters, and the list consulted when
// matching a file suffix ("_<arch>.s") or a +build tag to an architecture.
var (
	asmArch386      = asmArch{"386", 4, 4, 4, false, "SP", false}
	asmArchArm      = asmArch{"arm", 4, 4, 4, false, "R13", true}
	asmArchArm64    = asmArch{"arm64", 8, 8, 8, false, "RSP", true}
	asmArchAmd64    = asmArch{"amd64", 8, 8, 8, false, "SP", false}
	asmArchAmd64p32 = asmArch{"amd64p32", 4, 4, 8, false, "SP", false}
	asmArchMips64   = asmArch{"mips64", 8, 8, 8, true, "R29", true}
	// Fix: this entry was named "mips64" (a duplicate of the big-endian
	// entry above), so "_mips64le.s" files and "mips64le" +build tags could
	// never resolve to it, and asmCheckVar's "mips64le" case was dead.
	// It must be "mips64le", mirroring the ppc64/ppc64le pair below.
	asmArchMips64LE = asmArch{"mips64le", 8, 8, 8, false, "R29", true}
	asmArchPpc64    = asmArch{"ppc64", 8, 8, 8, true, "R1", true}
	asmArchPpc64LE  = asmArch{"ppc64le", 8, 8, 8, false, "R1", true}

	arches = []*asmArch{
		&asmArch386,
		&asmArchArm,
		&asmArchArm64,
		&asmArchAmd64,
		&asmArchAmd64p32,
		&asmArchMips64,
		&asmArchMips64LE,
		&asmArchPpc64,
		&asmArchPpc64LE,
	}
)
// Regular expressions used to scan assembly source lines.
var (
	re = regexp.MustCompile
	// +build comment line.
	asmPlusBuild = re(`//\s+\+build\s+([^\n]+)`)
	// TEXT directive: captures function name, flags, frame size, arg size.
	asmTEXT = re(`\bTEXT\b.*·([^\(]+)\(SB\)(?:\s*,\s*([0-9A-Z|+]+))?(?:\s*,\s*\$(-?[0-9]+)(?:-([0-9]+))?)?`)
	// DATA/GLOBL directives end function-body checking.
	asmDATA = re(`\b(DATA|GLOBL)\b`)
	// name+off(FP): a named argument-frame reference.
	asmNamedFP = re(`([a-zA-Z0-9_\xFF-\x{10FFFF}]+)(?:\+([0-9]+))\(FP\)`)
	// off(FP) with no name: an unnamed argument-frame reference.
	asmUnnamedFP = re(`[^+\-0-9](([0-9]+)\(FP\))`)
	// off(REG): a stack-pointer-relative reference.
	asmSP = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`)
	// Opcode and up to two operands on an instruction line.
	asmOpcode = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`)
	// ppc64 opcode size-letter extraction (strips ZU/Z/U/BR suffixes).
	ppc64Suff = re(`([BHWD])(ZU|Z|U|BR)?$`)
)
// asmCheck runs the asmdecl check over pkg: it cross-checks every assembly
// file in the package against the Go declarations of the functions the
// assembly implements (argument frame size, variable names/offsets, and,
// via asmCheckVar, operand widths).
func asmCheck(pkg *Package) {
	if !vet("asmdecl") {
		return
	}

	// No work if no assembly files.
	if !pkg.hasFileWithSuffix(".s") {
		return
	}

	// Gather declarations. knownFunc[name][arch] is func description.
	knownFunc := make(map[string]map[string]*asmFunc)

	for _, f := range pkg.files {
		if f.file != nil {
			for _, decl := range f.file.Decls {
				// A body-less Go func declaration is (presumably)
				// implemented in assembly.
				if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil {
					knownFunc[decl.Name.Name] = f.asmParseDecl(decl)
				}
			}
		}
	}

Files:
	for _, f := range pkg.files {
		if !strings.HasSuffix(f.name, ".s") {
			continue
		}
		Println("Checking file", f.name)

		// Determine architecture from file name if possible.
		var arch string
		var archDef *asmArch
		for _, a := range arches {
			if strings.HasSuffix(f.name, "_"+a.name+".s") {
				arch = a.name
				archDef = a
				break
			}
		}

		lines := strings.SplitAfter(string(f.content), "\n")
		var (
			fn                 *asmFunc // description of the current TEXT function, if known from Go
			fnName             string
			localSize, argSize int
			wroteSP            bool  // function wrote its own stack pointer
			haveRetArg         bool  // some instruction wrote ret+N(FP)
			retLine            []int // line numbers of RET instructions seen so far
		)

		// flushRet reports any RETs in the just-finished function that
		// occurred without ever writing the return value, then resets state.
		flushRet := func() {
			if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 {
				v := fn.vars["ret"]
				for _, line := range retLine {
					f.Badf(token.NoPos, "%s:%d: [%s] %s: RET without writing to %d-byte ret+%d(FP)", f.name, line, arch, fnName, v.size, v.off)
				}
			}
			retLine = nil
		}

		for lineno, line := range lines {
			lineno++

			badf := func(format string, args ...interface{}) {
				f.Badf(token.NoPos, "%s:%d: [%s] %s: %s", f.name, lineno, arch, fnName, fmt.Sprintf(format, args...))
			}

			if arch == "" {
				// Determine architecture from +build line if possible.
				if m := asmPlusBuild.FindStringSubmatch(line); m != nil {
				Fields:
					for _, fld := range strings.Fields(m[1]) {
						for _, a := range arches {
							if a.name == fld {
								arch = a.name
								archDef = a
								break Fields
							}
						}
					}
				}
			}

			if m := asmTEXT.FindStringSubmatch(line); m != nil {
				flushRet()
				if arch == "" {
					f.Warnf(token.NoPos, "%s: cannot determine architecture for assembly file", f.name)
					continue Files
				}
				fnName = m[1]
				fn = knownFunc[m[1]][arch]
				if fn != nil {
					size, _ := strconv.Atoi(m[4])
					// NOSPLIT (or flag 7) functions may declare size 0.
					if size != fn.size && (m[2] != "7" && !strings.Contains(m[2], "NOSPLIT") || size != 0) {
						badf("wrong argument size %d; expected $...-%d", size, fn.size)
					}
				}
				// localSize is the boundary between the local frame and the
				// argument frame, as seen from SP-relative offsets.
				localSize, _ = strconv.Atoi(m[3])
				localSize += archDef.intSize
				if archDef.lr {
					// Account for caller's saved LR
					localSize += archDef.intSize
				}
				argSize, _ = strconv.Atoi(m[4])
				if fn == nil && !strings.Contains(fnName, "<>") {
					badf("function %s missing Go declaration", fnName)
				}
				wroteSP = false
				haveRetArg = false
				continue
			} else if strings.Contains(line, "TEXT") && strings.Contains(line, "SB") {
				// function, but not visible from Go (didn't match asmTEXT), so stop checking
				flushRet()
				fn = nil
				fnName = ""
				continue
			}

			if strings.Contains(line, "RET") {
				retLine = append(retLine, lineno)
			}

			if fnName == "" {
				continue
			}

			if asmDATA.FindStringSubmatch(line) != nil {
				// DATA/GLOBL directives: stop treating this as a function body.
				fn = nil
			}

			if archDef == nil {
				continue
			}

			if strings.Contains(line, ", "+archDef.stack) || strings.Contains(line, ",\t"+archDef.stack) {
				// The function writes its own stack pointer; SP-relative
				// offsets can no longer be interpreted, so stop checking them.
				wroteSP = true
				continue
			}

			// SP-relative references that reach into the argument frame
			// should be written as name+off(FP) instead.
			for _, m := range asmSP.FindAllStringSubmatch(line, -1) {
				if m[3] != archDef.stack || wroteSP {
					continue
				}
				off := 0
				if m[1] != "" {
					off, _ = strconv.Atoi(m[2])
				}
				if off >= localSize {
					if fn != nil {
						v := fn.varByOffset[off-localSize]
						if v != nil {
							badf("%s should be %s+%d(FP)", m[1], v.name, off-localSize)
							continue
						}
					}
					if off >= localSize+argSize {
						badf("use of %s points beyond argument frame", m[1])
						continue
					}
					badf("use of %s to access argument frame", m[1])
				}
			}

			if fn == nil {
				continue
			}

			// Unnamed off(FP) references: suggest the named form if known.
			for _, m := range asmUnnamedFP.FindAllStringSubmatch(line, -1) {
				off, _ := strconv.Atoi(m[2])
				v := fn.varByOffset[off]
				if v != nil {
					badf("use of unnamed argument %s; offset %d is %s+%d(FP)", m[1], off, v.name, v.off)
				} else {
					badf("use of unnamed argument %s", m[1])
				}
			}

			// Named name+off(FP) references: validate name, offset, and width.
			for _, m := range asmNamedFP.FindAllStringSubmatch(line, -1) {
				name := m[1]
				off := 0
				if m[2] != "" {
					off, _ = strconv.Atoi(m[2])
				}
				if name == "ret" || strings.HasPrefix(name, "ret_") {
					haveRetArg = true
				}
				v := fn.vars[name]
				if v == nil {
					// Allow argframe+0(FP).
					if name == "argframe" && off == 0 {
						continue
					}
					v = fn.varByOffset[off]
					if v != nil {
						badf("unknown variable %s; offset %d is %s+%d(FP)", name, off, v.name, v.off)
					} else {
						badf("unknown variable %s", name)
					}
					continue
				}
				asmCheckVar(badf, fn, line, m[0], off, v)
			}
		}
		flushRet()
	}
}
// asmParseDecl parses a function decl for expected assembly variables.
// It returns a map from architecture name to the asmFunc describing the
// argument frame layout on that architecture, or nil if any parameter or
// result type could not be handled.
func (f *File) asmParseDecl(decl *ast.FuncDecl) map[string]*asmFunc {
	var (
		arch   *asmArch
		fn     *asmFunc
		offset int  // running byte offset within the argument frame
		failed bool // set when an argument type cannot be analyzed
	)

	// addVar records v in fn, indexing it both by name and by every byte
	// offset it covers; if outer names an existing variable, v is also
	// registered as one of its inner sub-words.
	addVar := func(outer string, v asmVar) {
		if vo := fn.vars[outer]; vo != nil {
			vo.inner = append(vo.inner, &v)
		}
		fn.vars[v.name] = &v
		for i := 0; i < v.size; i++ {
			fn.varByOffset[v.off+i] = &v
		}
	}

	// addParams lays out one parameter or result list at the current offset,
	// creating an asmVar (plus sub-word asmVars) for each declared name.
	addParams := func(list []*ast.Field) {
		for i, fld := range list {
			// Determine alignment, size, and kind of type in declaration.
			var align, size int
			var kind asmKind
			names := fld.Names
			typ := f.gofmt(fld.Type)
			switch t := fld.Type.(type) {
			default:
				switch typ {
				default:
					f.Warnf(fld.Type.Pos(), "unknown assembly argument type %s", typ)
					failed = true
					return
				case "int8", "uint8", "byte", "bool":
					size = 1
				case "int16", "uint16":
					size = 2
				case "int32", "uint32", "float32":
					size = 4
				case "int64", "uint64", "float64":
					align = arch.maxAlign
					size = 8
				case "int", "uint":
					size = arch.intSize
				case "uintptr", "iword", "Word", "Errno", "unsafe.Pointer":
					size = arch.ptrSize
				case "string", "ErrorString":
					size = arch.ptrSize * 2
					align = arch.ptrSize
					kind = asmString
				}
			case *ast.ChanType, *ast.FuncType, *ast.MapType, *ast.StarExpr:
				size = arch.ptrSize
			case *ast.InterfaceType:
				align = arch.ptrSize
				size = 2 * arch.ptrSize
				if len(t.Methods.List) > 0 {
					kind = asmInterface
				} else {
					kind = asmEmptyInterface
				}
			case *ast.ArrayType:
				if t.Len == nil {
					// Slice header: pointer + len + cap.
					size = arch.ptrSize + 2*arch.intSize
					align = arch.ptrSize
					kind = asmSlice
					break
				}
				f.Warnf(fld.Type.Pos(), "unsupported assembly argument type %s", typ)
				failed = true
			case *ast.StructType:
				f.Warnf(fld.Type.Pos(), "unsupported assembly argument type %s", typ)
				failed = true
			}
			if align == 0 {
				align = size
			}
			if kind == 0 {
				kind = asmKind(size)
			}
			// Round offset up to the required alignment.
			offset += -offset & (align - 1)

			// Create variable for each name being declared with this type.
			if len(names) == 0 {
				name := "unnamed"
				if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 && &list[0] == &decl.Type.Results.List[0] && i == 0 {
					// Assume assembly will refer to single unnamed result as r.
					name = "ret"
				}
				names = []*ast.Ident{{Name: name}}
			}
			for _, id := range names {
				name := id.Name
				addVar("", asmVar{
					name: name,
					kind: kind,
					typ:  typ,
					off:  offset,
					size: size,
				})
				// Multi-word values also get named sub-word variables so
				// assembly can reference each word directly.
				switch kind {
				case 8:
					if arch.ptrSize == 4 {
						// 64-bit value on a 32-bit arch: lo/hi halves,
						// swapped on big-endian machines.
						w1, w2 := "lo", "hi"
						if arch.bigEndian {
							w1, w2 = w2, w1
						}
						addVar(name, asmVar{
							name: name + "_" + w1,
							kind: 4,
							typ:  "half " + typ,
							off:  offset,
							size: 4,
						})
						addVar(name, asmVar{
							name: name + "_" + w2,
							kind: 4,
							typ:  "half " + typ,
							off:  offset + 4,
							size: 4,
						})
					}
				case asmEmptyInterface:
					addVar(name, asmVar{
						name: name + "_type",
						kind: asmKind(arch.ptrSize),
						typ:  "interface type",
						off:  offset,
						size: arch.ptrSize,
					})
					addVar(name, asmVar{
						name: name + "_data",
						kind: asmKind(arch.ptrSize),
						typ:  "interface data",
						off:  offset + arch.ptrSize,
						size: arch.ptrSize,
					})
				case asmInterface:
					addVar(name, asmVar{
						name: name + "_itable",
						kind: asmKind(arch.ptrSize),
						typ:  "interface itable",
						off:  offset,
						size: arch.ptrSize,
					})
					addVar(name, asmVar{
						name: name + "_data",
						kind: asmKind(arch.ptrSize),
						typ:  "interface data",
						off:  offset + arch.ptrSize,
						size: arch.ptrSize,
					})
				case asmSlice:
					addVar(name, asmVar{
						name: name + "_base",
						kind: asmKind(arch.ptrSize),
						typ:  "slice base",
						off:  offset,
						size: arch.ptrSize,
					})
					addVar(name, asmVar{
						name: name + "_len",
						kind: asmKind(arch.intSize),
						typ:  "slice len",
						off:  offset + arch.ptrSize,
						size: arch.intSize,
					})
					addVar(name, asmVar{
						name: name + "_cap",
						kind: asmKind(arch.intSize),
						typ:  "slice cap",
						off:  offset + arch.ptrSize + arch.intSize,
						size: arch.intSize,
					})
				case asmString:
					addVar(name, asmVar{
						name: name + "_base",
						kind: asmKind(arch.ptrSize),
						typ:  "string base",
						off:  offset,
						size: arch.ptrSize,
					})
					addVar(name, asmVar{
						name: name + "_len",
						kind: asmKind(arch.intSize),
						typ:  "string len",
						off:  offset + arch.ptrSize,
						size: arch.intSize,
					})
				}
				offset += size
			}
		}
	}

	// Lay out the frame once per architecture.
	m := make(map[string]*asmFunc)
	for _, arch = range arches {
		fn = &asmFunc{
			arch:        arch,
			vars:        make(map[string]*asmVar),
			varByOffset: make(map[int]*asmVar),
		}
		offset = 0
		addParams(decl.Type.Params.List)
		if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 {
			// Results start at a max-aligned offset after the parameters.
			offset += -offset & (arch.maxAlign - 1)
			addParams(decl.Type.Results.List)
		}
		fn.size = offset
		m[arch.name] = fn
	}

	if failed {
		return nil
	}
	return m
}
// asmCheckVar checks a single variable reference.
// badf reports problems; fn describes the enclosing assembly function;
// line is the full source line; expr is the matched operand text; off is
// the frame offset written in the source; v is the declared variable the
// reference should correspond to. It verifies both the offset and that the
// instruction's operand width matches the variable's size.
func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr string, off int, v *asmVar) {
	m := asmOpcode.FindStringSubmatch(line)
	if m == nil {
		if !strings.HasPrefix(strings.TrimSpace(line), "//") {
			badf("cannot find assembly opcode")
		}
		return
	}

	// Determine operand sizes from instruction.
	// Typically the suffix suffices, but there are exceptions.
	var src, dst, kind asmKind
	op := m[1]
	switch fn.arch.name + "." + op {
	case "386.FMOVLP":
		src, dst = 8, 4
	case "arm.MOVD":
		src = 8
	case "arm.MOVW":
		src = 4
	case "arm.MOVH", "arm.MOVHU":
		src = 2
	case "arm.MOVB", "arm.MOVBU":
		src = 1
	// LEA* opcodes don't really read the second arg.
	// They just take the address of it.
	case "386.LEAL":
		dst = 4
	case "amd64.LEAQ":
		dst = 8
	case "amd64p32.LEAL":
		dst = 4
	default:
		switch fn.arch.name {
		case "386", "amd64":
			if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "D") || strings.HasSuffix(op, "DP")) {
				// FMOVDP, FXCHD, etc
				src = 8
				break
			}
			if strings.HasPrefix(op, "P") && strings.HasSuffix(op, "RD") {
				// PINSRD, PEXTRD, etc
				src = 4
				break
			}
			if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "F") || strings.HasSuffix(op, "FP")) {
				// FMOVFP, FXCHF, etc
				src = 4
				break
			}
			if strings.HasSuffix(op, "SD") {
				// MOVSD, SQRTSD, etc
				src = 8
				break
			}
			if strings.HasSuffix(op, "SS") {
				// MOVSS, SQRTSS, etc
				src = 4
				break
			}
			if strings.HasPrefix(op, "SET") {
				// SETEQ, etc
				src = 1
				break
			}
			// Fall back to the final size letter of the opcode.
			switch op[len(op)-1] {
			case 'B':
				src = 1
			case 'W':
				src = 2
			case 'L':
				src = 4
			case 'D', 'Q':
				src = 8
			}
		case "ppc64", "ppc64le":
			// Strip standard suffixes to reveal size letter.
			m := ppc64Suff.FindStringSubmatch(op)
			if m != nil {
				switch m[1][0] {
				case 'B':
					src = 1
				case 'H':
					src = 2
				case 'W':
					src = 4
				case 'D':
					src = 8
				}
			}
		case "mips64", "mips64le":
			switch op {
			case "MOVB", "MOVBU":
				src = 1
			case "MOVH", "MOVHU":
				src = 2
			case "MOVW", "MOVWU", "MOVF":
				src = 4
			case "MOVV", "MOVD":
				src = 8
			}
		}
	}
	if dst == 0 {
		dst = src
	}

	// Determine whether the match we're holding
	// is the first or second argument.
	if strings.Index(line, expr) > strings.Index(line, ",") {
		kind = dst
	} else {
		kind = src
	}

	vk := v.kind
	vt := v.typ
	switch vk {
	case asmInterface, asmEmptyInterface, asmString, asmSlice:
		// allow reference to first word (pointer)
		vk = v.inner[0].kind
		vt = v.inner[0].typ
	}

	if off != v.off {
		// Wrong offset: list the valid sub-word offsets in the message.
		var inner bytes.Buffer
		for i, vi := range v.inner {
			if len(v.inner) > 1 {
				fmt.Fprintf(&inner, ",")
			}
			fmt.Fprintf(&inner, " ")
			if i == len(v.inner)-1 {
				fmt.Fprintf(&inner, "or ")
			}
			fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
		}
		badf("invalid offset %s; expected %s+%d(FP)%s", expr, v.name, v.off, inner.String())
		return
	}
	if kind != 0 && kind != vk {
		// Width mismatch between the instruction and the variable.
		var inner bytes.Buffer
		if len(v.inner) > 0 {
			fmt.Fprintf(&inner, " containing")
			for i, vi := range v.inner {
				if i > 0 && len(v.inner) > 2 {
					fmt.Fprintf(&inner, ",")
				}
				fmt.Fprintf(&inner, " ")
				if i > 0 && i == len(v.inner)-1 {
					fmt.Fprintf(&inner, "and ")
				}
				fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
			}
		}
		badf("invalid %s of %s; %s is %d-byte value%s", op, expr, vt, vk, inner.String())
	}
}

View File

@ -0,0 +1,49 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for useless assignments.
*/
package main
import (
"go/ast"
"go/token"
"reflect"
)
// init registers the "assign" check with the vet driver; checkAssignStmt
// is invoked for every assignment statement in the checked packages.
func init() {
	register("assign",
		"check for useless assignments",
		checkAssignStmt,
		assignStmt)
}
// TODO: should also check for assignments to struct fields inside methods
// that are on T instead of *T.

// checkAssignStmt checks for assignments of the form "<expr> = <expr>".
// These are almost always useless, and even when they aren't they are usually a mistake.
func checkAssignStmt(f *File, node ast.Node) {
	stmt := node.(*ast.AssignStmt)
	if stmt.Tok != token.ASSIGN {
		return // ignore :=
	}
	if len(stmt.Lhs) != len(stmt.Rhs) {
		// If LHS and RHS have different cardinality, they can't be the same.
		return
	}
	for i := range stmt.Lhs {
		left, right := stmt.Lhs[i], stmt.Rhs[i]
		// Cheap pre-filter: different AST node kinds can never gofmt
		// to the same text, so skip the heavy-weight formatting.
		if reflect.TypeOf(left) != reflect.TypeOf(right) {
			continue
		}
		leftSrc := f.gofmt(left)
		rightSrc := f.gofmt(right)
		if leftSrc == rightSrc {
			f.Badf(stmt.Pos(), "self-assignment of %s to %s", rightSrc, leftSrc)
		}
	}
}

View File

@ -0,0 +1,69 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"go/ast"
"go/token"
)
// init registers the "atomic" check with the vet driver;
// checkAtomicAssignment is invoked for every assignment statement.
func init() {
	register("atomic",
		"check for common mistaken usages of the sync/atomic package",
		checkAtomicAssignment,
		assignStmt)
}
// checkAtomicAssignment walks the assignment statement checking for common
// mistaken usage of atomic package, such as: x = atomic.AddUint64(&x, 1)
func checkAtomicAssignment(f *File, node ast.Node) {
	n := node.(*ast.AssignStmt)
	if len(n.Lhs) != len(n.Rhs) {
		return
	}
	if len(n.Lhs) == 1 && n.Tok == token.DEFINE {
		// y := atomic.AddInt64(&x, 1) declares a fresh variable; fine.
		return
	}

	for i, rhs := range n.Rhs {
		// Only calls of the form atomic.AddXxx(...) are of interest.
		call, isCall := rhs.(*ast.CallExpr)
		if !isCall {
			continue
		}
		sel, isSel := call.Fun.(*ast.SelectorExpr)
		if !isSel {
			continue
		}
		recv, isIdent := sel.X.(*ast.Ident)
		if !isIdent || recv.Name != "atomic" {
			continue
		}
		switch sel.Sel.Name {
		case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr":
			f.checkAtomicAddAssignment(n.Lhs[i], call)
		}
	}
}
// checkAtomicAddAssignment walks the atomic.Add* method calls checking for assigning the return value
// to the same variable being used in the operation
func (f *File) checkAtomicAddAssignment(left ast.Expr, call *ast.CallExpr) {
	if len(call.Args) != 2 {
		return
	}
	addr := call.Args[0]

	broken := false
	if unary, isAddrOf := addr.(*ast.UnaryExpr); isAddrOf && unary.Op == token.AND {
		// Pattern: x = atomic.AddInt64(&x, 1)
		broken = f.gofmt(left) == f.gofmt(unary.X)
	} else if deref, isStar := left.(*ast.StarExpr); isStar {
		// Pattern: *p = atomic.AddInt64(p, 1)
		broken = f.gofmt(deref.X) == f.gofmt(addr)
	}

	if broken {
		f.Bad(left.Pos(), "direct assignment to atomic value")
	}
}

View File

@ -0,0 +1,186 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains boolean condition tests.
package main
import (
"go/ast"
"go/token"
)
// init registers the "bool" check with the vet driver; checkBool is
// invoked for every binary expression.
func init() {
	register("bool",
		"check for mistakes involving boolean operators",
		checkBool,
		binaryExpr)
}
// checkBool inspects a binary expression joined by || or && and reports
// redundant or suspect operand combinations.
func checkBool(f *File, n ast.Node) {
	e := n.(*ast.BinaryExpr)

	var op boolOp
	switch e.Op {
	case token.LOR:
		op = or
	case token.LAND:
		op = and
	default:
		// Not a boolean operator; nothing to do.
		return
	}

	for _, exprs := range op.commutativeSets(e) {
		op.checkRedundant(f, exprs)
		op.checkSuspect(f, exprs)
	}
}
// A boolOp describes a boolean operator (|| or &&) together with the
// equality operator whose repetition under it is suspicious.
type boolOp struct {
	name  string
	tok   token.Token // token corresponding to this operator
	badEq token.Token // token corresponding to the equality test that should not be used with this operator
}

// The two boolean operators the check understands:
// x != c1 || x != c2 and x == c1 && x == c2 are the suspect forms.
var (
	or  = boolOp{"or", token.LOR, token.NEQ}
	and = boolOp{"and", token.LAND, token.EQL}
)
// commutativeSets returns all side effect free sets of
// expressions in e that are connected by op.
// For example, given 'a || b || f() || c || d' with the or op,
// commutativeSets returns {{b, a}, {d, c}}.
func (op boolOp) commutativeSets(e *ast.BinaryExpr) [][]ast.Expr {
	exprs := op.split(e)

	// Partition the slice of expressions into commutative sets.
	// A side-effecting expression acts as a barrier: operands on opposite
	// sides of it may not be compared with each other, so each maximal run
	// of side-effect-free expressions forms its own set.
	i := 0
	var sets [][]ast.Expr
	for j := 0; j <= len(exprs); j++ {
		if j == len(exprs) || hasSideEffects(exprs[j]) {
			if i < j {
				sets = append(sets, exprs[i:j])
			}
			i = j + 1
		}
	}

	return sets
}
// checkRedundant checks for expressions of the form
//	e && e
//	e || e
// Exprs must contain only side effect free expressions.
func (op boolOp) checkRedundant(f *File, exprs []ast.Expr) {
	seen := map[string]bool{}
	for _, expr := range exprs {
		text := f.gofmt(expr)
		if !seen[text] {
			seen[text] = true
			continue
		}
		// Same operand appears twice under the same operator.
		f.Badf(expr.Pos(), "redundant %s: %s %s %s", op.name, text, op.tok, text)
	}
}
// checkSuspect checks for expressions of the form
//	x != c1 || x != c2
//	x == c1 && x == c2
// where c1 and c2 are constant expressions.
// If c1 and c2 are the same then it's redundant;
// if c1 and c2 are different then it's always true or always false.
// Exprs must contain only side effect free expressions.
func (op boolOp) checkSuspect(f *File, exprs []ast.Expr) {
	// seen maps from expressions 'x' to equality expressions 'x != c'.
	seen := make(map[string]string)

	for _, e := range exprs {
		bin, ok := e.(*ast.BinaryExpr)
		if !ok || bin.Op != op.badEq {
			continue
		}

		// In order to avoid false positives, restrict to cases
		// in which one of the operands is constant. We're then
		// interested in the other operand.
		// In the rare case in which both operands are constant
		// (e.g. runtime.GOOS and "windows"), we'll only catch
		// mistakes if the LHS is repeated, which is how most
		// code is written.
		var x ast.Expr
		switch {
		case f.pkg.types[bin.Y].Value != nil:
			x = bin.X
		case f.pkg.types[bin.X].Value != nil:
			x = bin.Y
		default:
			continue
		}

		// e is of the form 'x != c' or 'x == c'.
		xfmt := f.gofmt(x)
		efmt := f.gofmt(e)
		if prev, found := seen[xfmt]; found {
			// checkRedundant handles the case in which efmt == prev.
			if efmt != prev {
				f.Badf(e.Pos(), "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
			}
		} else {
			seen[xfmt] = efmt
		}
	}
}
// hasSideEffects reports whether evaluation of e has side effects.
func hasSideEffects(e ast.Expr) bool {
	found := false
	ast.Inspect(e, func(node ast.Node) bool {
		if found {
			return false // already decided; stop descending
		}
		switch n := node.(type) {
		case *ast.CallExpr:
			// Using CallExpr here will catch conversions
			// as well as function and method invocations.
			// We'll live with the false negatives for now.
			found = true
			return false
		case *ast.UnaryExpr:
			// A channel receive (<-ch) is a side effect.
			if n.Op == token.ARROW {
				found = true
				return false
			}
		}
		return true
	})
	return found
}
// split returns a slice of all subexpressions in e that are connected by op.
// For example, given 'a || (b || c) || d' with the or op,
// split returns []{d, c, b, a}.
func (op boolOp) split(e ast.Expr) []ast.Expr {
	var exprs []ast.Expr
	for {
		e = unparen(e)
		bin, matches := e.(*ast.BinaryExpr)
		if !matches || bin.Op != op.tok {
			// Leaf operand: record it and stop.
			return append(exprs, e)
		}
		// Right subtree first (hence the reversed output order),
		// then continue down the left spine.
		exprs = append(exprs, op.split(bin.Y)...)
		e = bin.X
	}
}
// unparen returns e with any enclosing parentheses stripped.
func unparen(e ast.Expr) ast.Expr {
	for {
		paren, wrapped := e.(*ast.ParenExpr)
		if !wrapped {
			return e
		}
		e = paren.X
	}
}

View File

@ -0,0 +1,91 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"fmt"
"os"
"strings"
"unicode"
)
// Byte-slice constants used when scanning raw source for +build comments.
var (
	nl         = []byte("\n")
	slashSlash = []byte("//")
	plusBuild  = []byte("+build")
)
// checkBuildTag checks that build tags are in the correct location and well-formed.
// name is the file name (for diagnostics) and data its raw contents.
// Problems are written to stderr and, for hard errors, setExit(1) is called.
func checkBuildTag(name string, data []byte) {
	if !vet("buildtags") {
		return
	}
	lines := bytes.SplitAfter(data, nl)

	// Determine cutpoint where +build comments are no longer valid.
	// They are valid in leading // comments in the file followed by
	// a blank line.
	var cutoff int
	for i, line := range lines {
		line = bytes.TrimSpace(line)
		if len(line) == 0 {
			// Remember the most recent blank line in the leading
			// comment block; tags must appear before it.
			cutoff = i
			continue
		}
		if bytes.HasPrefix(line, slashSlash) {
			continue
		}
		break
	}

	for i, line := range lines {
		line = bytes.TrimSpace(line)
		if !bytes.HasPrefix(line, slashSlash) {
			continue
		}
		text := bytes.TrimSpace(line[2:])
		if bytes.HasPrefix(text, plusBuild) {
			fields := bytes.Fields(text)
			if !bytes.Equal(fields[0], plusBuild) {
				// Comment is something like +buildasdf not +build.
				fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
				continue
			}
			if i >= cutoff {
				fmt.Fprintf(os.Stderr, "%s:%d: +build comment must appear before package clause and be followed by a blank line\n", name, i+1)
				setExit(1)
				continue
			}
			// Check arguments.
		Args:
			for _, arg := range fields[1:] {
				// Each argument is a comma-separated list of constraints.
				for _, elem := range strings.Split(string(arg), ",") {
					if strings.HasPrefix(elem, "!!") {
						fmt.Fprintf(os.Stderr, "%s:%d: invalid double negative in build constraint: %s\n", name, i+1, arg)
						setExit(1)
						break Args
					}
					if strings.HasPrefix(elem, "!") {
						elem = elem[1:]
					}
					for _, c := range elem {
						if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
							fmt.Fprintf(os.Stderr, "%s:%d: invalid non-alphanumeric build constraint: %s\n", name, i+1, arg)
							setExit(1)
							break Args
						}
					}
				}
			}
			continue
		}
		// Comment with +build but not at beginning.
		if bytes.Contains(line, plusBuild) && i < cutoff {
			fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1)
			continue
		}
	}
}

View File

@ -0,0 +1,130 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Check for invalid cgo pointer passing.
// This looks for code that uses cgo to call C code passing values
// whose types are almost always invalid according to the cgo pointer
// sharing rules.
// Specifically, it warns about attempts to pass a Go chan, map, func,
// or slice to C, either directly, or via a pointer, array, or struct.
package main
import (
"go/ast"
"go/token"
"go/types"
)
// init registers the "cgocall" check with the vet driver; checkCgoCall is
// invoked for every call expression.
func init() {
	register("cgocall",
		"check for types that may not be passed to cgo calls",
		checkCgoCall,
		callExpr)
}
// checkCgoCall examines a call expression and, when it is a call to a
// function imported from the special "C" pseudo-package, checks each
// argument for Go types that may not be passed to C.
func checkCgoCall(f *File, node ast.Node) {
	call := node.(*ast.CallExpr)

	// We are only looking for calls to functions imported from
	// the "C" package.
	sel, isSel := call.Fun.(*ast.SelectorExpr)
	if !isSel {
		return
	}
	pkgIdent, isIdent := sel.X.(*ast.Ident)
	if !isIdent || pkgIdent.Name != "C" {
		return
	}

	for _, arg := range call.Args {
		if !typeOKForCgoCall(cgoBaseType(f, arg)) {
			f.Badf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
		}

		// Check for passing the address of a bad type.
		// First strip an unsafe.Pointer conversion, if any.
		if conv, isConv := arg.(*ast.CallExpr); isConv && len(conv.Args) == 1 && f.hasBasicType(conv.Fun, types.UnsafePointer) {
			arg = conv.Args[0]
		}
		if addrOf, isAddr := arg.(*ast.UnaryExpr); isAddr && addrOf.Op == token.AND {
			if !typeOKForCgoCall(cgoBaseType(f, addrOf.X)) {
				f.Badf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
			}
		}
	}
}
// cgoBaseType tries to look through type conversions involving
// unsafe.Pointer to find the real type. It converts:
//	unsafe.Pointer(x) => x
//	*(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
// If neither pattern matches, the static type of arg itself is returned.
func cgoBaseType(f *File, arg ast.Expr) types.Type {
	switch arg := arg.(type) {
	case *ast.CallExpr:
		// A one-argument conversion to unsafe.Pointer: recurse on the operand.
		if len(arg.Args) == 1 && f.hasBasicType(arg.Fun, types.UnsafePointer) {
			return cgoBaseType(f, arg.Args[0])
		}
	case *ast.StarExpr:
		// Each step below peels one layer of the second pattern,
		// bailing out to the default as soon as it fails to match.
		call, ok := arg.X.(*ast.CallExpr)
		if !ok || len(call.Args) != 1 {
			break
		}
		// Here arg is *f(v).
		t := f.pkg.types[call.Fun].Type
		if t == nil {
			break
		}
		ptr, ok := t.Underlying().(*types.Pointer)
		if !ok {
			break
		}
		// Here arg is *(*p)(v)
		elem, ok := ptr.Elem().Underlying().(*types.Basic)
		if !ok || elem.Kind() != types.UnsafePointer {
			break
		}
		// Here arg is *(*unsafe.Pointer)(v)
		call, ok = call.Args[0].(*ast.CallExpr)
		if !ok || len(call.Args) != 1 {
			break
		}
		// Here arg is *(*unsafe.Pointer)(f(v))
		if !f.hasBasicType(call.Fun, types.UnsafePointer) {
			break
		}
		// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(v))
		u, ok := call.Args[0].(*ast.UnaryExpr)
		if !ok || u.Op != token.AND {
			break
		}
		// Here arg is *(*unsafe.Pointer)(unsafe.Pointer(&v))
		return cgoBaseType(f, u.X)
	}

	return f.pkg.types[arg].Type
}
// typeOKForCgoCall returns true if the type of arg is OK to pass to a
// C function using cgo. This is not true for Go types with embedded
// pointers.
func typeOKForCgoCall(t types.Type) bool {
	if t == nil {
		return true
	}
	switch underlying := t.Underlying().(type) {
	case *types.Chan, *types.Map, *types.Signature, *types.Slice:
		// These kinds always contain Go pointers.
		return false
	case *types.Pointer:
		return typeOKForCgoCall(underlying.Elem())
	case *types.Array:
		return typeOKForCgoCall(underlying.Elem())
	case *types.Struct:
		// A struct is acceptable only if every field is.
		numFields := underlying.NumFields()
		for i := 0; i < numFields; i++ {
			if !typeOKForCgoCall(underlying.Field(i).Type()) {
				return false
			}
		}
	}
	return true
}

View File

@ -0,0 +1,82 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the test for unkeyed struct literals.
package main
import (
"github.com/dnephin/govet/internal/whitelist"
"flag"
"go/ast"
"go/types"
"strings"
)
// compositeWhiteList controls whether the whitelist of well-known types
// that may use unkeyed literals is consulted; it is disabled only in tests.
var compositeWhiteList = flag.Bool("compositewhitelist", true, "use composite white list; for testing only")

// init registers the "composites" check with the vet driver;
// checkUnkeyedLiteral is invoked for every composite literal.
func init() {
	register("composites",
		"check that composite literals used field-keyed elements",
		checkUnkeyedLiteral,
		compositeLit)
}
// checkUnkeyedLiteral checks if a composite literal is a struct literal with
// unkeyed fields. Whitelisted types and types local to the package being
// checked are exempt.
func checkUnkeyedLiteral(f *File, node ast.Node) {
	cl := node.(*ast.CompositeLit)

	typ := f.pkg.types[cl].Type
	if typ == nil {
		// cannot determine composite literals' type, skip it
		return
	}
	typeName := typ.String()
	if *compositeWhiteList && whitelist.UnkeyedLiteral[typeName] {
		// skip whitelisted types
		return
	}
	if _, ok := typ.Underlying().(*types.Struct); !ok {
		// skip non-struct composite literals
		return
	}
	if isLocalType(f, typeName) {
		// allow unkeyed locally defined composite literal
		return
	}

	// check if the CompositeLit contains an unkeyed field
	allKeyValue := true
	for _, e := range cl.Elts {
		if _, ok := e.(*ast.KeyValueExpr); !ok {
			allKeyValue = false
			break
		}
	}
	if allKeyValue {
		// all the composite literal fields are keyed
		return
	}

	f.Badf(cl.Pos(), "%s composite literal uses unkeyed fields", typeName)
}
// isLocalType reports whether typeName refers to a type defined in the
// package under analysis (or is an anonymous struct), in which case an
// unkeyed composite literal is permitted.
func isLocalType(f *File, typeName string) bool {
	if strings.HasPrefix(typeName, "struct{") {
		// struct literals are local types
		return true
	}

	pkgname := f.pkg.path
	switch {
	case strings.HasPrefix(typeName, pkgname+"."):
		return true
	case strings.HasSuffix(pkgname, "_test"):
		// treat types as local inside test packages with _test name suffix
		base := strings.TrimSuffix(pkgname, "_test")
		return strings.HasPrefix(typeName, base+".")
	}
	return false
}

View File

@ -0,0 +1,239 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the code to check that locks are not passed by value.
package main
import (
"bytes"
"fmt"
"go/ast"
"go/token"
"go/types"
)
// init registers the "copylocks" check with the vet driver; checkCopyLocks
// is invoked for every node kind on which a lock could be copied.
func init() {
	register("copylocks",
		"check that locks are not passed by value",
		checkCopyLocks,
		funcDecl, rangeStmt, funcLit, callExpr, assignStmt, genDecl, compositeLit, returnStmt)
}
// checkCopyLocks checks whether node might
// inadvertently copy a lock, dispatching on the node kind to the
// specialized checkers below.
func checkCopyLocks(f *File, node ast.Node) {
	switch node := node.(type) {
	case *ast.RangeStmt:
		checkCopyLocksRange(f, node)
	case *ast.FuncDecl:
		checkCopyLocksFunc(f, node.Name.Name, node.Recv, node.Type)
	case *ast.FuncLit:
		// Function literals have no name or receiver.
		checkCopyLocksFunc(f, "func", nil, node.Type)
	case *ast.CallExpr:
		checkCopyLocksCallExpr(f, node)
	case *ast.AssignStmt:
		checkCopyLocksAssign(f, node)
	case *ast.GenDecl:
		checkCopyLocksGenDecl(f, node)
	case *ast.CompositeLit:
		checkCopyLocksCompositeLit(f, node)
	case *ast.ReturnStmt:
		checkCopyLocksReturnStmt(f, node)
	}
}
// checkCopyLocksAssign checks whether an assignment
// copies a lock.
func checkCopyLocksAssign(f *File, as *ast.AssignStmt) {
	for i, rhs := range as.Rhs {
		path := lockPathRhs(f, rhs)
		if path == nil {
			continue
		}
		f.Badf(rhs.Pos(), "assignment copies lock value to %v: %v", f.gofmt(as.Lhs[i]), path)
	}
}
// checkCopyLocksGenDecl checks whether lock is copied
// in variable declaration.
func checkCopyLocksGenDecl(f *File, gd *ast.GenDecl) {
	if gd.Tok != token.VAR {
		// Only var declarations can copy a value.
		return
	}
	for _, spec := range gd.Specs {
		vs := spec.(*ast.ValueSpec)
		for i, value := range vs.Values {
			path := lockPathRhs(f, value)
			if path == nil {
				continue
			}
			f.Badf(value.Pos(), "variable declaration copies lock value to %v: %v", vs.Names[i].Name, path)
		}
	}
}
// checkCopyLocksCompositeLit detects lock copy inside a composite literal
func checkCopyLocksCompositeLit(f *File, cl *ast.CompositeLit) {
	for _, elt := range cl.Elts {
		value := elt
		// For keyed elements, examine the value part only.
		if kv, isKV := elt.(*ast.KeyValueExpr); isKV {
			value = kv.Value
		}
		if path := lockPathRhs(f, value); path != nil {
			f.Badf(value.Pos(), "literal copies lock value from %v: %v", f.gofmt(value), path)
		}
	}
}
// checkCopyLocksReturnStmt detects a lock copied by a return statement's
// result expressions.
func checkCopyLocksReturnStmt(f *File, rs *ast.ReturnStmt) {
	for _, res := range rs.Results {
		path := lockPathRhs(f, res)
		if path == nil {
			continue
		}
		f.Badf(res.Pos(), "return copies lock value: %v", path)
	}
}
// checkCopyLocksCallExpr detects a lock copied into the arguments of a
// function call. Calls to the built-in new are exempt: new(T) takes a
// type, not a value, so nothing is copied.
func checkCopyLocksCallExpr(f *File, ce *ast.CallExpr) {
	if id, ok := ce.Fun.(*ast.Ident); ok && id.Name == "new" && f.pkg.types[id].IsBuiltin() {
		// Skip 'new(Type)' for built-in 'new'
		return
	}
	for _, arg := range ce.Args {
		path := lockPathRhs(f, arg)
		if path == nil {
			continue
		}
		f.Badf(arg.Pos(), "function call copies lock value: %v", path)
	}
}
// checkCopyLocksFunc checks whether a function might inadvertently copy
// a lock, by checking whether its receiver or any of its parameters are
// lock-containing values. name is used only in the diagnostic.
func checkCopyLocksFunc(f *File, name string, recv *ast.FieldList, typ *ast.FuncType) {
	// Gather the type expressions to inspect: the receiver (if any)
	// first, then every parameter, preserving source order.
	var exprs []ast.Expr
	if recv != nil && len(recv.List) > 0 {
		exprs = append(exprs, recv.List[0].Type)
	}
	if typ.Params != nil {
		for _, field := range typ.Params.List {
			exprs = append(exprs, field.Type)
		}
	}
	for _, expr := range exprs {
		if path := lockPath(f.pkg.typesPkg, f.pkg.types[expr].Type); path != nil {
			f.Badf(expr.Pos(), "%s passes lock by value: %v", name, path)
		}
	}
	// Don't check typ.Results. If T has a Lock field it's OK to write
	//     return T{}
	// because that is returning the zero value. Leave result checking
	// to the return statement.
}
// checkCopyLocksRange checks whether a range statement
// might inadvertently copy a lock by checking whether
// any of the range variables are locks.
//
// Both the key and the value variable are checked; either may be nil.
func checkCopyLocksRange(f *File, r *ast.RangeStmt) {
	checkCopyLocksRangeVar(f, r.Tok, r.Key)
	checkCopyLocksRangeVar(f, r.Tok, r.Value)
}
// checkCopyLocksRangeVar reports a range variable e whose per-iteration
// assignment copies a lock-containing value. rtok distinguishes := (new
// variables, looked up in defs) from = (existing expressions, looked up
// in the recorded type info).
func checkCopyLocksRangeVar(f *File, rtok token.Token, e ast.Expr) {
	if e == nil {
		return
	}
	ident, isIdent := e.(*ast.Ident)
	if isIdent && ident.Name == "_" {
		// The blank identifier never stores (so never copies) a value.
		return
	}
	var typ types.Type
	if rtok == token.DEFINE {
		// ":=" introduces new objects; a non-identifier here has no
		// definition to inspect.
		if !isIdent {
			return
		}
		obj := f.pkg.defs[ident]
		if obj == nil {
			return
		}
		typ = obj.Type()
	} else {
		typ = f.pkg.types[e].Type
	}
	if typ == nil {
		return
	}
	if path := lockPath(f.pkg.typesPkg, typ); path != nil {
		f.Badf(e.Pos(), "range var %s copies lock: %v", f.gofmt(e), path)
	}
}
type typePath []types.Type
// String pretty-prints a typePath, joining the type names with
// " contains ". The slice stores the outermost type last, so it is
// walked backwards to print outermost-to-innermost.
func (path typePath) String() string {
	var buf bytes.Buffer
	last := len(path) - 1
	for i := last; i >= 0; i-- {
		if i != last {
			buf.WriteString(" contains ")
		}
		buf.WriteString(path[i].String())
	}
	return buf.String()
}
// lockPathRhs is lockPath specialized for the right-hand side of an
// assignment-like context: composite literals and (possibly
// dereferenced) calls are exempt, since they may denote zero values.
func lockPathRhs(f *File, x ast.Expr) typePath {
	switch v := x.(type) {
	case *ast.CompositeLit:
		return nil
	case *ast.CallExpr:
		// A call may return a zero value.
		return nil
	case *ast.StarExpr:
		if _, ok := v.X.(*ast.CallExpr); ok {
			// A call may return a pointer to a zero value.
			return nil
		}
	}
	return lockPath(f.pkg.typesPkg, f.pkg.types[x].Type)
}
// lockPath returns a typePath describing the location of a lock value
// contained in typ. If there is no contained lock, it returns nil.
//
// A "lock" here is any struct type whose pointer method set has a Lock
// method that its value method set lacks (i.e. Lock has a pointer
// receiver), so copying the value would detach it from its lock.
func lockPath(tpkg *types.Package, typ types.Type) typePath {
	if typ == nil {
		return nil
	}
	// We're only interested in the case in which the underlying
	// type is a struct. (Interfaces and pointers are safe to copy.)
	styp, ok := typ.Underlying().(*types.Struct)
	if !ok {
		return nil
	}
	// We're looking for cases in which a reference to this type
	// can be locked, but a value cannot. This differentiates
	// embedded interfaces from embedded values.
	if plock := types.NewMethodSet(types.NewPointer(typ)).Lookup(tpkg, "Lock"); plock != nil {
		if lock := types.NewMethodSet(typ).Lookup(tpkg, "Lock"); lock == nil {
			return []types.Type{typ}
		}
	}
	// Otherwise recurse into the fields; the first field containing a
	// lock determines the reported path, with typ appended so the
	// outermost type ends up last in the slice.
	nfields := styp.NumFields()
	for i := 0; i < nfields; i++ {
		ftyp := styp.Field(i).Type()
		subpath := lockPath(tpkg, ftyp)
		if subpath != nil {
			return append(subpath, typ)
		}
	}
	return nil
}

View File

@ -0,0 +1,298 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Check for syntactically unreachable code.
package main
import (
"go/ast"
"go/token"
)
// init registers the unreachable-code check with the vet driver; it
// runs over named function declarations and function literals.
func init() {
	register("unreachable",
		"check for unreachable code",
		checkUnreachable,
		funcDecl, funcLit)
}
// deadState holds the per-function state for the unreachable-code check.
type deadState struct {
	f           *File               // file being checked, used for reporting
	hasBreak    map[ast.Stmt]bool   // statements that are the target of a (possibly labeled) break
	hasGoto     map[string]bool     // label names that are the target of a goto
	labels      map[string]ast.Stmt // label name -> labeled statement
	breakTarget ast.Stmt            // innermost enclosing for/switch/select statement, if any

	reachable bool // whether the statement currently being visited is reachable
}
// checkUnreachable checks a function body for dead code.
//
// TODO(adonovan): use the new cfg package, which is more precise.
func checkUnreachable(f *File, node ast.Node) {
	var body *ast.BlockStmt
	if fn, ok := node.(*ast.FuncDecl); ok {
		body = fn.Body
	} else if lit, ok := node.(*ast.FuncLit); ok {
		body = lit.Body
	}
	if body == nil {
		// Declaration without a body (e.g. implemented externally).
		return
	}
	d := &deadState{
		f:        f,
		hasBreak: map[ast.Stmt]bool{},
		hasGoto:  map[string]bool{},
		labels:   map[string]ast.Stmt{},
	}
	// Pass 1 records labels, gotos and break targets; pass 2 walks the
	// body tracking reachability and reporting dead statements.
	d.findLabels(body)
	d.reachable = true
	d.findDead(body)
}
// findLabels gathers information about the labels defined and used by stmt
// and about which statements break, whether a label is involved or not.
//
// It populates d.labels, d.hasGoto and d.hasBreak, and maintains
// d.breakTarget while descending into breakable constructs.
func (d *deadState) findLabels(stmt ast.Stmt) {
	switch x := stmt.(type) {
	default:
		d.f.Warnf(x.Pos(), "internal error in findLabels: unexpected statement %T", x)

	case *ast.AssignStmt,
		*ast.BadStmt,
		*ast.DeclStmt,
		*ast.DeferStmt,
		*ast.EmptyStmt,
		*ast.ExprStmt,
		*ast.GoStmt,
		*ast.IncDecStmt,
		*ast.ReturnStmt,
		*ast.SendStmt:
		// no statements inside

	case *ast.BlockStmt:
		for _, stmt := range x.List {
			d.findLabels(stmt)
		}

	case *ast.BranchStmt:
		switch x.Tok {
		case token.GOTO:
			if x.Label != nil {
				d.hasGoto[x.Label.Name] = true
			}

		case token.BREAK:
			// An unlabeled break targets the innermost enclosing
			// construct; a labeled one targets the labeled statement.
			stmt := d.breakTarget
			if x.Label != nil {
				stmt = d.labels[x.Label.Name]
			}
			if stmt != nil {
				d.hasBreak[stmt] = true
			}
		}

	case *ast.IfStmt:
		d.findLabels(x.Body)
		if x.Else != nil {
			d.findLabels(x.Else)
		}

	case *ast.LabeledStmt:
		d.labels[x.Label.Name] = x.Stmt
		d.findLabels(x.Stmt)

	// These cases are all the same, but the x.Body only works
	// when the specific type of x is known, so the cases cannot
	// be merged.
	case *ast.ForStmt:
		outer := d.breakTarget
		d.breakTarget = x
		d.findLabels(x.Body)
		d.breakTarget = outer

	case *ast.RangeStmt:
		outer := d.breakTarget
		d.breakTarget = x
		d.findLabels(x.Body)
		d.breakTarget = outer

	case *ast.SelectStmt:
		outer := d.breakTarget
		d.breakTarget = x
		d.findLabels(x.Body)
		d.breakTarget = outer

	case *ast.SwitchStmt:
		outer := d.breakTarget
		d.breakTarget = x
		d.findLabels(x.Body)
		d.breakTarget = outer

	case *ast.TypeSwitchStmt:
		outer := d.breakTarget
		d.breakTarget = x
		d.findLabels(x.Body)
		d.breakTarget = outer

	case *ast.CommClause:
		for _, stmt := range x.Body {
			d.findLabels(stmt)
		}

	case *ast.CaseClause:
		for _, stmt := range x.Body {
			d.findLabels(stmt)
		}
	}
}
// findDead walks the statement looking for dead code.
// If d.reachable is false on entry, stmt itself is dead.
// When findDead returns, d.reachable tells whether the
// statement following stmt is reachable.
func (d *deadState) findDead(stmt ast.Stmt) {
	// Is this a labeled goto target?
	// If so, assume it is reachable due to the goto.
	// This is slightly conservative, in that we don't
	// check that the goto is reachable, so
	//	L: goto L
	// will not provoke a warning.
	// But it's good enough.
	if x, isLabel := stmt.(*ast.LabeledStmt); isLabel && d.hasGoto[x.Label.Name] {
		d.reachable = true
	}

	if !d.reachable {
		switch stmt.(type) {
		case *ast.EmptyStmt:
			// do not warn about unreachable empty statements
		default:
			d.f.Bad(stmt.Pos(), "unreachable code")
			d.reachable = true // silence error about next statement
		}
	}

	switch x := stmt.(type) {
	default:
		d.f.Warnf(x.Pos(), "internal error in findDead: unexpected statement %T", x)

	case *ast.AssignStmt,
		*ast.BadStmt,
		*ast.DeclStmt,
		*ast.DeferStmt,
		*ast.EmptyStmt,
		*ast.GoStmt,
		*ast.IncDecStmt,
		*ast.SendStmt:
		// no control flow

	case *ast.BlockStmt:
		for _, stmt := range x.List {
			d.findDead(stmt)
		}

	case *ast.BranchStmt:
		switch x.Tok {
		case token.BREAK, token.GOTO, token.FALLTHROUGH:
			d.reachable = false
		case token.CONTINUE:
			// NOTE: We accept "continue" statements as terminating.
			// They are not necessary in the spec definition of terminating,
			// because a continue statement cannot be the final statement
			// before a return. But for the more general problem of syntactically
			// identifying dead code, continue redirects control flow just
			// like the other terminating statements.
			d.reachable = false
		}

	case *ast.ExprStmt:
		// Call to panic?
		call, ok := x.X.(*ast.CallExpr)
		if ok {
			name, ok := call.Fun.(*ast.Ident)
			// name.Obj == nil means the identifier resolves to the
			// predeclared panic, not a local redefinition.
			if ok && name.Name == "panic" && name.Obj == nil {
				d.reachable = false
			}
		}

	case *ast.ForStmt:
		d.findDead(x.Body)
		// "for {}" with no condition only exits via break.
		d.reachable = x.Cond != nil || d.hasBreak[x]

	case *ast.IfStmt:
		d.findDead(x.Body)
		if x.Else != nil {
			r := d.reachable
			d.reachable = true
			d.findDead(x.Else)
			d.reachable = d.reachable || r
		} else {
			// might not have executed if statement
			d.reachable = true
		}

	case *ast.LabeledStmt:
		d.findDead(x.Stmt)

	case *ast.RangeStmt:
		d.findDead(x.Body)
		d.reachable = true

	case *ast.ReturnStmt:
		d.reachable = false

	case *ast.SelectStmt:
		// NOTE: Unlike switch and type switch below, we don't care
		// whether a select has a default, because a select without a
		// default blocks until one of the cases can run. That's different
		// from a switch without a default, which behaves like it has
		// a default with an empty body.
		anyReachable := false
		for _, comm := range x.Body.List {
			d.reachable = true
			for _, stmt := range comm.(*ast.CommClause).Body {
				d.findDead(stmt)
			}
			anyReachable = anyReachable || d.reachable
		}
		d.reachable = anyReachable || d.hasBreak[x]

	case *ast.SwitchStmt:
		anyReachable := false
		hasDefault := false
		for _, cas := range x.Body.List {
			cc := cas.(*ast.CaseClause)
			if cc.List == nil {
				hasDefault = true
			}
			d.reachable = true
			for _, stmt := range cc.Body {
				d.findDead(stmt)
			}
			anyReachable = anyReachable || d.reachable
		}
		// Without a default, control may skip the whole switch.
		d.reachable = anyReachable || d.hasBreak[x] || !hasDefault

	case *ast.TypeSwitchStmt:
		anyReachable := false
		hasDefault := false
		for _, cas := range x.Body.List {
			cc := cas.(*ast.CaseClause)
			if cc.List == nil {
				hasDefault = true
			}
			d.reachable = true
			for _, stmt := range cc.Body {
				d.findDead(stmt)
			}
			anyReachable = anyReachable || d.reachable
		}
		d.reachable = anyReachable || d.hasBreak[x] || !hasDefault
	}
}

View File

@ -0,0 +1,205 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Vet examines Go source code and reports suspicious constructs, such as Printf
calls whose arguments do not align with the format string. Vet uses heuristics
that do not guarantee all reports are genuine problems, but it can find errors
not caught by the compilers.
It can be invoked three ways:
By package, from the go tool:
go vet package/path/name
vets the package whose path is provided.
By files:
go tool vet source/directory/*.go
vets the files named, all of which must be in the same package.
By directory:
go tool vet source/directory
recursively descends the directory, vetting each package it finds.
Vet's exit code is 2 for erroneous invocation of the tool, 1 if a
problem was reported, and 0 otherwise. Note that the tool does not
check every possible problem and depends on unreliable heuristics
so it should be used as guidance only, not as a firm indicator of
program correctness.
By default the -all flag is set so all checks are performed.
If any flags are explicitly set to true, only those tests are run. Conversely, if
any flag is explicitly set to false, only those tests are disabled. Thus -printf=true
runs the printf check, -printf=false runs all checks except the printf check.
Available checks:
Assembly declarations
Flag: -asmdecl
Mismatches between assembly files and Go function declarations.
Useless assignments
Flag: -assign
Check for useless assignments.
Atomic mistakes
Flag: -atomic
Common mistaken usages of the sync/atomic package.
Boolean conditions
Flag: -bool
Mistakes involving boolean operators.
Build tags
Flag: -buildtags
Badly formed or misplaced +build tags.
Invalid uses of cgo
Flag: -cgocall
Detect some violations of the cgo pointer passing rules.
Unkeyed composite literals
Flag: -composites
Composite struct literals that do not use the field-keyed syntax.
Copying locks
Flag: -copylocks
Locks that are erroneously passed by value.
Tests, benchmarks and documentation examples
Flag: -tests
Mistakes involving tests including functions with incorrect names or signatures
and example tests that document identifiers not in the package.
Failure to call the cancelation function returned by context.WithCancel.
Flag: -lostcancel
The cancelation function returned by context.WithCancel, WithTimeout,
and WithDeadline must be called or the new context will remain live
until its parent context is cancelled.
(The background context is never cancelled.)
Methods
Flag: -methods
Non-standard signatures for methods with familiar names, including:
Format GobEncode GobDecode MarshalJSON MarshalXML
Peek ReadByte ReadFrom ReadRune Scan Seek
UnmarshalJSON UnreadByte UnreadRune WriteByte
WriteTo
Nil function comparison
Flag: -nilfunc
Comparisons between functions and nil.
Printf family
Flag: -printf
Suspicious calls to functions in the Printf family, including any functions
with these names, disregarding case:
Print Printf Println
Fprint Fprintf Fprintln
Sprint Sprintf Sprintln
Error Errorf
Fatal Fatalf
Log Logf
Panic Panicf Panicln
The -printfuncs flag can be used to redefine this list.
If the function name ends with an 'f', the function is assumed to take
a format descriptor string in the manner of fmt.Printf. If not, vet
complains about arguments that look like format descriptor strings.
It also checks for errors such as using a Writer as the first argument of
Printf.
Range loop variables

Flag: -rangeloops

Incorrect uses of range loop variables in closures.

Shadowed variables

Flag: -shadow=false (experimental; must be set explicitly)

Variables that may have been unintentionally shadowed.

Shifts

Flag: -shift

Shifts equal to or longer than the variable's length.

Struct tags

Flag: -structtags

Struct tags that do not follow the format understood by reflect.StructTag.Get.
Well-known encoding struct tags (json, xml) used with unexported fields.
Unreachable code
Flag: -unreachable
Unreachable code.
Misuse of unsafe Pointers
Flag: -unsafeptr
Likely incorrect uses of unsafe.Pointer to convert integers to pointers.
A conversion from uintptr to unsafe.Pointer is invalid if it implies that
there is a uintptr-typed word in memory that holds a pointer value,
because that word will be invisible to stack copying and to the garbage
collector.
Unused result of certain function calls
Flag: -unusedresult
Calls to well-known functions and methods that return a value that is
discarded. By default, this includes functions like fmt.Errorf and
fmt.Sprintf and methods like String and Error. The flags -unusedfuncs
and -unusedstringmethods control the set.
Other flags
These flags configure the behavior of vet:
-all (default true)
Enable all non-experimental checks.
-v
Verbose mode
-printfuncs
A comma-separated list of print-like function names
to supplement the standard list.
For more information, see the discussion of the -printf flag.
-shadowstrict
Whether to be strict about shadowing; can be noisy.
*/
package main

View File

@ -0,0 +1,512 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cfg
// This file implements the CFG construction pass.
import (
"fmt"
"go/ast"
"go/token"
)
// builder holds the state used while constructing the CFG of a single
// function body.
type builder struct {
	cfg       *CFG                     // the graph under construction
	mayReturn func(*ast.CallExpr) bool // client predicate: may this call return?
	current   *Block                   // block receiving new nodes; replaced after jumps
	lblocks   map[*ast.Object]*lblock  // labeled blocks
	targets   *targets                 // linked stack of branch targets
}
// stmt emits blocks and edges for statement _s, leaving b.current at
// the block where control continues after _s.
func (b *builder) stmt(_s ast.Stmt) {
	// The label of the current statement.  If non-nil, its _goto
	// target is always set; its _break and _continue are set only
	// within the body of switch/typeswitch/select/for/range.
	// It is effectively an additional default-nil parameter of stmt().
	var label *lblock
start:
	switch s := _s.(type) {
	case *ast.BadStmt,
		*ast.SendStmt,
		*ast.IncDecStmt,
		*ast.GoStmt,
		*ast.DeferStmt,
		*ast.EmptyStmt,
		*ast.AssignStmt:
		// No effect on control flow.
		b.add(s)

	case *ast.ExprStmt:
		b.add(s)
		if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) {
			// Calls to panic, os.Exit, etc, never return.
			b.current = b.newUnreachableBlock("unreachable.call")
		}

	case *ast.DeclStmt:
		// Treat each var ValueSpec as a separate statement.
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR {
			for _, spec := range d.Specs {
				if spec, ok := spec.(*ast.ValueSpec); ok {
					b.add(spec)
				}
			}
		}

	case *ast.LabeledStmt:
		label = b.labeledBlock(s.Label)
		b.jump(label._goto)
		b.current = label._goto
		_s = s.Stmt
		goto start // effectively: tailcall stmt(g, s.Stmt, label)

	case *ast.ReturnStmt:
		b.add(s)
		b.current = b.newUnreachableBlock("unreachable.return")

	case *ast.BranchStmt:
		// Resolve the branch to its target block, searching the
		// targets stack for unlabeled break/continue.
		var block *Block
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				if lb := b.labeledBlock(s.Label); lb != nil {
					block = lb._break
				}
			} else {
				for t := b.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				if lb := b.labeledBlock(s.Label); lb != nil {
					block = lb._continue
				}
			} else {
				for t := b.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := b.targets; t != nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			if s.Label != nil {
				block = b.labeledBlock(s.Label)._goto
			}
		}
		if block == nil {
			// Ill-formed branch; keep the graph well-formed anyway.
			block = b.newBlock("undefined.branch")
		}
		b.jump(block)
		b.current = b.newUnreachableBlock("unreachable.branch")

	case *ast.BlockStmt:
		b.stmtList(s.List)

	case *ast.IfStmt:
		if s.Init != nil {
			b.stmt(s.Init)
		}
		then := b.newBlock("if.then")
		done := b.newBlock("if.done")
		_else := done
		if s.Else != nil {
			_else = b.newBlock("if.else")
		}
		b.add(s.Cond)
		b.ifelse(then, _else)
		b.current = then
		b.stmt(s.Body)
		b.jump(done)

		if s.Else != nil {
			b.current = _else
			b.stmt(s.Else)
			b.jump(done)
		}

		b.current = done

	case *ast.SwitchStmt:
		b.switchStmt(s, label)

	case *ast.TypeSwitchStmt:
		b.typeSwitchStmt(s, label)

	case *ast.SelectStmt:
		b.selectStmt(s, label)

	case *ast.ForStmt:
		b.forStmt(s, label)

	case *ast.RangeStmt:
		b.rangeStmt(s, label)

	default:
		panic(fmt.Sprintf("unexpected statement kind: %T", s))
	}
}
// stmtList emits CFG nodes for each statement of list, in order.
func (b *builder) stmtList(list []ast.Stmt) {
	for i := range list {
		b.stmt(list[i])
	}
}
// switchStmt emits the blocks and edges for an expression switch.
// label, if non-nil, is the enclosing labeled statement, whose break
// target is wired to the switch's done block.
func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) {
	if s.Init != nil {
		b.stmt(s.Init)
	}
	if s.Tag != nil {
		b.add(s.Tag)
	}
	done := b.newBlock("switch.done")
	if label != nil {
		label._break = done
	}
	// We pull the default case (if present) down to the end.
	// But each fallthrough label must point to the next
	// body block in source order, so we preallocate a
	// body block (fallthru) for the next case.
	// Unfortunately this makes for a confusing block order.
	var defaultBody *[]ast.Stmt
	var defaultFallthrough *Block
	var fallthru, defaultBlock *Block
	ncases := len(s.Body.List)
	for i, clause := range s.Body.List {
		body := fallthru
		if body == nil {
			body = b.newBlock("switch.body") // first case only
		}

		// Preallocate body block for the next case.
		fallthru = done
		if i+1 < ncases {
			fallthru = b.newBlock("switch.body")
		}

		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			// Default case.
			defaultBody = &cc.Body
			defaultFallthrough = fallthru
			defaultBlock = body
			continue
		}

		// One conditional branch per case expression.
		var nextCond *Block
		for _, cond := range cc.List {
			nextCond = b.newBlock("switch.next")
			b.add(cond) // one half of the tag==cond condition
			b.ifelse(body, nextCond)
			b.current = nextCond
		}
		b.current = body
		b.targets = &targets{
			tail:         b.targets,
			_break:       done,
			_fallthrough: fallthru,
		}
		b.stmtList(cc.Body)
		b.targets = b.targets.tail
		b.jump(done)
		b.current = nextCond
	}
	if defaultBlock != nil {
		b.jump(defaultBlock)
		b.current = defaultBlock
		b.targets = &targets{
			tail:         b.targets,
			_break:       done,
			_fallthrough: defaultFallthrough,
		}
		b.stmtList(*defaultBody)
		b.targets = b.targets.tail
	}
	b.jump(done)
	b.current = done
}
// typeSwitchStmt emits the blocks and edges for a type switch.
// label, if non-nil, is the enclosing labeled statement, whose break
// target is wired to the switch's done block.
func (b *builder) typeSwitchStmt(s *ast.TypeSwitchStmt, label *lblock) {
	if s.Init != nil {
		b.stmt(s.Init)
	}
	if s.Assign != nil {
		b.add(s.Assign)
	}

	done := b.newBlock("typeswitch.done")
	if label != nil {
		label._break = done
	}
	var default_ *ast.CaseClause // processed last, like switchStmt's default
	for _, clause := range s.Body.List {
		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			default_ = cc
			continue
		}
		body := b.newBlock("typeswitch.body")
		var next *Block
		for _, casetype := range cc.List {
			next = b.newBlock("typeswitch.next")
			// casetype is a type, so don't call b.add(casetype).
			// This block logically contains a type assertion,
			// x.(casetype), but it's unclear how to represent x.
			_ = casetype
			b.ifelse(body, next)
			b.current = next
		}
		b.current = body
		b.typeCaseBody(cc, done)
		b.current = next
	}
	if default_ != nil {
		b.typeCaseBody(default_, done)
	} else {
		b.jump(done)
	}
	b.current = done
}
// typeCaseBody emits the body of one type-switch case, with done as
// the break target, and finally jumps to done.
func (b *builder) typeCaseBody(cc *ast.CaseClause, done *Block) {
	saved := b.targets
	b.targets = &targets{
		tail:   saved,
		_break: done,
	}
	b.stmtList(cc.Body)
	b.targets = saved
	b.jump(done)
}
// selectStmt emits the blocks and edges for a select statement.
// label, if non-nil, is the enclosing labeled statement, whose break
// target is wired to the select's done block.
func (b *builder) selectStmt(s *ast.SelectStmt, label *lblock) {
	// First evaluate channel expressions.
	// TODO(adonovan): fix: evaluate only channel exprs here.
	for _, clause := range s.Body.List {
		if comm := clause.(*ast.CommClause).Comm; comm != nil {
			b.stmt(comm)
		}
	}

	done := b.newBlock("select.done")
	if label != nil {
		label._break = done
	}

	var defaultBody *[]ast.Stmt // deferred to the end, like switchStmt
	for _, cc := range s.Body.List {
		clause := cc.(*ast.CommClause)
		if clause.Comm == nil {
			defaultBody = &clause.Body
			continue
		}
		body := b.newBlock("select.body")
		next := b.newBlock("select.next")
		b.ifelse(body, next)
		b.current = body
		b.targets = &targets{
			tail:   b.targets,
			_break: done,
		}
		switch comm := clause.Comm.(type) {
		case *ast.ExprStmt: // <-ch
			// nop
		case *ast.AssignStmt: // x := <-states[state].Chan
			b.add(comm.Lhs[0])
		}
		b.stmtList(clause.Body)
		b.targets = b.targets.tail
		b.jump(done)
		b.current = next
	}
	if defaultBody != nil {
		b.targets = &targets{
			tail:   b.targets,
			_break: done,
		}
		b.stmtList(*defaultBody)
		b.targets = b.targets.tail
		b.jump(done)
	}
	b.current = done
}
// forStmt emits the blocks and edges for a for statement (with optional
// init, cond and post). label, if non-nil, receives the loop's break
// and continue targets.
func (b *builder) forStmt(s *ast.ForStmt, label *lblock) {
	//	...init...
	//      jump loop
	// loop:
	//	if cond goto body else done
	// body:
	//	...body...
	//	jump post
	// post:				 (target of continue)
	//	...post...
	//	jump loop
	// done:                                 (target of break)
	if s.Init != nil {
		b.stmt(s.Init)
	}
	body := b.newBlock("for.body")
	done := b.newBlock("for.done") // target of 'break'
	loop := body                   // target of back-edge
	if s.Cond != nil {
		loop = b.newBlock("for.loop")
	}
	cont := loop // target of 'continue'
	if s.Post != nil {
		cont = b.newBlock("for.post")
	}
	if label != nil {
		label._break = done
		label._continue = cont
	}
	b.jump(loop)
	b.current = loop
	if loop != body {
		// Conditional loop: test cond, then enter body or exit.
		b.add(s.Cond)
		b.ifelse(body, done)
		b.current = body
	}
	b.targets = &targets{
		tail:      b.targets,
		_break:    done,
		_continue: cont,
	}
	b.stmt(s.Body)
	b.targets = b.targets.tail
	b.jump(cont)

	if s.Post != nil {
		b.current = cont
		b.stmt(s.Post)
		b.jump(loop) // back-edge
	}
	b.current = done
}
// rangeStmt emits the blocks and edges for a range statement.
// label, if non-nil, receives the loop's break and continue targets.
func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) {
	b.add(s.X)

	if s.Key != nil {
		b.add(s.Key)
	}
	if s.Value != nil {
		b.add(s.Value)
	}

	//	...
	// loop:                                   (target of continue)
	//	if ... goto body else done
	// body:
	//	...
	//	jump loop
	// done:                                   (target of break)

	loop := b.newBlock("range.loop")
	b.jump(loop)
	b.current = loop

	body := b.newBlock("range.body")
	done := b.newBlock("range.done")
	b.ifelse(body, done)
	b.current = body

	if label != nil {
		label._break = done
		label._continue = loop
	}
	b.targets = &targets{
		tail:      b.targets,
		_break:    done,
		_continue: loop,
	}
	b.stmt(s.Body)
	b.targets = b.targets.tail
	b.jump(loop) // back-edge
	b.current = done
}
// -------- helpers --------

// Destinations associated with unlabeled for/switch/select stmts.
// We push/pop one of these as we enter/leave each construct and for
// each BranchStmt we scan for the innermost target of the right type.
//
type targets struct {
	tail         *targets // rest of stack
	_break       *Block   // destination of an unlabeled break
	_continue    *Block   // destination of an unlabeled continue
	_fallthrough *Block   // destination of a fallthrough
}
// Destinations associated with a labeled block.
// We populate these as labels are encountered in forward gotos or
// labeled statements.
//
type lblock struct {
	_goto     *Block // target of a goto to this label
	_break    *Block // target of a labeled break; set only for breakable statements
	_continue *Block // target of a labeled continue; set only for loops
}
// labeledBlock returns the branch target associated with the
// specified label, creating it (and, if necessary, the lblocks map)
// on first use.
//
func (b *builder) labeledBlock(label *ast.Ident) *lblock {
	if lb := b.lblocks[label.Obj]; lb != nil {
		return lb
	}
	// First reference to this label: allocate its goto block.
	lb := &lblock{_goto: b.newBlock(label.Name)}
	if b.lblocks == nil {
		b.lblocks = make(map[*ast.Object]*lblock)
	}
	b.lblocks[label.Obj] = lb
	return lb
}
// newBlock appends a new unconnected basic block to b.cfg's block
// slice and returns it.
// It does not automatically become the current block.
// comment is an optional string for more readable debugging output.
func (b *builder) newBlock(comment string) *Block {
	blk := &Block{
		index:   int32(len(b.cfg.Blocks)),
		comment: comment,
	}
	// Start Succs on the inline backing array to avoid an allocation
	// for the common 0-2 successor case.
	blk.Succs = blk.succs2[:0]
	b.cfg.Blocks = append(b.cfg.Blocks, blk)
	return blk
}
// newUnreachableBlock is newBlock with the unreachable flag set; it is
// used for code following return/panic/branch statements.
func (b *builder) newUnreachableBlock(comment string) *Block {
	blk := b.newBlock(comment)
	blk.unreachable = true
	return blk
}
// add appends node n to the current block's node list.
func (b *builder) add(n ast.Node) {
	cur := b.current
	cur.Nodes = append(cur.Nodes, n)
}
// jump adds an edge from the current block to the target block,
// and sets b.current to nil (a new current block must be chosen
// before more nodes are added).
func (b *builder) jump(target *Block) {
	cur := b.current
	cur.Succs = append(cur.Succs, target)
	b.current = nil
}
// ifelse emits edges from the current block to the t (true) and
// f (false) blocks, in that order, and sets b.current to nil.
func (b *builder) ifelse(t, f *Block) {
	cur := b.current
	cur.Succs = append(cur.Succs, t, f)
	b.current = nil
}

View File

@ -0,0 +1,142 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This package constructs a simple control-flow graph (CFG) of the
// statements and expressions within a single function.
//
// Use cfg.New to construct the CFG for a function body.
//
// The blocks of the CFG contain all the function's non-control
// statements. The CFG does not contain control statements such as If,
// Switch, Select, and Branch, but does contain their subexpressions.
// For example, this source code:
//
// if x := f(); x != nil {
// T()
// } else {
// F()
// }
//
// produces this CFG:
//
// 1: x := f()
// x != nil
// succs: 2, 3
// 2: T()
// succs: 4
// 3: F()
// succs: 4
// 4:
//
// The CFG does contain Return statements; even implicit returns are
// materialized (at the position of the function's closing brace).
//
// The CFG does not record conditions associated with conditional branch
// edges, nor the short-circuit semantics of the && and || operators,
// nor abnormal control flow caused by panic. If you need this
// information, use golang.org/x/tools/go/ssa instead.
//
package cfg
// Although the vet tool has type information, it is often extremely
// fragmentary, so for simplicity this package does not depend on
// go/types. Consequently control-flow conditions are ignored even
// when constant, and "mayReturn" information must be provided by the
// client.
import (
"bytes"
"fmt"
"go/ast"
"go/format"
"go/token"
)
// A CFG represents the control-flow graph of a single function.
//
// The entry point is Blocks[0]; there may be multiple return blocks.
// Blocks past index 0 are in no guaranteed order.
type CFG struct {
	Blocks []*Block // block[0] is entry; order otherwise undefined
}
// A Block represents a basic block: a list of statements and
// expressions that are always evaluated sequentially.
//
// A block may have 0-2 successors: zero for a return block or a block
// that calls a function such as panic that never returns; one for a
// normal (jump) block; and two for a conditional (if) block.
type Block struct {
	Nodes []ast.Node // statements, expressions, and ValueSpecs
	Succs []*Block   // successor nodes in the graph

	comment     string    // for debugging
	index       int32     // index within CFG.Blocks
	unreachable bool      // is block of stmts following return/panic/for{}
	succs2      [2]*Block // underlying array for Succs, avoiding allocation for <=2 successors
}
// New returns a new control-flow graph for the specified function body,
// which must be non-nil.
//
// The CFG builder calls mayReturn to determine whether a given function
// call may return. For example, calls to panic, os.Exit, and log.Fatal
// do not return, so the builder can remove infeasible graph edges
// following such calls. The builder calls mayReturn only for a
// CallExpr beneath an ExprStmt.
func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG {
	b := builder{
		mayReturn: mayReturn,
		cfg:       new(CFG),
	}
	b.current = b.newBlock("entry")
	b.stmt(body)

	// Does control fall off the end of the function's body?
	// Make implicit return explicit.
	if b.current != nil && !b.current.unreachable {
		// The synthesized return is positioned at the closing brace.
		b.add(&ast.ReturnStmt{
			Return: body.End() - 1,
		})
	}

	return b.cfg
}
// String returns a brief human-readable description of the block,
// e.g. "block 3 (for.body)", for debugging.
func (b *Block) String() string {
	return fmt.Sprintf("block %d (%s)", b.index, b.comment)
}
// Return returns the return statement at the end of this block if present, nil otherwise.
func (b *Block) Return() (ret *ast.ReturnStmt) {
	n := len(b.Nodes)
	if n == 0 {
		return nil
	}
	// Only the final node can be a return; anything else yields nil.
	ret, _ = b.Nodes[n-1].(*ast.ReturnStmt)
	return ret
}
// Format formats the control-flow graph for ease of debugging.
// Each block is printed as its index and comment, its nodes rendered
// via fset, and the indices of its successors.
func (g *CFG) Format(fset *token.FileSet) string {
	var buf bytes.Buffer
	for _, b := range g.Blocks {
		fmt.Fprintf(&buf, ".%d: # %s\n", b.index, b.comment)
		for _, n := range b.Nodes {
			fmt.Fprintf(&buf, "\t%s\n", formatNode(fset, n))
		}
		if len(b.Succs) > 0 {
			fmt.Fprintf(&buf, "\tsuccs:")
			for _, succ := range b.Succs {
				fmt.Fprintf(&buf, " %d", succ.index)
			}
			buf.WriteByte('\n')
		}
		buf.WriteByte('\n')
	}
	return buf.String()
}
// formatNode renders n as Go source using fset, indenting any
// continuation lines by a tab so multi-line nodes nest under their
// block in Format's output.
func formatNode(fset *token.FileSet, n ast.Node) string {
	var out bytes.Buffer
	format.Node(&out, fset, n) // best-effort; errors leave out partially written
	// Indent secondary lines by a tab.
	indented := bytes.Replace(out.Bytes(), []byte("\n"), []byte("\n\t"), -1)
	return string(indented)
}

View File

@ -0,0 +1,28 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package whitelist defines exceptions for the vet tool.
package whitelist
// UnkeyedLiteral is a white list of types in the standard packages
// that are used with unkeyed literals we deem to be acceptable.
var UnkeyedLiteral = map[string]bool{
	// These image and image/color struct types are frozen. We will never add fields to them.
	"image.Point":     true,
	"image.Rectangle": true,
	"image.Uniform":   true,

	"image/color.Alpha":   true,
	"image/color.Alpha16": true,
	"image/color.CMYK":    true,
	"image/color.Gray":    true,
	"image/color.Gray16":  true,
	"image/color.NRGBA":   true,
	"image/color.NRGBA64": true,
	"image/color.NYCbCrA": true,
	"image/color.RGBA":    true,
	"image/color.RGBA64":  true,
	"image/color.YCbCr":   true,

	"unicode.Range16": true,
}

View File

@ -0,0 +1,318 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"github.com/dnephin/govet/internal/cfg"
"fmt"
"go/ast"
"go/types"
"strconv"
)
// init registers the lostcancel check with the vet driver; it runs
// over named function declarations and function literals.
func init() {
	register("lostcancel",
		"check for failure to call cancelation function returned by context.WithCancel",
		checkLostCancel,
		funcDecl, funcLit)
}
// debugLostCancel gates extra diagnostic output for this check
// (presumably logging in the CFG walk — confirm against the rest of
// the file, which is not visible here).
const debugLostCancel = false

// contextPackage is the import path whose WithCancel-style functions
// this check tracks. Declared as a var rather than a const,
// presumably so it can be overridden (e.g. for golang.org/x/net/context) — confirm.
var contextPackage = "context"
// checkLostCancel reports a failure to call the cancel function
// returned by context.WithCancel, either because the variable was
// assigned to the blank identifier, or because there exists a
// control-flow path from the call to a return statement and that path
// does not "use" the cancel function. Any reference to the variable
// counts as a use, even within a nested function literal.
//
// checkLostCancel analyzes a single named or literal function.
func checkLostCancel(f *File, node ast.Node) {
	// Fast path: bypass check if file doesn't use context.WithCancel.
	if !hasImport(f.file, contextPackage) {
		return
	}
	// Maps each cancel variable to its defining ValueSpec/AssignStmt.
	cancelvars := make(map[*types.Var]ast.Node)
	// Find the set of cancel vars to analyze.
	// stack mirrors the current ancestor chain during ast.Inspect so we
	// can look upward from a SelectorExpr to its enclosing statement.
	stack := make([]ast.Node, 0, 32)
	ast.Inspect(node, func(n ast.Node) bool {
		switch n.(type) {
		case *ast.FuncLit:
			if len(stack) > 0 {
				return false // don't stray into nested functions
			}
		case nil:
			stack = stack[:len(stack)-1] // pop
			return true
		}
		stack = append(stack, n) // push
		// Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]:
		//
		// ctx, cancel := context.WithCancel(...)
		// ctx, cancel = context.WithCancel(...)
		// var ctx, cancel = context.WithCancel(...)
		//
		if isContextWithCancel(f, n) && isCall(stack[len(stack)-2]) {
			var id *ast.Ident // id of cancel var
			stmt := stack[len(stack)-3]
			switch stmt := stmt.(type) {
			case *ast.ValueSpec:
				if len(stmt.Names) > 1 {
					id = stmt.Names[1]
				}
			case *ast.AssignStmt:
				if len(stmt.Lhs) > 1 {
					id, _ = stmt.Lhs[1].(*ast.Ident)
				}
			}
			if id != nil {
				if id.Name == "_" {
					f.Badf(id.Pos(), "the cancel function returned by context.%s should be called, not discarded, to avoid a context leak",
						n.(*ast.SelectorExpr).Sel.Name)
				} else if v, ok := f.pkg.uses[id].(*types.Var); ok {
					cancelvars[v] = stmt
				} else if v, ok := f.pkg.defs[id].(*types.Var); ok {
					cancelvars[v] = stmt
				}
			}
		}
		return true
	})
	if len(cancelvars) == 0 {
		return // no need to build CFG
	}
	// Tell the CFG builder which functions never return.
	info := &types.Info{Uses: f.pkg.uses, Selections: f.pkg.selectors}
	mayReturn := func(call *ast.CallExpr) bool {
		name := callName(info, call)
		return !noReturnFuncs[name]
	}
	// Build the CFG.
	var g *cfg.CFG
	var sig *types.Signature
	switch node := node.(type) {
	case *ast.FuncDecl:
		sig, _ = f.pkg.defs[node.Name].Type().(*types.Signature)
		g = cfg.New(node.Body, mayReturn)
	case *ast.FuncLit:
		sig, _ = f.pkg.types[node.Type].Type.(*types.Signature)
		g = cfg.New(node.Body, mayReturn)
	}
	// Print CFG.
	if debugLostCancel {
		fmt.Println(g.Format(f.fset))
	}
	// Examine the CFG for each variable in turn.
	// (It would be more efficient to analyze all cancelvars in a
	// single pass over the AST, but seldom is there more than one.)
	for v, stmt := range cancelvars {
		if ret := lostCancelPath(f, g, v, stmt, sig); ret != nil {
			lineno := f.fset.Position(stmt.Pos()).Line
			f.Badf(stmt.Pos(), "the %s function is not used on all paths (possible context leak)", v.Name())
			f.Badf(ret.Pos(), "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno)
		}
	}
}
func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok }
func hasImport(f *ast.File, path string) bool {
for _, imp := range f.Imports {
v, _ := strconv.Unquote(imp.Path.Value)
if v == path {
return true
}
}
return false
}
// isContextWithCancel reports whether n is one of the qualified identifiers
// context.With{Cancel,Timeout,Deadline}.
func isContextWithCancel(f *File, n ast.Node) bool {
	sel, ok := n.(*ast.SelectorExpr)
	if !ok {
		return false
	}
	switch sel.Sel.Name {
	case "WithCancel", "WithTimeout", "WithDeadline":
		// candidate; keep checking the receiver
	default:
		return false
	}
	x, ok := sel.X.(*ast.Ident)
	if !ok {
		return false
	}
	if pkgname, ok := f.pkg.uses[x].(*types.PkgName); ok {
		return pkgname.Imported().Path() == contextPackage
	}
	// Import failed, so we can't check the package path.
	// Just check the local package name (heuristic).
	return x.Name == "context"
}
// lostCancelPath finds a path through the CFG, from stmt (which defines
// the 'cancel' variable v) to a return statement, that doesn't "use" v.
// If it finds one, it returns the return statement (which may be synthetic).
// sig is the function's type, if known.
func lostCancelPath(f *File, g *cfg.CFG, v *types.Var, stmt ast.Node, sig *types.Signature) *ast.ReturnStmt {
	vIsNamedResult := sig != nil && tupleContains(sig.Results(), v)
	// uses reports whether stmts contain a "use" of variable v.
	uses := func(f *File, v *types.Var, stmts []ast.Node) bool {
		found := false
		for _, stmt := range stmts {
			ast.Inspect(stmt, func(n ast.Node) bool {
				switch n := n.(type) {
				case *ast.Ident:
					if f.pkg.uses[n] == v {
						found = true
					}
				case *ast.ReturnStmt:
					// A naked return statement counts as a use
					// of the named result variables.
					if n.Results == nil && vIsNamedResult {
						found = true
					}
				}
				// Stop inspecting as soon as a use is found.
				return !found
			})
		}
		return found
	}
	// blockUses computes "uses" for each block, caching the result.
	memo := make(map[*cfg.Block]bool)
	blockUses := func(f *File, v *types.Var, b *cfg.Block) bool {
		res, ok := memo[b]
		if !ok {
			res = uses(f, v, b.Nodes)
			memo[b] = res
		}
		return res
	}
	// Find the var's defining block in the CFG,
	// plus the rest of the statements of that block.
	var defblock *cfg.Block
	var rest []ast.Node
outer:
	for _, b := range g.Blocks {
		for i, n := range b.Nodes {
			if n == stmt {
				defblock = b
				rest = b.Nodes[i+1:]
				break outer
			}
		}
	}
	if defblock == nil {
		panic("internal error: can't find defining block for cancel var")
	}
	// Is v "used" in the remainder of its defining block?
	if uses(f, v, rest) {
		return nil
	}
	// Does the defining block return without using v?
	if ret := defblock.Return(); ret != nil {
		return ret
	}
	// Search the CFG depth-first for a path, from defblock to a
	// return block, in which v is never "used".
	seen := make(map[*cfg.Block]bool)
	var search func(blocks []*cfg.Block) *ast.ReturnStmt
	search = func(blocks []*cfg.Block) *ast.ReturnStmt {
		for _, b := range blocks {
			if !seen[b] {
				seen[b] = true
				// Prune the search if the block uses v.
				if blockUses(f, v, b) {
					continue
				}
				// Found path to return statement?
				if ret := b.Return(); ret != nil {
					if debugLostCancel {
						fmt.Printf("found path to return in block %s\n", b)
					}
					return ret // found
				}
				// Recur
				if ret := search(b.Succs); ret != nil {
					if debugLostCancel {
						fmt.Printf(" from block %s\n", b)
					}
					return ret
				}
			}
		}
		return nil
	}
	return search(defblock.Succs)
}
func tupleContains(tuple *types.Tuple, v *types.Var) bool {
for i := 0; i < tuple.Len(); i++ {
if tuple.At(i) == v {
return true
}
}
return false
}
// noReturnFuncs lists functions, keyed by the canonical name produced
// by callName, that never return; a call to one of them terminates the
// current control-flow path.
var noReturnFuncs = map[string]bool{
	"(*testing.common).FailNow": true,
	"(*testing.common).Fatal":   true,
	"(*testing.common).Fatalf":  true,
	"(*testing.common).Skip":    true,
	"(*testing.common).SkipNow": true,
	"(*testing.common).Skipf":   true,
	"log.Fatal":                 true,
	"log.Fatalf":                true,
	"log.Fatalln":               true,
	"os.Exit":                   true,
	"panic":                     true,
	"runtime.Goexit":            true,
}
// callName returns the canonical name of the builtin, method, or
// function called by call, if known.
func callName(info *types.Info, call *ast.CallExpr) string {
switch fun := call.Fun.(type) {
case *ast.Ident:
// builtin, e.g. "panic"
if obj, ok := info.Uses[fun].(*types.Builtin); ok {
return obj.Name()
}
case *ast.SelectorExpr:
if sel, ok := info.Selections[fun]; ok && sel.Kind() == types.MethodVal {
// method call, e.g. "(*testing.common).Fatal"
meth := sel.Obj()
return fmt.Sprintf("(%s).%s",
meth.Type().(*types.Signature).Recv().Type(),
meth.Name())
}
if obj, ok := info.Uses[fun.Sel]; ok {
// qualified identifier, e.g. "os.Exit"
return fmt.Sprintf("%s.%s",
obj.Pkg().Path(),
obj.Name())
}
}
// function with no name, or defined in missing imported package
return ""
}

View File

@ -0,0 +1,504 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Vet is a simple checker for static errors in Go source code.
// See doc.go for more information.
package main
import (
"bytes"
"flag"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/printer"
"go/token"
"go/types"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"strings"
)
var (
	verbose   = flag.Bool("v", false, "verbose")
	tags      = flag.String("tags", "", "comma-separated list of build tags to apply when parsing")
	noRecurse = flag.Bool("no-recurse", false, "disable recursive directory walking")
	tagList   = []string{} // exploded version of tags flag; set in main
)
// exitCode is the status passed to os.Exit at the end of the run; raised via setExit.
var exitCode = 0
// "-all" flag enables all non-experimental checks
var all = triStateFlag("all", unset, "enable all non-experimental checks")
// Flags to control which individual checks to perform.
var report = map[string]*triState{
	// Only unusual checks are written here.
	// Most checks that operate during the AST walk are added by register.
	"asmdecl":   triStateFlag("asmdecl", unset, "check assembly against Go declarations"),
	"buildtags": triStateFlag("buildtags", unset, "check that +build tags are valid"),
}
// experimental records the flags enabling experimental features. These must be
// requested explicitly; they are not enabled by -all.
var experimental = map[string]bool{}
// setTrueCount records how many flags are explicitly set to true.
var setTrueCount int
// dirsRun and filesRun indicate whether the vet is applied to directory or
// file targets. The distinction affects which checks are run.
var dirsRun, filesRun bool
// includesNonTest indicates whether the vet is applied to non-test targets.
// Certain checks are relevant only if they touch both test and non-test files.
var includesNonTest bool
// A triState is a boolean that knows whether it has been set to either true or false.
// It is used to identify if a flag appears; the standard boolean flag cannot
// distinguish missing from unset. It also satisfies flag.Value.
type triState int
const (
	unset triState = iota
	setTrue
	setFalse
)
// triStateFlag registers a triState flag with the given name, default
// value, and usage, and returns a pointer to its value.
func triStateFlag(name string, value triState, usage string) *triState {
	flag.Var(&value, name, usage)
	return &value
}
// triState implements flag.Value, flag.Getter, and flag.boolFlag.
// They work like boolean flags: we can say vet -printf as well as vet -printf=true
func (ts *triState) Get() interface{} {
	return *ts == setTrue
}
// isTrue reports whether the flag was explicitly set to true.
func (ts triState) isTrue() bool {
	return ts == setTrue
}
// Set implements flag.Value. It records explicit trues in setTrueCount,
// which main uses to decide whether to enable the default check set.
func (ts *triState) Set(value string) error {
	b, err := strconv.ParseBool(value)
	if err != nil {
		return err
	}
	if b {
		*ts = setTrue
		setTrueCount++
	} else {
		*ts = setFalse
	}
	return nil
}
// String implements flag.Value.
func (ts *triState) String() string {
	switch *ts {
	case unset:
		return "true" // An unset flag will be set by -all, so defaults to true.
	case setTrue:
		return "true"
	case setFalse:
		return "false"
	}
	panic("not reached")
}
// IsBoolFlag marks the flag as boolean, so -name works as shorthand for -name=true.
func (ts triState) IsBoolFlag() bool {
	return true
}
// vet tells whether to report errors for the named check, a flag name.
func vet(name string) bool {
	return report[name].isTrue()
}
// setExit sets the value for os.Exit when it is called, later. It
// remembers the highest value.
func setExit(err int) {
	if err > exitCode {
		exitCode = err
	}
}
var (
	// Each of these vars has a corresponding case in (*File).Visit.
	assignStmt    *ast.AssignStmt
	binaryExpr    *ast.BinaryExpr
	callExpr      *ast.CallExpr
	compositeLit  *ast.CompositeLit
	exprStmt      *ast.ExprStmt
	field         *ast.Field
	funcDecl      *ast.FuncDecl
	funcLit       *ast.FuncLit
	genDecl       *ast.GenDecl
	interfaceType *ast.InterfaceType
	rangeStmt     *ast.RangeStmt
	returnStmt    *ast.ReturnStmt
	// checkers is a two-level map.
	// The outer level is keyed by a nil pointer, one of the AST vars above.
	// The inner level is keyed by checker name.
	checkers = make(map[ast.Node]map[string]func(*File, ast.Node))
)
// register records a checker: it creates the command-line flag for the
// named check and files fn under each AST node type it applies to.
func register(name, usage string, fn func(*File, ast.Node), types ...ast.Node) {
	report[name] = triStateFlag(name, unset, usage)
	for _, typ := range types {
		m := checkers[typ]
		if m == nil {
			m = make(map[string]func(*File, ast.Node))
			checkers[typ] = m
		}
		m[name] = fn
	}
}
// Usage is a replacement usage function for the flags package.
// It prints usage to standard error and exits with status 2.
func Usage() {
	fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
	fmt.Fprintf(os.Stderr, "\tvet [flags] directory...\n")
	fmt.Fprintf(os.Stderr, "\tvet [flags] files... # Must be a single package\n")
	fmt.Fprintf(os.Stderr, "By default, -all is set and all non-experimental checks are run.\n")
	fmt.Fprintf(os.Stderr, "For more information run\n")
	fmt.Fprintf(os.Stderr, "\tgo doc cmd/vet\n\n")
	fmt.Fprintf(os.Stderr, "Flags:\n")
	flag.PrintDefaults()
	os.Exit(2)
}
// File is a wrapper for the state of a file used in the parser.
// The parse tree walkers are all methods of this type.
type File struct {
	pkg     *Package
	fset    *token.FileSet
	name    string
	content []byte
	file    *ast.File
	b       bytes.Buffer // for use by methods
	// Parsed package "foo" when checking package "foo_test"
	basePkg *Package
	// The objects that are receivers of a "String() string" method.
	// This is used by the recursiveStringer method in print.go.
	stringers map[*ast.Object]bool
	// Registered checkers to run.
	checkers map[ast.Node][]func(*File, ast.Node)
}
// main parses flags, resolves which checks are enabled, and vets the
// named directories or files (mixing the two is rejected via Usage).
func main() {
	flag.Usage = Usage
	flag.Parse()
	// If any flag is set, we run only those checks requested.
	// If all flag is set true or if no flags are set true, set all the non-experimental ones
	// not explicitly set (in effect, set the "-all" flag).
	if setTrueCount == 0 || *all == setTrue {
		for name, setting := range report {
			if *setting == unset && !experimental[name] {
				*setting = setTrue
			}
		}
	}
	tagList = strings.Split(*tags, ",")
	initPrintFlags()
	initUnusedFlags()
	if flag.NArg() == 0 {
		Usage()
	}
	// Classify each argument as a directory or file target.
	for _, name := range flag.Args() {
		// Is it a directory?
		fi, err := os.Stat(name)
		if err != nil {
			warnf("error walking tree: %s", err)
			continue
		}
		if fi.IsDir() {
			dirsRun = true
		} else {
			filesRun = true
			if !strings.HasSuffix(name, "_test.go") {
				includesNonTest = true
			}
		}
	}
	if dirsRun && filesRun {
		Usage()
	}
	if dirsRun {
		for _, name := range flag.Args() {
			if *noRecurse {
				doPackageDir(name)
			} else {
				walkDir(name)
			}
		}
		os.Exit(exitCode)
	}
	// File targets: all named files are vetted as a single package.
	if doPackage(".", flag.Args(), nil) == nil {
		warnf("no files checked")
	}
	os.Exit(exitCode)
}
// prefixDirectory places the directory name on the beginning of each name in the list.
func prefixDirectory(directory string, names []string) {
if directory != "." {
for i, name := range names {
names[i] = filepath.Join(directory, name)
}
}
}
// doPackageDir analyzes the single package found in the directory, if there is one,
// plus a test package, if there is one.
func doPackageDir(directory string) {
	context := build.Default
	if len(context.BuildTags) != 0 {
		warnf("build tags %s previously set", context.BuildTags)
	}
	context.BuildTags = append(tagList, context.BuildTags...)
	pkg, err := context.ImportDir(directory, 0)
	if err != nil {
		// If it's just that there are no go source files, that's fine.
		if _, nogo := err.(*build.NoGoError); nogo {
			return
		}
		// Non-fatal: we are doing a recursive walk and there may be other directories.
		warnf("cannot process directory %s: %s", directory, err)
		return
	}
	var names []string
	names = append(names, pkg.GoFiles...)
	names = append(names, pkg.CgoFiles...)
	names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package.
	names = append(names, pkg.SFiles...)
	prefixDirectory(directory, names)
	basePkg := doPackage(directory, names, nil)
	// Is there also a "foo_test" package? If so, do that one as well.
	if len(pkg.XTestGoFiles) > 0 {
		names = pkg.XTestGoFiles
		prefixDirectory(directory, names)
		doPackage(directory, names, basePkg)
	}
}
// A Package holds the files of a single package being vetted together
// with the type-checker results that the individual checks consult.
type Package struct {
	path      string
	defs      map[*ast.Ident]types.Object
	uses      map[*ast.Ident]types.Object
	selectors map[*ast.SelectorExpr]*types.Selection
	types     map[ast.Expr]types.TypeAndValue
	spans     map[types.Object]Span
	files     []*File
	typesPkg  *types.Package
}
// doPackage analyzes the single package constructed from the named files.
// It returns the parsed Package or nil if none of the files have been checked.
func doPackage(directory string, names []string, basePkg *Package) *Package {
	var files []*File
	var astFiles []*ast.File
	fs := token.NewFileSet()
	for _, name := range names {
		data, err := ioutil.ReadFile(name)
		if err != nil {
			// Warn but continue to next package.
			warnf("%s: %s", name, err)
			return nil
		}
		checkBuildTag(name, data)
		var parsedFile *ast.File
		// Non-Go files (e.g. assembly) are kept with a nil parsedFile;
		// they are checked by asmCheck, not by the AST walk.
		if strings.HasSuffix(name, ".go") {
			parsedFile, err = parser.ParseFile(fs, name, data, 0)
			if err != nil {
				warnf("%s: %s", name, err)
				return nil
			}
			astFiles = append(astFiles, parsedFile)
		}
		files = append(files, &File{fset: fs, content: data, name: name, file: parsedFile})
	}
	if len(astFiles) == 0 {
		return nil
	}
	pkg := new(Package)
	pkg.path = astFiles[0].Name.Name
	pkg.files = files
	// Type check the package.
	err := pkg.check(fs, astFiles)
	if err != nil && *verbose {
		warnf("%s", err)
	}
	// Check.
	// Build the per-node-type list of enabled checkers once for the package.
	chk := make(map[ast.Node][]func(*File, ast.Node))
	for typ, set := range checkers {
		for name, fn := range set {
			if vet(name) {
				chk[typ] = append(chk[typ], fn)
			}
		}
	}
	for _, file := range files {
		file.pkg = pkg
		file.basePkg = basePkg
		file.checkers = chk
		if file.file != nil {
			file.walkFile(file.name, file.file)
		}
	}
	asmCheck(pkg)
	return pkg
}
// visit is the filepath.Walk callback: it vets each directory it encounters.
func visit(path string, f os.FileInfo, err error) error {
	if err != nil {
		warnf("walk error: %s", err)
		return err
	}
	// One package per directory. Ignore the files themselves.
	if !f.IsDir() {
		return nil
	}
	doPackageDir(path)
	return nil
}
// hasFileWithSuffix reports whether any file in the package has the given
// filename suffix.
func (pkg *Package) hasFileWithSuffix(suffix string) bool {
	for _, f := range pkg.files {
		if strings.HasSuffix(f.name, suffix) {
			return true
		}
	}
	return false
}
// walkDir recursively walks the tree looking for Go packages.
func walkDir(root string) {
	filepath.Walk(root, visit)
}
// errorf formats the error to standard error, adding program
// identification and a newline, and exits.
func errorf(format string, args ...interface{}) {
	fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
	os.Exit(2)
}
// warnf formats the error to standard error, adding program
// identification and a newline, but does not exit.
func warnf(format string, args ...interface{}) {
	fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...)
	setExit(1)
}
// Println is fmt.Println guarded by -v.
func Println(args ...interface{}) {
	if *verbose {
		fmt.Println(args...)
	}
}
// Printf is fmt.Printf guarded by -v; it appends a newline to the format.
func Printf(format string, args ...interface{}) {
	if *verbose {
		fmt.Printf(format+"\n", args...)
	}
}
// Bad reports an error and sets the exit code.
func (f *File) Bad(pos token.Pos, args ...interface{}) {
	f.Warn(pos, args...)
	setExit(1)
}
// Badf reports a formatted error and sets the exit code.
func (f *File) Badf(pos token.Pos, format string, args ...interface{}) {
	f.Warnf(pos, format, args...)
	setExit(1)
}
// loc returns a formatted representation of the position.
func (f *File) loc(pos token.Pos) string {
	if pos == token.NoPos {
		return ""
	}
	// Do not print columns. Because the pos often points to the start of an
	// expression instead of the inner part with the actual error, the
	// precision can mislead.
	posn := f.fset.Position(pos)
	return fmt.Sprintf("%s:%d", posn.Filename, posn.Line)
}
// Warn reports an error but does not set the exit code.
func (f *File) Warn(pos token.Pos, args ...interface{}) {
	fmt.Fprintf(os.Stderr, "%s: %s", f.loc(pos), fmt.Sprintln(args...))
}
// Warnf reports a formatted error but does not set the exit code.
func (f *File) Warnf(pos token.Pos, format string, args ...interface{}) {
	fmt.Fprintf(os.Stderr, "%s: %s\n", f.loc(pos), fmt.Sprintf(format, args...))
}
// walkFile walks the file's tree.
func (f *File) walkFile(name string, file *ast.File) {
	Println("Checking file", name)
	ast.Walk(f, file)
}
// Visit implements the ast.Visitor interface: it maps the node's concrete
// type to its registration key (one of the nil AST vars above) and runs
// every checker registered for that key.
func (f *File) Visit(node ast.Node) ast.Visitor {
	var key ast.Node
	switch node.(type) {
	case *ast.AssignStmt:
		key = assignStmt
	case *ast.BinaryExpr:
		key = binaryExpr
	case *ast.CallExpr:
		key = callExpr
	case *ast.CompositeLit:
		key = compositeLit
	case *ast.ExprStmt:
		key = exprStmt
	case *ast.Field:
		key = field
	case *ast.FuncDecl:
		key = funcDecl
	case *ast.FuncLit:
		key = funcLit
	case *ast.GenDecl:
		key = genDecl
	case *ast.InterfaceType:
		key = interfaceType
	case *ast.RangeStmt:
		key = rangeStmt
	case *ast.ReturnStmt:
		key = returnStmt
	}
	for _, fn := range f.checkers[key] {
		fn(f, node)
	}
	return f
}
// gofmt returns a string representation of the expression.
// It reuses the File's shared buffer f.b, so calls must not be interleaved.
func (f *File) gofmt(x ast.Expr) string {
	f.b.Reset()
	printer.Fprint(&f.b, f.fset, x)
	return f.b.String()
}

View File

@ -0,0 +1,182 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the code to check canonical methods.
package main
import (
"fmt"
"go/ast"
"go/printer"
"strings"
)
// init registers the "methods" check, run on method declarations and
// interface types.
func init() {
	register("methods",
		"check that canonically named methods are canonically defined",
		checkCanonicalMethod,
		funcDecl, interfaceType)
}
// A MethodSig describes the expected argument and result types of a
// canonical method; entries prefixed "=" are match signals (see below).
type MethodSig struct {
	args    []string
	results []string
}
// canonicalMethods lists the input and output types for Go methods
// that are checked using dynamic interface checks. Because the
// checks are dynamic, such methods would not cause a compile error
// if they have the wrong signature: instead the dynamic check would
// fail, sometimes mysteriously. If a method is found with a name listed
// here but not the input/output types listed here, vet complains.
//
// A few of the canonical methods have very common names.
// For example, a type might implement a Scan method that
// has nothing to do with fmt.Scanner, but we still want to check
// the methods that are intended to implement fmt.Scanner.
// To do that, the arguments that have a = prefix are treated as
// signals that the canonical meaning is intended: if a Scan
// method doesn't have a fmt.ScanState as its first argument,
// we let it go. But if it does have a fmt.ScanState, then the
// rest has to match.
var canonicalMethods = map[string]MethodSig{
	// "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict
	"Format":        {[]string{"=fmt.State", "rune"}, []string{}},                      // fmt.Formatter
	"GobDecode":     {[]string{"[]byte"}, []string{"error"}},                           // gob.GobDecoder
	"GobEncode":     {[]string{}, []string{"[]byte", "error"}},                         // gob.GobEncoder
	"MarshalJSON":   {[]string{}, []string{"[]byte", "error"}},                         // json.Marshaler
	"MarshalXML":    {[]string{"*xml.Encoder", "xml.StartElement"}, []string{"error"}}, // xml.Marshaler
	"Peek":          {[]string{"=int"}, []string{"[]byte", "error"}},                   // image.reader (matching bufio.Reader)
	"ReadByte":      {[]string{}, []string{"byte", "error"}},                           // io.ByteReader
	"ReadFrom":      {[]string{"=io.Reader"}, []string{"int64", "error"}},              // io.ReaderFrom
	"ReadRune":      {[]string{}, []string{"rune", "int", "error"}},                    // io.RuneReader
	"Scan":          {[]string{"=fmt.ScanState", "rune"}, []string{"error"}},           // fmt.Scanner
	"Seek":          {[]string{"=int64", "int"}, []string{"int64", "error"}},           // io.Seeker
	"UnmarshalJSON": {[]string{"[]byte"}, []string{"error"}},                           // json.Unmarshaler
	"UnmarshalXML":  {[]string{"*xml.Decoder", "xml.StartElement"}, []string{"error"}}, // xml.Unmarshaler
	"UnreadByte":    {[]string{}, []string{"error"}},
	"UnreadRune":    {[]string{}, []string{"error"}},
	"WriteByte":     {[]string{"byte"}, []string{"error"}},                // jpeg.writer (matching bufio.Writer)
	"WriteTo":       {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo
}
// checkCanonicalMethod checks a method declaration or each method of an
// interface type against the canonicalMethods table.
func checkCanonicalMethod(f *File, node ast.Node) {
	switch n := node.(type) {
	case *ast.FuncDecl:
		if n.Recv != nil {
			canonicalMethod(f, n.Name, n.Type)
		}
	case *ast.InterfaceType:
		for _, field := range n.Methods.List {
			for _, id := range field.Names {
				canonicalMethod(f, id, field.Type.(*ast.FuncType))
			}
		}
	}
}
// canonicalMethod reports a diagnostic if the method named id with type t
// matches a canonical method's "=" signal parameters but not its full
// signature.
func canonicalMethod(f *File, id *ast.Ident, t *ast.FuncType) {
	// Expected input/output.
	expect, ok := canonicalMethods[id.Name]
	if !ok {
		return
	}
	// Actual input/output
	args := typeFlatten(t.Params.List)
	var results []ast.Expr
	if t.Results != nil {
		results = typeFlatten(t.Results.List)
	}
	// Do the =s (if any) all match?
	if !f.matchParams(expect.args, args, "=") || !f.matchParams(expect.results, results, "=") {
		return
	}
	// Everything must match.
	if !f.matchParams(expect.args, args, "") || !f.matchParams(expect.results, results, "") {
		expectFmt := id.Name + "(" + argjoin(expect.args) + ")"
		if len(expect.results) == 1 {
			expectFmt += " " + argjoin(expect.results)
		} else if len(expect.results) > 1 {
			expectFmt += " (" + argjoin(expect.results) + ")"
		}
		f.b.Reset()
		if err := printer.Fprint(&f.b, f.fset, t); err != nil {
			fmt.Fprintf(&f.b, "<%s>", err)
		}
		actual := f.b.String()
		actual = strings.TrimPrefix(actual, "func")
		actual = id.Name + actual
		f.Badf(id.Pos(), "method %s should have signature %s", actual, expectFmt)
	}
}
// argjoin joins the type strings with ", ", dropping the leading "="
// signal marker from any entry that carries one.
// Using strings.TrimPrefix (rather than indexing s[0]) also makes the
// function safe for an empty-string element, which would otherwise panic.
func argjoin(x []string) string {
	y := make([]string, len(x))
	for i, s := range x {
		y[i] = strings.TrimPrefix(s, "=")
	}
	return strings.Join(y, ", ")
}
// Turn parameter list into slice of types
// (in the ast, types are Exprs).
// Have to handle f(int, bool) and f(x, y, z int)
// so not a simple 1-to-1 conversion.
func typeFlatten(l []*ast.Field) []ast.Expr {
var t []ast.Expr
for _, f := range l {
if len(f.Names) == 0 {
t = append(t, f.Type)
continue
}
for _ = range f.Names {
t = append(t, f.Type)
}
}
return t
}
// matchParams reports whether each type in expect carrying the given
// prefix matches the corresponding type in actual.
func (f *File) matchParams(expect []string, actual []ast.Expr, prefix string) bool {
	for i, x := range expect {
		if !strings.HasPrefix(x, prefix) {
			continue
		}
		if i >= len(actual) {
			return false
		}
		if !f.matchParamType(x, actual[i]) {
			return false
		}
	}
	// For a full match (empty prefix), extra actual parameters are a mismatch.
	if prefix == "" && len(actual) > len(expect) {
		return false
	}
	return true
}
// matchParamType reports whether a single actual type matches the
// expected type string (its "=" marker, if any, is ignored).
func (f *File) matchParamType(expect string, actual ast.Expr) bool {
	if strings.HasPrefix(expect, "=") {
		expect = expect[1:]
	}
	// Strip package name if we're in that package.
	if n := len(f.file.Name.Name); len(expect) > n && expect[:n] == f.file.Name.Name && expect[n] == '.' {
		expect = expect[n+1:]
	}
	// Overkill but easy.
	f.b.Reset()
	printer.Fprint(&f.b, f.fset, actual)
	return f.b.String() == expect
}

View File

@ -0,0 +1,67 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for useless function comparisons.
A useless comparison is one like f == nil as opposed to f() == nil.
*/
package main
import (
"go/ast"
"go/token"
"go/types"
)
// init registers the nilfunc check, run on binary expressions.
func init() {
	register("nilfunc",
		"check for comparisons between functions and nil",
		checkNilFuncComparison,
		binaryExpr)
}
// checkNilFuncComparison reports comparisons of the form f == nil or
// f != nil where f names a function: such a comparison is always
// false (resp. true) and usually means f() was intended.
func checkNilFuncComparison(f *File, node ast.Node) {
	e := node.(*ast.BinaryExpr)
	// Only want == or != comparisons.
	if e.Op != token.EQL && e.Op != token.NEQ {
		return
	}
	// Only want comparisons with a nil identifier on one side.
	var e2 ast.Expr
	switch {
	case f.isNil(e.X):
		e2 = e.Y
	case f.isNil(e.Y):
		e2 = e.X
	default:
		return
	}
	// Only want identifiers or selector expressions.
	var obj types.Object
	switch v := e2.(type) {
	case *ast.Ident:
		obj = f.pkg.uses[v]
	case *ast.SelectorExpr:
		obj = f.pkg.uses[v.Sel]
	default:
		return
	}
	// Only want functions.
	if _, ok := obj.(*types.Func); !ok {
		return
	}
	f.Badf(e.Pos(), "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
}
// isNil reports whether the provided expression is the built-in nil
// identifier.
func (f *File) isNil(e ast.Expr) bool {
	return f.pkg.types[e].Type == types.Typ[types.UntypedNil]
}

View File

@ -0,0 +1,650 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the printf-checker.
package main
import (
"bytes"
"flag"
"go/ast"
"go/constant"
"go/token"
"go/types"
"strconv"
"strings"
"unicode/utf8"
)
// printfuncs is the -printfuncs flag: extra print-like functions to check.
var printfuncs = flag.String("printfuncs", "", "comma-separated list of print function names to check")
// init registers the printf check, run on function declarations and calls.
func init() {
	register("printf",
		"check printf-like invocations",
		checkFmtPrintfCall,
		funcDecl, callExpr)
}
// initPrintFlags merges the -printfuncs names into the isPrint and
// isFormattedPrint tables; a name ending in 'f' is treated as formatted.
func initPrintFlags() {
	if *printfuncs == "" {
		return
	}
	for _, name := range strings.Split(*printfuncs, ",") {
		if len(name) == 0 {
			flag.Usage()
		}
		// Backwards compatibility: skip optional first argument
		// index after the colon.
		if colon := strings.LastIndex(name, ":"); colon > 0 {
			name = name[:colon]
		}
		name = strings.ToLower(name)
		if name[len(name)-1] == 'f' {
			isFormattedPrint[name] = true
		} else {
			isPrint[name] = true
		}
	}
}
// isFormattedPrint records the formatted-print functions. Names are
// lower-cased so the lookup is case insensitive.
// initPrintFlags adds user-supplied names from -printfuncs.
var isFormattedPrint = map[string]bool{
	"errorf":  true,
	"fatalf":  true,
	"fprintf": true,
	"logf":    true,
	"panicf":  true,
	"printf":  true,
	"sprintf": true,
}
// isPrint records the unformatted-print functions. Names are lower-cased
// so the lookup is case insensitive.
// initPrintFlags adds user-supplied names from -printfuncs.
var isPrint = map[string]bool{
	"error":    true,
	"fatal":    true,
	"fprint":   true,
	"fprintln": true,
	"log":      true,
	"panic":    true,
	"panicln":  true,
	"print":    true,
	"println":  true,
	"sprint":   true,
	"sprintln": true,
}
// formatString returns the format string argument and its index within
// the given printf-like call expression.
//
// The last parameter before variadic arguments is assumed to be
// a format string.
//
// The first string literal or string constant is assumed to be a format string
// if the call's signature cannot be determined.
//
// If it cannot find any format string parameter, it returns ("", -1).
func formatString(f *File, call *ast.CallExpr) (string, int) {
	typ := f.pkg.types[call.Fun].Type
	if typ != nil {
		if sig, ok := typ.(*types.Signature); ok {
			if !sig.Variadic() {
				// Skip checking non-variadic functions
				return "", -1
			}
			// The format string parameter precedes the variadic one.
			idx := sig.Params().Len() - 2
			if idx < 0 {
				// Skip checking variadic functions without
				// fixed arguments.
				return "", -1
			}
			s, ok := stringLiteralArg(f, call, idx)
			if !ok {
				// The last argument before variadic args isn't a string
				return "", -1
			}
			return s, idx
		}
	}
	// Cannot determine call's signature. Fallback to scanning for the first
	// string argument in the call
	for idx := range call.Args {
		if s, ok := stringLiteralArg(f, call, idx); ok {
			return s, idx
		}
	}
	return "", -1
}
// stringLiteralArg returns call's string constant argument at the index idx.
//
// ("", false) is returned if call's argument at the index idx isn't a string
// literal.
func stringLiteralArg(f *File, call *ast.CallExpr, idx int) (string, bool) {
	if idx >= len(call.Args) {
		return "", false
	}
	arg := call.Args[idx]
	lit := f.pkg.types[arg].Value
	if lit != nil && lit.Kind() == constant.String {
		return constant.StringVal(lit), true
	}
	return "", false
}
// checkFmtPrintfCall triggers the print-specific checks if the call
// invokes a print function; it also records "String() string" method
// receivers for the recursive-stringer check.
func checkFmtPrintfCall(f *File, node ast.Node) {
	if d, ok := node.(*ast.FuncDecl); ok && isStringer(f, d) {
		// Remember we saw this.
		if f.stringers == nil {
			f.stringers = make(map[*ast.Object]bool)
		}
		if l := d.Recv.List; len(l) == 1 {
			if n := l[0].Names; len(n) == 1 {
				f.stringers[n[0].Obj] = true
			}
		}
		return
	}
	call, ok := node.(*ast.CallExpr)
	if !ok {
		return
	}
	var Name string
	switch x := call.Fun.(type) {
	case *ast.Ident:
		Name = x.Name
	case *ast.SelectorExpr:
		Name = x.Sel.Name
	default:
		return
	}
	// Lookup is case insensitive; the original Name is kept for diagnostics.
	name := strings.ToLower(Name)
	if _, ok := isFormattedPrint[name]; ok {
		f.checkPrintf(call, Name)
		return
	}
	if _, ok := isPrint[name]; ok {
		f.checkPrint(call, Name)
		return
	}
}
// isStringer returns true if the provided declaration is a "String() string"
// method, an implementation of fmt.Stringer.
func isStringer(f *File, d *ast.FuncDecl) bool {
	return d.Recv != nil && d.Name.Name == "String" && d.Type.Results != nil &&
		len(d.Type.Params.List) == 0 && len(d.Type.Results.List) == 1 &&
		f.pkg.types[d.Type.Results.List[0].Type].Type == types.Typ[types.String]
}
// formatState holds the parsed representation of a printf directive such as "%3.*[4]d".
// It is constructed by parsePrintfVerb.
type formatState struct {
	verb     rune   // the format verb: 'd' for "%d"
	format   string // the full format directive from % through verb, "%.3d".
	name     string // Printf, Sprintf etc.
	flags    []byte // the list of # + etc.
	argNums  []int  // the successive argument numbers that are consumed, adjusted to refer to actual arg in call
	indexed  bool   // whether an indexing expression appears: %[1]d.
	firstArg int    // Index of first argument after the format in the Printf call.
	// Used only during parse.
	file         *File
	call         *ast.CallExpr
	argNum       int  // Which argument we're expecting to format now.
	indexPending bool // Whether we have an indexed argument that has not resolved.
	nbytes       int  // number of bytes of the format string consumed.
}
// checkPrintf checks a call to a formatted print routine such as Printf.
// It verifies each directive in the format string against the corresponding
// argument and flags leftover or missing arguments.
func (f *File) checkPrintf(call *ast.CallExpr, name string) {
	// Locate the constant format string; give up if it is not constant.
	format, idx := formatString(f, call)
	if idx < 0 {
		if *verbose {
			f.Warn(call.Pos(), "can't check non-constant format in call to", name)
		}
		return
	}
	firstArg := idx + 1 // Arguments are immediately after format string.
	if !strings.Contains(format, "%") {
		if len(call.Args) > firstArg {
			f.Badf(call.Pos(), "no formatting directive in %s call", name)
		}
		return
	}
	// Hard part: check formats against args.
	argNum := firstArg
	indexed := false
	// w is the byte width of the directive just parsed, so the loop steps
	// one directive (or one plain byte) at a time.
	for i, w := 0, 0; i < len(format); i += w {
		w = 1
		if format[i] == '%' {
			state := f.parsePrintfVerb(call, name, format[i:], firstArg, argNum)
			if state == nil {
				// parsePrintfVerb already reported the problem.
				return
			}
			w = len(state.format)
			if state.indexed {
				indexed = true
			}
			if !f.okPrintfArg(call, state) { // One error per format is enough.
				return
			}
			if len(state.argNums) > 0 {
				// Continue with the next sequential argument.
				argNum = state.argNums[len(state.argNums)-1] + 1
			}
		}
	}
	// Dotdotdot is hard.
	if call.Ellipsis.IsValid() && argNum >= len(call.Args)-1 {
		return
	}
	// If the arguments were direct indexed, we assume the programmer knows what's up.
	// Otherwise, there should be no leftover arguments.
	if !indexed && argNum != len(call.Args) {
		expect := argNum - firstArg
		numArgs := len(call.Args) - firstArg
		f.Badf(call.Pos(), "wrong number of args for format in %s call: %d needed but %d args", name, expect, numArgs)
	}
}
// parseFlags consumes any printf flag characters ('#', '0', '+', '-', ' ')
// at the current position, recording each one in s.flags.
func (s *formatState) parseFlags() {
	for s.nbytes < len(s.format) {
		c := s.format[s.nbytes]
		if c != '#' && c != '0' && c != '+' && c != '-' && c != ' ' {
			return
		}
		s.flags = append(s.flags, c)
		s.nbytes++
	}
}
// scanNum advances s.nbytes past a run of decimal digits, if present.
func (s *formatState) scanNum() {
	for s.nbytes < len(s.format) {
		if c := s.format[s.nbytes]; c < '0' || c > '9' {
			return
		}
		s.nbytes++
	}
}
// parseIndex scans an index expression such as "[3]". It reports false only
// on a syntax error; a missing index expression is not an error.
func (s *formatState) parseIndex() bool {
	if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' {
		return true // No index present; nothing to do.
	}
	// Argument index present.
	s.indexed = true
	s.nbytes++ // skip '['
	digitsStart := s.nbytes
	s.scanNum()
	if s.nbytes == len(s.format) || s.nbytes == digitsStart || s.format[s.nbytes] != ']' {
		s.file.Badf(s.call.Pos(), "illegal syntax for printf argument index")
		return false
	}
	index64, err := strconv.ParseInt(s.format[digitsStart:s.nbytes], 10, 32)
	if err != nil {
		s.file.Badf(s.call.Pos(), "illegal syntax for printf argument index: %s", err)
		return false
	}
	s.nbytes++ // skip ']'
	// Translate the 1-based printf index into a zero-based call argument.
	s.argNum = int(index64) + s.firstArg - 1
	s.indexPending = true
	return true
}
// parseNum scans a width or precision (a digit run or '*'). It reports false
// if there's a bad index expression (this implementation always succeeds).
func (s *formatState) parseNum() bool {
	if s.nbytes >= len(s.format) || s.format[s.nbytes] != '*' {
		s.scanNum()
		return true
	}
	// A '*' consumes one argument as the width/precision value.
	s.indexPending = false // Absorb any pending index.
	s.nbytes++
	s.argNums = append(s.argNums, s.argNum)
	s.argNum++
	return true
}
// parsePrecision scans a precision: '.' optionally followed by an index and a
// number. It reports false if there's a bad index expression.
func (s *formatState) parsePrecision() bool {
	if s.nbytes >= len(s.format) || s.format[s.nbytes] != '.' {
		return true // No precision present.
	}
	s.flags = append(s.flags, '.') // Treat precision as a flag.
	s.nbytes++
	return s.parseIndex() && s.parseNum()
}
// parsePrintfVerb looks at the formatting directive that begins the format string
// and returns a formatState that encodes what the directive wants, without looking
// at the actual arguments present in the call. The result is nil if there is an error.
func (f *File) parsePrintfVerb(call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState {
	state := &formatState{
		format:   format,
		name:     name,
		flags:    make([]byte, 0, 5),
		argNum:   argNum,
		argNums:  make([]int, 0, 1),
		nbytes:   1, // There's guaranteed to be a percent sign.
		indexed:  false,
		firstArg: firstArg,
		file:     f,
		call:     call,
	}
	// There may be flags.
	state.parseFlags()
	// There may be an index.
	if !state.parseIndex() {
		return nil
	}
	// There may be a width.
	if !state.parseNum() {
		return nil
	}
	// There may be a precision.
	if !state.parsePrecision() {
		return nil
	}
	// Now a verb, possibly prefixed by an index (which we may already have).
	// FIX: the previous code tested a local `indexPending` that was declared
	// false and never updated, so the guard was vacuous. The pending-index
	// state is tracked on formatState (set by parseIndex, cleared when
	// parseNum absorbs it for '*'); consult the field so an index that has
	// already been consumed is not parsed a second time.
	if !state.indexPending && !state.parseIndex() {
		return nil
	}
	if state.nbytes == len(state.format) {
		f.Badf(call.Pos(), "missing verb at end of format string in %s call", name)
		return nil
	}
	verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:])
	state.verb = verb
	state.nbytes += w
	if verb != '%' {
		// "%%" consumes no argument; every other verb consumes one.
		state.argNums = append(state.argNums, state.argNum)
	}
	// Trim format down to exactly this directive.
	state.format = state.format[:state.nbytes]
	return state
}
// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask.
type printfArgType int

const (
	argBool printfArgType = 1 << iota
	argInt
	argRune
	argString
	argFloat
	argComplex
	argPointer
	// anyType matches every argument type; used by %v and %T (see printVerbs).
	anyType printfArgType = ^0
)
// printVerb describes one printf verb: the flags it accepts and the
// argument types it can format.
type printVerb struct {
	verb  rune   // User may provide verb through Formatter; could be a rune.
	flags string // known flags are all ASCII
	typ   printfArgType
}

// Common flag sets for printf verbs.
const (
	noFlag       = ""
	numFlag      = " -+.0"
	sharpNumFlag = " -+.0#"
	allFlags     = " -+.0#"
)
// printVerbs identifies which flags are known to printf for each verb.
// It is consulted by okPrintfArg via a linear scan.
// TODO: A type that implements Formatter may do what it wants, and vet
// will complain incorrectly.
var printVerbs = []printVerb{
	// '-' is a width modifier, always valid.
	// '.' is a precision for float, max width for strings.
	// '+' is required sign for numbers, Go format for %v.
	// '#' is alternate format for several verbs.
	// ' ' is spacer for numbers
	{'%', noFlag, 0},
	{'b', numFlag, argInt | argFloat | argComplex},
	{'c', "-", argRune | argInt},
	{'d', numFlag, argInt},
	{'e', numFlag, argFloat | argComplex},
	{'E', numFlag, argFloat | argComplex},
	{'f', numFlag, argFloat | argComplex},
	{'F', numFlag, argFloat | argComplex},
	{'g', numFlag, argFloat | argComplex},
	{'G', numFlag, argFloat | argComplex},
	{'o', sharpNumFlag, argInt},
	{'p', "-#", argPointer},
	{'q', " -+.0#", argRune | argInt | argString},
	{'s', " -+.0", argString},
	{'t', "-", argBool},
	{'T', "-", anyType},
	{'U', "-#", argRune | argInt},
	{'v', allFlags, anyType},
	{'x', sharpNumFlag, argRune | argInt | argString},
	{'X', sharpNumFlag, argRune | argInt | argString},
}
// okPrintfArg compares the formatState to the arguments actually present,
// reporting any discrepancies it can discern. If the final argument is ellipsissed,
// there's little it can do for that.
func (f *File) okPrintfArg(call *ast.CallExpr, state *formatState) (ok bool) {
	var v printVerb
	found := false
	// Linear scan is fast enough for a small list.
	for _, v = range printVerbs {
		if v.verb == state.verb {
			found = true
			break
		}
	}
	if !found {
		f.Badf(call.Pos(), "unrecognized printf verb %q", state.verb)
		return false
	}
	// Every flag in the directive must be accepted by this verb.
	for _, flag := range state.flags {
		if !strings.ContainsRune(v.flags, rune(flag)) {
			f.Badf(call.Pos(), "unrecognized printf flag for verb %q: %q", state.verb, flag)
			return false
		}
	}
	// Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all
	// but the final arg must be an integer.
	trueArgs := 1
	if state.verb == '%' {
		trueArgs = 0 // "%%" consumes no argument of its own.
	}
	nargs := len(state.argNums)
	for i := 0; i < nargs-trueArgs; i++ {
		argNum := state.argNums[i]
		if !f.argCanBeChecked(call, i, true, state) {
			return // Naked return: ok is false here.
		}
		arg := call.Args[argNum]
		if !f.matchArgType(argInt, nil, arg) {
			f.Badf(call.Pos(), "arg %s for * in printf format not of type int", f.gofmt(arg))
			return false
		}
	}
	if state.verb == '%' {
		return true
	}
	// Now check the argument formatted by the verb itself (the last argNum).
	argNum := state.argNums[len(state.argNums)-1]
	if !f.argCanBeChecked(call, len(state.argNums)-1, false, state) {
		return false
	}
	arg := call.Args[argNum]
	if f.isFunctionValue(arg) && state.verb != 'p' && state.verb != 'T' {
		f.Badf(call.Pos(), "arg %s in printf call is a function value, not a function call", f.gofmt(arg))
		return false
	}
	if !f.matchArgType(v.typ, nil, arg) {
		typeString := ""
		if typ := f.pkg.types[arg].Type; typ != nil {
			typeString = typ.String()
		}
		f.Badf(call.Pos(), "arg %s for printf verb %%%c of wrong type: %s", f.gofmt(arg), state.verb, typeString)
		return false
	}
	// A string-accepting verb applied to the receiver of the enclosing
	// String method would recurse ('#' flag and %T are exempt).
	if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) && f.recursiveStringer(arg) {
		f.Badf(call.Pos(), "arg %s for printf causes recursive call to String method", f.gofmt(arg))
		return false
	}
	return true
}
// recursiveStringer reports whether the provided argument is r or &r for the
// fmt.Stringer receiver identifier r.
func (f *File) recursiveStringer(e ast.Expr) bool {
	if len(f.stringers) == 0 {
		return false
	}
	var target *ast.Object
	switch expr := e.(type) {
	case *ast.Ident:
		target = expr.Obj
	case *ast.UnaryExpr:
		if ident, ok := expr.X.(*ast.Ident); ok && expr.Op == token.AND {
			target = ident.Obj
		}
	}
	// A type with its own Format method is unlikely to be a recursive
	// stringer. Not a perfect match; see issue 6259.
	if typ := f.pkg.types[e].Type; typ != nil && f.hasMethod(typ, "Format") {
		return false
	}
	// Compare the underlying Object, which checks that the identifier is the
	// one declared as the receiver for the String method in which this
	// printf appears.
	return f.stringers[target]
}
// isFunctionValue reports whether the expression is a function as opposed to
// a function call. It is almost always a mistake to print a function value.
func (f *File) isFunctionValue(e ast.Expr) bool {
	typ := f.pkg.types[e].Type
	if typ == nil {
		return false
	}
	_, isSignature := typ.(*types.Signature)
	return isSignature
}
// argCanBeChecked reports whether the specified argument is statically present;
// it may be beyond the list of arguments or in a terminal slice... argument, which
// means we can't see it.
//
// FIX (unparam lint): the third parameter (formerly `isStar`) was never used
// in the body; it is renamed to _ so the unused parameter is explicit while
// all existing call sites keep compiling unchanged.
func (f *File) argCanBeChecked(call *ast.CallExpr, formatArg int, _ bool, state *formatState) bool {
	argNum := state.argNums[formatArg]
	if argNum < 0 {
		// Shouldn't happen, so catch it with prejudice.
		panic("negative arg num")
	}
	if argNum == 0 {
		f.Badf(call.Pos(), `index value [0] for %s("%s"); indexes start at 1`, state.name, state.format)
		return false
	}
	if argNum < len(call.Args)-1 {
		return true // Always OK.
	}
	if call.Ellipsis.IsValid() {
		return false // We just can't tell; there could be many more arguments.
	}
	if argNum < len(call.Args) {
		return true
	}
	// There are bad indexes in the format or there are fewer arguments than the format needs.
	// This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi".
	arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed.
	f.Badf(call.Pos(), `missing argument for %s("%s"): format reads arg %d, have only %d args`, state.name, state.format, arg, len(call.Args)-state.firstArg)
	return false
}
// checkPrint checks a call to an unformatted print routine such as Println.
func (f *File) checkPrint(call *ast.CallExpr, name string) {
	funType := f.pkg.types[call.Fun].Type
	if funType == nil {
		// Skip checking functions with unknown type.
		return
	}
	firstArg := 0
	if sig, ok := funType.(*types.Signature); ok {
		if !sig.Variadic() {
			// Skip checking non-variadic functions.
			return
		}
		params := sig.Params()
		firstArg = params.Len() - 1
		elem := params.At(firstArg).Type().(*types.Slice).Elem()
		iface, ok := elem.(*types.Interface)
		if !ok || !iface.Empty() {
			// Skip variadic functions accepting non-interface{} args.
			return
		}
	}
	if len(call.Args) <= firstArg {
		// Skip calls without variadic args.
		return
	}
	args := call.Args[firstArg:]
	// check for Println(os.Stderr, ...)
	if firstArg == 0 {
		if sel, ok := args[0].(*ast.SelectorExpr); ok {
			if pkgIdent, ok := sel.X.(*ast.Ident); ok &&
				pkgIdent.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") {
				f.Badf(call.Pos(), "first argument to %s is %s.%s", name, pkgIdent.Name, sel.Sel.Name)
			}
		}
	}
	// A literal first argument containing '%' suggests Printf was intended.
	if lit, ok := args[0].(*ast.BasicLit); ok && lit.Kind == token.STRING &&
		strings.Contains(lit.Value, "%") {
		f.Badf(call.Pos(), "possible formatting directive in %s call", name)
	}
	if strings.HasSuffix(name, "ln") {
		// The last item, if a string, should not have a newline.
		if lit, ok := args[len(args)-1].(*ast.BasicLit); ok && lit.Kind == token.STRING &&
			strings.HasSuffix(lit.Value, `\n"`) {
			f.Badf(call.Pos(), "%s call ends with newline", name)
		}
	}
	for _, arg := range args {
		if f.isFunctionValue(arg) {
			f.Badf(call.Pos(), "arg %s in %s call is a function value, not a function call", f.gofmt(arg), name)
		}
		if f.recursiveStringer(arg) {
			f.Badf(call.Pos(), "arg %s in %s call causes recursive call to String method", f.gofmt(arg), name)
		}
	}
}

View File

@ -0,0 +1,74 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check range loop variables bound inside function
literals that are deferred or launched in new goroutines. We only check
instances where the defer or go statement is the last statement in the loop
body, as otherwise we would need whole program analysis.
For example:
for i, v := range s {
go func() {
println(i, v) // not what you might expect
}()
}
See: https://golang.org/doc/go_faq.html#closures_and_goroutines
*/
package main
import "go/ast"
func init() {
	// Register the rangeloops check to run on every range statement.
	register("rangeloops",
		"check that range loop variables are used correctly",
		checkRangeLoop,
		rangeStmt)
}
// checkRangeLoop walks the body of the provided range statement, checking if
// its index or value variables are used unsafely inside goroutines or deferred
// function literals.
func checkRangeLoop(f *File, node ast.Node) {
	rng := node.(*ast.RangeStmt)
	key, _ := rng.Key.(*ast.Ident)
	val, _ := rng.Value.(*ast.Ident)
	if key == nil && val == nil {
		return
	}
	body := rng.Body.List
	if len(body) == 0 {
		return
	}
	// Only a go/defer in final position is examined; anything earlier would
	// require whole-program analysis.
	var launched *ast.CallExpr
	switch stmt := body[len(body)-1].(type) {
	case *ast.GoStmt:
		launched = stmt.Call
	case *ast.DeferStmt:
		launched = stmt.Call
	default:
		return
	}
	lit, ok := launched.Fun.(*ast.FuncLit)
	if !ok {
		return
	}
	ast.Inspect(lit.Body, func(n ast.Node) bool {
		id, ok := n.(*ast.Ident)
		if !ok || id.Obj == nil {
			return true
		}
		if f.pkg.types[id].Type == nil {
			// Not referring to a variable
			return true
		}
		if (key != nil && id.Obj == key.Obj) || (val != nil && id.Obj == val.Obj) {
			f.Bad(id.Pos(), "range variable", id.Name, "captured by func literal")
		}
		return true
	})
}

View File

@ -0,0 +1,246 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for shadowed variables.
A shadowed variable is a variable declared in an inner scope
with the same name and type as a variable in an outer scope,
and where the outer variable is mentioned after the inner one
is declared.
(This definition can be refined; the module generates too many
false positives and is not yet enabled by default.)
For example:
func BadRead(f *os.File, buf []byte) error {
var err error
for {
n, err := f.Read(buf) // shadows the function variable 'err'
if err != nil {
break // causes return of wrong value
}
foo(buf)
}
return err
}
*/
package main
import (
"flag"
"go/ast"
"go/token"
"go/types"
)
// strictShadowing selects the position-only check in checkShadowing instead
// of the span heuristic (see Span below); it can be noisy.
var strictShadowing = flag.Bool("shadowstrict", false, "whether to be strict about shadowing; can be noisy")

func init() {
	register("shadow",
		"check for shadowed variables (experimental; must be set explicitly)",
		checkShadow,
		assignStmt, genDecl)
	// Marked experimental: it only runs when requested explicitly.
	experimental["shadow"] = true
}
// checkShadow dispatches the shadowing check appropriate to the node kind.
func checkShadow(f *File, node ast.Node) {
	if assign, ok := node.(*ast.AssignStmt); ok {
		checkShadowAssignment(f, assign)
	} else if decl, ok := node.(*ast.GenDecl); ok {
		checkShadowDecl(f, decl)
	}
}
// Span stores the minimum range of byte positions in the file in which a
// given variable (types.Object) is mentioned. It is lexically defined: it spans
// from the beginning of its first mention to the end of its last mention.
// A variable is considered shadowed (if *strictShadowing is off) only if the
// shadowing variable is declared within the span of the shadowed variable.
// In other words, if a variable is shadowed but not used after the shadowed
// variable is declared, it is inconsequential and not worth complaining about.
// This simple check dramatically reduces the nuisance rate for the shadowing
// check, at least until something cleverer comes along.
//
// One wrinkle: A "naked return" is a silent use of a variable that the Span
// will not capture, but the compilers catch naked returns of shadowed
// variables so we don't need to.
//
// Cases this gets wrong (TODO):
// - If a for loop's continuation statement mentions a variable redeclared in
// the block, we should complain about it but don't.
// - A variable declared inside a function literal can falsely be identified
// as shadowing a variable in the outer function.
//
type Span struct {
	min token.Pos
	max token.Pos
}

// contains reports whether pos lies inside the half-open interval [min, max).
func (s Span) contains(pos token.Pos) bool {
	return pos >= s.min && pos < s.max
}
// growSpan expands the span recorded for obj to cover the mention represented
// by ident. It is a no-op in strict mode, where spans are not consulted.
func (pkg *Package) growSpan(ident *ast.Ident, obj types.Object) {
	if *strictShadowing {
		return // No need
	}
	pos, end := ident.Pos(), ident.End()
	span, seen := pkg.spans[obj]
	if !seen {
		pkg.spans[obj] = Span{pos, end}
		return
	}
	if pos < span.min {
		span.min = pos
	}
	if end > span.max {
		span.max = end
	}
	pkg.spans[obj] = span
}
// checkShadowAssignment checks for shadowing in a short variable declaration.
func checkShadowAssignment(f *File, a *ast.AssignStmt) {
	if a.Tok != token.DEFINE {
		return // Only ":=" declares new variables.
	}
	if f.idiomaticShortRedecl(a) {
		return // Deliberate redeclaration such as "i := i".
	}
	for _, lhs := range a.Lhs {
		ident, ok := lhs.(*ast.Ident)
		if !ok {
			f.Badf(lhs.Pos(), "invalid AST: short variable declaration of non-identifier")
			return
		}
		checkShadowing(f, ident)
	}
}
// idiomaticShortRedecl reports whether this short declaration can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool {
	// Deliberate redeclarations look like
	//	i := i
	// (idiomatic in range loops to create a per-iteration variable) or
	//	switch n := n.(type)
	if len(a.Rhs) != len(a.Lhs) {
		return false
	}
	for i, lhsExpr := range a.Lhs {
		// The LHS of a short declaration must be all identifiers; check anyway.
		lhs, ok := lhsExpr.(*ast.Ident)
		if !ok {
			f.Badf(lhsExpr.Pos(), "invalid AST: short variable declaration of non-identifier")
			return true // Don't do any more processing.
		}
		switch rhs := a.Rhs[i].(type) {
		case *ast.Ident:
			if lhs.Name != rhs.Name {
				return false
			}
		case *ast.TypeAssertExpr:
			if id, ok := rhs.X.(*ast.Ident); ok && lhs.Name != id.Name {
				return false
			}
		default:
			return false
		}
	}
	return true
}
// idiomaticRedecl reports whether this declaration spec can be ignored for
// the purposes of shadowing, that is, that any redeclarations it contains are deliberate.
func (f *File) idiomaticRedecl(d *ast.ValueSpec) bool {
	// Don't complain about deliberate redeclarations of the form
	//	var i, j = i, j
	if len(d.Names) != len(d.Values) {
		return false
	}
	for i, name := range d.Names {
		rhs, ok := d.Values[i].(*ast.Ident)
		if ok && rhs.Name != name.Name {
			return false
		}
	}
	return true
}
// checkShadowDecl checks for shadowing in a general variable declaration.
func checkShadowDecl(f *File, d *ast.GenDecl) {
	if d.Tok != token.VAR {
		return // Only var declarations introduce variables.
	}
	for _, spec := range d.Specs {
		valueSpec, ok := spec.(*ast.ValueSpec)
		if !ok {
			f.Badf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec")
			return
		}
		// Deliberate redeclarations of the form "var i = i" are fine.
		if f.idiomaticRedecl(valueSpec) {
			return
		}
		for _, ident := range valueSpec.Names {
			checkShadowing(f, ident)
		}
	}
}
// checkShadowing checks whether the identifier shadows an identifier in an outer scope.
func checkShadowing(f *File, ident *ast.Ident) {
	if ident.Name == "_" {
		return // The blank identifier can never be shadowed.
	}
	obj := f.pkg.defs[ident]
	if obj == nil {
		return
	}
	// obj.Parent().Parent() is the surrounding scope. If another declaration
	// of the same name is visible from there, this identifier shadows it.
	_, shadowed := obj.Parent().Parent().LookupParent(obj.Name(), obj.Pos())
	if shadowed == nil || shadowed.Parent() == types.Universe {
		// Nothing shadowed, or only a universe-declared identifier; fine.
		return
	}
	if *strictShadowing {
		// The shadowed identifier must appear before this one to be an instance of shadowing.
		if shadowed.Pos() > ident.Pos() {
			return
		}
	} else {
		// Only complain if the shadowed identifier's span of validity
		// includes the shadowing identifier.
		span, ok := f.pkg.spans[shadowed]
		if !ok {
			f.Badf(ident.Pos(), "internal error: no range for %q", ident.Name)
			return
		}
		if !span.contains(ident.Pos()) {
			return
		}
	}
	// Differing types imply the programmer really wants two different things.
	if types.Identical(obj.Type(), shadowed.Type()) {
		f.Badf(ident.Pos(), "declaration of %q shadows declaration at %s", obj.Name(), f.loc(shadowed.Pos()))
	}
}

View File

@ -0,0 +1,82 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
This file contains the code to check for suspicious shifts.
*/
package main
import (
"go/ast"
"go/constant"
"go/token"
"go/types"
)
func init() {
	// The shift check visits binary shift expressions and shift-assignments.
	register("shift",
		"check for useless shifts",
		checkShift,
		binaryExpr, assignStmt)
}
// checkShift examines shift expressions and single-pair shift-assignments,
// delegating the width comparison to checkLongShift.
func checkShift(f *File, node ast.Node) {
	switch n := node.(type) {
	case *ast.BinaryExpr:
		if n.Op == token.SHL || n.Op == token.SHR {
			checkLongShift(f, n, n.X, n.Y)
		}
	case *ast.AssignStmt:
		if len(n.Lhs) == 1 && len(n.Rhs) == 1 &&
			(n.Tok == token.SHL_ASSIGN || n.Tok == token.SHR_ASSIGN) {
			checkLongShift(f, n, n.Lhs[0], n.Rhs[0])
		}
	}
}
// checkLongShift checks if shift or shift-assign operations shift by more than
// the length of the underlying variable.
func checkLongShift(f *File, node ast.Node, x, y ast.Expr) {
	shiftVal := f.pkg.types[y].Value
	if shiftVal == nil {
		return // Shift amount is not a constant.
	}
	amt, ok := constant.Int64Val(shiftVal)
	if !ok {
		return
	}
	operandType := f.pkg.types[x].Type
	if operandType == nil {
		return
	}
	basic, ok := operandType.Underlying().(*types.Basic)
	if !ok {
		return
	}
	var size int64
	var qualifier string
	switch basic.Kind() {
	case types.Uint8, types.Int8:
		size = 8
	case types.Uint16, types.Int16:
		size = 16
	case types.Uint32, types.Int32:
		size = 32
	case types.Uint64, types.Int64:
		size = 64
	case types.Int, types.Uint, types.Uintptr:
		// These types may be as small as 32 bits, but no smaller.
		size = 32
		qualifier = "might be "
	default:
		return
	}
	if amt >= size {
		f.Badf(node.Pos(), "%s %stoo small for shift of %d", f.gofmt(x), qualifier, amt)
	}
}

View File

@ -0,0 +1,122 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the test for canonical struct tags.
package main
import (
"errors"
"go/ast"
"reflect"
"strconv"
)
func init() {
	// The structtags check visits every struct field.
	register("structtags",
		"check that struct field tags have canonical format and apply to exported fields as needed",
		checkCanonicalFieldTag,
		field)
}
// checkCanonicalFieldTag checks a struct field tag.
func checkCanonicalFieldTag(f *File, node ast.Node) {
	field := node.(*ast.Field)
	if field.Tag == nil {
		return
	}
	tag, err := strconv.Unquote(field.Tag.Value)
	if err != nil {
		f.Badf(field.Pos(), "unable to read struct tag %s", field.Tag.Value)
		return
	}
	if err := validateStructTag(tag); err != nil {
		f.Badf(field.Pos(), "struct field tag %s not compatible with reflect.StructTag.Get: %s", field.Tag.Value, err)
	}
	// Check for use of json or xml tags with unexported fields.
	switch {
	case len(field.Names) == 0:
		// Embedded struct. Nothing to do for now, but that
		// may change, depending on what happens with issue 7363.
		return
	case field.Names[0].IsExported():
		return
	}
	structTag := reflect.StructTag(tag)
	for _, enc := range []string{"json", "xml"} {
		if structTag.Get(enc) != "" {
			f.Badf(field.Pos(), "struct field %s has %s tag but is not exported", field.Names[0].Name, enc)
			return
		}
	}
}
var (
	errTagSyntax      = errors.New("bad syntax for struct tag pair")
	errTagKeySyntax   = errors.New("bad syntax for struct tag key")
	errTagValueSyntax = errors.New("bad syntax for struct tag value")
)

// validateStructTag parses the struct tag and returns an error if it is not
// in the canonical format, which is a space-separated list of key:"value"
// settings. The value may contain spaces.
func validateStructTag(tag string) error {
	// This code is based on the StructTag.Get code in package reflect.
	for tag != "" {
		// Skip leading space characters.
		start := 0
		for start < len(tag) && tag[start] == ' ' {
			start++
		}
		tag = tag[start:]
		if tag == "" {
			break
		}
		// Scan the key up to a colon. A space, a quote or a control character
		// is a syntax error. Strictly speaking, control chars include the
		// range [0x7f, 0x9f], not just [0x00, 0x1f], but in practice we
		// ignore the multi-byte control characters as it is simpler to
		// inspect the tag's bytes than the tag's runes.
		keyEnd := 0
		for keyEnd < len(tag) && tag[keyEnd] > ' ' && tag[keyEnd] != ':' && tag[keyEnd] != '"' && tag[keyEnd] != 0x7f {
			keyEnd++
		}
		switch {
		case keyEnd == 0:
			return errTagKeySyntax
		case keyEnd+1 >= len(tag) || tag[keyEnd] != ':':
			return errTagSyntax
		case tag[keyEnd+1] != '"':
			return errTagValueSyntax
		}
		tag = tag[keyEnd+1:]
		// Scan the quoted string to find the value, honoring escapes.
		valEnd := 1
		for valEnd < len(tag) && tag[valEnd] != '"' {
			if tag[valEnd] == '\\' {
				valEnd++
			}
			valEnd++
		}
		if valEnd >= len(tag) {
			return errTagValueSyntax
		}
		quoted := tag[:valEnd+1]
		tag = tag[valEnd+1:]
		if _, err := strconv.Unquote(quoted); err != nil {
			return errTagValueSyntax
		}
	}
	return nil
}

View File

@ -0,0 +1,187 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"go/ast"
"go/types"
"strings"
"unicode"
"unicode/utf8"
)
func init() {
	// The tests check visits every function declaration.
	register("tests",
		"check for common mistaken usages of tests/documentation examples",
		checkTestFunctions,
		funcDecl)
}
// isExampleSuffix reports whether s is a valid example-name suffix:
// non-empty and starting with a lowercase rune.
func isExampleSuffix(s string) bool {
	first, n := utf8.DecodeRuneInString(s)
	if n == 0 {
		return false
	}
	return unicode.IsLower(first)
}
// isTestSuffix reports whether name is a valid suffix after "Test" or
// "Benchmark": empty, or not starting with a lowercase rune.
func isTestSuffix(name string) bool {
	if name == "" {
		// "Test" is ok.
		return true
	}
	first, _ := utf8.DecodeRuneInString(name)
	return !unicode.IsLower(first)
}
func isTestParam(typ ast.Expr, wantType string) bool {
ptr, ok := typ.(*ast.StarExpr)
if !ok {
// Not a pointer.
return false
}
// No easy way of making sure it's a *testing.T or *testing.B:
// ensure the name of the type matches.
if name, ok := ptr.X.(*ast.Ident); ok {
return name.Name == wantType
}
if sel, ok := ptr.X.(*ast.SelectorExpr); ok {
return sel.Sel.Name == wantType
}
return false
}
func lookup(name string, scopes []*types.Scope) types.Object {
for _, scope := range scopes {
if o := scope.Lookup(name); o != nil {
return o
}
}
return nil
}
// extendedScope returns the scopes to search for identifiers referenced from
// example names: the file's own package plus, when it can be determined, the
// package under test.
func extendedScope(f *File) []*types.Scope {
	scopes := []*types.Scope{f.pkg.typesPkg.Scope()}
	if f.basePkg != nil {
		return append(scopes, f.basePkg.typesPkg.Scope())
	}
	// basePkg is not specified (e.g. when checking a single file); try to
	// find the package under test among the imports of the _test package.
	pkgName := f.pkg.typesPkg.Name()
	if base := strings.TrimSuffix(pkgName, "_test"); base != pkgName {
		for _, imp := range f.pkg.typesPkg.Imports() {
			if imp.Name() == base {
				scopes = append(scopes, imp.Scope())
				break
			}
		}
	}
	return scopes
}
// checkExample checks the signature and name of a documentation example
// function (Example, ExampleFoo, ExampleFoo_Method, Example_suffix, ...),
// reporting problems through report.
func checkExample(fn *ast.FuncDecl, f *File, report reporter) {
	fnName := fn.Name.Name
	// Examples must take no parameters and return nothing.
	if params := fn.Type.Params; len(params.List) != 0 {
		report("%s should be niladic", fnName)
	}
	if results := fn.Type.Results; results != nil && len(results.List) != 0 {
		report("%s should return nothing", fnName)
	}
	if filesRun && !includesNonTest {
		// The coherence checks between a test and the package it tests
		// will report false positives if no non-test files have
		// been provided.
		return
	}
	if fnName == "Example" {
		// Nothing more to do.
		return
	}
	var (
		exName = strings.TrimPrefix(fnName, "Example")
		elems  = strings.SplitN(exName, "_", 3)
		ident  = elems[0] // identifier the example claims to document
		obj    = lookup(ident, extendedScope(f))
	)
	if ident != "" && obj == nil {
		// Check ExampleFoo and ExampleBadFoo.
		report("%s refers to unknown identifier: %s", fnName, ident)
		// Abort since obj is absent and no subsequent checks can be performed.
		return
	}
	if len(elems) < 2 {
		// Nothing more to do.
		return
	}
	if ident == "" {
		// Check Example_suffix and Example_BadSuffix.
		if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) {
			report("%s has malformed example suffix: %s", fnName, residual)
		}
		return
	}
	mmbr := elems[1]
	if !isExampleSuffix(mmbr) {
		// Check ExampleFoo_Method and ExampleFoo_BadMethod.
		if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj == nil {
			report("%s refers to unknown field or method: %s.%s", fnName, ident, mmbr)
		}
	}
	if len(elems) == 3 && !isExampleSuffix(elems[2]) {
		// Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix.
		report("%s has malformed example suffix: %s", fnName, elems[2])
	}
}
// checkTest checks a Test* or Benchmark* function: it must have the expected
// signature shape, and its name suffix must be well formed.
func checkTest(fn *ast.FuncDecl, prefix string, report reporter) {
	// Only functions with no results and exactly one single-name parameter
	// are candidates.
	if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 {
		return
	}
	if fn.Type.Params == nil || len(fn.Type.Params.List) != 1 || len(fn.Type.Params.List[0].Names) > 1 {
		return
	}
	// The param must look like a *testing.T or *testing.B.
	if !isTestParam(fn.Type.Params.List[0].Type, prefix[:1]) {
		return
	}
	if !isTestSuffix(fn.Name.Name[len(prefix):]) {
		report("%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
	}
}

// reporter is the function used to report a problem with a test function.
type reporter func(format string, args ...interface{})
// checkTestFunctions walks Test, Benchmark and Example functions checking
// malformed names, wrong signatures and examples documenting inexistent
// identifiers.
func checkTestFunctions(f *File, node ast.Node) {
	if !strings.HasSuffix(f.name, "_test.go") {
		return
	}
	fn, ok := node.(*ast.FuncDecl)
	if !ok || fn.Recv != nil {
		// Ignore non-functions or functions with receivers.
		return
	}
	report := func(format string, args ...interface{}) { f.Badf(node.Pos(), format, args...) }
	name := fn.Name.Name
	switch {
	case strings.HasPrefix(name, "Example"):
		checkExample(fn, f, report)
	case strings.HasPrefix(name, "Test"):
		checkTest(fn, "Test", report)
	case strings.HasPrefix(name, "Benchmark"):
		checkTest(fn, "Benchmark", report)
	}
}

View File

@ -0,0 +1,281 @@
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the pieces of the tool that use typechecking from the go/types package.
package main
import (
"go/ast"
"go/importer"
"go/token"
"go/types"
)
// stdImporter is the importer we use to import packages.
// It is created during initialization so that all packages
// are imported by the same importer.
var stdImporter = importer.Default()

// Interfaces resolved once at startup (see init below).
var (
	errorType     *types.Interface // the predeclared error interface
	stringerType  *types.Interface // possibly nil
	formatterType *types.Interface // possibly nil
)
func init() {
	// error is predeclared, so its interface is always available.
	errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
	// fmt.Stringer/fmt.Formatter may be unavailable (importType returns nil
	// when the fmt package cannot be imported); the corresponding globals
	// then stay nil.
	if typ := importType("fmt", "Stringer"); typ != nil {
		stringerType = typ.Underlying().(*types.Interface)
	}
	if typ := importType("fmt", "Formatter"); typ != nil {
		formatterType = typ.Underlying().(*types.Interface)
	}
}
// importType returns the type denoted by the qualified identifier
// path.name, and adds the respective package to the imports map
// as a side effect. In case of an error, importType returns nil.
func importType(path, name string) types.Type {
	pkg, err := stdImporter.Import(path)
	if err != nil {
		// This can happen if the package at path hasn't been compiled yet.
		warnf("import failed: %v", err)
		return nil
	}
	obj, ok := pkg.Scope().Lookup(name).(*types.TypeName)
	if !ok {
		warnf("invalid type name %q", name)
		return nil
	}
	return obj.Type()
}
// check type-checks the package's files, populating pkg's defs, uses,
// selectors, spans, and types maps. Checking continues past the first
// type error (the Config.Error hook is a no-op); the first error, if
// any, is returned after all spans have been grown.
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) error {
	pkg.defs = make(map[*ast.Ident]types.Object)
	pkg.uses = make(map[*ast.Ident]types.Object)
	pkg.selectors = make(map[*ast.SelectorExpr]*types.Selection)
	pkg.spans = make(map[types.Object]Span)
	pkg.types = make(map[ast.Expr]types.TypeAndValue)
	config := types.Config{
		// We use the same importer for all imports to ensure that
		// everybody sees identical packages for the given paths.
		Importer: stdImporter,
		// By providing a Config with our own error function, it will continue
		// past the first error. There is no need for that function to do anything.
		Error: func(error) {},
	}
	info := &types.Info{
		Selections: pkg.selectors,
		Types:      pkg.types,
		Defs:       pkg.defs,
		Uses:       pkg.uses,
	}
	typesPkg, err := config.Check(pkg.path, fs, astFiles, info)
	// typesPkg may be partially populated even on error; keep it.
	pkg.typesPkg = typesPkg
	// update spans
	for id, obj := range pkg.defs {
		pkg.growSpan(id, obj)
	}
	for id, obj := range pkg.uses {
		pkg.growSpan(id, obj)
	}
	return err
}
// matchArgType reports an error if printf verb t is not appropriate
// for operand arg.
//
// typ is used only for recursive calls; external callers must supply nil.
//
// (Recursion arises from the compound types {map,chan,slice} which
// may be printed with %d etc. if that is appropriate for their element
// types.)
func (f *File) matchArgType(t printfArgType, typ types.Type, arg ast.Expr) bool {
	// Start a fresh in-progress set for this top-level match.
	seen := make(map[types.Type]bool)
	return f.matchArgTypeInternal(t, typ, arg, seen)
}
// matchArgTypeInternal is the internal version of matchArgType. It carries a map
// remembering what types are in progress so we don't recur when faced with recursive
// types or mutually recursive types.
//
// It reports whether a value of (underlying) type typ may legitimately
// be formatted with a verb whose accepted-kind bits are t.
func (f *File) matchArgTypeInternal(t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
	// %v, %T accept any argument type.
	if t == anyType {
		return true
	}
	if typ == nil {
		// external call
		typ = f.pkg.types[arg].Type
		if typ == nil {
			return true // probably a type check problem
		}
	}
	// If the type implements fmt.Formatter, we have nothing to check.
	// formatterType may be nil - be conservative and check for Format method in that case.
	if formatterType != nil && types.Implements(typ, formatterType) || f.hasMethod(typ, "Format") {
		return true
	}
	// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
	if t&argString != 0 {
		if types.AssertableTo(errorType, typ) || stringerType != nil && types.AssertableTo(stringerType, typ) {
			return true
		}
	}
	typ = typ.Underlying()
	if inProgress[typ] {
		// We're already looking at this type. The call that started it will take care of it.
		return true
	}
	inProgress[typ] = true
	switch typ := typ.(type) {
	case *types.Signature:
		// Functions print only with %p.
		return t&argPointer != 0
	case *types.Map:
		// Recur: map[int]int matches %d.
		return t&argPointer != 0 ||
			(f.matchArgTypeInternal(t, typ.Key(), arg, inProgress) && f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress))
	case *types.Chan:
		// Channels print only with %p.
		return t&argPointer != 0
	case *types.Array:
		// Same as slice.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem().Underlying(), arg, inProgress)
	case *types.Slice:
		// Same as array.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d. But watch out for
		//	type T []T
		// If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress)
	case *types.Pointer:
		// Ugly, but dealing with an edge case: a known pointer to an invalid type,
		// probably something from a failed import.
		if typ.Elem().String() == "invalid type" {
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", f.gofmt(arg))
			}
			return true // special case
		}
		// If it's actually a pointer with %p, it prints as one.
		if t == argPointer {
			return true
		}
		// If it's pointer to struct, that's equivalent in our analysis to whether we can print the struct.
		if str, ok := typ.Elem().Underlying().(*types.Struct); ok {
			return f.matchStructArgType(t, str, arg, inProgress)
		}
		// The rest can print with %p as pointers, or as integers with %x etc.
		return t&(argInt|argPointer) != 0
	case *types.Struct:
		return f.matchStructArgType(t, typ, arg, inProgress)
	case *types.Interface:
		// If the static type of the argument is empty interface, there's little we can do.
		// Example:
		//	func f(x interface{}) { fmt.Printf("%s", x) }
		// Whether x is valid for %s depends on the type of the argument to f. One day
		// we will be able to do better. For now, we assume that empty interface is OK
		// but non-empty interfaces, with Stringer and Error handled above, are errors.
		return typ.NumMethods() == 0
	case *types.Basic:
		switch typ.Kind() {
		case types.UntypedBool,
			types.Bool:
			return t&argBool != 0
		case types.UntypedInt,
			types.Int,
			types.Int8,
			types.Int16,
			types.Int32,
			types.Int64,
			types.Uint,
			types.Uint8,
			types.Uint16,
			types.Uint32,
			types.Uint64,
			types.Uintptr:
			return t&argInt != 0
		case types.UntypedFloat,
			types.Float32,
			types.Float64:
			return t&argFloat != 0
		case types.UntypedComplex,
			types.Complex64,
			types.Complex128:
			return t&argComplex != 0
		case types.UntypedString,
			types.String:
			return t&argString != 0
		case types.UnsafePointer:
			return t&(argPointer|argInt) != 0
		case types.UntypedRune:
			return t&(argInt|argRune) != 0
		case types.UntypedNil:
			return t&argPointer != 0 // TODO?
		case types.Invalid:
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v has invalid or unknown type", f.gofmt(arg))
			}
			return true // Probably a type check problem.
		}
		// Every BasicKind should have been handled above.
		panic("unreachable")
	}
	return false
}
// hasBasicType reports whether x's type is a types.Basic with the given kind.
func (f *File) hasBasicType(x ast.Expr, kind types.BasicKind) bool {
	typ := f.pkg.types[x].Type
	if typ != nil {
		typ = typ.Underlying()
	}
	// A failed assertion (including typ == nil) yields ok == false.
	basic, ok := typ.(*types.Basic)
	return ok && basic.Kind() == kind
}
// matchStructArgType reports whether all the elements of the struct match the expected
// type. For instance, with "%d" all the elements must be printable with the "%d" format.
func (f *File) matchStructArgType(t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool {
	n := typ.NumFields()
	for i := 0; i < n; i++ {
		field := typ.Field(i)
		if !f.matchArgTypeInternal(t, field.Type(), arg, inProgress) {
			return false
		}
	}
	return true
}
// hasMethod reports whether the type contains a method with the given name.
// It is part of the workaround for Formatters and should be deleted when
// that workaround is no longer necessary.
// TODO: This could be better once issue 6259 is fixed.
func (f *File) hasMethod(typ types.Type, name string) bool {
	// Assume we have an addressable variable of type typ.
	obj, _, _ := types.LookupFieldOrMethod(typ, true, f.pkg.typesPkg, name)
	if _, isFunc := obj.(*types.Func); isFunc {
		return true
	}
	return false
}

View File

@ -0,0 +1,97 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Check for invalid uintptr -> unsafe.Pointer conversions.
package main
import (
"go/ast"
"go/token"
"go/types"
)
// init registers the unsafeptr check to run on every call expression.
func init() {
	register("unsafeptr",
		"check for misuse of unsafe.Pointer",
		checkUnsafePointer,
		callExpr)
}
// checkUnsafePointer flags single-argument conversions of a uintptr to
// unsafe.Pointer whose operand is not safely derived (see isSafeUintptr).
func checkUnsafePointer(f *File, node ast.Node) {
	call := node.(*ast.CallExpr)
	if len(call.Args) != 1 {
		return
	}
	isConv := f.hasBasicType(call.Fun, types.UnsafePointer) && f.hasBasicType(call.Args[0], types.Uintptr)
	if isConv && !f.isSafeUintptr(call.Args[0]) {
		f.Badf(call.Pos(), "possible misuse of unsafe.Pointer")
	}
}
// isSafeUintptr reports whether x - already known to be a uintptr -
// is safe to convert to unsafe.Pointer. It is safe if x is itself derived
// directly from an unsafe.Pointer via conversion and pointer arithmetic
// or if x is the result of reflect.Value.Pointer or reflect.Value.UnsafeAddr
// or obtained from the Data field of a *reflect.SliceHeader or *reflect.StringHeader.
func (f *File) isSafeUintptr(x ast.Expr) bool {
	switch x := x.(type) {
	case *ast.ParenExpr:
		// Parentheses are transparent.
		return f.isSafeUintptr(x.X)
	case *ast.SelectorExpr:
		switch x.Sel.Name {
		case "Data":
			// reflect.SliceHeader and reflect.StringHeader are okay,
			// but only if they are pointing at a real slice or string.
			// It's not okay to do:
			//	var x SliceHeader
			//	x.Data = uintptr(unsafe.Pointer(...))
			//	... use x ...
			//	p := unsafe.Pointer(x.Data)
			// because in the middle the garbage collector doesn't
			// see x.Data as a pointer and so x.Data may be dangling
			// by the time we get to the conversion at the end.
			// For now approximate by saying that *Header is okay
			// but Header is not.
			pt, ok := f.pkg.types[x.X].Type.(*types.Pointer)
			if ok {
				t, ok := pt.Elem().(*types.Named)
				if ok && t.Obj().Pkg().Path() == "reflect" {
					switch t.Obj().Name() {
					case "StringHeader", "SliceHeader":
						return true
					}
				}
			}
		}
	case *ast.CallExpr:
		switch len(x.Args) {
		case 0:
			// maybe call to reflect.Value.Pointer or reflect.Value.UnsafeAddr.
			sel, ok := x.Fun.(*ast.SelectorExpr)
			if !ok {
				break
			}
			switch sel.Sel.Name {
			case "Pointer", "UnsafeAddr":
				// Only the reflect.Value methods count, not arbitrary
				// methods with the same names.
				t, ok := f.pkg.types[sel.X].Type.(*types.Named)
				if ok && t.Obj().Pkg().Path() == "reflect" && t.Obj().Name() == "Value" {
					return true
				}
			}
		case 1:
			// maybe conversion of uintptr to unsafe.Pointer
			return f.hasBasicType(x.Fun, types.Uintptr) && f.hasBasicType(x.Args[0], types.UnsafePointer)
		}
	case *ast.BinaryExpr:
		// Pointer arithmetic: a safe base plus/minus a plain integer
		// offset is safe; adding two "safe" uintptrs is not.
		switch x.Op {
		case token.ADD, token.SUB:
			return f.isSafeUintptr(x.X) && !f.isSafeUintptr(x.Y)
		}
	}
	return false
}

View File

@ -0,0 +1,93 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file defines the check for unused results of calls to certain
// pure functions.
package main
import (
"flag"
"go/ast"
"go/token"
"go/types"
"strings"
)
// unusedFuncsFlag lists package-qualified functions whose results must
// not be discarded.
var unusedFuncsFlag = flag.String("unusedfuncs",
	"errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse",
	"comma-separated list of functions whose results must be used")

// unusedStringMethodsFlag lists names of methods of type func() string
// whose results must not be discarded.
var unusedStringMethodsFlag = flag.String("unusedstringmethods",
	"Error,String",
	"comma-separated list of names of methods of type func() string whose results must be used")
// init registers the unusedresult check to run on every expression statement.
func init() {
	register("unusedresult",
		"check for unused result of calls to functions in -unusedfuncs list and methods in -unusedstringmethods list",
		checkUnusedResult,
		exprStmt)
}
// sigNoArgsStringResult is the signature func() string, used to
// recognize String/Error-style methods.
var sigNoArgsStringResult = types.NewSignature(nil, nil,
	types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])),
	false)

// Name sets populated from the flags above by initUnusedFlags.
var unusedFuncs = make(map[string]bool)
var unusedStringMethods = make(map[string]bool)
// initUnusedFlags populates the unusedFuncs and unusedStringMethods
// sets from their comma-separated command-line flag values. An empty
// name within a list is a usage error.
func initUnusedFlags() {
	split := func(list string, set map[string]bool) {
		if list == "" {
			return
		}
		for _, name := range strings.Split(list, ",") {
			if len(name) == 0 {
				flag.Usage()
			}
			set[name] = true
		}
	}
	split(*unusedFuncsFlag, unusedFuncs)
	split(*unusedStringMethodsFlag, unusedStringMethods)
}
// checkUnusedResult reports expression statements that discard the
// result of a call to a function in the -unusedfuncs list or to a
// func() string method in the -unusedstringmethods list.
func checkUnusedResult(f *File, n ast.Node) {
	call, ok := unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr)
	if !ok {
		return // not a call statement
	}
	fun := unparen(call.Fun)
	if f.pkg.types[fun].IsType() {
		return // a conversion, not a call
	}
	selector, ok := fun.(*ast.SelectorExpr)
	if !ok {
		return // neither a method call nor a qualified ident
	}
	sel, ok := f.pkg.selectors[selector]
	if ok && sel.Kind() == types.MethodVal {
		// method (e.g. foo.String())
		obj := sel.Obj().(*types.Func)
		sig := sel.Type().(*types.Signature)
		if types.Identical(sig, sigNoArgsStringResult) {
			if unusedStringMethods[obj.Name()] {
				f.Badf(call.Lparen, "result of (%s).%s call not used",
					sig.Recv().Type(), obj.Name())
			}
		}
	} else if !ok {
		// package-qualified function (e.g. fmt.Errorf).
		// A plain map read suffices here: a missing entry yields nil,
		// and the type assertion below handles nil safely.
		// (Was `obj, _ := ...` — redundant two-value form flagged by
		// gosimple; the assertion also no longer shadows obj.)
		obj := f.pkg.uses[selector.Sel]
		if fn, ok := obj.(*types.Func); ok {
			qname := fn.Pkg().Path() + "." + fn.Name()
			if unusedFuncs[qname] {
				f.Badf(call.Lparen, "result of %v call not used", qname)
			}
		}
	}
}

View File

@ -0,0 +1,27 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.9
package load
import (
"strings"
)
// hasPathPrefix reports whether the slash-separated path s begins with
// the path elements in prefix (matching whole elements, not bytes).
func hasPathPrefix(s, prefix string) bool {
	if len(s) == len(prefix) {
		return s == prefix
	}
	if len(s) < len(prefix) {
		return false
	}
	// prefix ending in '/' already marks an element boundary.
	if prefix != "" && prefix[len(prefix)-1] == '/' {
		return strings.HasPrefix(s, prefix)
	}
	// Otherwise the next byte of s must be the separator.
	return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
}

View File

@ -0,0 +1,25 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.9
// Package load loads packages.
package load
import (
"strings"
)
// isStandardImportPath reports whether $GOROOT/src/path should be considered
// part of the standard distribution. For historical reasons we allow people to add
// their own code to $GOROOT instead of using $GOPATH, but we assume that
// code will start with a domain name (dot in the first element).
func isStandardImportPath(path string) bool {
	elem := path
	if i := strings.Index(path, "/"); i >= 0 {
		elem = path[:i]
	}
	return !strings.Contains(elem, ".")
}

View File

@ -0,0 +1,354 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.9
package load
import (
"fmt"
"go/build"
"log"
"os"
"path"
"path/filepath"
"regexp"
"strings"
)
// Context specifies values for operation of ImportPaths that would
// otherwise come from cmd/go/internal/cfg package.
//
// This is a construct added for gotool purposes and doesn't have
// an equivalent upstream in cmd/go.
type Context struct {
	// BuildContext is the build context to use.
	BuildContext build.Context

	// GOROOTsrc is the location of the src directory in GOROOT.
	// At this time, it's used only in MatchPackages to skip
	// GOROOT/src entry from BuildContext.SrcDirs output.
	GOROOTsrc string
}
// allPackages returns all the packages that can be found
// under the $GOPATH directories and $GOROOT matching pattern.
// The pattern is either "all" (all packages), "std" (standard packages),
// "cmd" (standard commands), or a path including "...".
// It warns on stderr when nothing matches.
func (c *Context) allPackages(pattern string) []string {
	matched := c.MatchPackages(pattern)
	if len(matched) == 0 {
		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
	}
	return matched
}
// allPackagesInFS is like allPackages but is passed a pattern
// beginning ./ or ../, meaning it should scan the tree rooted
// at the given directory. There are ... in the pattern too.
// It warns on stderr when nothing matches.
func (c *Context) allPackagesInFS(pattern string) []string {
	matched := c.MatchPackagesInFS(pattern)
	if len(matched) == 0 {
		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
	}
	return matched
}
// MatchPackages returns a list of package paths matching pattern
// (see go help packages for pattern syntax).
// It walks every source directory (restricted to GOROOT/src for the
// "std" and "cmd" meta-patterns) and imports each candidate directory
// to decide whether it is a package.
func (c *Context) MatchPackages(pattern string) []string {
	// Meta-patterns ("all", "std", "cmd") match every name; otherwise
	// compile the pattern into matcher and tree-pruning functions.
	match := func(string) bool { return true }
	treeCanMatch := func(string) bool { return true }
	if !IsMetaPackage(pattern) {
		match = matchPattern(pattern)
		treeCanMatch = treeCanMatchPattern(pattern)
	}
	have := map[string]bool{
		"builtin": true, // ignore pseudo-package that exists only for documentation
	}
	if !c.BuildContext.CgoEnabled {
		have["runtime/cgo"] = true // ignore during walk
	}
	var pkgs []string
	for _, src := range c.BuildContext.SrcDirs() {
		if (pattern == "std" || pattern == "cmd") && src != c.GOROOTsrc {
			continue
		}
		src = filepath.Clean(src) + string(filepath.Separator)
		root := src
		if pattern == "cmd" {
			root += "cmd" + string(filepath.Separator)
		}
		filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
			if err != nil || path == src {
				return nil
			}
			want := true
			// Avoid .foo, _foo, and testdata directory trees.
			_, elem := filepath.Split(path)
			if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
				want = false
			}
			name := filepath.ToSlash(path[len(src):])
			if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") {
				// The name "std" is only the standard library.
				// If the name is cmd, it's the root of the command tree.
				want = false
			}
			if !treeCanMatch(name) {
				want = false
			}
			if !fi.IsDir() {
				// Non-directories are never packages, but warn about
				// symlinks to directories, which the walk ignores.
				if fi.Mode()&os.ModeSymlink != 0 && want {
					if target, err := os.Stat(path); err == nil && target.IsDir() {
						fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path)
					}
				}
				return nil
			}
			if !want {
				return filepath.SkipDir
			}
			if have[name] {
				return nil
			}
			have[name] = true
			if !match(name) {
				return nil
			}
			pkg, err := c.BuildContext.ImportDir(path, 0)
			if err != nil {
				if _, noGo := err.(*build.NoGoError); noGo {
					return nil
				}
				// NOTE(review): other errors fall through and pkg is used
				// below — relies on ImportDir returning a partial package
				// on error; confirm against go/build docs.
			}
			// If we are expanding "cmd", skip main
			// packages under cmd/vendor. At least as of
			// March, 2017, there is one there for the
			// vendored pprof tool.
			if pattern == "cmd" && strings.HasPrefix(pkg.ImportPath, "cmd/vendor") && pkg.Name == "main" {
				return nil
			}
			pkgs = append(pkgs, name)
			return nil
		})
	}
	return pkgs
}
// MatchPackagesInFS returns a list of package paths matching pattern,
// which must begin with ./ or ../
// (see go help packages for pattern syntax).
func (c *Context) MatchPackagesInFS(pattern string) []string {
	// Find directory to begin the scan.
	// Could be smarter but this one optimization
	// is enough for now, since ... is usually at the
	// end of a path.
	i := strings.Index(pattern, "...")
	dir, _ := path.Split(pattern[:i])
	// pattern begins with ./ or ../.
	// path.Clean will discard the ./ but not the ../.
	// We need to preserve the ./ for pattern matching
	// and in the returned import paths.
	prefix := ""
	if strings.HasPrefix(pattern, "./") {
		prefix = "./"
	}
	match := matchPattern(pattern)
	var pkgs []string
	filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
		if err != nil || !fi.IsDir() {
			return nil
		}
		if path == dir {
			// filepath.Walk starts at dir and recurses. For the recursive case,
			// the path is the result of filepath.Join, which calls filepath.Clean.
			// The initial case is not Cleaned, though, so we do this explicitly.
			//
			// This converts a path like "./io/" to "io". Without this step, running
			// "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io
			// package, because prepending the prefix "./" to the unclean path would
			// result in "././io", and match("././io") returns false.
			path = filepath.Clean(path)
		}
		// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
		_, elem := filepath.Split(path)
		dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
		if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
			return filepath.SkipDir
		}
		name := prefix + filepath.ToSlash(path)
		if !match(name) {
			return nil
		}
		// We keep the directory if we can import it, or if we can't import it
		// due to invalid Go source files. This means that directories containing
		// parse errors will be built (and fail) instead of being silently skipped
		// as not matching the pattern. Go 1.5 and earlier skipped, but that
		// behavior means people miss serious mistakes.
		// See golang.org/issue/11407.
		if p, err := c.BuildContext.ImportDir(path, 0); err != nil && (p == nil || len(p.InvalidGoFiles) == 0) {
			if _, noGo := err.(*build.NoGoError); !noGo {
				log.Print(err)
			}
			return nil
		}
		pkgs = append(pkgs, name)
		return nil
	})
	return pkgs
}
// treeCanMatchPattern(pattern)(name) reports whether
// name or children of name can possibly match pattern.
// Pattern is the same limited glob accepted by matchPattern.
func treeCanMatchPattern(pattern string) func(name string) bool {
	prefix := pattern
	wildCard := false
	if i := strings.Index(prefix, "..."); i >= 0 {
		wildCard = true
		prefix = prefix[:i]
	}
	return func(name string) bool {
		if len(name) <= len(prefix) && hasPathPrefix(prefix, name) {
			return true
		}
		return wildCard && strings.HasPrefix(name, prefix)
	}
}
// matchPattern(pattern)(name) reports whether
// name matches pattern. Pattern is a limited glob
// pattern in which '...' means 'any string' and there
// is no other special syntax.
// Unfortunately, there are two special cases. Quoting "go help packages":
//
// First, /... at the end of the pattern can match an empty string,
// so that net/... matches both net and packages in its subdirectories, like net/http.
// Second, any slash-separated pattern element containing a wildcard never
// participates in a match of the "vendor" element in the path of a vendored
// package, so that ./... does not match packages in subdirectories of
// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do.
// Note, however, that a directory named vendor that itself contains code
// is not a vendored package: cmd/vendor would be a command named vendor,
// and the pattern cmd/... matches it.
func matchPattern(pattern string) func(name string) bool {
	// Convert pattern to regular expression.
	// The strategy for the trailing /... is to nest it in an explicit ? expression.
	// The strategy for the vendor exclusion is to change the unmatchable
	// vendor strings to a disallowed code point (vendorChar) and to use
	// "(anything but that codepoint)*" as the implementation of the ... wildcard.
	// This is a bit complicated but the obvious alternative,
	// namely a hand-written search like in most shell glob matchers,
	// is too easy to make accidentally exponential.
	// Using package regexp guarantees linear-time matching.
	const vendorChar = "\x00"
	if strings.Contains(pattern, vendorChar) {
		// A NUL in the pattern can never match a real path.
		return func(name string) bool { return false }
	}
	re := regexp.QuoteMeta(pattern)
	re = replaceVendor(re, vendorChar)
	switch {
	case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`):
		re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)`
	case re == vendorChar+`/\.\.\.`:
		re = `(/vendor|/` + vendorChar + `/\.\.\.)`
	case strings.HasSuffix(re, `/\.\.\.`):
		// Trailing /... may also match the empty string (net/... matches net).
		re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?`
	}
	re = strings.Replace(re, `\.\.\.`, `[^`+vendorChar+`]*`, -1)
	reg := regexp.MustCompile(`^` + re + `$`)
	return func(name string) bool {
		if strings.Contains(name, vendorChar) {
			return false
		}
		return reg.MatchString(replaceVendor(name, vendorChar))
	}
}
// replaceVendor returns the result of replacing
// non-trailing vendor path elements in x with repl.
func replaceVendor(x, repl string) string {
	if !strings.Contains(x, "vendor") {
		return x
	}
	parts := strings.Split(x, "/")
	last := len(parts) - 1
	for i, p := range parts {
		// The final element is never rewritten.
		if i < last && p == "vendor" {
			parts[i] = repl
		}
	}
	return strings.Join(parts, "/")
}
// ImportPaths returns the import paths to use for the given command line,
// expanding "..." wildcards against the filesystem (for local imports)
// or the package tree (otherwise).
func (c *Context) ImportPaths(args []string) []string {
	var out []string
	for _, a := range c.ImportPathsNoDotExpansion(args) {
		if !strings.Contains(a, "...") {
			out = append(out, a)
			continue
		}
		if build.IsLocalImport(a) {
			out = append(out, c.allPackagesInFS(a)...)
		} else {
			out = append(out, c.allPackages(a)...)
		}
	}
	return out
}
// ImportPathsNoDotExpansion returns the import paths to use for the given
// command line, but it does no ... expansion.
func (c *Context) ImportPathsNoDotExpansion(args []string) []string {
	if len(args) == 0 {
		return []string{"."}
	}
	var out []string
	for _, arg := range args {
		// Arguments are supposed to be import paths, but
		// as a courtesy to Windows developers, rewrite \ to /
		// in command-line arguments. Handles .\... and so on.
		if filepath.Separator == '\\' {
			arg = strings.Replace(arg, `\`, `/`, -1)
		}
		// Put argument in canonical form, but preserve leading ./.
		if strings.HasPrefix(arg, "./") {
			arg = "./" + path.Clean(arg)
			if arg == "./." {
				arg = "."
			}
		} else {
			arg = path.Clean(arg)
		}
		if IsMetaPackage(arg) {
			out = append(out, c.allPackages(arg)...)
			continue
		}
		out = append(out, arg)
	}
	return out
}
// IsMetaPackage checks if name is a reserved package name that expands to multiple packages.
func IsMetaPackage(name string) bool {
	switch name {
	case "std", "cmd", "all":
		return true
	}
	return false
}

View File

@ -26,285 +26,31 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// +build go1.9
package gotool package gotool
import ( import (
"fmt"
"go/build"
"log"
"os"
"path"
"path/filepath" "path/filepath"
"regexp"
"strings" "github.com/kisielk/gotool/internal/load"
) )
// This file contains code from the Go distribution.
// matchPattern(pattern)(name) reports whether
// name matches pattern. Pattern is a limited glob
// pattern in which '...' means 'any string' and there
// is no other special syntax.
func matchPattern(pattern string) func(name string) bool {
re := regexp.QuoteMeta(pattern)
re = strings.Replace(re, `\.\.\.`, `.*`, -1)
// Special case: foo/... matches foo too.
if strings.HasSuffix(re, `/.*`) {
re = re[:len(re)-len(`/.*`)] + `(/.*)?`
}
reg := regexp.MustCompile(`^` + re + `$`)
return reg.MatchString
}
func (c *Context) matchPackages(pattern string) []string {
match := func(string) bool { return true }
treeCanMatch := func(string) bool { return true }
if !isMetaPackage(pattern) {
match = matchPattern(pattern)
treeCanMatch = treeCanMatchPattern(pattern)
}
have := map[string]bool{
"builtin": true, // ignore pseudo-package that exists only for documentation
}
if !c.BuildContext.CgoEnabled {
have["runtime/cgo"] = true // ignore during walk
}
var pkgs []string
for _, src := range c.BuildContext.SrcDirs() {
if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
continue
}
src = filepath.Clean(src) + string(filepath.Separator)
root := src
if pattern == "cmd" {
root += "cmd" + string(filepath.Separator)
}
filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() || path == src {
return nil
}
// Avoid .foo, _foo, and testdata directory trees.
_, elem := filepath.Split(path)
if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
return filepath.SkipDir
}
name := filepath.ToSlash(path[len(src):])
if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") {
// The name "std" is only the standard library.
// If the name is cmd, it's the root of the command tree.
return filepath.SkipDir
}
if !treeCanMatch(name) {
return filepath.SkipDir
}
if have[name] {
return nil
}
have[name] = true
if !match(name) {
return nil
}
_, err = c.BuildContext.ImportDir(path, 0)
if err != nil {
if _, noGo := err.(*build.NoGoError); noGo {
return nil
}
}
pkgs = append(pkgs, name)
return nil
})
}
return pkgs
}
// importPathsNoDotExpansion returns the import paths to use for the given
// command line, but it does no ... expansion.
func (c *Context) importPathsNoDotExpansion(args []string) []string {
if len(args) == 0 {
return []string{"."}
}
var out []string
for _, a := range args {
// Arguments are supposed to be import paths, but
// as a courtesy to Windows developers, rewrite \ to /
// in command-line arguments. Handles .\... and so on.
if filepath.Separator == '\\' {
a = strings.Replace(a, `\`, `/`, -1)
}
// Put argument in canonical form, but preserve leading ./.
if strings.HasPrefix(a, "./") {
a = "./" + path.Clean(a)
if a == "./." {
a = "."
}
} else {
a = path.Clean(a)
}
if isMetaPackage(a) {
out = append(out, c.allPackages(a)...)
continue
}
out = append(out, a)
}
return out
}
// importPaths returns the import paths to use for the given command line. // importPaths returns the import paths to use for the given command line.
func (c *Context) importPaths(args []string) []string { func (c *Context) importPaths(args []string) []string {
args = c.importPathsNoDotExpansion(args) lctx := load.Context{
var out []string BuildContext: c.BuildContext,
for _, a := range args { GOROOTsrc: c.joinPath(c.BuildContext.GOROOT, "src"),
if strings.Contains(a, "...") {
if build.IsLocalImport(a) {
out = append(out, c.allPackagesInFS(a)...)
} else {
out = append(out, c.allPackages(a)...)
} }
continue return lctx.ImportPaths(args)
}
out = append(out, a)
}
return out
} }
// allPackages returns all the packages that can be found // joinPath calls c.BuildContext.JoinPath (if not nil) or else filepath.Join.
// under the $GOPATH directories and $GOROOT matching pattern. //
// The pattern is either "all" (all packages), "std" (standard packages), // It's a copy of the unexported build.Context.joinPath helper.
// "cmd" (standard commands), or a path including "...". func (c *Context) joinPath(elem ...string) string {
func (c *Context) allPackages(pattern string) []string { if f := c.BuildContext.JoinPath; f != nil {
pkgs := c.matchPackages(pattern) return f(elem...)
if len(pkgs) == 0 {
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
}
return pkgs
}
// allPackagesInFS is like allPackages but is passed a pattern
// beginning ./ or ../, meaning it should scan the tree rooted
// at the given directory. There are ... in the pattern too.
func (c *Context) allPackagesInFS(pattern string) []string {
pkgs := c.matchPackagesInFS(pattern)
if len(pkgs) == 0 {
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
}
return pkgs
}
func (c *Context) matchPackagesInFS(pattern string) []string {
// Find directory to begin the scan.
// Could be smarter but this one optimization
// is enough for now, since ... is usually at the
// end of a path.
i := strings.Index(pattern, "...")
dir, _ := path.Split(pattern[:i])
// pattern begins with ./ or ../.
// path.Clean will discard the ./ but not the ../.
// We need to preserve the ./ for pattern matching
// and in the returned import paths.
prefix := ""
if strings.HasPrefix(pattern, "./") {
prefix = "./"
}
match := matchPattern(pattern)
var pkgs []string
filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() {
return nil
}
if path == dir {
// filepath.Walk starts at dir and recurses. For the recursive case,
// the path is the result of filepath.Join, which calls filepath.Clean.
// The initial case is not Cleaned, though, so we do this explicitly.
//
// This converts a path like "./io/" to "io". Without this step, running
// "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io
// package, because prepending the prefix "./" to the unclean path would
// result in "././io", and match("././io") returns false.
path = filepath.Clean(path)
}
// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
_, elem := filepath.Split(path)
dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
return filepath.SkipDir
}
name := prefix + filepath.ToSlash(path)
if !match(name) {
return nil
}
// We keep the directory if we can import it, or if we can't import it
// due to invalid Go source files. This means that directories containing
// parse errors will be built (and fail) instead of being silently skipped
// as not matching the pattern. Go 1.5 and earlier skipped, but that
// behavior means people miss serious mistakes.
// See golang.org/issue/11407.
if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) {
if _, noGo := err.(*build.NoGoError); !noGo {
log.Print(err)
}
return nil
}
pkgs = append(pkgs, name)
return nil
})
return pkgs
}
// isMetaPackage checks if name is a reserved package name that expands to multiple packages
func isMetaPackage(name string) bool {
return name == "std" || name == "cmd" || name == "all"
}
// isStandardImportPath reports whether $GOROOT/src/path should be considered
// part of the standard distribution. For historical reasons we allow people to add
// their own code to $GOROOT instead of using $GOPATH, but we assume that
// code will start with a domain name (dot in the first element).
func isStandardImportPath(path string) bool {
i := strings.Index(path, "/")
if i < 0 {
i = len(path)
}
elem := path[:i]
return !strings.Contains(elem, ".")
}
// hasPathPrefix reports whether the path s begins with the
// path elements in prefix.
func hasPathPrefix(s, prefix string) bool {
	if len(s) < len(prefix) {
		return false
	}
	if len(s) == len(prefix) {
		return s == prefix
	}
	// s is strictly longer than prefix.
	if prefix != "" && prefix[len(prefix)-1] == '/' {
		return strings.HasPrefix(s, prefix)
	}
	// Require an element boundary right after the prefix.
	return s[:len(prefix)] == prefix && s[len(prefix)] == '/'
}
// treeCanMatchPattern(pattern)(name) reports whether
// name or children of name can possibly match pattern.
// Pattern is the same limited glob accepted by matchPattern.
func treeCanMatchPattern(pattern string) func(name string) bool {
	wild := false
	if dots := strings.Index(pattern, "..."); dots >= 0 {
		wild = true
		pattern = pattern[:dots]
	}
	return func(name string) bool {
		// name is an ancestor (or equal) of the fixed prefix,
		if len(name) <= len(pattern) && hasPathPrefix(pattern, name) {
			return true
		}
		// or a descendant when the pattern carried a wildcard.
		return wild && strings.HasPrefix(name, pattern)
	}
}
return filepath.Join(elem...)
} }

View File

@ -0,0 +1,317 @@
// Copyright (c) 2009 The Go Authors. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// +build !go1.9
package gotool
import (
"fmt"
"go/build"
"log"
"os"
"path"
"path/filepath"
"regexp"
"strings"
)
// This file contains code from the Go distribution.
// matchPattern(pattern)(name) reports whether name matches pattern.
// Pattern is a limited glob in which "..." matches any string and
// there is no other special syntax.
func matchPattern(pattern string) func(name string) bool {
	quoted := strings.Replace(regexp.QuoteMeta(pattern), `\.\.\.`, `.*`, -1)
	// Special case: "foo/..." must match "foo" itself as well.
	const dirWild = `/.*`
	if strings.HasSuffix(quoted, dirWild) {
		quoted = strings.TrimSuffix(quoted, dirWild) + `(/.*)?`
	}
	return regexp.MustCompile(`^` + quoted + `$`).MatchString
}
// matchPackages returns a list of package paths matching pattern
// (see go help packages for pattern syntax).
//
// It walks every source directory reported by c.BuildContext and
// collects importable package names, de-duplicated across directories.
func (c *Context) matchPackages(pattern string) []string {
	// Meta-packages ("std", "cmd", "all") match every directory we walk;
	// otherwise compile the pattern into a match predicate plus a
	// prune predicate used to skip whole subtrees early.
	match := func(string) bool { return true }
	treeCanMatch := func(string) bool { return true }
	if !isMetaPackage(pattern) {
		match = matchPattern(pattern)
		treeCanMatch = treeCanMatchPattern(pattern)
	}
	// have records names already emitted or deliberately suppressed,
	// so each package appears at most once in the result.
	have := map[string]bool{
		"builtin": true, // ignore pseudo-package that exists only for documentation
	}
	if !c.BuildContext.CgoEnabled {
		have["runtime/cgo"] = true // ignore during walk
	}
	var pkgs []string
	for _, src := range c.BuildContext.SrcDirs() {
		// "std" and "cmd" only ever come from $GOROOT/src
		// (gorootSrc is a package-level variable defined elsewhere).
		if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
			continue
		}
		src = filepath.Clean(src) + string(filepath.Separator)
		root := src
		if pattern == "cmd" {
			root += "cmd" + string(filepath.Separator)
		}
		// NOTE(review): the error returned by filepath.Walk is discarded;
		// unreadable directories are silently skipped.
		filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
			if err != nil || !fi.IsDir() || path == src {
				return nil
			}
			// Avoid .foo, _foo, and testdata directory trees.
			_, elem := filepath.Split(path)
			if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
				return filepath.SkipDir
			}
			name := filepath.ToSlash(path[len(src):])
			if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") {
				// The name "std" is only the standard library.
				// If the name is cmd, it's the root of the command tree.
				return filepath.SkipDir
			}
			if !treeCanMatch(name) {
				return filepath.SkipDir
			}
			if have[name] {
				return nil
			}
			have[name] = true
			if !match(name) {
				return nil
			}
			// Keep the directory only if it can be imported; a directory
			// with no Go files at all (build.NoGoError) is dropped, while
			// other import errors still let the name through.
			_, err = c.BuildContext.ImportDir(path, 0)
			if err != nil {
				if _, noGo := err.(*build.NoGoError); noGo {
					return nil
				}
			}
			pkgs = append(pkgs, name)
			return nil
		})
	}
	return pkgs
}
// importPathsNoDotExpansion returns the import paths to use for the given
// command line, but it does no ... expansion.
func (c *Context) importPathsNoDotExpansion(args []string) []string {
	if len(args) == 0 {
		return []string{"."}
	}
	var out []string
	for _, arg := range args {
		// As a courtesy to Windows developers, rewrite \ to / in
		// command-line arguments (handles .\... and so on).
		if filepath.Separator == '\\' {
			arg = strings.Replace(arg, `\`, `/`, -1)
		}
		// Put the argument in canonical form, preserving a leading "./".
		if strings.HasPrefix(arg, "./") {
			if arg = "./" + path.Clean(arg); arg == "./." {
				arg = "."
			}
		} else {
			arg = path.Clean(arg)
		}
		// Meta-packages expand to the full list of packages they denote.
		if isMetaPackage(arg) {
			out = append(out, c.allPackages(arg)...)
		} else {
			out = append(out, arg)
		}
	}
	return out
}
// importPaths returns the import paths to use for the given command line.
func (c *Context) importPaths(args []string) []string {
	var out []string
	for _, arg := range c.importPathsNoDotExpansion(args) {
		if !strings.Contains(arg, "...") {
			out = append(out, arg)
			continue
		}
		// Expand the wildcard: filesystem-relative patterns scan the
		// tree on disk, everything else goes through the package lists.
		if build.IsLocalImport(arg) {
			out = append(out, c.allPackagesInFS(arg)...)
		} else {
			out = append(out, c.allPackages(arg)...)
		}
	}
	return out
}
// allPackages returns all the packages that can be found
// under the $GOPATH directories and $GOROOT matching pattern.
// The pattern is either "all" (all packages), "std" (standard packages),
// "cmd" (standard commands), or a path including "...".
func (c *Context) allPackages(pattern string) []string {
	matched := c.matchPackages(pattern)
	if len(matched) > 0 {
		return matched
	}
	fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
	return matched
}
// allPackagesInFS is like allPackages but is passed a pattern
// beginning ./ or ../, meaning it should scan the tree rooted
// at the given directory. There are ... in the pattern too.
func (c *Context) allPackagesInFS(pattern string) []string {
	found := c.matchPackagesInFS(pattern)
	if len(found) == 0 {
		// Warn, matching the behaviour of allPackages.
		fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
	}
	return found
}
// matchPackagesInFS returns a list of package paths matching pattern,
// which must begin with ./ or ../
// (see go help packages for pattern syntax).
func (c *Context) matchPackagesInFS(pattern string) []string {
	// Find directory to begin the scan.
	// Could be smarter but this one optimization
	// is enough for now, since ... is usually at the
	// end of a path.
	i := strings.Index(pattern, "...")
	dir, _ := path.Split(pattern[:i])
	// pattern begins with ./ or ../.
	// path.Clean will discard the ./ but not the ../.
	// We need to preserve the ./ for pattern matching
	// and in the returned import paths.
	prefix := ""
	if strings.HasPrefix(pattern, "./") {
		prefix = "./"
	}
	match := matchPattern(pattern)
	var pkgs []string
	// NOTE(review): the error returned by filepath.Walk is discarded;
	// unreadable directories are silently skipped.
	filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
		if err != nil || !fi.IsDir() {
			return nil
		}
		if path == dir {
			// filepath.Walk starts at dir and recurses. For the recursive case,
			// the path is the result of filepath.Join, which calls filepath.Clean.
			// The initial case is not Cleaned, though, so we do this explicitly.
			//
			// This converts a path like "./io/" to "io". Without this step, running
			// "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io
			// package, because prepending the prefix "./" to the unclean path would
			// result in "././io", and match("././io") returns false.
			path = filepath.Clean(path)
		}
		// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
		_, elem := filepath.Split(path)
		dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
		if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
			return filepath.SkipDir
		}
		name := prefix + filepath.ToSlash(path)
		if !match(name) {
			return nil
		}
		// We keep the directory if we can import it, or if we can't import it
		// due to invalid Go source files. This means that directories containing
		// parse errors will be built (and fail) instead of being silently skipped
		// as not matching the pattern. Go 1.5 and earlier skipped, but that
		// behavior means people miss serious mistakes.
		// See golang.org/issue/11407.
		//
		// NOTE(review): shouldIgnoreImport is defined elsewhere in this
		// package; it appears to decide whether a failed import should drop
		// the directory entirely — confirm its semantics before changing.
		if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) {
			if _, noGo := err.(*build.NoGoError); !noGo {
				log.Print(err)
			}
			return nil
		}
		pkgs = append(pkgs, name)
		return nil
	})
	return pkgs
}
// isMetaPackage checks if name is a reserved package name that expands to multiple packages.
func isMetaPackage(name string) bool {
	switch name {
	case "std", "cmd", "all":
		return true
	default:
		return false
	}
}
// isStandardImportPath reports whether $GOROOT/src/path should be considered
// part of the standard distribution. For historical reasons we allow people to add
// their own code to $GOROOT instead of using $GOPATH, but we assume that
// code will start with a domain name (dot in the first element).
func isStandardImportPath(path string) bool {
	first := path
	if slash := strings.IndexByte(path, '/'); slash >= 0 {
		first = path[:slash]
	}
	return !strings.Contains(first, ".")
}
// hasPathPrefix reports whether the path s begins with the
// path elements in prefix.
func hasPathPrefix(s, prefix string) bool {
	switch {
	case len(s) < len(prefix):
		return false
	case len(s) == len(prefix):
		return s == prefix
	default:
		// s is strictly longer than prefix.
		if prefix != "" && prefix[len(prefix)-1] == '/' {
			return strings.HasPrefix(s, prefix)
		}
		// Require an element boundary right after the prefix.
		return s[:len(prefix)] == prefix && s[len(prefix)] == '/'
	}
}
// treeCanMatchPattern(pattern)(name) reports whether
// name or children of name can possibly match pattern.
// Pattern is the same limited glob accepted by matchPattern.
func treeCanMatchPattern(pattern string) func(name string) bool {
	prefix := pattern
	wildcard := false
	if i := strings.Index(prefix, "..."); i >= 0 {
		prefix, wildcard = prefix[:i], true
	}
	return func(name string) bool {
		return (len(name) <= len(prefix) && hasPathPrefix(prefix, name)) ||
			(wildcard && strings.HasPrefix(name, prefix))
	}
}

View File

@ -0,0 +1,27 @@
Copyright (c) 2012 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1,229 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"flag"
"fmt"
"go/ast"
"go/build"
"go/token"
"go/types"
"log"
"sort"
"github.com/kisielk/gotool"
"golang.org/x/tools/go/loader"
)
// fset is the shared file set used to resolve token positions for reporting.
var fset = token.NewFileSet()

// main type-checks the packages named on the command line and reports
// every struct type whose fields could be reordered into a smaller layout.
func main() {
	flag.Parse()
	importPaths := gotool.ImportPaths(flag.Args())
	if len(importPaths) == 0 {
		return
	}
	var conf loader.Config
	conf.Fset = fset
	for _, importPath := range importPaths {
		conf.Import(importPath)
	}
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	// Visit every struct type literal in every initially-loaded package.
	for _, pkg := range prog.InitialPackages() {
		for _, file := range pkg.Files {
			ast.Inspect(file, func(node ast.Node) bool {
				if s, ok := node.(*ast.StructType); ok {
					// pkg.Types[s].Type is the checker's type for the
					// literal; struct literals always yield *types.Struct.
					malign(node.Pos(), pkg.Types[s].Type.(*types.Struct))
				}
				return true
			})
		}
	}
}
// malign reports str (declared at pos) when reordering its fields would
// shrink it, using the word size and maximum alignment of the host GOARCH.
func malign(pos token.Pos, str *types.Struct) {
	var wordSize, maxAlign int64 = 8, 8
	switch build.Default.GOARCH {
	case "386", "arm":
		wordSize = 4
		maxAlign = 4
	case "amd64p32":
		wordSize = 4
	}
	sizes := gcSizes{wordSize, maxAlign}
	actual := sizes.Sizeof(str)
	optimal := optimalSize(str, &sizes)
	if actual != optimal {
		fmt.Printf("%s: struct of size %d could be %d\n", fset.Position(pos), actual, optimal)
	}
}
// optimalSize returns the size str would have if its fields were sorted
// into the layout-friendly order defined by byAlignAndSize.
func optimalSize(str *types.Struct, sizes *gcSizes) int64 {
	n := str.NumFields()
	fields := make([]*types.Var, n)
	aligns := make([]int64, n)
	szs := make([]int64, n)
	for i := range fields {
		f := str.Field(i)
		fields[i] = f
		aligns[i] = sizes.Alignof(f.Type())
		szs[i] = sizes.Sizeof(f.Type())
	}
	sort.Sort(&byAlignAndSize{fields, aligns, szs})
	return sizes.Sizeof(types.NewStruct(fields, nil))
}
// byAlignAndSize implements sort.Interface over a struct's fields,
// keeping the three parallel slices (field, alignment, size) in
// lockstep. The ordering itself is defined by Less.
type byAlignAndSize struct {
	fields   []*types.Var
	alignofs []int64
	sizeofs  []int64
}

func (s *byAlignAndSize) Len() int { return len(s.fields) }

// Swap exchanges entries i and j in all three parallel slices.
func (s *byAlignAndSize) Swap(i, j int) {
	s.fields[i], s.fields[j] = s.fields[j], s.fields[i]
	s.alignofs[i], s.alignofs[j] = s.alignofs[j], s.alignofs[i]
	s.sizeofs[i], s.sizeofs[j] = s.sizeofs[j], s.sizeofs[i]
}
// Less orders fields: zero-sized first, then by decreasing alignment,
// then by decreasing size.
func (s *byAlignAndSize) Less(i, j int) bool {
	si, sj := s.sizeofs[i], s.sizeofs[j]
	// Exactly one of the two is zero-sized: the zero-sized one sorts first.
	if (si == 0) != (sj == 0) {
		return si == 0
	}
	// More tightly aligned fields come before less tightly aligned ones.
	if ai, aj := s.alignofs[i], s.alignofs[j]; ai != aj {
		return ai > aj
	}
	// Finally, larger fields come first.
	return si > sj
}
// Code below based on go/types.StdSizes.

// gcSizes computes sizes and alignments for a target with the given
// word size and maximum alignment, both in bytes.
type gcSizes struct {
	WordSize int64
	MaxAlign int64
}

// Alignof returns the required alignment, in bytes, of a value of type T.
func (s *gcSizes) Alignof(T types.Type) int64 {
	// NOTE: On amd64, complex64 is 8 byte aligned,
	// even though float32 is only 4 byte aligned.
	// For arrays and structs, alignment is defined in terms
	// of alignment of the elements and fields, respectively.
	switch t := T.Underlying().(type) {
	case *types.Array:
		// spec: "For a variable x of array type: unsafe.Alignof(x)
		// is the same as unsafe.Alignof(x[0]), but at least 1."
		return s.Alignof(t.Elem())
	case *types.Struct:
		// spec: "For a variable x of struct type: unsafe.Alignof(x)
		// is the largest of the values unsafe.Alignof(x.f) for each
		// field f of x, but at least 1."
		max := int64(1)
		for i, nf := 0, t.NumFields(); i < nf; i++ {
			if a := s.Alignof(t.Field(i).Type()); a > max {
				max = a
			}
		}
		return max
	}
	// All other types align to their size, clamped to [1, MaxAlign].
	a := s.Sizeof(T) // may be 0
	// spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1."
	if a < 1 {
		return 1
	}
	if a > s.MaxAlign {
		return s.MaxAlign
	}
	return a
}
// basicSizes maps each fixed-size basic kind to its size in bytes.
// Kinds with a zero entry (e.g. String) are handled separately in Sizeof.
var basicSizes = [...]byte{
	types.Bool:       1,
	types.Int8:       1,
	types.Int16:      2,
	types.Int32:      4,
	types.Int64:      8,
	types.Uint8:      1,
	types.Uint16:     2,
	types.Uint32:     4,
	types.Uint64:     8,
	types.Float32:    4,
	types.Float64:    8,
	types.Complex64:  8,
	types.Complex128: 16,
}

// Sizeof returns the size, in bytes, of a value of type T.
func (s *gcSizes) Sizeof(T types.Type) int64 {
	switch t := T.Underlying().(type) {
	case *types.Basic:
		k := t.Kind()
		// NOTE(review): the inner `s` shadows the receiver here;
		// it is the byte-sized table entry, not *gcSizes.
		if int(k) < len(basicSizes) {
			if s := basicSizes[k]; s > 0 {
				return int64(s)
			}
		}
		if k == types.String {
			// Strings are a pointer plus a length word.
			return s.WordSize * 2
		}
	case *types.Array:
		n := t.Len()
		if n == 0 {
			return 0
		}
		// n-1 fully aligned elements plus the final (unpadded) one.
		a := s.Alignof(t.Elem())
		z := s.Sizeof(t.Elem())
		return align(z, a)*(n-1) + z
	case *types.Slice:
		// Pointer, length and capacity words.
		return s.WordSize * 3
	case *types.Struct:
		nf := t.NumFields()
		if nf == 0 {
			return 0
		}
		var o int64
		max := int64(1)
		for i := 0; i < nf; i++ {
			ft := t.Field(i).Type()
			a, sz := s.Alignof(ft), s.Sizeof(ft)
			if a > max {
				max = a
			}
			// A trailing zero-sized field occupies one byte so that
			// taking its address cannot point past the struct.
			if i == nf-1 && sz == 0 && o != 0 {
				sz = 1
			}
			o = align(o, a) + sz
		}
		// Pad the struct out to its own alignment.
		return align(o, max)
	case *types.Interface:
		// Type word plus data word.
		return s.WordSize * 2
	}
	return s.WordSize // catch-all
}
// align returns the smallest y >= x such that y % a == 0.
func align(x, a int64) int64 {
	rounded := x + a - 1
	return rounded - rounded%a
}

View File

@ -1,26 +0,0 @@
// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
package main
import (
"flag"
"fmt"
"os"
"github.com/mvdan/unparam/check"
)
// tests controls whether test files are included in the analysis;
// its value is forwarded directly to check.UnusedParams.
var tests = flag.Bool("tests", true, "include tests")

// main runs the unused-parameter check over the packages named on the
// command line, printing each warning on its own line; any analysis
// error goes to stderr and exits with status 1.
func main() {
	flag.Parse()
	warns, err := check.UnusedParams(*tests, flag.Args()...)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for _, warn := range warns {
		fmt.Println(warn)
	}
}

View File

@ -21,10 +21,6 @@ package intsets // import "golang.org/x/tools/container/intsets"
// The space usage would be proportional to Max(), not Len(), and the // The space usage would be proportional to Max(), not Len(), and the
// implementation would be based upon big.Int. // implementation would be based upon big.Int.
// //
// TODO(adonovan): experiment with making the root block indirect (nil
// iff IsEmpty). This would reduce the memory usage when empty and
// might simplify the aliasing invariants.
//
// TODO(adonovan): opt: make UnionWith and Difference faster. // TODO(adonovan): opt: make UnionWith and Difference faster.
// These are the hot-spots for go/pointer. // These are the hot-spots for go/pointer.
@ -45,9 +41,10 @@ type Sparse struct {
// An uninitialized Sparse represents an empty set. // An uninitialized Sparse represents an empty set.
// An empty set may also be represented by // An empty set may also be represented by
// root.next == root.prev == &root. // root.next == root.prev == &root.
// In a non-empty set, root.next points to the first block and //
// root.prev to the last. // The root is always the block with the smallest offset.
// root.offset and root.bits are unused. // It can be empty, but only if it is the only block; in that case, offset is
// MaxInt (which is not a valid offset).
root block root block
} }
@ -144,7 +141,6 @@ func (b *block) len() int {
// max returns the maximum element of the block. // max returns the maximum element of the block.
// The block must not be empty. // The block must not be empty.
//
func (b *block) max() int { func (b *block) max() int {
bi := b.offset + bitsPerBlock bi := b.offset + bitsPerBlock
// Decrement bi by number of high zeros in last.bits. // Decrement bi by number of high zeros in last.bits.
@ -161,7 +157,6 @@ func (b *block) max() int {
// and also removes it if take is set. // and also removes it if take is set.
// The block must not be initially empty. // The block must not be initially empty.
// NB: may leave the block empty. // NB: may leave the block empty.
//
func (b *block) min(take bool) int { func (b *block) min(take bool) int {
for i, w := range b.bits { for i, w := range b.bits {
if w != 0 { if w != 0 {
@ -175,6 +170,26 @@ func (b *block) min(take bool) int {
panic("BUG: empty block") panic("BUG: empty block")
} }
// lowerBound returns the smallest element of the block that is greater than or
// equal to the element corresponding to the ith bit. If there is no such
// element, the second return value is false.
func (b *block) lowerBound(i uint) (int, bool) {
w := i / bitsPerWord
bit := i % bitsPerWord
if val := b.bits[w] >> bit; val != 0 {
return b.offset + int(i) + ntz(val), true
}
for w++; w < wordsPerBlock; w++ {
if val := b.bits[w]; val != 0 {
return b.offset + int(w*bitsPerWord) + ntz(val), true
}
}
return 0, false
}
// forEach calls f for each element of block b. // forEach calls f for each element of block b.
// f must not mutate b's enclosing Sparse. // f must not mutate b's enclosing Sparse.
func (b *block) forEach(f func(int)) { func (b *block) forEach(f func(int)) {
@ -204,14 +219,20 @@ func offsetAndBitIndex(x int) (int, uint) {
// -- Sparse -------------------------------------------------------------- // -- Sparse --------------------------------------------------------------
// start returns the root's next block, which is the root block // none is a shared, empty, sentinel block that indicates the end of a block
// (if s.IsEmpty()) or the first true block otherwise. // list.
// start has the side effect of ensuring that s is properly var none block
// initialized.
// // Dummy type used to generate an implicit panic. This must be defined at the
func (s *Sparse) start() *block { // package level; if it is defined inside a function, it prevents the inlining
// of that function.
type to_copy_a_sparse_you_must_call_its_Copy_method struct{}
// init ensures s is properly initialized.
func (s *Sparse) init() {
root := &s.root root := &s.root
if root.next == nil { if root.next == nil {
root.offset = MaxInt
root.next = root root.next = root
root.prev = root root.prev = root
} else if root.next.prev != root { } else if root.next.prev != root {
@ -219,21 +240,45 @@ func (s *Sparse) start() *block {
// new Sparse y shares the old linked list, but iteration // new Sparse y shares the old linked list, but iteration
// on y will never encounter &y.root so it goes into a // on y will never encounter &y.root so it goes into a
// loop. Fail fast before this occurs. // loop. Fail fast before this occurs.
panic("A Sparse has been copied without (*Sparse).Copy()") // We don't want to call panic here because it prevents the
// inlining of this function.
_ = (interface{}(nil)).(to_copy_a_sparse_you_must_call_its_Copy_method)
} }
}
return root.next func (s *Sparse) first() *block {
s.init()
if s.root.offset == MaxInt {
return &none
}
return &s.root
}
// next returns the next block in the list, or end if b is the last block.
func (s *Sparse) next(b *block) *block {
if b.next == &s.root {
return &none
}
return b.next
}
// prev returns the previous block in the list, or end if b is the first block.
func (s *Sparse) prev(b *block) *block {
if b.prev == &s.root {
return &none
}
return b.prev
} }
// IsEmpty reports whether the set s is empty. // IsEmpty reports whether the set s is empty.
func (s *Sparse) IsEmpty() bool { func (s *Sparse) IsEmpty() bool {
return s.start() == &s.root return s.root.next == nil || s.root.offset == MaxInt
} }
// Len returns the number of elements in the set s. // Len returns the number of elements in the set s.
func (s *Sparse) Len() int { func (s *Sparse) Len() int {
var l int var l int
for b := s.start(); b != &s.root; b = b.next { for b := s.first(); b != &none; b = s.next(b) {
l += b.len() l += b.len()
} }
return l return l
@ -252,19 +297,34 @@ func (s *Sparse) Min() int {
if s.IsEmpty() { if s.IsEmpty() {
return MaxInt return MaxInt
} }
return s.root.next.min(false) return s.root.min(false)
}
// LowerBound returns the smallest element >= x, or MaxInt if there is no such
// element.
func (s *Sparse) LowerBound(x int) int {
offset, i := offsetAndBitIndex(x)
for b := s.first(); b != &none; b = s.next(b) {
if b.offset > offset {
return b.min(false)
}
if b.offset == offset {
if y, ok := b.lowerBound(i); ok {
return y
}
}
}
return MaxInt
} }
// block returns the block that would contain offset, // block returns the block that would contain offset,
// or nil if s contains no such block. // or nil if s contains no such block.
// // Precondition: offset is a multiple of bitsPerBlock.
func (s *Sparse) block(offset int) *block { func (s *Sparse) block(offset int) *block {
b := s.start() for b := s.first(); b != &none && b.offset <= offset; b = s.next(b) {
for b != &s.root && b.offset <= offset {
if b.offset == offset { if b.offset == offset {
return b return b
} }
b = b.next
} }
return nil return nil
} }
@ -272,26 +332,49 @@ func (s *Sparse) block(offset int) *block {
// Insert adds x to the set s, and reports whether the set grew. // Insert adds x to the set s, and reports whether the set grew.
func (s *Sparse) Insert(x int) bool { func (s *Sparse) Insert(x int) bool {
offset, i := offsetAndBitIndex(x) offset, i := offsetAndBitIndex(x)
b := s.start()
for b != &s.root && b.offset <= offset { b := s.first()
for ; b != &none && b.offset <= offset; b = s.next(b) {
if b.offset == offset { if b.offset == offset {
return b.insert(i) return b.insert(i)
} }
b = b.next
} }
// Insert new block before b. // Insert new block before b.
new := &block{offset: offset} new := s.insertBlockBefore(b)
new.next = b new.offset = offset
new.prev = b.prev
new.prev.next = new
new.next.prev = new
return new.insert(i) return new.insert(i)
} }
func (s *Sparse) removeBlock(b *block) { // removeBlock removes a block and returns the block that followed it (or end if
// it was the last block).
func (s *Sparse) removeBlock(b *block) *block {
if b != &s.root {
b.prev.next = b.next b.prev.next = b.next
b.next.prev = b.prev b.next.prev = b.prev
if b.next == &s.root {
return &none
}
return b.next
}
first := s.root.next
if first == &s.root {
// This was the only block.
s.Clear()
return &none
}
s.root.offset = first.offset
s.root.bits = first.bits
if first.next == &s.root {
// Single block remaining.
s.root.next = &s.root
s.root.prev = &s.root
} else {
s.root.next = first.next
first.next.prev = &s.root
}
return &s.root
} }
// Remove removes x from the set s, and reports whether the set shrank. // Remove removes x from the set s, and reports whether the set shrank.
@ -311,8 +394,11 @@ func (s *Sparse) Remove(x int) bool {
// Clear removes all elements from the set s. // Clear removes all elements from the set s.
func (s *Sparse) Clear() { func (s *Sparse) Clear() {
s.root.next = &s.root s.root = block{
s.root.prev = &s.root offset: MaxInt,
next: &s.root,
prev: &s.root,
}
} }
// If set s is non-empty, TakeMin sets *p to the minimum element of // If set s is non-empty, TakeMin sets *p to the minimum element of
@ -325,13 +411,12 @@ func (s *Sparse) Clear() {
// for worklist.TakeMin(&x) { use(x) } // for worklist.TakeMin(&x) { use(x) }
// //
func (s *Sparse) TakeMin(p *int) bool { func (s *Sparse) TakeMin(p *int) bool {
head := s.start() if s.IsEmpty() {
if head == &s.root {
return false return false
} }
*p = head.min(true) *p = s.root.min(true)
if head.empty() { if s.root.empty() {
s.removeBlock(head) s.removeBlock(&s.root)
} }
return true return true
} }
@ -352,7 +437,7 @@ func (s *Sparse) Has(x int) bool {
// natural control flow with continue/break/return. // natural control flow with continue/break/return.
// //
func (s *Sparse) forEach(f func(int)) { func (s *Sparse) forEach(f func(int)) {
for b := s.start(); b != &s.root; b = b.next { for b := s.first(); b != &none; b = s.next(b) {
b.forEach(f) b.forEach(f)
} }
} }
@ -363,22 +448,51 @@ func (s *Sparse) Copy(x *Sparse) {
return return
} }
xb := x.start() xb := x.first()
sb := s.start() sb := s.first()
for xb != &x.root { for xb != &none {
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
sb.offset = xb.offset sb.offset = xb.offset
sb.bits = xb.bits sb.bits = xb.bits
xb = xb.next xb = x.next(xb)
sb = sb.next sb = s.next(sb)
} }
s.discardTail(sb) s.discardTail(sb)
} }
// insertBlockBefore returns a new block, inserting it before next. // insertBlockBefore returns a new block, inserting it before next.
// If next is the root, the root is replaced. If next is end, the block is
// inserted at the end.
func (s *Sparse) insertBlockBefore(next *block) *block { func (s *Sparse) insertBlockBefore(next *block) *block {
if s.IsEmpty() {
if next != &none {
panic("BUG: passed block with empty set")
}
return &s.root
}
if next == &s.root {
// Special case: we need to create a new block that will become the root
// block.The old root block becomes the second block.
second := s.root
s.root = block{
next: &second,
}
if second.next == &s.root {
s.root.prev = &second
} else {
s.root.prev = second.prev
second.next.prev = &second
second.prev = &s.root
}
return &s.root
}
if next == &none {
// Insert before root.
next = &s.root
}
b := new(block) b := new(block)
b.next = next b.next = next
b.prev = next.prev b.prev = next.prev
@ -389,10 +503,14 @@ func (s *Sparse) insertBlockBefore(next *block) *block {
// discardTail removes block b and all its successors from s. // discardTail removes block b and all its successors from s.
func (s *Sparse) discardTail(b *block) { func (s *Sparse) discardTail(b *block) {
if b != &s.root { if b != &none {
if b == &s.root {
s.Clear()
} else {
b.prev.next = &s.root b.prev.next = &s.root
s.root.prev = b.prev s.root.prev = b.prev
} }
}
} }
// IntersectionWith sets s to the intersection s ∩ x. // IntersectionWith sets s to the intersection s ∩ x.
@ -401,16 +519,15 @@ func (s *Sparse) IntersectionWith(x *Sparse) {
return return
} }
xb := x.start() xb := x.first()
sb := s.start() sb := s.first()
for xb != &x.root && sb != &s.root { for xb != &none && sb != &none {
switch { switch {
case xb.offset < sb.offset: case xb.offset < sb.offset:
xb = xb.next xb = x.next(xb)
case xb.offset > sb.offset: case xb.offset > sb.offset:
sb = sb.next sb = s.removeBlock(sb)
s.removeBlock(sb.prev)
default: default:
var sum word var sum word
@ -420,12 +537,12 @@ func (s *Sparse) IntersectionWith(x *Sparse) {
sum |= r sum |= r
} }
if sum != 0 { if sum != 0 {
sb = sb.next sb = s.next(sb)
} else { } else {
// sb will be overwritten or removed // sb will be overwritten or removed
} }
xb = xb.next xb = x.next(xb)
} }
} }
@ -446,20 +563,20 @@ func (s *Sparse) Intersection(x, y *Sparse) {
return return
} }
xb := x.start() xb := x.first()
yb := y.start() yb := y.first()
sb := s.start() sb := s.first()
for xb != &x.root && yb != &y.root { for xb != &none && yb != &none {
switch { switch {
case xb.offset < yb.offset: case xb.offset < yb.offset:
xb = xb.next xb = x.next(xb)
continue continue
case xb.offset > yb.offset: case xb.offset > yb.offset:
yb = yb.next yb = y.next(yb)
continue continue
} }
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
sb.offset = xb.offset sb.offset = xb.offset
@ -471,13 +588,13 @@ func (s *Sparse) Intersection(x, y *Sparse) {
sum |= r sum |= r
} }
if sum != 0 { if sum != 0 {
sb = sb.next sb = s.next(sb)
} else { } else {
// sb will be overwritten or removed // sb will be overwritten or removed
} }
xb = xb.next xb = x.next(xb)
yb = yb.next yb = y.next(yb)
} }
s.discardTail(sb) s.discardTail(sb)
@ -485,22 +602,22 @@ func (s *Sparse) Intersection(x, y *Sparse) {
// Intersects reports whether s ∩ x ≠ ∅. // Intersects reports whether s ∩ x ≠ ∅.
func (s *Sparse) Intersects(x *Sparse) bool { func (s *Sparse) Intersects(x *Sparse) bool {
sb := s.start() sb := s.first()
xb := x.start() xb := x.first()
for sb != &s.root && xb != &x.root { for sb != &none && xb != &none {
switch { switch {
case xb.offset < sb.offset: case xb.offset < sb.offset:
xb = xb.next xb = x.next(xb)
case xb.offset > sb.offset: case xb.offset > sb.offset:
sb = sb.next sb = s.next(sb)
default: default:
for i := range sb.bits { for i := range sb.bits {
if sb.bits[i]&xb.bits[i] != 0 { if sb.bits[i]&xb.bits[i] != 0 {
return true return true
} }
} }
sb = sb.next sb = s.next(sb)
xb = xb.next xb = x.next(xb)
} }
} }
return false return false
@ -513,26 +630,26 @@ func (s *Sparse) UnionWith(x *Sparse) bool {
} }
var changed bool var changed bool
xb := x.start() xb := x.first()
sb := s.start() sb := s.first()
for xb != &x.root { for xb != &none {
if sb != &s.root && sb.offset == xb.offset { if sb != &none && sb.offset == xb.offset {
for i := range xb.bits { for i := range xb.bits {
if sb.bits[i] != xb.bits[i] { if sb.bits[i] != xb.bits[i] {
sb.bits[i] |= xb.bits[i] sb.bits[i] |= xb.bits[i]
changed = true changed = true
} }
} }
xb = xb.next xb = x.next(xb)
} else if sb == &s.root || sb.offset > xb.offset { } else if sb == &none || sb.offset > xb.offset {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
sb.offset = xb.offset sb.offset = xb.offset
sb.bits = xb.bits sb.bits = xb.bits
changed = true changed = true
xb = xb.next xb = x.next(xb)
} }
sb = sb.next sb = s.next(sb)
} }
return changed return changed
} }
@ -551,33 +668,33 @@ func (s *Sparse) Union(x, y *Sparse) {
return return
} }
xb := x.start() xb := x.first()
yb := y.start() yb := y.first()
sb := s.start() sb := s.first()
for xb != &x.root || yb != &y.root { for xb != &none || yb != &none {
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
switch { switch {
case yb == &y.root || (xb != &x.root && xb.offset < yb.offset): case yb == &none || (xb != &none && xb.offset < yb.offset):
sb.offset = xb.offset sb.offset = xb.offset
sb.bits = xb.bits sb.bits = xb.bits
xb = xb.next xb = x.next(xb)
case xb == &x.root || (yb != &y.root && yb.offset < xb.offset): case xb == &none || (yb != &none && yb.offset < xb.offset):
sb.offset = yb.offset sb.offset = yb.offset
sb.bits = yb.bits sb.bits = yb.bits
yb = yb.next yb = y.next(yb)
default: default:
sb.offset = xb.offset sb.offset = xb.offset
for i := range xb.bits { for i := range xb.bits {
sb.bits[i] = xb.bits[i] | yb.bits[i] sb.bits[i] = xb.bits[i] | yb.bits[i]
} }
xb = xb.next xb = x.next(xb)
yb = yb.next yb = y.next(yb)
} }
sb = sb.next sb = s.next(sb)
} }
s.discardTail(sb) s.discardTail(sb)
@ -590,15 +707,15 @@ func (s *Sparse) DifferenceWith(x *Sparse) {
return return
} }
xb := x.start() xb := x.first()
sb := s.start() sb := s.first()
for xb != &x.root && sb != &s.root { for xb != &none && sb != &none {
switch { switch {
case xb.offset > sb.offset: case xb.offset > sb.offset:
sb = sb.next sb = s.next(sb)
case xb.offset < sb.offset: case xb.offset < sb.offset:
xb = xb.next xb = x.next(xb)
default: default:
var sum word var sum word
@ -607,12 +724,12 @@ func (s *Sparse) DifferenceWith(x *Sparse) {
sb.bits[i] = r sb.bits[i] = r
sum |= r sum |= r
} }
sb = sb.next
xb = xb.next
if sum == 0 { if sum == 0 {
s.removeBlock(sb.prev) sb = s.removeBlock(sb)
} else {
sb = s.next(sb)
} }
xb = x.next(xb)
} }
} }
} }
@ -633,27 +750,27 @@ func (s *Sparse) Difference(x, y *Sparse) {
return return
} }
xb := x.start() xb := x.first()
yb := y.start() yb := y.first()
sb := s.start() sb := s.first()
for xb != &x.root && yb != &y.root { for xb != &none && yb != &none {
if xb.offset > yb.offset { if xb.offset > yb.offset {
// y has block, x has none // y has block, x has &none
yb = yb.next yb = y.next(yb)
continue continue
} }
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
sb.offset = xb.offset sb.offset = xb.offset
switch { switch {
case xb.offset < yb.offset: case xb.offset < yb.offset:
// x has block, y has none // x has block, y has &none
sb.bits = xb.bits sb.bits = xb.bits
sb = sb.next sb = s.next(sb)
default: default:
// x and y have corresponding blocks // x and y have corresponding blocks
@ -664,25 +781,25 @@ func (s *Sparse) Difference(x, y *Sparse) {
sum |= r sum |= r
} }
if sum != 0 { if sum != 0 {
sb = sb.next sb = s.next(sb)
} else { } else {
// sb will be overwritten or removed // sb will be overwritten or removed
} }
yb = yb.next yb = y.next(yb)
} }
xb = xb.next xb = x.next(xb)
} }
for xb != &x.root { for xb != &none {
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
sb.offset = xb.offset sb.offset = xb.offset
sb.bits = xb.bits sb.bits = xb.bits
sb = sb.next sb = s.next(sb)
xb = xb.next xb = x.next(xb)
} }
s.discardTail(sb) s.discardTail(sb)
@ -695,17 +812,17 @@ func (s *Sparse) SymmetricDifferenceWith(x *Sparse) {
return return
} }
sb := s.start() sb := s.first()
xb := x.start() xb := x.first()
for xb != &x.root && sb != &s.root { for xb != &none && sb != &none {
switch { switch {
case sb.offset < xb.offset: case sb.offset < xb.offset:
sb = sb.next sb = s.next(sb)
case xb.offset < sb.offset: case xb.offset < sb.offset:
nb := s.insertBlockBefore(sb) nb := s.insertBlockBefore(sb)
nb.offset = xb.offset nb.offset = xb.offset
nb.bits = xb.bits nb.bits = xb.bits
xb = xb.next xb = x.next(xb)
default: default:
var sum word var sum word
for i := range sb.bits { for i := range sb.bits {
@ -713,20 +830,21 @@ func (s *Sparse) SymmetricDifferenceWith(x *Sparse) {
sb.bits[i] = r sb.bits[i] = r
sum |= r sum |= r
} }
sb = sb.next
xb = xb.next
if sum == 0 { if sum == 0 {
s.removeBlock(sb.prev) sb = s.removeBlock(sb)
} else {
sb = s.next(sb)
} }
xb = x.next(xb)
} }
} }
for xb != &x.root { // append the tail of x to s for xb != &none { // append the tail of x to s
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
sb.offset = xb.offset sb.offset = xb.offset
sb.bits = xb.bits sb.bits = xb.bits
sb = sb.next sb = s.next(sb)
xb = xb.next xb = x.next(xb)
} }
} }
@ -744,24 +862,24 @@ func (s *Sparse) SymmetricDifference(x, y *Sparse) {
return return
} }
sb := s.start() sb := s.first()
xb := x.start() xb := x.first()
yb := y.start() yb := y.first()
for xb != &x.root && yb != &y.root { for xb != &none && yb != &none {
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
switch { switch {
case yb.offset < xb.offset: case yb.offset < xb.offset:
sb.offset = yb.offset sb.offset = yb.offset
sb.bits = yb.bits sb.bits = yb.bits
sb = sb.next sb = s.next(sb)
yb = yb.next yb = y.next(yb)
case xb.offset < yb.offset: case xb.offset < yb.offset:
sb.offset = xb.offset sb.offset = xb.offset
sb.bits = xb.bits sb.bits = xb.bits
sb = sb.next sb = s.next(sb)
xb = xb.next xb = x.next(xb)
default: default:
var sum word var sum word
for i := range sb.bits { for i := range sb.bits {
@ -771,31 +889,31 @@ func (s *Sparse) SymmetricDifference(x, y *Sparse) {
} }
if sum != 0 { if sum != 0 {
sb.offset = xb.offset sb.offset = xb.offset
sb = sb.next sb = s.next(sb)
} }
xb = xb.next xb = x.next(xb)
yb = yb.next yb = y.next(yb)
} }
} }
for xb != &x.root { // append the tail of x to s for xb != &none { // append the tail of x to s
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
sb.offset = xb.offset sb.offset = xb.offset
sb.bits = xb.bits sb.bits = xb.bits
sb = sb.next sb = s.next(sb)
xb = xb.next xb = x.next(xb)
} }
for yb != &y.root { // append the tail of y to s for yb != &none { // append the tail of y to s
if sb == &s.root { if sb == &none {
sb = s.insertBlockBefore(sb) sb = s.insertBlockBefore(sb)
} }
sb.offset = yb.offset sb.offset = yb.offset
sb.bits = yb.bits sb.bits = yb.bits
sb = sb.next sb = s.next(sb)
yb = yb.next yb = y.next(yb)
} }
s.discardTail(sb) s.discardTail(sb)
@ -807,22 +925,22 @@ func (s *Sparse) SubsetOf(x *Sparse) bool {
return true return true
} }
sb := s.start() sb := s.first()
xb := x.start() xb := x.first()
for sb != &s.root { for sb != &none {
switch { switch {
case xb == &x.root || xb.offset > sb.offset: case xb == &none || xb.offset > sb.offset:
return false return false
case xb.offset < sb.offset: case xb.offset < sb.offset:
xb = xb.next xb = x.next(xb)
default: default:
for i := range sb.bits { for i := range sb.bits {
if sb.bits[i]&^xb.bits[i] != 0 { if sb.bits[i]&^xb.bits[i] != 0 {
return false return false
} }
} }
sb = sb.next sb = s.next(sb)
xb = xb.next xb = x.next(xb)
} }
} }
return true return true
@ -833,13 +951,13 @@ func (s *Sparse) Equals(t *Sparse) bool {
if s == t { if s == t {
return true return true
} }
sb := s.start() sb := s.first()
tb := t.start() tb := t.first()
for { for {
switch { switch {
case sb == &s.root && tb == &t.root: case sb == &none && tb == &none:
return true return true
case sb == &s.root || tb == &t.root: case sb == &none || tb == &none:
return false return false
case sb.offset != tb.offset: case sb.offset != tb.offset:
return false return false
@ -847,8 +965,8 @@ func (s *Sparse) Equals(t *Sparse) bool {
return false return false
} }
sb = sb.next sb = s.next(sb)
tb = tb.next tb = t.next(tb)
} }
} }
@ -913,7 +1031,7 @@ func (s *Sparse) BitString() string {
// //
func (s *Sparse) GoString() string { func (s *Sparse) GoString() string {
var buf bytes.Buffer var buf bytes.Buffer
for b := s.start(); b != &s.root; b = b.next { for b := s.first(); b != &none; b = s.next(b) {
fmt.Fprintf(&buf, "block %p {offset=%d next=%p prev=%p", fmt.Fprintf(&buf, "block %p {offset=%d next=%p prev=%p",
b, b.offset, b.next, b.prev) b, b.offset, b.next, b.prev)
for _, w := range b.bits { for _, w := range b.bits {
@ -937,13 +1055,18 @@ func (s *Sparse) AppendTo(slice []int) []int {
// check returns an error if the representation invariants of s are violated. // check returns an error if the representation invariants of s are violated.
func (s *Sparse) check() error { func (s *Sparse) check() error {
if !s.root.empty() { s.init()
return fmt.Errorf("non-empty root block") if s.root.empty() {
// An empty set must have only the root block with offset MaxInt.
if s.root.next != &s.root {
return fmt.Errorf("multiple blocks with empty root block")
} }
if s.root.offset != 0 { if s.root.offset != MaxInt {
return fmt.Errorf("root block has non-zero offset %d", s.root.offset) return fmt.Errorf("empty set has offset %d, should be MaxInt", s.root.offset)
} }
for b := s.start(); b != &s.root; b = b.next { return nil
}
for b := s.first(); ; b = s.next(b) {
if b.offset%bitsPerBlock != 0 { if b.offset%bitsPerBlock != 0 {
return fmt.Errorf("bad offset modulo: %d", b.offset) return fmt.Errorf("bad offset modulo: %d", b.offset)
} }
@ -956,11 +1079,12 @@ func (s *Sparse) check() error {
if b.next.prev != b { if b.next.prev != b {
return fmt.Errorf("bad next.prev link") return fmt.Errorf("bad next.prev link")
} }
if b.prev != &s.root { if b.next == &s.root {
if b.offset <= b.prev.offset { break
return fmt.Errorf("bad offset order: b.offset=%d, prev.offset=%d",
b.offset, b.prev.offset)
} }
if b.offset >= b.next.offset {
return fmt.Errorf("bad offset order: b.offset=%d, b.next.offset=%d",
b.offset, b.next.offset)
} }
} }
return nil return nil

View File

@ -49,6 +49,8 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added
lastImport = -1 // index in f.Decls of the file's final import decl lastImport = -1 // index in f.Decls of the file's final import decl
impDecl *ast.GenDecl // import decl containing the best match impDecl *ast.GenDecl // import decl containing the best match
impIndex = -1 // spec index in impDecl containing the best match impIndex = -1 // spec index in impDecl containing the best match
isThirdPartyPath = isThirdParty(ipath)
) )
for i, decl := range f.Decls { for i, decl := range f.Decls {
gen, ok := decl.(*ast.GenDecl) gen, ok := decl.(*ast.GenDecl)
@ -65,15 +67,27 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added
impDecl = gen impDecl = gen
} }
// Compute longest shared prefix with imports in this group. // Compute longest shared prefix with imports in this group and find best
// matched import spec.
// 1. Always prefer import spec with longest shared prefix.
// 2. While match length is 0,
// - for stdlib package: prefer first import spec.
// - for third party package: prefer first third party import spec.
// We cannot use last import spec as best match for third party package
// because grouped imports are usually placed last by goimports -local
// flag.
// See issue #19190.
seenAnyThirdParty := false
for j, spec := range gen.Specs { for j, spec := range gen.Specs {
impspec := spec.(*ast.ImportSpec) impspec := spec.(*ast.ImportSpec)
n := matchLen(importPath(impspec), ipath) p := importPath(impspec)
if n > bestMatch { n := matchLen(p, ipath)
if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
bestMatch = n bestMatch = n
impDecl = gen impDecl = gen
impIndex = j impIndex = j
} }
seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
} }
} }
} }
@ -175,6 +189,12 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added
return true return true
} }
func isThirdParty(importPath string) bool {
// Third party package import path usually contains "." (".com", ".org", ...)
// This logic is taken from golang.org/x/tools/imports package.
return strings.Contains(importPath, ".")
}
// DeleteImport deletes the import path from the file f, if present. // DeleteImport deletes the import path from the file f, if present.
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) { func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
return DeleteNamedImport(fset, f, "", path) return DeleteNamedImport(fset, f, "", path)

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build linux,!appengine darwin // +build linux darwin
// +build !appengine
package imports package imports

View File

@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style // Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// +build linux,!appengine darwin freebsd openbsd netbsd // +build linux darwin freebsd openbsd netbsd
// +build !appengine
package imports package imports

View File

@ -776,7 +776,7 @@ func findImportGoPath(pkgName string, symbols map[string]bool, filename string)
sort.Sort(byImportPathShortLength(candidates)) sort.Sort(byImportPathShortLength(candidates))
if Debug { if Debug {
for i, pkg := range candidates { for i, pkg := range candidates {
log.Printf("%s candidate %d/%d: %v", pkgName, i+1, len(candidates), pkg.importPathShort) log.Printf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), pkg.importPathShort, pkg.dir)
} }
} }

View File

@ -53,6 +53,7 @@ func main() {
mustOpen(api("go1.6.txt")), mustOpen(api("go1.6.txt")),
mustOpen(api("go1.7.txt")), mustOpen(api("go1.7.txt")),
mustOpen(api("go1.8.txt")), mustOpen(api("go1.8.txt")),
mustOpen(api("go1.9.txt")),
) )
sc := bufio.NewScanner(f) sc := bufio.NewScanner(f)
fullImport := map[string]string{} // "zip.NewReader" => "archive/zip" fullImport := map[string]string{} // "zip.NewReader" => "archive/zip"

View File

@ -23,6 +23,8 @@ var stdlib = map[string]string{
"asn1.Enumerated": "encoding/asn1", "asn1.Enumerated": "encoding/asn1",
"asn1.Flag": "encoding/asn1", "asn1.Flag": "encoding/asn1",
"asn1.Marshal": "encoding/asn1", "asn1.Marshal": "encoding/asn1",
"asn1.NullBytes": "encoding/asn1",
"asn1.NullRawValue": "encoding/asn1",
"asn1.ObjectIdentifier": "encoding/asn1", "asn1.ObjectIdentifier": "encoding/asn1",
"asn1.RawContent": "encoding/asn1", "asn1.RawContent": "encoding/asn1",
"asn1.RawValue": "encoding/asn1", "asn1.RawValue": "encoding/asn1",
@ -35,6 +37,7 @@ var stdlib = map[string]string{
"asn1.TagGeneralizedTime": "encoding/asn1", "asn1.TagGeneralizedTime": "encoding/asn1",
"asn1.TagIA5String": "encoding/asn1", "asn1.TagIA5String": "encoding/asn1",
"asn1.TagInteger": "encoding/asn1", "asn1.TagInteger": "encoding/asn1",
"asn1.TagNull": "encoding/asn1",
"asn1.TagOID": "encoding/asn1", "asn1.TagOID": "encoding/asn1",
"asn1.TagOctetString": "encoding/asn1", "asn1.TagOctetString": "encoding/asn1",
"asn1.TagPrintableString": "encoding/asn1", "asn1.TagPrintableString": "encoding/asn1",
@ -177,7 +180,9 @@ var stdlib = map[string]string{
"base32.NewDecoder": "encoding/base32", "base32.NewDecoder": "encoding/base32",
"base32.NewEncoder": "encoding/base32", "base32.NewEncoder": "encoding/base32",
"base32.NewEncoding": "encoding/base32", "base32.NewEncoding": "encoding/base32",
"base32.NoPadding": "encoding/base32",
"base32.StdEncoding": "encoding/base32", "base32.StdEncoding": "encoding/base32",
"base32.StdPadding": "encoding/base32",
"base64.CorruptInputError": "encoding/base64", "base64.CorruptInputError": "encoding/base64",
"base64.Encoding": "encoding/base64", "base64.Encoding": "encoding/base64",
"base64.NewDecoder": "encoding/base64", "base64.NewDecoder": "encoding/base64",
@ -229,6 +234,41 @@ var stdlib = map[string]string{
"binary.Uvarint": "encoding/binary", "binary.Uvarint": "encoding/binary",
"binary.Varint": "encoding/binary", "binary.Varint": "encoding/binary",
"binary.Write": "encoding/binary", "binary.Write": "encoding/binary",
"bits.LeadingZeros": "math/bits",
"bits.LeadingZeros16": "math/bits",
"bits.LeadingZeros32": "math/bits",
"bits.LeadingZeros64": "math/bits",
"bits.LeadingZeros8": "math/bits",
"bits.Len": "math/bits",
"bits.Len16": "math/bits",
"bits.Len32": "math/bits",
"bits.Len64": "math/bits",
"bits.Len8": "math/bits",
"bits.OnesCount": "math/bits",
"bits.OnesCount16": "math/bits",
"bits.OnesCount32": "math/bits",
"bits.OnesCount64": "math/bits",
"bits.OnesCount8": "math/bits",
"bits.Reverse": "math/bits",
"bits.Reverse16": "math/bits",
"bits.Reverse32": "math/bits",
"bits.Reverse64": "math/bits",
"bits.Reverse8": "math/bits",
"bits.ReverseBytes": "math/bits",
"bits.ReverseBytes16": "math/bits",
"bits.ReverseBytes32": "math/bits",
"bits.ReverseBytes64": "math/bits",
"bits.RotateLeft": "math/bits",
"bits.RotateLeft16": "math/bits",
"bits.RotateLeft32": "math/bits",
"bits.RotateLeft64": "math/bits",
"bits.RotateLeft8": "math/bits",
"bits.TrailingZeros": "math/bits",
"bits.TrailingZeros16": "math/bits",
"bits.TrailingZeros32": "math/bits",
"bits.TrailingZeros64": "math/bits",
"bits.TrailingZeros8": "math/bits",
"bits.UintSize": "math/bits",
"bufio.ErrAdvanceTooFar": "bufio", "bufio.ErrAdvanceTooFar": "bufio",
"bufio.ErrBufferFull": "bufio", "bufio.ErrBufferFull": "bufio",
"bufio.ErrFinalToken": "bufio", "bufio.ErrFinalToken": "bufio",
@ -471,6 +511,10 @@ var stdlib = map[string]string{
"crc64.Size": "hash/crc64", "crc64.Size": "hash/crc64",
"crc64.Table": "hash/crc64", "crc64.Table": "hash/crc64",
"crc64.Update": "hash/crc64", "crc64.Update": "hash/crc64",
"crypto.BLAKE2b_256": "crypto",
"crypto.BLAKE2b_384": "crypto",
"crypto.BLAKE2b_512": "crypto",
"crypto.BLAKE2s_256": "crypto",
"crypto.Decrypter": "crypto", "crypto.Decrypter": "crypto",
"crypto.DecrypterOpts": "crypto", "crypto.DecrypterOpts": "crypto",
"crypto.Hash": "crypto", "crypto.Hash": "crypto",
@ -552,6 +596,7 @@ var stdlib = map[string]string{
"driver.DefaultParameterConverter": "database/sql/driver", "driver.DefaultParameterConverter": "database/sql/driver",
"driver.Driver": "database/sql/driver", "driver.Driver": "database/sql/driver",
"driver.ErrBadConn": "database/sql/driver", "driver.ErrBadConn": "database/sql/driver",
"driver.ErrRemoveArgument": "database/sql/driver",
"driver.ErrSkip": "database/sql/driver", "driver.ErrSkip": "database/sql/driver",
"driver.Execer": "database/sql/driver", "driver.Execer": "database/sql/driver",
"driver.ExecerContext": "database/sql/driver", "driver.ExecerContext": "database/sql/driver",
@ -560,6 +605,7 @@ var stdlib = map[string]string{
"driver.IsValue": "database/sql/driver", "driver.IsValue": "database/sql/driver",
"driver.IsolationLevel": "database/sql/driver", "driver.IsolationLevel": "database/sql/driver",
"driver.NamedValue": "database/sql/driver", "driver.NamedValue": "database/sql/driver",
"driver.NamedValueChecker": "database/sql/driver",
"driver.NotNull": "database/sql/driver", "driver.NotNull": "database/sql/driver",
"driver.Null": "database/sql/driver", "driver.Null": "database/sql/driver",
"driver.Pinger": "database/sql/driver", "driver.Pinger": "database/sql/driver",
@ -1690,6 +1736,7 @@ var stdlib = map[string]string{
"expvar.Var": "expvar", "expvar.Var": "expvar",
"fcgi.ErrConnClosed": "net/http/fcgi", "fcgi.ErrConnClosed": "net/http/fcgi",
"fcgi.ErrRequestAborted": "net/http/fcgi", "fcgi.ErrRequestAborted": "net/http/fcgi",
"fcgi.ProcessEnv": "net/http/fcgi",
"fcgi.Serve": "net/http/fcgi", "fcgi.Serve": "net/http/fcgi",
"filepath.Abs": "path/filepath", "filepath.Abs": "path/filepath",
"filepath.Base": "path/filepath", "filepath.Base": "path/filepath",
@ -1796,6 +1843,8 @@ var stdlib = map[string]string{
"fmt.Sscanln": "fmt", "fmt.Sscanln": "fmt",
"fmt.State": "fmt", "fmt.State": "fmt",
"fmt.Stringer": "fmt", "fmt.Stringer": "fmt",
"fnv.New128": "hash/fnv",
"fnv.New128a": "hash/fnv",
"fnv.New32": "hash/fnv", "fnv.New32": "hash/fnv",
"fnv.New32a": "hash/fnv", "fnv.New32a": "hash/fnv",
"fnv.New64": "hash/fnv", "fnv.New64": "hash/fnv",
@ -1954,6 +2003,7 @@ var stdlib = map[string]string{
"http.ServeContent": "net/http", "http.ServeContent": "net/http",
"http.ServeFile": "net/http", "http.ServeFile": "net/http",
"http.ServeMux": "net/http", "http.ServeMux": "net/http",
"http.ServeTLS": "net/http",
"http.Server": "net/http", "http.Server": "net/http",
"http.ServerContextKey": "net/http", "http.ServerContextKey": "net/http",
"http.SetCookie": "net/http", "http.SetCookie": "net/http",
@ -2205,6 +2255,7 @@ var stdlib = map[string]string{
"json.Unmarshaler": "encoding/json", "json.Unmarshaler": "encoding/json",
"json.UnsupportedTypeError": "encoding/json", "json.UnsupportedTypeError": "encoding/json",
"json.UnsupportedValueError": "encoding/json", "json.UnsupportedValueError": "encoding/json",
"json.Valid": "encoding/json",
"jsonrpc.Dial": "net/rpc/jsonrpc", "jsonrpc.Dial": "net/rpc/jsonrpc",
"jsonrpc.NewClient": "net/rpc/jsonrpc", "jsonrpc.NewClient": "net/rpc/jsonrpc",
"jsonrpc.NewClientCodec": "net/rpc/jsonrpc", "jsonrpc.NewClientCodec": "net/rpc/jsonrpc",
@ -2402,6 +2453,7 @@ var stdlib = map[string]string{
"md5.Sum": "crypto/md5", "md5.Sum": "crypto/md5",
"mime.AddExtensionType": "mime", "mime.AddExtensionType": "mime",
"mime.BEncoding": "mime", "mime.BEncoding": "mime",
"mime.ErrInvalidMediaParameter": "mime",
"mime.ExtensionsByType": "mime", "mime.ExtensionsByType": "mime",
"mime.FormatMediaType": "mime", "mime.FormatMediaType": "mime",
"mime.ParseMediaType": "mime", "mime.ParseMediaType": "mime",
@ -2409,6 +2461,7 @@ var stdlib = map[string]string{
"mime.TypeByExtension": "mime", "mime.TypeByExtension": "mime",
"mime.WordDecoder": "mime", "mime.WordDecoder": "mime",
"mime.WordEncoder": "mime", "mime.WordEncoder": "mime",
"multipart.ErrMessageTooLarge": "mime/multipart",
"multipart.File": "mime/multipart", "multipart.File": "mime/multipart",
"multipart.FileHeader": "mime/multipart", "multipart.FileHeader": "mime/multipart",
"multipart.Form": "mime/multipart", "multipart.Form": "mime/multipart",
@ -2753,20 +2806,29 @@ var stdlib = map[string]string{
"png.DefaultCompression": "image/png", "png.DefaultCompression": "image/png",
"png.Encode": "image/png", "png.Encode": "image/png",
"png.Encoder": "image/png", "png.Encoder": "image/png",
"png.EncoderBuffer": "image/png",
"png.EncoderBufferPool": "image/png",
"png.FormatError": "image/png", "png.FormatError": "image/png",
"png.NoCompression": "image/png", "png.NoCompression": "image/png",
"png.UnsupportedError": "image/png", "png.UnsupportedError": "image/png",
"pprof.Cmdline": "net/http/pprof", "pprof.Cmdline": "net/http/pprof",
"pprof.Do": "runtime/pprof",
"pprof.ForLabels": "runtime/pprof",
"pprof.Handler": "net/http/pprof", "pprof.Handler": "net/http/pprof",
"pprof.Index": "net/http/pprof", "pprof.Index": "net/http/pprof",
"pprof.Label": "runtime/pprof",
"pprof.LabelSet": "runtime/pprof",
"pprof.Labels": "runtime/pprof",
"pprof.Lookup": "runtime/pprof", "pprof.Lookup": "runtime/pprof",
"pprof.NewProfile": "runtime/pprof", "pprof.NewProfile": "runtime/pprof",
// "pprof.Profile" is ambiguous // "pprof.Profile" is ambiguous
"pprof.Profiles": "runtime/pprof", "pprof.Profiles": "runtime/pprof",
"pprof.SetGoroutineLabels": "runtime/pprof",
"pprof.StartCPUProfile": "runtime/pprof", "pprof.StartCPUProfile": "runtime/pprof",
"pprof.StopCPUProfile": "runtime/pprof", "pprof.StopCPUProfile": "runtime/pprof",
"pprof.Symbol": "net/http/pprof", "pprof.Symbol": "net/http/pprof",
"pprof.Trace": "net/http/pprof", "pprof.Trace": "net/http/pprof",
"pprof.WithLabels": "runtime/pprof",
"pprof.WriteHeapProfile": "runtime/pprof", "pprof.WriteHeapProfile": "runtime/pprof",
"printer.CommentedNode": "go/printer", "printer.CommentedNode": "go/printer",
"printer.Config": "go/printer", "printer.Config": "go/printer",
@ -2844,6 +2906,7 @@ var stdlib = map[string]string{
"reflect.MakeChan": "reflect", "reflect.MakeChan": "reflect",
"reflect.MakeFunc": "reflect", "reflect.MakeFunc": "reflect",
"reflect.MakeMap": "reflect", "reflect.MakeMap": "reflect",
"reflect.MakeMapWithSize": "reflect",
"reflect.MakeSlice": "reflect", "reflect.MakeSlice": "reflect",
"reflect.Map": "reflect", "reflect.Map": "reflect",
"reflect.MapOf": "reflect", "reflect.MapOf": "reflect",
@ -3071,9 +3134,11 @@ var stdlib = map[string]string{
"sort.Strings": "sort", "sort.Strings": "sort",
"sort.StringsAreSorted": "sort", "sort.StringsAreSorted": "sort",
"sql.ColumnType": "database/sql", "sql.ColumnType": "database/sql",
"sql.Conn": "database/sql",
"sql.DB": "database/sql", "sql.DB": "database/sql",
"sql.DBStats": "database/sql", "sql.DBStats": "database/sql",
"sql.Drivers": "database/sql", "sql.Drivers": "database/sql",
"sql.ErrConnDone": "database/sql",
"sql.ErrNoRows": "database/sql", "sql.ErrNoRows": "database/sql",
"sql.ErrTxDone": "database/sql", "sql.ErrTxDone": "database/sql",
"sql.IsolationLevel": "database/sql", "sql.IsolationLevel": "database/sql",
@ -3092,6 +3157,7 @@ var stdlib = map[string]string{
"sql.NullInt64": "database/sql", "sql.NullInt64": "database/sql",
"sql.NullString": "database/sql", "sql.NullString": "database/sql",
"sql.Open": "database/sql", "sql.Open": "database/sql",
"sql.Out": "database/sql",
"sql.RawBytes": "database/sql", "sql.RawBytes": "database/sql",
"sql.Register": "database/sql", "sql.Register": "database/sql",
"sql.Result": "database/sql", "sql.Result": "database/sql",
@ -3193,6 +3259,7 @@ var stdlib = map[string]string{
"suffixarray.New": "index/suffixarray", "suffixarray.New": "index/suffixarray",
"sync.Cond": "sync", "sync.Cond": "sync",
"sync.Locker": "sync", "sync.Locker": "sync",
"sync.Map": "sync",
"sync.Mutex": "sync", "sync.Mutex": "sync",
"sync.NewCond": "sync", "sync.NewCond": "sync",
"sync.Once": "sync", "sync.Once": "sync",
@ -3772,6 +3839,7 @@ var stdlib = map[string]string{
"syscall.Cmsghdr": "syscall", "syscall.Cmsghdr": "syscall",
"syscall.CommandLineToArgv": "syscall", "syscall.CommandLineToArgv": "syscall",
"syscall.ComputerName": "syscall", "syscall.ComputerName": "syscall",
"syscall.Conn": "syscall",
"syscall.Connect": "syscall", "syscall.Connect": "syscall",
"syscall.ConnectEx": "syscall", "syscall.ConnectEx": "syscall",
"syscall.ConvertSidToStringSid": "syscall", "syscall.ConvertSidToStringSid": "syscall",
@ -6581,6 +6649,7 @@ var stdlib = map[string]string{
"syscall.RUSAGE_SELF": "syscall", "syscall.RUSAGE_SELF": "syscall",
"syscall.RUSAGE_THREAD": "syscall", "syscall.RUSAGE_THREAD": "syscall",
"syscall.Radvisory_t": "syscall", "syscall.Radvisory_t": "syscall",
"syscall.RawConn": "syscall",
"syscall.RawSockaddr": "syscall", "syscall.RawSockaddr": "syscall",
"syscall.RawSockaddrAny": "syscall", "syscall.RawSockaddrAny": "syscall",
"syscall.RawSockaddrDatalink": "syscall", "syscall.RawSockaddrDatalink": "syscall",
@ -8357,6 +8426,7 @@ var stdlib = map[string]string{
"syscall.WSADESCRIPTION_LEN": "syscall", "syscall.WSADESCRIPTION_LEN": "syscall",
"syscall.WSAData": "syscall", "syscall.WSAData": "syscall",
"syscall.WSAEACCES": "syscall", "syscall.WSAEACCES": "syscall",
"syscall.WSAECONNABORTED": "syscall",
"syscall.WSAECONNRESET": "syscall", "syscall.WSAECONNRESET": "syscall",
"syscall.WSAEnumProtocols": "syscall", "syscall.WSAEnumProtocols": "syscall",
"syscall.WSAID_CONNECTEX": "syscall", "syscall.WSAID_CONNECTEX": "syscall",
@ -8478,6 +8548,7 @@ var stdlib = map[string]string{
"template.ErrOutputContext": "html/template", "template.ErrOutputContext": "html/template",
"template.ErrPartialCharset": "html/template", "template.ErrPartialCharset": "html/template",
"template.ErrPartialEscape": "html/template", "template.ErrPartialEscape": "html/template",
"template.ErrPredefinedEscaper": "html/template",
"template.ErrRangeLoopReentry": "html/template", "template.ErrRangeLoopReentry": "html/template",
"template.ErrSlashAmbig": "html/template", "template.ErrSlashAmbig": "html/template",
"template.Error": "html/template", "template.Error": "html/template",
@ -8871,6 +8942,7 @@ var stdlib = map[string]string{
"types.SendRecv": "go/types", "types.SendRecv": "go/types",
"types.Signature": "go/types", "types.Signature": "go/types",
"types.Sizes": "go/types", "types.Sizes": "go/types",
"types.SizesFor": "go/types",
"types.Slice": "go/types", "types.Slice": "go/types",
"types.StdSizes": "go/types", "types.StdSizes": "go/types",
"types.String": "go/types", "types.String": "go/types",

View File

@ -11,6 +11,14 @@ import (
) )
var stdlibDescs = map[string]Description{ var stdlibDescs = map[string]Description{
"errors.New": Description{Pure: true},
"fmt.Errorf": Description{Pure: true},
"fmt.Sprintf": Description{Pure: true},
"fmt.Sprint": Description{Pure: true},
"sort.Reverse": Description{Pure: true},
"strings.Map": Description{Pure: true}, "strings.Map": Description{Pure: true},
"strings.Repeat": Description{Pure: true}, "strings.Repeat": Description{Pure: true},
"strings.Replace": Description{Pure: true}, "strings.Replace": Description{Pure: true},

View File

@ -6,9 +6,7 @@ import (
"go/constant" "go/constant"
"go/token" "go/token"
"go/types" "go/types"
"math"
"reflect" "reflect"
"strconv"
"strings" "strings"
"honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/internal/sharedcheck"
@ -42,7 +40,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"S1002": c.LintIfBoolCmp, "S1002": c.LintIfBoolCmp,
"S1003": c.LintStringsContains, "S1003": c.LintStringsContains,
"S1004": c.LintBytesCompare, "S1004": c.LintBytesCompare,
"S1005": c.LintRanges, "S1005": c.LintUnnecessaryBlank,
"S1006": c.LintForTrue, "S1006": c.LintForTrue,
"S1007": c.LintRegexpRaw, "S1007": c.LintRegexpRaw,
"S1008": c.LintIfReturn, "S1008": c.LintIfReturn,
@ -51,22 +49,24 @@ func (c *Checker) Funcs() map[string]lint.Func {
"S1011": c.LintLoopAppend, "S1011": c.LintLoopAppend,
"S1012": c.LintTimeSince, "S1012": c.LintTimeSince,
"S1013": c.LintSimplerReturn, "S1013": c.LintSimplerReturn,
"S1014": c.LintReceiveIntoBlank, "S1014": nil,
"S1015": c.LintFormatInt, "S1015": nil,
"S1016": c.LintSimplerStructConversion, "S1016": c.LintSimplerStructConversion,
"S1017": c.LintTrim, "S1017": c.LintTrim,
"S1018": c.LintLoopSlide, "S1018": c.LintLoopSlide,
"S1019": c.LintMakeLenCap, "S1019": c.LintMakeLenCap,
"S1020": c.LintAssertNotNil, "S1020": c.LintAssertNotNil,
"S1021": c.LintDeclareAssign, "S1021": c.LintDeclareAssign,
"S1022": c.LintBlankOK, "S1022": nil,
"S1023": c.LintRedundantBreak, "S1023": c.LintRedundantBreak,
"S1024": c.LintTimeUntil, "S1024": c.LintTimeUntil,
"S1025": c.LintRedundantSprintf, "S1025": c.LintRedundantSprintf,
"S1026": c.LintStringCopy, "S1026": c.LintStringCopy,
"S1027": c.LintRedundantReturn, "S1027": nil,
"S1028": c.LintErrorsNewSprintf, "S1028": c.LintErrorsNewSprintf,
"S1029": c.LintRangeStringRunes, "S1029": c.LintRangeStringRunes,
"S1030": c.LintBytesBufferConversions,
"S1031": c.LintNilCheckAroundRange,
} }
} }
@ -247,6 +247,36 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) {
} }
} }
func (c *Checker) LintBytesBufferConversions(j *lint.Job) {
fn := func(node ast.Node) bool {
call, ok := node.(*ast.CallExpr)
if !ok || len(call.Args) != 1 {
return true
}
argCall, ok := call.Args[0].(*ast.CallExpr)
if !ok {
return true
}
sel, ok := argCall.Fun.(*ast.SelectorExpr)
if !ok {
return true
}
typ := j.Program.Info.TypeOf(call.Fun)
if typ == types.Universe.Lookup("string").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).Bytes") {
j.Errorf(call, "should use %v.String() instead of %v", j.Render(sel.X), j.Render(call))
} else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).String") {
j.Errorf(call, "should use %v.Bytes() instead of %v", j.Render(sel.X), j.Render(call))
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintStringsContains(j *lint.Job) { func (c *Checker) LintStringsContains(j *lint.Job) {
// map of value to token to bool value // map of value to token to bool value
allowed := map[int64]map[token.Token]bool{ allowed := map[int64]map[token.Token]bool{
@ -352,23 +382,6 @@ func (c *Checker) LintBytesCompare(j *lint.Job) {
} }
} }
func (c *Checker) LintRanges(j *lint.Job) {
fn := func(node ast.Node) bool {
rs, ok := node.(*ast.RangeStmt)
if !ok {
return true
}
if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) {
j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`")
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintForTrue(j *lint.Job) { func (c *Checker) LintForTrue(j *lint.Job) {
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
loop, ok := node.(*ast.ForStmt) loop, ok := node.(*ast.ForStmt)
@ -941,14 +954,44 @@ func (c *Checker) LintSimplerReturn(j *lint.Job) {
} }
} }
func (c *Checker) LintReceiveIntoBlank(j *lint.Job) { func (c *Checker) LintUnnecessaryBlank(j *lint.Job) {
fn := func(node ast.Node) bool { fn1 := func(node ast.Node) {
assign, ok := node.(*ast.AssignStmt)
if !ok {
return
}
if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 {
return
}
if !lint.IsBlank(assign.Lhs[1]) {
return
}
switch rhs := assign.Rhs[0].(type) {
case *ast.IndexExpr:
// The type-checker should make sure that it's a map, but
// let's be safe.
if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok {
return
}
case *ast.UnaryExpr:
if rhs.Op != token.ARROW {
return
}
default:
return
}
cp := *assign
cp.Lhs = cp.Lhs[0:1]
j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign))
}
fn2 := func(node ast.Node) {
stmt, ok := node.(*ast.AssignStmt) stmt, ok := node.(*ast.AssignStmt)
if !ok { if !ok {
return true return
} }
if len(stmt.Lhs) != len(stmt.Rhs) { if len(stmt.Lhs) != len(stmt.Rhs) {
return true return
} }
for i, lh := range stmt.Lhs { for i, lh := range stmt.Lhs {
rh := stmt.Rhs[i] rh := stmt.Rhs[i]
@ -964,101 +1007,22 @@ func (c *Checker) LintReceiveIntoBlank(j *lint.Job) {
} }
j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'") j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'")
} }
return true
} }
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintFormatInt(j *lint.Job) { fn3 := func(node ast.Node) {
checkBasic := func(v ast.Expr) bool { rs, ok := node.(*ast.RangeStmt)
typ, ok := j.Program.Info.TypeOf(v).(*types.Basic)
if !ok { if !ok {
return false return
} }
return typ.Kind() == types.Int if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) {
j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`")
} }
checkConst := func(v *ast.Ident) bool {
c, ok := j.Program.Info.ObjectOf(v).(*types.Const)
if !ok {
return false
}
if c.Val().Kind() != constant.Int {
return false
}
i, _ := constant.Int64Val(c.Val())
return i <= math.MaxInt32
}
checkConstStrict := func(v *ast.Ident) bool {
if !checkConst(v) {
return false
}
basic, ok := j.Program.Info.ObjectOf(v).(*types.Const).Type().(*types.Basic)
return ok && basic.Kind() == types.UntypedInt
} }
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
call, ok := node.(*ast.CallExpr) fn1(node)
if !ok { fn2(node)
return true fn3(node)
}
if !j.IsCallToAST(call, "strconv.FormatInt") {
return true
}
if len(call.Args) != 2 {
return true
}
if lit, ok := call.Args[1].(*ast.BasicLit); !ok || lit.Value != "10" {
return true
}
matches := false
switch v := call.Args[0].(type) {
case *ast.CallExpr:
if len(v.Args) != 1 {
return true
}
ident, ok := v.Fun.(*ast.Ident)
if !ok {
return true
}
obj, ok := j.Program.Info.ObjectOf(ident).(*types.TypeName)
if !ok || obj.Parent() != types.Universe || obj.Name() != "int64" {
return true
}
switch vv := v.Args[0].(type) {
case *ast.BasicLit:
i, _ := strconv.ParseInt(vv.Value, 10, 64)
if i <= math.MaxInt32 {
matches = true
}
case *ast.Ident:
if checkConst(vv) || checkBasic(v.Args[0]) {
matches = true
}
default:
if checkBasic(v.Args[0]) {
matches = true
}
}
case *ast.BasicLit:
if v.Kind != token.INT {
return true
}
i, _ := strconv.ParseInt(v.Value, 10, 64)
if i <= math.MaxInt32 {
matches = true
}
case *ast.Ident:
if checkConstStrict(v) {
matches = true
}
}
if matches {
j.Errorf(call, "should use strconv.Itoa instead of strconv.FormatInt")
}
return true return true
} }
for _, f := range c.filterGenerated(j.Program.Files) { for _, f := range c.filterGenerated(j.Program.Files) {
@ -1067,23 +1031,34 @@ func (c *Checker) LintFormatInt(j *lint.Job) {
} }
func (c *Checker) LintSimplerStructConversion(j *lint.Job) { func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
var skip ast.Node
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
// Do not suggest type conversion between pointers
if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND {
if lit, ok := unary.X.(*ast.CompositeLit); ok {
skip = lit
}
return true
}
if node == skip {
return true
}
lit, ok := node.(*ast.CompositeLit) lit, ok := node.(*ast.CompositeLit)
if !ok { if !ok {
return true return true
} }
typ1 := j.Program.Info.TypeOf(lit.Type) typ1, _ := j.Program.Info.TypeOf(lit.Type).(*types.Named)
if typ1 == nil { if typ1 == nil {
return true return true
} }
// FIXME support pointer to struct
s1, ok := typ1.Underlying().(*types.Struct) s1, ok := typ1.Underlying().(*types.Struct)
if !ok { if !ok {
return true return true
} }
n := s1.NumFields() var typ2 *types.Named
var typ2 types.Type
var ident *ast.Ident var ident *ast.Ident
getSelType := func(expr ast.Expr) (types.Type, *ast.Ident, bool) { getSelType := func(expr ast.Expr) (types.Type, *ast.Ident, bool) {
sel, ok := expr.(*ast.SelectorExpr) sel, ok := expr.(*ast.SelectorExpr)
@ -1100,8 +1075,10 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
if len(lit.Elts) == 0 { if len(lit.Elts) == 0 {
return true return true
} }
if s1.NumFields() != len(lit.Elts) {
return true
}
for i, elt := range lit.Elts { for i, elt := range lit.Elts {
n--
var t types.Type var t types.Type
var id *ast.Ident var id *ast.Ident
var ok bool var ok bool
@ -1129,21 +1106,27 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
if !ok { if !ok {
return true return true
} }
if typ2 != nil && typ2 != t { // All fields must be initialized from the same object
return true
}
if ident != nil && ident.Obj != id.Obj { if ident != nil && ident.Obj != id.Obj {
return true return true
} }
typ2 = t typ2, _ = t.(*types.Named)
if typ2 == nil {
return true
}
ident = id ident = id
} }
if n != 0 { if typ2 == nil {
return true return true
} }
if typ2 == nil { if typ1.Obj().Pkg() != typ2.Obj().Pkg() {
// Do not suggest type conversions between different
// packages. Types in different packages might only match
// by coincidence. Furthermore, if the dependency ever
// adds more fields to its type, it could break the code
// that relies on the type conversion to work.
return true return true
} }
@ -1157,7 +1140,8 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) {
if !structsIdentical(s1, s2) { if !structsIdentical(s1, s2) {
return true return true
} }
j.Errorf(node, "should use type conversion instead of struct literal") j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal",
ident.Name, typ2.Obj().Name(), typ1.Obj().Name())
return true return true
} }
for _, f := range c.filterGenerated(j.Program.Files) { for _, f := range c.filterGenerated(j.Program.Files) {
@ -1598,56 +1582,52 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) {
} }
} }
func (c *Checker) LintBlankOK(j *lint.Job) {
fn := func(node ast.Node) bool {
assign, ok := node.(*ast.AssignStmt)
if !ok {
return true
}
if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 {
return true
}
if !lint.IsBlank(assign.Lhs[1]) {
return true
}
switch rhs := assign.Rhs[0].(type) {
case *ast.IndexExpr:
// The type-checker should make sure that it's a map, but
// let's be safe.
if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok {
return true
}
case *ast.UnaryExpr:
if rhs.Op != token.ARROW {
return true
}
default:
return true
}
cp := *assign
cp.Lhs = cp.Lhs[0:1]
j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign))
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintRedundantBreak(j *lint.Job) { func (c *Checker) LintRedundantBreak(j *lint.Job) {
fn := func(node ast.Node) bool { fn1 := func(node ast.Node) {
clause, ok := node.(*ast.CaseClause) clause, ok := node.(*ast.CaseClause)
if !ok { if !ok {
return true return
} }
if len(clause.Body) < 2 { if len(clause.Body) < 2 {
return true return
} }
branch, ok := clause.Body[len(clause.Body)-1].(*ast.BranchStmt) branch, ok := clause.Body[len(clause.Body)-1].(*ast.BranchStmt)
if !ok || branch.Tok != token.BREAK || branch.Label != nil { if !ok || branch.Tok != token.BREAK || branch.Label != nil {
return true return
} }
j.Errorf(branch, "redundant break statement") j.Errorf(branch, "redundant break statement")
return
}
fn2 := func(node ast.Node) {
var ret *ast.FieldList
var body *ast.BlockStmt
switch x := node.(type) {
case *ast.FuncDecl:
ret = x.Type.Results
body = x.Body
case *ast.FuncLit:
ret = x.Type.Results
body = x.Body
default:
return
}
// if the func has results, a return can't be redundant.
// similarly, if there are no statements, there can be
// no return.
if ret != nil || body == nil || len(body.List) < 1 {
return
}
rst, ok := body.List[len(body.List)-1].(*ast.ReturnStmt)
if !ok {
return
}
// we don't need to check rst.Results as we already
// checked x.Type.Results to be nil.
j.Errorf(rst, "redundant return statement")
}
fn := func(node ast.Node) bool {
fn1(node)
fn2(node)
return true return true
} }
for _, f := range c.filterGenerated(j.Program.Files) { for _, f := range c.filterGenerated(j.Program.Files) {
@ -1797,40 +1777,6 @@ func (c *Checker) LintStringCopy(j *lint.Job) {
} }
} }
func (c *Checker) LintRedundantReturn(j *lint.Job) {
fn := func(node ast.Node) bool {
var ret *ast.FieldList
var body *ast.BlockStmt
switch x := node.(type) {
case *ast.FuncDecl:
ret = x.Type.Results
body = x.Body
case *ast.FuncLit:
ret = x.Type.Results
body = x.Body
default:
return true
}
// if the func has results, a return can't be redundant.
// similarly, if there are no statements, there can be
// no return.
if ret != nil || body == nil || len(body.List) < 1 {
return true
}
rst, ok := body.List[len(body.List)-1].(*ast.ReturnStmt)
if !ok {
return true
}
// we don't need to check rst.Results as we already
// checked x.Type.Results to be nil.
j.Errorf(rst, "redundant return statement")
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}
func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { func (c *Checker) LintErrorsNewSprintf(j *lint.Job) {
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
if !j.IsCallToAST(node, "errors.New") { if !j.IsCallToAST(node, "errors.New") {
@ -1851,3 +1797,45 @@ func (c *Checker) LintErrorsNewSprintf(j *lint.Job) {
func (c *Checker) LintRangeStringRunes(j *lint.Job) { func (c *Checker) LintRangeStringRunes(j *lint.Job) {
sharedcheck.CheckRangeStringRunes(c.nodeFns, j) sharedcheck.CheckRangeStringRunes(c.nodeFns, j)
} }
func (c *Checker) LintNilCheckAroundRange(j *lint.Job) {
fn := func(node ast.Node) bool {
ifstmt, ok := node.(*ast.IfStmt)
if !ok {
return true
}
cond, ok := ifstmt.Cond.(*ast.BinaryExpr)
if !ok {
return true
}
if cond.Op != token.NEQ || !j.IsNil(cond.Y) || len(ifstmt.Body.List) != 1 {
return true
}
loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt)
if !ok {
return true
}
ifXIdent, ok := cond.X.(*ast.Ident)
if !ok {
return true
}
rangeXIdent, ok := loop.X.(*ast.Ident)
if !ok {
return true
}
if ifXIdent.Obj != rangeXIdent.Obj {
return true
}
switch j.Program.Info.TypeOf(rangeXIdent).(type) {
case *types.Slice, *types.Map:
j.Errorf(node, "unnecessary nil check around range")
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}

View File

@ -10,6 +10,7 @@ import (
"go/types" "go/types"
htmltemplate "html/template" htmltemplate "html/template"
"net/http" "net/http"
"regexp"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
@ -232,7 +233,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA1019": c.CheckDeprecated, "SA1019": c.CheckDeprecated,
"SA1020": c.callChecker(checkListenAddressRules), "SA1020": c.callChecker(checkListenAddressRules),
"SA1021": c.callChecker(checkBytesEqualIPRules), "SA1021": c.callChecker(checkBytesEqualIPRules),
"SA1022": c.CheckFlagUsage, "SA1022": nil,
"SA1023": c.CheckWriterBufferModified, "SA1023": c.CheckWriterBufferModified,
"SA1024": c.callChecker(checkUniqueCutsetRules), "SA1024": c.callChecker(checkUniqueCutsetRules),
@ -249,7 +250,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA4002": c.CheckDiffSizeComparison, "SA4002": c.CheckDiffSizeComparison,
"SA4003": c.CheckUnsignedComparison, "SA4003": c.CheckUnsignedComparison,
"SA4004": c.CheckIneffectiveLoop, "SA4004": c.CheckIneffectiveLoop,
"SA4005": c.CheckIneffecitiveFieldAssignments, "SA4005": c.CheckIneffectiveFieldAssignments,
"SA4006": c.CheckUnreadVariableValues, "SA4006": c.CheckUnreadVariableValues,
// "SA4007": c.CheckPredeterminedBooleanExprs, // "SA4007": c.CheckPredeterminedBooleanExprs,
"SA4007": nil, "SA4007": nil,
@ -263,6 +264,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA4015": c.callChecker(checkMathIntRules), "SA4015": c.callChecker(checkMathIntRules),
"SA4016": c.CheckSillyBitwiseOps, "SA4016": c.CheckSillyBitwiseOps,
"SA4017": c.CheckPureFunctions, "SA4017": c.CheckPureFunctions,
"SA4018": c.CheckSelfAssignment,
"SA5000": c.CheckNilMaps, "SA5000": c.CheckNilMaps,
"SA5001": c.CheckEarlyDefer, "SA5001": c.CheckEarlyDefer,
@ -277,6 +279,7 @@ func (c *Checker) Funcs() map[string]lint.Func {
"SA6001": c.CheckMapBytesKey, "SA6001": c.CheckMapBytesKey,
"SA6002": c.callChecker(checkSyncPoolSizeRules), "SA6002": c.callChecker(checkSyncPoolSizeRules),
"SA6003": c.CheckRangeStringRunes, "SA6003": c.CheckRangeStringRunes,
"SA6004": nil,
"SA9000": nil, "SA9000": nil,
"SA9001": c.CheckDubiousDeferInChannelRangeLoop, "SA9001": c.CheckDubiousDeferInChannelRangeLoop,
@ -1137,13 +1140,20 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) {
} }
} }
// cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't
// want to flag.
var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`)
func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { func (c *Checker) CheckIneffectiveCopy(j *lint.Job) {
fn := func(node ast.Node) bool { fn := func(node ast.Node) bool {
if unary, ok := node.(*ast.UnaryExpr); ok { if unary, ok := node.(*ast.UnaryExpr); ok {
if _, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND {
ident, ok := star.X.(*ast.Ident)
if !ok || !cgoIdent.MatchString(ident.Name) {
j.Errorf(unary, "&*x will be simplified to x. It will not copy x.") j.Errorf(unary, "&*x will be simplified to x. It will not copy x.")
} }
} }
}
if star, ok := node.(*ast.StarExpr); ok { if star, ok := node.(*ast.StarExpr); ok {
if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND { if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND {
@ -1254,7 +1264,7 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) {
} }
} }
func (c *Checker) CheckIneffecitiveFieldAssignments(j *lint.Job) { func (c *Checker) CheckIneffectiveFieldAssignments(j *lint.Job) {
for _, ssafn := range j.Program.InitialFunctions { for _, ssafn := range j.Program.InitialFunctions {
// fset := j.Program.SSA.Fset // fset := j.Program.SSA.Fset
// if fset.File(f.File.Pos()) != fset.File(ssafn.Pos()) { // if fset.File(f.File.Pos()) != fset.File(ssafn.Pos()) {
@ -2558,46 +2568,6 @@ func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) {
} }
} }
func (c *Checker) CheckFlagUsage(j *lint.Job) {
for _, ssafn := range j.Program.InitialFunctions {
for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
store, ok := ins.(*ssa.Store)
if !ok {
continue
}
switch addr := store.Addr.(type) {
case *ssa.FieldAddr:
typ := addr.X.Type()
st := deref(typ).Underlying().(*types.Struct)
if types.TypeString(typ, nil) != "*flag.FlagSet" {
continue
}
if st.Field(addr.Field).Name() != "Usage" {
continue
}
case *ssa.Global:
if addr.Pkg.Pkg.Path() != "flag" || addr.Name() != "Usage" {
continue
}
default:
continue
}
fn := unwrapFunction(store.Val)
if fn == nil {
continue
}
for _, oblock := range fn.Blocks {
if hasCallTo(oblock, "os.Exit") {
j.Errorf(store, "the function assigned to Usage shouldn't call os.Exit, but it does")
break
}
}
}
}
}
}
func unwrapFunction(val ssa.Value) *ssa.Function { func unwrapFunction(val ssa.Value) *ssa.Function {
switch val := val.(type) { switch val := val.(type) {
case *ssa.Function: case *ssa.Function:
@ -2791,3 +2761,26 @@ func (c *Checker) CheckMapBytesKey(j *lint.Job) {
func (c *Checker) CheckRangeStringRunes(j *lint.Job) { func (c *Checker) CheckRangeStringRunes(j *lint.Job) {
sharedcheck.CheckRangeStringRunes(c.nodeFns, j) sharedcheck.CheckRangeStringRunes(c.nodeFns, j)
} }
func (c *Checker) CheckSelfAssignment(j *lint.Job) {
fn := func(node ast.Node) bool {
assign, ok := node.(*ast.AssignStmt)
if !ok {
return true
}
if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) {
return true
}
for i, stmt := range assign.Lhs {
rlh := j.Render(stmt)
rrh := j.Render(assign.Rhs[i])
if rlh == rrh {
j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh)
}
}
return true
}
for _, f := range c.filterGenerated(j.Program.Files) {
ast.Inspect(f, fn)
}
}

View File

@ -5,7 +5,7 @@
"importpath": "github.com/GoASTScanner/gas", "importpath": "github.com/GoASTScanner/gas",
"repository": "https://github.com/GoASTScanner/gas", "repository": "https://github.com/GoASTScanner/gas",
"vcs": "git", "vcs": "git",
"revision": "1beec25f7754273c9672a3368ea7048d4e73138e", "revision": "f22c701483ba201fbdb79c3667a28ef6a4e4a25c",
"branch": "master", "branch": "master",
"notests": true "notests": true
}, },
@ -17,6 +17,14 @@
"branch": "master", "branch": "master",
"notests": true "notests": true
}, },
{
"importpath": "github.com/alexkohler/nakedret",
"repository": "https://github.com/alexkohler/nakedret",
"vcs": "git",
"revision": "ca8b55b818e14bc0f1f52b714e7474634eadcd34",
"branch": "master",
"notests": true
},
{ {
"importpath": "github.com/client9/misspell", "importpath": "github.com/client9/misspell",
"repository": "https://github.com/client9/misspell", "repository": "https://github.com/client9/misspell",
@ -25,6 +33,14 @@
"branch": "master", "branch": "master",
"notests": true "notests": true
}, },
{
"importpath": "github.com/dnephin/govet",
"repository": "https://github.com/dnephin/govet",
"vcs": "git",
"revision": "4a96d43e39d340b63daa8bc5576985aa599885f6",
"branch": "fork",
"notests": true
},
{ {
"importpath": "github.com/golang/lint", "importpath": "github.com/golang/lint",
"repository": "https://github.com/golang/lint", "repository": "https://github.com/golang/lint",
@ -61,7 +77,15 @@
"importpath": "github.com/kisielk/gotool", "importpath": "github.com/kisielk/gotool",
"repository": "https://github.com/kisielk/gotool", "repository": "https://github.com/kisielk/gotool",
"vcs": "git", "vcs": "git",
"revision": "0de1eaf82fa3f583ce21fde859f1e7e0c5e9b220", "revision": "d6ce6262d87e3a4e153e86023ff56ae771554a41",
"branch": "master",
"notests": true
},
{
"importpath": "github.com/mdempsky/maligned",
"repository": "https://github.com/mdempsky/maligned",
"vcs": "git",
"revision": "08c8e9db1bce03f1af283686c0943fcb75f0109e",
"branch": "master", "branch": "master",
"notests": true "notests": true
}, },
@ -81,30 +105,6 @@
"branch": "master", "branch": "master",
"notests": true "notests": true
}, },
{
"importpath": "github.com/mvdan/interfacer",
"repository": "https://github.com/mvdan/interfacer",
"vcs": "git",
"revision": "22c51662ff476dfd97944f74db1b263ed920ee83",
"branch": "master",
"notests": true
},
{
"importpath": "github.com/mvdan/lint",
"repository": "https://github.com/mvdan/lint",
"vcs": "git",
"revision": "c9cbe299b369cbfea16318baaa037b19a69e45d2",
"branch": "master",
"notests": true
},
{
"importpath": "github.com/mvdan/unparam",
"repository": "https://github.com/mvdan/unparam",
"vcs": "git",
"revision": "d647bb803b10a6777ee4c6a176416b91fa14713e",
"branch": "master",
"notests": true
},
{ {
"importpath": "github.com/opennota/check", "importpath": "github.com/opennota/check",
"repository": "https://github.com/opennota/check", "repository": "https://github.com/opennota/check",
@ -141,7 +141,7 @@
"importpath": "golang.org/x/text/internal/gen", "importpath": "golang.org/x/text/internal/gen",
"repository": "https://go.googlesource.com/text", "repository": "https://go.googlesource.com/text",
"vcs": "git", "vcs": "git",
"revision": "cfdf022e86b4ecfb646e1efbd7db175dd623a8fa", "revision": "bd91bbf73e9a4a801adbfb97133c992678533126",
"branch": "master", "branch": "master",
"path": "internal/gen", "path": "internal/gen",
"notests": true "notests": true
@ -150,7 +150,7 @@
"importpath": "golang.org/x/text/internal/triegen", "importpath": "golang.org/x/text/internal/triegen",
"repository": "https://go.googlesource.com/text", "repository": "https://go.googlesource.com/text",
"vcs": "git", "vcs": "git",
"revision": "cfdf022e86b4ecfb646e1efbd7db175dd623a8fa", "revision": "bd91bbf73e9a4a801adbfb97133c992678533126",
"branch": "master", "branch": "master",
"path": "internal/triegen", "path": "internal/triegen",
"notests": true "notests": true
@ -159,7 +159,7 @@
"importpath": "golang.org/x/text/internal/ucd", "importpath": "golang.org/x/text/internal/ucd",
"repository": "https://go.googlesource.com/text", "repository": "https://go.googlesource.com/text",
"vcs": "git", "vcs": "git",
"revision": "cfdf022e86b4ecfb646e1efbd7db175dd623a8fa", "revision": "bd91bbf73e9a4a801adbfb97133c992678533126",
"branch": "master", "branch": "master",
"path": "internal/ucd", "path": "internal/ucd",
"notests": true "notests": true
@ -168,7 +168,7 @@
"importpath": "golang.org/x/text/transform", "importpath": "golang.org/x/text/transform",
"repository": "https://go.googlesource.com/text", "repository": "https://go.googlesource.com/text",
"vcs": "git", "vcs": "git",
"revision": "cfdf022e86b4ecfb646e1efbd7db175dd623a8fa", "revision": "bd91bbf73e9a4a801adbfb97133c992678533126",
"branch": "master", "branch": "master",
"path": "transform", "path": "transform",
"notests": true "notests": true
@ -177,7 +177,7 @@
"importpath": "golang.org/x/text/unicode/cldr", "importpath": "golang.org/x/text/unicode/cldr",
"repository": "https://go.googlesource.com/text", "repository": "https://go.googlesource.com/text",
"vcs": "git", "vcs": "git",
"revision": "cfdf022e86b4ecfb646e1efbd7db175dd623a8fa", "revision": "bd91bbf73e9a4a801adbfb97133c992678533126",
"branch": "master", "branch": "master",
"path": "unicode/cldr", "path": "unicode/cldr",
"notests": true "notests": true
@ -186,7 +186,7 @@
"importpath": "golang.org/x/text/width", "importpath": "golang.org/x/text/width",
"repository": "https://go.googlesource.com/text", "repository": "https://go.googlesource.com/text",
"vcs": "git", "vcs": "git",
"revision": "cfdf022e86b4ecfb646e1efbd7db175dd623a8fa", "revision": "bd91bbf73e9a4a801adbfb97133c992678533126",
"branch": "master", "branch": "master",
"path": "/width", "path": "/width",
"notests": true "notests": true
@ -195,7 +195,7 @@
"importpath": "golang.org/x/tools/cmd/goimports", "importpath": "golang.org/x/tools/cmd/goimports",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/cmd/goimports", "path": "/cmd/goimports",
"notests": true "notests": true
@ -204,7 +204,7 @@
"importpath": "golang.org/x/tools/cmd/gotype", "importpath": "golang.org/x/tools/cmd/gotype",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/cmd/gotype", "path": "/cmd/gotype",
"notests": true "notests": true
@ -213,7 +213,7 @@
"importpath": "golang.org/x/tools/container/intsets", "importpath": "golang.org/x/tools/container/intsets",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/container/intsets", "path": "/container/intsets",
"notests": true "notests": true
@ -222,7 +222,7 @@
"importpath": "golang.org/x/tools/go/ast/astutil", "importpath": "golang.org/x/tools/go/ast/astutil",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "go/ast/astutil", "path": "go/ast/astutil",
"notests": true "notests": true
@ -231,7 +231,7 @@
"importpath": "golang.org/x/tools/go/buildutil", "importpath": "golang.org/x/tools/go/buildutil",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "go/buildutil", "path": "go/buildutil",
"notests": true "notests": true
@ -240,7 +240,7 @@
"importpath": "golang.org/x/tools/go/callgraph", "importpath": "golang.org/x/tools/go/callgraph",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/go/callgraph", "path": "/go/callgraph",
"notests": true "notests": true
@ -249,7 +249,7 @@
"importpath": "golang.org/x/tools/go/gcexportdata", "importpath": "golang.org/x/tools/go/gcexportdata",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/go/gcexportdata", "path": "/go/gcexportdata",
"notests": true "notests": true
@ -258,7 +258,7 @@
"importpath": "golang.org/x/tools/go/gcimporter15", "importpath": "golang.org/x/tools/go/gcimporter15",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/go/gcimporter15", "path": "/go/gcimporter15",
"notests": true "notests": true
@ -267,7 +267,7 @@
"importpath": "golang.org/x/tools/go/loader", "importpath": "golang.org/x/tools/go/loader",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/go/loader", "path": "/go/loader",
"notests": true "notests": true
@ -276,7 +276,7 @@
"importpath": "golang.org/x/tools/go/pointer", "importpath": "golang.org/x/tools/go/pointer",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "go/pointer", "path": "go/pointer",
"notests": true "notests": true
@ -285,7 +285,7 @@
"importpath": "golang.org/x/tools/go/ssa", "importpath": "golang.org/x/tools/go/ssa",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/go/ssa", "path": "/go/ssa",
"notests": true "notests": true
@ -294,7 +294,7 @@
"importpath": "golang.org/x/tools/go/types/typeutil", "importpath": "golang.org/x/tools/go/types/typeutil",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "/go/types/typeutil", "path": "/go/types/typeutil",
"notests": true "notests": true
@ -303,7 +303,7 @@
"importpath": "golang.org/x/tools/imports", "importpath": "golang.org/x/tools/imports",
"repository": "https://go.googlesource.com/tools", "repository": "https://go.googlesource.com/tools",
"vcs": "git", "vcs": "git",
"revision": "bce9606b3f617bc6280aab6abbf25962c23f398d", "revision": "3b1faeda9afbcba128c2d794b38ffe7982141139",
"branch": "master", "branch": "master",
"path": "imports", "path": "imports",
"notests": true "notests": true
@ -312,7 +312,7 @@
"importpath": "honnef.co/go/tools/callgraph", "importpath": "honnef.co/go/tools/callgraph",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "callgraph", "path": "callgraph",
"notests": true "notests": true
@ -321,7 +321,7 @@
"importpath": "honnef.co/go/tools/cmd/gosimple", "importpath": "honnef.co/go/tools/cmd/gosimple",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "/cmd/gosimple", "path": "/cmd/gosimple",
"notests": true "notests": true
@ -330,7 +330,7 @@
"importpath": "honnef.co/go/tools/cmd/megacheck", "importpath": "honnef.co/go/tools/cmd/megacheck",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "/cmd/megacheck", "path": "/cmd/megacheck",
"notests": true "notests": true
@ -339,7 +339,7 @@
"importpath": "honnef.co/go/tools/cmd/staticcheck", "importpath": "honnef.co/go/tools/cmd/staticcheck",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "/cmd/staticcheck", "path": "/cmd/staticcheck",
"notests": true "notests": true
@ -348,7 +348,7 @@
"importpath": "honnef.co/go/tools/cmd/unused", "importpath": "honnef.co/go/tools/cmd/unused",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "/cmd/unused", "path": "/cmd/unused",
"notests": true "notests": true
@ -357,7 +357,7 @@
"importpath": "honnef.co/go/tools/functions", "importpath": "honnef.co/go/tools/functions",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "functions", "path": "functions",
"notests": true "notests": true
@ -366,7 +366,7 @@
"importpath": "honnef.co/go/tools/gcsizes", "importpath": "honnef.co/go/tools/gcsizes",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "gcsizes", "path": "gcsizes",
"notests": true "notests": true
@ -375,7 +375,7 @@
"importpath": "honnef.co/go/tools/internal/sharedcheck", "importpath": "honnef.co/go/tools/internal/sharedcheck",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "/internal/sharedcheck", "path": "/internal/sharedcheck",
"notests": true "notests": true
@ -384,7 +384,7 @@
"importpath": "honnef.co/go/tools/lint", "importpath": "honnef.co/go/tools/lint",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "lint", "path": "lint",
"notests": true "notests": true
@ -393,7 +393,7 @@
"importpath": "honnef.co/go/tools/simple", "importpath": "honnef.co/go/tools/simple",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "simple", "path": "simple",
"notests": true "notests": true
@ -402,7 +402,7 @@
"importpath": "honnef.co/go/tools/ssa", "importpath": "honnef.co/go/tools/ssa",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "ssa", "path": "ssa",
"notests": true "notests": true
@ -411,7 +411,7 @@
"importpath": "honnef.co/go/tools/staticcheck", "importpath": "honnef.co/go/tools/staticcheck",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "staticcheck", "path": "staticcheck",
"notests": true "notests": true
@ -420,10 +420,34 @@
"importpath": "honnef.co/go/tools/unused", "importpath": "honnef.co/go/tools/unused",
"repository": "https://github.com/dominikh/go-tools", "repository": "https://github.com/dominikh/go-tools",
"vcs": "git", "vcs": "git",
"revision": "f583b587b6ff1149f9a9b0c16ebdda74da44e1a2", "revision": "49f44f893d933fd08cd7d67d65ccefa5d7c23329",
"branch": "master", "branch": "master",
"path": "unused", "path": "unused",
"notests": true "notests": true
},
{
"importpath": "mvdan.cc/interfacer",
"repository": "https://github.com/mvdan/interfacer",
"vcs": "git",
"revision": "d7e7372184a059b8fd99d96a593e3811bf989d75",
"branch": "master",
"notests": true
},
{
"importpath": "mvdan.cc/lint",
"repository": "https://github.com/mvdan/lint",
"vcs": "git",
"revision": "adc824a0674b99099789b6188a058d485eaf61c0",
"branch": "master",
"notests": true
},
{
"importpath": "mvdan.cc/unparam",
"repository": "https://github.com/mvdan/unparam",
"vcs": "git",
"revision": "6b9a9bf4cdf71fae79104529ee3f16148302cc71",
"branch": "master",
"notests": true
} }
] ]
} }

View File

@ -1,7 +1,7 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc> // Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information // See LICENSE for licensing information
package interfacer package check
import ( import (
"go/ast" "go/ast"

View File

@ -1,7 +1,7 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc> // Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information // See LICENSE for licensing information
package interfacer package check // import "mvdan.cc/interfacer/check"
import ( import (
"fmt" "fmt"
@ -16,7 +16,7 @@ import (
"golang.org/x/tools/go/ssa/ssautil" "golang.org/x/tools/go/ssa/ssautil"
"github.com/kisielk/gotool" "github.com/kisielk/gotool"
"github.com/mvdan/lint" "mvdan.cc/lint"
) )
func toDiscard(usage *varUsage) bool { func toDiscard(usage *varUsage) bool {
@ -111,7 +111,6 @@ type Checker struct {
pkgTypes pkgTypes
*loader.PackageInfo *loader.PackageInfo
fset *token.FileSet
funcs []*funcDecl funcs []*funcDecl
ssaByPos map[token.Pos]*ssa.Function ssaByPos map[token.Pos]*ssa.Function

View File

@ -1,7 +1,7 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc> // Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information // See LICENSE for licensing information
package interfacer package check
import ( import (
"bytes" "bytes"

View File

@ -1,21 +1,21 @@
// Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc> // Copyright (c) 2015, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information // See LICENSE for licensing information
package main package main // import "mvdan.cc/interfacer"
import ( import (
"flag" "flag"
"fmt" "fmt"
"os" "os"
"github.com/mvdan/interfacer" "mvdan.cc/interfacer/check"
) )
var _ = flag.Bool("v", false, "print the names of packages as they are checked") var _ = flag.Bool("v", false, "print the names of packages as they are checked")
func main() { func main() {
flag.Parse() flag.Parse()
lines, err := interfacer.CheckArgs(flag.Args()) lines, err := check.CheckArgs(flag.Args())
if err != nil { if err != nil {
fmt.Fprintln(os.Stderr, err) fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)

View File

@ -1,7 +1,7 @@
// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc> // Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information // See LICENSE for licensing information
package main package main // import "mvdan.cc/lint/cmd/metalint"
import ( import (
"flag" "flag"
@ -13,12 +13,12 @@ import (
"golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil" "golang.org/x/tools/go/ssa/ssautil"
"github.com/mvdan/lint" "mvdan.cc/lint"
"github.com/kisielk/gotool" "github.com/kisielk/gotool"
"github.com/mvdan/interfacer" interfacer "mvdan.cc/interfacer/check"
unparam "github.com/mvdan/unparam/check" unparam "mvdan.cc/unparam/check"
) )
var tests = flag.Bool("tests", false, "include tests") var tests = flag.Bool("tests", false, "include tests")

View File

@ -2,7 +2,7 @@
// See LICENSE for licensing information // See LICENSE for licensing information
// Package lint defines common interfaces for Go code checkers. // Package lint defines common interfaces for Go code checkers.
package lint package lint // import "mvdan.cc/lint"
import ( import (
"go/token" "go/token"

View File

@ -3,7 +3,7 @@
// Package check implements the unparam linter. Note that its API is not // Package check implements the unparam linter. Note that its API is not
// stable. // stable.
package check package check // import "mvdan.cc/unparam/check"
import ( import (
"fmt" "fmt"
@ -12,6 +12,7 @@ import (
"go/parser" "go/parser"
"go/token" "go/token"
"go/types" "go/types"
"io"
"os" "os"
"path/filepath" "path/filepath"
"regexp" "regexp"
@ -25,17 +26,20 @@ import (
"golang.org/x/tools/go/ssa/ssautil" "golang.org/x/tools/go/ssa/ssautil"
"github.com/kisielk/gotool" "github.com/kisielk/gotool"
"github.com/mvdan/lint" "mvdan.cc/lint"
) )
func UnusedParams(tests bool, args ...string) ([]string, error) { func UnusedParams(tests, debug bool, args ...string) ([]string, error) {
wd, err := os.Getwd() wd, err := os.Getwd()
if err != nil { if err != nil {
return nil, err return nil, err
} }
c := &Checker{ c := &Checker{
wd: wd, tests: tests, wd: wd,
cachedDeclCounts: make(map[string]map[string]int), tests: tests,
}
if debug {
c.debugLog = os.Stderr
} }
return c.lines(args...) return c.lines(args...)
} }
@ -47,6 +51,7 @@ type Checker struct {
wd string wd string
tests bool tests bool
debugLog io.Writer
cachedDeclCounts map[string]map[string]int cachedDeclCounts map[string]map[string]int
} }
@ -54,6 +59,8 @@ type Checker struct {
var ( var (
_ lint.Checker = (*Checker)(nil) _ lint.Checker = (*Checker)(nil)
_ lint.WithSSA = (*Checker)(nil) _ lint.WithSSA = (*Checker)(nil)
skipValue = new(ssa.Value)
) )
func (c *Checker) lines(args ...string) ([]string, error) { func (c *Checker) lines(args ...string) ([]string, error) {
@ -101,7 +108,14 @@ func (c *Checker) ProgramSSA(prog *ssa.Program) {
c.prog = prog c.prog = prog
} }
func (c *Checker) debug(format string, a ...interface{}) {
if c.debugLog != nil {
fmt.Fprintf(c.debugLog, format, a...)
}
}
func (c *Checker) Check() ([]lint.Issue, error) { func (c *Checker) Check() ([]lint.Issue, error) {
c.cachedDeclCounts = make(map[string]map[string]int)
wantPkg := make(map[*types.Package]*loader.PackageInfo) wantPkg := make(map[*types.Package]*loader.PackageInfo)
for _, info := range c.lprog.InitialPackages() { for _, info := range c.lprog.InitialPackages() {
wantPkg[info.Pkg] = info wantPkg[info.Pkg] = info
@ -121,7 +135,9 @@ funcLoop:
if info == nil { // not part of given pkgs if info == nil { // not part of given pkgs
continue continue
} }
c.debug("func %s\n", fn.String())
if dummyImpl(fn.Blocks[0]) { // panic implementation if dummyImpl(fn.Blocks[0]) { // panic implementation
c.debug(" skip - dummy implementation\n")
continue continue
} }
for _, edge := range cg.Nodes[fn].In { for _, edge := range cg.Nodes[fn].In {
@ -130,24 +146,100 @@ funcLoop:
default: default:
// called via a parameter or field, type // called via a parameter or field, type
// is set in stone. // is set in stone.
c.debug(" skip - type is required via call\n")
continue funcLoop continue funcLoop
} }
} }
if c.multipleImpls(info, fn) { if c.multipleImpls(info, fn) {
c.debug(" skip - multiple implementations via build tags\n")
continue continue
} }
callers := cg.Nodes[fn].In
results := fn.Signature.Results()
// skip exported funcs, as well as those that are
// entirely unused
if !ast.IsExported(fn.Name()) && len(callers) > 0 {
resLoop:
for i := 0; i < results.Len(); i++ {
for _, edge := range callers {
val := edge.Site.Value()
if val == nil { // e.g. go statement
continue
}
for _, instr := range *val.Referrers() {
extract, ok := instr.(*ssa.Extract)
if !ok {
continue resLoop // direct, real use
}
if extract.Index != i {
continue // not the same result param
}
if len(*extract.Referrers()) > 0 {
continue resLoop // real use after extraction
}
}
}
res := results.At(i)
name := paramDesc(i, res)
issues = append(issues, Issue{
pos: res.Pos(),
msg: fmt.Sprintf("result %s is never used", name),
})
}
}
seen := make([]constant.Value, results.Len())
numRets := 0
for _, block := range fn.Blocks {
last := block.Instrs[len(block.Instrs)-1]
ret, ok := last.(*ssa.Return)
if !ok {
continue
}
for i, val := range ret.Results {
cnst, ok := val.(*ssa.Const)
switch {
case !ok:
seen[i] = nil
case numRets == 0:
seen[i] = cnst.Value
case seen[i] == nil:
case !constant.Compare(seen[i], token.EQL, cnst.Value):
seen[i] = nil
}
}
numRets++
}
if numRets > 1 {
for i, val := range seen {
if val == nil {
continue
}
res := results.At(i)
name := paramDesc(i, res)
issues = append(issues, Issue{
pos: res.Pos(),
msg: fmt.Sprintf("result %s is always %s", name, val.String()),
})
}
}
for i, par := range fn.Params { for i, par := range fn.Params {
if i == 0 && fn.Signature.Recv() != nil { // receiver if i == 0 && fn.Signature.Recv() != nil { // receiver
continue continue
} }
c.debug("%s\n", par.String())
switch par.Object().Name() { switch par.Object().Name() {
case "", "_": // unnamed case "", "_": // unnamed
c.debug(" skip - unnamed\n")
continue continue
} }
reason := "is unused" reason := "is unused"
if cv := receivesSameValue(cg.Nodes[fn].In, par, i); cv != nil { if cv := receivesSameValue(cg.Nodes[fn].In, par, i); cv != nil {
reason = fmt.Sprintf("always receives %v", cv) reason = fmt.Sprintf("always receives %v", cv)
} else if anyRealUse(par, i) { } else if anyRealUse(par, i) {
c.debug(" skip - used somewhere in the func body\n")
continue continue
} }
issues = append(issues, Issue{ issues = append(issues, Issue{
@ -158,15 +250,25 @@ funcLoop:
} }
// TODO: replace by sort.Slice once we drop Go 1.7 support // TODO: replace by sort.Slice once we drop Go 1.7 support
sort.Sort(byPos(issues)) sort.Sort(byNamePos{c.prog.Fset, issues})
return issues, nil return issues, nil
} }
type byPos []lint.Issue type byNamePos struct {
fset *token.FileSet
l []lint.Issue
}
func (p byPos) Len() int { return len(p) } func (p byNamePos) Len() int { return len(p.l) }
func (p byPos) Swap(i, j int) { p[i], p[j] = p[j], p[i] } func (p byNamePos) Swap(i, j int) { p.l[i], p.l[j] = p.l[j], p.l[i] }
func (p byPos) Less(i, j int) bool { return p[i].Pos() < p[j].Pos() } func (p byNamePos) Less(i, j int) bool {
p1 := p.fset.Position(p.l[i].Pos())
p2 := p.fset.Position(p.l[j].Pos())
if p1.Filename == p2.Filename {
return p1.Offset < p2.Offset
}
return p1.Filename < p2.Filename
}
func receivesSameValue(in []*callgraph.Edge, par *ssa.Parameter, pos int) constant.Value { func receivesSameValue(in []*callgraph.Edge, par *ssa.Parameter, pos int) constant.Value {
if ast.IsExported(par.Parent().Name()) { if ast.IsExported(par.Parent().Name()) {
@ -192,14 +294,12 @@ func receivesSameValue(in []*callgraph.Edge, par *ssa.Parameter, pos int) consta
func anyRealUse(par *ssa.Parameter, pos int) bool { func anyRealUse(par *ssa.Parameter, pos int) bool {
refLoop: refLoop:
for _, ref := range *par.Referrers() { for _, ref := range *par.Referrers() {
call, ok := ref.(*ssa.Call) switch x := ref.(type) {
if !ok { case *ssa.Call:
return true if x.Call.Value != par.Parent() {
}
if call.Call.Value != par.Parent() {
return true // not a recursive call return true // not a recursive call
} }
for i, arg := range call.Call.Args { for i, arg := range x.Call.Args {
if arg != par { if arg != par {
continue continue
} }
@ -209,10 +309,32 @@ refLoop:
} }
} }
return true return true
case *ssa.Store:
if insertedStore(x) {
continue // inserted by go/ssa, not from the code
}
return true
default:
return true
}
} }
return false return false
} }
func insertedStore(instr ssa.Instruction) bool {
if instr.Pos() != token.NoPos {
return false
}
store, ok := instr.(*ssa.Store)
if !ok {
return false
}
alloc, ok := store.Addr.(*ssa.Alloc)
// we want exactly one use of this alloc value for it to be
// inserted by ssa and dummy - the alloc instruction itself.
return ok && len(*alloc.Referrers()) == 1
}
var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print`) var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print`)
// dummyImpl reports whether a block is a dummy implementation. This is // dummyImpl reports whether a block is a dummy implementation. This is
@ -221,11 +343,15 @@ var rxHarmlessCall = regexp.MustCompile(`(?i)\b(log(ger)?|errors)\b|\bf?print`)
func dummyImpl(blk *ssa.BasicBlock) bool { func dummyImpl(blk *ssa.BasicBlock) bool {
var ops [8]*ssa.Value var ops [8]*ssa.Value
for _, instr := range blk.Instrs { for _, instr := range blk.Instrs {
if insertedStore(instr) {
continue // inserted by go/ssa, not from the code
}
for _, val := range instr.Operands(ops[:0]) { for _, val := range instr.Operands(ops[:0]) {
switch x := (*val).(type) { switch x := (*val).(type) {
case nil, *ssa.Const, *ssa.ChangeType, *ssa.Alloc, case nil, *ssa.Const, *ssa.ChangeType, *ssa.Alloc,
*ssa.MakeInterface, *ssa.Function, *ssa.MakeInterface, *ssa.Function,
*ssa.Global, *ssa.IndexAddr, *ssa.Slice: *ssa.Global, *ssa.IndexAddr, *ssa.Slice,
*ssa.UnOp:
case *ssa.Call: case *ssa.Call:
if rxHarmlessCall.MatchString(x.Call.Value.String()) { if rxHarmlessCall.MatchString(x.Call.Value.String()) {
continue continue
@ -322,3 +448,11 @@ func (c *Checker) multipleImpls(info *loader.PackageInfo, fn *ssa.Function) bool
} }
return count[name] > 1 return count[name] > 1
} }
func paramDesc(i int, v *types.Var) string {
name := v.Name()
if name != "" {
return name
}
return fmt.Sprintf("%d (%s)", i, v.Type().String())
}

View File

@ -0,0 +1,33 @@
// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
// See LICENSE for licensing information
package main // import "mvdan.cc/unparam"
import (
"flag"
"fmt"
"os"
"mvdan.cc/unparam/check"
)
var (
tests = flag.Bool("tests", true, "include tests")
debug = flag.Bool("debug", false, "debug prints")
)
func main() {
flag.Usage = func() {
fmt.Fprintln(os.Stderr, "usage: unparam [flags] [package ...]")
flag.PrintDefaults()
}
flag.Parse()
warns, err := check.UnusedParams(*tests, *debug, flag.Args()...)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
for _, warn := range warns {
fmt.Println(warn)
}
}

View File

@ -5,27 +5,21 @@ import (
"strings" "strings"
) )
type ( type issueKey struct {
issueKey struct {
path string path string
line, col int line, col int
message string message string
}
multiIssue struct {
*Issue
linterNames []string
}
)
func maybeAggregateIssues(issues chan *Issue) chan *Issue {
if !config.Aggregate {
return issues
}
return aggregateIssues(issues)
} }
func aggregateIssues(issues chan *Issue) chan *Issue { type multiIssue struct {
*Issue
linterNames []string
}
// AggregateIssueChan reads issues from a channel, aggregates issues which have
// the same file, line, vol, and message, and returns aggregated issues on
// a new channel.
func AggregateIssueChan(issues chan *Issue) chan *Issue {
out := make(chan *Issue, 1000000) out := make(chan *Issue, 1000000)
issueMap := make(map[issueKey]*multiIssue) issueMap := make(map[issueKey]*multiIssue)
go func() { go func() {

View File

@ -4,7 +4,7 @@ import (
"encoding/xml" "encoding/xml"
"fmt" "fmt"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
type checkstyleOutput struct { type checkstyleOutput struct {

View File

@ -8,7 +8,7 @@ import (
) )
// Config for gometalinter. This can be loaded from a JSON file with --config. // Config for gometalinter. This can be loaded from a JSON file with --config.
type Config struct { // nolint: aligncheck type Config struct { // nolint: maligned
// A map from linter name -> <LinterConfig|string>. // A map from linter name -> <LinterConfig|string>.
// //
// For backwards compatibility, the value stored in the JSON blob can also // For backwards compatibility, the value stored in the JSON blob can also
@ -51,6 +51,11 @@ type Config struct { // nolint: aligncheck
EnableGC bool EnableGC bool
Aggregate bool Aggregate bool
EnableAll bool EnableAll bool
// Warn if a nolint directive was never matched to a linter issue
WarnUnmatchedDirective bool
formatTemplate *template.Template
} }
type StringOrLinterConfig LinterConfig type StringOrLinterConfig LinterConfig
@ -58,7 +63,8 @@ type StringOrLinterConfig LinterConfig
func (c *StringOrLinterConfig) UnmarshalJSON(raw []byte) error { func (c *StringOrLinterConfig) UnmarshalJSON(raw []byte) error {
var linterConfig LinterConfig var linterConfig LinterConfig
// first try to un-marshall directly into struct // first try to un-marshall directly into struct
if err := json.Unmarshal(raw, &linterConfig); err == nil { origErr := json.Unmarshal(raw, &linterConfig)
if origErr == nil {
*c = StringOrLinterConfig(linterConfig) *c = StringOrLinterConfig(linterConfig)
return nil return nil
} }
@ -66,7 +72,7 @@ func (c *StringOrLinterConfig) UnmarshalJSON(raw []byte) error {
// i.e. bytes didn't represent the struct, treat them as a string // i.e. bytes didn't represent the struct, treat them as a string
var linterSpec string var linterSpec string
if err := json.Unmarshal(raw, &linterSpec); err != nil { if err := json.Unmarshal(raw, &linterSpec); err != nil {
return err return origErr
} }
linter, err := parseLinterConfigSpec("", linterSpec) linter, err := parseLinterConfigSpec("", linterSpec)
if err != nil { if err != nil {
@ -93,18 +99,16 @@ func (td *jsonDuration) Duration() time.Duration {
return time.Duration(*td) return time.Duration(*td)
} }
// TODO: should be a field on Config struct
var formatTemplate = &template.Template{}
var sortKeys = []string{"none", "path", "line", "column", "severity", "message", "linter"} var sortKeys = []string{"none", "path", "line", "column", "severity", "message", "linter"}
// Configuration defaults. // Configuration defaults.
var config = &Config{ var config = &Config{
Format: "{{.Path}}:{{.Line}}:{{if .Col}}{{.Col}}{{end}}:{{.Severity}}: {{.Message}} ({{.Linter}})", Format: DefaultIssueFormat,
Linters: map[string]StringOrLinterConfig{}, Linters: map[string]StringOrLinterConfig{},
Severity: map[string]string{ Severity: map[string]string{
"gotype": "error", "gotype": "error",
"gotypex": "error",
"test": "error", "test": "error",
"testify": "error", "testify": "error",
"vet": "error", "vet": "error",

View File

@ -0,0 +1,21 @@
package main
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestLinterConfigUnmarshalJSON(t *testing.T) {
source := `{
"Command": "/bin/custom",
"PartitionStrategy": "directories"
}`
var config StringOrLinterConfig
err := json.Unmarshal([]byte(source), &config)
require.NoError(t, err)
assert.Equal(t, "/bin/custom", config.Command)
assert.Equal(t, functionName(partitionPathsAsDirectories), functionName(config.PartitionStrategy))
}

View File

@ -1,6 +1,7 @@
package main package main
import ( import (
"fmt"
"go/ast" "go/ast"
"go/parser" "go/parser"
"go/token" "go/token"
@ -14,6 +15,7 @@ type ignoredRange struct {
col int col int
start, end int start, end int
linters []string linters []string
matched bool
} }
func (i *ignoredRange) matches(issue *Issue) bool { func (i *ignoredRange) matches(issue *Issue) bool {
@ -35,6 +37,14 @@ func (i *ignoredRange) near(col, start int) bool {
return col == i.col && i.end == start-1 return col == i.col && i.end == start-1
} }
func (i *ignoredRange) String() string {
linters := strings.Join(i.linters, ",")
if len(i.linters) == 0 {
linters = "all"
}
return fmt.Sprintf("%s:%d-%d", linters, i.start, i.end)
}
type ignoredRanges []*ignoredRange type ignoredRanges []*ignoredRange
func (ir ignoredRanges) Len() int { return len(ir) } func (ir ignoredRanges) Len() int { return len(ir) }
@ -66,12 +76,43 @@ func (d *directiveParser) IsIgnored(issue *Issue) bool {
d.lock.Unlock() d.lock.Unlock()
for _, r := range ranges { for _, r := range ranges {
if r.matches(issue) { if r.matches(issue) {
debug("nolint: matched %s to issue %s", r, issue)
r.matched = true
return true return true
} }
} }
return false return false
} }
// Unmatched returns all the ranges which were never used to ignore an issue
func (d *directiveParser) Unmatched() map[string]ignoredRanges {
unmatched := map[string]ignoredRanges{}
for path, ranges := range d.files {
for _, ignore := range ranges {
if !ignore.matched {
unmatched[path] = append(unmatched[path], ignore)
}
}
}
return unmatched
}
// LoadFiles from a list of directories
func (d *directiveParser) LoadFiles(paths []string) error {
d.lock.Lock()
defer d.lock.Unlock()
filenames, err := pathsToFileGlobs(paths)
if err != nil {
return err
}
for _, filename := range filenames {
ranges := d.parseFile(filename)
sort.Sort(ranges)
d.files[filename] = ranges
}
return nil
}
// Takes a set of ignoredRanges, determines if they immediately precede a statement // Takes a set of ignoredRanges, determines if they immediately precede a statement
// construct, and expands the range to include that construct. Why? So you can // construct, and expands the range to include that construct. Why? So you can
// precede a function or struct with //nolint // precede a function or struct with //nolint
@ -150,7 +191,28 @@ func filterIssuesViaDirectives(directives *directiveParser, issues chan *Issue)
out <- issue out <- issue
} }
} }
if config.WarnUnmatchedDirective {
for _, issue := range warnOnUnusedDirective(directives) {
out <- issue
}
}
close(out) close(out)
}() }()
return out return out
} }
func warnOnUnusedDirective(directives *directiveParser) []*Issue {
out := []*Issue{}
for path, ranges := range directives.Unmatched() {
for _, ignore := range ranges {
issue, _ := NewIssue("nolint", config.formatTemplate)
issue.Path = path
issue.Line = ignore.start
issue.Col = ignore.col
issue.Message = "nolint directive did not match any issue"
out = append(out, issue)
}
}
return out
}

View File

@ -1 +1,42 @@
package main package main
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestIgnoreRangeMatch(t *testing.T) {
var testcases = []struct {
doc string
issue Issue
linters []string
expected bool
}{
{
doc: "unmatched line",
issue: Issue{Line: 100},
},
{
doc: "matched line, all linters",
issue: Issue{Line: 5},
expected: true,
},
{
doc: "matched line, unmatched linter",
issue: Issue{Line: 5},
linters: []string{"vet"},
},
{
doc: "matched line and linters",
issue: Issue{Line: 20, Linter: "vet"},
linters: []string{"vet"},
expected: true,
},
}
for _, testcase := range testcases {
ir := ignoredRange{col: 20, start: 5, end: 20, linters: testcase.linters}
assert.Equal(t, testcase.expected, ir.matches(&testcase.issue), testcase.doc)
}
}

View File

@ -8,14 +8,13 @@ import (
"path/filepath" "path/filepath"
"reflect" "reflect"
"regexp" "regexp"
"sort"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
"time" "time"
"github.com/google/shlex" "github.com/google/shlex"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
type Vars map[string]string type Vars map[string]string
@ -41,35 +40,8 @@ func (v Vars) Replace(s string) string {
return s return s
} }
// Severity of linter message.
type Severity string
// Linter message severity levels.
const ( // nolint: deadcode
Error Severity = "error"
Warning Severity = "warning"
)
type Issue struct {
Linter string `json:"linter"`
Severity Severity `json:"severity"`
Path string `json:"path"`
Line int `json:"line"`
Col int `json:"col"`
Message string `json:"message"`
}
func (i *Issue) String() string {
buf := new(bytes.Buffer)
err := formatTemplate.Execute(buf, i)
kingpin.FatalIfError(err, "Invalid output format")
return buf.String()
}
type linterState struct { type linterState struct {
*Linter *Linter
id int
paths []string
issues chan *Issue issues chan *Issue
vars Vars vars Vars
exclude *regexp.Regexp exclude *regexp.Regexp
@ -77,26 +49,34 @@ type linterState struct {
deadline <-chan time.Time deadline <-chan time.Time
} }
func (l *linterState) Partitions() ([][]string, error) { func (l *linterState) Partitions(paths []string) ([][]string, error) {
command := l.vars.Replace(l.Command) cmdArgs, err := parseCommand(l.command())
cmdArgs, err := parseCommand(command)
if err != nil { if err != nil {
return nil, err return nil, err
} }
parts, err := l.Linter.PartitionStrategy(cmdArgs, l.paths) parts, err := l.Linter.PartitionStrategy(cmdArgs, paths)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return parts, nil return parts, nil
} }
func (l *linterState) command() string {
return l.vars.Replace(l.Command)
}
func runLinters(linters map[string]*Linter, paths []string, concurrency int, exclude, include *regexp.Regexp) (chan *Issue, chan error) { func runLinters(linters map[string]*Linter, paths []string, concurrency int, exclude, include *regexp.Regexp) (chan *Issue, chan error) {
errch := make(chan error, len(linters)) errch := make(chan error, len(linters))
concurrencych := make(chan bool, concurrency) concurrencych := make(chan bool, concurrency)
incomingIssues := make(chan *Issue, 1000000) incomingIssues := make(chan *Issue, 1000000)
processedIssues := filterIssuesViaDirectives(
newDirectiveParser(), directiveParser := newDirectiveParser()
maybeSortIssues(maybeAggregateIssues(incomingIssues))) if config.WarnUnmatchedDirective {
directiveParser.LoadFiles(paths)
}
processedIssues := maybeSortIssues(filterIssuesViaDirectives(
directiveParser, maybeAggregateIssues(incomingIssues)))
vars := Vars{ vars := Vars{
"duplthreshold": fmt.Sprintf("%d", config.DuplThreshold), "duplthreshold": fmt.Sprintf("%d", config.DuplThreshold),
@ -106,9 +86,11 @@ func runLinters(linters map[string]*Linter, paths []string, concurrency int, exc
"min_occurrences": fmt.Sprintf("%d", config.MinOccurrences), "min_occurrences": fmt.Sprintf("%d", config.MinOccurrences),
"min_const_length": fmt.Sprintf("%d", config.MinConstLength), "min_const_length": fmt.Sprintf("%d", config.MinConstLength),
"tests": "", "tests": "",
"not_tests": "true",
} }
if config.Test { if config.Test {
vars["tests"] = "-t" vars["tests"] = "true"
vars["not_tests"] = ""
} }
wg := &sync.WaitGroup{} wg := &sync.WaitGroup{}
@ -118,25 +100,24 @@ func runLinters(linters map[string]*Linter, paths []string, concurrency int, exc
state := &linterState{ state := &linterState{
Linter: linter, Linter: linter,
issues: incomingIssues, issues: incomingIssues,
paths: paths,
vars: vars, vars: vars,
exclude: exclude, exclude: exclude,
include: include, include: include,
deadline: deadline, deadline: deadline,
} }
partitions, err := state.Partitions() partitions, err := state.Partitions(paths)
if err != nil { if err != nil {
errch <- err errch <- err
continue continue
} }
for _, args := range partitions { for _, args := range partitions {
wg.Add(1) wg.Add(1)
concurrencych <- true
// Call the goroutine with a copy of the args array so that the // Call the goroutine with a copy of the args array so that the
// contents of the array are not modified by the next iteration of // contents of the array are not modified by the next iteration of
// the above for loop // the above for loop
go func(id int, args []string) { go func(id int, args []string) {
concurrencych <- true
err := executeLinter(id, state, args) err := executeLinter(id, state, args)
if err != nil { if err != nil {
errch <- err errch <- err
@ -243,7 +224,9 @@ func processOutput(dbg debugFunction, state *linterState, out []byte) {
group = append(group, fragment) group = append(group, fragment)
} }
issue := &Issue{Line: 1, Linter: state.Linter.Name} issue, err := NewIssue(state.Linter.Name, config.formatTemplate)
kingpin.FatalIfError(err, "Invalid output format")
for i, name := range re.SubexpNames() { for i, name := range re.SubexpNames() {
if group[i] == nil { if group[i] == nil {
continue continue
@ -279,8 +262,6 @@ func processOutput(dbg debugFunction, state *linterState, out []byte) {
} }
if sev, ok := config.Severity[state.Name]; ok { if sev, ok := config.Severity[state.Name]; ok {
issue.Severity = Severity(sev) issue.Severity = Severity(sev)
} else {
issue.Severity = Warning
} }
if state.exclude != nil && state.exclude.MatchString(issue.String()) { if state.exclude != nil && state.exclude.MatchString(issue.String()) {
continue continue
@ -323,66 +304,16 @@ func resolvePath(path string) string {
return path return path
} }
type sortedIssues struct {
issues []*Issue
order []string
}
func (s *sortedIssues) Len() int { return len(s.issues) }
func (s *sortedIssues) Swap(i, j int) { s.issues[i], s.issues[j] = s.issues[j], s.issues[i] }
// nolint: gocyclo
func (s *sortedIssues) Less(i, j int) bool {
l, r := s.issues[i], s.issues[j]
for _, key := range s.order {
switch key {
case "path":
if l.Path > r.Path {
return false
}
case "line":
if l.Line > r.Line {
return false
}
case "column":
if l.Col > r.Col {
return false
}
case "severity":
if l.Severity > r.Severity {
return false
}
case "message":
if l.Message > r.Message {
return false
}
case "linter":
if l.Linter > r.Linter {
return false
}
}
}
return true
}
func maybeSortIssues(issues chan *Issue) chan *Issue { func maybeSortIssues(issues chan *Issue) chan *Issue {
if reflect.DeepEqual([]string{"none"}, config.Sort) { if reflect.DeepEqual([]string{"none"}, config.Sort) {
return issues return issues
} }
out := make(chan *Issue, 1000000) return SortIssueChan(issues, config.Sort)
sorted := &sortedIssues{ }
issues: []*Issue{},
order: config.Sort, func maybeAggregateIssues(issues chan *Issue) chan *Issue {
} if !config.Aggregate {
go func() { return issues
for issue := range issues { }
sorted.issues = append(sorted.issues, issue) return AggregateIssueChan(issues)
}
sort.Sort(sorted)
for _, issue := range sorted.issues {
out <- issue
}
close(out)
}()
return out
} }

View File

@ -1,29 +1,67 @@
package main package main
import ( import (
"sort"
"testing" "testing"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/assert"
) )
func TestSortedIssues(t *testing.T) { func TestLinterStateCommand(t *testing.T) {
actual := []*Issue{ varsDefault := Vars{"tests": "", "not_tests": "true"}
{Path: "b.go", Line: 5}, varsWithTest := Vars{"tests": "true", "not_tests": ""}
{Path: "a.go", Line: 3},
{Path: "b.go", Line: 1}, var testcases = []struct {
{Path: "a.go", Line: 1}, linter string
vars Vars
expected string
}{
{
linter: "errcheck",
vars: varsWithTest,
expected: `errcheck -abspath `,
},
{
linter: "errcheck",
vars: varsDefault,
expected: `errcheck -abspath -ignoretests`,
},
{
linter: "gotype",
vars: varsDefault,
expected: `gotype -e `,
},
{
linter: "gotype",
vars: varsWithTest,
expected: `gotype -e -t`,
},
{
linter: "structcheck",
vars: varsDefault,
expected: `structcheck `,
},
{
linter: "structcheck",
vars: varsWithTest,
expected: `structcheck -t`,
},
{
linter: "unparam",
vars: varsDefault,
expected: `unparam -tests=false`,
},
{
linter: "unparam",
vars: varsWithTest,
expected: `unparam `,
},
} }
issues := &sortedIssues{
issues: actual, for _, testcase := range testcases {
order: []string{"path", "line"}, ls := linterState{
Linter: getLinterByName(testcase.linter, LinterConfig{}),
vars: testcase.vars,
} }
sort.Sort(issues) assert.Equal(t, testcase.expected, ls.command())
expected := []*Issue{
{Path: "a.go", Line: 1},
{Path: "a.go", Line: 3},
{Path: "b.go", Line: 1},
{Path: "b.go", Line: 5},
} }
require.Equal(t, expected, actual)
} }

View File

@ -0,0 +1,114 @@
package main
import (
"bytes"
"fmt"
"io/ioutil"
"sort"
"strings"
"text/template"
)
// DefaultIssueFormat is the default text/template used to render an Issue
// as a single line of output.
const DefaultIssueFormat = "{{.Path}}:{{.Line}}:{{if .Col}}{{.Col}}{{end}}:{{.Severity}}: {{.Message}} ({{.Linter}})"

// Severity of a linter message.
type Severity string

// Linter message severity levels.
const (
	Error   Severity = "error"
	Warning Severity = "warning"
)
// Issue is a single problem reported by a linter, together with the
// template used to render it in String().
type Issue struct {
	Linter   string   `json:"linter"`
	Severity Severity `json:"severity"`
	Path     string   `json:"path"`
	Line     int      `json:"line"`
	Col      int      `json:"col"`
	Message  string   `json:"message"`
	// formatTmpl renders the issue in String(); when nil, String() falls
	// back to a fixed "path:line:col:severity: message (linter)" layout.
	formatTmpl *template.Template
}
// NewIssue constructs an Issue for the named linter, defaulting to line 1
// and Warning severity. It returns an error if formatTmpl cannot be
// executed against an Issue.
func NewIssue(linter string, formatTmpl *template.Template) (*Issue, error) {
	i := &Issue{
		Linter:     linter,
		Severity:   Warning,
		Line:       1,
		formatTmpl: formatTmpl,
	}
	// Execute once against the fresh issue so template errors surface
	// here rather than later in String().
	if err := formatTmpl.Execute(ioutil.Discard, i); err != nil {
		return i, err
	}
	return i, nil
}
// String renders the issue using its format template, or a fixed
// "path:line:col:severity: message (linter)" layout when no template is set.
func (i *Issue) String() string {
	if i.formatTmpl != nil {
		var out bytes.Buffer
		// Execute error deliberately ignored: the template was already
		// executed once when the issue was built in NewIssue.
		_ = i.formatTmpl.Execute(&out, i)
		return out.String()
	}
	column := ""
	if i.Col != 0 {
		column = fmt.Sprintf("%d", i.Col)
	}
	return fmt.Sprintf("%s:%d:%s:%s: %s (%s)", strings.TrimSpace(i.Path), i.Line, column, i.Severity, strings.TrimSpace(i.Message), i.Linter)
}
// sortedIssues implements sort.Interface over a slice of issues, ordering
// by the comparison keys listed in order (see CompareIssue).
type sortedIssues struct {
	issues []*Issue
	order  []string
}

func (s *sortedIssues) Len() int      { return len(s.issues) }
func (s *sortedIssues) Swap(i, j int) { s.issues[i], s.issues[j] = s.issues[j], s.issues[i] }

// Less delegates to CompareIssue with the configured key order.
func (s *sortedIssues) Less(i, j int) bool {
	l, r := s.issues[i], s.issues[j]
	return CompareIssue(*l, *r, s.order)
}
// CompareIssue reports whether l should sort strictly before r, comparing
// the fields named in order ("path", "line", "column", "severity",
// "message", "linter") until one differs. Issues equal on every requested
// key are not ordered before one another, as sort.Interface requires of a
// Less function (returning true for equal pairs would make both
// Less(i, j) and Less(j, i) true, violating the strict weak ordering).
// nolint: gocyclo
func CompareIssue(l, r Issue, order []string) bool {
	for _, key := range order {
		switch {
		case key == "path" && l.Path != r.Path:
			return l.Path < r.Path
		case key == "line" && l.Line != r.Line:
			return l.Line < r.Line
		case key == "column" && l.Col != r.Col:
			return l.Col < r.Col
		case key == "severity" && l.Severity != r.Severity:
			return l.Severity < r.Severity
		case key == "message" && l.Message != r.Message:
			return l.Message < r.Message
		case key == "linter" && l.Linter != r.Linter:
			return l.Linter < r.Linter
		}
	}
	// All requested keys compared equal: l does not sort before r.
	return false
}
// SortIssueChan drains the issues channel, sorts everything it received
// by the given key order, and streams the result on a new channel. The
// returned channel is closed once all sorted issues have been sent.
func SortIssueChan(issues chan *Issue, order []string) chan *Issue {
	// Large buffer so the sender goroutine never blocks on the consumer.
	sortedCh := make(chan *Issue, 1000000)
	go func() {
		defer close(sortedCh)
		collected := &sortedIssues{
			issues: []*Issue{},
			order:  order,
		}
		for i := range issues {
			collected.issues = append(collected.issues, i)
		}
		sort.Sort(collected)
		for _, i := range collected.issues {
			sortedCh <- i
		}
	}()
	return sortedCh
}

View File

@ -0,0 +1,39 @@
package main
import (
"sort"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestSortedIssues verifies that sort.Sort over sortedIssues orders by
// path, then line, then column, and sorts the underlying slice in place.
func TestSortedIssues(t *testing.T) {
	got := []*Issue{
		{Path: "b.go", Line: 5, Col: 1},
		{Path: "a.go", Line: 3, Col: 2},
		{Path: "b.go", Line: 1, Col: 3},
		{Path: "a.go", Line: 1, Col: 4},
	}
	sort.Sort(&sortedIssues{issues: got, order: []string{"path", "line", "column"}})
	want := []*Issue{
		{Path: "a.go", Line: 1, Col: 4},
		{Path: "a.go", Line: 3, Col: 2},
		{Path: "b.go", Line: 1, Col: 3},
		{Path: "b.go", Line: 5, Col: 1},
	}
	require.Equal(t, want, got)
}
// TestCompareOrderWithMessage checks that CompareIssue falls through
// keys that compare equal (path, line, column) and finally orders by the
// message field, asymmetrically.
func TestCompareOrderWithMessage(t *testing.T) {
	keys := []string{"path", "line", "column", "message"}
	lower := Issue{Path: "file.go", Message: "message"}
	higher := Issue{Path: "file.go", Message: "unknown"}
	assert.True(t, CompareIssue(lower, higher, keys))
	assert.False(t, CompareIssue(higher, lower, keys))
}

View File

@ -8,11 +8,10 @@ import (
"sort" "sort"
"strings" "strings"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
type LinterConfig struct { type LinterConfig struct {
Name string
Command string Command string
Pattern string Pattern string
InstallFrom string InstallFrom string
@ -23,11 +22,12 @@ type LinterConfig struct {
type Linter struct { type Linter struct {
LinterConfig LinterConfig
Name string
regex *regexp.Regexp regex *regexp.Regexp
} }
// NewLinter returns a new linter from a config // NewLinter returns a new linter from a config
func NewLinter(config LinterConfig) (*Linter, error) { func NewLinter(name string, config LinterConfig) (*Linter, error) {
if p, ok := predefinedPatterns[config.Pattern]; ok { if p, ok := predefinedPatterns[config.Pattern]; ok {
config.Pattern = p config.Pattern = p
} }
@ -36,10 +36,11 @@ func NewLinter(config LinterConfig) (*Linter, error) {
return nil, err return nil, err
} }
if config.PartitionStrategy == nil { if config.PartitionStrategy == nil {
config.PartitionStrategy = partitionToMaxArgSize config.PartitionStrategy = partitionPathsAsDirectories
} }
return &Linter{ return &Linter{
LinterConfig: config, LinterConfig: config,
Name: name,
regex: regex, regex: regex,
}, nil }, nil
} }
@ -61,7 +62,17 @@ func getLinterByName(name string, overrideConf LinterConfig) *Linter {
if val := overrideConf.Pattern; val != "" { if val := overrideConf.Pattern; val != "" {
conf.Pattern = val conf.Pattern = val
} }
linter, _ := NewLinter(conf) if val := overrideConf.InstallFrom; val != "" {
conf.InstallFrom = val
}
if overrideConf.IsFast {
conf.IsFast = true
}
if val := overrideConf.PartitionStrategy; val != nil {
conf.PartitionStrategy = val
}
linter, _ := NewLinter(name, conf)
return linter return linter
} }
@ -73,7 +84,9 @@ func parseLinterConfigSpec(name string, spec string) (LinterConfig, error) {
config := defaultLinters[name] config := defaultLinters[name]
config.Command, config.Pattern = parts[0], parts[1] config.Command, config.Pattern = parts[0], parts[1]
config.Name = name if predefined, ok := predefinedPatterns[config.Pattern]; ok {
config.Pattern = predefined
}
return config, nil return config, nil
} }
@ -154,9 +167,9 @@ func installLinters() {
func getDefaultLinters() []*Linter { func getDefaultLinters() []*Linter {
out := []*Linter{} out := []*Linter{}
for _, config := range defaultLinters { for name, config := range defaultLinters {
linter, err := NewLinter(config) linter, err := NewLinter(name, config)
kingpin.FatalIfError(err, "invalid linter %q", config.Name) kingpin.FatalIfError(err, "invalid linter %q", name)
out = append(out, linter) out = append(out, linter)
} }
return out return out
@ -172,226 +185,228 @@ func defaultEnabled() []string {
return enabled return enabled
} }
func validateLinters(linters map[string]*Linter, config *Config) error {
var unknownLinters []string
for name := range linters {
if _, isDefault := defaultLinters[name]; !isDefault {
if _, isCustom := config.Linters[name]; !isCustom {
unknownLinters = append(unknownLinters, name)
}
}
}
if len(unknownLinters) > 0 {
return fmt.Errorf("unknown linters: %s", strings.Join(unknownLinters, ", "))
}
return nil
}
const vetPattern = `^(?:vet:.*?\.go:\s+(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*))|(?:(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*))$` const vetPattern = `^(?:vet:.*?\.go:\s+(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*))|(?:(?P<path>.*?\.go):(?P<line>\d+):\s*(?P<message>.*))$`
var defaultLinters = map[string]LinterConfig{ var defaultLinters = map[string]LinterConfig{
"aligncheck": { "maligned": {
Name: "aligncheck", Command: "maligned",
Command: "aligncheck",
Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`, Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`,
InstallFrom: "github.com/opennota/check/cmd/aligncheck", InstallFrom: "github.com/mdempsky/maligned",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"deadcode": { "deadcode": {
Name: "deadcode",
Command: "deadcode", Command: "deadcode",
Pattern: `^deadcode: (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`, Pattern: `^deadcode: (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`,
InstallFrom: "github.com/tsenart/deadcode", InstallFrom: "github.com/tsenart/deadcode",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
}, },
"dupl": { "dupl": {
Name: "dupl",
Command: `dupl -plumbing -threshold {duplthreshold}`, Command: `dupl -plumbing -threshold {duplthreshold}`,
Pattern: `^(?P<path>.*?\.go):(?P<line>\d+)-\d+:\s*(?P<message>.*)$`, Pattern: `^(?P<path>.*?\.go):(?P<line>\d+)-\d+:\s*(?P<message>.*)$`,
InstallFrom: "github.com/mibk/dupl", InstallFrom: "github.com/mibk/dupl",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"errcheck": { "errcheck": {
Name: "errcheck", Command: `errcheck -abspath {not_tests=-ignoretests}`,
Command: `errcheck -abspath`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/kisielk/errcheck", InstallFrom: "github.com/kisielk/errcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"gas": { "gas": {
Name: "gas",
Command: `gas -fmt=csv`, Command: `gas -fmt=csv`,
Pattern: `^(?P<path>.*?\.go),(?P<line>\d+),(?P<message>[^,]+,[^,]+,[^,]+)`, Pattern: `^(?P<path>.*?\.go),(?P<line>\d+),(?P<message>[^,]+,[^,]+,[^,]+)`,
InstallFrom: "github.com/GoASTScanner/gas", InstallFrom: "github.com/GoASTScanner/gas",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"goconst": { "goconst": {
Name: "goconst",
Command: `goconst -min-occurrences {min_occurrences} -min-length {min_const_length}`, Command: `goconst -min-occurrences {min_occurrences} -min-length {min_const_length}`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/jgautheron/goconst/cmd/goconst", InstallFrom: "github.com/jgautheron/goconst/cmd/goconst",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"gocyclo": { "gocyclo": {
Name: "gocyclo",
Command: `gocyclo -over {mincyclo}`, Command: `gocyclo -over {mincyclo}`,
Pattern: `^(?P<cyclo>\d+)\s+\S+\s(?P<function>\S+)\s+(?P<path>.*?\.go):(?P<line>\d+):(\d+)$`, Pattern: `^(?P<cyclo>\d+)\s+\S+\s(?P<function>\S+)\s+(?P<path>.*?\.go):(?P<line>\d+):(\d+)$`,
InstallFrom: "github.com/alecthomas/gocyclo", InstallFrom: "github.com/alecthomas/gocyclo",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"gofmt": { "gofmt": {
Name: "gofmt",
Command: `gofmt -l -s`, Command: `gofmt -l -s`,
Pattern: `^(?P<path>.*?\.go)$`, Pattern: `^(?P<path>.*?\.go)$`,
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"goimports": { "goimports": {
Name: "goimports",
Command: `goimports -l`, Command: `goimports -l`,
Pattern: `^(?P<path>.*?\.go)$`, Pattern: `^(?P<path>.*?\.go)$`,
InstallFrom: "golang.org/x/tools/cmd/goimports", InstallFrom: "golang.org/x/tools/cmd/goimports",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"golint": { "golint": {
Name: "golint",
Command: `golint -min_confidence {min_confidence}`, Command: `golint -min_confidence {min_confidence}`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/golang/lint/golint", InstallFrom: "github.com/golang/lint/golint",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"gosimple": { "gosimple": {
Name: "gosimple",
Command: `gosimple`, Command: `gosimple`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/gosimple", InstallFrom: "honnef.co/go/tools/cmd/gosimple",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"gotype": { "gotype": {
Name: "gotype",
Command: `gotype -e {tests=-t}`, Command: `gotype -e {tests=-t}`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "golang.org/x/tools/cmd/gotype", InstallFrom: "golang.org/x/tools/cmd/gotype",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsByDirectory,
defaultEnabled: true,
IsFast: true,
},
"gotypex": {
Command: `gotype -e -x`,
Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "golang.org/x/tools/cmd/gotype",
PartitionStrategy: partitionPathsByDirectory,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"ineffassign": { "ineffassign": {
Name: "ineffassign",
Command: `ineffassign -n`, Command: `ineffassign -n`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/gordonklaus/ineffassign", InstallFrom: "github.com/gordonklaus/ineffassign",
PartitionStrategy: partitionToMaxArgSize, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"interfacer": { "interfacer": {
Name: "interfacer",
Command: `interfacer`, Command: `interfacer`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/mvdan/interfacer/cmd/interfacer", InstallFrom: "mvdan.cc/interfacer",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"lll": { "lll": {
Name: "lll",
Command: `lll -g -l {maxlinelength}`, Command: `lll -g -l {maxlinelength}`,
Pattern: `PATH:LINE:MESSAGE`, Pattern: `PATH:LINE:MESSAGE`,
InstallFrom: "github.com/walle/lll/cmd/lll", InstallFrom: "github.com/walle/lll/cmd/lll",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"megacheck": { "megacheck": {
Name: "megacheck",
Command: `megacheck`, Command: `megacheck`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/megacheck", InstallFrom: "honnef.co/go/tools/cmd/megacheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"misspell": { "misspell": {
Name: "misspell",
Command: `misspell -j 1`, Command: `misspell -j 1`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/client9/misspell/cmd/misspell", InstallFrom: "github.com/client9/misspell/cmd/misspell",
PartitionStrategy: partitionToMaxArgSizeWithFileGlobs, PartitionStrategy: partitionPathsAsFiles,
IsFast: true, IsFast: true,
}, },
"nakedret": {
Command: `nakedret`,
Pattern: `^(?P<path>.*?\.go):(?P<line>\d+)\s*(?P<message>.*)$`,
InstallFrom: "github.com/alexkohler/nakedret",
PartitionStrategy: partitionPathsAsDirectories,
},
"safesql": { "safesql": {
Name: "safesql",
Command: `safesql`, Command: `safesql`,
Pattern: `^- (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+)$`, Pattern: `^- (?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+)$`,
InstallFrom: "github.com/stripe/safesql", InstallFrom: "github.com/stripe/safesql",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"staticcheck": { "staticcheck": {
Name: "staticcheck",
Command: `staticcheck`, Command: `staticcheck`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/staticcheck", InstallFrom: "honnef.co/go/tools/cmd/staticcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"structcheck": { "structcheck": {
Name: "structcheck",
Command: `structcheck {tests=-t}`, Command: `structcheck {tests=-t}`,
Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`, Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.+)$`,
InstallFrom: "github.com/opennota/check/cmd/structcheck", InstallFrom: "github.com/opennota/check/cmd/structcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"test": { "test": {
Name: "test",
Command: `go test`, Command: `go test`,
Pattern: `^--- FAIL: .*$\s+(?P<path>.*?\.go):(?P<line>\d+): (?P<message>.*)$`, Pattern: `^--- FAIL: .*$\s+(?P<path>.*?\.go):(?P<line>\d+): (?P<message>.*)$`,
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"testify": { "testify": {
Name: "testify",
Command: `go test`, Command: `go test`,
Pattern: `Location:\s+(?P<path>.*?\.go):(?P<line>\d+)$\s+Error:\s+(?P<message>[^\n]+)`, Pattern: `Location:\s+(?P<path>.*?\.go):(?P<line>\d+)$\s+Error:\s+(?P<message>[^\n]+)`,
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"unconvert": { "unconvert": {
Name: "unconvert",
Command: `unconvert`, Command: `unconvert`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/mdempsky/unconvert", InstallFrom: "github.com/mdempsky/unconvert",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"unparam": { "unparam": {
Name: "unparam", Command: `unparam {not_tests=-tests=false}`,
Command: `unparam`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "github.com/mvdan/unparam", InstallFrom: "mvdan.cc/unparam",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"unused": { "unused": {
Name: "unused",
Command: `unused`, Command: `unused`,
Pattern: `PATH:LINE:COL:MESSAGE`, Pattern: `PATH:LINE:COL:MESSAGE`,
InstallFrom: "honnef.co/go/tools/cmd/unused", InstallFrom: "honnef.co/go/tools/cmd/unused",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
}, },
"varcheck": { "varcheck": {
Name: "varcheck",
Command: `varcheck`, Command: `varcheck`,
Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`, Pattern: `^(?:[^:]+: )?(?P<path>.*?\.go):(?P<line>\d+):(?P<col>\d+):\s*(?P<message>.*)$`,
InstallFrom: "github.com/opennota/check/cmd/varcheck", InstallFrom: "github.com/opennota/check/cmd/varcheck",
PartitionStrategy: partitionToMaxArgSizeWithPackagePaths, PartitionStrategy: partitionPathsAsPackages,
defaultEnabled: true, defaultEnabled: true,
}, },
"vet": { "vet": {
Name: "vet", Command: `govet --no-recurse`,
Command: `go tool vet`,
Pattern: vetPattern, Pattern: vetPattern,
PartitionStrategy: partitionToPackageFileGlobs, InstallFrom: "github.com/dnephin/govet",
PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },
"vetshadow": { "vetshadow": {
Name: "vetshadow", Command: `govet --no-recurse --shadow`,
Command: `go tool vet --shadow`,
Pattern: vetPattern, Pattern: vetPattern,
PartitionStrategy: partitionToPackageFileGlobs, PartitionStrategy: partitionPathsAsDirectories,
defaultEnabled: true, defaultEnabled: true,
IsFast: true, IsFast: true,
}, },

View File

@ -1,6 +1,8 @@
package main package main
import ( import (
"reflect"
"runtime"
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -12,17 +14,48 @@ func TestNewLinterWithCustomLinter(t *testing.T) {
Command: "/usr/bin/custom", Command: "/usr/bin/custom",
Pattern: "path", Pattern: "path",
} }
linter, err := NewLinter(config) linter, err := NewLinter("thename", config)
require.NoError(t, err) require.NoError(t, err)
assert.NotNil(t, linter.LinterConfig.PartitionStrategy) assert.Equal(t, functionName(partitionPathsAsDirectories), functionName(linter.LinterConfig.PartitionStrategy))
assert.Equal(t, "(?m:path)", linter.regex.String())
assert.Equal(t, "thename", linter.Name)
assert.Equal(t, config.Command, linter.Command)
} }
func TestGetLinterByName(t *testing.T) { func TestGetLinterByName(t *testing.T) {
config := LinterConfig{ config := LinterConfig{
Command: "aligncheck", Command: "maligned",
Pattern: "path", Pattern: "path",
InstallFrom: "./install/path",
PartitionStrategy: partitionPathsAsDirectories,
IsFast: true,
} }
overrideConfig := getLinterByName(config.Command, config) overrideConfig := getLinterByName(config.Command, config)
require.Equal(t, config.Command, overrideConfig.Command) assert.Equal(t, config.Command, overrideConfig.Command)
require.Equal(t, config.Pattern, overrideConfig.Pattern) assert.Equal(t, config.Pattern, overrideConfig.Pattern)
assert.Equal(t, config.InstallFrom, overrideConfig.InstallFrom)
assert.Equal(t, functionName(config.PartitionStrategy), functionName(overrideConfig.PartitionStrategy))
assert.Equal(t, config.IsFast, overrideConfig.IsFast)
}
func TestValidateLinters(t *testing.T) {
originalConfig := *config
defer func() { config = &originalConfig }()
config = &Config{
Enable: []string{"_dummylinter_"},
}
err := validateLinters(lintersFromConfig(config), config)
require.Error(t, err, "expected unknown linter error for _dummylinter_")
config = &Config{
Enable: defaultEnabled(),
}
err = validateLinters(lintersFromConfig(config), config)
require.NoError(t, err)
}
func functionName(i interface{}) string {
return runtime.FuncForPC(reflect.ValueOf(i).Pointer()).Name()
} }

View File

@ -14,7 +14,7 @@ import (
"text/template" "text/template"
"time" "time"
"gopkg.in/alecthomas/kingpin.v3-unstable" kingpin "gopkg.in/alecthomas/kingpin.v3-unstable"
) )
var ( var (
@ -51,16 +51,17 @@ func setupFlags(app *kingpin.Application) {
app.Flag("line-length", "Report lines longer than N (using lll).").PlaceHolder("80").IntVar(&config.LineLength) app.Flag("line-length", "Report lines longer than N (using lll).").PlaceHolder("80").IntVar(&config.LineLength)
app.Flag("min-confidence", "Minimum confidence interval to pass to golint.").PlaceHolder(".80").FloatVar(&config.MinConfidence) app.Flag("min-confidence", "Minimum confidence interval to pass to golint.").PlaceHolder(".80").FloatVar(&config.MinConfidence)
app.Flag("min-occurrences", "Minimum occurrences to pass to goconst.").PlaceHolder("3").IntVar(&config.MinOccurrences) app.Flag("min-occurrences", "Minimum occurrences to pass to goconst.").PlaceHolder("3").IntVar(&config.MinOccurrences)
app.Flag("min-const-length", "Minimumum constant length.").PlaceHolder("3").IntVar(&config.MinConstLength) app.Flag("min-const-length", "Minimum constant length.").PlaceHolder("3").IntVar(&config.MinConstLength)
app.Flag("dupl-threshold", "Minimum token sequence as a clone for dupl.").PlaceHolder("50").IntVar(&config.DuplThreshold) app.Flag("dupl-threshold", "Minimum token sequence as a clone for dupl.").PlaceHolder("50").IntVar(&config.DuplThreshold)
app.Flag("sort", fmt.Sprintf("Sort output by any of %s.", strings.Join(sortKeys, ", "))).PlaceHolder("none").EnumsVar(&config.Sort, sortKeys...) app.Flag("sort", fmt.Sprintf("Sort output by any of %s.", strings.Join(sortKeys, ", "))).PlaceHolder("none").EnumsVar(&config.Sort, sortKeys...)
app.Flag("tests", "Include test files for linters that support this option").Short('t').BoolVar(&config.Test) app.Flag("tests", "Include test files for linters that support this option.").Short('t').BoolVar(&config.Test)
app.Flag("deadline", "Cancel linters if they have not completed within this duration.").PlaceHolder("30s").DurationVar((*time.Duration)(&config.Deadline)) app.Flag("deadline", "Cancel linters if they have not completed within this duration.").PlaceHolder("30s").DurationVar((*time.Duration)(&config.Deadline))
app.Flag("errors", "Only show errors.").BoolVar(&config.Errors) app.Flag("errors", "Only show errors.").BoolVar(&config.Errors)
app.Flag("json", "Generate structured JSON rather than standard line-based output.").BoolVar(&config.JSON) app.Flag("json", "Generate structured JSON rather than standard line-based output.").BoolVar(&config.JSON)
app.Flag("checkstyle", "Generate checkstyle XML rather than standard line-based output.").BoolVar(&config.Checkstyle) app.Flag("checkstyle", "Generate checkstyle XML rather than standard line-based output.").BoolVar(&config.Checkstyle)
app.Flag("enable-gc", "Enable GC for linters (useful on large repositories).").BoolVar(&config.EnableGC) app.Flag("enable-gc", "Enable GC for linters (useful on large repositories).").BoolVar(&config.EnableGC)
app.Flag("aggregate", "Aggregate issues reported by several linters.").BoolVar(&config.Aggregate) app.Flag("aggregate", "Aggregate issues reported by several linters.").BoolVar(&config.Aggregate)
app.Flag("warn-unmatched-nolint", "Warn if a nolint directive is not matched with an issue.").BoolVar(&config.WarnUnmatchedDirective)
app.GetFlag("help").Short('h') app.GetFlag("help").Short('h')
} }
@ -200,6 +201,9 @@ Severity override map (default is "warning"):
paths := resolvePaths(*pathsArg, config.Skip) paths := resolvePaths(*pathsArg, config.Skip)
linters := lintersFromConfig(config) linters := lintersFromConfig(config)
err := validateLinters(linters, config)
kingpin.FatalIfError(err, "")
issues, errch := runLinters(linters, paths, config.Concurrency, exclude, include) issues, errch := runLinters(linters, paths, config.Concurrency, exclude, include)
status := 0 status := 0
if config.JSON { if config.JSON {
@ -222,7 +226,7 @@ Severity override map (default is "warning"):
func processConfig(config *Config) (include *regexp.Regexp, exclude *regexp.Regexp) { func processConfig(config *Config) (include *regexp.Regexp, exclude *regexp.Regexp) {
tmpl, err := template.New("output").Parse(config.Format) tmpl, err := template.New("output").Parse(config.Format)
kingpin.FatalIfError(err, "invalid format %q", config.Format) kingpin.FatalIfError(err, "invalid format %q", config.Format)
formatTemplate = tmpl config.formatTemplate = tmpl
// Linters are by their very nature, short lived, so disable GC. // Linters are by their very nature, short lived, so disable GC.
// Reduced (user) linting time on kingpin from 0.97s to 0.64s. // Reduced (user) linting time on kingpin from 0.97s to 0.64s.

View File

@ -1,6 +1,7 @@
package main package main
import ( import (
"encoding/json"
"fmt" "fmt"
"path/filepath" "path/filepath"
) )
@ -10,6 +11,29 @@ const MaxCommandBytes = 32000
type partitionStrategy func([]string, []string) ([][]string, error) type partitionStrategy func([]string, []string) ([][]string, error)
// UnmarshalJSON maps a strategy name from the JSON config onto the
// corresponding partition function. Recognised names: "directories",
// "files", "packages", "files-by-package", "single-directory". Any other
// name yields an error.
func (ps *partitionStrategy) UnmarshalJSON(raw []byte) error {
	var strategyName string
	if err := json.Unmarshal(raw, &strategyName); err != nil {
		return err
	}

	switch strategyName {
	case "directories":
		*ps = partitionPathsAsDirectories
	case "files":
		*ps = partitionPathsAsFiles
	case "packages":
		*ps = partitionPathsAsPackages
	case "files-by-package":
		*ps = partitionPathsAsFilesGroupedByPackage
	case "single-directory":
		*ps = partitionPathsByDirectory
	default:
		// Error message typo fixed: "parition" -> "partition".
		return fmt.Errorf("unknown partition strategy %s", strategyName)
	}
	return nil
}
func pathsToFileGlobs(paths []string) ([]string, error) { func pathsToFileGlobs(paths []string) ([]string, error) {
filePaths := []string{} filePaths := []string{}
for _, dir := range paths { for _, dir := range paths {
@ -22,7 +46,7 @@ func pathsToFileGlobs(paths []string) ([]string, error) {
return filePaths, nil return filePaths, nil
} }
func partitionToMaxArgSize(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsDirectories(cmdArgs []string, paths []string) ([][]string, error) {
return partitionToMaxSize(cmdArgs, paths, MaxCommandBytes), nil return partitionToMaxSize(cmdArgs, paths, MaxCommandBytes), nil
} }
@ -72,15 +96,15 @@ func (p *sizePartitioner) end() [][]string {
return p.parts return p.parts
} }
func partitionToMaxArgSizeWithFileGlobs(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsFiles(cmdArgs []string, paths []string) ([][]string, error) {
filePaths, err := pathsToFileGlobs(paths) filePaths, err := pathsToFileGlobs(paths)
if err != nil || len(filePaths) == 0 { if err != nil || len(filePaths) == 0 {
return nil, err return nil, err
} }
return partitionToMaxArgSize(cmdArgs, filePaths) return partitionPathsAsDirectories(cmdArgs, filePaths)
} }
func partitionToPackageFileGlobs(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsFilesGroupedByPackage(cmdArgs []string, paths []string) ([][]string, error) {
parts := [][]string{} parts := [][]string{}
for _, path := range paths { for _, path := range paths {
filePaths, err := pathsToFileGlobs([]string{path}) filePaths, err := pathsToFileGlobs([]string{path})
@ -95,12 +119,12 @@ func partitionToPackageFileGlobs(cmdArgs []string, paths []string) ([][]string,
return parts, nil return parts, nil
} }
func partitionToMaxArgSizeWithPackagePaths(cmdArgs []string, paths []string) ([][]string, error) { func partitionPathsAsPackages(cmdArgs []string, paths []string) ([][]string, error) {
packagePaths, err := pathsToPackagePaths(paths) packagePaths, err := pathsToPackagePaths(paths)
if err != nil || len(packagePaths) == 0 { if err != nil || len(packagePaths) == 0 {
return nil, err return nil, err
} }
return partitionToMaxArgSize(cmdArgs, packagePaths) return partitionPathsAsDirectories(cmdArgs, packagePaths)
} }
func pathsToPackagePaths(paths []string) ([]string, error) { func pathsToPackagePaths(paths []string) ([]string, error) {
@ -129,3 +153,11 @@ func packageNameFromPath(path string) (string, error) {
} }
return "", fmt.Errorf("%s not in GOPATH", path) return "", fmt.Errorf("%s not in GOPATH", path)
} }
func partitionPathsByDirectory(cmdArgs []string, paths []string) ([][]string, error) {
parts := [][]string{}
for _, path := range paths {
parts = append(parts, append(cmdArgs, path))
}
return parts, nil
}

View File

@ -38,7 +38,7 @@ func TestPartitionToPackageFileGlobs(t *testing.T) {
mkGoFile(t, dir, "other.go") mkGoFile(t, dir, "other.go")
} }
parts, err := partitionToPackageFileGlobs(cmdArgs, paths) parts, err := partitionPathsAsFilesGroupedByPackage(cmdArgs, paths)
require.NoError(t, err) require.NoError(t, err)
expected := [][]string{ expected := [][]string{
append(cmdArgs, packagePaths(paths[0], "file.go", "other.go")...), append(cmdArgs, packagePaths(paths[0], "file.go", "other.go")...),
@ -62,7 +62,7 @@ func TestPartitionToPackageFileGlobsNoFiles(t *testing.T) {
cmdArgs := []string{"/usr/bin/foo", "-c"} cmdArgs := []string{"/usr/bin/foo", "-c"}
paths := []string{filepath.Join(tmpdir, "one"), filepath.Join(tmpdir, "two")} paths := []string{filepath.Join(tmpdir, "one"), filepath.Join(tmpdir, "two")}
parts, err := partitionToPackageFileGlobs(cmdArgs, paths) parts, err := partitionPathsAsFilesGroupedByPackage(cmdArgs, paths)
require.NoError(t, err) require.NoError(t, err)
assert.Len(t, parts, 0) assert.Len(t, parts, 0)
} }
@ -74,7 +74,7 @@ func TestPartitionToMaxArgSizeWithFileGlobsNoFiles(t *testing.T) {
cmdArgs := []string{"/usr/bin/foo", "-c"} cmdArgs := []string{"/usr/bin/foo", "-c"}
paths := []string{filepath.Join(tmpdir, "one"), filepath.Join(tmpdir, "two")} paths := []string{filepath.Join(tmpdir, "one"), filepath.Join(tmpdir, "two")}
parts, err := partitionToMaxArgSizeWithFileGlobs(cmdArgs, paths) parts, err := partitionPathsAsFiles(cmdArgs, paths)
require.NoError(t, err) require.NoError(t, err)
assert.Len(t, parts, 0) assert.Len(t, parts, 0)
} }
@ -97,3 +97,18 @@ func fakeGoPath(t *testing.T, path string) func() {
require.NoError(t, os.Setenv("GOPATH", path)) require.NoError(t, os.Setenv("GOPATH", path))
return func() { require.NoError(t, os.Setenv("GOPATH", oldpath)) } return func() { require.NoError(t, os.Setenv("GOPATH", oldpath)) }
} }
func TestPartitionPathsByDirectory(t *testing.T) {
cmdArgs := []string{"/usr/bin/foo", "-c"}
paths := []string{"one", "two", "three"}
parts, err := partitionPathsByDirectory(cmdArgs, paths)
require.NoError(t, err)
expected := [][]string{
append(cmdArgs, "one"),
append(cmdArgs, "two"),
append(cmdArgs, "three"),
}
assert.Equal(t, expected, parts)
}

View File

@ -1,20 +0,0 @@
package regressiontests
import "testing"
func TestAlignCheck(t *testing.T) {
t.Parallel()
source := `package test
type unaligned struct {
a uint16
b uint64
c uint16
}
`
expected := Issues{
{Linter: "aligncheck", Severity: "warning", Path: "test.go", Line: 3, Col: 6, Message: "struct unaligned could have size 16 (currently 24)"},
}
ExpectIssues(t, "aligncheck", source, expected)
}

View File

@ -40,8 +40,8 @@ func two() string {
` `
expected := Issues{ expected := Issues{
{Linter: "dupl", Severity: "warning", Path: "test.go", Line: 19, Col: 0, Message: "duplicate of test.go:3-17"},
{Linter: "dupl", Severity: "warning", Path: "test.go", Line: 3, Col: 0, Message: "duplicate of test.go:19-33"}, {Linter: "dupl", Severity: "warning", Path: "test.go", Line: 3, Col: 0, Message: "duplicate of test.go:19-33"},
{Linter: "dupl", Severity: "warning", Path: "test.go", Line: 19, Col: 0, Message: "duplicate of test.go:3-17"},
} }
ExpectIssues(t, "dupl", source, expected) ExpectIssues(t, "dupl", source, expected)
} }

View File

@ -22,9 +22,9 @@ func a(ok bool, ch chan bool) {
} }
` `
expected := Issues{ expected := Issues{
{Linter: "gosimple", Severity: "warning", Path: "test.go", Line: 4, Col: 2, Message: "should use a simple channel send/receive instead of select with a single case (S1000)"},
{Linter: "gosimple", Severity: "warning", Path: "test.go", Line: 8, Col: 2, Message: "should use for range instead of for { select {} } (S1000)"}, {Linter: "gosimple", Severity: "warning", Path: "test.go", Line: 8, Col: 2, Message: "should use for range instead of for { select {} } (S1000)"},
{Linter: "gosimple", Severity: "warning", Path: "test.go", Line: 14, Col: 5, Message: "should omit comparison to bool constant, can be simplified to ok (S1002)"}, {Linter: "gosimple", Severity: "warning", Path: "test.go", Line: 14, Col: 5, Message: "should omit comparison to bool constant, can be simplified to ok (S1002)"},
{Linter: "gosimple", Severity: "warning", Path: "test.go", Line: 4, Col: 2, Message: "should use a simple channel send/receive instead of select with a single case (S1000)"},
} }
ExpectIssues(t, "gosimple", source, expected) ExpectIssues(t, "gosimple", source, expected)
} }

View File

@ -1,17 +1,55 @@
package regressiontests package regressiontests
import "testing" import (
"fmt"
"testing"
"github.com/gotestyourself/gotestyourself/fs"
"github.com/stretchr/testify/assert"
)
func TestGoType(t *testing.T) { func TestGoType(t *testing.T) {
t.Parallel() t.Parallel()
source := `package test
func test() { dir := fs.NewDir(t, "test-gotype",
fs.WithFile("file.go", goTypeFile("root")),
fs.WithDir("sub",
fs.WithFile("file.go", goTypeFile("sub"))),
fs.WithDir("excluded",
fs.WithFile("file.go", goTypeFile("excluded"))))
defer dir.Remove()
expected := Issues{
{Linter: "gotype", Severity: "error", Path: "file.go", Line: 4, Col: 6, Message: "foo declared but not used"},
{Linter: "gotype", Severity: "error", Path: "sub/file.go", Line: 4, Col: 6, Message: "foo declared but not used"},
}
actual := RunLinter(t, "gotype", dir.Path(), "--skip=excluded")
assert.Equal(t, expected, actual)
}
func TestGoTypeWithMultiPackageDirectoryTest(t *testing.T) {
t.Parallel()
dir := fs.NewDir(t, "test-gotype",
fs.WithFile("file.go", goTypeFile("root")),
fs.WithFile("file_test.go", goTypeFile("root_test")))
defer dir.Remove()
expected := Issues{
{Linter: "gotype", Severity: "error", Path: "file.go", Line: 4, Col: 6, Message: "foo declared but not used"},
{Linter: "gotypex", Severity: "error", Path: "file_test.go", Line: 4, Col: 6, Message: "foo declared but not used"},
}
actual := RunLinter(t, "gotype", dir.Path())
actual = append(actual, RunLinter(t, "gotypex", dir.Path())...)
assert.Equal(t, expected, actual)
}
func goTypeFile(pkg string) string {
return fmt.Sprintf(`package %s
func badFunction() {
var foo string var foo string
} }
` `, pkg)
expected := Issues{
{Linter: "gotype", Severity: "error", Path: "test.go", Line: 4, Col: 6, Message: "foo declared but not used"},
}
ExpectIssues(t, "gotype", source, expected)
} }

View File

@ -0,0 +1,20 @@
package regressiontests
import "testing"
func TestMaligned(t *testing.T) {
t.Parallel()
source := `package test
type unaligned struct {
a uint16
b uint64
c uint16
}
`
expected := Issues{
{Linter: "maligned", Severity: "warning", Path: "test.go", Line: 3, Col: 16, Message: "struct of size 24 could be 16"},
}
ExpectIssues(t, "maligned", source, expected)
}

View File

@ -0,0 +1,29 @@
package regressiontests
import "testing"
func TestNakedret(t *testing.T) {
t.Parallel()
source := `package test
func shortFunc() (r uint32) {
r = r + r
return
}
func longFunc() (r uint32) {
r = r + r
r = r - r
r = r * r
r = r / r
r = r % r
r = r^r
r = r&r
return
}
`
expected := Issues{
{Linter: "nakedret", Severity: "warning", Path: "test.go", Line: 16, Message: "longFunc naked returns on 9 line function "},
}
ExpectIssues(t, "nakedret", source, expected)
}

View File

@ -8,10 +8,10 @@ import (
"os" "os"
"os/exec" "os/exec"
"path/filepath" "path/filepath"
"sort"
"strings" "strings"
"testing" "testing"
"github.com/gotestyourself/gotestyourself/fs"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -35,10 +35,6 @@ func (i *Issue) String() string {
type Issues []Issue type Issues []Issue
func (e Issues) Len() int { return len(e) }
func (e Issues) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
func (e Issues) Less(i, j int) bool { return e[i].String() < e[j].String() }
// ExpectIssues runs gometalinter and expects it to generate exactly the // ExpectIssues runs gometalinter and expects it to generate exactly the
// issues provided. // issues provided.
func ExpectIssues(t *testing.T, linter string, source string, expected Issues, extraFlags ...string) { func ExpectIssues(t *testing.T, linter string, source string, expected Issues, extraFlags ...string) {
@ -51,50 +47,60 @@ func ExpectIssues(t *testing.T, linter string, source string, expected Issues, e
err = ioutil.WriteFile(testFile, []byte(source), 0644) err = ioutil.WriteFile(testFile, []byte(source), 0644)
require.NoError(t, err) require.NoError(t, err)
// Run gometalinter. actual := RunLinter(t, linter, dir, extraFlags...)
assert.Equal(t, expected, actual)
}
// RunLinter runs the gometalinter as a binary against the files at path and
// returns the issues it encountered
func RunLinter(t *testing.T, linter string, path string, extraFlags ...string) Issues {
binary, cleanup := buildBinary(t) binary, cleanup := buildBinary(t)
defer cleanup() defer cleanup()
args := []string{"-d", "--disable-all", "--enable", linter, "--json", dir}
args := []string{
"-d", "--disable-all", "--enable", linter, "--json",
"--sort=path", "--sort=line", "--sort=column", "--sort=message",
"./...",
}
args = append(args, extraFlags...) args = append(args, extraFlags...)
cmd := exec.Command(binary, args...) cmd := exec.Command(binary, args...)
cmd.Dir = path
errBuffer := new(bytes.Buffer) errBuffer := new(bytes.Buffer)
cmd.Stderr = errBuffer cmd.Stderr = errBuffer
require.NoError(t, err)
output, _ := cmd.Output() output, _ := cmd.Output()
var actual Issues var actual Issues
err = json.Unmarshal(output, &actual) err := json.Unmarshal(output, &actual)
if !assert.NoError(t, err) { if !assert.NoError(t, err) {
fmt.Printf("Stderr: %s\n", errBuffer) fmt.Printf("Stderr: %s\n", errBuffer)
fmt.Printf("Output: %s\n", output) fmt.Printf("Output: %s\n", output)
return return nil
}
// Remove output from other linters.
actualForLinter := Issues{}
for _, issue := range actual {
if issue.Linter == linter || linter == "" {
// Normalise path.
issue.Path = "test.go"
issue.Message = strings.Replace(issue.Message, testFile, "test.go", -1)
issue.Message = strings.Replace(issue.Message, dir, "", -1)
actualForLinter = append(actualForLinter, issue)
}
}
sort.Sort(expected)
sort.Sort(actualForLinter)
if !assert.Equal(t, expected, actualForLinter) {
fmt.Printf("Stderr: %s\n", errBuffer)
fmt.Printf("Output: %s\n", output)
} }
return filterIssues(actual, linter, path)
} }
func buildBinary(t *testing.T) (string, func()) { func buildBinary(t *testing.T) (string, func()) {
tmpdir, err := ioutil.TempDir("", "regression-test") tmpdir := fs.NewDir(t, "regression-test-binary")
require.NoError(t, err) path := tmpdir.Join("gometalinter")
path := filepath.Join(tmpdir, "binary")
cmd := exec.Command("go", "build", "-o", path, "..") cmd := exec.Command("go", "build", "-o", path, "..")
require.NoError(t, cmd.Run()) require.NoError(t, cmd.Run())
return path, func() { os.RemoveAll(tmpdir) } return path, tmpdir.Remove
}
// filterIssues to just the issues relevant for the current linter and normalize
// the error message by removing the directory part of the path from both Path
// and Message
func filterIssues(issues Issues, linterName string, dir string) Issues {
filtered := Issues{}
for _, issue := range issues {
if issue.Linter == linterName || linterName == "" {
issue.Path = strings.Replace(issue.Path, dir+string(os.PathSeparator), "", -1)
issue.Message = strings.Replace(issue.Message, dir+string(os.PathSeparator), "", -1)
issue.Message = strings.Replace(issue.Message, dir, "", -1)
filtered = append(filtered, issue)
}
}
return filtered
} }

View File

@ -1,6 +1,8 @@
package regressiontests package regressiontests
import "testing" import (
"testing"
)
func TestUnparam(t *testing.T) { func TestUnparam(t *testing.T) {
t.Parallel() t.Parallel()
@ -133,9 +135,6 @@ func AsSliceElem(f FooType) []int {
var SliceElems = []func(FooType) []int{AsSliceElem} ` var SliceElems = []func(FooType) []int{AsSliceElem} `
expected := Issues{ expected := Issues{
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 103, Col: 16, Message: "parameter a is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 103, Col: 27, Message: "parameter r is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 123, Col: 18, Message: "parameter f is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 13, Col: 19, Message: "parameter b is unused"}, Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 13, Col: 19, Message: "parameter b is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 20, Col: 20, Message: "parameter f is unused"}, Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 20, Col: 20, Message: "parameter f is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 34, Col: 37, Message: "parameter code is unused"}, Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 34, Col: 37, Message: "parameter code is unused"},
@ -146,6 +145,9 @@ var SliceElems = []func(FooType) []int{AsSliceElem} `
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 85, Col: 25, Message: "parameter s is unused"}, Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 85, Col: 25, Message: "parameter s is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 95, Col: 15, Message: "parameter a is unused"}, Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 95, Col: 15, Message: "parameter a is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 95, Col: 26, Message: "parameter b is unused"}, Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 95, Col: 26, Message: "parameter b is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 103, Col: 16, Message: "parameter a is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 103, Col: 27, Message: "parameter r is unused"},
Issue{Linter: "unparam", Severity: "warning", Path: "test.go", Line: 123, Col: 18, Message: "parameter f is unused"},
} }
ExpectIssues(t, "unparam", source, expected) ExpectIssues(t, "unparam", source, expected)
} }

View File

@ -1,20 +1,55 @@
package regressiontests package regressiontests
import "testing" import (
"testing"
"github.com/gotestyourself/gotestyourself/fs"
"github.com/stretchr/testify/assert"
)
func TestVet(t *testing.T) { func TestVet(t *testing.T) {
t.Parallel() t.Parallel()
dir := fs.NewDir(t, "test-vet",
fs.WithFile("file.go", vetFile("root")),
fs.WithFile("file_test.go", vetExternalPackageFile("root_test")),
fs.WithDir("sub",
fs.WithFile("file.go", vetFile("sub"))),
fs.WithDir("excluded",
fs.WithFile("file.go", vetFile("excluded"))))
defer dir.Remove()
expected := Issues{ expected := Issues{
{Linter: "vet", Severity: "error", Path: "test.go", Line: 7, Col: 0, Message: "missing argument for Printf(\"%d\"): format reads arg 1, have only 0 args"}, {Linter: "vet", Severity: "error", Path: "file.go", Line: 7, Col: 0, Message: "missing argument for Printf(\"%d\"): format reads arg 1, have only 0 args"},
{Linter: "vet", Severity: "error", Path: "test.go", Line: 7, Col: 0, Message: "unreachable code"}, {Linter: "vet", Severity: "error", Path: "file.go", Line: 7, Col: 0, Message: "unreachable code"},
{Linter: "vet", Severity: "error", Path: "file_test.go", Line: 7, Col: 0, Message: "unreachable code"},
{Linter: "vet", Severity: "error", Path: "sub/file.go", Line: 7, Col: 0, Message: "missing argument for Printf(\"%d\"): format reads arg 1, have only 0 args"},
{Linter: "vet", Severity: "error", Path: "sub/file.go", Line: 7, Col: 0, Message: "unreachable code"},
} }
ExpectIssues(t, "vet", `package main actual := RunLinter(t, "vet", dir.Path(), "--skip=excluded")
assert.Equal(t, expected, actual)
}
func vetFile(pkg string) string {
return `package ` + pkg + `
import "fmt" import "fmt"
func main() { func Something() {
return return
fmt.Printf("%d") fmt.Printf("%d")
} }
`, expected) `
}
func vetExternalPackageFile(pkg string) string {
return `package ` + pkg + `
import "fmt"
func ExampleSomething() {
return
root.Something()
}
`
} }