diff --git a/README.md b/README.md index 2e55f4e92826..dadb216a3e20 100644 --- a/README.md +++ b/README.md @@ -166,7 +166,7 @@ GolangCI-Lint can be used with zero configuration. By default the following lint ```bash $ golangci-lint help linters Enabled by default linters: -govet: Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: true] +govet (vet, vetshadow): Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: true] errcheck: Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases [fast: true] staticcheck: Staticcheck is a go vet on steroids, applying a ton of static analysis checks [fast: false] unused: Checks Go code for unused constants, variables, functions and types [fast: false] @@ -185,6 +185,7 @@ $ golangci-lint help linters ... Disabled by default linters: golint: Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes [fast: true] +stylecheck: Stylecheck is a replacement for golint [fast: false] gosec (gas): Inspects source code for security problems [fast: true] interfacer: Linter that suggests narrower interface types [fast: false] unconvert: Remove unnecessary type conversions [fast: true] @@ -194,7 +195,6 @@ gocyclo: Computes and checks the cyclomatic complexity of functions [fast: true] gofmt: Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification [fast: true] goimports: Goimports does everything that gofmt does. 
Additionally it checks unused imports [fast: true] maligned: Tool to detect Go structs that would take less memory if their fields were sorted [fast: true] -megacheck: 3 sub-linters in one: unused, gosimple and staticcheck [fast: false] depguard: Go linter that checks if package imports are in a list of acceptable packages [fast: true] misspell: Finds commonly misspelled English words in comments [fast: true] lll: Reports long lines [fast: true] @@ -388,6 +388,7 @@ golangci-lint help linters ### Disabled By Default Linters (`-E/--enable`) - [golint](https://github.com/golang/lint) - Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes +- [stylecheck](https://github.com/dominikh/go-tools/tree/master/stylecheck) - Stylecheck is a replacement for golint - [gosec](https://github.com/securego/gosec) - Inspects source code for security problems - [interfacer](https://github.com/mvdan/interfacer) - Linter that suggests narrower interface types - [unconvert](https://github.com/mdempsky/unconvert) - Remove unnecessary type conversions @@ -397,7 +398,6 @@ golangci-lint help linters - [gofmt](https://golang.org/cmd/gofmt/) - Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification - [goimports](https://godoc.org/golang.org/x/tools/cmd/goimports) - Goimports does everything that gofmt does. 
Additionally it checks unused imports - [maligned](https://github.com/mdempsky/maligned) - Tool to detect Go structs that would take less memory if their fields were sorted -- [megacheck](https://github.com/dominikh/go-tools/tree/master/cmd/megacheck) - 3 sub-linters in one: unused, gosimple and staticcheck - [depguard](https://github.com/OpenPeeDeeP/depguard) - Go linter that checks if package imports are in a list of acceptable packages - [misspell](https://github.com/client9/misspell) - Finds commonly misspelled English words in comments - [lll](https://github.com/walle/lll) - Reports long lines @@ -461,7 +461,7 @@ Flags: # govet: Common false positives - (possible misuse of unsafe.Pointer|should have signature) - # megacheck: Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore + # staticcheck: Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore - ineffective break statement. 
Did you mean to break out of the outer loop # gosec: Too many false-positives on 'unsafe' usage diff --git a/go.mod b/go.mod index 37622081d375..ab6a2c184a67 100644 --- a/go.mod +++ b/go.mod @@ -21,23 +21,19 @@ require ( github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a github.com/golangci/errcheck v0.0.0-20181003203344-ef45e06d44b6 github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 - github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b + github.com/golangci/go-tools v0.0.0-20180109140146-be4842e24b95 github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3 github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee github.com/golangci/gofmt v0.0.0-20181105071733-0b8337e80d98 github.com/golangci/gosec v0.0.0-20180901114220-8afd9cbb6cfb github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190 github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde - github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec - github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5 // indirect github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1 github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770 github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 - github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 - github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16 github.com/hashicorp/hcl v0.0.0-20180404174102-ef8a98b0bbce // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/magiconair/properties v1.7.6 // indirect @@ -68,6 +64,9 @@ require ( gopkg.in/airbrake/gobrake.v2 v2.0.9 // indirect gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2 // indirect gopkg.in/yaml.v2 
v2.2.1 + mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed + mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect + mvdan.cc/unparam v0.0.0-20181201214637-68701730a1d7 sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec sourcegraph.com/sqs/pbtypes v0.0.0-20160107090929-4d1b9dc7ffc3 // indirect ) diff --git a/go.sum b/go.sum index a3f169ec30ad..83028cf5401e 100644 --- a/go.sum +++ b/go.sum @@ -49,8 +49,8 @@ github.com/golangci/errcheck v0.0.0-20181003203344-ef45e06d44b6 h1:i2jIkQFb8RG45 github.com/golangci/errcheck v0.0.0-20181003203344-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0= github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:WadunOE/TeHR8U7f0TXiJACHeU3cuFOXuKafw4rozqU= github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= -github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b h1:FSrt9JBK7JINu5UobyIF6epfpjL66H+67KZoTbE0zwk= -github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b/go.mod h1:unzUULGw35sjyOYjUt0jMTXqHlZPpPc6e+xfO4cd6mM= +github.com/golangci/go-tools v0.0.0-20180109140146-be4842e24b95 h1:msnLojqdJ37Bszm2D+srIkHJZvTKPFQWuyJAeMU6Ilo= +github.com/golangci/go-tools v0.0.0-20180109140146-be4842e24b95/go.mod h1:unzUULGw35sjyOYjUt0jMTXqHlZPpPc6e+xfO4cd6mM= github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3 h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8= github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o= github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee h1:J2XAy40+7yz70uaOiMbNnluTg7gyQhtGqLQncQh+4J8= @@ -63,11 +63,6 @@ github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190 h1:SLIgprnxQNjBpkz5 github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190/go.mod h1:pPwb+AK755h3/r73avHz5bEN6sa51/2HEZlLaV53hCo= github.com/golangci/ineffassign 
v0.0.0-20180808204949-2ee8f2867dde h1:qEGp3ZF1Qw6TkbWKn6GdJ12Ssu/CpJBaBcJ4hrUjrSo= github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU= -github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec h1:rvg392QhtAd/yOevPRX4wzYMIDYyq99j9MV+Mb1uVHs= -github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec/go.mod h1:yBorupihJ5OYDFE7/EZwrslyNyZaaidqqVptYTcNxnk= -github.com/golangci/lint v0.0.0-20170908181259-c2187e7932b5/go.mod h1:zs8jPuoOp76KrjiydDqO3CGeS4v9gq77HNNiYcxxTGw= -github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5 h1:9NYm50bkzER4RayDaggNjxF5kesUJREASyFgk4AcIis= -github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5/go.mod h1:zs8jPuoOp76KrjiydDqO3CGeS4v9gq77HNNiYcxxTGw= github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1 h1:PHK2kIh21Zt4IcG0bBRzQwEDVKF64LnkoSXnm8lfJUk= github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y= github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= @@ -78,12 +73,8 @@ github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 h1:leSNB7iYzLYSS github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI= github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg= github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4= -github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b h1:3hI7NZ9D3edEBVbN6V1urHWbFKJfcIlOFvX5m10jB88= -github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b/go.mod h1:zgj6NOYXOC1cexsdtDceI4/mj3aXK4JOVg9AV3C5LWI= github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= 
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= -github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16 h1:QURX/XMP2uJUzzEvfJ291v1snmbJuyznAJLSQVnPyko= -github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16/go.mod h1:KW2L33j82vo0S0U6RP6uUQSuat+0Q457Yf+1mXC98/M= github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/hashicorp/hcl v0.0.0-20180404174102-ef8a98b0bbce h1:xdsDDbiBDQTKASoGEZ+pEmF1OnWuu8AQ9I8iNbHNeno= @@ -154,8 +145,8 @@ golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20180826000951-f6ba57429505/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181201035826-d0ca3933b724/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181220024903-92cdcd90bf52 h1:oOIe9Zzq27JsS/3ACpGF1HwWnWNflZWT/3EvM7mtcEk= golang.org/x/tools v0.0.0-20181220024903-92cdcd90bf52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= gopkg.in/airbrake/gobrake.v2 v2.0.9 h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo= @@ -170,6 +161,12 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkep gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 
+mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= +mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= +mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= +mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= +mvdan.cc/unparam v0.0.0-20181201214637-68701730a1d7 h1:AHc3hAmhXTHwSA40I4CJW2w2iirqwLzZuFgC6LdbtJk= +mvdan.cc/unparam v0.0.0-20181201214637-68701730a1d7/go.mod h1:N4YHaPPCFjRU1vNrla2C9eoyqLxMaHmrsI8Th4iuQMY= sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec h1:wAAdENPXC7bE1oxY4VqSDdhaA+XQ8TgQHsZMMnrXjEk= sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec/go.mod h1:R09mWeb9JcPbO+A3cYDc11xjz0wp6r9+KnqdqROAoRU= sourcegraph.com/sqs/pbtypes v0.0.0-20160107090929-4d1b9dc7ffc3 h1:hXy8YsgVLDz5mlngKhNHQhAsAGrSp3dlXZN4b0/4UUI= diff --git a/pkg/commands/config.go b/pkg/commands/config.go index 452024739b55..537b36fb0908 100644 --- a/pkg/commands/config.go +++ b/pkg/commands/config.go @@ -17,6 +17,9 @@ func (e *Executor) initConfig() { Use: "config", Short: "Config", Run: func(cmd *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint config") + } if err := cmd.Help(); err != nil { e.log.Fatalf("Can't run help: %s", err) } @@ -34,7 +37,11 @@ func (e *Executor) initConfig() { } -func (e *Executor) executePathCmd(cmd *cobra.Command, args []string) { +func (e *Executor) executePathCmd(_ *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint config path") + } + usedConfigFile := viper.ConfigFileUsed() if usedConfigFile == "" { e.log.Warnf("No config file detected") diff --git a/pkg/commands/help.go b/pkg/commands/help.go index abcacb9d4cb2..d1e1bc1139b7 100644 --- a/pkg/commands/help.go +++ b/pkg/commands/help.go @@ -17,6 +17,9 @@ func (e *Executor) 
initHelp() { Use: "help", Short: "Help", Run: func(cmd *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint help") + } if err := cmd.Help(); err != nil { e.log.Fatalf("Can't run help: %s", err) } @@ -43,7 +46,7 @@ func printLinterConfigs(lcs []*linter.Config) { } } -func (e *Executor) executeLintersHelp(cmd *cobra.Command, args []string) { +func (e *Executor) executeLintersHelp(_ *cobra.Command, args []string) { if len(args) != 0 { e.log.Fatalf("Usage: golangci-lint help linters") } diff --git a/pkg/commands/linters.go b/pkg/commands/linters.go index 786a2d086430..8bde5f58fdf7 100644 --- a/pkg/commands/linters.go +++ b/pkg/commands/linters.go @@ -30,7 +30,7 @@ func IsLinterInConfigsList(name string, linters []*linter.Config) bool { return false } -func (e *Executor) executeLinters(cmd *cobra.Command, args []string) { +func (e *Executor) executeLinters(_ *cobra.Command, args []string) { if len(args) != 0 { e.log.Fatalf("Usage: golangci-lint linters") } diff --git a/pkg/commands/root.go b/pkg/commands/root.go index b0074fa71f57..f41e957de082 100644 --- a/pkg/commands/root.go +++ b/pkg/commands/root.go @@ -13,7 +13,7 @@ import ( "github.com/golangci/golangci-lint/pkg/logutils" ) -func (e *Executor) persistentPreRun(cmd *cobra.Command, args []string) { +func (e *Executor) persistentPreRun(_ *cobra.Command, _ []string) { if e.cfg.Run.PrintVersion { fmt.Fprintf(logutils.StdOut, "golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date) os.Exit(0) @@ -32,7 +32,7 @@ func (e *Executor) persistentPreRun(cmd *cobra.Command, args []string) { } } -func (e *Executor) persistentPostRun(cmd *cobra.Command, args []string) { +func (e *Executor) persistentPostRun(_ *cobra.Command, _ []string) { if e.cfg.Run.CPUProfilePath != "" { pprof.StopCPUProfile() } @@ -64,6 +64,9 @@ func (e *Executor) initRoot() { Short: "golangci-lint is a smart linters runner.", Long: `Smart, fast linters runner. 
Run it in cloud for every GitHub pull request on https://golangci.com`, Run: func(cmd *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint") + } if err := cmd.Help(); err != nil { e.log.Fatalf("Can't run help: %s", err) } diff --git a/pkg/commands/run.go b/pkg/commands/run.go index 3e7fe672e449..67f8fe8b4053 100644 --- a/pkg/commands/run.go +++ b/pkg/commands/run.go @@ -370,7 +370,7 @@ func (e *Executor) createPrinter() (printers.Printer, error) { return p, nil } -func (e *Executor) executeRun(cmd *cobra.Command, args []string) { +func (e *Executor) executeRun(_ *cobra.Command, args []string) { needTrackResources := e.cfg.Run.IsVerbose || e.cfg.Run.PrintResourcesUsage trackResourcesEndCh := make(chan struct{}) defer func() { // XXX: this defer must be before ctx.cancel defer diff --git a/pkg/config/config.go b/pkg/config/config.go index 3a1fe08938e0..8485b22e9b50 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -55,7 +55,7 @@ var DefaultExcludePatterns = []ExcludePattern{ }, { Pattern: "ineffective break statement. 
Did you mean to break out of the outer loop", - Linter: "megacheck", + Linter: "staticcheck", Why: "Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore", }, { diff --git a/pkg/golinters/gas.go b/pkg/golinters/gosec.go similarity index 100% rename from pkg/golinters/gas.go rename to pkg/golinters/gosec.go diff --git a/pkg/golinters/interfacer.go b/pkg/golinters/interfacer.go index b8f7f8c4b026..7395706ff02d 100644 --- a/pkg/golinters/interfacer.go +++ b/pkg/golinters/interfacer.go @@ -3,7 +3,7 @@ package golinters import ( "context" - "github.com/golangci/interfacer/check" + "mvdan.cc/interfacer/check" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/result" diff --git a/pkg/golinters/megacheck.go b/pkg/golinters/megacheck.go index f2a190299abf..63a1343b2811 100644 --- a/pkg/golinters/megacheck.go +++ b/pkg/golinters/megacheck.go @@ -2,172 +2,287 @@ package golinters import ( "context" - "errors" "fmt" "strings" + "time" + + "github.com/pkg/errors" + + "github.com/golangci/go-tools/config" + "github.com/golangci/go-tools/stylecheck" "github.com/golangci/go-tools/lint" "github.com/golangci/go-tools/lint/lintutil" "github.com/golangci/go-tools/simple" "github.com/golangci/go-tools/staticcheck" "github.com/golangci/go-tools/unused" - "github.com/golangci/tools/go/ssa" - "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" - "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/lint/linter" - libpackages "github.com/golangci/golangci-lint/pkg/packages" "github.com/golangci/golangci-lint/pkg/result" ) -const megacheckName = "megacheck" +const ( + MegacheckParentName = "megacheck" + MegacheckStaticcheckName = "staticcheck" + MegacheckUnusedName = "unused" + MegacheckGosimpleName = "gosimple" + MegacheckStylecheckName = "stylecheck" +) -type Megacheck struct { - UnusedEnabled bool 
- GosimpleEnabled bool - StaticcheckEnabled bool +type Staticcheck struct { + megacheck } -func (m Megacheck) Name() string { - names := []string{} - if m.UnusedEnabled { - names = append(names, "unused") - } - if m.GosimpleEnabled { - names = append(names, "gosimple") - } - if m.StaticcheckEnabled { - names = append(names, "staticcheck") +func NewStaticcheck() *Staticcheck { + return &Staticcheck{ + megacheck: megacheck{ + staticcheckEnabled: true, + }, } +} - if len(names) == 1 { - return names[0] // only one sublinter is enabled - } +func (Staticcheck) Name() string { return MegacheckStaticcheckName } +func (Staticcheck) Desc() string { + return "Staticcheck is a go vet on steroids, applying a ton of static analysis checks" +} + +type Gosimple struct { + megacheck +} - if len(names) == 3 { - return megacheckName // all enabled +func NewGosimple() *Gosimple { + return &Gosimple{ + megacheck: megacheck{ + gosimpleEnabled: true, + }, } +} - return fmt.Sprintf("megacheck.{%s}", strings.Join(names, ",")) +func (Gosimple) Name() string { return MegacheckGosimpleName } +func (Gosimple) Desc() string { + return "Linter for Go source code that specializes in simplifying a code" } -func (m Megacheck) Desc() string { - descs := map[string]string{ - "unused": "Checks Go code for unused constants, variables, functions and types", - "gosimple": "Linter for Go source code that specializes in simplifying a code", - "staticcheck": "Staticcheck is a go vet on steroids, applying a ton of static analysis checks", - "megacheck": "3 sub-linters in one: unused, gosimple and staticcheck", +type Unused struct { + megacheck +} + +func NewUnused() *Unused { + return &Unused{ + megacheck: megacheck{ + unusedEnabled: true, + }, } +} - return descs[m.Name()] +func (Unused) Name() string { return MegacheckUnusedName } +func (Unused) Desc() string { + return "Checks Go code for unused constants, variables, functions and types" } -func prettifyCompilationError(err packages.Error) error { - i, _ 
:= TypeCheck{}.parseError(err) - if i == nil { - return err - } +type Stylecheck struct { + megacheck +} - shortFilename, pathErr := fsutils.ShortestRelPath(i.Pos.Filename, "") - if pathErr != nil { - return err +func NewStylecheck() *Stylecheck { + return &Stylecheck{ + megacheck: megacheck{ + stylecheckEnabled: true, + }, } +} - errText := shortFilename - if i.Line() != 0 { - errText += fmt.Sprintf(":%d", i.Line()) - } - errText += fmt.Sprintf(": %s", i.Text) - return errors.New(errText) +func (Stylecheck) Name() string { return MegacheckStylecheckName } +func (Stylecheck) Desc() string { return "Stylecheck is a replacement for golint" } + +type megacheck struct { + unusedEnabled bool + gosimpleEnabled bool + staticcheckEnabled bool + stylecheckEnabled bool } -func (m Megacheck) canAnalyze(lintCtx *linter.Context) bool { - if len(lintCtx.NotCompilingPackages) == 0 { - return true - } +func (megacheck) Name() string { + return MegacheckParentName +} - var errPkgs []string - var errs []packages.Error - for _, p := range lintCtx.NotCompilingPackages { - if p.Name == "main" { - // megacheck crashes on not compiling packages but main packages - // aren't reachable by megacheck: other packages can't depend on them. - continue - } +func (megacheck) Desc() string { + return "" // shouldn't be called +} - errPkgs = append(errPkgs, p.String()) - errs = append(errs, libpackages.ExtractErrors(p, lintCtx.ASTCache)...) 
+func (m *megacheck) enableChildLinter(name string) error { + switch name { + case MegacheckStaticcheckName: + m.staticcheckEnabled = true + case MegacheckGosimpleName: + m.gosimpleEnabled = true + case MegacheckUnusedName: + m.unusedEnabled = true + case MegacheckStylecheckName: + m.stylecheckEnabled = true + default: + return fmt.Errorf("invalid child linter name %s for metalinter %s", name, m.Name()) } - if len(errPkgs) == 0 { // only main packages do not compile - return true + return nil +} + +type MegacheckMetalinter struct{} + +func (MegacheckMetalinter) Name() string { + return MegacheckParentName +} + +func (MegacheckMetalinter) BuildLinterConfig(enabledChildren []string) (*linter.Config, error) { + var m megacheck + for _, name := range enabledChildren { + if err := m.enableChildLinter(name); err != nil { + return nil, err + } } - warnText := fmt.Sprintf("Can't run megacheck because of compilation errors in packages %s", errPkgs) - if len(errs) != 0 { - warnText += fmt.Sprintf(": %s", prettifyCompilationError(errs[0])) - if len(errs) > 1 { - const runCmd = "golangci-lint run --no-config --disable-all -E typecheck" - warnText += fmt.Sprintf(" and %d more errors: run `%s` to see all errors", len(errs)-1, runCmd) + // TODO: merge linter.Config and linter.Linter or refactor it in another way + return &linter.Config{ + Linter: m, + EnabledByDefault: false, + NeedsTypeInfo: true, + NeedsSSARepr: true, + InPresets: []string{linter.PresetStyle, linter.PresetBugs, linter.PresetUnused}, + Speed: 1, + AlternativeNames: nil, + OriginalURL: "", + ParentLinterName: "", + }, nil +} + +func (MegacheckMetalinter) DefaultChildLinterNames() []string { + // no stylecheck here for backwards compatibility for users who enabled megacheck: don't enable extra + // linter for them + return []string{MegacheckStaticcheckName, MegacheckGosimpleName, MegacheckUnusedName} +} + +func (m MegacheckMetalinter) AllChildLinterNames() []string { + return append(m.DefaultChildLinterNames(), 
MegacheckStylecheckName) +} + +func (m MegacheckMetalinter) isValidChild(name string) bool { + for _, child := range m.AllChildLinterNames() { + if child == name { + return true } } - lintCtx.Log.Warnf("%s", warnText) - // megacheck crashes if there are not compiling packages return false } -func (m Megacheck) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { - if !m.canAnalyze(lintCtx) { - return nil, nil +func (m megacheck) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { + issues, err := m.runMegacheck(lintCtx.Packages, lintCtx.Settings().Unused.CheckExported) + if err != nil { + return nil, errors.Wrap(err, "failed to run megacheck") } - issues := runMegacheck(lintCtx.Program, lintCtx.MegacheckSSAProgram, lintCtx.LoaderConfig, - m.StaticcheckEnabled, m.GosimpleEnabled, m.UnusedEnabled, lintCtx.Settings().Unused.CheckExported) if len(issues) == 0 { return nil, nil } res := make([]result.Issue, 0, len(issues)) + meta := MegacheckMetalinter{} for _, i := range issues { + if !meta.isValidChild(i.Checker) { + lintCtx.Log.Warnf("Bad megacheck checker name %q", i.Checker) + continue + } + res = append(res, result.Issue{ Pos: i.Position, Text: markIdentifiers(i.Text), - FromLinter: m.Name(), + FromLinter: i.Checker, }) } return res, nil } -func runMegacheck(program *loader.Program, ssaProg *ssa.Program, conf *loader.Config, - enableStaticcheck, enableGosimple, enableUnused, checkExportedUnused bool) []lint.Problem { +func (m megacheck) runMegacheck(workingPkgs []*packages.Package, checkExportedUnused bool) ([]lint.Problem, error) { + var checkers []lint.Checker - var checkers []lintutil.CheckerConfig + if m.gosimpleEnabled { + checkers = append(checkers, simple.NewChecker()) + } + if m.staticcheckEnabled { + checkers = append(checkers, staticcheck.NewChecker()) + } + if m.stylecheckEnabled { + checkers = append(checkers, stylecheck.NewChecker()) + } + if m.unusedEnabled { + uc := unused.NewChecker(unused.CheckAll) + 
uc.ConsiderReflection = true + uc.WholeProgram = checkExportedUnused + checkers = append(checkers, unused.NewLintChecker(uc)) + } - if enableStaticcheck { - sac := staticcheck.NewChecker() - checkers = append(checkers, lintutil.CheckerConfig{ - Checker: sac, - }) + if len(checkers) == 0 { + return nil, nil } - if enableGosimple { - sc := simple.NewChecker() - checkers = append(checkers, lintutil.CheckerConfig{ - Checker: sc, - }) + cfg := config.Config{} + opts := &lintutil.Options{ + // TODO: get current go version, but now it doesn't matter, + // may be needed after next updates of megacheck + GoVersion: 11, + + Config: cfg, + // TODO: support Ignores option } - if enableUnused { - uc := unused.NewChecker(unused.CheckAll) - uc.WholeProgram = checkExportedUnused - uc.ConsiderReflection = true - checkers = append(checkers, lintutil.CheckerConfig{ - Checker: unused.NewLintChecker(uc), - }) + return runMegacheckCheckers(checkers, opts, workingPkgs) +} + +// parseIgnore is a copy from megacheck code just to not fork megacheck +func parseIgnore(s string) ([]lint.Ignore, error) { + var out []lint.Ignore + if len(s) == 0 { + return nil, nil + } + for _, part := range strings.Fields(s) { + p := strings.Split(part, ":") + if len(p) != 2 { + return nil, errors.New("malformed ignore string") + } + path := p[0] + checks := strings.Split(p[1], ",") + out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks}) + } + return out, nil +} + +func runMegacheckCheckers(cs []lint.Checker, opt *lintutil.Options, workingPkgs []*packages.Package) ([]lint.Problem, error) { + stats := lint.PerfStats{ + CheckerInits: map[string]time.Duration{}, + } + + ignores, err := parseIgnore(opt.Ignores) + if err != nil { + return nil, err + } + + var problems []lint.Problem + if len(workingPkgs) == 0 { + return problems, nil + } + + l := &lint.Linter{ + Checkers: cs, + Ignores: ignores, + GoVersion: opt.GoVersion, + ReturnIgnored: opt.ReturnIgnored, + Config: opt.Config, + + MaxConcurrentJobs: 
opt.MaxConcurrentJobs, + PrintStats: opt.PrintStats, } + problems = append(problems, l.Lint(workingPkgs, &stats)...) - fs := lintutil.FlagSet(megacheckName) - return lintutil.ProcessFlagSet(checkers, fs, program, ssaProg, conf) + return problems, nil } diff --git a/pkg/golinters/unparam.go b/pkg/golinters/unparam.go index 8ea2f8e9f87c..6dd4970d2e09 100644 --- a/pkg/golinters/unparam.go +++ b/pkg/golinters/unparam.go @@ -3,7 +3,7 @@ package golinters import ( "context" - "github.com/golangci/unparam/check" + "mvdan.cc/unparam/check" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/result" @@ -25,7 +25,7 @@ func (lint Unparam) Run(ctx context.Context, lintCtx *linter.Context) ([]result. c := &check.Checker{} c.CallgraphAlgorithm(us.Algo) c.CheckExportedFuncs(us.CheckExported) - c.Program(lintCtx.Program) + c.Packages(lintCtx.Packages) c.ProgramSSA(lintCtx.SSAProgram) unparamIssues, err := c.Check() diff --git a/pkg/golinters/util.go b/pkg/golinters/util.go index 5dd2669b7a07..dc02c5ee1aea 100644 --- a/pkg/golinters/util.go +++ b/pkg/golinters/util.go @@ -61,6 +61,9 @@ var replacePatterns = []replacePattern{ "Blacklisted import `${1}`: weak cryptographic primitive"}, {`^TLS InsecureSkipVerify set true.$`, "TLS `InsecureSkipVerify` set true."}, + // gosimple + {`^should replace loop with (.*)$`, "should replace loop with `${1}`"}, + // megacheck {`^this value of (\S+) is never used$`, "this value of `${1}` is never used"}, {`^should use time.Since instead of time.Now().Sub$`, diff --git a/pkg/lint/linter/config.go b/pkg/lint/linter/config.go index 64ac432701e6..c4ffaa42c9bd 100644 --- a/pkg/lint/linter/config.go +++ b/pkg/lint/linter/config.go @@ -20,7 +20,8 @@ type Config struct { Speed int // more value means faster execution of linter AlternativeNames []string - OriginalURL string // URL of original (not forked) repo, needed for autogenerated README + OriginalURL string // URL of original (not forked) repo, 
needed for autogenerated README + ParentLinterName string // used only for megacheck's children now } func (lc *Config) WithTypeInfo() *Config { @@ -54,6 +55,11 @@ func (lc *Config) WithAlternativeNames(names ...string) *Config { return lc } +func (lc *Config) WithParent(parentLinterName string) *Config { + lc.ParentLinterName = parentLinterName + return lc +} + func (lc *Config) GetSpeed() int { return lc.Speed } diff --git a/pkg/lint/linter/context.go b/pkg/lint/linter/context.go index 02d4acefd249..f9aeb233646b 100644 --- a/pkg/lint/linter/context.go +++ b/pkg/lint/linter/context.go @@ -1,9 +1,9 @@ package linter import ( - "github.com/golangci/tools/go/ssa" "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ssa" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/lint/astcache" @@ -17,8 +17,7 @@ type Context struct { LoaderConfig *loader.Config // deprecated, don't use for new linters Program *loader.Program // deprecated, use Packages for new linters - SSAProgram *ssa.Program // for unparam and interfacer: they don't change it - MegacheckSSAProgram *ssa.Program // for megacheck: it modifies ssa program + SSAProgram *ssa.Program // for unparam and interfacer but not for megacheck (it change it) Cfg *config.Config ASTCache *astcache.Cache diff --git a/pkg/lint/linter/metalinter.go b/pkg/lint/linter/metalinter.go new file mode 100644 index 000000000000..ee235640f2ee --- /dev/null +++ b/pkg/lint/linter/metalinter.go @@ -0,0 +1,8 @@ +package linter + +type MetaLinter interface { + Name() string + BuildLinterConfig(enabledChildren []string) (*Config, error) + AllChildLinterNames() []string + DefaultChildLinterNames() []string +} diff --git a/pkg/lint/lintersdb/enabled_set.go b/pkg/lint/lintersdb/enabled_set.go index 7e29c11e3127..a5db6a9f9977 100644 --- a/pkg/lint/lintersdb/enabled_set.go +++ b/pkg/lint/lintersdb/enabled_set.go @@ -4,7 +4,6 @@ import ( "sort" 
"github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/logutils" ) @@ -58,17 +57,29 @@ func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []*lint } } + metaLinters := es.m.GetMetaLinters() + for _, name := range lcfg.Enable { + if metaLinter := metaLinters[name]; metaLinter != nil { + // e.g. if we use --enable=megacheck we should add staticcheck,unused and gosimple to result set + for _, childLinter := range metaLinter.DefaultChildLinterNames() { + resultLintersSet[childLinter] = es.m.GetLinterConfig(childLinter) + } + continue + } + lc := es.m.GetLinterConfig(name) // it's important to use lc.Name() nor name because name can be alias resultLintersSet[lc.Name()] = lc } for _, name := range lcfg.Disable { - if name == "megacheck" { - for _, ln := range getAllMegacheckSubLinterNames() { - delete(resultLintersSet, ln) + if metaLinter := metaLinters[name]; metaLinter != nil { + // e.g. 
if we use --disable=megacheck we should remove staticcheck,unused and gosimple from result set + for _, childLinter := range metaLinter.DefaultChildLinterNames() { + delete(resultLintersSet, childLinter) } + continue } lc := es.m.GetLinterConfig(name) @@ -76,49 +87,32 @@ func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []*lint delete(resultLintersSet, lc.Name()) } - es.optimizeLintersSet(resultLintersSet) return resultLintersSet } -func getAllMegacheckSubLinterNames() []string { - unusedName := golinters.Megacheck{UnusedEnabled: true}.Name() - gosimpleName := golinters.Megacheck{GosimpleEnabled: true}.Name() - staticcheckName := golinters.Megacheck{StaticcheckEnabled: true}.Name() - return []string{unusedName, gosimpleName, staticcheckName} -} - func (es EnabledSet) optimizeLintersSet(linters map[string]*linter.Config) { - unusedName := golinters.Megacheck{UnusedEnabled: true}.Name() - gosimpleName := golinters.Megacheck{GosimpleEnabled: true}.Name() - staticcheckName := golinters.Megacheck{StaticcheckEnabled: true}.Name() - fullName := golinters.Megacheck{GosimpleEnabled: true, UnusedEnabled: true, StaticcheckEnabled: true}.Name() - allNames := []string{unusedName, gosimpleName, staticcheckName, fullName} - - megacheckCount := 0 - for _, n := range allNames { - if linters[n] != nil { - megacheckCount++ + for _, metaLinter := range es.m.GetMetaLinters() { + var children []string + for _, child := range metaLinter.AllChildLinterNames() { + if _, ok := linters[child]; ok { + children = append(children, child) + } } - } - if megacheckCount <= 1 { - return - } - - isFullEnabled := linters[fullName] != nil - mega := golinters.Megacheck{ - UnusedEnabled: isFullEnabled || linters[unusedName] != nil, - GosimpleEnabled: isFullEnabled || linters[gosimpleName] != nil, - StaticcheckEnabled: isFullEnabled || linters[staticcheckName] != nil, - } + if len(children) <= 1 { + continue + } - for _, n := range allNames { - delete(linters, n) + for _, child := 
range children { + delete(linters, child) + } + builtLinterConfig, err := metaLinter.BuildLinterConfig(children) + if err != nil { + panic("shouldn't fail during linter building: " + err.Error()) + } + linters[metaLinter.Name()] = builtLinterConfig + es.log.Infof("Optimized sublinters %s into metalinter %s", children, metaLinter.Name()) } - - lc := *es.m.GetLinterConfig("megacheck") - lc.Linter = mega - linters[mega.Name()] = &lc } func (es EnabledSet) Get() ([]*linter.Config, error) { @@ -127,17 +121,18 @@ func (es EnabledSet) Get() ([]*linter.Config, error) { } resultLintersSet := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters()) + es.verbosePrintLintersStatus(resultLintersSet) + es.optimizeLintersSet(resultLintersSet) var resultLinters []*linter.Config for _, lc := range resultLintersSet { resultLinters = append(resultLinters, lc) } - es.verbosePrintLintersStatus(resultLinters) return resultLinters, nil } -func (es EnabledSet) verbosePrintLintersStatus(lcs []*linter.Config) { +func (es EnabledSet) verbosePrintLintersStatus(lcs map[string]*linter.Config) { var linterNames []string for _, lc := range lcs { linterNames = append(linterNames, lc.Name()) diff --git a/pkg/lint/lintersdb/enabled_set_test.go b/pkg/lint/lintersdb/enabled_set_test.go index 2c7a3a676f8e..6cf4f86ec53a 100644 --- a/pkg/lint/lintersdb/enabled_set_test.go +++ b/pkg/lint/lintersdb/enabled_set_test.go @@ -4,6 +4,8 @@ import ( "sort" "testing" + "github.com/golangci/golangci-lint/pkg/golinters" + "github.com/stretchr/testify/assert" "github.com/golangci/golangci-lint/pkg/config" @@ -20,23 +22,32 @@ func TestGetEnabledLintersSet(t *testing.T) { cases := []cs{ { cfg: config.Linters{ - Disable: []string{"megacheck"}, + Disable: []string{golinters.MegacheckMetalinter{}.Name()}, }, name: "disable all linters from megacheck", - def: getAllMegacheckSubLinterNames(), + def: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(), + exp: nil, // all disabled }, { cfg: 
config.Linters{ - Disable: []string{"staticcheck"}, + Disable: []string{golinters.MegacheckStaticcheckName}, }, name: "disable only staticcheck", - def: getAllMegacheckSubLinterNames(), - exp: []string{"megacheck.{unused,gosimple}"}, + def: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(), + exp: []string{golinters.MegacheckGosimpleName, golinters.MegacheckUnusedName}, + }, + { + name: "don't merge into megacheck", + def: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(), + exp: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(), }, { - name: "merge into megacheck", - def: getAllMegacheckSubLinterNames(), - exp: []string{"megacheck"}, + name: "expand megacheck", + cfg: config.Linters{ + Enable: []string{golinters.MegacheckMetalinter{}.Name()}, + }, + def: nil, + exp: golinters.MegacheckMetalinter{}.DefaultChildLinterNames(), }, { name: "don't disable anything", @@ -87,8 +98,11 @@ func TestGetEnabledLintersSet(t *testing.T) { t.Run(c.name, func(t *testing.T) { var defaultLinters []*linter.Config for _, ln := range c.def { - defaultLinters = append(defaultLinters, m.GetLinterConfig(ln)) + lc := m.GetLinterConfig(ln) + assert.NotNil(t, lc, ln) + defaultLinters = append(defaultLinters, lc) } + els := es.build(&c.cfg, defaultLinters) var enabledLinters []string for ln, lc := range els { diff --git a/pkg/lint/lintersdb/manager.go b/pkg/lint/lintersdb/manager.go index 64958823198f..b54e9d2663f0 100644 --- a/pkg/lint/lintersdb/manager.go +++ b/pkg/lint/lintersdb/manager.go @@ -37,6 +37,10 @@ func (m Manager) allPresetsSet() map[string]bool { return ret } +func (m Manager) GetMetaLinter(name string) linter.MetaLinter { + return m.GetMetaLinters()[name] +} + func (m Manager) GetLinterConfig(name string) *linter.Config { lc, ok := m.nameToLC[name] if !ok { @@ -57,12 +61,26 @@ func enableLinterConfigs(lcs []*linter.Config, isEnabled func(lc *linter.Config) return ret } +func (Manager) GetMetaLinters() map[string]linter.MetaLinter { + metaLinters := 
[]linter.MetaLinter{ + golinters.MegacheckMetalinter{}, + } + + ret := map[string]linter.MetaLinter{} + for _, metaLinter := range metaLinters { + ret[metaLinter.Name()] = metaLinter + } + + return ret +} + func (Manager) GetAllSupportedLinterConfigs() []*linter.Config { lcs := []*linter.Config{ linter.NewConfig(golinters.Govet{}). WithTypeInfo(). WithPresets(linter.PresetBugs). WithSpeed(4). + WithAlternativeNames("vet", "vetshadow"). WithURL("https://golang.org/cmd/vet/"), linter.NewConfig(golinters.Errcheck{}). WithTypeInfo(). @@ -74,21 +92,26 @@ func (Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSpeed(3). WithURL("https://github.com/golang/lint"), - linter.NewConfig(golinters.Megacheck{StaticcheckEnabled: true}). + linter.NewConfig(golinters.NewStaticcheck()). WithSSA(). WithPresets(linter.PresetBugs). WithSpeed(2). WithURL("https://staticcheck.io/"), - linter.NewConfig(golinters.Megacheck{UnusedEnabled: true}). + linter.NewConfig(golinters.NewUnused()). WithSSA(). WithPresets(linter.PresetUnused). WithSpeed(5). WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/unused"), - linter.NewConfig(golinters.Megacheck{GosimpleEnabled: true}). + linter.NewConfig(golinters.NewGosimple()). WithSSA(). WithPresets(linter.PresetStyle). WithSpeed(5). WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/gosimple"), + linter.NewConfig(golinters.NewStylecheck()). + WithSSA(). + WithPresets(linter.PresetStyle). + WithSpeed(5). + WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"), linter.NewConfig(golinters.Gosec{}). WithTypeInfo(). @@ -156,11 +179,6 @@ func (Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithPresets(linter.PresetPerformance). WithSpeed(10). WithURL("https://github.com/mdempsky/maligned"), - linter.NewConfig(golinters.Megacheck{GosimpleEnabled: true, UnusedEnabled: true, StaticcheckEnabled: true}). - WithSSA(). 
- WithPresets(linter.PresetStyle, linter.PresetBugs, linter.PresetUnused). - WithSpeed(1). - WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/megacheck"), linter.NewConfig(golinters.Depguard{}). WithTypeInfo(). WithPresets(linter.PresetStyle). @@ -207,22 +225,22 @@ func (Manager) GetAllSupportedLinterConfigs() []*linter.Config { } isLocalRun := os.Getenv("GOLANGCI_COM_RUN") == "" - enabled := map[string]bool{ - golinters.Govet{}.Name(): true, - golinters.Errcheck{}.Name(): true, - golinters.Megacheck{StaticcheckEnabled: true}.Name(): true, - golinters.Megacheck{UnusedEnabled: true}.Name(): true, - golinters.Megacheck{GosimpleEnabled: true}.Name(): true, - golinters.Structcheck{}.Name(): true, - golinters.Varcheck{}.Name(): true, - golinters.Ineffassign{}.Name(): true, - golinters.Deadcode{}.Name(): true, + enabledByDefault := map[string]bool{ + golinters.Govet{}.Name(): true, + golinters.Errcheck{}.Name(): true, + golinters.Staticcheck{}.Name(): true, + golinters.Unused{}.Name(): true, + golinters.Gosimple{}.Name(): true, + golinters.Structcheck{}.Name(): true, + golinters.Varcheck{}.Name(): true, + golinters.Ineffassign{}.Name(): true, + golinters.Deadcode{}.Name(): true, // don't typecheck for golangci.com: too many troubles golinters.TypeCheck{}.Name(): isLocalRun, } return enableLinterConfigs(lcs, func(lc *linter.Config) bool { - return enabled[lc.Name()] + return enabledByDefault[lc.Name()] }) } diff --git a/pkg/lint/lintersdb/validator.go b/pkg/lint/lintersdb/validator.go index 3a154c7e36e2..7e1f878897b7 100644 --- a/pkg/lint/lintersdb/validator.go +++ b/pkg/lint/lintersdb/validator.go @@ -21,7 +21,7 @@ func (v Validator) validateLintersNames(cfg *config.Linters) error { allNames := append([]string{}, cfg.Enable...) allNames = append(allNames, cfg.Disable...) 
for _, name := range allNames { - if v.m.GetLinterConfig(name) == nil { + if v.m.GetLinterConfig(name) == nil && v.m.GetMetaLinter(name) == nil { return fmt.Errorf("no such linter %q", name) } } diff --git a/pkg/lint/load.go b/pkg/lint/load.go index 925275bf7724..bddbd4e59d6d 100644 --- a/pkg/lint/load.go +++ b/pkg/lint/load.go @@ -11,11 +11,11 @@ import ( "strings" "time" - "github.com/golangci/tools/go/ssa" - "github.com/golangci/tools/go/ssa/ssautil" "github.com/pkg/errors" "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ssa" + "golang.org/x/tools/go/ssa/ssautil" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/exitcodes" @@ -126,12 +126,12 @@ func (cl ContextLoader) makeFakeLoaderProgram(pkgs []*packages.Package) *loader. } } -func (cl ContextLoader) buildSSAProgram(pkgs []*packages.Package, name string) *ssa.Program { +func (cl ContextLoader) buildSSAProgram(pkgs []*packages.Package) *ssa.Program { startedAt := time.Now() var pkgsBuiltDuration time.Duration defer func() { - cl.log.Infof("SSA %srepr building timing: packages building %s, total %s", - name, pkgsBuiltDuration, time.Since(startedAt)) + cl.log.Infof("SSA repr building timing: packages building %s, total %s", + pkgsBuiltDuration, time.Since(startedAt)) }() ssaProg, _ := ssautil.Packages(pkgs, ssa.GlobalDebug) @@ -332,10 +332,9 @@ func (cl ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*li prog = cl.makeFakeLoaderProgram(pkgs) } - var ssaProg, megacheckSSAProg *ssa.Program + var ssaProg *ssa.Program if loadMode == packages.LoadAllSyntax { - ssaProg = cl.buildSSAProgram(pkgs, "") - megacheckSSAProg = cl.buildSSAProgram(pkgs, "for megacheck ") + ssaProg = cl.buildSSAProgram(pkgs) } astLog := cl.log.Child("astcache") @@ -345,10 +344,9 @@ func (cl ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*li } ret := &linter.Context{ - Packages: pkgs, - Program: prog, - 
SSAProgram: ssaProg, - MegacheckSSAProgram: megacheckSSAProg, + Packages: pkgs, + Program: prog, + SSAProgram: ssaProg, LoaderConfig: &loader.Config{ Cwd: "", // used by depguard and fallbacked to os.Getcwd Build: nil, // used by depguard and megacheck and fallbacked to build.Default diff --git a/pkg/result/processors/nolint.go b/pkg/result/processors/nolint.go index 2601dbecb06c..a4a2593631cb 100644 --- a/pkg/result/processors/nolint.go +++ b/pkg/result/processors/nolint.go @@ -176,43 +176,71 @@ func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments . var ret []ignoredRange for _, g := range comments { for _, c := range g.List { - text := strings.TrimLeft(c.Text, "/ ") - if !strings.HasPrefix(text, "nolint") { - continue + ir := p.extractInlineRangeFromComment(c.Text, g, fset) + if ir != nil { + ret = append(ret, *ir) } - - var linters []string - if strings.HasPrefix(text, "nolint:") { - // ignore specific linters - text = strings.Split(text, "//")[0] // allow another comment after this comment - linterItems := strings.Split(strings.TrimPrefix(text, "nolint:"), ",") - for _, linter := range linterItems { - linterName := strings.ToLower(strings.TrimSpace(linter)) - lc := p.dbManager.GetLinterConfig(linterName) - if lc == nil { - p.unknownLintersSet[linterName] = true - continue - } - linters = append(linters, lc.Name()) // normalize name to work with aliases - } - } // else ignore all linters - nolintDebugf("%d: linters are %s", fset.Position(g.Pos()).Line, linters) - - pos := fset.Position(g.Pos()) - ret = append(ret, ignoredRange{ - Range: result.Range{ - From: pos.Line, - To: fset.Position(g.End()).Line, - }, - col: pos.Column, - linters: linters, - }) } } return ret } +func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange { + text = strings.TrimLeft(text, "/ ") + if !strings.HasPrefix(text, "nolint") { + return nil + } + + buildRange := func(linters []string) *ignoredRange { + pos := 
fset.Position(g.Pos()) + return &ignoredRange{ + Range: result.Range{ + From: pos.Line, + To: fset.Position(g.End()).Line, + }, + col: pos.Column, + linters: linters, + } + } + + if !strings.HasPrefix(text, "nolint:") { + return buildRange(nil) // ignore all linters + } + + // ignore specific linters + var linters []string + text = strings.Split(text, "//")[0] // allow another comment after this comment + linterItems := strings.Split(strings.TrimPrefix(text, "nolint:"), ",") + var gotUnknownLinters bool + for _, linter := range linterItems { + linterName := strings.ToLower(strings.TrimSpace(linter)) + metaLinter := p.dbManager.GetMetaLinter(linterName) + if metaLinter != nil { + // user can set metalinter name in nolint directive (e.g. megacheck), then + // we should add to nolint all the metalinter's default children + linters = append(linters, metaLinter.DefaultChildLinterNames()...) + continue + } + + lc := p.dbManager.GetLinterConfig(linterName) + if lc == nil { + p.unknownLintersSet[linterName] = true + gotUnknownLinters = true + continue + } + + linters = append(linters, lc.Name()) // normalize name to work with aliases + } + + if gotUnknownLinters { + return buildRange(nil) // ignore all linters to not annoy user + } + + nolintDebugf("%d: linters are %s", fset.Position(g.Pos()).Line, linters) + return buildRange(linters) +} + func (p Nolint) Finish() { if len(p.unknownLintersSet) == 0 { return diff --git a/test/enabled_linters.go b/test/enabled_linters_test.go similarity index 85% rename from test/enabled_linters.go rename to test/enabled_linters_test.go index 48f4aaf4faab..32f8b58a5e31 100644 --- a/test/enabled_linters.go +++ b/test/enabled_linters_test.go @@ -74,23 +74,6 @@ func getEnabledByDefaultFastLintersWith(with ...string) []string { return ret } -func mergeMegacheck(linters []string) []string { - if inSlice(linters, "staticcheck") && - inSlice(linters, "gosimple") && - inSlice(linters, "unused") { - ret := []string{"megacheck"} - for _, linter := 
range linters { - if !inSlice([]string{"staticcheck", "gosimple", "unused"}, linter) { - ret = append(ret, linter) - } - } - - return ret - } - - return linters -} - func TestEnabledLinters(t *testing.T) { type tc struct { name string @@ -180,8 +163,8 @@ func TestEnabledLinters(t *testing.T) { }, { name: "fast option combined with enable and enable-all", - args: "--enable-all --fast --enable=megacheck", - el: getAllFastLintersWith("megacheck"), + args: "--enable-all --fast --enable=staticcheck", + el: getAllFastLintersWith("staticcheck"), noImplicitFast: true, }, } @@ -197,10 +180,9 @@ func TestEnabledLinters(t *testing.T) { runArgs = append(runArgs, strings.Split(c.args, " ")...) } r := testshared.NewLintRunner(t).RunWithYamlConfig(c.cfg, runArgs...) - el := mergeMegacheck(c.el) - sort.StringSlice(el).Sort() + sort.StringSlice(c.el).Sort() - expectedLine := fmt.Sprintf("Active %d linters: [%s]", len(el), strings.Join(el, " ")) + expectedLine := fmt.Sprintf("Active %d linters: [%s]", len(c.el), strings.Join(c.el, " ")) r.ExpectOutputContains(expectedLine) }) } diff --git a/test/testdata/gosec.go b/test/testdata/gosec.go index 2aaecda02a2f..df5ab3eaaec2 100644 --- a/test/testdata/gosec.go +++ b/test/testdata/gosec.go @@ -10,3 +10,13 @@ func Gosec() { h := md5.New() // ERROR "G401: Use of weak cryptographic primitive" log.Print(h) } + +func GosecNolintGas() { + h := md5.New() //nolint:gas + log.Print(h) +} + +func GosecNolintGosec() { + h := md5.New() //nolint:gosec + log.Print(h) +} diff --git a/test/testdata/gosimple.go b/test/testdata/gosimple.go new file mode 100644 index 000000000000..cc183033e33e --- /dev/null +++ b/test/testdata/gosimple.go @@ -0,0 +1,25 @@ +//args: -Egosimple +package testdata + +import "strings" + +func Gosimple(id1, s1 string) string { + if strings.HasPrefix(id1, s1) { // ERROR "should replace.*with.*strings.TrimPrefix" + id1 = strings.TrimPrefix(id1, s1) + } + return id1 +} + +func GosimpleNolintGosimple(id1, s1 string) string { + if 
strings.HasPrefix(id1, s1) { //nolint:gosimple + id1 = strings.TrimPrefix(id1, s1) + } + return id1 +} + +func GosimpleNolintMegacheck(id1, s1 string) string { + if strings.HasPrefix(id1, s1) { //nolint:megacheck + id1 = strings.TrimPrefix(id1, s1) + } + return id1 +} diff --git a/test/testdata/govet.go b/test/testdata/govet.go index 54abc62c8b27..fea8959af453 100644 --- a/test/testdata/govet.go +++ b/test/testdata/govet.go @@ -22,3 +22,11 @@ func GovetShadow(f io.Reader, buf []byte) (err error) { _ = err return } + +func GovetNolintVet() error { + return &os.PathError{"first", "path", os.ErrNotExist} //nolint:vet +} + +func GovetNolintVetShadow() error { + return &os.PathError{"first", "path", os.ErrNotExist} //nolint:vetshadow +} diff --git a/test/testdata/staticcheck.go b/test/testdata/staticcheck.go new file mode 100644 index 000000000000..5cdd1abb1753 --- /dev/null +++ b/test/testdata/staticcheck.go @@ -0,0 +1,17 @@ +//args: -Estaticcheck +package testdata + +func Staticcheck() { + var x int + x = x // ERROR "self-assignment of x to x" +} + +func StaticcheckNolintStaticcheck() { + var x int + x = x //nolint:staticcheck +} + +func StaticcheckNolintMegacheck() { + var x int + x = x //nolint:megacheck +} diff --git a/test/testdata/staticcheck_in_megacheck.go b/test/testdata/staticcheck_in_megacheck.go new file mode 100644 index 000000000000..801c4c87ec9f --- /dev/null +++ b/test/testdata/staticcheck_in_megacheck.go @@ -0,0 +1,17 @@ +//args: -Emegacheck +package testdata + +func StaticcheckInMegacheck() { + var x int + x = x // ERROR "self-assignment of x to x" +} + +func StaticcheckNolintStaticcheckInMegacheck() { + var x int + x = x //nolint:staticcheck +} + +func StaticcheckNolintMegacheckInMegacheck() { + var x int + x = x //nolint:megacheck +} diff --git a/test/testdata/stylecheck.go b/test/testdata/stylecheck.go new file mode 100644 index 000000000000..20ebad368a83 --- /dev/null +++ b/test/testdata/stylecheck.go @@ -0,0 +1,20 @@ +//args: -Estylecheck 
+package testdata + +func Stylecheck(x int) { + if 0 == x { // ERROR "don't use Yoda conditions" + panic(x) + } +} + +func StylecheckNolintStylecheck(x int) { + if 0 == x { //nolint:stylecheck + panic(x) + } +} + +func StylecheckNolintMegacheck(x int) { + if 0 == x { //nolint:megacheck // ERROR "don't use Yoda conditions" + panic(x) + } +} diff --git a/test/testdata/megacheck.go b/test/testdata/stylecheck_not_in_megacheck.go similarity index 51% rename from test/testdata/megacheck.go rename to test/testdata/stylecheck_not_in_megacheck.go index 3b501c82db31..0358f59566a3 100644 --- a/test/testdata/megacheck.go +++ b/test/testdata/stylecheck_not_in_megacheck.go @@ -1,7 +1,13 @@ //args: -Emegacheck package testdata -func Megacheck() { +func StylecheckNotInMegacheck(x int) { + if 0 == x { + panic(x) + } +} + +func Staticcheck2() { var x int x = x // ERROR "self-assignment of x to x" } diff --git a/test/testdata/unused.go b/test/testdata/unused.go new file mode 100644 index 000000000000..46c497292eec --- /dev/null +++ b/test/testdata/unused.go @@ -0,0 +1,8 @@ +//args: -Eunused +package testdata + +type unusedStruct struct{} // ERROR "type `unusedStruct` is unused" + +type unusedStructNolintUnused struct{} //nolint:unused + +type unusedStructNolintMegacheck struct{} //nolint:megacheck diff --git a/vendor/github.com/BurntSushi/toml/.gitignore b/vendor/github.com/BurntSushi/toml/.gitignore new file mode 100644 index 000000000000..0cd3800377d4 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/.gitignore @@ -0,0 +1,5 @@ +TAGS +tags +.*.swp +tomlcheck/tomlcheck +toml.test diff --git a/vendor/github.com/BurntSushi/toml/.travis.yml b/vendor/github.com/BurntSushi/toml/.travis.yml new file mode 100644 index 000000000000..8b8afc4f0e00 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/.travis.yml @@ -0,0 +1,15 @@ +language: go +go: + - 1.1 + - 1.2 + - 1.3 + - 1.4 + - 1.5 + - 1.6 + - tip +install: + - go install ./... 
+ - go get github.com/BurntSushi/toml-test +script: + - export PATH="$PATH:$HOME/gopath/bin" + - make test diff --git a/vendor/github.com/BurntSushi/toml/COMPATIBLE b/vendor/github.com/BurntSushi/toml/COMPATIBLE new file mode 100644 index 000000000000..6efcfd0ce55e --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/COMPATIBLE @@ -0,0 +1,3 @@ +Compatible with TOML version +[v0.4.0](https://github.com/toml-lang/toml/blob/v0.4.0/versions/en/toml-v0.4.0.md) + diff --git a/vendor/github.com/BurntSushi/toml/COPYING b/vendor/github.com/BurntSushi/toml/COPYING new file mode 100644 index 000000000000..01b5743200b8 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/COPYING @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 TOML authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/github.com/BurntSushi/toml/Makefile b/vendor/github.com/BurntSushi/toml/Makefile new file mode 100644 index 000000000000..3600848d331a --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/Makefile @@ -0,0 +1,19 @@ +install: + go install ./... + +test: install + go test -v + toml-test toml-test-decoder + toml-test -encoder toml-test-encoder + +fmt: + gofmt -w *.go */*.go + colcheck *.go */*.go + +tags: + find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS + +push: + git push origin master + git push github master + diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md new file mode 100644 index 000000000000..7c1b37ecc7a0 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/README.md @@ -0,0 +1,218 @@ +## TOML parser and encoder for Go with reflection + +TOML stands for Tom's Obvious, Minimal Language. This Go package provides a +reflection interface similar to Go's standard library `json` and `xml` +packages. This package also supports the `encoding.TextUnmarshaler` and +`encoding.TextMarshaler` interfaces so that you can define custom data +representations. (There is an example of this below.) 
+ +Spec: https://github.com/toml-lang/toml + +Compatible with TOML version +[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md) + +Documentation: https://godoc.org/github.com/BurntSushi/toml + +Installation: + +```bash +go get github.com/BurntSushi/toml +``` + +Try the toml validator: + +```bash +go get github.com/BurntSushi/toml/cmd/tomlv +tomlv some-toml-file.toml +``` + +[![Build Status](https://travis-ci.org/BurntSushi/toml.svg?branch=master)](https://travis-ci.org/BurntSushi/toml) [![GoDoc](https://godoc.org/github.com/BurntSushi/toml?status.svg)](https://godoc.org/github.com/BurntSushi/toml) + +### Testing + +This package passes all tests in +[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder +and the encoder. + +### Examples + +This package works similarly to how the Go standard library handles `XML` +and `JSON`. Namely, data is loaded into Go values via reflection. + +For the simplest example, consider some TOML file as just a list of keys +and values: + +```toml +Age = 25 +Cats = [ "Cauchy", "Plato" ] +Pi = 3.14 +Perfection = [ 6, 28, 496, 8128 ] +DOB = 1987-07-05T05:45:00Z +``` + +Which could be defined in Go as: + +```go +type Config struct { + Age int + Cats []string + Pi float64 + Perfection []int + DOB time.Time // requires `import time` +} +``` + +And then decoded with: + +```go +var conf Config +if _, err := toml.Decode(tomlData, &conf); err != nil { + // handle error +} +``` + +You can also use struct tags if your struct field name doesn't map to a TOML +key value directly: + +```toml +some_key_NAME = "wat" +``` + +```go +type TOML struct { + ObscureKey string `toml:"some_key_NAME"` +} +``` + +### Using the `encoding.TextUnmarshaler` interface + +Here's an example that automatically parses duration strings into +`time.Duration` values: + +```toml +[[song]] +name = "Thunder Road" +duration = "4m49s" + +[[song]] +name = "Stairway to Heaven" +duration = "8m03s" +``` 
+ +Which can be decoded with: + +```go +type song struct { + Name string + Duration duration +} +type songs struct { + Song []song +} +var favorites songs +if _, err := toml.Decode(blob, &favorites); err != nil { + log.Fatal(err) +} + +for _, s := range favorites.Song { + fmt.Printf("%s (%s)\n", s.Name, s.Duration) +} +``` + +And you'll also need a `duration` type that satisfies the +`encoding.TextUnmarshaler` interface: + +```go +type duration struct { + time.Duration +} + +func (d *duration) UnmarshalText(text []byte) error { + var err error + d.Duration, err = time.ParseDuration(string(text)) + return err +} +``` + +### More complex usage + +Here's an example of how to load the example from the official spec page: + +```toml +# This is a TOML document. Boom. + +title = "TOML Example" + +[owner] +name = "Tom Preston-Werner" +organization = "GitHub" +bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." +dob = 1979-05-27T07:32:00Z # First class dates? Why not? + +[database] +server = "192.168.1.1" +ports = [ 8001, 8001, 8002 ] +connection_max = 5000 +enabled = true + +[servers] + + # You can indent as you please. Tabs or spaces. TOML don't care. 
+ [servers.alpha] + ip = "10.0.0.1" + dc = "eqdc10" + + [servers.beta] + ip = "10.0.0.2" + dc = "eqdc10" + +[clients] +data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it + +# Line breaks are OK when inside arrays +hosts = [ + "alpha", + "omega" +] +``` + +And the corresponding Go types are: + +```go +type tomlConfig struct { + Title string + Owner ownerInfo + DB database `toml:"database"` + Servers map[string]server + Clients clients +} + +type ownerInfo struct { + Name string + Org string `toml:"organization"` + Bio string + DOB time.Time +} + +type database struct { + Server string + Ports []int + ConnMax int `toml:"connection_max"` + Enabled bool +} + +type server struct { + IP string + DC string +} + +type clients struct { + Data [][]interface{} + Hosts []string +} +``` + +Note that a case insensitive match will be tried if an exact match can't be +found. + +A working example of the above can be found in `_examples/example.{go,toml}`. diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go new file mode 100644 index 000000000000..b0fd51d5b6ea --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode.go @@ -0,0 +1,509 @@ +package toml + +import ( + "fmt" + "io" + "io/ioutil" + "math" + "reflect" + "strings" + "time" +) + +func e(format string, args ...interface{}) error { + return fmt.Errorf("toml: "+format, args...) +} + +// Unmarshaler is the interface implemented by objects that can unmarshal a +// TOML description of themselves. +type Unmarshaler interface { + UnmarshalTOML(interface{}) error +} + +// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`. +func Unmarshal(p []byte, v interface{}) error { + _, err := Decode(string(p), v) + return err +} + +// Primitive is a TOML value that hasn't been decoded into a Go value. 
+// When using the various `Decode*` functions, the type `Primitive` may +// be given to any value, and its decoding will be delayed. +// +// A `Primitive` value can be decoded using the `PrimitiveDecode` function. +// +// The underlying representation of a `Primitive` value is subject to change. +// Do not rely on it. +// +// N.B. Primitive values are still parsed, so using them will only avoid +// the overhead of reflection. They can be useful when you don't know the +// exact type of TOML data until run time. +type Primitive struct { + undecoded interface{} + context Key +} + +// DEPRECATED! +// +// Use MetaData.PrimitiveDecode instead. +func PrimitiveDecode(primValue Primitive, v interface{}) error { + md := MetaData{decoded: make(map[string]bool)} + return md.unify(primValue.undecoded, rvalue(v)) +} + +// PrimitiveDecode is just like the other `Decode*` functions, except it +// decodes a TOML value that has already been parsed. Valid primitive values +// can *only* be obtained from values filled by the decoder functions, +// including this method. (i.e., `v` may contain more `Primitive` +// values.) +// +// Meta data for primitive values is included in the meta data returned by +// the `Decode*` functions with one exception: keys returned by the Undecoded +// method will only reflect keys that were decoded. Namely, any keys hidden +// behind a Primitive will be considered undecoded. Executing this method will +// update the undecoded keys in the meta data. (See the example.) +func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { + md.context = primValue.context + defer func() { md.context = nil }() + return md.unify(primValue.undecoded, rvalue(v)) +} + +// Decode will decode the contents of `data` in TOML format into a pointer +// `v`. +// +// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be +// used interchangeably.) 
+// +// TOML arrays of tables correspond to either a slice of structs or a slice +// of maps. +// +// TOML datetimes correspond to Go `time.Time` values. +// +// All other TOML types (float, string, int, bool and array) correspond +// to the obvious Go types. +// +// An exception to the above rules is if a type implements the +// encoding.TextUnmarshaler interface. In this case, any primitive TOML value +// (floats, strings, integers, booleans and datetimes) will be converted to +// a byte string and given to the value's UnmarshalText method. See the +// Unmarshaler example for a demonstration with time duration strings. +// +// Key mapping +// +// TOML keys can map to either keys in a Go map or field names in a Go +// struct. The special `toml` struct tag may be used to map TOML keys to +// struct fields that don't match the key name exactly. (See the example.) +// A case insensitive match to struct names will be tried if an exact match +// can't be found. +// +// The mapping between TOML values and Go values is loose. That is, there +// may exist TOML values that cannot be placed into your representation, and +// there may be parts of your representation that do not correspond to +// TOML values. This loose mapping can be made stricter by using the IsDefined +// and/or Undecoded methods on the MetaData returned. +// +// This decoder will not handle cyclic types. If a cyclic type is passed, +// `Decode` will not terminate. 
+func Decode(data string, v interface{}) (MetaData, error) { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr { + return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v)) + } + if rv.IsNil() { + return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v)) + } + p, err := parse(data) + if err != nil { + return MetaData{}, err + } + md := MetaData{ + p.mapping, p.types, p.ordered, + make(map[string]bool, len(p.ordered)), nil, + } + return md, md.unify(p.mapping, indirect(rv)) +} + +// DecodeFile is just like Decode, except it will automatically read the +// contents of the file at `fpath` and decode it for you. +func DecodeFile(fpath string, v interface{}) (MetaData, error) { + bs, err := ioutil.ReadFile(fpath) + if err != nil { + return MetaData{}, err + } + return Decode(string(bs), v) +} + +// DecodeReader is just like Decode, except it will consume all bytes +// from the reader and decode it for you. +func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { + bs, err := ioutil.ReadAll(r) + if err != nil { + return MetaData{}, err + } + return Decode(string(bs), v) +} + +// unify performs a sort of type unification based on the structure of `rv`, +// which is the client representation. +// +// Any type mismatch produces an error. Finding a type that we don't know +// how to handle produces an unsupported type error. +func (md *MetaData) unify(data interface{}, rv reflect.Value) error { + + // Special case. Look for a `Primitive` value. + if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() { + // Save the undecoded data and the key context into the primitive + // value. + context := make(Key, len(md.context)) + copy(context, md.context) + rv.Set(reflect.ValueOf(Primitive{ + undecoded: data, + context: context, + })) + return nil + } + + // Special case. Unmarshaler Interface support. + if rv.CanAddr() { + if v, ok := rv.Addr().Interface().(Unmarshaler); ok { + return v.UnmarshalTOML(data) + } + } + + // Special case. 
Handle time.Time values specifically. + // TODO: Remove this code when we decide to drop support for Go 1.1. + // This isn't necessary in Go 1.2 because time.Time satisfies the encoding + // interfaces. + if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) { + return md.unifyDatetime(data, rv) + } + + // Special case. Look for a value satisfying the TextUnmarshaler interface. + if v, ok := rv.Interface().(TextUnmarshaler); ok { + return md.unifyText(data, v) + } + // BUG(burntsushi) + // The behavior here is incorrect whenever a Go type satisfies the + // encoding.TextUnmarshaler interface but also corresponds to a TOML + // hash or array. In particular, the unmarshaler should only be applied + // to primitive TOML values. But at this point, it will be applied to + // all kinds of values and produce an incorrect error whenever those values + // are hashes or arrays (including arrays of tables). + + k := rv.Kind() + + // laziness + if k >= reflect.Int && k <= reflect.Uint64 { + return md.unifyInt(data, rv) + } + switch k { + case reflect.Ptr: + elem := reflect.New(rv.Type().Elem()) + err := md.unify(data, reflect.Indirect(elem)) + if err != nil { + return err + } + rv.Set(elem) + return nil + case reflect.Struct: + return md.unifyStruct(data, rv) + case reflect.Map: + return md.unifyMap(data, rv) + case reflect.Array: + return md.unifyArray(data, rv) + case reflect.Slice: + return md.unifySlice(data, rv) + case reflect.String: + return md.unifyString(data, rv) + case reflect.Bool: + return md.unifyBool(data, rv) + case reflect.Interface: + // we only support empty interfaces. 
+ if rv.NumMethod() > 0 { + return e("unsupported type %s", rv.Type()) + } + return md.unifyAnything(data, rv) + case reflect.Float32: + fallthrough + case reflect.Float64: + return md.unifyFloat64(data, rv) + } + return e("unsupported type %s", rv.Kind()) +} + +func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if mapping == nil { + return nil + } + return e("type mismatch for %s: expected table but found %T", + rv.Type().String(), mapping) + } + + for key, datum := range tmap { + var f *field + fields := cachedTypeFields(rv.Type()) + for i := range fields { + ff := &fields[i] + if ff.name == key { + f = ff + break + } + if f == nil && strings.EqualFold(ff.name, key) { + f = ff + } + } + if f != nil { + subv := rv + for _, i := range f.index { + subv = indirect(subv.Field(i)) + } + if isUnifiable(subv) { + md.decoded[md.context.add(key).String()] = true + md.context = append(md.context, key) + if err := md.unify(datum, subv); err != nil { + return err + } + md.context = md.context[0 : len(md.context)-1] + } else if f.name != "" { + // Bad user! No soup for you! 
+ return e("cannot write unexported field %s.%s", + rv.Type().String(), f.name) + } + } + } + return nil +} + +func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if tmap == nil { + return nil + } + return badtype("map", mapping) + } + if rv.IsNil() { + rv.Set(reflect.MakeMap(rv.Type())) + } + for k, v := range tmap { + md.decoded[md.context.add(k).String()] = true + md.context = append(md.context, k) + + rvkey := indirect(reflect.New(rv.Type().Key())) + rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) + if err := md.unify(v, rvval); err != nil { + return err + } + md.context = md.context[0 : len(md.context)-1] + + rvkey.SetString(k) + rv.SetMapIndex(rvkey, rvval) + } + return nil +} + +func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return badtype("slice", data) + } + sliceLen := datav.Len() + if sliceLen != rv.Len() { + return e("expected array length %d; got TOML array of length %d", + rv.Len(), sliceLen) + } + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return badtype("slice", data) + } + n := datav.Len() + if rv.IsNil() || rv.Cap() < n { + rv.Set(reflect.MakeSlice(rv.Type(), n, n)) + } + rv.SetLen(n) + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { + sliceLen := data.Len() + for i := 0; i < sliceLen; i++ { + v := data.Index(i).Interface() + sliceval := indirect(rv.Index(i)) + if err := md.unify(v, sliceval); err != nil { + return err + } + } + return nil +} + +func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error { + if _, ok := data.(time.Time); ok { + 
rv.Set(reflect.ValueOf(data)) + return nil + } + return badtype("time.Time", data) +} + +func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { + if s, ok := data.(string); ok { + rv.SetString(s) + return nil + } + return badtype("string", data) +} + +func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { + if num, ok := data.(float64); ok { + switch rv.Kind() { + case reflect.Float32: + fallthrough + case reflect.Float64: + rv.SetFloat(num) + default: + panic("bug") + } + return nil + } + return badtype("float", data) +} + +func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { + if num, ok := data.(int64); ok { + if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 { + switch rv.Kind() { + case reflect.Int, reflect.Int64: + // No bounds checking necessary. + case reflect.Int8: + if num < math.MinInt8 || num > math.MaxInt8 { + return e("value %d is out of range for int8", num) + } + case reflect.Int16: + if num < math.MinInt16 || num > math.MaxInt16 { + return e("value %d is out of range for int16", num) + } + case reflect.Int32: + if num < math.MinInt32 || num > math.MaxInt32 { + return e("value %d is out of range for int32", num) + } + } + rv.SetInt(num) + } else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 { + unum := uint64(num) + switch rv.Kind() { + case reflect.Uint, reflect.Uint64: + // No bounds checking necessary. 
+ case reflect.Uint8: + if num < 0 || unum > math.MaxUint8 { + return e("value %d is out of range for uint8", num) + } + case reflect.Uint16: + if num < 0 || unum > math.MaxUint16 { + return e("value %d is out of range for uint16", num) + } + case reflect.Uint32: + if num < 0 || unum > math.MaxUint32 { + return e("value %d is out of range for uint32", num) + } + } + rv.SetUint(unum) + } else { + panic("unreachable") + } + return nil + } + return badtype("integer", data) +} + +func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { + if b, ok := data.(bool); ok { + rv.SetBool(b) + return nil + } + return badtype("boolean", data) +} + +func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { + rv.Set(reflect.ValueOf(data)) + return nil +} + +func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error { + var s string + switch sdata := data.(type) { + case TextMarshaler: + text, err := sdata.MarshalText() + if err != nil { + return err + } + s = string(text) + case fmt.Stringer: + s = sdata.String() + case string: + s = sdata + case bool: + s = fmt.Sprintf("%v", sdata) + case int64: + s = fmt.Sprintf("%d", sdata) + case float64: + s = fmt.Sprintf("%f", sdata) + default: + return badtype("primitive (string-like)", data) + } + if err := v.UnmarshalText([]byte(s)); err != nil { + return err + } + return nil +} + +// rvalue returns a reflect.Value of `v`. All pointers are resolved. +func rvalue(v interface{}) reflect.Value { + return indirect(reflect.ValueOf(v)) +} + +// indirect returns the value pointed to by a pointer. +// Pointers are followed until the value is not a pointer. +// New values are allocated for each nil pointer. +// +// An exception to this rule is if the value satisfies an interface of +// interest to us (like encoding.TextUnmarshaler). 
+func indirect(v reflect.Value) reflect.Value { + if v.Kind() != reflect.Ptr { + if v.CanSet() { + pv := v.Addr() + if _, ok := pv.Interface().(TextUnmarshaler); ok { + return pv + } + } + return v + } + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + return indirect(reflect.Indirect(v)) +} + +func isUnifiable(rv reflect.Value) bool { + if rv.CanSet() { + return true + } + if _, ok := rv.Interface().(TextUnmarshaler); ok { + return true + } + return false +} + +func badtype(expected string, data interface{}) error { + return e("cannot load TOML value of type %T into a Go %s", data, expected) +} diff --git a/vendor/github.com/BurntSushi/toml/decode_meta.go b/vendor/github.com/BurntSushi/toml/decode_meta.go new file mode 100644 index 000000000000..b9914a6798cf --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode_meta.go @@ -0,0 +1,121 @@ +package toml + +import "strings" + +// MetaData allows access to meta information about TOML data that may not +// be inferrable via reflection. In particular, whether a key has been defined +// and the TOML type of a key. +type MetaData struct { + mapping map[string]interface{} + types map[string]tomlType + keys []Key + decoded map[string]bool + context Key // Used only during decoding. +} + +// IsDefined returns true if the key given exists in the TOML data. The key +// should be specified hierarchially. e.g., +// +// // access the TOML key 'a.b.c' +// IsDefined("a", "b", "c") +// +// IsDefined will return false if an empty key given. Keys are case sensitive. +func (md *MetaData) IsDefined(key ...string) bool { + if len(key) == 0 { + return false + } + + var hash map[string]interface{} + var ok bool + var hashOrVal interface{} = md.mapping + for _, k := range key { + if hash, ok = hashOrVal.(map[string]interface{}); !ok { + return false + } + if hashOrVal, ok = hash[k]; !ok { + return false + } + } + return true +} + +// Type returns a string representation of the type of the key specified. 
+// +// Type will return the empty string if given an empty key or a key that +// does not exist. Keys are case sensitive. +func (md *MetaData) Type(key ...string) string { + fullkey := strings.Join(key, ".") + if typ, ok := md.types[fullkey]; ok { + return typ.typeString() + } + return "" +} + +// Key is the type of any TOML key, including key groups. Use (MetaData).Keys +// to get values of this type. +type Key []string + +func (k Key) String() string { + return strings.Join(k, ".") +} + +func (k Key) maybeQuotedAll() string { + var ss []string + for i := range k { + ss = append(ss, k.maybeQuoted(i)) + } + return strings.Join(ss, ".") +} + +func (k Key) maybeQuoted(i int) string { + quote := false + for _, c := range k[i] { + if !isBareKeyChar(c) { + quote = true + break + } + } + if quote { + return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\"" + } + return k[i] +} + +func (k Key) add(piece string) Key { + newKey := make(Key, len(k)+1) + copy(newKey, k) + newKey[len(k)] = piece + return newKey +} + +// Keys returns a slice of every key in the TOML data, including key groups. +// Each key is itself a slice, where the first element is the top of the +// hierarchy and the last is the most specific. +// +// The list will have the same order as the keys appeared in the TOML data. +// +// All keys returned are non-empty. +func (md *MetaData) Keys() []Key { + return md.keys +} + +// Undecoded returns all keys that have not been decoded in the order in which +// they appear in the original TOML document. +// +// This includes keys that haven't been decoded because of a Primitive value. +// Once the Primitive value is decoded, the keys will be considered decoded. +// +// Also note that decoding into an empty interface will result in no decoding, +// and so no keys will be considered decoded. +// +// In this sense, the Undecoded keys correspond to keys in the TOML document +// that do not have a concrete type in your representation. 
+func (md *MetaData) Undecoded() []Key { + undecoded := make([]Key, 0, len(md.keys)) + for _, key := range md.keys { + if !md.decoded[key.String()] { + undecoded = append(undecoded, key) + } + } + return undecoded +} diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go new file mode 100644 index 000000000000..b371f396edca --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/doc.go @@ -0,0 +1,27 @@ +/* +Package toml provides facilities for decoding and encoding TOML configuration +files via reflection. There is also support for delaying decoding with +the Primitive type, and querying the set of keys in a TOML document with the +MetaData type. + +The specification implemented: https://github.com/toml-lang/toml + +The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify +whether a file is a valid TOML document. It can also be used to print the +type of each key in a TOML document. + +Testing + +There are two important types of tests used for this package. The first is +contained inside '*_test.go' files and uses the standard Go unit testing +framework. These tests are primarily devoted to holistically testing the +decoder and encoder. + +The second type of testing is used to verify the implementation's adherence +to the TOML specification. These tests have been factored into their own +project: https://github.com/BurntSushi/toml-test + +The reason the tests are in a separate project is so that they can be used by +any implementation of TOML. Namely, it is language agnostic. 
+*/ +package toml diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go new file mode 100644 index 000000000000..d905c21a2466 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/encode.go @@ -0,0 +1,568 @@ +package toml + +import ( + "bufio" + "errors" + "fmt" + "io" + "reflect" + "sort" + "strconv" + "strings" + "time" +) + +type tomlEncodeError struct{ error } + +var ( + errArrayMixedElementTypes = errors.New( + "toml: cannot encode array with mixed element types") + errArrayNilElement = errors.New( + "toml: cannot encode array with nil element") + errNonString = errors.New( + "toml: cannot encode a map with non-string key type") + errAnonNonStruct = errors.New( + "toml: cannot encode an anonymous field that is not a struct") + errArrayNoTable = errors.New( + "toml: TOML array element cannot contain a table") + errNoKey = errors.New( + "toml: top-level values must be Go maps or structs") + errAnything = errors.New("") // used in testing +) + +var quotedReplacer = strings.NewReplacer( + "\t", "\\t", + "\n", "\\n", + "\r", "\\r", + "\"", "\\\"", + "\\", "\\\\", +) + +// Encoder controls the encoding of Go values to a TOML document to some +// io.Writer. +// +// The indentation level can be controlled with the Indent field. +type Encoder struct { + // A single indentation level. By default it is two spaces. + Indent string + + // hasWritten is whether we have written any output to w yet. + hasWritten bool + w *bufio.Writer +} + +// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer +// given. By default, a single indentation level is 2 spaces. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: bufio.NewWriter(w), + Indent: " ", + } +} + +// Encode writes a TOML representation of the Go value to the underlying +// io.Writer. If the value given cannot be encoded to a valid TOML document, +// then an error is returned. 
+// +// The mapping between Go values and TOML values should be precisely the same +// as for the Decode* functions. Similarly, the TextMarshaler interface is +// supported by encoding the resulting bytes as strings. (If you want to write +// arbitrary binary data then you will need to use something like base64 since +// TOML does not have any binary types.) +// +// When encoding TOML hashes (i.e., Go maps or structs), keys without any +// sub-hashes are encoded first. +// +// If a Go map is encoded, then its keys are sorted alphabetically for +// deterministic output. More control over this behavior may be provided if +// there is demand for it. +// +// Encoding Go values without a corresponding TOML representation---like map +// types with non-string keys---will cause an error to be returned. Similarly +// for mixed arrays/slices, arrays/slices with nil elements, embedded +// non-struct types and nested slices containing maps or structs. +// (e.g., [][]map[string]string is not allowed but []map[string]string is OK +// and so is []map[string][]string.) +func (enc *Encoder) Encode(v interface{}) error { + rv := eindirect(reflect.ValueOf(v)) + if err := enc.safeEncode(Key([]string{}), rv); err != nil { + return err + } + return enc.w.Flush() +} + +func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { + defer func() { + if r := recover(); r != nil { + if terr, ok := r.(tomlEncodeError); ok { + err = terr.error + return + } + panic(r) + } + }() + enc.encode(key, rv) + return nil +} + +func (enc *Encoder) encode(key Key, rv reflect.Value) { + // Special case. Time needs to be in ISO8601 format. + // Special case. If we can marshal the type to text, then we used that. + // Basically, this prevents the encoder for handling these types as + // generic structs (or whatever the underlying type of a TextMarshaler is). 
+ switch rv.Interface().(type) { + case time.Time, TextMarshaler: + enc.keyEqElement(key, rv) + return + } + + k := rv.Kind() + switch k { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, + reflect.Uint64, + reflect.Float32, reflect.Float64, reflect.String, reflect.Bool: + enc.keyEqElement(key, rv) + case reflect.Array, reflect.Slice: + if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) { + enc.eArrayOfTables(key, rv) + } else { + enc.keyEqElement(key, rv) + } + case reflect.Interface: + if rv.IsNil() { + return + } + enc.encode(key, rv.Elem()) + case reflect.Map: + if rv.IsNil() { + return + } + enc.eTable(key, rv) + case reflect.Ptr: + if rv.IsNil() { + return + } + enc.encode(key, rv.Elem()) + case reflect.Struct: + enc.eTable(key, rv) + default: + panic(e("unsupported type for key '%s': %s", key, k)) + } +} + +// eElement encodes any value that can be an array element (primitives and +// arrays). +func (enc *Encoder) eElement(rv reflect.Value) { + switch v := rv.Interface().(type) { + case time.Time: + // Special case time.Time as a primitive. Has to come before + // TextMarshaler below because time.Time implements + // encoding.TextMarshaler, but we need to always use UTC. + enc.wf(v.UTC().Format("2006-01-02T15:04:05Z")) + return + case TextMarshaler: + // Special case. Use text marshaler if it's available for this value. 
+ if s, err := v.MarshalText(); err != nil { + encPanic(err) + } else { + enc.writeQuoted(string(s)) + } + return + } + switch rv.Kind() { + case reflect.Bool: + enc.wf(strconv.FormatBool(rv.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64: + enc.wf(strconv.FormatInt(rv.Int(), 10)) + case reflect.Uint, reflect.Uint8, reflect.Uint16, + reflect.Uint32, reflect.Uint64: + enc.wf(strconv.FormatUint(rv.Uint(), 10)) + case reflect.Float32: + enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32))) + case reflect.Float64: + enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64))) + case reflect.Array, reflect.Slice: + enc.eArrayOrSliceElement(rv) + case reflect.Interface: + enc.eElement(rv.Elem()) + case reflect.String: + enc.writeQuoted(rv.String()) + default: + panic(e("unexpected primitive type: %s", rv.Kind())) + } +} + +// By the TOML spec, all floats must have a decimal with at least one +// number on either side. +func floatAddDecimal(fstr string) string { + if !strings.Contains(fstr, ".") { + return fstr + ".0" + } + return fstr +} + +func (enc *Encoder) writeQuoted(s string) { + enc.wf("\"%s\"", quotedReplacer.Replace(s)) +} + +func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { + length := rv.Len() + enc.wf("[") + for i := 0; i < length; i++ { + elem := rv.Index(i) + enc.eElement(elem) + if i != length-1 { + enc.wf(", ") + } + } + enc.wf("]") +} + +func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { + if len(key) == 0 { + encPanic(errNoKey) + } + for i := 0; i < rv.Len(); i++ { + trv := rv.Index(i) + if isNil(trv) { + continue + } + panicIfInvalidKey(key) + enc.newline() + enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll()) + enc.newline() + enc.eMapOrStruct(key, trv) + } +} + +func (enc *Encoder) eTable(key Key, rv reflect.Value) { + panicIfInvalidKey(key) + if len(key) == 1 { + // Output an extra newline between top-level tables. 
+ // (The newline isn't written if nothing else has been written though.) + enc.newline() + } + if len(key) > 0 { + enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll()) + enc.newline() + } + enc.eMapOrStruct(key, rv) +} + +func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) { + switch rv := eindirect(rv); rv.Kind() { + case reflect.Map: + enc.eMap(key, rv) + case reflect.Struct: + enc.eStruct(key, rv) + default: + panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) + } +} + +func (enc *Encoder) eMap(key Key, rv reflect.Value) { + rt := rv.Type() + if rt.Key().Kind() != reflect.String { + encPanic(errNonString) + } + + // Sort keys so that we have deterministic output. And write keys directly + // underneath this key first, before writing sub-structs or sub-maps. + var mapKeysDirect, mapKeysSub []string + for _, mapKey := range rv.MapKeys() { + k := mapKey.String() + if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) { + mapKeysSub = append(mapKeysSub, k) + } else { + mapKeysDirect = append(mapKeysDirect, k) + } + } + + var writeMapKeys = func(mapKeys []string) { + sort.Strings(mapKeys) + for _, mapKey := range mapKeys { + mrv := rv.MapIndex(reflect.ValueOf(mapKey)) + if isNil(mrv) { + // Don't write anything for nil fields. + continue + } + enc.encode(key.add(mapKey), mrv) + } + } + writeMapKeys(mapKeysDirect) + writeMapKeys(mapKeysSub) +} + +func (enc *Encoder) eStruct(key Key, rv reflect.Value) { + // Write keys for fields directly under this key first, because if we write + // a field that creates a new table, then all keys under it will be in that + // table (not the one we're writing here). 
+ rt := rv.Type() + var fieldsDirect, fieldsSub [][]int + var addFields func(rt reflect.Type, rv reflect.Value, start []int) + addFields = func(rt reflect.Type, rv reflect.Value, start []int) { + for i := 0; i < rt.NumField(); i++ { + f := rt.Field(i) + // skip unexported fields + if f.PkgPath != "" && !f.Anonymous { + continue + } + frv := rv.Field(i) + if f.Anonymous { + t := f.Type + switch t.Kind() { + case reflect.Struct: + // Treat anonymous struct fields with + // tag names as though they are not + // anonymous, like encoding/json does. + if getOptions(f.Tag).name == "" { + addFields(t, frv, f.Index) + continue + } + case reflect.Ptr: + if t.Elem().Kind() == reflect.Struct && + getOptions(f.Tag).name == "" { + if !frv.IsNil() { + addFields(t.Elem(), frv.Elem(), f.Index) + } + continue + } + // Fall through to the normal field encoding logic below + // for non-struct anonymous fields. + } + } + + if typeIsHash(tomlTypeOfGo(frv)) { + fieldsSub = append(fieldsSub, append(start, f.Index...)) + } else { + fieldsDirect = append(fieldsDirect, append(start, f.Index...)) + } + } + } + addFields(rt, rv, nil) + + var writeFields = func(fields [][]int) { + for _, fieldIndex := range fields { + sft := rt.FieldByIndex(fieldIndex) + sf := rv.FieldByIndex(fieldIndex) + if isNil(sf) { + // Don't write anything for nil fields. + continue + } + + opts := getOptions(sft.Tag) + if opts.skip { + continue + } + keyName := sft.Name + if opts.name != "" { + keyName = opts.name + } + if opts.omitempty && isEmpty(sf) { + continue + } + if opts.omitzero && isZero(sf) { + continue + } + + enc.encode(key.add(keyName), sf) + } + } + writeFields(fieldsDirect) + writeFields(fieldsSub) +} + +// tomlTypeName returns the TOML type name of the Go value's type. It is +// used to determine whether the types of array elements are mixed (which is +// forbidden). If the Go value is nil, then it is illegal for it to be an array +// element, and valueIsNil is returned as true. 
+ +// Returns the TOML type of a Go value. The type may be `nil`, which means +// no concrete TOML type could be found. +func tomlTypeOfGo(rv reflect.Value) tomlType { + if isNil(rv) || !rv.IsValid() { + return nil + } + switch rv.Kind() { + case reflect.Bool: + return tomlBool + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, + reflect.Uint64: + return tomlInteger + case reflect.Float32, reflect.Float64: + return tomlFloat + case reflect.Array, reflect.Slice: + if typeEqual(tomlHash, tomlArrayType(rv)) { + return tomlArrayHash + } + return tomlArray + case reflect.Ptr, reflect.Interface: + return tomlTypeOfGo(rv.Elem()) + case reflect.String: + return tomlString + case reflect.Map: + return tomlHash + case reflect.Struct: + switch rv.Interface().(type) { + case time.Time: + return tomlDatetime + case TextMarshaler: + return tomlString + default: + return tomlHash + } + default: + panic("unexpected reflect.Kind: " + rv.Kind().String()) + } +} + +// tomlArrayType returns the element type of a TOML array. The type returned +// may be nil if it cannot be determined (e.g., a nil slice or a zero length +// slize). This function may also panic if it finds a type that cannot be +// expressed in TOML (such as nil elements, heterogeneous arrays or directly +// nested arrays of tables). +func tomlArrayType(rv reflect.Value) tomlType { + if isNil(rv) || !rv.IsValid() || rv.Len() == 0 { + return nil + } + firstType := tomlTypeOfGo(rv.Index(0)) + if firstType == nil { + encPanic(errArrayNilElement) + } + + rvlen := rv.Len() + for i := 1; i < rvlen; i++ { + elem := rv.Index(i) + switch elemType := tomlTypeOfGo(elem); { + case elemType == nil: + encPanic(errArrayNilElement) + case !typeEqual(firstType, elemType): + encPanic(errArrayMixedElementTypes) + } + } + // If we have a nested array, then we must make sure that the nested + // array contains ONLY primitives. 
+ // This checks arbitrarily nested arrays. + if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) { + nest := tomlArrayType(eindirect(rv.Index(0))) + if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) { + encPanic(errArrayNoTable) + } + } + return firstType +} + +type tagOptions struct { + skip bool // "-" + name string + omitempty bool + omitzero bool +} + +func getOptions(tag reflect.StructTag) tagOptions { + t := tag.Get("toml") + if t == "-" { + return tagOptions{skip: true} + } + var opts tagOptions + parts := strings.Split(t, ",") + opts.name = parts[0] + for _, s := range parts[1:] { + switch s { + case "omitempty": + opts.omitempty = true + case "omitzero": + opts.omitzero = true + } + } + return opts +} + +func isZero(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return rv.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return rv.Uint() == 0 + case reflect.Float32, reflect.Float64: + return rv.Float() == 0.0 + } + return false +} + +func isEmpty(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Array, reflect.Slice, reflect.Map, reflect.String: + return rv.Len() == 0 + case reflect.Bool: + return !rv.Bool() + } + return false +} + +func (enc *Encoder) newline() { + if enc.hasWritten { + enc.wf("\n") + } +} + +func (enc *Encoder) keyEqElement(key Key, val reflect.Value) { + if len(key) == 0 { + encPanic(errNoKey) + } + panicIfInvalidKey(key) + enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1)) + enc.eElement(val) + enc.newline() +} + +func (enc *Encoder) wf(format string, v ...interface{}) { + if _, err := fmt.Fprintf(enc.w, format, v...); err != nil { + encPanic(err) + } + enc.hasWritten = true +} + +func (enc *Encoder) indentStr(key Key) string { + return strings.Repeat(enc.Indent, len(key)-1) +} + +func encPanic(err error) { + panic(tomlEncodeError{err}) +} + +func 
eindirect(v reflect.Value) reflect.Value { + switch v.Kind() { + case reflect.Ptr, reflect.Interface: + return eindirect(v.Elem()) + default: + return v + } +} + +func isNil(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return rv.IsNil() + default: + return false + } +} + +func panicIfInvalidKey(key Key) { + for _, k := range key { + if len(k) == 0 { + encPanic(e("Key '%s' is not a valid table name. Key names "+ + "cannot be empty.", key.maybeQuotedAll())) + } + } +} + +func isValidKeyName(s string) bool { + return len(s) != 0 +} diff --git a/vendor/github.com/BurntSushi/toml/encoding_types.go b/vendor/github.com/BurntSushi/toml/encoding_types.go new file mode 100644 index 000000000000..d36e1dd6002b --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/encoding_types.go @@ -0,0 +1,19 @@ +// +build go1.2 + +package toml + +// In order to support Go 1.1, we define our own TextMarshaler and +// TextUnmarshaler types. For Go 1.2+, we just alias them with the +// standard library interfaces. + +import ( + "encoding" +) + +// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here +// so that Go 1.1 can be supported. +type TextMarshaler encoding.TextMarshaler + +// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined +// here so that Go 1.1 can be supported. +type TextUnmarshaler encoding.TextUnmarshaler diff --git a/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go b/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go new file mode 100644 index 000000000000..e8d503d04690 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go @@ -0,0 +1,18 @@ +// +build !go1.2 + +package toml + +// These interfaces were introduced in Go 1.2, so we add them manually when +// compiling for Go 1.1. + +// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here +// so that Go 1.1 can be supported. 
+type TextMarshaler interface { + MarshalText() (text []byte, err error) +} + +// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined +// here so that Go 1.1 can be supported. +type TextUnmarshaler interface { + UnmarshalText(text []byte) error +} diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go new file mode 100644 index 000000000000..e0a742a8870f --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/lex.go @@ -0,0 +1,953 @@ +package toml + +import ( + "fmt" + "strings" + "unicode" + "unicode/utf8" +) + +type itemType int + +const ( + itemError itemType = iota + itemNIL // used in the parser to indicate no type + itemEOF + itemText + itemString + itemRawString + itemMultilineString + itemRawMultilineString + itemBool + itemInteger + itemFloat + itemDatetime + itemArray // the start of an array + itemArrayEnd + itemTableStart + itemTableEnd + itemArrayTableStart + itemArrayTableEnd + itemKeyStart + itemCommentStart + itemInlineTableStart + itemInlineTableEnd +) + +const ( + eof = 0 + comma = ',' + tableStart = '[' + tableEnd = ']' + arrayTableStart = '[' + arrayTableEnd = ']' + tableSep = '.' + keySep = '=' + arrayStart = '[' + arrayEnd = ']' + commentStart = '#' + stringStart = '"' + stringEnd = '"' + rawStringStart = '\'' + rawStringEnd = '\'' + inlineTableStart = '{' + inlineTableEnd = '}' +) + +type stateFn func(lx *lexer) stateFn + +type lexer struct { + input string + start int + pos int + line int + state stateFn + items chan item + + // Allow for backing up up to three runes. + // This is necessary because TOML contains 3-rune tokens (""" and '''). + prevWidths [3]int + nprev int // how many of prevWidths are in use + // If we emit an eof, we can still back up, but it is not OK to call + // next again. + atEOF bool + + // A stack of state functions used to maintain context. + // The idea is to reuse parts of the state machine in various places. 
+ // For example, values can appear at the top level or within arbitrarily + // nested arrays. The last state on the stack is used after a value has + // been lexed. Similarly for comments. + stack []stateFn +} + +type item struct { + typ itemType + val string + line int +} + +func (lx *lexer) nextItem() item { + for { + select { + case item := <-lx.items: + return item + default: + lx.state = lx.state(lx) + } + } +} + +func lex(input string) *lexer { + lx := &lexer{ + input: input, + state: lexTop, + line: 1, + items: make(chan item, 10), + stack: make([]stateFn, 0, 10), + } + return lx +} + +func (lx *lexer) push(state stateFn) { + lx.stack = append(lx.stack, state) +} + +func (lx *lexer) pop() stateFn { + if len(lx.stack) == 0 { + return lx.errorf("BUG in lexer: no states to pop") + } + last := lx.stack[len(lx.stack)-1] + lx.stack = lx.stack[0 : len(lx.stack)-1] + return last +} + +func (lx *lexer) current() string { + return lx.input[lx.start:lx.pos] +} + +func (lx *lexer) emit(typ itemType) { + lx.items <- item{typ, lx.current(), lx.line} + lx.start = lx.pos +} + +func (lx *lexer) emitTrim(typ itemType) { + lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line} + lx.start = lx.pos +} + +func (lx *lexer) next() (r rune) { + if lx.atEOF { + panic("next called after EOF") + } + if lx.pos >= len(lx.input) { + lx.atEOF = true + return eof + } + + if lx.input[lx.pos] == '\n' { + lx.line++ + } + lx.prevWidths[2] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[0] + if lx.nprev < 3 { + lx.nprev++ + } + r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) + lx.prevWidths[0] = w + lx.pos += w + return r +} + +// ignore skips over the pending input before this point. +func (lx *lexer) ignore() { + lx.start = lx.pos +} + +// backup steps back one rune. Can be called only twice between calls to next. 
+func (lx *lexer) backup() { + if lx.atEOF { + lx.atEOF = false + return + } + if lx.nprev < 1 { + panic("backed up too far") + } + w := lx.prevWidths[0] + lx.prevWidths[0] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[2] + lx.nprev-- + lx.pos -= w + if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { + lx.line-- + } +} + +// accept consumes the next rune if it's equal to `valid`. +func (lx *lexer) accept(valid rune) bool { + if lx.next() == valid { + return true + } + lx.backup() + return false +} + +// peek returns but does not consume the next rune in the input. +func (lx *lexer) peek() rune { + r := lx.next() + lx.backup() + return r +} + +// skip ignores all input that matches the given predicate. +func (lx *lexer) skip(pred func(rune) bool) { + for { + r := lx.next() + if pred(r) { + continue + } + lx.backup() + lx.ignore() + return + } +} + +// errorf stops all lexing by emitting an error and returning `nil`. +// Note that any value that is a character is escaped if it's a special +// character (newlines, tabs, etc.). +func (lx *lexer) errorf(format string, values ...interface{}) stateFn { + lx.items <- item{ + itemError, + fmt.Sprintf(format, values...), + lx.line, + } + return nil +} + +// lexTop consumes elements at the top level of TOML data. +func lexTop(lx *lexer) stateFn { + r := lx.next() + if isWhitespace(r) || isNL(r) { + return lexSkip(lx, lexTop) + } + switch r { + case commentStart: + lx.push(lexTop) + return lexCommentStart + case tableStart: + return lexTableStart + case eof: + if lx.pos > lx.start { + return lx.errorf("unexpected EOF") + } + lx.emit(itemEOF) + return nil + } + + // At this point, the only valid item can be a key, so we back up + // and let the key lexer do the rest. + lx.backup() + lx.push(lexTopEnd) + return lexKeyStart +} + +// lexTopEnd is entered whenever a top-level item has been consumed. (A value +// or a table.) It must see only whitespace, and will turn back to lexTop +// upon a newline. 
If it sees EOF, it will quit the lexer successfully. +func lexTopEnd(lx *lexer) stateFn { + r := lx.next() + switch { + case r == commentStart: + // a comment will read to a newline for us. + lx.push(lexTop) + return lexCommentStart + case isWhitespace(r): + return lexTopEnd + case isNL(r): + lx.ignore() + return lexTop + case r == eof: + lx.emit(itemEOF) + return nil + } + return lx.errorf("expected a top-level item to end with a newline, "+ + "comment, or EOF, but got %q instead", r) +} + +// lexTable lexes the beginning of a table. Namely, it makes sure that +// it starts with a character other than '.' and ']'. +// It assumes that '[' has already been consumed. +// It also handles the case that this is an item in an array of tables. +// e.g., '[[name]]'. +func lexTableStart(lx *lexer) stateFn { + if lx.peek() == arrayTableStart { + lx.next() + lx.emit(itemArrayTableStart) + lx.push(lexArrayTableEnd) + } else { + lx.emit(itemTableStart) + lx.push(lexTableEnd) + } + return lexTableNameStart +} + +func lexTableEnd(lx *lexer) stateFn { + lx.emit(itemTableEnd) + return lexTopEnd +} + +func lexArrayTableEnd(lx *lexer) stateFn { + if r := lx.next(); r != arrayTableEnd { + return lx.errorf("expected end of table array name delimiter %q, "+ + "but got %q instead", arrayTableEnd, r) + } + lx.emit(itemArrayTableEnd) + return lexTopEnd +} + +func lexTableNameStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == tableEnd || r == eof: + return lx.errorf("unexpected end of table name " + + "(table names cannot be empty)") + case r == tableSep: + return lx.errorf("unexpected table separator " + + "(table names cannot be empty)") + case r == stringStart || r == rawStringStart: + lx.ignore() + lx.push(lexTableNameEnd) + return lexValue // reuse string lexing + default: + return lexBareTableName + } +} + +// lexBareTableName lexes the name of a table. It assumes that at least one +// valid character for the table has already been read. 
+func lexBareTableName(lx *lexer) stateFn { + r := lx.next() + if isBareKeyChar(r) { + return lexBareTableName + } + lx.backup() + lx.emit(itemText) + return lexTableNameEnd +} + +// lexTableNameEnd reads the end of a piece of a table name, optionally +// consuming whitespace. +func lexTableNameEnd(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.next(); { + case isWhitespace(r): + return lexTableNameEnd + case r == tableSep: + lx.ignore() + return lexTableNameStart + case r == tableEnd: + return lx.pop() + default: + return lx.errorf("expected '.' or ']' to end table name, "+ + "but got %q instead", r) + } +} + +// lexKeyStart consumes a key name up until the first non-whitespace character. +// lexKeyStart will ignore whitespace. +func lexKeyStart(lx *lexer) stateFn { + r := lx.peek() + switch { + case r == keySep: + return lx.errorf("unexpected key separator %q", keySep) + case isWhitespace(r) || isNL(r): + lx.next() + return lexSkip(lx, lexKeyStart) + case r == stringStart || r == rawStringStart: + lx.ignore() + lx.emit(itemKeyStart) + lx.push(lexKeyEnd) + return lexValue // reuse string lexing + default: + lx.ignore() + lx.emit(itemKeyStart) + return lexBareKey + } +} + +// lexBareKey consumes the text of a bare key. Assumes that the first character +// (which is not whitespace) has not yet been consumed. +func lexBareKey(lx *lexer) stateFn { + switch r := lx.next(); { + case isBareKeyChar(r): + return lexBareKey + case isWhitespace(r): + lx.backup() + lx.emit(itemText) + return lexKeyEnd + case r == keySep: + lx.backup() + lx.emit(itemText) + return lexKeyEnd + default: + return lx.errorf("bare keys cannot contain %q", r) + } +} + +// lexKeyEnd consumes the end of a key and trims whitespace (up to the key +// separator). 
+func lexKeyEnd(lx *lexer) stateFn { + switch r := lx.next(); { + case r == keySep: + return lexSkip(lx, lexValue) + case isWhitespace(r): + return lexSkip(lx, lexKeyEnd) + default: + return lx.errorf("expected key separator %q, but got %q instead", + keySep, r) + } +} + +// lexValue starts the consumption of a value anywhere a value is expected. +// lexValue will ignore whitespace. +// After a value is lexed, the last state on the next is popped and returned. +func lexValue(lx *lexer) stateFn { + // We allow whitespace to precede a value, but NOT newlines. + // In array syntax, the array states are responsible for ignoring newlines. + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexValue) + case isDigit(r): + lx.backup() // avoid an extra state and use the same as above + return lexNumberOrDateStart + } + switch r { + case arrayStart: + lx.ignore() + lx.emit(itemArray) + return lexArrayValue + case inlineTableStart: + lx.ignore() + lx.emit(itemInlineTableStart) + return lexInlineTableValue + case stringStart: + if lx.accept(stringStart) { + if lx.accept(stringStart) { + lx.ignore() // Ignore """ + return lexMultilineString + } + lx.backup() + } + lx.ignore() // ignore the '"' + return lexString + case rawStringStart: + if lx.accept(rawStringStart) { + if lx.accept(rawStringStart) { + lx.ignore() // Ignore """ + return lexMultilineRawString + } + lx.backup() + } + lx.ignore() // ignore the "'" + return lexRawString + case '+', '-': + return lexNumberStart + case '.': // special error case, be kind to users + return lx.errorf("floats must start with a digit, not '.'") + } + if unicode.IsLetter(r) { + // Be permissive here; lexBool will give a nice error if the + // user wrote something like + // x = foo + // (i.e. not 'true' or 'false' but is something else word-like.) + lx.backup() + return lexBool + } + return lx.errorf("expected value but found %q instead", r) +} + +// lexArrayValue consumes one value in an array. 
It assumes that '[' or ',' +// have already been consumed. All whitespace and newlines are ignored. +func lexArrayValue(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r) || isNL(r): + return lexSkip(lx, lexArrayValue) + case r == commentStart: + lx.push(lexArrayValue) + return lexCommentStart + case r == comma: + return lx.errorf("unexpected comma") + case r == arrayEnd: + // NOTE(caleb): The spec isn't clear about whether you can have + // a trailing comma or not, so we'll allow it. + return lexArrayEnd + } + + lx.backup() + lx.push(lexArrayValueEnd) + return lexValue +} + +// lexArrayValueEnd consumes everything between the end of an array value and +// the next value (or the end of the array): it ignores whitespace and newlines +// and expects either a ',' or a ']'. +func lexArrayValueEnd(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r) || isNL(r): + return lexSkip(lx, lexArrayValueEnd) + case r == commentStart: + lx.push(lexArrayValueEnd) + return lexCommentStart + case r == comma: + lx.ignore() + return lexArrayValue // move on to the next value + case r == arrayEnd: + return lexArrayEnd + } + return lx.errorf( + "expected a comma or array terminator %q, but got %q instead", + arrayEnd, r, + ) +} + +// lexArrayEnd finishes the lexing of an array. +// It assumes that a ']' has just been consumed. +func lexArrayEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemArrayEnd) + return lx.pop() +} + +// lexInlineTableValue consumes one key/value pair in an inline table. +// It assumes that '{' or ',' have already been consumed. Whitespace is ignored. 
+func lexInlineTableValue(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValue) + case isNL(r): + return lx.errorf("newlines not allowed within inline tables") + case r == commentStart: + lx.push(lexInlineTableValue) + return lexCommentStart + case r == comma: + return lx.errorf("unexpected comma") + case r == inlineTableEnd: + return lexInlineTableEnd + } + lx.backup() + lx.push(lexInlineTableValueEnd) + return lexKeyStart +} + +// lexInlineTableValueEnd consumes everything between the end of an inline table +// key/value pair and the next pair (or the end of the table): +// it ignores whitespace and expects either a ',' or a '}'. +func lexInlineTableValueEnd(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValueEnd) + case isNL(r): + return lx.errorf("newlines not allowed within inline tables") + case r == commentStart: + lx.push(lexInlineTableValueEnd) + return lexCommentStart + case r == comma: + lx.ignore() + return lexInlineTableValue + case r == inlineTableEnd: + return lexInlineTableEnd + } + return lx.errorf("expected a comma or an inline table terminator %q, "+ + "but got %q instead", inlineTableEnd, r) +} + +// lexInlineTableEnd finishes the lexing of an inline table. +// It assumes that a '}' has just been consumed. +func lexInlineTableEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemInlineTableEnd) + return lx.pop() +} + +// lexString consumes the inner contents of a string. It assumes that the +// beginning '"' has already been consumed and ignored. 
+func lexString(lx *lexer) stateFn { + r := lx.next() + switch { + case r == eof: + return lx.errorf("unexpected EOF") + case isNL(r): + return lx.errorf("strings cannot contain newlines") + case r == '\\': + lx.push(lexString) + return lexStringEscape + case r == stringEnd: + lx.backup() + lx.emit(itemString) + lx.next() + lx.ignore() + return lx.pop() + } + return lexString +} + +// lexMultilineString consumes the inner contents of a string. It assumes that +// the beginning '"""' has already been consumed and ignored. +func lexMultilineString(lx *lexer) stateFn { + switch lx.next() { + case eof: + return lx.errorf("unexpected EOF") + case '\\': + return lexMultilineStringEscape + case stringEnd: + if lx.accept(stringEnd) { + if lx.accept(stringEnd) { + lx.backup() + lx.backup() + lx.backup() + lx.emit(itemMultilineString) + lx.next() + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + } + return lexMultilineString +} + +// lexRawString consumes a raw string. Nothing can be escaped in such a string. +// It assumes that the beginning "'" has already been consumed and ignored. +func lexRawString(lx *lexer) stateFn { + r := lx.next() + switch { + case r == eof: + return lx.errorf("unexpected EOF") + case isNL(r): + return lx.errorf("strings cannot contain newlines") + case r == rawStringEnd: + lx.backup() + lx.emit(itemRawString) + lx.next() + lx.ignore() + return lx.pop() + } + return lexRawString +} + +// lexMultilineRawString consumes a raw string. Nothing can be escaped in such +// a string. It assumes that the beginning "'''" has already been consumed and +// ignored. 
+func lexMultilineRawString(lx *lexer) stateFn { + switch lx.next() { + case eof: + return lx.errorf("unexpected EOF") + case rawStringEnd: + if lx.accept(rawStringEnd) { + if lx.accept(rawStringEnd) { + lx.backup() + lx.backup() + lx.backup() + lx.emit(itemRawMultilineString) + lx.next() + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + } + return lexMultilineRawString +} + +// lexMultilineStringEscape consumes an escaped character. It assumes that the +// preceding '\\' has already been consumed. +func lexMultilineStringEscape(lx *lexer) stateFn { + // Handle the special case first: + if isNL(lx.next()) { + return lexMultilineString + } + lx.backup() + lx.push(lexMultilineString) + return lexStringEscape(lx) +} + +func lexStringEscape(lx *lexer) stateFn { + r := lx.next() + switch r { + case 'b': + fallthrough + case 't': + fallthrough + case 'n': + fallthrough + case 'f': + fallthrough + case 'r': + fallthrough + case '"': + fallthrough + case '\\': + return lx.pop() + case 'u': + return lexShortUnicodeEscape + case 'U': + return lexLongUnicodeEscape + } + return lx.errorf("invalid escape character %q; only the following "+ + "escape characters are allowed: "+ + `\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r) +} + +func lexShortUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 4; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf(`expected four hexadecimal digits after '\u', `+ + "but got %q instead", lx.current()) + } + } + return lx.pop() +} + +func lexLongUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 8; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf(`expected eight hexadecimal digits after '\U', `+ + "but got %q instead", lx.current()) + } + } + return lx.pop() +} + +// lexNumberOrDateStart consumes either an integer, a float, or datetime. 
+func lexNumberOrDateStart(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexNumberOrDate + } + switch r { + case '_': + return lexNumber + case 'e', 'E': + return lexFloat + case '.': + return lx.errorf("floats must start with a digit, not '.'") + } + return lx.errorf("expected a digit but got %q", r) +} + +// lexNumberOrDate consumes either an integer, float or datetime. +func lexNumberOrDate(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexNumberOrDate + } + switch r { + case '-': + return lexDatetime + case '_': + return lexNumber + case '.', 'e', 'E': + return lexFloat + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDatetime consumes a Datetime, to a first approximation. +// The parser validates that it matches one of the accepted formats. +func lexDatetime(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexDatetime + } + switch r { + case '-', 'T', ':', '.', 'Z', '+': + return lexDatetime + } + + lx.backup() + lx.emit(itemDatetime) + return lx.pop() +} + +// lexNumberStart consumes either an integer or a float. It assumes that a sign +// has already been read, but that *no* digits have been consumed. +// lexNumberStart will move to the appropriate integer or float states. +func lexNumberStart(lx *lexer) stateFn { + // We MUST see a digit. Even floats have to start with a digit. + r := lx.next() + if !isDigit(r) { + if r == '.' { + return lx.errorf("floats must start with a digit, not '.'") + } + return lx.errorf("expected a digit but got %q", r) + } + return lexNumber +} + +// lexNumber consumes an integer or a float after seeing the first digit. +func lexNumber(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexNumber + } + switch r { + case '_': + return lexNumber + case '.', 'e', 'E': + return lexFloat + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexFloat consumes the elements of a float. 
It allows any sequence of +// float-like characters, so floats emitted by the lexer are only a first +// approximation and must be validated by the parser. +func lexFloat(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexFloat + } + switch r { + case '_', '.', '-', '+', 'e', 'E': + return lexFloat + } + + lx.backup() + lx.emit(itemFloat) + return lx.pop() +} + +// lexBool consumes a bool string: 'true' or 'false. +func lexBool(lx *lexer) stateFn { + var rs []rune + for { + r := lx.next() + if !unicode.IsLetter(r) { + lx.backup() + break + } + rs = append(rs, r) + } + s := string(rs) + switch s { + case "true", "false": + lx.emit(itemBool) + return lx.pop() + } + return lx.errorf("expected value but found %q instead", s) +} + +// lexCommentStart begins the lexing of a comment. It will emit +// itemCommentStart and consume no characters, passing control to lexComment. +func lexCommentStart(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemCommentStart) + return lexComment +} + +// lexComment lexes an entire comment. It assumes that '#' has been consumed. +// It will consume *up to* the first newline character, and pass control +// back to the last state on the stack. +func lexComment(lx *lexer) stateFn { + r := lx.peek() + if isNL(r) || r == eof { + lx.emit(itemText) + return lx.pop() + } + lx.next() + return lexComment +} + +// lexSkip ignores all slurped input and moves on to the next state. +func lexSkip(lx *lexer, nextState stateFn) stateFn { + return func(lx *lexer) stateFn { + lx.ignore() + return nextState + } +} + +// isWhitespace returns true if `r` is a whitespace character according +// to the spec. 
+func isWhitespace(r rune) bool { + return r == '\t' || r == ' ' +} + +func isNL(r rune) bool { + return r == '\n' || r == '\r' +} + +func isDigit(r rune) bool { + return r >= '0' && r <= '9' +} + +func isHexadecimal(r rune) bool { + return (r >= '0' && r <= '9') || + (r >= 'a' && r <= 'f') || + (r >= 'A' && r <= 'F') +} + +func isBareKeyChar(r rune) bool { + return (r >= 'A' && r <= 'Z') || + (r >= 'a' && r <= 'z') || + (r >= '0' && r <= '9') || + r == '_' || + r == '-' +} + +func (itype itemType) String() string { + switch itype { + case itemError: + return "Error" + case itemNIL: + return "NIL" + case itemEOF: + return "EOF" + case itemText: + return "Text" + case itemString, itemRawString, itemMultilineString, itemRawMultilineString: + return "String" + case itemBool: + return "Bool" + case itemInteger: + return "Integer" + case itemFloat: + return "Float" + case itemDatetime: + return "DateTime" + case itemTableStart: + return "TableStart" + case itemTableEnd: + return "TableEnd" + case itemKeyStart: + return "KeyStart" + case itemArray: + return "Array" + case itemArrayEnd: + return "ArrayEnd" + case itemCommentStart: + return "CommentStart" + } + panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) +} + +func (item item) String() string { + return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) +} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go new file mode 100644 index 000000000000..50869ef9266e --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/parse.go @@ -0,0 +1,592 @@ +package toml + +import ( + "fmt" + "strconv" + "strings" + "time" + "unicode" + "unicode/utf8" +) + +type parser struct { + mapping map[string]interface{} + types map[string]tomlType + lx *lexer + + // A list of keys in the order that they appear in the TOML data. 
+ ordered []Key + + // the full key for the current hash in scope + context Key + + // the base key name for everything except hashes + currentKey string + + // rough approximation of line number + approxLine int + + // A map of 'key.group.names' to whether they were created implicitly. + implicits map[string]bool +} + +type parseError string + +func (pe parseError) Error() string { + return string(pe) +} + +func parse(data string) (p *parser, err error) { + defer func() { + if r := recover(); r != nil { + var ok bool + if err, ok = r.(parseError); ok { + return + } + panic(r) + } + }() + + p = &parser{ + mapping: make(map[string]interface{}), + types: make(map[string]tomlType), + lx: lex(data), + ordered: make([]Key, 0), + implicits: make(map[string]bool), + } + for { + item := p.next() + if item.typ == itemEOF { + break + } + p.topLevel(item) + } + + return p, nil +} + +func (p *parser) panicf(format string, v ...interface{}) { + msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s", + p.approxLine, p.current(), fmt.Sprintf(format, v...)) + panic(parseError(msg)) +} + +func (p *parser) next() item { + it := p.lx.nextItem() + if it.typ == itemError { + p.panicf("%s", it.val) + } + return it +} + +func (p *parser) bug(format string, v ...interface{}) { + panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) +} + +func (p *parser) expect(typ itemType) item { + it := p.next() + p.assertEqual(typ, it.typ) + return it +} + +func (p *parser) assertEqual(expected, got itemType) { + if expected != got { + p.bug("Expected '%s' but got '%s'.", expected, got) + } +} + +func (p *parser) topLevel(item item) { + switch item.typ { + case itemCommentStart: + p.approxLine = item.line + p.expect(itemText) + case itemTableStart: + kg := p.next() + p.approxLine = kg.line + + var key Key + for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() { + key = append(key, p.keyString(kg)) + } + p.assertEqual(itemTableEnd, kg.typ) + + p.establishContext(key, false) + p.setType("", 
tomlHash) + p.ordered = append(p.ordered, key) + case itemArrayTableStart: + kg := p.next() + p.approxLine = kg.line + + var key Key + for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() { + key = append(key, p.keyString(kg)) + } + p.assertEqual(itemArrayTableEnd, kg.typ) + + p.establishContext(key, true) + p.setType("", tomlArrayHash) + p.ordered = append(p.ordered, key) + case itemKeyStart: + kname := p.next() + p.approxLine = kname.line + p.currentKey = p.keyString(kname) + + val, typ := p.value(p.next()) + p.setValue(p.currentKey, val) + p.setType(p.currentKey, typ) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + p.currentKey = "" + default: + p.bug("Unexpected type at top level: %s", item.typ) + } +} + +// Gets a string for a key (or part of a key in a table name). +func (p *parser) keyString(it item) string { + switch it.typ { + case itemText: + return it.val + case itemString, itemMultilineString, + itemRawString, itemRawMultilineString: + s, _ := p.value(it) + return s.(string) + default: + p.bug("Unexpected key type: %s", it.typ) + panic("unreachable") + } +} + +// value translates an expected value from the lexer into a Go value wrapped +// as an empty interface. 
+func (p *parser) value(it item) (interface{}, tomlType) { + switch it.typ { + case itemString: + return p.replaceEscapes(it.val), p.typeOfPrimitive(it) + case itemMultilineString: + trimmed := stripFirstNewline(stripEscapedWhitespace(it.val)) + return p.replaceEscapes(trimmed), p.typeOfPrimitive(it) + case itemRawString: + return it.val, p.typeOfPrimitive(it) + case itemRawMultilineString: + return stripFirstNewline(it.val), p.typeOfPrimitive(it) + case itemBool: + switch it.val { + case "true": + return true, p.typeOfPrimitive(it) + case "false": + return false, p.typeOfPrimitive(it) + } + p.bug("Expected boolean value, but got '%s'.", it.val) + case itemInteger: + if !numUnderscoresOK(it.val) { + p.panicf("Invalid integer %q: underscores must be surrounded by digits", + it.val) + } + val := strings.Replace(it.val, "_", "", -1) + num, err := strconv.ParseInt(val, 10, 64) + if err != nil { + // Distinguish integer values. Normally, it'd be a bug if the lexer + // provides an invalid integer, but it's possible that the number is + // out of range of valid values (which the lexer cannot determine). + // So mark the former as a bug but the latter as a legitimate user + // error. + if e, ok := err.(*strconv.NumError); ok && + e.Err == strconv.ErrRange { + + p.panicf("Integer '%s' is out of the range of 64-bit "+ + "signed integers.", it.val) + } else { + p.bug("Expected integer value, but got '%s'.", it.val) + } + } + return num, p.typeOfPrimitive(it) + case itemFloat: + parts := strings.FieldsFunc(it.val, func(r rune) bool { + switch r { + case '.', 'e', 'E': + return true + } + return false + }) + for _, part := range parts { + if !numUnderscoresOK(part) { + p.panicf("Invalid float %q: underscores must be "+ + "surrounded by digits", it.val) + } + } + if !numPeriodsOK(it.val) { + // As a special case, numbers like '123.' 
or '1.e2', + // which are valid as far as Go/strconv are concerned, + // must be rejected because TOML says that a fractional + // part consists of '.' followed by 1+ digits. + p.panicf("Invalid float %q: '.' must be followed "+ + "by one or more digits", it.val) + } + val := strings.Replace(it.val, "_", "", -1) + num, err := strconv.ParseFloat(val, 64) + if err != nil { + if e, ok := err.(*strconv.NumError); ok && + e.Err == strconv.ErrRange { + + p.panicf("Float '%s' is out of the range of 64-bit "+ + "IEEE-754 floating-point numbers.", it.val) + } else { + p.panicf("Invalid float value: %q", it.val) + } + } + return num, p.typeOfPrimitive(it) + case itemDatetime: + var t time.Time + var ok bool + var err error + for _, format := range []string{ + "2006-01-02T15:04:05Z07:00", + "2006-01-02T15:04:05", + "2006-01-02", + } { + t, err = time.ParseInLocation(format, it.val, time.Local) + if err == nil { + ok = true + break + } + } + if !ok { + p.panicf("Invalid TOML Datetime: %q.", it.val) + } + return t, p.typeOfPrimitive(it) + case itemArray: + array := make([]interface{}, 0) + types := make([]tomlType, 0) + + for it = p.next(); it.typ != itemArrayEnd; it = p.next() { + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + val, typ := p.value(it) + array = append(array, val) + types = append(types, typ) + } + return array, p.typeOfArray(types) + case itemInlineTableStart: + var ( + hash = make(map[string]interface{}) + outerContext = p.context + outerKey = p.currentKey + ) + + p.context = append(p.context, p.currentKey) + p.currentKey = "" + for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() { + if it.typ != itemKeyStart { + p.bug("Expected key start but instead found %q, around line %d", + it.val, p.approxLine) + } + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + // retrieve key + k := p.next() + p.approxLine = k.line + kname := p.keyString(k) + + // retrieve value + p.currentKey = kname + val, typ := 
p.value(p.next()) + // make sure we keep metadata up to date + p.setType(kname, typ) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + hash[kname] = val + } + p.context = outerContext + p.currentKey = outerKey + return hash, tomlHash + } + p.bug("Unexpected value type: %s", it.typ) + panic("unreachable") +} + +// numUnderscoresOK checks whether each underscore in s is surrounded by +// characters that are not underscores. +func numUnderscoresOK(s string) bool { + accept := false + for _, r := range s { + if r == '_' { + if !accept { + return false + } + accept = false + continue + } + accept = true + } + return accept +} + +// numPeriodsOK checks whether every period in s is followed by a digit. +func numPeriodsOK(s string) bool { + period := false + for _, r := range s { + if period && !isDigit(r) { + return false + } + period = r == '.' + } + return !period +} + +// establishContext sets the current context of the parser, +// where the context is either a hash or an array of hashes. Which one is +// set depends on the value of the `array` parameter. +// +// Establishing the context also makes sure that the key isn't a duplicate, and +// will create implicit hashes automatically. +func (p *parser) establishContext(key Key, array bool) { + var ok bool + + // Always start at the top level and drill down for our context. + hashContext := p.mapping + keyContext := make(Key, 0) + + // We only need implicit hashes for key[0:-1] + for _, k := range key[0 : len(key)-1] { + _, ok = hashContext[k] + keyContext = append(keyContext, k) + + // No key? Make an implicit hash and move on. + if !ok { + p.addImplicit(keyContext) + hashContext[k] = make(map[string]interface{}) + } + + // If the hash context is actually an array of tables, then set + // the hash context to the last element in that array. + // + // Otherwise, it better be a table, since this MUST be a key group (by + // virtue of it not being the last element in a key). 
+ switch t := hashContext[k].(type) { + case []map[string]interface{}: + hashContext = t[len(t)-1] + case map[string]interface{}: + hashContext = t + default: + p.panicf("Key '%s' was already created as a hash.", keyContext) + } + } + + p.context = keyContext + if array { + // If this is the first element for this array, then allocate a new + // list of tables for it. + k := key[len(key)-1] + if _, ok := hashContext[k]; !ok { + hashContext[k] = make([]map[string]interface{}, 0, 5) + } + + // Add a new table. But make sure the key hasn't already been used + // for something else. + if hash, ok := hashContext[k].([]map[string]interface{}); ok { + hashContext[k] = append(hash, make(map[string]interface{})) + } else { + p.panicf("Key '%s' was already created and cannot be used as "+ + "an array.", keyContext) + } + } else { + p.setValue(key[len(key)-1], make(map[string]interface{})) + } + p.context = append(p.context, key[len(key)-1]) +} + +// setValue sets the given key to the given value in the current context. +// It will make sure that the key hasn't already been defined, account for +// implicit key groups. +func (p *parser) setValue(key string, value interface{}) { + var tmpHash interface{} + var ok bool + + hash := p.mapping + keyContext := make(Key, 0) + for _, k := range p.context { + keyContext = append(keyContext, k) + if tmpHash, ok = hash[k]; !ok { + p.bug("Context for key '%s' has not been established.", keyContext) + } + switch t := tmpHash.(type) { + case []map[string]interface{}: + // The context is a table of hashes. Pick the most recent table + // defined as the current hash. 
+ hash = t[len(t)-1] + case map[string]interface{}: + hash = t + default: + p.bug("Expected hash to have type 'map[string]interface{}', but "+ + "it has '%T' instead.", tmpHash) + } + } + keyContext = append(keyContext, key) + + if _, ok := hash[key]; ok { + // Typically, if the given key has already been set, then we have + // to raise an error since duplicate keys are disallowed. However, + // it's possible that a key was previously defined implicitly. In this + // case, it is allowed to be redefined concretely. (See the + // `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.) + // + // But we have to make sure to stop marking it as an implicit. (So that + // another redefinition provokes an error.) + // + // Note that since it has already been defined (as a hash), we don't + // want to overwrite it. So our business is done. + if p.isImplicit(keyContext) { + p.removeImplicit(keyContext) + return + } + + // Otherwise, we have a concrete key trying to override a previous + // key, which is *always* wrong. + p.panicf("Key '%s' has already been defined.", keyContext) + } + hash[key] = value +} + +// setType sets the type of a particular value at a given key. +// It should be called immediately AFTER setValue. +// +// Note that if `key` is empty, then the type given will be applied to the +// current context (which is either a table or an array of tables). +func (p *parser) setType(key string, typ tomlType) { + keyContext := make(Key, 0, len(p.context)+1) + for _, k := range p.context { + keyContext = append(keyContext, k) + } + if len(key) > 0 { // allow type setting for hashes + keyContext = append(keyContext, key) + } + p.types[keyContext.String()] = typ +} + +// addImplicit sets the given Key as having been created implicitly. +func (p *parser) addImplicit(key Key) { + p.implicits[key.String()] = true +} + +// removeImplicit stops tagging the given key as having been implicitly +// created. 
+func (p *parser) removeImplicit(key Key) { + p.implicits[key.String()] = false +} + +// isImplicit returns true if the key group pointed to by the key was created +// implicitly. +func (p *parser) isImplicit(key Key) bool { + return p.implicits[key.String()] +} + +// current returns the full key name of the current context. +func (p *parser) current() string { + if len(p.currentKey) == 0 { + return p.context.String() + } + if len(p.context) == 0 { + return p.currentKey + } + return fmt.Sprintf("%s.%s", p.context, p.currentKey) +} + +func stripFirstNewline(s string) string { + if len(s) == 0 || s[0] != '\n' { + return s + } + return s[1:] +} + +func stripEscapedWhitespace(s string) string { + esc := strings.Split(s, "\\\n") + if len(esc) > 1 { + for i := 1; i < len(esc); i++ { + esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace) + } + } + return strings.Join(esc, "") +} + +func (p *parser) replaceEscapes(str string) string { + var replaced []rune + s := []byte(str) + r := 0 + for r < len(s) { + if s[r] != '\\' { + c, size := utf8.DecodeRune(s[r:]) + r += size + replaced = append(replaced, c) + continue + } + r += 1 + if r >= len(s) { + p.bug("Escape sequence at end of string.") + return "" + } + switch s[r] { + default: + p.bug("Expected valid escape code after \\, but got %q.", s[r]) + return "" + case 'b': + replaced = append(replaced, rune(0x0008)) + r += 1 + case 't': + replaced = append(replaced, rune(0x0009)) + r += 1 + case 'n': + replaced = append(replaced, rune(0x000A)) + r += 1 + case 'f': + replaced = append(replaced, rune(0x000C)) + r += 1 + case 'r': + replaced = append(replaced, rune(0x000D)) + r += 1 + case '"': + replaced = append(replaced, rune(0x0022)) + r += 1 + case '\\': + replaced = append(replaced, rune(0x005C)) + r += 1 + case 'u': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+5). (Because the lexer guarantees this + // for us.) 
+ escaped := p.asciiEscapeToUnicode(s[r+1 : r+5]) + replaced = append(replaced, escaped) + r += 5 + case 'U': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+9). (Because the lexer guarantees this + // for us.) + escaped := p.asciiEscapeToUnicode(s[r+1 : r+9]) + replaced = append(replaced, escaped) + r += 9 + } + } + return string(replaced) +} + +func (p *parser) asciiEscapeToUnicode(bs []byte) rune { + s := string(bs) + hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) + if err != nil { + p.bug("Could not parse '%s' as a hexadecimal number, but the "+ + "lexer claims it's OK: %s", s, err) + } + if !utf8.ValidRune(rune(hex)) { + p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s) + } + return rune(hex) +} + +func isStringType(ty itemType) bool { + return ty == itemString || ty == itemMultilineString || + ty == itemRawString || ty == itemRawMultilineString +} diff --git a/vendor/github.com/BurntSushi/toml/session.vim b/vendor/github.com/BurntSushi/toml/session.vim new file mode 100644 index 000000000000..562164be0603 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/session.vim @@ -0,0 +1 @@ +au BufWritePost *.go silent!make tags > /dev/null 2>&1 diff --git a/vendor/github.com/BurntSushi/toml/type_check.go b/vendor/github.com/BurntSushi/toml/type_check.go new file mode 100644 index 000000000000..c73f8afc1a6d --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_check.go @@ -0,0 +1,91 @@ +package toml + +// tomlType represents any Go type that corresponds to a TOML type. +// While the first draft of the TOML spec has a simplistic type system that +// probably doesn't need this level of sophistication, we seem to be militating +// toward adding real composite types. +type tomlType interface { + typeString() string +} + +// typeEqual accepts any two types and returns true if they are equal. 
+func typeEqual(t1, t2 tomlType) bool { + if t1 == nil || t2 == nil { + return false + } + return t1.typeString() == t2.typeString() +} + +func typeIsHash(t tomlType) bool { + return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) +} + +type tomlBaseType string + +func (btype tomlBaseType) typeString() string { + return string(btype) +} + +func (btype tomlBaseType) String() string { + return btype.typeString() +} + +var ( + tomlInteger tomlBaseType = "Integer" + tomlFloat tomlBaseType = "Float" + tomlDatetime tomlBaseType = "Datetime" + tomlString tomlBaseType = "String" + tomlBool tomlBaseType = "Bool" + tomlArray tomlBaseType = "Array" + tomlHash tomlBaseType = "Hash" + tomlArrayHash tomlBaseType = "ArrayHash" +) + +// typeOfPrimitive returns a tomlType of any primitive value in TOML. +// Primitive values are: Integer, Float, Datetime, String and Bool. +// +// Passing a lexer item other than the following will cause a BUG message +// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. +func (p *parser) typeOfPrimitive(lexItem item) tomlType { + switch lexItem.typ { + case itemInteger: + return tomlInteger + case itemFloat: + return tomlFloat + case itemDatetime: + return tomlDatetime + case itemString: + return tomlString + case itemMultilineString: + return tomlString + case itemRawString: + return tomlString + case itemRawMultilineString: + return tomlString + case itemBool: + return tomlBool + } + p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) + panic("unreachable") +} + +// typeOfArray returns a tomlType for an array given a list of types of its +// values. +// +// In the current spec, if an array is homogeneous, then its type is always +// "Array". If the array is not homogeneous, an error is generated. +func (p *parser) typeOfArray(types []tomlType) tomlType { + // Empty arrays are cool. 
+ if len(types) == 0 { + return tomlArray + } + + theType := types[0] + for _, t := range types[1:] { + if !typeEqual(theType, t) { + p.panicf("Array contains values of type '%s' and '%s', but "+ + "arrays must be homogeneous.", theType, t) + } + } + return tomlArray +} diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go new file mode 100644 index 000000000000..608997c22f68 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_fields.go @@ -0,0 +1,242 @@ +package toml + +// Struct field handling is adapted from code in encoding/json: +// +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the Go distribution. + +import ( + "reflect" + "sort" + "sync" +) + +// A field represents a single field found in a struct. +type field struct { + name string // the name of the field (`toml` tag included) + tag bool // whether field has a `toml` tag + index []int // represents the depth of an anonymous field + typ reflect.Type // the type of the field +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from toml tag", then +// breaking ties with index sequence. +type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. 
+type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that TOML should recognize for the given +// type. The algorithm is breadth-first search over the set of structs to +// include - the top struct and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + count := map[reflect.Type]int{} + nextCount := map[reflect.Type]int{} + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. + for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" && !sf.Anonymous { // unexported + continue + } + opts := getOptions(sf.Tag) + if opts.skip { + continue + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. 
+ if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := opts.name != "" + name := opts.name + if name == "" { + name = sf.Name + } + fields = append(fields, field{name, tagged, index, ft}) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. + nextCount[ft]++ + if nextCount[ft] == 1 { + f := field{name: ft.Name(), index: index, typ: ft} + next = append(next, f) + } + } + } + } + + sort.Sort(byName(fields)) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with TOML tags are promoted. + + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + return fields +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// TOML tags. 
If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order. The winner + // must therefore be one with the shortest index length. Drop all + // longer entries, which is easy: just truncate the slice. + length := len(fields[0].index) + tagged := -1 // Index of first tagged field. + for i, f := range fields { + if len(f.index) > length { + fields = fields[:i] + break + } + if f.tag { + if tagged >= 0 { + // Multiple tagged fields at the same level: conflict. + // Return no field. + return field{}, false + } + tagged = i + } + } + if tagged >= 0 { + return fields[tagged], true + } + // All remaining fields have the same length. If there's more than one, + // we have a conflict (two fields named "X" at the same level) and we + // return no field. + if len(fields) > 1 { + return field{}, false + } + return fields[0], true +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. +func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. 
+ f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} diff --git a/vendor/github.com/golangci/go-tools/arg/arg.go b/vendor/github.com/golangci/go-tools/arg/arg.go new file mode 100644 index 000000000000..6f3aafc4e5f2 --- /dev/null +++ b/vendor/github.com/golangci/go-tools/arg/arg.go @@ -0,0 +1,40 @@ +package arg + +var args = map[string]int{ + "(*sync.Pool).Put.x": 0, + "(*text/template.Template).Parse.text": 0, + "(io.Seeker).Seek.offset": 0, + "(time.Time).Sub.u": 0, + "append.elems": 1, + "append.slice": 0, + "bytes.Equal.a": 0, + "bytes.Equal.b": 1, + "encoding/binary.Write.data": 2, + "errors.New.text": 0, + "fmt.Printf.format": 0, + "fmt.Fprintf.format": 1, + "fmt.Sprintf.a[0]": 1, + "fmt.Sprintf.format": 0, + "len.v": 0, + "make.size[0]": 1, + "make.size[1]": 2, + "make.t": 0, + "net/url.Parse.rawurl": 0, + "os.OpenFile.flag": 1, + "os/exec.Command.name": 0, + "os/signal.Notify.c": 0, + "regexp.Compile.expr": 0, + "runtime.SetFinalizer.finalizer": 1, + "runtime.SetFinalizer.obj": 0, + "sort.Sort.data": 0, + "time.Parse.layout": 0, + "time.Sleep.d": 0, +} + +func Arg(name string) int { + n, ok := args[name] + if !ok { + panic("unknown argument " + name) + } + return n +} diff --git a/vendor/github.com/golangci/tools/go/callgraph/callgraph.go b/vendor/github.com/golangci/go-tools/callgraph/callgraph.go similarity index 97% rename from vendor/github.com/golangci/tools/go/callgraph/callgraph.go rename to vendor/github.com/golangci/go-tools/callgraph/callgraph.go index 0a1272b435c0..713a80383f22 100644 --- a/vendor/github.com/golangci/tools/go/callgraph/callgraph.go +++ b/vendor/github.com/golangci/go-tools/callgraph/callgraph.go @@ -32,7 +32,7 @@ in the call graph; they are treated like built-in operators of the language. 
*/ -package callgraph // import "github.com/golangci/tools/go/callgraph" +package callgraph // import "github.com/golangci/go-tools/callgraph" // TODO(adonovan): add a function to eliminate wrappers from the // callgraph, preserving topology. @@ -43,7 +43,7 @@ import ( "fmt" "go/token" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) // A Graph represents a call graph. diff --git a/vendor/github.com/golangci/tools/go/callgraph/static/static.go b/vendor/github.com/golangci/go-tools/callgraph/static/static.go similarity index 83% rename from vendor/github.com/golangci/tools/go/callgraph/static/static.go rename to vendor/github.com/golangci/go-tools/callgraph/static/static.go index ac264242d147..6b4e1e3ba35b 100644 --- a/vendor/github.com/golangci/tools/go/callgraph/static/static.go +++ b/vendor/github.com/golangci/go-tools/callgraph/static/static.go @@ -1,11 +1,11 @@ // Package static computes the call graph of a Go program containing // only static call edges. 
-package static // import "github.com/golangci/tools/go/callgraph/static" +package static // import "github.com/golangci/go-tools/callgraph/static" import ( - "github.com/golangci/tools/go/callgraph" - "github.com/golangci/tools/go/ssa" - "github.com/golangci/tools/go/ssa/ssautil" + "github.com/golangci/go-tools/callgraph" + "github.com/golangci/go-tools/ssa" + "github.com/golangci/go-tools/ssa/ssautil" ) // CallGraph computes the call graph of the specified program diff --git a/vendor/github.com/golangci/tools/go/callgraph/util.go b/vendor/github.com/golangci/go-tools/callgraph/util.go similarity index 99% rename from vendor/github.com/golangci/tools/go/callgraph/util.go rename to vendor/github.com/golangci/go-tools/callgraph/util.go index 7d84efd1972e..2e8ded1dfa06 100644 --- a/vendor/github.com/golangci/tools/go/callgraph/util.go +++ b/vendor/github.com/golangci/go-tools/callgraph/util.go @@ -4,7 +4,7 @@ package callgraph -import "github.com/golangci/tools/go/ssa" +import "github.com/golangci/go-tools/ssa" // This file provides various utilities over call graphs, such as // visitation and path search. diff --git a/vendor/github.com/golangci/go-tools/config/config.go b/vendor/github.com/golangci/go-tools/config/config.go new file mode 100644 index 000000000000..112980b498dd --- /dev/null +++ b/vendor/github.com/golangci/go-tools/config/config.go @@ -0,0 +1,162 @@ +package config + +import ( + "os" + "path/filepath" + + "github.com/BurntSushi/toml" +) + +func mergeLists(a, b []string) []string { + out := make([]string, 0, len(a)+len(b)) + for _, el := range b { + if el == "inherit" { + out = append(out, a...) 
+ } else { + out = append(out, el) + } + } + + return out +} + +func normalizeList(list []string) []string { + if len(list) > 1 { + nlist := make([]string, 0, len(list)) + nlist = append(nlist, list[0]) + for i, el := range list[1:] { + if el != list[i] { + nlist = append(nlist, el) + } + } + list = nlist + } + + for _, el := range list { + if el == "inherit" { + // This should never happen, because the default config + // should not use "inherit" + panic(`unresolved "inherit"`) + } + } + + return list +} + +func (cfg Config) Merge(ocfg Config) Config { + if ocfg.Checks != nil { + cfg.Checks = mergeLists(cfg.Checks, ocfg.Checks) + } + if ocfg.Initialisms != nil { + cfg.Initialisms = mergeLists(cfg.Initialisms, ocfg.Initialisms) + } + if ocfg.DotImportWhitelist != nil { + cfg.DotImportWhitelist = mergeLists(cfg.DotImportWhitelist, ocfg.DotImportWhitelist) + } + if ocfg.HTTPStatusCodeWhitelist != nil { + cfg.HTTPStatusCodeWhitelist = mergeLists(cfg.HTTPStatusCodeWhitelist, ocfg.HTTPStatusCodeWhitelist) + } + return cfg +} + +type Config struct { + // TODO(dh): this implementation makes it impossible for external + // clients to add their own checkers with configuration. At the + // moment, we don't really care about that; we don't encourage + // that people use this package. In the future, we may. The + // obvious solution would be using map[string]interface{}, but + // that's obviously subpar. 
+ + Checks []string `toml:"checks"` + Initialisms []string `toml:"initialisms"` + DotImportWhitelist []string `toml:"dot_import_whitelist"` + HTTPStatusCodeWhitelist []string `toml:"http_status_code_whitelist"` +} + +var defaultConfig = Config{ + Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016"}, + Initialisms: []string{ + "ACL", "API", "ASCII", "CPU", "CSS", "DNS", + "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", + "IP", "JSON", "QPS", "RAM", "RPC", "SLA", + "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", + "UDP", "UI", "GID", "UID", "UUID", "URI", + "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", + "XSS", + }, + DotImportWhitelist: []string{}, + HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"}, +} + +const configName = "staticcheck.conf" + +func parseConfigs(dir string) ([]Config, error) { + var out []Config + + // TODO(dh): consider stopping at the GOPATH/module boundary + for dir != "" { + f, err := os.Open(filepath.Join(dir, configName)) + if os.IsNotExist(err) { + ndir := filepath.Dir(dir) + if ndir == dir { + break + } + dir = ndir + continue + } + if err != nil { + return nil, err + } + var cfg Config + _, err = toml.DecodeReader(f, &cfg) + f.Close() + if err != nil { + return nil, err + } + out = append(out, cfg) + ndir := filepath.Dir(dir) + if ndir == dir { + break + } + dir = ndir + } + out = append(out, defaultConfig) + if len(out) < 2 { + return out, nil + } + for i := 0; i < len(out)/2; i++ { + out[i], out[len(out)-1-i] = out[len(out)-1-i], out[i] + } + return out, nil +} + +func mergeConfigs(confs []Config) Config { + if len(confs) == 0 { + // This shouldn't happen because we always have at least a + // default config. 
+ panic("trying to merge zero configs") + } + if len(confs) == 1 { + return confs[0] + } + conf := confs[0] + for _, oconf := range confs[1:] { + conf = conf.Merge(oconf) + } + return conf +} + +func Load(dir string) (Config, error) { + confs, err := parseConfigs(dir) + if err != nil { + return Config{}, err + } + conf := mergeConfigs(confs) + + conf.Checks = normalizeList(conf.Checks) + conf.Initialisms = normalizeList(conf.Initialisms) + conf.DotImportWhitelist = normalizeList(conf.DotImportWhitelist) + conf.HTTPStatusCodeWhitelist = normalizeList(conf.HTTPStatusCodeWhitelist) + + return conf, nil +} diff --git a/vendor/github.com/golangci/go-tools/config/example.conf b/vendor/github.com/golangci/go-tools/config/example.conf new file mode 100644 index 000000000000..5ffc597f9964 --- /dev/null +++ b/vendor/github.com/golangci/go-tools/config/example.conf @@ -0,0 +1,10 @@ +checks = ["all", "-ST1003", "-ST1014"] +initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", + "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", + "IP", "JSON", "QPS", "RAM", "RPC", "SLA", + "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", + "UDP", "UI", "GID", "UID", "UUID", "URI", + "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", + "XSS"] +dot_import_whitelist = [] +http_status_code_whitelist = ["200", "400", "404", "500"] diff --git a/vendor/github.com/golangci/go-tools/functions/concrete.go b/vendor/github.com/golangci/go-tools/functions/concrete.go index 7cd720261182..4a47e9fa8667 100644 --- a/vendor/github.com/golangci/go-tools/functions/concrete.go +++ b/vendor/github.com/golangci/go-tools/functions/concrete.go @@ -4,7 +4,7 @@ import ( "go/token" "go/types" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) func concreteReturnTypes(fn *ssa.Function) []*types.Tuple { diff --git a/vendor/github.com/golangci/go-tools/functions/functions.go b/vendor/github.com/golangci/go-tools/functions/functions.go index 1f2a4bdf9f2a..e86796039038 
100644 --- a/vendor/github.com/golangci/go-tools/functions/functions.go +++ b/vendor/github.com/golangci/go-tools/functions/functions.go @@ -4,9 +4,9 @@ import ( "go/types" "sync" - "github.com/golangci/tools/go/callgraph" - "github.com/golangci/tools/go/callgraph/static" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/callgraph" + "github.com/golangci/go-tools/callgraph/static" + "github.com/golangci/go-tools/ssa" "github.com/golangci/go-tools/staticcheck/vrp" ) diff --git a/vendor/github.com/golangci/go-tools/functions/loops.go b/vendor/github.com/golangci/go-tools/functions/loops.go index 23c0b16c8106..377fab87c065 100644 --- a/vendor/github.com/golangci/go-tools/functions/loops.go +++ b/vendor/github.com/golangci/go-tools/functions/loops.go @@ -1,6 +1,6 @@ package functions -import "github.com/golangci/tools/go/ssa" +import "github.com/golangci/go-tools/ssa" type Loop map[*ssa.BasicBlock]bool diff --git a/vendor/github.com/golangci/go-tools/functions/pure.go b/vendor/github.com/golangci/go-tools/functions/pure.go index 3b20ac654410..ae084073fbed 100644 --- a/vendor/github.com/golangci/go-tools/functions/pure.go +++ b/vendor/github.com/golangci/go-tools/functions/pure.go @@ -4,9 +4,9 @@ import ( "go/token" "go/types" - "github.com/golangci/tools/go/callgraph" - "github.com/golangci/tools/go/ssa" - "github.com/golangci/go-tools/lint" + "github.com/golangci/go-tools/callgraph" + "github.com/golangci/go-tools/lint/lintdsl" + "github.com/golangci/go-tools/ssa" ) // IsStub reports whether a function is a stub. 
A function is @@ -20,7 +20,7 @@ func (d *Descriptions) IsStub(fn *ssa.Function) bool { if len(fn.Blocks) > 1 { return false } - instrs := lint.FilterDebug(fn.Blocks[0].Instrs) + instrs := lintdsl.FilterDebug(fn.Blocks[0].Instrs) if len(instrs) != 1 { return false } diff --git a/vendor/github.com/golangci/go-tools/functions/terminates.go b/vendor/github.com/golangci/go-tools/functions/terminates.go index 500d5531403d..0674edadc57f 100644 --- a/vendor/github.com/golangci/go-tools/functions/terminates.go +++ b/vendor/github.com/golangci/go-tools/functions/terminates.go @@ -1,6 +1,6 @@ package functions -import "github.com/golangci/tools/go/ssa" +import "github.com/golangci/go-tools/ssa" // terminates reports whether fn is supposed to return, that is if it // has at least one theoretic path that returns from the function. diff --git a/vendor/github.com/golangci/go-tools/internal/sharedcheck/lint.go b/vendor/github.com/golangci/go-tools/internal/sharedcheck/lint.go index 6f124ba9a2e3..0b2e91b1eccf 100644 --- a/vendor/github.com/golangci/go-tools/internal/sharedcheck/lint.go +++ b/vendor/github.com/golangci/go-tools/internal/sharedcheck/lint.go @@ -5,66 +5,64 @@ import ( "go/types" "github.com/golangci/go-tools/lint" - "github.com/golangci/tools/go/ssa" + . 
"github.com/golangci/go-tools/lint/lintdsl" + "github.com/golangci/go-tools/ssa" ) -func CheckRangeStringRunes(nodeFns map[ast.Node]*ssa.Function, j *lint.Job) { - fn := func(node ast.Node) bool { - rng, ok := node.(*ast.RangeStmt) - if !ok || !lint.IsBlank(rng.Key) { - return true - } - ssafn := nodeFns[rng] - if ssafn == nil { - return true - } - v, _ := ssafn.ValueForExpr(rng.X) +func CheckRangeStringRunes(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + fn := func(node ast.Node) bool { + rng, ok := node.(*ast.RangeStmt) + if !ok || !IsBlank(rng.Key) { + return true + } - // Check that we're converting from string to []rune - val, _ := v.(*ssa.Convert) - if val == nil { - return true - } - Tsrc, ok := val.X.Type().(*types.Basic) - if !ok || Tsrc.Kind() != types.String { - return true - } - Tdst, ok := val.Type().(*types.Slice) - if !ok { - return true - } - TdstElem, ok := Tdst.Elem().(*types.Basic) - if !ok || TdstElem.Kind() != types.Int32 { - return true - } + v, _ := ssafn.ValueForExpr(rng.X) - // Check that the result of the conversion is only used to - // range over - refs := val.Referrers() - if refs == nil { - return true - } + // Check that we're converting from string to []rune + val, _ := v.(*ssa.Convert) + if val == nil { + return true + } + Tsrc, ok := val.X.Type().(*types.Basic) + if !ok || Tsrc.Kind() != types.String { + return true + } + Tdst, ok := val.Type().(*types.Slice) + if !ok { + return true + } + TdstElem, ok := Tdst.Elem().(*types.Basic) + if !ok || TdstElem.Kind() != types.Int32 { + return true + } - // Expect two refs: one for obtaining the length of the slice, - // one for accessing the elements - if len(lint.FilterDebug(*refs)) != 2 { - // TODO(dh): right now, we check that only one place - // refers to our slice. This will miss cases such as - // ranging over the slice twice. 
Ideally, we'd ensure that - // the slice is only used for ranging over (without - // accessing the key), but that is harder to do because in - // SSA form, ranging over a slice looks like an ordinary - // loop with index increments and slice accesses. We'd - // have to look at the associated AST node to check that - // it's a range statement. - return true - } + // Check that the result of the conversion is only used to + // range over + refs := val.Referrers() + if refs == nil { + return true + } - j.Errorf(rng, "should range over string, not []rune(string)") + // Expect two refs: one for obtaining the length of the slice, + // one for accessing the elements + if len(FilterDebug(*refs)) != 2 { + // TODO(dh): right now, we check that only one place + // refers to our slice. This will miss cases such as + // ranging over the slice twice. Ideally, we'd ensure that + // the slice is only used for ranging over (without + // accessing the key), but that is harder to do because in + // SSA form, ranging over a slice looks like an ordinary + // loop with index increments and slice accesses. We'd + // have to look at the associated AST node to check that + // it's a range statement. 
+ return true + } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) + j.Errorf(rng, "should range over string, not []rune(string)") + + return true + } + Inspect(ssafn.Syntax(), fn) } } diff --git a/vendor/github.com/golangci/go-tools/lint/generated.go b/vendor/github.com/golangci/go-tools/lint/generated.go new file mode 100644 index 000000000000..d407223e7805 --- /dev/null +++ b/vendor/github.com/golangci/go-tools/lint/generated.go @@ -0,0 +1,33 @@ +package lint + +import ( + "bufio" + "bytes" + "io" +) + +var ( + prefix = []byte("// Code generated ") + suffix = []byte(" DO NOT EDIT.") + nl = []byte("\n") + crnl = []byte("\r\n") +) + +func isGenerated(r io.Reader) bool { + br := bufio.NewReader(r) + for { + s, err := br.ReadBytes('\n') + if err != nil && err != io.EOF { + return false + } + s = bytes.TrimSuffix(s, crnl) + s = bytes.TrimSuffix(s, nl) + if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) { + return true + } + if err == io.EOF { + break + } + } + return false +} diff --git a/vendor/github.com/golangci/go-tools/lint/lint.go b/vendor/github.com/golangci/go-tools/lint/lint.go index 2a12073ac1d7..e9aa7933fdb5 100644 --- a/vendor/github.com/golangci/go-tools/lint/lint.go +++ b/vendor/github.com/golangci/go-tools/lint/lint.go @@ -1,40 +1,34 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package lint provides the foundation for tools like gosimple. 
+// Package lint provides the foundation for tools like staticcheck package lint // import "github.com/golangci/go-tools/lint" import ( - "bytes" "fmt" "go/ast" - "go/build" - "go/constant" - "go/printer" "go/token" "go/types" + "io" + "os" "path/filepath" - "runtime" "sort" "strings" "sync" + "time" "unicode" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/loader" - "github.com/golangci/tools/go/ssa" - "github.com/golangci/tools/go/ssa/ssautil" + "golang.org/x/tools/go/packages" + "github.com/golangci/go-tools/config" + "github.com/golangci/go-tools/ssa" + "github.com/golangci/go-tools/ssa/ssautil" ) type Job struct { Program *Program checker string - check string + check Check problems []Problem + + duration time.Duration } type Ignore interface { @@ -94,7 +88,7 @@ type GlobIgnore struct { func (gi *GlobIgnore) Match(p Problem) bool { if gi.Pattern != "*" { - pkgpath := p.Package.Path() + pkgpath := p.Package.Types.Path() if strings.HasSuffix(pkgpath, "_test") { pkgpath = pkgpath[:len(pkgpath)-len("_test")] } @@ -112,31 +106,44 @@ func (gi *GlobIgnore) Match(p Problem) bool { } type Program struct { - SSA *ssa.Program - Prog *loader.Program - // TODO(dh): Rename to InitialPackages? - Packages []*Pkg + SSA *ssa.Program + InitialPackages []*Pkg InitialFunctions []*ssa.Function + AllPackages []*packages.Package AllFunctions []*ssa.Function Files []*ast.File - Info *types.Info GoVersion int tokenFileMap map[*token.File]*ast.File astFileMap map[*ast.File]*Pkg + packagesMap map[string]*packages.Package + + genMu sync.RWMutex + generatedMap map[string]bool +} + +func (prog *Program) Fset() *token.FileSet { + return prog.InitialPackages[0].Fset } type Func func(*Job) +type Severity uint8 + +const ( + Error Severity = iota + Warning + Ignored +) + // Problem represents a problem in some source code. 
type Problem struct { - pos token.Pos Position token.Position // position in source file Text string // the prose that describes the problem Check string Checker string - Package *types.Package - Ignored bool + Package *Pkg + Severity Severity } func (p *Problem) String() string { @@ -150,15 +157,25 @@ type Checker interface { Name() string Prefix() string Init(*Program) - Funcs() map[string]Func + Checks() []Check +} + +type Check struct { + Fn Func + ID string + FilterGenerated bool } // A Linter lints Go source code. type Linter struct { - Checker Checker + Checkers []Checker Ignores []Ignore GoVersion int ReturnIgnored bool + Config config.Config + + MaxConcurrentJobs int + PrintStats bool automaticIgnores []Ignore } @@ -196,116 +213,140 @@ func (j *Job) File(node Positioner) *ast.File { return j.Program.File(node) } -// TODO(dh): switch to sort.Slice when Go 1.9 lands. -type byPosition struct { - fset *token.FileSet - ps []Problem +func parseDirective(s string) (cmd string, args []string) { + if !strings.HasPrefix(s, "//lint:") { + return "", nil + } + s = strings.TrimPrefix(s, "//lint:") + fields := strings.Split(s, " ") + return fields[0], fields[1:] +} + +type PerfStats struct { + PackageLoading time.Duration + SSABuild time.Duration + OtherInitWork time.Duration + CheckerInits map[string]time.Duration + Jobs []JobStat } -func (ps byPosition) Len() int { - return len(ps.ps) +type JobStat struct { + Job string + Duration time.Duration } -func (ps byPosition) Less(i int, j int) bool { - pi, pj := ps.ps[i].Position, ps.ps[j].Position +func (stats *PerfStats) Print(w io.Writer) { + fmt.Fprintln(w, "Package loading:", stats.PackageLoading) + fmt.Fprintln(w, "SSA build:", stats.SSABuild) + fmt.Fprintln(w, "Other init work:", stats.OtherInitWork) - if pi.Filename != pj.Filename { - return pi.Filename < pj.Filename + fmt.Fprintln(w, "Checker inits:") + for checker, d := range stats.CheckerInits { + fmt.Fprintf(w, "\t%s: %s\n", checker, d) } - if pi.Line != pj.Line 
{ - return pi.Line < pj.Line - } - if pi.Column != pj.Column { - return pi.Column < pj.Column - } - - return ps.ps[i].Text < ps.ps[j].Text -} + fmt.Fprintln(w) -func (ps byPosition) Swap(i int, j int) { - ps.ps[i], ps.ps[j] = ps.ps[j], ps.ps[i] + fmt.Fprintln(w, "Jobs:") + sort.Slice(stats.Jobs, func(i, j int) bool { + return stats.Jobs[i].Duration < stats.Jobs[j].Duration + }) + var total time.Duration + for _, job := range stats.Jobs { + fmt.Fprintf(w, "\t%s: %s\n", job.Job, job.Duration) + total += job.Duration + } + fmt.Fprintf(w, "\tTotal: %s\n", total) } -func parseDirective(s string) (cmd string, args []string) { - if !strings.HasPrefix(s, "//lint:") { - return "", nil +func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { + allPkgs := allPackages(initial) + t := time.Now() + ssaprog, _ := ssautil.Packages(allPkgs, ssa.GlobalDebug) + ssaprog.Build() + if stats != nil { + stats.SSABuild = time.Since(t) } - s = strings.TrimPrefix(s, "//lint:") - fields := strings.Split(s, " ") - return fields[0], fields[1:] -} -func (l *Linter) Lint(lprog *loader.Program, ssaprog *ssa.Program, conf *loader.Config) []Problem { + t = time.Now() pkgMap := map[*ssa.Package]*Pkg{} var pkgs []*Pkg - for _, pkginfo := range lprog.InitialPackages() { - ssapkg := ssaprog.Package(pkginfo.Pkg) - var bp *build.Package - if len(pkginfo.Files) != 0 { - path := lprog.Fset.Position(pkginfo.Files[0].Pos()).Filename + for _, pkg := range initial { + ssapkg := ssaprog.Package(pkg.Types) + var cfg config.Config + if len(pkg.GoFiles) != 0 { + path := pkg.GoFiles[0] dir := filepath.Dir(path) var err error - ctx := conf.Build - if ctx == nil { - ctx = &build.Default - } - bp, err = ctx.ImportDir(dir, 0) + // OPT(dh): we're rebuilding the entire config tree for + // each package. for example, if we check a/b/c and + // a/b/c/d, we'll process a, a/b, a/b/c, a, a/b, a/b/c, + // a/b/c/d – we should cache configs per package and only + // load the new levels. 
+ cfg, err = config.Load(dir) if err != nil { - // shouldn't happen + // FIXME(dh): we couldn't load the config, what are we + // supposed to do? probably tell the user somehow } + cfg = cfg.Merge(l.Config) } + pkg := &Pkg{ - Package: ssapkg, - Info: pkginfo, - BuildPkg: bp, + SSA: ssapkg, + Package: pkg, + Config: cfg, } pkgMap[ssapkg] = pkg pkgs = append(pkgs, pkg) } + prog := &Program{ - SSA: ssaprog, - Prog: lprog, - Packages: pkgs, - Info: &types.Info{}, - GoVersion: l.GoVersion, - tokenFileMap: map[*token.File]*ast.File{}, - astFileMap: map[*ast.File]*Pkg{}, + SSA: ssaprog, + InitialPackages: pkgs, + AllPackages: allPkgs, + GoVersion: l.GoVersion, + tokenFileMap: map[*token.File]*ast.File{}, + astFileMap: map[*ast.File]*Pkg{}, + generatedMap: map[string]bool{}, + } + prog.packagesMap = map[string]*packages.Package{} + for _, pkg := range allPkgs { + prog.packagesMap[pkg.Types.Path()] = pkg } - initial := map[*types.Package]struct{}{} + isInitial := map[*types.Package]struct{}{} for _, pkg := range pkgs { - initial[pkg.Info.Pkg] = struct{}{} + isInitial[pkg.Types] = struct{}{} } for fn := range ssautil.AllFunctions(ssaprog) { if fn.Pkg == nil { continue } prog.AllFunctions = append(prog.AllFunctions, fn) - if _, ok := initial[fn.Pkg.Pkg]; ok { + if _, ok := isInitial[fn.Pkg.Pkg]; ok { prog.InitialFunctions = append(prog.InitialFunctions, fn) } } for _, pkg := range pkgs { - prog.Files = append(prog.Files, pkg.Info.Files...) + prog.Files = append(prog.Files, pkg.Syntax...) 
- ssapkg := ssaprog.Package(pkg.Info.Pkg) - for _, f := range pkg.Info.Files { + ssapkg := ssaprog.Package(pkg.Types) + for _, f := range pkg.Syntax { prog.astFileMap[f] = pkgMap[ssapkg] } } - for _, pkginfo := range lprog.AllPackages { - for _, f := range pkginfo.Files { - tf := lprog.Fset.File(f.Pos()) + for _, pkg := range allPkgs { + for _, f := range pkg.Syntax { + tf := pkg.Fset.File(f.Pos()) prog.tokenFileMap[tf] = f } } var out []Problem l.automaticIgnores = nil - for _, pkginfo := range lprog.InitialPackages() { - for _, f := range pkginfo.Files { - cm := ast.NewCommentMap(lprog.Fset, f, f.Comments) + for _, pkg := range initial { + for _, f := range pkg.Syntax { + cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) for node, cgs := range cm { for _, cg := range cgs { for _, c := range cg.List { @@ -318,11 +359,10 @@ func (l *Linter) Lint(lprog *loader.Program, ssaprog *ssa.Program, conf *loader. if len(args) < 2 { // FIXME(dh): this causes duplicated warnings when using megacheck p := Problem{ - pos: c.Pos(), Position: prog.DisplayPosition(c.Pos()), Text: "malformed linter directive; missing the required reason field?", Check: "", - Checker: l.Checker.Name(), + Checker: "lint", Package: nil, } out = append(out, p) @@ -365,75 +405,84 @@ func (l *Linter) Lint(lprog *loader.Program, ssaprog *ssa.Program, conf *loader. 
scopes int }{} for _, pkg := range pkgs { - sizes.types += len(pkg.Info.Info.Types) - sizes.defs += len(pkg.Info.Info.Defs) - sizes.uses += len(pkg.Info.Info.Uses) - sizes.implicits += len(pkg.Info.Info.Implicits) - sizes.selections += len(pkg.Info.Info.Selections) - sizes.scopes += len(pkg.Info.Info.Scopes) - } - prog.Info.Types = make(map[ast.Expr]types.TypeAndValue, sizes.types) - prog.Info.Defs = make(map[*ast.Ident]types.Object, sizes.defs) - prog.Info.Uses = make(map[*ast.Ident]types.Object, sizes.uses) - prog.Info.Implicits = make(map[ast.Node]types.Object, sizes.implicits) - prog.Info.Selections = make(map[*ast.SelectorExpr]*types.Selection, sizes.selections) - prog.Info.Scopes = make(map[ast.Node]*types.Scope, sizes.scopes) - for _, pkg := range pkgs { - for k, v := range pkg.Info.Info.Types { - prog.Info.Types[k] = v - } - for k, v := range pkg.Info.Info.Defs { - prog.Info.Defs[k] = v - } - for k, v := range pkg.Info.Info.Uses { - prog.Info.Uses[k] = v - } - for k, v := range pkg.Info.Info.Implicits { - prog.Info.Implicits[k] = v - } - for k, v := range pkg.Info.Info.Selections { - prog.Info.Selections[k] = v - } - for k, v := range pkg.Info.Info.Scopes { - prog.Info.Scopes[k] = v - } + sizes.types += len(pkg.TypesInfo.Types) + sizes.defs += len(pkg.TypesInfo.Defs) + sizes.uses += len(pkg.TypesInfo.Uses) + sizes.implicits += len(pkg.TypesInfo.Implicits) + sizes.selections += len(pkg.TypesInfo.Selections) + sizes.scopes += len(pkg.TypesInfo.Scopes) } - l.Checker.Init(prog) - funcs := l.Checker.Funcs() - var keys []string - for k := range funcs { - keys = append(keys, k) + if stats != nil { + stats.OtherInitWork = time.Since(t) + } + + for _, checker := range l.Checkers { + t := time.Now() + checker.Init(prog) + if stats != nil { + stats.CheckerInits[checker.Name()] = time.Since(t) + } } - sort.Strings(keys) var jobs []*Job - for _, k := range keys { - j := &Job{ - Program: prog, - checker: l.Checker.Name(), - check: k, + var allChecks []string + + for _, 
checker := range l.Checkers { + checks := checker.Checks() + for _, check := range checks { + allChecks = append(allChecks, check.ID) + j := &Job{ + Program: prog, + checker: checker.Name(), + check: check, + } + jobs = append(jobs, j) } - jobs = append(jobs, j) } + + max := len(jobs) + if l.MaxConcurrentJobs > 0 { + max = l.MaxConcurrentJobs + } + + sem := make(chan struct{}, max) wg := &sync.WaitGroup{} for _, j := range jobs { wg.Add(1) go func(j *Job) { defer wg.Done() - fn := funcs[j.check] + sem <- struct{}{} + defer func() { <-sem }() + fn := j.check.Fn if fn == nil { return } + t := time.Now() fn(j) + j.duration = time.Since(t) }(j) } wg.Wait() for _, j := range jobs { + if stats != nil { + stats.Jobs = append(stats.Jobs, JobStat{j.check.ID, j.duration}) + } for _, p := range j.problems { - p.Ignored = l.ignore(p) - if l.ReturnIgnored || !p.Ignored { + allowedChecks := FilterChecks(allChecks, p.Package.Config.Checks) + + if l.ignore(p) { + p.Severity = Ignored + } + // TODO(dh): support globs in check white/blacklist + // OPT(dh): this approach doesn't actually disable checks, + // it just discards their results. For the moment, that's + // fine. None of our checks are super expensive. In the + // future, we may want to provide opt-in expensive + // analysis, which shouldn't run at all. It may be easiest + // to implement this in the individual checks. + if (l.ReturnIgnored || p.Severity != Ignored) && allowedChecks[p.Check] { out = append(out, p) } } @@ -447,217 +496,195 @@ func (l *Linter) Lint(lprog *loader.Program, ssaprog *ssa.Program, conf *loader. 
if ig.matched { continue } - for _, c := range ig.Checks { - idx := strings.IndexFunc(c, func(r rune) bool { - return unicode.IsNumber(r) - }) - if idx == -1 { - // malformed check name, backing out - continue - } - if c[:idx] != l.Checker.Prefix() { - // not for this checker + + couldveMatched := false + for f, pkg := range prog.astFileMap { + if prog.Fset().Position(f.Pos()).Filename != ig.File { continue } - p := Problem{ - pos: ig.pos, - Position: prog.DisplayPosition(ig.pos), - Text: "this linter directive didn't match anything; should it be removed?", - Check: "", - Checker: l.Checker.Name(), - Package: nil, + allowedChecks := FilterChecks(allChecks, pkg.Config.Checks) + for _, c := range ig.Checks { + if !allowedChecks[c] { + continue + } + couldveMatched = true + break } - out = append(out, p) + break } + + if !couldveMatched { + // The ignored checks were disabled for the containing package. + // Don't flag the ignore for not having matched. + continue + } + p := Problem{ + Position: prog.DisplayPosition(ig.pos), + Text: "this linter directive didn't match anything; should it be removed?", + Check: "", + Checker: "lint", + Package: nil, + } + out = append(out, p) } - sort.Sort(byPosition{lprog.Fset, out}) - return out -} + sort.Slice(out, func(i int, j int) bool { + pi, pj := out[i].Position, out[j].Position -// Pkg represents a package being linted. 
-type Pkg struct { - *ssa.Package - Info *loader.PackageInfo - BuildPkg *build.Package -} + if pi.Filename != pj.Filename { + return pi.Filename < pj.Filename + } + if pi.Line != pj.Line { + return pi.Line < pj.Line + } + if pi.Column != pj.Column { + return pi.Column < pj.Column + } -type packager interface { - Package() *ssa.Package -} + return out[i].Text < out[j].Text + }) -func IsExample(fn *ssa.Function) bool { - if !strings.HasPrefix(fn.Name(), "Example") { - return false + if l.PrintStats && stats != nil { + stats.Print(os.Stderr) } - f := fn.Prog.Fset.File(fn.Pos()) - if f == nil { - return false - } - return strings.HasSuffix(f.Name(), "_test.go") -} -func (j *Job) IsInTest(node Positioner) bool { - f := j.Program.SSA.Fset.File(node.Pos()) - return f != nil && strings.HasSuffix(f.Name(), "_test.go") -} - -func (j *Job) IsInMain(node Positioner) bool { - if node, ok := node.(packager); ok { - return node.Package().Pkg.Name() == "main" - } - pkg := j.NodePackage(node) - if pkg == nil { - return false + if len(out) < 2 { + return out } - return pkg.Pkg.Name() == "main" -} -type Positioner interface { - Pos() token.Pos -} - -func (prog *Program) DisplayPosition(p token.Pos) token.Position { - // The //line compiler directive can be used to change the file - // name and line numbers associated with code. This can, for - // example, be used by code generation tools. The most prominent - // example is 'go tool cgo', which uses //line directives to refer - // back to the original source code. - // - // In the context of our linters, we need to treat these - // directives differently depending on context. For cgo files, we - // want to honour the directives, so that line numbers are - // adjusted correctly. For all other files, we want to ignore the - // directives, so that problems are reported at their actual - // position and not, for example, a yacc grammar file. 
This also - // affects the ignore mechanism, since it operates on the position - // information stored within problems. With this implementation, a - // user will ignore foo.go, not foo.y - - pkg := prog.astFileMap[prog.tokenFileMap[prog.Prog.Fset.File(p)]] - bp := pkg.BuildPkg - adjPos := prog.Prog.Fset.Position(p) - if bp == nil { - // couldn't find the package for some reason (deleted? faulty - // file system?) - return adjPos - } - base := filepath.Base(adjPos.Filename) - for _, f := range bp.CgoFiles { - if f == base { - // this is a cgo file, use the adjusted position - return adjPos + uniq := make([]Problem, 0, len(out)) + uniq = append(uniq, out[0]) + prev := out[0] + for _, p := range out[1:] { + if prev.Position == p.Position && prev.Text == p.Text { + continue } + prev = p + uniq = append(uniq, p) } - // not a cgo file, ignore //line directives - return prog.Prog.Fset.PositionFor(p, false) -} - -func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { - tf := j.Program.SSA.Fset.File(n.Pos()) - f := j.Program.tokenFileMap[tf] - pkg := j.Program.astFileMap[f].Pkg - pos := j.Program.DisplayPosition(n.Pos()) - problem := Problem{ - pos: n.Pos(), - Position: pos, - Text: fmt.Sprintf(format, args...), - Check: j.check, - Checker: j.checker, - Package: pkg, - } - j.problems = append(j.problems, problem) - return &j.problems[len(j.problems)-1] + return uniq } -func (j *Job) Render(x interface{}) string { - fset := j.Program.SSA.Fset - var buf bytes.Buffer - if err := printer.Fprint(&buf, fset, x); err != nil { - panic(err) - } - return buf.String() -} +func FilterChecks(allChecks []string, checks []string) map[string]bool { + // OPT(dh): this entire computation could be cached per package + allowedChecks := map[string]bool{} -func (j *Job) RenderArgs(args []ast.Expr) string { - var ss []string - for _, arg := range args { - ss = append(ss, j.Render(arg)) + for _, check := range checks { + b := true + if len(check) > 1 && check[0] == '-' 
{ + b = false + check = check[1:] + } + if check == "*" || check == "all" { + // Match all + for _, c := range allChecks { + allowedChecks[c] = b + } + } else if strings.HasSuffix(check, "*") { + // Glob + prefix := check[:len(check)-1] + isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 + + for _, c := range allChecks { + idx := strings.IndexFunc(c, func(r rune) bool { return unicode.IsNumber(r) }) + if isCat { + // Glob is S*, which should match S1000 but not SA1000 + cat := c[:idx] + if prefix == cat { + allowedChecks[c] = b + } + } else { + // Glob is S1* + if strings.HasPrefix(c, prefix) { + allowedChecks[c] = b + } + } + } + } else { + // Literal check name + allowedChecks[check] = b + } } - return strings.Join(ss, ", ") + return allowedChecks } -func IsIdent(expr ast.Expr, ident string) bool { - id, ok := expr.(*ast.Ident) - return ok && id.Name == ident +func (prog *Program) Package(path string) *packages.Package { + return prog.packagesMap[path] } -// isBlank returns whether id is the blank identifier "_". -// If id == nil, the answer is false. -func IsBlank(id ast.Expr) bool { - ident, ok := id.(*ast.Ident) - return ok && ident.Name == "_" +// Pkg represents a package being linted. +type Pkg struct { + SSA *ssa.Package + *packages.Package + Config config.Config } -func IsZero(expr ast.Expr) bool { - lit, ok := expr.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == "0" +type Positioner interface { + Pos() token.Pos } -func (j *Job) IsNil(expr ast.Expr) bool { - return j.Program.Info.Types[expr].IsNil() -} +func (prog *Program) DisplayPosition(p token.Pos) token.Position { + // Only use the adjusted position if it points to another Go file. + // This means we'll point to the original file for cgo files, but + // we won't point to a YACC grammar file. 
-func (j *Job) BoolConst(expr ast.Expr) bool { - val := j.Program.Info.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() - return constant.BoolVal(val) + pos := prog.Fset().PositionFor(p, false) + adjPos := prog.Fset().PositionFor(p, true) + + if filepath.Ext(adjPos.Filename) == ".go" { + return adjPos + } + return pos } -func (j *Job) IsBoolConst(expr ast.Expr) bool { - // We explicitly don't support typed bools because more often than - // not, custom bool types are used as binary enums and the - // explicit comparison is desired. +func (prog *Program) isGenerated(path string) bool { + // This function isn't very efficient in terms of lock contention + // and lack of parallelism, but it really shouldn't matter. + // Projects consists of thousands of files, and have hundreds of + // errors. That's not a lot of calls to isGenerated. - ident, ok := expr.(*ast.Ident) - if !ok { - return false + prog.genMu.RLock() + if b, ok := prog.generatedMap[path]; ok { + prog.genMu.RUnlock() + return b } - obj := j.Program.Info.ObjectOf(ident) - c, ok := obj.(*types.Const) - if !ok { - return false - } - basic, ok := c.Type().(*types.Basic) - if !ok { - return false + prog.genMu.RUnlock() + prog.genMu.Lock() + defer prog.genMu.Unlock() + // recheck to avoid doing extra work in case of race + if b, ok := prog.generatedMap[path]; ok { + return b } - if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool { + + f, err := os.Open(path) + if err != nil { return false } - return true + defer f.Close() + b := isGenerated(f) + prog.generatedMap[path] = b + return b } -func (j *Job) ExprToInt(expr ast.Expr) (int64, bool) { - tv := j.Program.Info.Types[expr] - if tv.Value == nil { - return 0, false - } - if tv.Value.Kind() != constant.Int { - return 0, false - } - return constant.Int64Val(tv.Value) -} +func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { + tf := j.Program.SSA.Fset.File(n.Pos()) + f := j.Program.tokenFileMap[tf] + pkg := 
j.Program.astFileMap[f] -func (j *Job) ExprToString(expr ast.Expr) (string, bool) { - val := j.Program.Info.Types[expr].Value - if val == nil { - return "", false + pos := j.Program.DisplayPosition(n.Pos()) + if j.Program.isGenerated(pos.Filename) && j.check.FilterGenerated { + return nil } - if val.Kind() != constant.String { - return "", false + problem := Problem{ + Position: pos, + Text: fmt.Sprintf(format, args...), + Check: j.check.ID, + Checker: j.checker, + Package: pkg, } - return constant.StringVal(val), true + j.problems = append(j.problems, problem) + return &j.problems[len(j.problems)-1] } func (j *Job) NodePackage(node Positioner) *Pkg { @@ -665,194 +692,15 @@ func (j *Job) NodePackage(node Positioner) *Pkg { return j.Program.astFileMap[f] } -func IsGenerated(f *ast.File) bool { - comments := f.Comments - if len(comments) > 0 { - comment := comments[0].Text() - return strings.Contains(comment, "Code generated by") || - strings.Contains(comment, "DO NOT EDIT") - } - return false -} - -func Preamble(f *ast.File) string { - cutoff := f.Package - if f.Doc != nil { - cutoff = f.Doc.Pos() - } - var out []string - for _, cmt := range f.Comments { - if cmt.Pos() >= cutoff { - break - } - out = append(out, cmt.Text()) - } - return strings.Join(out, "\n") -} - -func IsPointerLike(T types.Type) bool { - switch T := T.Underlying().(type) { - case *types.Interface, *types.Chan, *types.Map, *types.Pointer: - return true - case *types.Basic: - return T.Kind() == types.UnsafePointer - } - return false -} - -func (j *Job) IsGoVersion(minor int) bool { - return j.Program.GoVersion >= minor -} - -func (j *Job) IsCallToAST(node ast.Node, name string) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return false - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) - return ok && fn.FullName() == name -} - -func (j *Job) IsCallToAnyAST(node ast.Node, names ...string) bool { - for _, 
name := range names { - if j.IsCallToAST(node, name) { +func allPackages(pkgs []*packages.Package) []*packages.Package { + var out []*packages.Package + packages.Visit( + pkgs, + func(pkg *packages.Package) bool { + out = append(out, pkg) return true - } - } - return false -} - -func (j *Job) SelectorName(expr *ast.SelectorExpr) string { - sel := j.Program.Info.Selections[expr] - if sel == nil { - if x, ok := expr.X.(*ast.Ident); ok { - pkg, ok := j.Program.Info.ObjectOf(x).(*types.PkgName) - if !ok { - // This shouldn't happen - return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name) - } - return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name) - } - panic(fmt.Sprintf("unsupported selector: %v", expr)) - } - return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) -} - -func CallName(call *ssa.CallCommon) string { - if call.IsInvoke() { - return "" - } - switch v := call.Value.(type) { - case *ssa.Function: - fn, ok := v.Object().(*types.Func) - if !ok { - return "" - } - return fn.FullName() - case *ssa.Builtin: - return v.Name() - } - return "" -} - -func IsCallTo(call *ssa.CallCommon, name string) bool { - return CallName(call) == name -} - -func FilterDebug(instr []ssa.Instruction) []ssa.Instruction { - var out []ssa.Instruction - for _, ins := range instr { - if _, ok := ins.(*ssa.DebugRef); !ok { - out = append(out, ins) - } - } + }, + nil, + ) return out } - -func NodeFns(pkgs []*Pkg) map[ast.Node]*ssa.Function { - out := map[ast.Node]*ssa.Function{} - - wg := &sync.WaitGroup{} - chNodeFns := make(chan map[ast.Node]*ssa.Function, runtime.NumCPU()*2) - for _, pkg := range pkgs { - pkg := pkg - wg.Add(1) - go func() { - m := map[ast.Node]*ssa.Function{} - for _, f := range pkg.Info.Files { - ast.Walk(&globalVisitor{m, pkg, f}, f) - } - chNodeFns <- m - wg.Done() - }() - } - go func() { - wg.Wait() - close(chNodeFns) - }() - - for nodeFns := range chNodeFns { - for k, v := range nodeFns { - out[k] = v - } - } - - return out -} - -type globalVisitor 
struct { - m map[ast.Node]*ssa.Function - pkg *Pkg - f *ast.File -} - -func (v *globalVisitor) Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.CallExpr: - v.m[node] = v.pkg.Func("init") - return v - case *ast.FuncDecl, *ast.FuncLit: - nv := &fnVisitor{v.m, v.f, v.pkg, nil} - return nv.Visit(node) - default: - return v - } -} - -type fnVisitor struct { - m map[ast.Node]*ssa.Function - f *ast.File - pkg *Pkg - ssafn *ssa.Function -} - -func (v *fnVisitor) Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.FuncDecl: - var ssafn *ssa.Function - ssafn = v.pkg.Prog.FuncValue(v.pkg.Info.ObjectOf(node.Name).(*types.Func)) - v.m[node] = ssafn - if ssafn == nil { - return nil - } - return &fnVisitor{v.m, v.f, v.pkg, ssafn} - case *ast.FuncLit: - var ssafn *ssa.Function - path, _ := astutil.PathEnclosingInterval(v.f, node.Pos(), node.Pos()) - ssafn = ssa.EnclosingFunction(v.pkg.Package, path) - v.m[node] = ssafn - if ssafn == nil { - return nil - } - return &fnVisitor{v.m, v.f, v.pkg, ssafn} - case nil: - return nil - default: - v.m[node] = v.ssafn - return v - } -} diff --git a/vendor/github.com/golangci/go-tools/lint/lintdsl/lintdsl.go b/vendor/github.com/golangci/go-tools/lint/lintdsl/lintdsl.go new file mode 100644 index 000000000000..4ab5d84031cb --- /dev/null +++ b/vendor/github.com/golangci/go-tools/lint/lintdsl/lintdsl.go @@ -0,0 +1,342 @@ +// Package lintdsl provides helpers for implementing static analysis +// checks. Dot-importing this package is encouraged. 
+package lintdsl + +import ( + "bytes" + "fmt" + "go/ast" + "go/constant" + "go/printer" + "go/token" + "go/types" + "strings" + + "github.com/golangci/go-tools/lint" + "github.com/golangci/go-tools/ssa" +) + +type packager interface { + Package() *ssa.Package +} + +func CallName(call *ssa.CallCommon) string { + if call.IsInvoke() { + return "" + } + switch v := call.Value.(type) { + case *ssa.Function: + fn, ok := v.Object().(*types.Func) + if !ok { + return "" + } + return fn.FullName() + case *ssa.Builtin: + return v.Name() + } + return "" +} + +func IsCallTo(call *ssa.CallCommon, name string) bool { return CallName(call) == name } +func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name } + +func FilterDebug(instr []ssa.Instruction) []ssa.Instruction { + var out []ssa.Instruction + for _, ins := range instr { + if _, ok := ins.(*ssa.DebugRef); !ok { + out = append(out, ins) + } + } + return out +} + +func IsExample(fn *ssa.Function) bool { + if !strings.HasPrefix(fn.Name(), "Example") { + return false + } + f := fn.Prog.Fset.File(fn.Pos()) + if f == nil { + return false + } + return strings.HasSuffix(f.Name(), "_test.go") +} + +func IsPointerLike(T types.Type) bool { + switch T := T.Underlying().(type) { + case *types.Interface, *types.Chan, *types.Map, *types.Pointer: + return true + case *types.Basic: + return T.Kind() == types.UnsafePointer + } + return false +} + +func IsGenerated(f *ast.File) bool { + comments := f.Comments + if len(comments) > 0 { + comment := comments[0].Text() + return strings.Contains(comment, "Code generated by") || + strings.Contains(comment, "DO NOT EDIT") + } + return false +} + +func IsIdent(expr ast.Expr, ident string) bool { + id, ok := expr.(*ast.Ident) + return ok && id.Name == ident +} + +// isBlank returns whether id is the blank identifier "_". +// If id == nil, the answer is false. 
+func IsBlank(id ast.Expr) bool { + ident, _ := id.(*ast.Ident) + return ident != nil && ident.Name == "_" +} + +func IsIntLiteral(expr ast.Expr, literal string) bool { + lit, ok := expr.(*ast.BasicLit) + return ok && lit.Kind == token.INT && lit.Value == literal +} + +// Deprecated: use IsIntLiteral instead +func IsZero(expr ast.Expr) bool { + return IsIntLiteral(expr, "0") +} + +func TypeOf(j *lint.Job, expr ast.Expr) types.Type { + if expr == nil { + return nil + } + return j.NodePackage(expr).TypesInfo.TypeOf(expr) +} + +func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { return IsType(TypeOf(j, expr), name) } + +func ObjectOf(j *lint.Job, ident *ast.Ident) types.Object { + if ident == nil { + return nil + } + return j.NodePackage(ident).TypesInfo.ObjectOf(ident) +} + +func IsInTest(j *lint.Job, node lint.Positioner) bool { + // FIXME(dh): this doesn't work for global variables with + // initializers + f := j.Program.SSA.Fset.File(node.Pos()) + return f != nil && strings.HasSuffix(f.Name(), "_test.go") +} + +func IsInMain(j *lint.Job, node lint.Positioner) bool { + if node, ok := node.(packager); ok { + return node.Package().Pkg.Name() == "main" + } + pkg := j.NodePackage(node) + if pkg == nil { + return false + } + return pkg.Types.Name() == "main" +} + +func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { + info := j.NodePackage(expr).TypesInfo + sel := info.Selections[expr] + if sel == nil { + if x, ok := expr.X.(*ast.Ident); ok { + pkg, ok := info.ObjectOf(x).(*types.PkgName) + if !ok { + // This shouldn't happen + return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name) + } + return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name) + } + panic(fmt.Sprintf("unsupported selector: %v", expr)) + } + return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) +} + +func IsNil(j *lint.Job, expr ast.Expr) bool { + return j.NodePackage(expr).TypesInfo.Types[expr].IsNil() +} + +func BoolConst(j *lint.Job, expr ast.Expr) bool { + val := 
j.NodePackage(expr).TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() + return constant.BoolVal(val) +} + +func IsBoolConst(j *lint.Job, expr ast.Expr) bool { + // We explicitly don't support typed bools because more often than + // not, custom bool types are used as binary enums and the + // explicit comparison is desired. + + ident, ok := expr.(*ast.Ident) + if !ok { + return false + } + obj := j.NodePackage(expr).TypesInfo.ObjectOf(ident) + c, ok := obj.(*types.Const) + if !ok { + return false + } + basic, ok := c.Type().(*types.Basic) + if !ok { + return false + } + if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool { + return false + } + return true +} + +func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { + tv := j.NodePackage(expr).TypesInfo.Types[expr] + if tv.Value == nil { + return 0, false + } + if tv.Value.Kind() != constant.Int { + return 0, false + } + return constant.Int64Val(tv.Value) +} + +func ExprToString(j *lint.Job, expr ast.Expr) (string, bool) { + val := j.NodePackage(expr).TypesInfo.Types[expr].Value + if val == nil { + return "", false + } + if val.Kind() != constant.String { + return "", false + } + return constant.StringVal(val), true +} + +// Dereference returns a pointer's element type; otherwise it returns +// T. +func Dereference(T types.Type) types.Type { + if p, ok := T.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return T +} + +// DereferenceR returns a pointer's element type; otherwise it returns +// T. If the element type is itself a pointer, DereferenceR will be +// applied recursively. 
+func DereferenceR(T types.Type) types.Type { + if p, ok := T.Underlying().(*types.Pointer); ok { + return DereferenceR(p.Elem()) + } + return T +} + +func IsGoVersion(j *lint.Job, minor int) bool { + return j.Program.GoVersion >= minor +} + +func CallNameAST(j *lint.Job, call *ast.CallExpr) string { + switch fun := call.Fun.(type) { + case *ast.SelectorExpr: + fn, ok := ObjectOf(j, fun.Sel).(*types.Func) + if !ok { + return "" + } + return fn.FullName() + case *ast.Ident: + obj := ObjectOf(j, fun) + switch obj := obj.(type) { + case *types.Func: + return obj.FullName() + case *types.Builtin: + return obj.Name() + default: + return "" + } + default: + return "" + } +} + +func IsCallToAST(j *lint.Job, node ast.Node, name string) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return false + } + return CallNameAST(j, call) == name +} + +func IsCallToAnyAST(j *lint.Job, node ast.Node, names ...string) bool { + for _, name := range names { + if IsCallToAST(j, node, name) { + return true + } + } + return false +} + +func Render(j *lint.Job, x interface{}) string { + fset := j.Program.SSA.Fset + var buf bytes.Buffer + if err := printer.Fprint(&buf, fset, x); err != nil { + panic(err) + } + return buf.String() +} + +func RenderArgs(j *lint.Job, args []ast.Expr) string { + var ss []string + for _, arg := range args { + ss = append(ss, Render(j, arg)) + } + return strings.Join(ss, ", ") +} + +func Preamble(f *ast.File) string { + cutoff := f.Package + if f.Doc != nil { + cutoff = f.Doc.Pos() + } + var out []string + for _, cmt := range f.Comments { + if cmt.Pos() >= cutoff { + break + } + out = append(out, cmt.Text()) + } + return strings.Join(out, "\n") +} + +func Inspect(node ast.Node, fn func(node ast.Node) bool) { + if node == nil { + return + } + ast.Inspect(node, fn) +} + +func GroupSpecs(j *lint.Job, specs []ast.Spec) [][]ast.Spec { + if len(specs) == 0 { + return nil + } + fset := j.Program.SSA.Fset + groups := make([][]ast.Spec, 1) + groups[0] = 
append(groups[0], specs[0]) + + for _, spec := range specs[1:] { + g := groups[len(groups)-1] + if fset.PositionFor(spec.Pos(), false).Line-1 != + fset.PositionFor(g[len(g)-1].End(), false).Line { + + groups = append(groups, nil) + } + + groups[len(groups)-1] = append(groups[len(groups)-1], spec) + } + + return groups +} + +func IsObject(obj types.Object, name string) bool { + var path string + if pkg := obj.Pkg(); pkg != nil { + path = pkg.Path() + "." + } + return path+obj.Name() == name +} diff --git a/vendor/github.com/golangci/go-tools/lint/lintutil/format/format.go b/vendor/github.com/golangci/go-tools/lint/lintutil/format/format.go new file mode 100644 index 000000000000..96befb1cc317 --- /dev/null +++ b/vendor/github.com/golangci/go-tools/lint/lintutil/format/format.go @@ -0,0 +1,128 @@ +// Package format provides formatters for linter problems. +package format + +import ( + "encoding/json" + "fmt" + "go/token" + "io" + "os" + "path/filepath" + "text/tabwriter" + + "github.com/golangci/go-tools/lint" +) + +func shortPath(path string) string { + cwd, err := os.Getwd() + if err != nil { + return path + } + if rel, err := filepath.Rel(cwd, path); err == nil && len(rel) < len(path) { + return rel + } + return path +} + +func relativePositionString(pos token.Position) string { + s := shortPath(pos.Filename) + if pos.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", pos.Line, pos.Column) + } + if s == "" { + s = "-" + } + return s +} + +type Statter interface { + Stats(total, errors, warnings int) +} + +type Formatter interface { + Format(p lint.Problem) +} + +type Text struct { + W io.Writer +} + +func (o Text) Format(p lint.Problem) { + fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Position), p.String()) +} + +type JSON struct { + W io.Writer +} + +func severity(s lint.Severity) string { + switch s { + case lint.Error: + return "error" + case lint.Warning: + return "warning" + case lint.Ignored: + return "ignored" + } 
+ return "" +} + +func (o JSON) Format(p lint.Problem) { + type location struct { + File string `json:"file"` + Line int `json:"line"` + Column int `json:"column"` + } + jp := struct { + Code string `json:"code"` + Severity string `json:"severity,omitempty"` + Location location `json:"location"` + Message string `json:"message"` + }{ + Code: p.Check, + Severity: severity(p.Severity), + Location: location{ + File: p.Position.Filename, + Line: p.Position.Line, + Column: p.Position.Column, + }, + Message: p.Text, + } + _ = json.NewEncoder(o.W).Encode(jp) +} + +type Stylish struct { + W io.Writer + + prevFile string + tw *tabwriter.Writer +} + +func (o *Stylish) Format(p lint.Problem) { + if p.Position.Filename == "" { + p.Position.Filename = "-" + } + + if p.Position.Filename != o.prevFile { + if o.prevFile != "" { + o.tw.Flush() + fmt.Fprintln(o.W) + } + fmt.Fprintln(o.W, p.Position.Filename) + o.prevFile = p.Position.Filename + o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0) + } + fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", p.Position.Line, p.Position.Column, p.Check, p.Text) +} + +func (o *Stylish) Stats(total, errors, warnings int) { + if o.tw != nil { + o.tw.Flush() + fmt.Fprintln(o.W) + } + fmt.Fprintf(o.W, " ✖ %d problems (%d errors, %d warnings)\n", + total, errors, warnings) +} diff --git a/vendor/github.com/golangci/go-tools/lint/lintutil/util.go b/vendor/github.com/golangci/go-tools/lint/lintutil/util.go index da2c753f73d6..a667c6a8c00c 100644 --- a/vendor/github.com/golangci/go-tools/lint/lintutil/util.go +++ b/vendor/github.com/golangci/go-tools/lint/lintutil/util.go @@ -8,68 +8,28 @@ package lintutil // import "github.com/golangci/go-tools/lint/lintutil" import ( - "encoding/json" "errors" "flag" "fmt" "go/build" "go/token" - "io" + "log" "os" - "path/filepath" + "regexp" + "runtime" + "runtime/pprof" "strconv" "strings" + "time" + "github.com/golangci/go-tools/config" "github.com/golangci/go-tools/lint" - 
"github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/lint/lintutil/format" "github.com/golangci/go-tools/version" - "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" ) -type OutputFormatter interface { - Format(p lint.Problem) -} - -type TextOutput struct { - w io.Writer -} - -func (o TextOutput) Format(p lint.Problem) { - fmt.Fprintf(o.w, "%v: %s\n", relativePositionString(p.Position), p.String()) -} - -type JSONOutput struct { - w io.Writer -} - -func (o JSONOutput) Format(p lint.Problem) { - type location struct { - File string `json:"file"` - Line int `json:"line"` - Column int `json:"column"` - } - jp := struct { - Checker string `json:"checker"` - Code string `json:"code"` - Severity string `json:"severity,omitempty"` - Location location `json:"location"` - Message string `json:"message"` - Ignored bool `json:"ignored"` - }{ - p.Checker, - p.Check, - "", // TODO(dh): support severity - location{ - p.Position.Filename, - p.Position.Line, - p.Position.Column, - }, - p.Text, - p.Ignored, - } - _ = json.NewEncoder(o.w).Encode(jp) -} func usage(name string, flags *flag.FlagSet) func() { return func() { fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) @@ -82,38 +42,6 @@ func usage(name string, flags *flag.FlagSet) func() { } } -type runner struct { - checker lint.Checker - tags []string - ignores []lint.Ignore - version int - returnIgnored bool -} - -func resolveRelative(importPaths []string, tags []string) (goFiles bool, err error) { - if len(importPaths) == 0 { - return false, nil - } - if strings.HasSuffix(importPaths[0], ".go") { - // User is specifying a package in terms of .go files, don't resolve - return true, nil - } - wd, err := os.Getwd() - if err != nil { - return false, err - } - ctx := build.Default - ctx.BuildTags = tags - for i, path := range importPaths { - bpkg, err := ctx.Import(path, wd, build.FindOnly) - if err != nil { - return false, fmt.Errorf("can't load package %q: %v", path, err) - } - 
importPaths[i] = bpkg.ImportPath - } - return false, nil -} - func parseIgnore(s string) ([]lint.Ignore, error) { var out []lint.Ignore if len(s) == 0 { @@ -156,16 +84,41 @@ func (v *versionFlag) Get() interface{} { return int(*v) } +type list []string + +func (list *list) String() string { + return `"` + strings.Join(*list, ",") + `"` +} + +func (list *list) Set(s string) error { + if s == "" { + *list = nil + return nil + } + + *list = strings.Split(s, ",") + return nil +} + func FlagSet(name string) *flag.FlagSet { flags := flag.NewFlagSet("", flag.ExitOnError) flags.Usage = usage(name, flags) - flags.Float64("min_confidence", 0, "Deprecated; use -ignore instead") flags.String("tags", "", "List of `build tags`") - flags.String("ignore", "", "Space separated list of checks to ignore, in the following format: 'import/path/file.go:Check1,Check2,...' Both the import path and file name sections support globbing, e.g. 'os/exec/*_test.go'") + flags.String("ignore", "", "Deprecated: use linter directives instead") flags.Bool("tests", true, "Include tests") flags.Bool("version", false, "Print version and exit") flags.Bool("show-ignored", false, "Don't filter ignored problems") - flags.String("f", "text", "Output `format` (valid choices are 'text' and 'json')") + flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')") + + flags.Int("debug.max-concurrent-jobs", 0, "Number of jobs to run concurrently") + flags.Bool("debug.print-stats", false, "Print debug statistics") + flags.String("debug.cpuprofile", "", "Write CPU profile to `file`") + flags.String("debug.memprofile", "", "Write memory profile to `file`") + + checks := list{"inherit"} + fail := list{"all"} + flags.Var(&checks, "checks", "Comma-separated list of `checks` to enable.") + flags.Var(&fail, "fail", "Comma-separated list of `checks` that can cause a non-zero exit status.") tags := build.Default.ReleaseTags v := tags[len(tags)-1][2:] @@ -178,56 +131,129 @@ func FlagSet(name 
string) *flag.FlagSet { return flags } -type CheckerConfig struct { - Checker lint.Checker - ExitNonZero bool -} - -func ProcessFlagSet(confs []CheckerConfig, fs *flag.FlagSet, lprog *loader.Program, ssaProg *ssa.Program, conf *loader.Config) []lint.Problem { +func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int) + formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string) printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool) showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) + maxConcurrentJobs := fs.Lookup("debug.max-concurrent-jobs").Value.(flag.Getter).Get().(int) + printStats := fs.Lookup("debug.print-stats").Value.(flag.Getter).Get().(bool) + cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string) + memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) + + cfg := config.Config{} + cfg.Checks = *fs.Lookup("checks").Value.(*list) + + exit := func(code int) { + if cpuProfile != "" { + pprof.StopCPUProfile() + } + if memProfile != "" { + f, err := os.Create(memProfile) + if err != nil { + panic(err) + } + runtime.GC() + pprof.WriteHeapProfile(f) + } + os.Exit(code) + } + if cpuProfile != "" { + f, err := os.Create(cpuProfile) + if err != nil { + log.Fatal(err) + } + pprof.StartCPUProfile(f) + } + if printVersion { version.Print() - os.Exit(0) + exit(0) } - var cs []lint.Checker - for _, conf := range confs { - cs = append(cs, conf.Checker) - } - pss, err := Lint(cs, lprog, ssaProg, conf, &Options{ + ps, err := Lint(cs, fs.Args(), &Options{ Tags: strings.Fields(tags), LintTests: tests, Ignores: ignore, GoVersion: goVersion, ReturnIgnored: showIgnored, + Config: cfg, + + MaxConcurrentJobs: maxConcurrentJobs, + PrintStats: 
printStats, }) if err != nil { - panic(err) + fmt.Fprintln(os.Stderr, err) + exit(1) } - var ps []lint.Problem - for _, p := range pss { - ps = append(ps, p...) + var f format.Formatter + switch formatter { + case "text": + f = format.Text{W: os.Stdout} + case "stylish": + f = &format.Stylish{W: os.Stdout} + case "json": + f = format.JSON{W: os.Stdout} + default: + fmt.Fprintf(os.Stderr, "unsupported output format %q\n", formatter) + exit(2) } - return ps + var ( + total int + errors int + warnings int + ) + + fail := *fs.Lookup("fail").Value.(*list) + var allChecks []string + for _, p := range ps { + allChecks = append(allChecks, p.Check) + } + + shouldExit := lint.FilterChecks(allChecks, fail) + + total = len(ps) + for _, p := range ps { + if shouldExit[p.Check] { + errors++ + } else { + p.Severity = lint.Warning + warnings++ + } + f.Format(p) + } + if f, ok := f.(format.Statter); ok { + f.Stats(total, errors, warnings) + } + if errors > 0 { + exit(1) + } } type Options struct { + Config config.Config + Tags []string LintTests bool Ignores string GoVersion int ReturnIgnored bool + + MaxConcurrentJobs int + PrintStats bool } -func Lint(cs []lint.Checker, lprog *loader.Program, ssaProg *ssa.Program, conf *loader.Config, opt *Options) ([][]lint.Problem, error) { +func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, error) { + stats := lint.PerfStats{ + CheckerInits: map[string]time.Duration{}, + } + if opt == nil { opt = &Options{} } @@ -236,58 +262,101 @@ func Lint(cs []lint.Checker, lprog *loader.Program, ssaProg *ssa.Program, conf * return nil, err } - var problems [][]lint.Problem - for _, c := range cs { - runner := &runner{ - checker: c, - tags: opt.Tags, - ignores: ignores, - version: opt.GoVersion, - returnIgnored: opt.ReturnIgnored, + conf := &packages.Config{ + Mode: packages.LoadAllSyntax, + Tests: opt.LintTests, + BuildFlags: []string{ + "-tags=" + strings.Join(opt.Tags, " "), + }, + } + + t := time.Now() + if len(paths) == 0 { + 
paths = []string{"."} + } + pkgs, err := packages.Load(conf, paths...) + if err != nil { + return nil, err + } + stats.PackageLoading = time.Since(t) + + var problems []lint.Problem + workingPkgs := make([]*packages.Package, 0, len(pkgs)) + for _, pkg := range pkgs { + if pkg.IllTyped { + problems = append(problems, compileErrors(pkg)...) + } else { + workingPkgs = append(workingPkgs, pkg) } - problems = append(problems, runner.lint(lprog, ssaProg, conf)) } + + if len(workingPkgs) == 0 { + return problems, nil + } + + l := &lint.Linter{ + Checkers: cs, + Ignores: ignores, + GoVersion: opt.GoVersion, + ReturnIgnored: opt.ReturnIgnored, + Config: opt.Config, + + MaxConcurrentJobs: opt.MaxConcurrentJobs, + PrintStats: opt.PrintStats, + } + problems = append(problems, l.Lint(workingPkgs, &stats)...) + return problems, nil } -func shortPath(path string) string { - cwd, err := os.Getwd() - if err != nil { - return path +var posRe = regexp.MustCompile(`^(.+?):(\d+):(\d+)?$`) + +func parsePos(pos string) token.Position { + if pos == "-" || pos == "" { + return token.Position{} } - if rel, err := filepath.Rel(cwd, path); err == nil && len(rel) < len(path) { - return rel + parts := posRe.FindStringSubmatch(pos) + if parts == nil { + panic(fmt.Sprintf("internal error: malformed position %q", pos)) + } + file := parts[1] + line, _ := strconv.Atoi(parts[2]) + col, _ := strconv.Atoi(parts[3]) + return token.Position{ + Filename: file, + Line: line, + Column: col, } - return path } -func relativePositionString(pos token.Position) string { - s := shortPath(pos.Filename) - if pos.IsValid() { - if s != "" { - s += ":" +func compileErrors(pkg *packages.Package) []lint.Problem { + if !pkg.IllTyped { + return nil + } + if len(pkg.Errors) == 0 { + // transitively ill-typed + var ps []lint.Problem + for _, imp := range pkg.Imports { + ps = append(ps, compileErrors(imp)...) 
} - s += fmt.Sprintf("%d:%d", pos.Line, pos.Column) + return ps } - if s == "" { - s = "-" + var ps []lint.Problem + for _, err := range pkg.Errors { + p := lint.Problem{ + Position: parsePos(err.Pos), + Text: err.Msg, + Checker: "compiler", + Check: "compile", + } + ps = append(ps, p) } - return s + return ps } -func ProcessArgs(name string, cs []CheckerConfig, args []string) { +func ProcessArgs(name string, cs []lint.Checker, args []string) { flags := FlagSet(name) flags.Parse(args) - ProcessFlagSet(cs, flags, nil, nil, nil) -} - -func (runner *runner) lint(lprog *loader.Program, ssaProg *ssa.Program, conf *loader.Config) []lint.Problem { - l := &lint.Linter{ - Checker: runner.checker, - Ignores: runner.ignores, - GoVersion: runner.version, - ReturnIgnored: runner.returnIgnored, - } - return l.Lint(lprog, ssaProg, conf) + ProcessFlagSet(cs, flags) } diff --git a/vendor/github.com/golangci/go-tools/simple/lint.go b/vendor/github.com/golangci/go-tools/simple/lint.go index 6c509c2454f9..2c47c93f21bc 100644 --- a/vendor/github.com/golangci/go-tools/simple/lint.go +++ b/vendor/github.com/golangci/go-tools/simple/lint.go @@ -9,9 +9,10 @@ import ( "reflect" "strings" + . "github.com/golangci/go-tools/arg" "github.com/golangci/go-tools/internal/sharedcheck" "github.com/golangci/go-tools/lint" - "github.com/golangci/tools/go/ssa" + . 
"github.com/golangci/go-tools/lint/lintdsl" "golang.org/x/tools/go/types/typeutil" ) @@ -19,8 +20,6 @@ import ( type Checker struct { CheckGenerated bool MS *typeutil.MethodSetCache - - nodeFns map[ast.Node]*ssa.Function } func NewChecker() *Checker { @@ -32,59 +31,40 @@ func NewChecker() *Checker { func (*Checker) Name() string { return "gosimple" } func (*Checker) Prefix() string { return "S" } -func (c *Checker) Init(prog *lint.Program) { - c.nodeFns = lint.NodeFns(prog.Packages) -} - -func (c *Checker) Funcs() map[string]lint.Func { - return map[string]lint.Func{ - "S1000": c.LintSingleCaseSelect, - "S1001": c.LintLoopCopy, - "S1002": c.LintIfBoolCmp, - "S1003": c.LintStringsContains, - "S1004": c.LintBytesCompare, - "S1005": c.LintUnnecessaryBlank, - "S1006": c.LintForTrue, - "S1007": c.LintRegexpRaw, - "S1008": c.LintIfReturn, - "S1009": c.LintRedundantNilCheckWithLen, - "S1010": c.LintSlicing, - "S1011": c.LintLoopAppend, - "S1012": c.LintTimeSince, - "S1013": nil, - "S1014": nil, - "S1015": nil, - "S1016": c.LintSimplerStructConversion, - "S1017": c.LintTrim, - "S1018": c.LintLoopSlide, - "S1019": c.LintMakeLenCap, - "S1020": c.LintAssertNotNil, - "S1021": c.LintDeclareAssign, - "S1022": nil, - "S1023": c.LintRedundantBreak, - "S1024": c.LintTimeUntil, - "S1025": c.LintRedundantSprintf, - "S1026": nil, - "S1027": nil, - "S1028": c.LintErrorsNewSprintf, - "S1029": c.LintRangeStringRunes, - "S1030": c.LintBytesBufferConversions, - "S1031": c.LintNilCheckAroundRange, - "S1032": c.LintSortHelpers, - } -} - -func (c *Checker) filterGenerated(files []*ast.File) []*ast.File { - if c.CheckGenerated { - return files - } - var out []*ast.File - for _, f := range files { - if !lint.IsGenerated(f) { - out = append(out, f) - } +func (c *Checker) Init(prog *lint.Program) {} + +func (c *Checker) Checks() []lint.Check { + return []lint.Check{ + {ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect}, + {ID: "S1001", FilterGenerated: true, Fn: c.LintLoopCopy}, 
+ {ID: "S1002", FilterGenerated: true, Fn: c.LintIfBoolCmp}, + {ID: "S1003", FilterGenerated: true, Fn: c.LintStringsContains}, + {ID: "S1004", FilterGenerated: true, Fn: c.LintBytesCompare}, + {ID: "S1005", FilterGenerated: true, Fn: c.LintUnnecessaryBlank}, + {ID: "S1006", FilterGenerated: true, Fn: c.LintForTrue}, + {ID: "S1007", FilterGenerated: true, Fn: c.LintRegexpRaw}, + {ID: "S1008", FilterGenerated: true, Fn: c.LintIfReturn}, + {ID: "S1009", FilterGenerated: true, Fn: c.LintRedundantNilCheckWithLen}, + {ID: "S1010", FilterGenerated: true, Fn: c.LintSlicing}, + {ID: "S1011", FilterGenerated: true, Fn: c.LintLoopAppend}, + {ID: "S1012", FilterGenerated: true, Fn: c.LintTimeSince}, + {ID: "S1016", FilterGenerated: true, Fn: c.LintSimplerStructConversion}, + {ID: "S1017", FilterGenerated: true, Fn: c.LintTrim}, + {ID: "S1018", FilterGenerated: true, Fn: c.LintLoopSlide}, + {ID: "S1019", FilterGenerated: true, Fn: c.LintMakeLenCap}, + {ID: "S1020", FilterGenerated: true, Fn: c.LintAssertNotNil}, + {ID: "S1021", FilterGenerated: true, Fn: c.LintDeclareAssign}, + {ID: "S1023", FilterGenerated: true, Fn: c.LintRedundantBreak}, + {ID: "S1024", FilterGenerated: true, Fn: c.LintTimeUntil}, + {ID: "S1025", FilterGenerated: true, Fn: c.LintRedundantSprintf}, + {ID: "S1028", FilterGenerated: true, Fn: c.LintErrorsNewSprintf}, + {ID: "S1029", FilterGenerated: false, Fn: c.LintRangeStringRunes}, + {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions}, + {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange}, + {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers}, + {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete}, + {ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch}, } - return out } func (c *Checker) LintSingleCaseSelect(j *lint.Job) { @@ -124,7 +104,7 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { } return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range 
j.Program.Files { ast.Inspect(f, fn) } } @@ -153,7 +133,8 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return true } - if _, ok := j.Program.Info.TypeOf(lhs.X).(*types.Slice); !ok { + + if _, ok := TypeOf(j, lhs.X).(*types.Slice); !ok { return true } lidx, ok := lhs.Index.(*ast.Ident) @@ -164,16 +145,16 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return true } - if j.Program.Info.TypeOf(lhs) == nil || j.Program.Info.TypeOf(stmt.Rhs[0]) == nil { + if TypeOf(j, lhs) == nil || TypeOf(j, stmt.Rhs[0]) == nil { return true } - if j.Program.Info.ObjectOf(lidx) != j.Program.Info.ObjectOf(key) { + if ObjectOf(j, lidx) != ObjectOf(j, key) { return true } - if !types.Identical(j.Program.Info.TypeOf(lhs), j.Program.Info.TypeOf(stmt.Rhs[0])) { + if !types.Identical(TypeOf(j, lhs), TypeOf(j, stmt.Rhs[0])) { return true } - if _, ok := j.Program.Info.TypeOf(loop.X).(*types.Slice); !ok { + if _, ok := TypeOf(j, loop.X).(*types.Slice); !ok { return true } @@ -187,7 +168,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return true } - if j.Program.Info.ObjectOf(ridx) != j.Program.Info.ObjectOf(key) { + if ObjectOf(j, ridx) != ObjectOf(j, key) { return true } } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { @@ -195,7 +176,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return true } - if j.Program.Info.ObjectOf(rhs) != j.Program.Info.ObjectOf(value) { + if ObjectOf(j, rhs) != ObjectOf(j, value) { return true } } else { @@ -204,7 +185,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { j.Errorf(loop, "should use copy() instead of a loop") return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -215,21 +196,21 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { if !ok || (expr.Op != token.EQL && expr.Op != token.NEQ) { return true } - x := j.IsBoolConst(expr.X) - y := j.IsBoolConst(expr.Y) + x := IsBoolConst(j, expr.X) + y := IsBoolConst(j, expr.Y) if !x && 
!y { return true } var other ast.Expr var val bool if x { - val = j.BoolConst(expr.X) + val = BoolConst(j, expr.X) other = expr.Y } else { - val = j.BoolConst(expr.Y) + val = BoolConst(j, expr.Y) other = expr.X } - basic, ok := j.Program.Info.TypeOf(other).Underlying().(*types.Basic) + basic, ok := TypeOf(j, other).Underlying().(*types.Basic) if !ok || basic.Kind() != types.Bool { return true } @@ -237,16 +218,19 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) { op = "!" } - r := op + j.Render(other) + r := op + Render(j, other) l1 := len(r) r = strings.TrimLeft(r, "!") if (l1-len(r))%2 == 1 { r = "!" + r } + if IsInTest(j, node) { + return true + } j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -267,16 +251,16 @@ func (c *Checker) LintBytesBufferConversions(j *lint.Job) { return true } - typ := j.Program.Info.TypeOf(call.Fun) - if typ == types.Universe.Lookup("string").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).Bytes") { - j.Errorf(call, "should use %v.String() instead of %v", j.Render(sel.X), j.Render(call)) - } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).String") { - j.Errorf(call, "should use %v.Bytes() instead of %v", j.Render(sel.X), j.Render(call)) + typ := TypeOf(j, call.Fun) + if typ == types.Universe.Lookup("string").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).Bytes") { + j.Errorf(call, "should use %v.String() instead of %v", Render(j, sel.X), Render(j, call)) + } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).String") { + j.Errorf(call, "should use %v.Bytes() instead of %v", Render(j, sel.X), 
Render(j, call)) } return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -298,7 +282,7 @@ func (c *Checker) LintStringsContains(j *lint.Job) { return true } - value, ok := j.ExprToInt(expr.Y) + value, ok := ExprToInt(j, expr.Y) if !ok { return true } @@ -344,11 +328,11 @@ func (c *Checker) LintStringsContains(j *lint.Job) { if !b { prefix = "!" } - j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, j.RenderArgs(call.Args)) + j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(j, call.Args)) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -366,14 +350,14 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { if !ok { return true } - if !j.IsCallToAST(call, "bytes.Compare") { + if !IsCallToAST(j, call, "bytes.Compare") { return true } - value, ok := j.ExprToInt(expr.Y) + value, ok := ExprToInt(j, expr.Y) if !ok || value != 0 { return true } - args := j.RenderArgs(call.Args) + args := RenderArgs(j, call.Args) prefix := "" if expr.Op == token.NEQ { prefix = "!" 
@@ -381,7 +365,7 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -395,13 +379,13 @@ func (c *Checker) LintForTrue(j *lint.Job) { if loop.Init != nil || loop.Post != nil { return true } - if !j.IsBoolConst(loop.Cond) || !j.BoolConst(loop.Cond) { + if !IsBoolConst(j, loop.Cond) || !BoolConst(j, loop.Cond) { return true } j.Errorf(loop, "should use for {} instead of for true {}") return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -412,8 +396,8 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { if !ok { return true } - if !j.IsCallToAST(call, "regexp.MustCompile") && - !j.IsCallToAST(call, "regexp.Compile") { + if !IsCallToAST(j, call, "regexp.MustCompile") && + !IsCallToAST(j, call, "regexp.Compile") { return true } sel, ok := call.Fun.(*ast.SelectorExpr) @@ -424,7 +408,7 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { // invalid function call return true } - lit, ok := call.Args[0].(*ast.BasicLit) + lit, ok := call.Args[Arg("regexp.Compile.expr")].(*ast.BasicLit) if !ok { // TODO(dominikh): support string concat, maybe support constants return true @@ -441,6 +425,9 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { if !strings.Contains(val, `\\`) { return true } + if strings.Contains(val, "`") { + return true + } bs := false for _, c := range val { @@ -461,7 +448,7 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -511,7 +498,7 @@ func (c *Checker) LintIfReturn(j *lint.Job) { if len(ret1.Results) != 1 { return true } - if 
!j.IsBoolConst(ret1.Results[0]) { + if !IsBoolConst(j, ret1.Results[0]) { return true } @@ -522,13 +509,13 @@ func (c *Checker) LintIfReturn(j *lint.Job) { if len(ret2.Results) != 1 { return true } - if !j.IsBoolConst(ret2.Results[0]) { + if !IsBoolConst(j, ret2.Results[0]) { return true } j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -545,13 +532,13 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { _, ok := expr.(*ast.BasicLit) if ok { - return true, lint.IsZero(expr) + return true, IsZero(expr) } id, ok := expr.(*ast.Ident) if !ok { return false, false } - c, ok := j.Program.Info.ObjectOf(id).(*types.Const) + c, ok := ObjectOf(j, id).(*types.Const) if !ok { return false, false } @@ -584,7 +571,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok { return true } - if !j.IsNil(x.Y) { + if !IsNil(j, x.Y) { return true } @@ -604,7 +591,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok || yxFun.Name != "len" || len(yx.Args) != 1 { return true } - yxArg, ok := yx.Args[0].(*ast.Ident) + yxArg, ok := yx.Args[Arg("len.v")].(*ast.Ident) if !ok { return true } @@ -612,7 +599,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { return true } - if eqNil && !lint.IsZero(y.Y) { // must be len(x) == *0* + if eqNil && !IsZero(y.Y) { // must be len(x) == *0* return true } @@ -647,7 +634,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { // finally check that xx type is one of array, slice, map or chan // this is to prevent false positive in case if xx is a pointer to an array var nilType string - switch j.Program.Info.TypeOf(xx).(type) { + switch TypeOf(j, xx).(type) { case *types.Slice: nilType = "nil slices" case *types.Map: @@ -660,7 +647,7 @@ func (c *Checker) 
LintRedundantNilCheckWithLen(j *lint.Job) { j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -686,29 +673,29 @@ func (c *Checker) LintSlicing(j *lint.Job) { if !ok || fun.Name != "len" { return true } - if _, ok := j.Program.Info.ObjectOf(fun).(*types.Builtin); !ok { + if _, ok := ObjectOf(j, fun).(*types.Builtin); !ok { return true } - arg, ok := call.Args[0].(*ast.Ident) + arg, ok := call.Args[Arg("len.v")].(*ast.Ident) if !ok || arg.Obj != s.Obj { return true } j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } -func refersTo(info *types.Info, expr ast.Expr, ident *ast.Ident) bool { +func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { found := false fn := func(node ast.Node) bool { ident2, ok := node.(*ast.Ident) if !ok { return true } - if info.ObjectOf(ident) == info.ObjectOf(ident2) { + if ObjectOf(j, ident) == ObjectOf(j, ident2) { found = true return false } @@ -724,7 +711,7 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return true } - if !lint.IsBlank(loop.Key) { + if !IsBlank(loop.Key) { return true } val, ok := loop.Value.(*ast.Ident) @@ -741,7 +728,7 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { return true } - if refersTo(j.Program.Info, stmt.Lhs[0], val) { + if refersTo(j, stmt.Lhs[0], val) { return true } call, ok := stmt.Rhs[0].(*ast.CallExpr) @@ -755,14 +742,14 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return true } - obj := j.Program.Info.ObjectOf(fun) + obj := ObjectOf(j, fun) fn, ok := obj.(*types.Builtin) if !ok || fn.Name() != "append" { return true } - src := j.Program.Info.TypeOf(loop.X) - 
dst := j.Program.Info.TypeOf(call.Args[0]) + src := TypeOf(j, loop.X) + dst := TypeOf(j, call.Args[Arg("append.slice")]) // TODO(dominikh) remove nil check once Go issue #15173 has // been fixed if src == nil { @@ -772,22 +759,22 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { return true } - if j.Render(stmt.Lhs[0]) != j.Render(call.Args[0]) { + if Render(j, stmt.Lhs[0]) != Render(j, call.Args[Arg("append.slice")]) { return true } - el, ok := call.Args[1].(*ast.Ident) + el, ok := call.Args[Arg("append.elems")].(*ast.Ident) if !ok { return true } - if j.Program.Info.ObjectOf(val) != j.Program.Info.ObjectOf(el) { + if ObjectOf(j, val) != ObjectOf(j, el) { return true } j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", - j.Render(stmt.Lhs[0]), j.Render(call.Args[0]), j.Render(loop.X)) + Render(j, stmt.Lhs[0]), Render(j, call.Args[Arg("append.slice")]), Render(j, loop.X)) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -802,7 +789,7 @@ func (c *Checker) LintTimeSince(j *lint.Job) { if !ok { return true } - if !j.IsCallToAST(sel.X, "time.Now") { + if !IsCallToAST(j, sel.X, "time.Now") { return true } if sel.Sel.Name != "Sub" { @@ -811,13 +798,13 @@ func (c *Checker) LintTimeSince(j *lint.Job) { j.Errorf(call, "should use time.Since instead of time.Now().Sub") return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } func (c *Checker) LintTimeUntil(j *lint.Job) { - if !j.IsGoVersion(8) { + if !IsGoVersion(j, 8) { return } fn := func(node ast.Node) bool { @@ -825,16 +812,16 @@ func (c *Checker) LintTimeUntil(j *lint.Job) { if !ok { return true } - if !j.IsCallToAST(call, "(time.Time).Sub") { + if !IsCallToAST(j, call, "(time.Time).Sub") { return true } - if !j.IsCallToAST(call.Args[0], "time.Now") { + if !IsCallToAST(j, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { return true } 
j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -848,14 +835,14 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { return } - if !lint.IsBlank(assign.Lhs[1]) { + if !IsBlank(assign.Lhs[1]) { return } switch rhs := assign.Rhs[0].(type) { case *ast.IndexExpr: // The type-checker should make sure that it's a map, but // let's be safe. - if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok { + if _, ok := TypeOf(j, rhs.X).Underlying().(*types.Map); !ok { return } case *ast.UnaryExpr: @@ -867,7 +854,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } cp := *assign cp.Lhs = cp.Lhs[0:1] - j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign)) + j.Errorf(assign, "should write %s instead of %s", Render(j, &cp), Render(j, assign)) } fn2 := func(node ast.Node) { @@ -880,7 +867,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } for i, lh := range stmt.Lhs { rh := stmt.Rhs[i] - if !lint.IsBlank(lh) { + if !IsBlank(lh) { continue } expr, ok := rh.(*ast.UnaryExpr) @@ -899,7 +886,13 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { if !ok { return } - if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) { + + // for x, _ + if !IsBlank(rs.Key) && IsBlank(rs.Value) { + j.Errorf(rs.Value, "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(j, rs.Key), rs.Tok) + } + // for _, _ || for _ + if IsBlank(rs.Key) && (IsBlank(rs.Value) || rs.Value == nil) { j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`") } } @@ -907,12 +900,12 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { fn := func(node ast.Node) bool { fn1(node) fn2(node) - if j.IsGoVersion(4) { + if IsGoVersion(j, 4) { fn3(node) } return true } - 
for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -936,7 +929,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return true } - typ1, _ := j.Program.Info.TypeOf(lit.Type).(*types.Named) + typ1, _ := TypeOf(j, lit.Type).(*types.Named) if typ1 == nil { return true } @@ -956,7 +949,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return nil, nil, false } - typ := j.Program.Info.TypeOf(sel.X) + typ := TypeOf(j, sel.X) return typ, ident, typ != nil } if len(lit.Elts) == 0 { @@ -1024,14 +1017,20 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if typ1 == typ2 { return true } - if !structsIdentical(s1, s2) { - return true + if IsGoVersion(j, 8) { + if !types.IdenticalIgnoreTags(s1, s2) { + return true + } + } else { + if !types.Identical(s1, s2) { + return true + } } j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1046,9 +1045,9 @@ func (c *Checker) LintTrim(j *lint.Job) { case *ast.Ident: return node1.Obj == node2.(*ast.Ident).Obj case *ast.SelectorExpr: - return j.Render(node1) == j.Render(node2) + return Render(j, node1) == Render(j, node2) case *ast.IndexExpr: - return j.Render(node1) == j.Render(node2) + return Render(j, node1) == Render(j, node2) } return false } @@ -1064,7 +1063,7 @@ func (c *Checker) LintTrim(j *lint.Job) { if len(call.Args) != 1 { return false } - return sameNonDynamic(call.Args[0], ident) + return sameNonDynamic(call.Args[Arg("len.v")], ident) } fn := func(node ast.Node) bool { @@ -1088,22 +1087,26 @@ func (c *Checker) LintTrim(j *lint.Job) { if !ok { return true } - call, ok := condCall.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - if lint.IsIdent(call.X, "strings") { + switch { + case 
IsCallToAST(j, condCall, "strings.HasPrefix"): + pkg = "strings" + fun = "HasPrefix" + case IsCallToAST(j, condCall, "strings.HasSuffix"): + pkg = "strings" + fun = "HasSuffix" + case IsCallToAST(j, condCall, "strings.Contains"): pkg = "strings" - } else if lint.IsIdent(call.X, "bytes") { + fun = "Contains" + case IsCallToAST(j, condCall, "bytes.HasPrefix"): pkg = "bytes" - } else { - return true - } - if lint.IsIdent(call.Sel, "HasPrefix") { fun = "HasPrefix" - } else if lint.IsIdent(call.Sel, "HasSuffix") { + case IsCallToAST(j, condCall, "bytes.HasSuffix"): + pkg = "bytes" fun = "HasSuffix" - } else { + case IsCallToAST(j, condCall, "bytes.Contains"): + pkg = "bytes" + fun = "Contains" + default: return true } @@ -1120,104 +1123,123 @@ func (c *Checker) LintTrim(j *lint.Job) { if !sameNonDynamic(condCall.Args[0], assign.Lhs[0]) { return true } - slice, ok := assign.Rhs[0].(*ast.SliceExpr) - if !ok { - return true - } - if slice.Slice3 { - return true - } - if !sameNonDynamic(slice.X, condCall.Args[0]) { - return true - } - var index ast.Expr - switch fun { - case "HasPrefix": - // TODO(dh) We could detect a High that is len(s), but another - // rule will already flag that, anyway. 
- if slice.High != nil { + + switch rhs := assign.Rhs[0].(type) { + case *ast.CallExpr: + if len(rhs.Args) < 2 || !sameNonDynamic(condCall.Args[0], rhs.Args[0]) || !sameNonDynamic(condCall.Args[1], rhs.Args[1]) { return true } - index = slice.Low - case "HasSuffix": - if slice.Low != nil { - n, ok := j.ExprToInt(slice.Low) - if !ok || n != 0 { - return true - } + if IsCallToAST(j, condCall, "strings.HasPrefix") && IsCallToAST(j, rhs, "strings.TrimPrefix") || + IsCallToAST(j, condCall, "strings.HasSuffix") && IsCallToAST(j, rhs, "strings.TrimSuffix") || + IsCallToAST(j, condCall, "strings.Contains") && IsCallToAST(j, rhs, "strings.Replace") || + IsCallToAST(j, condCall, "bytes.HasPrefix") && IsCallToAST(j, rhs, "bytes.TrimPrefix") || + IsCallToAST(j, condCall, "bytes.HasSuffix") && IsCallToAST(j, rhs, "bytes.TrimSuffix") || + IsCallToAST(j, condCall, "bytes.Contains") && IsCallToAST(j, rhs, "bytes.Replace") { + j.Errorf(ifstmt, "should replace this if statement with an unconditional %s", CallNameAST(j, rhs)) } - index = slice.High - } - - switch index := index.(type) { - case *ast.CallExpr: - if fun != "HasPrefix" { + return true + case *ast.SliceExpr: + slice := rhs + if !ok { return true } - if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { + if slice.Slice3 { return true } - if len(index.Args) != 1 { + if !sameNonDynamic(slice.X, condCall.Args[0]) { return true } - id3 := index.Args[0] - switch oid3 := condCall.Args[1].(type) { - case *ast.BasicLit: + var index ast.Expr + switch fun { + case "HasPrefix": + // TODO(dh) We could detect a High that is len(s), but another + // rule will already flag that, anyway. 
+ if slice.High != nil { + return true + } + index = slice.Low + case "HasSuffix": + if slice.Low != nil { + n, ok := ExprToInt(j, slice.Low) + if !ok || n != 0 { + return true + } + } + index = slice.High + } + + switch index := index.(type) { + case *ast.CallExpr: + if fun != "HasPrefix" { + return true + } + if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { + return true + } + if len(index.Args) != 1 { + return true + } + id3 := index.Args[Arg("len.v")] + switch oid3 := condCall.Args[1].(type) { + case *ast.BasicLit: + if pkg != "strings" { + return false + } + lit, ok := id3.(*ast.BasicLit) + if !ok { + return true + } + s1, ok1 := ExprToString(j, lit) + s2, ok2 := ExprToString(j, condCall.Args[1]) + if !ok1 || !ok2 || s1 != s2 { + return true + } + default: + if !sameNonDynamic(id3, oid3) { + return true + } + } + case *ast.BasicLit, *ast.Ident: + if fun != "HasPrefix" { + return true + } if pkg != "strings" { - return false + return true } - lit, ok := id3.(*ast.BasicLit) - if !ok { + string, ok1 := ExprToString(j, condCall.Args[1]) + int, ok2 := ExprToInt(j, slice.Low) + if !ok1 || !ok2 || int != int64(len(string)) { return true } - s1, ok1 := j.ExprToString(lit) - s2, ok2 := j.ExprToString(condCall.Args[1]) - if !ok1 || !ok2 || s1 != s2 { + case *ast.BinaryExpr: + if fun != "HasSuffix" { return true } - default: - if !sameNonDynamic(id3, oid3) { + if index.Op != token.SUB { return true } - } - case *ast.BasicLit, *ast.Ident: - if fun != "HasPrefix" { - return true - } - if pkg != "strings" { - return true - } - string, ok1 := j.ExprToString(condCall.Args[1]) - int, ok2 := j.ExprToInt(slice.Low) - if !ok1 || !ok2 || int != int64(len(string)) { - return true - } - case *ast.BinaryExpr: - if fun != "HasSuffix" { - return true - } - if index.Op != token.SUB { + if !isLenOnIdent(index.X, condCall.Args[0]) || + !isLenOnIdent(index.Y, condCall.Args[1]) { + return true + } + default: return true } - if !isLenOnIdent(index.X, condCall.Args[0]) || - 
!isLenOnIdent(index.Y, condCall.Args[1]) { - return true + + var replacement string + switch fun { + case "HasPrefix": + replacement = "TrimPrefix" + case "HasSuffix": + replacement = "TrimSuffix" } + j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) + return true default: return true } - - var replacement string - switch fun { - case "HasPrefix": - replacement = "TrimPrefix" - case "HasSuffix": - replacement = "TrimSuffix" - } - j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) - return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1244,7 +1266,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return true } assign, ok := loop.Init.(*ast.AssignStmt) - if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !lint.IsZero(assign.Rhs[0]) { + if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !IsZero(assign.Rhs[0]) { return true } initvar, ok := assign.Lhs[0].(*ast.Ident) @@ -1256,7 +1278,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return true } postvar, ok := post.X.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(postvar) != j.Program.Info.ObjectOf(initvar) { + if !ok || ObjectOf(j, postvar) != ObjectOf(j, initvar) { return true } bin, ok := loop.Cond.(*ast.BinaryExpr) @@ -1264,7 +1286,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return true } binx, ok := bin.X.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(binx) != j.Program.Info.ObjectOf(initvar) { + if !ok || ObjectOf(j, binx) != ObjectOf(j, initvar) { return true } biny, ok := bin.Y.(*ast.Ident) @@ -1293,8 +1315,8 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { if !ok { return true } - obj1 := j.Program.Info.ObjectOf(bs1) - obj2 := j.Program.Info.ObjectOf(bs2) + obj1 := ObjectOf(j, bs1) + obj2 := ObjectOf(j, bs2) if obj1 != obj2 { return true } @@ -1303,7 +1325,7 @@ func (c *Checker) LintLoopSlide(j 
*lint.Job) { } index1, ok := lhs.Index.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(index1) != j.Program.Info.ObjectOf(initvar) { + if !ok || ObjectOf(j, index1) != ObjectOf(j, initvar) { return true } index2, ok := rhs.Index.(*ast.BinaryExpr) @@ -1315,14 +1337,14 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return true } add2, ok := index2.Y.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(add2) != j.Program.Info.ObjectOf(initvar) { + if !ok || ObjectOf(j, add2) != ObjectOf(j, initvar) { return true } - j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", j.Render(bs1), j.Render(biny), j.Render(bs1), j.Render(add1)) + j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(j, bs1), Render(j, biny), Render(j, bs1), Render(j, add1)) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1340,21 +1362,23 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { switch len(call.Args) { case 2: // make(T, len) - if _, ok := j.Program.Info.TypeOf(call.Args[0]).Underlying().(*types.Slice); ok { + if _, ok := TypeOf(j, call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { break } - if lint.IsZero(call.Args[1]) { - j.Errorf(call.Args[1], "should use make(%s) instead", j.Render(call.Args[0])) + if IsZero(call.Args[Arg("make.size[0]")]) { + j.Errorf(call.Args[Arg("make.size[0]")], "should use make(%s) instead", Render(j, call.Args[Arg("make.t")])) } case 3: // make(T, len, cap) - if j.Render(call.Args[1]) == j.Render(call.Args[2]) { - j.Errorf(call.Args[1], "should use make(%s, %s) instead", j.Render(call.Args[0]), j.Render(call.Args[1])) + if Render(j, call.Args[Arg("make.size[0]")]) == Render(j, call.Args[Arg("make.size[1]")]) { + j.Errorf(call.Args[Arg("make.size[0]")], + "should use make(%s, %s) instead", + Render(j, call.Args[Arg("make.t")]), Render(j, call.Args[Arg("make.size[0]")])) } } return false } - for _, f := range c.filterGenerated(j.Program.Files) { + for 
_, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1369,7 +1393,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { if !ok || xident.Obj != ident.Obj { return false } - if !j.IsNil(xbinop.Y) { + if !IsNil(j, xbinop.Y) { return false } return true @@ -1381,13 +1405,13 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { } return true } - fn := func(node ast.Node) bool { + fn1 := func(node ast.Node) bool { ifstmt, ok := node.(*ast.IfStmt) if !ok { return true } assign, ok := ifstmt.Init.(*ast.AssignStmt) - if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !lint.IsBlank(assign.Lhs[0]) { + if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) { return true } assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) @@ -1410,10 +1434,75 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { return true } - j.Errorf(ifstmt, "when %s is true, %s can't be nil", j.Render(assignIdent), j.Render(assertIdent)) + j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + fn2 := func(node ast.Node) bool { + // Check that outer ifstmt is an 'if x != nil {}' + ifstmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + if ifstmt.Init != nil { + return true + } + if ifstmt.Else != nil { + return true + } + if len(ifstmt.Body.List) != 1 { + return true + } + binop, ok := ifstmt.Cond.(*ast.BinaryExpr) + if !ok { + return true + } + if binop.Op != token.NEQ { + return true + } + lhs, ok := binop.X.(*ast.Ident) + if !ok { + return true + } + if !IsNil(j, binop.Y) { + return true + } + + // Check that inner ifstmt is an `if _, ok := x.(T); ok {}` + ifstmt, ok = ifstmt.Body.List[0].(*ast.IfStmt) + if !ok { + return true + } + assign, ok := ifstmt.Init.(*ast.AssignStmt) + if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) { + return 
true + } + assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) + if !ok { + return true + } + assertIdent, ok := assert.X.(*ast.Ident) + if !ok { + return true + } + if lhs.Obj != assertIdent.Obj { + return true + } + assignIdent, ok := assign.Lhs[1].(*ast.Ident) + if !ok { + return true + } + if !isOKCheck(assignIdent, ifstmt.Cond) { + return true + } + j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) + return true + } + fn := func(node ast.Node) bool { + b1 := fn1(node) + b2 := fn2(node) + return b1 || b2 + } + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1457,14 +1546,14 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { continue } - if refersTo(j.Program.Info, assign.Rhs[0], ident) { + if refersTo(j, assign.Rhs[0], ident) { continue } j.Errorf(decl, "should merge variable declaration with assignment on next line") } return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1483,7 +1572,6 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { return } j.Errorf(branch, "redundant break statement") - return } fn2 := func(node ast.Node) { var ret *ast.FieldList @@ -1517,7 +1605,7 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { fn2(node) return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1532,11 +1620,11 @@ func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { ifaceName = iface } else { pkgName := iface[:idx] - pkg := j.Program.Prog.Package(pkgName) + pkg := j.Program.Package(pkgName) if pkg == nil { return false } - scope = pkg.Pkg.Scope() + scope = pkg.Types.Scope() ifaceName = iface[idx+1:] } @@ -1557,18 +1645,17 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { if !ok { return true } - if !j.IsCallToAST(call, "fmt.Sprintf") { + if !IsCallToAST(j, call, "fmt.Sprintf") { return true } if 
len(call.Args) != 2 { return true } - if s, ok := j.ExprToString(call.Args[0]); !ok || s != "%s" { + if s, ok := ExprToString(j, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" { return true } - pkg := j.NodePackage(call) - arg := call.Args[1] - typ := pkg.Info.TypeOf(arg) + arg := call.Args[Arg("fmt.Sprintf.a[0]")] + typ := TypeOf(j, arg) if c.Implements(j, typ, "fmt.Stringer") { j.Errorf(call, "should use String() instead of fmt.Sprintf") @@ -1584,30 +1671,30 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { } return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { fn := func(node ast.Node) bool { - if !j.IsCallToAST(node, "errors.New") { + if !IsCallToAST(j, node, "errors.New") { return true } call := node.(*ast.CallExpr) - if !j.IsCallToAST(call.Args[0], "fmt.Sprintf") { + if !IsCallToAST(j, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { return true } j.Errorf(node, "should use fmt.Errorf(...) 
instead of errors.New(fmt.Sprintf(...))") return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } func (c *Checker) LintRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(c.nodeFns, j) + sharedcheck.CheckRangeStringRunes(j) } func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { @@ -1622,7 +1709,7 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { return true } - if cond.Op != token.NEQ || !j.IsNil(cond.Y) || len(ifstmt.Body.List) != 1 { + if cond.Op != token.NEQ || !IsNil(j, cond.Y) || len(ifstmt.Body.List) != 1 { return true } @@ -1641,13 +1728,13 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { if ifXIdent.Obj != rangeXIdent.Obj { return true } - switch j.Program.Info.TypeOf(rangeXIdent).(type) { + switch TypeOf(j, rangeXIdent).(type) { case *types.Slice, *types.Map: j.Errorf(node, "unnecessary nil check around range") } return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -1663,7 +1750,7 @@ func isPermissibleSort(j *lint.Job, node ast.Node) bool { if !ok { return true } - name := j.SelectorName(sel) + name := SelectorName(j, sel) switch name { case "sort.IntSlice", "sort.Float64Slice", "sort.StringSlice": default: @@ -1698,7 +1785,7 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { if permissible { return false } - if !j.IsCallToAST(node, "sort.Sort") { + if !IsCallToAST(j, node, "sort.Sort") { return true } if isPermissibleSort(j, node) { @@ -1706,9 +1793,9 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { return false } call := node.(*ast.CallExpr) - typeconv := call.Args[0].(*ast.CallExpr) + typeconv := call.Args[Arg("sort.Sort.data")].(*ast.CallExpr) sel := typeconv.Fun.(*ast.SelectorExpr) - name := j.SelectorName(sel) + name := SelectorName(j, sel) switch name { case "sort.IntSlice": @@ -1731,7 +1818,147 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { 
return false } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fnFuncs) } } + +func (c *Checker) LintGuardedDelete(j *lint.Job) { + isCommaOkMapIndex := func(stmt ast.Stmt) (b *ast.Ident, m ast.Expr, key ast.Expr, ok bool) { + // Has to be of the form `_, = [] + + assign, ok := stmt.(*ast.AssignStmt) + if !ok { + return nil, nil, nil, false + } + if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { + return nil, nil, nil, false + } + if !IsBlank(assign.Lhs[0]) { + return nil, nil, nil, false + } + ident, ok := assign.Lhs[1].(*ast.Ident) + if !ok { + return nil, nil, nil, false + } + index, ok := assign.Rhs[0].(*ast.IndexExpr) + if !ok { + return nil, nil, nil, false + } + if _, ok := TypeOf(j, index.X).(*types.Map); !ok { + return nil, nil, nil, false + } + key = index.Index + return ident, index.X, key, true + } + fn := func(node ast.Node) bool { + stmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + if len(stmt.Body.List) != 1 { + return true + } + expr, ok := stmt.Body.List[0].(*ast.ExprStmt) + if !ok { + return true + } + call, ok := expr.X.(*ast.CallExpr) + if !ok { + return true + } + if !IsCallToAST(j, call, "delete") { + return true + } + b, m, key, ok := isCommaOkMapIndex(stmt.Init) + if !ok { + return true + } + if cond, ok := stmt.Cond.(*ast.Ident); !ok || ObjectOf(j, cond) != ObjectOf(j, b) { + return true + } + if Render(j, call.Args[0]) != Render(j, m) || Render(j, call.Args[1]) != Render(j, key) { + return true + } + j.Errorf(stmt, "unnecessary guard around call to delete") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { + fn := func(node ast.Node) bool { + stmt, ok := node.(*ast.TypeSwitchStmt) + if !ok { + return true + } + if stmt.Init != nil { + // bailing out for now, can't anticipate how type switches with initializers are being used + return true + } + expr, ok := 
stmt.Assign.(*ast.ExprStmt) + if !ok { + // the user is in fact assigning the result + return true + } + assert := expr.X.(*ast.TypeAssertExpr) + ident, ok := assert.X.(*ast.Ident) + if !ok { + return true + } + x := ObjectOf(j, ident) + var allOffenders []ast.Node + for _, clause := range stmt.Body.List { + clause := clause.(*ast.CaseClause) + if len(clause.List) != 1 { + continue + } + hasUnrelatedAssertion := false + var offenders []ast.Node + ast.Inspect(clause, func(node ast.Node) bool { + assert2, ok := node.(*ast.TypeAssertExpr) + if !ok { + return true + } + ident, ok := assert2.X.(*ast.Ident) + if !ok { + hasUnrelatedAssertion = true + return false + } + if ObjectOf(j, ident) != x { + hasUnrelatedAssertion = true + return false + } + + if !types.Identical(TypeOf(j, clause.List[0]), TypeOf(j, assert2.Type)) { + hasUnrelatedAssertion = true + return false + } + offenders = append(offenders, assert2) + return true + }) + if !hasUnrelatedAssertion { + // don't flag cases that have other type assertions + // unrelated to the one in the case clause. often + // times, this is done for symmetry, when two + // different values have to be asserted to the same + // type. + allOffenders = append(allOffenders, offenders...) 
+ } + } + if len(allOffenders) != 0 { + at := "" + for _, offender := range allOffenders { + pos := j.Program.DisplayPosition(offender.Pos()) + at += "\n\t" + pos.String() + } + j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/vendor/github.com/golangci/go-tools/simple/lint17.go b/vendor/github.com/golangci/go-tools/simple/lint17.go deleted file mode 100644 index 53f529c262db..000000000000 --- a/vendor/github.com/golangci/go-tools/simple/lint17.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build !go1.8 - -package simple - -import "go/types" - -var structsIdentical = types.Identical diff --git a/vendor/github.com/golangci/go-tools/simple/lint18.go b/vendor/github.com/golangci/go-tools/simple/lint18.go deleted file mode 100644 index ab9ea727a71a..000000000000 --- a/vendor/github.com/golangci/go-tools/simple/lint18.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build go1.8 - -package simple - -import "go/types" - -var structsIdentical = types.IdenticalIgnoreTags diff --git a/vendor/github.com/golangci/tools/go/ssa/LICENSE b/vendor/github.com/golangci/go-tools/ssa/LICENSE similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/LICENSE rename to vendor/github.com/golangci/go-tools/ssa/LICENSE diff --git a/vendor/github.com/golangci/tools/go/ssa/blockopt.go b/vendor/github.com/golangci/go-tools/ssa/blockopt.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/blockopt.go rename to vendor/github.com/golangci/go-tools/ssa/blockopt.go diff --git a/vendor/github.com/golangci/tools/go/ssa/builder.go b/vendor/github.com/golangci/go-tools/ssa/builder.go similarity index 99% rename from vendor/github.com/golangci/tools/go/ssa/builder.go rename to 
vendor/github.com/golangci/go-tools/ssa/builder.go index 602b27188e21..032819a2a110 100644 --- a/vendor/github.com/golangci/tools/go/ssa/builder.go +++ b/vendor/github.com/golangci/go-tools/ssa/builder.go @@ -1288,7 +1288,7 @@ func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { // instead of BinOp(EQL, tag, b.expr(cond)) // followed by If. Don't forget conversions // though. - cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) + cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos()) emitIf(fn, cond, body, nextCond) fn.currentBlock = nextCond } diff --git a/vendor/github.com/golangci/tools/go/ssa/const.go b/vendor/github.com/golangci/go-tools/ssa/const.go similarity index 99% rename from vendor/github.com/golangci/tools/go/ssa/const.go rename to vendor/github.com/golangci/go-tools/ssa/const.go index 4d0a707f5286..140e778c56b4 100644 --- a/vendor/github.com/golangci/tools/go/ssa/const.go +++ b/vendor/github.com/golangci/go-tools/ssa/const.go @@ -115,7 +115,7 @@ func (c *Const) IsNil() bool { return c.Value == nil } -// TODO(adonovan): move everything below into github.com/golangci/tools/go/ssa/interp. +// TODO(adonovan): move everything below into github.com/golangci/go-tools/ssa/interp. // Int64 returns the numeric value of this constant truncated to fit // a signed 64-bit integer. 
diff --git a/vendor/github.com/golangci/tools/go/ssa/create.go b/vendor/github.com/golangci/go-tools/ssa/create.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/create.go rename to vendor/github.com/golangci/go-tools/ssa/create.go diff --git a/vendor/github.com/golangci/tools/go/ssa/doc.go b/vendor/github.com/golangci/go-tools/ssa/doc.go similarity index 99% rename from vendor/github.com/golangci/tools/go/ssa/doc.go rename to vendor/github.com/golangci/go-tools/ssa/doc.go index 5297b9cd0bb2..2406a31a18da 100644 --- a/vendor/github.com/golangci/tools/go/ssa/doc.go +++ b/vendor/github.com/golangci/go-tools/ssa/doc.go @@ -120,4 +120,4 @@ // domains of source locations, ast.Nodes, types.Objects, // ssa.Values/Instructions. // -package ssa // import "github.com/golangci/tools/go/ssa" +package ssa // import "github.com/golangci/go-tools/ssa" diff --git a/vendor/github.com/golangci/tools/go/ssa/dom.go b/vendor/github.com/golangci/go-tools/ssa/dom.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/dom.go rename to vendor/github.com/golangci/go-tools/ssa/dom.go diff --git a/vendor/github.com/golangci/tools/go/ssa/emit.go b/vendor/github.com/golangci/go-tools/ssa/emit.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/emit.go rename to vendor/github.com/golangci/go-tools/ssa/emit.go diff --git a/vendor/github.com/golangci/tools/go/ssa/func.go b/vendor/github.com/golangci/go-tools/ssa/func.go similarity index 99% rename from vendor/github.com/golangci/tools/go/ssa/func.go rename to vendor/github.com/golangci/go-tools/ssa/func.go index 53635ba0114e..8e958c42258f 100644 --- a/vendor/github.com/golangci/tools/go/ssa/func.go +++ b/vendor/github.com/golangci/go-tools/ssa/func.go @@ -265,6 +265,10 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func } } +type setNumable 
interface { + setNum(int) +} + // numberRegisters assigns numbers to all SSA registers // (value-defining Instructions) in f, to aid debugging. // (Non-Instruction Values are named at construction.) @@ -275,9 +279,7 @@ func numberRegisters(f *Function) { for _, instr := range b.Instrs { switch instr.(type) { case Value: - instr.(interface { - setNum(int) - }).setNum(v) + instr.(setNumable).setNum(v) v++ } } diff --git a/vendor/github.com/golangci/tools/go/ssa/identical.go b/vendor/github.com/golangci/go-tools/ssa/identical.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/identical.go rename to vendor/github.com/golangci/go-tools/ssa/identical.go diff --git a/vendor/github.com/golangci/tools/go/ssa/identical_17.go b/vendor/github.com/golangci/go-tools/ssa/identical_17.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/identical_17.go rename to vendor/github.com/golangci/go-tools/ssa/identical_17.go diff --git a/vendor/github.com/golangci/tools/go/ssa/lift.go b/vendor/github.com/golangci/go-tools/ssa/lift.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/lift.go rename to vendor/github.com/golangci/go-tools/ssa/lift.go diff --git a/vendor/github.com/golangci/tools/go/ssa/lvalue.go b/vendor/github.com/golangci/go-tools/ssa/lvalue.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/lvalue.go rename to vendor/github.com/golangci/go-tools/ssa/lvalue.go diff --git a/vendor/github.com/golangci/tools/go/ssa/methods.go b/vendor/github.com/golangci/go-tools/ssa/methods.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/methods.go rename to vendor/github.com/golangci/go-tools/ssa/methods.go diff --git a/vendor/github.com/golangci/tools/go/ssa/mode.go b/vendor/github.com/golangci/go-tools/ssa/mode.go similarity index 100% rename from 
vendor/github.com/golangci/tools/go/ssa/mode.go rename to vendor/github.com/golangci/go-tools/ssa/mode.go diff --git a/vendor/github.com/golangci/tools/go/ssa/print.go b/vendor/github.com/golangci/go-tools/ssa/print.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/print.go rename to vendor/github.com/golangci/go-tools/ssa/print.go diff --git a/vendor/github.com/golangci/tools/go/ssa/sanity.go b/vendor/github.com/golangci/go-tools/ssa/sanity.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/sanity.go rename to vendor/github.com/golangci/go-tools/ssa/sanity.go diff --git a/vendor/github.com/golangci/tools/go/ssa/source.go b/vendor/github.com/golangci/go-tools/ssa/source.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/source.go rename to vendor/github.com/golangci/go-tools/ssa/source.go diff --git a/vendor/github.com/golangci/tools/go/ssa/ssa.go b/vendor/github.com/golangci/go-tools/ssa/ssa.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/ssa.go rename to vendor/github.com/golangci/go-tools/ssa/ssa.go diff --git a/vendor/github.com/golangci/tools/go/ssa/ssautil/load.go b/vendor/github.com/golangci/go-tools/ssa/ssautil/load.go similarity index 99% rename from vendor/github.com/golangci/tools/go/ssa/ssautil/load.go rename to vendor/github.com/golangci/go-tools/ssa/ssautil/load.go index 004571eb0b6c..c7832678b9b7 100644 --- a/vendor/github.com/golangci/tools/go/ssa/ssautil/load.go +++ b/vendor/github.com/golangci/go-tools/ssa/ssautil/load.go @@ -11,9 +11,9 @@ import ( "go/token" "go/types" - "github.com/golangci/tools/go/ssa" "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" + "github.com/golangci/go-tools/ssa" ) // Packages creates an SSA program for a set of packages loaded from diff --git 
a/vendor/github.com/golangci/tools/go/ssa/ssautil/switch.go b/vendor/github.com/golangci/go-tools/ssa/ssautil/switch.go similarity index 99% rename from vendor/github.com/golangci/tools/go/ssa/ssautil/switch.go rename to vendor/github.com/golangci/go-tools/ssa/ssautil/switch.go index 7c79719c6aba..05c388b2b439 100644 --- a/vendor/github.com/golangci/tools/go/ssa/ssautil/switch.go +++ b/vendor/github.com/golangci/go-tools/ssa/ssautil/switch.go @@ -24,7 +24,7 @@ import ( "go/token" "go/types" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) // A ConstCase represents a single constant comparison. diff --git a/vendor/github.com/golangci/tools/go/ssa/ssautil/visit.go b/vendor/github.com/golangci/go-tools/ssa/ssautil/visit.go similarity index 95% rename from vendor/github.com/golangci/tools/go/ssa/ssautil/visit.go rename to vendor/github.com/golangci/go-tools/ssa/ssautil/visit.go index 6dee49217374..02b2aac20f2c 100644 --- a/vendor/github.com/golangci/tools/go/ssa/ssautil/visit.go +++ b/vendor/github.com/golangci/go-tools/ssa/ssautil/visit.go @@ -2,9 +2,9 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package ssautil // import "github.com/golangci/tools/go/ssa/ssautil" +package ssautil // import "github.com/golangci/go-tools/ssa/ssautil" -import "github.com/golangci/tools/go/ssa" +import "github.com/golangci/go-tools/ssa" // This file defines utilities for visiting the SSA representation of // a Program. 
diff --git a/vendor/github.com/golangci/tools/go/ssa/testmain.go b/vendor/github.com/golangci/go-tools/ssa/testmain.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/testmain.go rename to vendor/github.com/golangci/go-tools/ssa/testmain.go diff --git a/vendor/github.com/golangci/tools/go/ssa/util.go b/vendor/github.com/golangci/go-tools/ssa/util.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/util.go rename to vendor/github.com/golangci/go-tools/ssa/util.go diff --git a/vendor/github.com/golangci/tools/go/ssa/wrappers.go b/vendor/github.com/golangci/go-tools/ssa/wrappers.go similarity index 99% rename from vendor/github.com/golangci/tools/go/ssa/wrappers.go rename to vendor/github.com/golangci/go-tools/ssa/wrappers.go index 555cd70d9555..701dd90d7d9d 100644 --- a/vendor/github.com/golangci/tools/go/ssa/wrappers.go +++ b/vendor/github.com/golangci/go-tools/ssa/wrappers.go @@ -21,6 +21,7 @@ package ssa import ( "fmt" + "go/types" ) diff --git a/vendor/github.com/golangci/tools/go/ssa/write.go b/vendor/github.com/golangci/go-tools/ssa/write.go similarity index 100% rename from vendor/github.com/golangci/tools/go/ssa/write.go rename to vendor/github.com/golangci/go-tools/ssa/write.go diff --git a/vendor/github.com/golangci/go-tools/ssautil/ssautil.go b/vendor/github.com/golangci/go-tools/ssautil/ssautil.go new file mode 100644 index 000000000000..19b7e1018866 --- /dev/null +++ b/vendor/github.com/golangci/go-tools/ssautil/ssautil.go @@ -0,0 +1,41 @@ +package ssautil + +import ( + "github.com/golangci/go-tools/ssa" +) + +func Reachable(from, to *ssa.BasicBlock) bool { + if from == to { + return true + } + if from.Dominates(to) { + return true + } + + found := false + Walk(from, func(b *ssa.BasicBlock) bool { + if b == to { + found = true + return false + } + return true + }) + return found +} + +func Walk(b *ssa.BasicBlock, fn 
func(*ssa.BasicBlock) bool) { + seen := map[*ssa.BasicBlock]bool{} + wl := []*ssa.BasicBlock{b} + for len(wl) > 0 { + b := wl[len(wl)-1] + wl = wl[:len(wl)-1] + if seen[b] { + continue + } + seen[b] = true + if !fn(b) { + continue + } + wl = append(wl, b.Succs...) + } +} diff --git a/vendor/github.com/golangci/go-tools/staticcheck/buildtag.go b/vendor/github.com/golangci/go-tools/staticcheck/buildtag.go index c0160d5dba16..d617d60298b6 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/buildtag.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/buildtag.go @@ -4,12 +4,12 @@ import ( "go/ast" "strings" - "github.com/golangci/go-tools/lint" + . "github.com/golangci/go-tools/lint/lintdsl" ) func buildTags(f *ast.File) [][]string { var out [][]string - for _, line := range strings.Split(lint.Preamble(f), "\n") { + for _, line := range strings.Split(Preamble(f), "\n") { if !strings.HasPrefix(line, "+build ") { continue } diff --git a/vendor/github.com/golangci/go-tools/staticcheck/lint.go b/vendor/github.com/golangci/go-tools/staticcheck/lint.go index 27ab798357f9..46c869bb1111 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/lint.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/lint.go @@ -10,20 +10,25 @@ import ( htmltemplate "html/template" "net/http" "regexp" + "regexp/syntax" "sort" "strconv" "strings" "sync" texttemplate "text/template" + . "github.com/golangci/go-tools/arg" "github.com/golangci/go-tools/deprecated" "github.com/golangci/go-tools/functions" "github.com/golangci/go-tools/internal/sharedcheck" "github.com/golangci/go-tools/lint" - "github.com/golangci/tools/go/ssa" + . 
"github.com/golangci/go-tools/lint/lintdsl" + "github.com/golangci/go-tools/ssa" + "github.com/golangci/go-tools/ssautil" "github.com/golangci/go-tools/staticcheck/vrp" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/packages" ) func validRegexp(call *Call) { @@ -64,7 +69,7 @@ func unmarshalPointer(name string, arg int) CallCheck { func pointlessIntMath(call *Call) { if ConvertedFromInt(call.Args[0].Value) { - call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", lint.CallName(call.Instr.Common()))) + call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", CallName(call.Instr.Common()))) } } @@ -80,11 +85,14 @@ var ( checkRegexpRules = map[string]CallCheck{ "regexp.MustCompile": validRegexp, "regexp.Compile": validRegexp, + "regexp.Match": validRegexp, + "regexp.MatchReader": validRegexp, + "regexp.MatchString": validRegexp, } checkTimeParseRules = map[string]CallCheck{ "time.Parse": func(call *Call) { - arg := call.Args[0] + arg := call.Args[Arg("time.Parse.layout")] err := ValidateTimeLayout(arg.Value) if err != nil { arg.Invalid(err.Error()) @@ -94,7 +102,7 @@ var ( checkEncodingBinaryRules = map[string]CallCheck{ "encoding/binary.Write": func(call *Call) { - arg := call.Args[2] + arg := call.Args[Arg("encoding/binary.Write.data")] if !CanBinaryMarshal(call.Job, arg.Value) { arg.Invalid(fmt.Sprintf("value of type %s cannot be used with binary.Write", arg.Value.Value.Type())) } @@ -103,7 +111,7 @@ var ( checkURLsRules = map[string]CallCheck{ "net/url.Parse": func(call *Call) { - arg := call.Args[0] + arg := call.Args[Arg("net/url.Parse.rawurl")] err := ValidateURL(arg.Value) if err != nil { arg.Invalid(err.Error()) @@ -113,9 +121,9 @@ var ( checkSyncPoolValueRules = map[string]CallCheck{ "(*sync.Pool).Put": func(call *Call) { - arg := call.Args[0] + arg := call.Args[Arg("(*sync.Pool).Put.x")] typ := arg.Value.Value.Type() - if !lint.IsPointerLike(typ) { + if !IsPointerLike(typ) { 
arg.Invalid("argument should be pointer-like to avoid allocations") } }, @@ -157,7 +165,7 @@ var ( checkUnbufferedSignalChanRules = map[string]CallCheck{ "os/signal.Notify": func(call *Call) { - arg := call.Args[0] + arg := call.Args[Arg("os/signal.Notify.c")] if UnbufferedChannel(arg.Value) { arg.Invalid("the channel used with signal.Notify should be buffered") } @@ -184,7 +192,8 @@ var ( checkBytesEqualIPRules = map[string]CallCheck{ "bytes.Equal": func(call *Call) { - if ConvertedFrom(call.Args[0].Value, "net.IP") && ConvertedFrom(call.Args[1].Value, "net.IP") { + if ConvertedFrom(call.Args[Arg("bytes.Equal.a")].Value, "net.IP") && + ConvertedFrom(call.Args[Arg("bytes.Equal.b")].Value, "net.IP") { call.Invalid("use net.IP.Equal to compare net.IPs, not bytes.Equal") } }, @@ -201,7 +210,6 @@ type Checker struct { CheckGenerated bool funcDescs *functions.Descriptions deprecatedObjs map[types.Object]string - nodeFns map[ast.Node]*ssa.Function } func NewChecker() *Checker { @@ -211,118 +219,175 @@ func NewChecker() *Checker { func (*Checker) Name() string { return "staticcheck" } func (*Checker) Prefix() string { return "SA" } -func (c *Checker) Funcs() map[string]lint.Func { - return map[string]lint.Func{ - "SA1000": c.callChecker(checkRegexpRules), - "SA1001": c.CheckTemplate, - "SA1002": c.callChecker(checkTimeParseRules), - "SA1003": c.callChecker(checkEncodingBinaryRules), - "SA1004": c.CheckTimeSleepConstant, - "SA1005": c.CheckExec, - "SA1006": c.CheckUnsafePrintf, - "SA1007": c.callChecker(checkURLsRules), - "SA1008": c.CheckCanonicalHeaderKey, - "SA1009": nil, - "SA1010": c.callChecker(checkRegexpFindAllRules), - "SA1011": c.callChecker(checkUTF8CutsetRules), - "SA1012": c.CheckNilContext, - "SA1013": c.CheckSeeker, - "SA1014": c.callChecker(checkUnmarshalPointerRules), - "SA1015": c.CheckLeakyTimeTick, - "SA1016": c.CheckUntrappableSignal, - "SA1017": c.callChecker(checkUnbufferedSignalChanRules), - "SA1018": c.callChecker(checkStringsReplaceZeroRules), - 
"SA1019": c.CheckDeprecated, - "SA1020": c.callChecker(checkListenAddressRules), - "SA1021": c.callChecker(checkBytesEqualIPRules), - "SA1022": nil, - "SA1023": c.CheckWriterBufferModified, - "SA1024": c.callChecker(checkUniqueCutsetRules), - - "SA2000": c.CheckWaitgroupAdd, - "SA2001": c.CheckEmptyCriticalSection, - "SA2002": c.CheckConcurrentTesting, - "SA2003": c.CheckDeferLock, - - "SA3000": c.CheckTestMainExit, - "SA3001": c.CheckBenchmarkN, - - "SA4000": c.CheckLhsRhsIdentical, - "SA4001": c.CheckIneffectiveCopy, - "SA4002": c.CheckDiffSizeComparison, - "SA4003": c.CheckUnsignedComparison, - "SA4004": c.CheckIneffectiveLoop, - "SA4005": nil, - "SA4006": c.CheckUnreadVariableValues, - // "SA4007": c.CheckPredeterminedBooleanExprs, - "SA4007": nil, - "SA4008": c.CheckLoopCondition, - "SA4009": c.CheckArgOverwritten, - "SA4010": c.CheckIneffectiveAppend, - "SA4011": c.CheckScopedBreak, - "SA4012": c.CheckNaNComparison, - "SA4013": c.CheckDoubleNegation, - "SA4014": c.CheckRepeatedIfElse, - "SA4015": c.callChecker(checkMathIntRules), - "SA4016": c.CheckSillyBitwiseOps, - "SA4017": c.CheckPureFunctions, - "SA4018": c.CheckSelfAssignment, - "SA4019": c.CheckDuplicateBuildConstraints, - - "SA5000": c.CheckNilMaps, - "SA5001": c.CheckEarlyDefer, - "SA5002": c.CheckInfiniteEmptyLoop, - "SA5003": c.CheckDeferInInfiniteLoop, - "SA5004": c.CheckLoopEmptyDefault, - "SA5005": c.CheckCyclicFinalizer, - // "SA5006": c.CheckSliceOutOfBounds, - "SA5007": c.CheckInfiniteRecursion, - - "SA6000": c.callChecker(checkRegexpMatchLoopRules), - "SA6001": c.CheckMapBytesKey, - "SA6002": c.callChecker(checkSyncPoolValueRules), - "SA6003": c.CheckRangeStringRunes, - "SA6004": nil, - - "SA9000": nil, - "SA9001": c.CheckDubiousDeferInChannelRangeLoop, - "SA9002": c.CheckNonOctalFileMode, - "SA9003": c.CheckEmptyBranch, - "SA9004": c.CheckMissingEnumTypesInDeclaration, - } -} - -func (c *Checker) filterGenerated(files []*ast.File) []*ast.File { - if c.CheckGenerated { - return files - } - 
var out []*ast.File - for _, f := range files { - if !lint.IsGenerated(f) { - out = append(out, f) - } - } - return out -} - -func (c *Checker) deprecateObject(m map[types.Object]string, prog *lint.Program, obj types.Object) { - if obj.Pkg() == nil { - return - } +func (c *Checker) Checks() []lint.Check { + return []lint.Check{ + {ID: "SA1000", FilterGenerated: false, Fn: c.callChecker(checkRegexpRules)}, + {ID: "SA1001", FilterGenerated: false, Fn: c.CheckTemplate}, + {ID: "SA1002", FilterGenerated: false, Fn: c.callChecker(checkTimeParseRules)}, + {ID: "SA1003", FilterGenerated: false, Fn: c.callChecker(checkEncodingBinaryRules)}, + {ID: "SA1004", FilterGenerated: false, Fn: c.CheckTimeSleepConstant}, + {ID: "SA1005", FilterGenerated: false, Fn: c.CheckExec}, + {ID: "SA1006", FilterGenerated: false, Fn: c.CheckUnsafePrintf}, + {ID: "SA1007", FilterGenerated: false, Fn: c.callChecker(checkURLsRules)}, + {ID: "SA1008", FilterGenerated: false, Fn: c.CheckCanonicalHeaderKey}, + {ID: "SA1010", FilterGenerated: false, Fn: c.callChecker(checkRegexpFindAllRules)}, + {ID: "SA1011", FilterGenerated: false, Fn: c.callChecker(checkUTF8CutsetRules)}, + {ID: "SA1012", FilterGenerated: false, Fn: c.CheckNilContext}, + {ID: "SA1013", FilterGenerated: false, Fn: c.CheckSeeker}, + {ID: "SA1014", FilterGenerated: false, Fn: c.callChecker(checkUnmarshalPointerRules)}, + {ID: "SA1015", FilterGenerated: false, Fn: c.CheckLeakyTimeTick}, + {ID: "SA1016", FilterGenerated: false, Fn: c.CheckUntrappableSignal}, + {ID: "SA1017", FilterGenerated: false, Fn: c.callChecker(checkUnbufferedSignalChanRules)}, + {ID: "SA1018", FilterGenerated: false, Fn: c.callChecker(checkStringsReplaceZeroRules)}, + {ID: "SA1019", FilterGenerated: false, Fn: c.CheckDeprecated}, + {ID: "SA1020", FilterGenerated: false, Fn: c.callChecker(checkListenAddressRules)}, + {ID: "SA1021", FilterGenerated: false, Fn: c.callChecker(checkBytesEqualIPRules)}, + {ID: "SA1023", FilterGenerated: false, Fn: 
c.CheckWriterBufferModified}, + {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules)}, + {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue}, + + {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd}, + {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection}, + {ID: "SA2002", FilterGenerated: false, Fn: c.CheckConcurrentTesting}, + {ID: "SA2003", FilterGenerated: false, Fn: c.CheckDeferLock}, + + {ID: "SA3000", FilterGenerated: false, Fn: c.CheckTestMainExit}, + {ID: "SA3001", FilterGenerated: false, Fn: c.CheckBenchmarkN}, + + {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical}, + {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy}, + {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison}, + {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison}, + {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop}, + {ID: "SA4006", FilterGenerated: false, Fn: c.CheckUnreadVariableValues}, + {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition}, + {ID: "SA4009", FilterGenerated: false, Fn: c.CheckArgOverwritten}, + {ID: "SA4010", FilterGenerated: false, Fn: c.CheckIneffectiveAppend}, + {ID: "SA4011", FilterGenerated: false, Fn: c.CheckScopedBreak}, + {ID: "SA4012", FilterGenerated: false, Fn: c.CheckNaNComparison}, + {ID: "SA4013", FilterGenerated: false, Fn: c.CheckDoubleNegation}, + {ID: "SA4014", FilterGenerated: false, Fn: c.CheckRepeatedIfElse}, + {ID: "SA4015", FilterGenerated: false, Fn: c.callChecker(checkMathIntRules)}, + {ID: "SA4016", FilterGenerated: false, Fn: c.CheckSillyBitwiseOps}, + {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions}, + {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment}, + {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints}, + {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases}, + + {ID: "SA5000", FilterGenerated: 
false, Fn: c.CheckNilMaps}, + {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer}, + {ID: "SA5002", FilterGenerated: false, Fn: c.CheckInfiniteEmptyLoop}, + {ID: "SA5003", FilterGenerated: false, Fn: c.CheckDeferInInfiniteLoop}, + {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault}, + {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer}, + {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion}, + + {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules)}, + {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey}, + {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules)}, + {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes}, + // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp}, + {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison}, + + {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop}, + {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode}, + {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch}, + {ID: "SA9004", FilterGenerated: false, Fn: c.CheckMissingEnumTypesInDeclaration}, + } + + // "SA5006": c.CheckSliceOutOfBounds, + // "SA4007": c.CheckPredeterminedBooleanExprs, +} + +func (c *Checker) findDeprecated(prog *lint.Program) { + var docs []*ast.CommentGroup + var names []*ast.Ident - f := prog.File(obj) - if f == nil { - return + doDocs := func(pkg *packages.Package, names []*ast.Ident, docs []*ast.CommentGroup) { + var alt string + for _, doc := range docs { + if doc == nil { + continue + } + parts := strings.Split(doc.Text(), "\n\n") + last := parts[len(parts)-1] + if !strings.HasPrefix(last, "Deprecated: ") { + continue + } + alt = last[len("Deprecated: "):] + alt = strings.Replace(alt, "\n", " ", -1) + break + } + if alt == "" { + return + } + + for _, name := range names { + obj := pkg.TypesInfo.ObjectOf(name) + 
c.deprecatedObjs[obj] = alt + } } - msg := c.deprecationMessage(f, prog.Prog.Fset, obj) - if msg != "" { - m[obj] = msg + + for _, pkg := range prog.AllPackages { + for _, f := range pkg.Syntax { + fn := func(node ast.Node) bool { + if node == nil { + return true + } + var ret bool + switch node := node.(type) { + case *ast.GenDecl: + switch node.Tok { + case token.TYPE, token.CONST, token.VAR: + docs = append(docs, node.Doc) + return true + default: + return false + } + case *ast.FuncDecl: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = false + case *ast.TypeSpec: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = true + case *ast.ValueSpec: + docs = append(docs, node.Doc) + names = node.Names + ret = false + case *ast.File: + return true + case *ast.StructType: + for _, field := range node.Fields.List { + doDocs(pkg, field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + case *ast.InterfaceType: + for _, field := range node.Methods.List { + doDocs(pkg, field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + default: + return false + } + if len(names) == 0 || len(docs) == 0 { + return ret + } + doDocs(pkg, names, docs) + + docs = docs[:0] + names = nil + return ret + } + ast.Inspect(f, fn) + } } } func (c *Checker) Init(prog *lint.Program) { wg := &sync.WaitGroup{} - wg.Add(3) + wg.Add(2) go func() { c.funcDescs = functions.NewDescriptions(prog.SSA) for _, fn := range prog.AllFunctions { @@ -334,97 +399,15 @@ func (c *Checker) Init(prog *lint.Program) { wg.Done() }() - go func() { - c.nodeFns = lint.NodeFns(prog.Packages) - wg.Done() - }() - go func() { c.deprecatedObjs = map[types.Object]string{} - for _, ssapkg := range prog.SSA.AllPackages() { - ssapkg := ssapkg - for _, member := range ssapkg.Members { - obj := member.Object() - if obj == nil { - continue - } - c.deprecateObject(c.deprecatedObjs, prog, obj) - if typ, ok := obj.Type().(*types.Named); ok { - for i := 0; i < typ.NumMethods(); 
i++ { - meth := typ.Method(i) - c.deprecateObject(c.deprecatedObjs, prog, meth) - } - - if iface, ok := typ.Underlying().(*types.Interface); ok { - for i := 0; i < iface.NumExplicitMethods(); i++ { - meth := iface.ExplicitMethod(i) - c.deprecateObject(c.deprecatedObjs, prog, meth) - } - } - } - if typ, ok := obj.Type().Underlying().(*types.Struct); ok { - n := typ.NumFields() - for i := 0; i < n; i++ { - // FIXME(dh): This code will not find deprecated - // fields in anonymous structs. - field := typ.Field(i) - c.deprecateObject(c.deprecatedObjs, prog, field) - } - } - } - } + c.findDeprecated(prog) wg.Done() }() wg.Wait() } -func (c *Checker) deprecationMessage(file *ast.File, fset *token.FileSet, obj types.Object) (message string) { - pos := obj.Pos() - path, _ := astutil.PathEnclosingInterval(file, pos, pos) - if len(path) <= 2 { - return "" - } - var docs []*ast.CommentGroup - switch n := path[1].(type) { - case *ast.FuncDecl: - docs = append(docs, n.Doc) - case *ast.Field: - docs = append(docs, n.Doc) - case *ast.ValueSpec: - docs = append(docs, n.Doc) - if len(path) >= 3 { - if n, ok := path[2].(*ast.GenDecl); ok { - docs = append(docs, n.Doc) - } - } - case *ast.TypeSpec: - docs = append(docs, n.Doc) - if len(path) >= 3 { - if n, ok := path[2].(*ast.GenDecl); ok { - docs = append(docs, n.Doc) - } - } - default: - return "" - } - - for _, doc := range docs { - if doc == nil { - continue - } - parts := strings.Split(doc.Text(), "\n\n") - last := parts[len(parts)-1] - if !strings.HasPrefix(last, "Deprecated: ") { - continue - } - alt := last[len("Deprecated: "):] - alt = strings.Replace(alt, "\n", " ", -1) - return alt - } - return "" -} - func (c *Checker) isInLoop(b *ssa.BasicBlock) bool { sets := c.funcDescs.Get(b.Parent()).Loops for _, set := range sets { @@ -469,7 +452,7 @@ func applyStdlibKnowledge(fn *ssa.Function) { if !ok { continue } - if !lint.IsCallTo(call.Common(), "time.Tick") { + if !IsCallTo(call.Common(), "time.Tick") { continue } ex, ok := 
(*instrs[i+1]).(*ssa.Extract) @@ -491,7 +474,8 @@ func applyStdlibKnowledge(fn *ssa.Function) { } func hasType(j *lint.Job, expr ast.Expr, name string) bool { - return types.TypeString(j.Program.Info.TypeOf(expr), nil) == name + T := TypeOf(j, expr) + return IsType(T, name) } func (c *Checker) CheckUntrappableSignal(j *lint.Job) { @@ -500,7 +484,7 @@ func (c *Checker) CheckUntrappableSignal(j *lint.Job) { if !ok { return true } - if !j.IsCallToAnyAST(call, + if !IsCallToAnyAST(j, call, "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { return true } @@ -510,10 +494,10 @@ func (c *Checker) CheckUntrappableSignal(j *lint.Job) { } if isName(j, arg, "os.Kill") || isName(j, arg, "syscall.SIGKILL") { - j.Errorf(arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", j.Render(arg)) + j.Errorf(arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(j, arg)) } if isName(j, arg, "syscall.SIGSTOP") { - j.Errorf(arg, "%s signal cannot be trapped", j.Render(arg)) + j.Errorf(arg, "%s signal cannot be trapped", Render(j, arg)) } } return true @@ -530,23 +514,23 @@ func (c *Checker) CheckTemplate(j *lint.Job) { return true } var kind string - if j.IsCallToAST(call, "(*text/template.Template).Parse") { + if IsCallToAST(j, call, "(*text/template.Template).Parse") { kind = "text" - } else if j.IsCallToAST(call, "(*html/template.Template).Parse") { + } else if IsCallToAST(j, call, "(*html/template.Template).Parse") { kind = "html" } else { return true } sel := call.Fun.(*ast.SelectorExpr) - if !j.IsCallToAST(sel.X, "text/template.New") && - !j.IsCallToAST(sel.X, "html/template.New") { + if !IsCallToAST(j, sel.X, "text/template.New") && + !IsCallToAST(j, sel.X, "html/template.New") { // TODO(dh): this is a cheap workaround for templates with // different delims. 
A better solution with less false // negatives would use data flow analysis to see where the // template comes from and where it has been return true } - s, ok := j.ExprToString(call.Args[0]) + s, ok := ExprToString(j, call.Args[Arg("(*text/template.Template).Parse.text")]) if !ok { return true } @@ -560,7 +544,7 @@ func (c *Checker) CheckTemplate(j *lint.Job) { if err != nil { // TODO(dominikh): whitelist other parse errors, if any if strings.Contains(err.Error(), "unexpected") { - j.Errorf(call.Args[0], "%s", err) + j.Errorf(call.Args[Arg("(*text/template.Template).Parse.text")], "%s", err) } } return true @@ -576,10 +560,10 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { if !ok { return true } - if !j.IsCallToAST(call, "time.Sleep") { + if !IsCallToAST(j, call, "time.Sleep") { return true } - lit, ok := call.Args[0].(*ast.BasicLit) + lit, ok := call.Args[Arg("time.Sleep.d")].(*ast.BasicLit) if !ok { return true } @@ -597,7 +581,8 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { if n != 1 { recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n) } - j.Errorf(call.Args[0], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) + j.Errorf(call.Args[Arg("time.Sleep.d")], + "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) return true } for _, f := range j.Program.Files { @@ -630,13 +615,13 @@ func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { if !ok { return true } - fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) + fn, ok := ObjectOf(j, sel.Sel).(*types.Func) if !ok { return true } if fn.FullName() == "(*sync.WaitGroup).Add" { j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", - j.Render(stmt)) + Render(j, stmt)) } return true } @@ -669,14 +654,21 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { // is dynamic and the loop might terminate. Similarly for // channel receives. 
- if loop.Cond != nil && hasSideEffects(loop.Cond) { - return true - } - - j.Errorf(loop, "this loop will spin, using 100%% CPU") if loop.Cond != nil { + if hasSideEffects(loop.Cond) { + return true + } + if ident, ok := loop.Cond.(*ast.Ident); ok { + if k, ok := ObjectOf(j, ident).(*types.Const); ok { + if !constant.BoolVal(k.Val()) { + // don't flag `for false {}` loops. They're a debug aid. + return true + } + } + } j.Errorf(loop, "loop condition never changes or has a race condition") } + j.Errorf(loop, "this loop will spin, using 100%% CPU") return true } @@ -733,7 +725,7 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { if !ok { return true } - typ := j.Program.Info.TypeOf(loop.X) + typ := TypeOf(j, loop.X) _, ok = typ.Underlying().(*types.Chan) if !ok { return true @@ -762,7 +754,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { return true } - arg := j.Program.Info.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) + arg := ObjectOf(j, node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) callsRun := false fn2 := func(node ast.Node) bool { call, ok := node.(*ast.CallExpr) @@ -777,7 +769,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { if !ok { return true } - if arg != j.Program.Info.ObjectOf(ident) { + if arg != ObjectOf(j, ident) { return true } if sel.Sel.Name == "Run" { @@ -790,7 +782,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { callsExit := false fn3 := func(node ast.Node) bool { - if j.IsCallToAST(node, "os.Exit") { + if IsCallToAST(j, node, "os.Exit") { callsExit = true return false } @@ -822,8 +814,7 @@ func isTestMain(j *lint.Job, node ast.Node) bool { if len(arg.Names) != 1 { return false } - typ := j.Program.Info.TypeOf(arg.Type) - return typ != nil && typ.String() == "*testing.M" + return IsOfType(j, arg.Type, "*testing.M") } func (c *Checker) CheckExec(j *lint.Job) { @@ -832,17 +823,18 @@ func (c *Checker) CheckExec(j *lint.Job) { if !ok { return true } - if !j.IsCallToAST(call, 
"os/exec.Command") { + if !IsCallToAST(j, call, "os/exec.Command") { return true } - val, ok := j.ExprToString(call.Args[0]) + val, ok := ExprToString(j, call.Args[Arg("os/exec.Command.name")]) if !ok { return true } if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { return true } - j.Errorf(call.Args[0], "first argument to exec.Command looks like a shell command, but a program name or path are expected") + j.Errorf(call.Args[Arg("os/exec.Command.name")], + "first argument to exec.Command looks like a shell command, but a program name or path are expected") return true } for _, f := range j.Program.Files { @@ -880,7 +872,7 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { } switch op.Op { case token.EQL, token.NEQ: - if basic, ok := j.Program.Info.TypeOf(op.X).(*types.Basic); ok { + if basic, ok := TypeOf(j, op.X).Underlying().(*types.Basic); ok { if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { // f == f and f != f might be used to check for NaN return true @@ -894,7 +886,7 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { return true } - if j.Render(op.X) != j.Render(op.Y) { + if Render(j, op.X) != Render(j, op.Y) { return true } j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) @@ -972,18 +964,24 @@ func (c *Checker) CheckUnsafePrintf(j *lint.Job) { if !ok { return true } - if !j.IsCallToAnyAST(call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { + var arg int + if IsCallToAnyAST(j, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { + arg = Arg("fmt.Printf.format") + } else if IsCallToAnyAST(j, call, "fmt.Fprintf") { + arg = Arg("fmt.Fprintf.format") + } else { return true } - if len(call.Args) != 1 { + if len(call.Args) != arg+1 { return true } - switch call.Args[0].(type) { + switch call.Args[arg].(type) { case *ast.CallExpr, *ast.Ident: default: return true } - j.Errorf(call.Args[0], "printf-style function with dynamic first argument and 
no further arguments should use print-style function instead") + j.Errorf(call.Args[arg], + "printf-style function with dynamic format string and no further arguments should use print-style function instead") return true } for _, f := range j.Program.Files { @@ -1021,7 +1019,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if !ok { continue } - sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) + sig, ok := TypeOf(j, call.Fun).(*types.Signature) if !ok { continue } @@ -1056,7 +1054,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if sel.Sel.Name != "Close" { continue } - j.Errorf(def, "should check returned error before deferring %s", j.Render(def.Call)) + j.Errorf(def, "should check returned error before deferring %s", Render(j, def.Call)) } return true } @@ -1100,7 +1098,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { return nil, "", false } - fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) + fn, ok := ObjectOf(j, sel.Sel).(*types.Func) if !ok { return nil, "", false } @@ -1124,7 +1122,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { sel1, method1, ok1 := mutexParams(block.List[i]) sel2, method2, ok2 := mutexParams(block.List[i+1]) - if !ok1 || !ok2 || j.Render(sel1) != j.Render(sel2) { + if !ok1 || !ok2 || Render(j, sel1) != Render(j, sel2) { continue } if (method1 == "Lock" && method2 == "Unlock") || @@ -1221,7 +1219,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !hasType(j, op.X, "net/http.Header") { return true } - s, ok := j.ExprToString(op.Index) + s, ok := ExprToString(j, op.Index) if !ok { return true } @@ -1255,7 +1253,7 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) { if !hasType(j, sel.X, "*testing.B") { return true } - j.Errorf(assign, "should not assign to %s", j.Render(sel)) + j.Errorf(assign, "should not assign to %s", Render(j, sel)) return true } for _, f := range j.Program.Files { @@ -1264,20 +1262,15 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) { } func (c 
*Checker) CheckUnreadVariableValues(j *lint.Job) { - fn := func(node ast.Node) bool { - switch node.(type) { - case *ast.FuncDecl, *ast.FuncLit: - default: - return true - } - - ssafn := c.nodeFns[node] - if ssafn == nil { - return true + for _, ssafn := range j.Program.InitialFunctions { + if IsExample(ssafn) { + continue } - if lint.IsExample(ssafn) { - return true + node := ssafn.Syntax() + if node == nil { + continue } + ast.Inspect(node, func(node ast.Node) bool { assign, ok := node.(*ast.AssignStmt) if !ok { @@ -1304,7 +1297,7 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { if exrefs == nil { continue } - if len(lint.FilterDebug(*exrefs)) == 0 { + if len(FilterDebug(*exrefs)) == 0 { lhs := assign.Lhs[ex.Index] if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { continue @@ -1329,16 +1322,12 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { // TODO investigate why refs can be nil return true } - if len(lint.FilterDebug(*refs)) == 0 { + if len(FilterDebug(*refs)) == 0 { j.Errorf(lhs, "this value of %s is never used", lhs) } } return true }) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } } @@ -1407,30 +1396,87 @@ func (c *Checker) CheckNilMaps(j *lint.Job) { } } -func (c *Checker) CheckUnsignedComparison(j *lint.Job) { +func (c *Checker) CheckExtremeComparison(j *lint.Job) { + isobj := func(expr ast.Expr, name string) bool { + sel, ok := expr.(*ast.SelectorExpr) + if !ok { + return false + } + return IsObject(ObjectOf(j, sel.Sel), name) + } + fn := func(node ast.Node) bool { expr, ok := node.(*ast.BinaryExpr) if !ok { return true } - tx := j.Program.Info.TypeOf(expr.X) + tx := TypeOf(j, expr.X) basic, ok := tx.Underlying().(*types.Basic) if !ok { return true } - if (basic.Info() & types.IsUnsigned) == 0 { - return true - } - lit, ok := expr.Y.(*ast.BasicLit) - if !ok || lit.Value != "0" { - return true - } - switch expr.Op { - case token.GEQ: - j.Errorf(expr, "unsigned values are always >= 
0") - case token.LSS: - j.Errorf(expr, "unsigned values are never < 0") + + var max string + var min string + + switch basic.Kind() { + case types.Uint8: + max = "math.MaxUint8" + case types.Uint16: + max = "math.MaxUint16" + case types.Uint32: + max = "math.MaxUint32" + case types.Uint64: + max = "math.MaxUint64" + case types.Uint: + max = "math.MaxUint64" + + case types.Int8: + min = "math.MinInt8" + max = "math.MaxInt8" + case types.Int16: + min = "math.MinInt16" + max = "math.MaxInt16" + case types.Int32: + min = "math.MinInt32" + max = "math.MaxInt32" + case types.Int64: + min = "math.MinInt64" + max = "math.MaxInt64" + case types.Int: + min = "math.MinInt64" + max = "math.MaxInt64" + } + + if (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.Y, max) || + (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.X, max) { + j.Errorf(expr, "no value of type %s is greater than %s", basic, max) + } + if expr.Op == token.LEQ && isobj(expr.Y, max) || + expr.Op == token.GEQ && isobj(expr.X, max) { + j.Errorf(expr, "every value of type %s is <= %s", basic, max) + } + + if (basic.Info() & types.IsUnsigned) != 0 { + if (expr.Op == token.LSS || expr.Op == token.LEQ) && IsIntLiteral(expr.Y, "0") || + (expr.Op == token.GTR || expr.Op == token.GEQ) && IsIntLiteral(expr.X, "0") { + j.Errorf(expr, "no value of type %s is less than 0", basic) + } + if expr.Op == token.GEQ && IsIntLiteral(expr.Y, "0") || + expr.Op == token.LEQ && IsIntLiteral(expr.X, "0") { + j.Errorf(expr, "every value of type %s is >= 0", basic) + } + } else { + if (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.Y, min) || + (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.X, min) { + j.Errorf(expr, "no value of type %s is less than %s", basic, min) + } + if expr.Op == token.GEQ && isobj(expr.Y, min) || + expr.Op == token.LEQ && isobj(expr.X, min) { + j.Errorf(expr, "every value of type %s is >= %s", basic, min) + } } + return true } for _, f := range j.Program.Files { 
@@ -1480,144 +1526,136 @@ func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ss } func (c *Checker) CheckLoopCondition(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok { - return true - } - if loop.Init == nil || loop.Cond == nil || loop.Post == nil { - return true - } - init, ok := loop.Init.(*ast.AssignStmt) - if !ok || len(init.Lhs) != 1 || len(init.Rhs) != 1 { - return true - } - cond, ok := loop.Cond.(*ast.BinaryExpr) - if !ok { - return true - } - x, ok := cond.X.(*ast.Ident) - if !ok { - return true - } - lhs, ok := init.Lhs[0].(*ast.Ident) - if !ok { - return true - } - if x.Obj != lhs.Obj { - return true - } - if _, ok := loop.Post.(*ast.IncDecStmt); !ok { - return true - } - - ssafn := c.nodeFns[cond] - if ssafn == nil { - return true - } - v, isAddr := ssafn.ValueForExpr(cond.X) - if v == nil || isAddr { - return true - } - switch v := v.(type) { - case *ssa.Phi: - ops := v.Operands(nil) - if len(ops) != 2 { + for _, ssafn := range j.Program.InitialFunctions { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.ForStmt) + if !ok { + return true + } + if loop.Init == nil || loop.Cond == nil || loop.Post == nil { + return true + } + init, ok := loop.Init.(*ast.AssignStmt) + if !ok || len(init.Lhs) != 1 || len(init.Rhs) != 1 { + return true + } + cond, ok := loop.Cond.(*ast.BinaryExpr) + if !ok { return true } - _, ok := (*ops[0]).(*ssa.Const) + x, ok := cond.X.(*ast.Ident) if !ok { return true } - sigma, ok := (*ops[1]).(*ssa.Sigma) + lhs, ok := init.Lhs[0].(*ast.Ident) if !ok { return true } - if sigma.X != v { + if x.Obj != lhs.Obj { + return true + } + if _, ok := loop.Post.(*ast.IncDecStmt); !ok { + return true + } + + v, isAddr := ssafn.ValueForExpr(cond.X) + if v == nil || isAddr { + return true + } + switch v := v.(type) { + case *ssa.Phi: + ops := v.Operands(nil) + if len(ops) != 2 { + return true + } + _, ok := (*ops[0]).(*ssa.Const) + if !ok { + return true + } + 
sigma, ok := (*ops[1]).(*ssa.Sigma) + if !ok { + return true + } + if sigma.X != v { + return true + } + case *ssa.UnOp: return true } - case *ssa.UnOp: + j.Errorf(cond, "variable in loop condition never changes") + return true } - j.Errorf(cond, "variable in loop condition never changes") - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) + Inspect(ssafn.Syntax(), fn) } } func (c *Checker) CheckArgOverwritten(j *lint.Job) { - fn := func(node ast.Node) bool { - var typ *ast.FuncType - var body *ast.BlockStmt - switch fn := node.(type) { - case *ast.FuncDecl: - typ = fn.Type - body = fn.Body - case *ast.FuncLit: - typ = fn.Type - body = fn.Body - } - if body == nil { - return true - } - ssafn := c.nodeFns[node] - if ssafn == nil { - return true - } - if len(typ.Params.List) == 0 { - return true - } - for _, field := range typ.Params.List { - for _, arg := range field.Names { - obj := j.Program.Info.ObjectOf(arg) - var ssaobj *ssa.Parameter - for _, param := range ssafn.Params { - if param.Object() == obj { - ssaobj = param - break + for _, ssafn := range j.Program.InitialFunctions { + fn := func(node ast.Node) bool { + var typ *ast.FuncType + var body *ast.BlockStmt + switch fn := node.(type) { + case *ast.FuncDecl: + typ = fn.Type + body = fn.Body + case *ast.FuncLit: + typ = fn.Type + body = fn.Body + } + if body == nil { + return true + } + if len(typ.Params.List) == 0 { + return true + } + for _, field := range typ.Params.List { + for _, arg := range field.Names { + obj := ObjectOf(j, arg) + var ssaobj *ssa.Parameter + for _, param := range ssafn.Params { + if param.Object() == obj { + ssaobj = param + break + } } - } - if ssaobj == nil { - continue - } - refs := ssaobj.Referrers() - if refs == nil { - continue - } - if len(lint.FilterDebug(*refs)) != 0 { - continue - } - - assigned := false - ast.Inspect(body, func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true + if ssaobj == nil { + continue + 
} + refs := ssaobj.Referrers() + if refs == nil { + continue } - for _, lhs := range assign.Lhs { - ident, ok := lhs.(*ast.Ident) + if len(FilterDebug(*refs)) != 0 { + continue + } + + assigned := false + ast.Inspect(body, func(node ast.Node) bool { + assign, ok := node.(*ast.AssignStmt) if !ok { - continue + return true } - if j.Program.Info.ObjectOf(ident) == obj { - assigned = true - return false + for _, lhs := range assign.Lhs { + ident, ok := lhs.(*ast.Ident) + if !ok { + continue + } + if ObjectOf(j, ident) == obj { + assigned = true + return false + } } + return true + }) + if assigned { + j.Errorf(arg, "argument %s is overwritten before first use", arg) } - return true - }) - if assigned { - j.Errorf(arg, "argument %s is overwritten before first use", arg) } } + return true } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) + Inspect(ssafn.Syntax(), fn) } } @@ -1661,7 +1699,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { body = node.Body loop = node case *ast.RangeStmt: - typ := j.Program.Info.TypeOf(node.X) + typ := TypeOf(j, node.X) if _, ok := typ.Underlying().(*types.Map); ok { // looping once over a map is a valid pattern for // getting an arbitrary element. 
@@ -1741,17 +1779,17 @@ func (c *Checker) CheckNilContext(j *lint.Job) { if len(call.Args) == 0 { return true } - if typ, ok := j.Program.Info.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { + if typ, ok := TypeOf(j, call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { return true } - sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) + sig, ok := TypeOf(j, call.Fun).(*types.Signature) if !ok { return true } if sig.Params().Len() == 0 { return true } - if types.TypeString(sig.Params().At(0).Type(), nil) != "context.Context" { + if !IsType(sig.Params().At(0).Type(), "context.Context") { return true } j.Errorf(call.Args[0], @@ -1779,7 +1817,7 @@ func (c *Checker) CheckSeeker(j *lint.Job) { if len(call.Args) != 2 { return true } - arg0, ok := call.Args[0].(*ast.SelectorExpr) + arg0, ok := call.Args[Arg("(io.Seeker).Seek.offset")].(*ast.SelectorExpr) if !ok { return true } @@ -1906,7 +1944,7 @@ func (c *Checker) CheckConcurrentTesting(j *lint.Job) { if recv == nil { continue } - if types.TypeString(recv.Type(), nil) != "*testing.common" { + if !IsType(recv.Type(), "*testing.common") { continue } fn, ok := call.Call.StaticCallee().Object().(*types.Func) @@ -1934,7 +1972,7 @@ func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" { continue } - arg0 := edge.Site.Common().Args[0] + arg0 := edge.Site.Common().Args[Arg("runtime.SetFinalizer.obj")] if iface, ok := arg0.(*ssa.MakeInterface); ok { arg0 = iface.X } @@ -1946,7 +1984,7 @@ func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { if !ok { continue } - arg1 := edge.Site.Common().Args[1] + arg1 := edge.Site.Common().Args[Arg("runtime.SetFinalizer.finalizer")] if iface, ok := arg1.(*ssa.MakeInterface); ok { arg1 = iface.X } @@ -1991,7 +2029,7 @@ func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { func (c *Checker) CheckDeferLock(j *lint.Job) { for _, ssafn := range j.Program.InitialFunctions { for _, 
block := range ssafn.Blocks { - instrs := lint.FilterDebug(block.Instrs) + instrs := FilterDebug(block.Instrs) if len(instrs) < 2 { continue } @@ -2000,14 +2038,14 @@ func (c *Checker) CheckDeferLock(j *lint.Job) { if !ok { continue } - if !lint.IsCallTo(call.Common(), "(*sync.Mutex).Lock") && !lint.IsCallTo(call.Common(), "(*sync.RWMutex).RLock") { + if !IsCallTo(call.Common(), "(*sync.Mutex).Lock") && !IsCallTo(call.Common(), "(*sync.RWMutex).RLock") { continue } nins, ok := instrs[i+1].(*ssa.Defer) if !ok { continue } - if !lint.IsCallTo(&nins.Call, "(*sync.Mutex).Lock") && !lint.IsCallTo(&nins.Call, "(*sync.RWMutex).RLock") { + if !IsCallTo(&nins.Call, "(*sync.Mutex).Lock") && !IsCallTo(&nins.Call, "(*sync.RWMutex).RLock") { continue } if call.Common().Args[0] != nins.Call.Args[0] { @@ -2033,7 +2071,7 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { if !ok { return false } - return lint.IsCallTo(call.Common(), "math.NaN") + return IsCallTo(call.Common(), "math.NaN") } for _, ssafn := range j.Program.InitialFunctions { for _, block := range ssafn.Blocks { @@ -2091,8 +2129,7 @@ func objectName(obj types.Object) string { } var name string if obj.Pkg() != nil && obj.Pkg().Scope().Lookup(obj.Name()) == obj { - var s string - s = obj.Pkg().Path() + s := obj.Pkg().Path() if s != "" { name += s + "." 
} @@ -2105,22 +2142,22 @@ func isName(j *lint.Job, expr ast.Expr, name string) bool { var obj types.Object switch expr := expr.(type) { case *ast.Ident: - obj = j.Program.Info.ObjectOf(expr) + obj = ObjectOf(j, expr) case *ast.SelectorExpr: - obj = j.Program.Info.ObjectOf(expr.Sel) + obj = ObjectOf(j, expr.Sel) } return objectName(obj) == name } func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { for _, ssafn := range j.Program.InitialFunctions { - if j.IsInMain(ssafn) || j.IsInTest(ssafn) { + if IsInMain(j, ssafn) || IsInTest(j, ssafn) { continue } for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { call, ok := ins.(*ssa.Call) - if !ok || !lint.IsCallTo(call.Common(), "time.Tick") { + if !ok || !IsCallTo(call.Common(), "time.Tick") { continue } if c.funcDescs.Get(call.Parent()).Infinite { @@ -2205,7 +2242,7 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { } counts := map[string]int{} for _, cond := range conds { - s := j.Render(cond) + s := Render(j, cond) counts[s]++ if counts[s] == 2 { j.Errorf(cond, "this condition occurs multiple times in this if/else if chain") @@ -2241,7 +2278,7 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { if len(path) == 0 { continue } - if node, ok := path[0].(*ast.BinaryExpr); !ok || !lint.IsZero(node.Y) { + if node, ok := path[0].(*ast.BinaryExpr); !ok || !IsZero(node.Y) { continue } @@ -2262,7 +2299,7 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { if !ok { return true } - sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) + sig, ok := TypeOf(j, call.Fun).(*types.Signature) if !ok { return true } @@ -2270,7 +2307,7 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { var args []int for i := 0; i < n; i++ { typ := sig.Params().At(i).Type() - if types.TypeString(typ, nil) == "os.FileMode" { + if IsType(typ, "os.FileMode") { args = append(args, i) } } @@ -2302,11 +2339,11 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { func (c *Checker) CheckPureFunctions(j 
*lint.Job) { fnLoop: for _, ssafn := range j.Program.InitialFunctions { - if j.IsInTest(ssafn) { + if IsInTest(j, ssafn) { params := ssafn.Signature.Params() for i := 0; i < params.Len(); i++ { param := params.At(i) - if types.TypeString(param.Type(), nil) == "*testing.B" { + if IsType(param.Type(), "*testing.B") { // Ignore discarded pure functions in code related // to benchmarks. Instead of matching BenchmarkFoo // functions, we match any function accepting a @@ -2326,7 +2363,7 @@ fnLoop: continue } refs := ins.Referrers() - if refs == nil || len(lint.FilterDebug(*refs)) > 0 { + if refs == nil || len(FilterDebug(*refs)) > 0 { continue } callee := ins.Common().StaticCallee() @@ -2343,7 +2380,7 @@ fnLoop: } func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { - obj := j.Program.Info.ObjectOf(ident) + obj := ObjectOf(j, ident) if obj.Pkg() == nil { return false, "" } @@ -2351,29 +2388,34 @@ func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { return alt != "", alt } -func (c *Checker) enclosingFunc(sel *ast.SelectorExpr) *ssa.Function { - fn := c.nodeFns[sel] - if fn == nil { - return nil - } - for fn.Parent() != nil { - fn = fn.Parent() - } - return fn -} - func (c *Checker) CheckDeprecated(j *lint.Job) { + // Selectors can appear outside of function literals, e.g. when + // declaring package level variables. 
+ + var ssafn *ssa.Function + stack := 0 fn := func(node ast.Node) bool { + if node == nil { + stack-- + } else { + stack++ + } + if stack == 1 { + ssafn = nil + } + if fn, ok := node.(*ast.FuncDecl); ok { + ssafn = j.Program.SSA.FuncValue(ObjectOf(j, fn.Name).(*types.Func)) + } sel, ok := node.(*ast.SelectorExpr) if !ok { return true } - obj := j.Program.Info.ObjectOf(sel.Sel) + obj := ObjectOf(j, sel.Sel) if obj.Pkg() == nil { return true } - nodePkg := j.NodePackage(node).Pkg + nodePkg := j.NodePackage(node).Types if nodePkg == obj.Pkg() || obj.Pkg().Path()+"_test" == nodePkg.Path() { // Don't flag stuff in our own package return true @@ -2385,19 +2427,19 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { // already in 1.0, and we're targeting 1.2, it still // makes sense to use the alternative from 1.0, to be // future-proof. - minVersion := deprecated.Stdlib[j.SelectorName(sel)].AlternativeAvailableSince - if !j.IsGoVersion(minVersion) { + minVersion := deprecated.Stdlib[SelectorName(j, sel)].AlternativeAvailableSince + if !IsGoVersion(j, minVersion) { return true } - if fn := c.enclosingFunc(sel); fn != nil { - if _, ok := c.deprecatedObjs[fn.Object()]; ok { + if ssafn != nil { + if _, ok := c.deprecatedObjs[ssafn.Object()]; ok { // functions that are deprecated may use deprecated // symbols return true } } - j.Errorf(sel, "%s is deprecated: %s", j.Render(sel), alt) + j.Errorf(sel, "%s is deprecated: %s", Render(j, sel), alt) return true } return true @@ -2470,17 +2512,6 @@ func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { } } -func unwrapFunction(val ssa.Value) *ssa.Function { - switch val := val.(type) { - case *ssa.Function: - return val - case *ssa.MakeClosure: - return val.Fn.(*ssa.Function) - default: - return nil - } -} - func shortCallName(call *ssa.CallCommon) string { if call.IsInvoke() { return "" @@ -2498,27 +2529,6 @@ func shortCallName(call *ssa.CallCommon) string { return "" } -func hasCallTo(block *ssa.BasicBlock, name 
string) bool { - for _, ins := range block.Instrs { - call, ok := ins.(*ssa.Call) - if !ok { - continue - } - if lint.IsCallTo(call.Common(), name) { - return true - } - } - return false -} - -// deref returns a pointer's element type; otherwise it returns typ. -func deref(typ types.Type) types.Type { - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return typ -} - func (c *Checker) CheckWriterBufferModified(j *lint.Job) { // TODO(dh): this might be a good candidate for taint analysis. // Taint the argument as MUST_NOT_MODIFY, then propagate that @@ -2539,7 +2549,7 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { if basic, ok := sig.Results().At(0).Type().(*types.Basic); !ok || basic.Kind() != types.Int { continue } - if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || types.TypeString(named, nil) != "error" { + if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || !IsType(named, "error") { continue } @@ -2556,7 +2566,7 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { } j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") case *ssa.Call: - if !lint.IsCallTo(ins.Common(), "append") { + if !IsCallTo(ins.Common(), "append") { continue } if ins.Common().Args[0] != ssafn.Params[1] { @@ -2582,30 +2592,35 @@ func loopedRegexp(name string) CallCheck { } func (c *Checker) CheckEmptyBranch(j *lint.Job) { - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true + for _, ssafn := range j.Program.InitialFunctions { + if ssafn.Syntax() == nil { + continue } - ssafn := c.nodeFns[node] - if lint.IsExample(ssafn) { - return true + if IsGenerated(j.File(ssafn.Syntax())) { + continue + } + if IsExample(ssafn) { + continue } - if ifstmt.Else != nil { - b, ok := ifstmt.Else.(*ast.BlockStmt) - if !ok || len(b.List) != 0 { + fn := func(node ast.Node) bool { + ifstmt, ok := node.(*ast.IfStmt) + if !ok { return true } - 
j.Errorf(ifstmt.Else, "empty branch") - } - if len(ifstmt.Body.List) != 0 { + if ifstmt.Else != nil { + b, ok := ifstmt.Else.(*ast.BlockStmt) + if !ok || len(b.List) != 0 { + return true + } + j.Errorf(ifstmt.Else, "empty branch") + } + if len(ifstmt.Body.List) != 0 { + return true + } + j.Errorf(ifstmt, "empty branch") return true } - j.Errorf(ifstmt, "empty branch") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) + Inspect(ssafn.Syntax(), fn) } } @@ -2661,7 +2676,7 @@ func (c *Checker) CheckMapBytesKey(j *lint.Job) { } func (c *Checker) CheckRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(c.nodeFns, j) + sharedcheck.CheckRangeStringRunes(j) } func (c *Checker) CheckSelfAssignment(j *lint.Job) { @@ -2674,15 +2689,15 @@ func (c *Checker) CheckSelfAssignment(j *lint.Job) { return true } for i, stmt := range assign.Lhs { - rlh := j.Render(stmt) - rrh := j.Render(assign.Rhs[i]) + rlh := Render(j, stmt) + rrh := Render(j, assign.Rhs[i]) if rlh == rrh { j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh) } } return true } - for _, f := range c.filterGenerated(j.Program.Files) { + for _, f := range j.Program.Files { ast.Inspect(f, fn) } } @@ -2706,7 +2721,7 @@ func buildTagsIdentical(s1, s2 []string) bool { } func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { - for _, f := range c.filterGenerated(job.Program.Files) { + for _, f := range job.Program.Files { constraints := buildTags(f) for i, constraint1 := range constraints { for j, constraint2 := range constraints { @@ -2723,6 +2738,39 @@ func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { } } +func (c *Checker) CheckSillyRegexp(j *lint.Job) { + // We could use the rule checking engine for this, but the + // arguments aren't really invalid. 
+ for _, fn := range j.Program.InitialFunctions { + for _, b := range fn.Blocks { + for _, ins := range b.Instrs { + call, ok := ins.(*ssa.Call) + if !ok { + continue + } + switch CallName(call.Common()) { + case "regexp.MustCompile", "regexp.Compile", "regexp.Match", "regexp.MatchReader", "regexp.MatchString": + default: + continue + } + c, ok := call.Common().Args[0].(*ssa.Const) + if !ok { + continue + } + s := constant.StringVal(c.Value) + re, err := syntax.Parse(s, 0) + if err != nil { + continue + } + if re.Op != syntax.OpLiteral && re.Op != syntax.OpEmptyMatch { + continue + } + j.Errorf(call, "regular expression does not contain any meta characters") + } + } + } +} + func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { fn := func(node ast.Node) bool { decl, ok := node.(*ast.GenDecl) @@ -2730,48 +2778,230 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { return true } if !decl.Lparen.IsValid() { - // not a parenthesised gendecl - // - // TODO(dh): do we need this check, considering we require - // decl.Specs to contain 2+ elements? 
return true } if decl.Tok != token.CONST { return true } - if len(decl.Specs) < 2 { + + groups := GroupSpecs(j, decl.Specs) + groupLoop: + for _, group := range groups { + if len(group) < 2 { + continue + } + if group[0].(*ast.ValueSpec).Type == nil { + // first constant doesn't have a type + continue groupLoop + } + for i, spec := range group { + spec := spec.(*ast.ValueSpec) + if len(spec.Names) != 1 || len(spec.Values) != 1 { + continue groupLoop + } + switch v := spec.Values[0].(type) { + case *ast.BasicLit: + case *ast.UnaryExpr: + if _, ok := v.X.(*ast.BasicLit); !ok { + continue groupLoop + } + default: + // if it's not a literal it might be typed, such as + // time.Microsecond = 1000 * Nanosecond + continue groupLoop + } + if i == 0 { + continue + } + if spec.Type != nil { + continue groupLoop + } + } + j.Errorf(group[0], "only the first constant in this group has an explicit type") + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { + for _, fn := range j.Program.InitialFunctions { + for _, block := range fn.Blocks { + for _, ins := range block.Instrs { + call, ok := ins.(*ssa.Call) + if !ok { + continue + } + if !IsCallTo(call.Common(), "(*time.Timer).Reset") { + continue + } + refs := call.Referrers() + if refs == nil { + continue + } + for _, ref := range FilterDebug(*refs) { + ifstmt, ok := ref.(*ssa.If) + if !ok { + continue + } + + found := false + for _, succ := range ifstmt.Block().Succs { + if len(succ.Preds) != 1 { + // Merge point, not a branch in the + // syntactical sense. 
+ + // FIXME(dh): this is broken for if + // statements a la "if x || y" + continue + } + ssautil.Walk(succ, func(b *ssa.BasicBlock) bool { + if !succ.Dominates(b) { + // We've reached the end of the branch + return false + } + for _, ins := range b.Instrs { + // TODO(dh): we should check that + // we're receiving from the channel of + // a time.Timer to further reduce + // false positives. Not a key + // priority, considering the rarity of + // Reset and the tiny likeliness of a + // false positive + if ins, ok := ins.(*ssa.UnOp); ok && ins.Op == token.ARROW && IsType(ins.X.Type(), "<-chan time.Time") { + found = true + return false + } + } + return true + }) + } + + if found { + j.Errorf(call, "it is not possible to use Reset's return value correctly, as there is a race condition between draining the channel and the new timer expiring") + } + } + } + } + } +} + +func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { + fn := func(node ast.Node) bool { + binExpr, ok := node.(*ast.BinaryExpr) + if !ok { return true } - if decl.Specs[0].(*ast.ValueSpec).Type == nil { - // first constant doesn't have a type + + var negative bool + switch binExpr.Op { + case token.EQL: + negative = false + case token.NEQ: + negative = true + default: return true } - for i, spec := range decl.Specs { - spec := spec.(*ast.ValueSpec) - if len(spec.Names) != 1 || len(spec.Values) != 1 { - return true - } - switch v := spec.Values[0].(type) { - case *ast.BasicLit: - case *ast.UnaryExpr: - if _, ok := v.X.(*ast.BasicLit); !ok { - return true + + const ( + lo = "strings.ToLower" + up = "strings.ToUpper" + ) + + var call string + if IsCallToAST(j, binExpr.X, lo) && IsCallToAST(j, binExpr.Y, lo) { + call = lo + } else if IsCallToAST(j, binExpr.X, up) && IsCallToAST(j, binExpr.Y, up) { + call = up + } else { + return true + } + + bang := "" + if negative { + bang = "!" 
+ } + + j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) + return true + } + + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { + // Check if T subsumes V in a type switch. T subsumes V if T is an interface and T's method set is a subset of V's method set. + subsumes := func(T, V types.Type) bool { + tIface, ok := T.Underlying().(*types.Interface) + if !ok { + return false + } + + return types.Implements(V, tIface) + } + + subsumesAny := func(Ts, Vs []types.Type) (types.Type, types.Type, bool) { + for _, T := range Ts { + for _, V := range Vs { + if subsumes(T, V) { + return T, V, true } - default: - // if it's not a literal it might be typed, such as - // time.Microsecond = 1000 * Nanosecond - return true } - if i == 0 { + } + + return nil, nil, false + } + + fn := func(node ast.Node) bool { + tsStmt, ok := node.(*ast.TypeSwitchStmt) + if !ok { + return true + } + + type ccAndTypes struct { + cc *ast.CaseClause + types []types.Type + } + + // All asserted types in the order of case clauses. + ccs := make([]ccAndTypes, 0, len(tsStmt.Body.List)) + for _, stmt := range tsStmt.Body.List { + cc, _ := stmt.(*ast.CaseClause) + + // Exclude the 'default' case. + if len(cc.List) == 0 { continue } - if spec.Type != nil { - return true + + Ts := make([]types.Type, len(cc.List)) + for i, expr := range cc.List { + Ts[i] = TypeOf(j, expr) + } + + ccs = append(ccs, ccAndTypes{cc: cc, types: Ts}) + } + + if len(ccs) <= 1 { + // Zero or one case clauses, nothing to check. + return true + } + + // Check if case clauses following cc have types that are subsumed by cc. 
+ for i, cc := range ccs[:len(ccs)-1] { + for _, next := range ccs[i+1:] { + if T, V, yes := subsumesAny(cc.types, next.types); yes { + j.Errorf(next.cc, "unreachable case clause: %s will always match before %s", T.String(), V.String()) + } } } - j.Errorf(decl, "only the first constant has an explicit type") + return true } + for _, f := range j.Program.Files { ast.Inspect(f, fn) } diff --git a/vendor/github.com/golangci/go-tools/staticcheck/rules.go b/vendor/github.com/golangci/go-tools/staticcheck/rules.go index 0767304f13e2..1e6ef8ce3e8a 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/rules.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/rules.go @@ -14,7 +14,8 @@ import ( "unicode/utf8" "github.com/golangci/go-tools/lint" - "github.com/golangci/tools/go/ssa" + . "github.com/golangci/go-tools/lint/lintdsl" + "github.com/golangci/go-tools/ssa" "github.com/golangci/go-tools/staticcheck/vrp" ) @@ -193,7 +194,7 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid: return true case types.Bool: - return j.IsGoVersion(8) + return IsGoVersion(j, 8) } return false case *types.Struct: @@ -294,7 +295,7 @@ func ValidHostPort(v Value) bool { // ConvertedFrom reports whether value v was converted from type typ. 
func ConvertedFrom(v Value, typ string) bool { change, ok := v.Value.(*ssa.ChangeType) - return ok && types.TypeString(change.X.Type(), nil) == typ + return ok && IsType(change.X.Type(), typ) } func UniqueStringCutset(v Value) bool { diff --git a/vendor/github.com/golangci/go-tools/staticcheck/vrp/channel.go b/vendor/github.com/golangci/go-tools/staticcheck/vrp/channel.go index 7d9c9b19e7ae..8f8a8fdfdb3c 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/vrp/channel.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/vrp/channel.go @@ -3,7 +3,7 @@ package vrp import ( "fmt" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) type ChannelInterval struct { @@ -54,10 +54,10 @@ func (c *MakeChannelConstraint) Operands() []ssa.Value { return []ssa.Valu func (c *ChannelChangeTypeConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } func (c *MakeChannelConstraint) String() string { - return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name, c.Buffer.Name()) + return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name(), c.Buffer.Name()) } func (c *ChannelChangeTypeConstraint) String() string { - return fmt.Sprintf("%s = changetype(%s)", c.Y().Name, c.X.Name()) + return fmt.Sprintf("%s = changetype(%s)", c.Y().Name(), c.X.Name()) } func (c *MakeChannelConstraint) Eval(g *Graph) Range { diff --git a/vendor/github.com/golangci/go-tools/staticcheck/vrp/int.go b/vendor/github.com/golangci/go-tools/staticcheck/vrp/int.go index bf1afaee382d..630965930e40 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/vrp/int.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/vrp/int.go @@ -6,7 +6,7 @@ import ( "go/types" "math/big" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) type Zs []Z diff --git a/vendor/github.com/golangci/go-tools/staticcheck/vrp/slice.go b/vendor/github.com/golangci/go-tools/staticcheck/vrp/slice.go index 
fbd451a92cb2..75ab9465f31c 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/vrp/slice.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/vrp/slice.go @@ -7,7 +7,7 @@ import ( "fmt" "go/types" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) type SliceInterval struct { diff --git a/vendor/github.com/golangci/go-tools/staticcheck/vrp/string.go b/vendor/github.com/golangci/go-tools/staticcheck/vrp/string.go index 0b1ac4fba436..42e556ba77af 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/vrp/string.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/vrp/string.go @@ -5,7 +5,7 @@ import ( "go/token" "go/types" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) type StringInterval struct { diff --git a/vendor/github.com/golangci/go-tools/staticcheck/vrp/vrp.go b/vendor/github.com/golangci/go-tools/staticcheck/vrp/vrp.go index 021f24e87659..d05c43e44ea4 100644 --- a/vendor/github.com/golangci/go-tools/staticcheck/vrp/vrp.go +++ b/vendor/github.com/golangci/go-tools/staticcheck/vrp/vrp.go @@ -12,7 +12,7 @@ import ( "sort" "strings" - "github.com/golangci/tools/go/ssa" + "github.com/golangci/go-tools/ssa" ) type Future interface { diff --git a/vendor/github.com/golangci/go-tools/stylecheck/lint.go b/vendor/github.com/golangci/go-tools/stylecheck/lint.go new file mode 100644 index 000000000000..41fec19dd1a5 --- /dev/null +++ b/vendor/github.com/golangci/go-tools/stylecheck/lint.go @@ -0,0 +1,643 @@ +package stylecheck // import "github.com/golangci/go-tools/stylecheck" + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/golangci/go-tools/lint" + . 
"github.com/golangci/go-tools/lint/lintdsl" + "github.com/golangci/go-tools/ssa" + + "golang.org/x/tools/go/types/typeutil" +) + +type Checker struct { + CheckGenerated bool +} + +func NewChecker() *Checker { + return &Checker{} +} + +func (*Checker) Name() string { return "stylecheck" } +func (*Checker) Prefix() string { return "ST" } +func (c *Checker) Init(prog *lint.Program) {} + +func (c *Checker) Checks() []lint.Check { + return []lint.Check{ + {ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment}, + {ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports}, + // {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports}, + {ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames}, + // {ID: "ST1004", FilterGenerated: false, Fn: nil, }, + {ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings}, + {ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames}, + // {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec}, + {ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn}, + // {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn}, + // {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg}, + {ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames}, + {ID: "ST1012", FilterGenerated: false, Fn: c.CheckErrorVarNames}, + {ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes}, + {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder}, + {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical}, + {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions}, + } +} + +func (c *Checker) CheckPackageComment(j *lint.Job) { + // - At least one file in a non-main package should have a package comment + // + // - The comment should be of the form + // "Package x ...". This has a slight potential for false + // positives, as multiple files can have package comments, in + // which case they get appended. 
But that doesn't happen a lot in + // the real world. + + for _, pkg := range j.Program.InitialPackages { + if pkg.Name == "main" { + continue + } + hasDocs := false + for _, f := range pkg.Syntax { + if IsInTest(j, f) { + continue + } + if f.Doc != nil && len(f.Doc.List) > 0 { + hasDocs = true + prefix := "Package " + f.Name.Name + " " + if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { + j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix) + } + f.Doc.Text() + } + } + + if !hasDocs { + for _, f := range pkg.Syntax { + if IsInTest(j, f) { + continue + } + j.Errorf(f, "at least one file in a package should have a package comment") + } + } + } +} + +func (c *Checker) CheckDotImports(j *lint.Job) { + for _, pkg := range j.Program.InitialPackages { + for _, f := range pkg.Syntax { + imports: + for _, imp := range f.Imports { + path := imp.Path.Value + path = path[1 : len(path)-1] + for _, w := range pkg.Config.DotImportWhitelist { + if w == path { + continue imports + } + } + + if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) { + j.Errorf(imp, "should not use dot imports") + } + } + } + } +} + +func (c *Checker) CheckBlankImports(j *lint.Job) { + fset := j.Program.Fset() + for _, f := range j.Program.Files { + if IsInMain(j, f) || IsInTest(j, f) { + continue + } + + // Collect imports of the form `import _ "foo"`, i.e. with no + // parentheses, as their comment will be associated with the + // (paren-free) GenDecl, not the import spec itself. + // + // We don't directly process the GenDecl so that we can + // correctly handle the following: + // + // import _ "foo" + // import _ "bar" + // + // where only the first import should get flagged. 
+ skip := map[ast.Spec]bool{} + ast.Inspect(f, func(node ast.Node) bool { + switch node := node.(type) { + case *ast.File: + return true + case *ast.GenDecl: + if node.Tok != token.IMPORT { + return false + } + if node.Lparen == token.NoPos && node.Doc != nil { + skip[node.Specs[0]] = true + } + return false + } + return false + }) + for i, imp := range f.Imports { + pos := fset.Position(imp.Pos()) + + if !IsBlank(imp.Name) { + continue + } + // Only flag the first blank import in a group of imports, + // or don't flag any of them, if the first one is + // commented + if i > 0 { + prev := f.Imports[i-1] + prevPos := fset.Position(prev.Pos()) + if pos.Line-1 == prevPos.Line && IsBlank(prev.Name) { + continue + } + } + + if imp.Doc == nil && imp.Comment == nil && !skip[imp] { + j.Errorf(imp, "a blank import should be only in a main or test package, or have a comment justifying it") + } + } + } +} + +func (c *Checker) CheckIncDec(j *lint.Job) { + // TODO(dh): this can be noisy for function bodies that look like this: + // x += 3 + // ... + // x += 2 + // ... 
+ // x += 1 + fn := func(node ast.Node) bool { + assign, ok := node.(*ast.AssignStmt) + if !ok || (assign.Tok != token.ADD_ASSIGN && assign.Tok != token.SUB_ASSIGN) { + return true + } + if (len(assign.Lhs) != 1 || len(assign.Rhs) != 1) || + !IsIntLiteral(assign.Rhs[0], "1") { + return true + } + + suffix := "" + switch assign.Tok { + case token.ADD_ASSIGN: + suffix = "++" + case token.SUB_ASSIGN: + suffix = "--" + } + + j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckErrorReturn(j *lint.Job) { +fnLoop: + for _, fn := range j.Program.InitialFunctions { + sig := fn.Type().(*types.Signature) + rets := sig.Results() + if rets == nil || rets.Len() < 2 { + continue + } + + if rets.At(rets.Len()-1).Type() == types.Universe.Lookup("error").Type() { + // Last return type is error. If the function also returns + // errors in other positions, that's fine. + continue + } + for i := rets.Len() - 2; i >= 0; i-- { + if rets.At(i).Type() == types.Universe.Lookup("error").Type() { + j.Errorf(rets.At(i), "error should be returned as the last argument") + continue fnLoop + } + } + } +} + +// CheckUnexportedReturn checks that exported functions on exported +// types do not return unexported types. 
+func (c *Checker) CheckUnexportedReturn(j *lint.Job) { + for _, fn := range j.Program.InitialFunctions { + if fn.Synthetic != "" || fn.Parent() != nil { + continue + } + if !ast.IsExported(fn.Name()) || IsInMain(j, fn) || IsInTest(j, fn) { + continue + } + sig := fn.Type().(*types.Signature) + if sig.Recv() != nil && !ast.IsExported(Dereference(sig.Recv().Type()).(*types.Named).Obj().Name()) { + continue + } + res := sig.Results() + for i := 0; i < res.Len(); i++ { + if named, ok := DereferenceR(res.At(i).Type()).(*types.Named); ok && + !ast.IsExported(named.Obj().Name()) && + named != types.Universe.Lookup("error").Type() { + j.Errorf(fn, "should not return unexported type") + } + } + } +} + +func (c *Checker) CheckReceiverNames(j *lint.Job) { + for _, pkg := range j.Program.InitialPackages { + for _, m := range pkg.SSA.Members { + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { + ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + for _, sel := range ms { + fn := sel.Obj().(*types.Func) + recv := fn.Type().(*types.Signature).Recv() + if Dereference(recv.Type()) != T.Type() { + // skip embedded methods + continue + } + if recv.Name() == "self" || recv.Name() == "this" { + j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) + } + if recv.Name() == "_" { + j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused") + } + } + } + } + } +} + +func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { + for _, pkg := range j.Program.InitialPackages { + for _, m := range pkg.SSA.Members { + names := map[string]int{} + + var firstFn *types.Func + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { + ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + for _, sel := range ms { + fn := sel.Obj().(*types.Func) + recv := fn.Type().(*types.Signature).Recv() + if Dereference(recv.Type()) != T.Type() { + // skip embedded methods + continue + } + if 
firstFn == nil { + firstFn = fn + } + if recv.Name() != "" && recv.Name() != "_" { + names[recv.Name()]++ + } + } + } + + if len(names) > 1 { + var seen []string + for name, count := range names { + seen = append(seen, fmt.Sprintf("%dx %q", count, name)) + } + + j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) + } + } + } +} + +func (c *Checker) CheckContextFirstArg(j *lint.Job) { + // TODO(dh): this check doesn't apply to test helpers. Example from the stdlib: + // func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) { +fnLoop: + for _, fn := range j.Program.InitialFunctions { + if fn.Synthetic != "" || fn.Parent() != nil { + continue + } + params := fn.Signature.Params() + if params.Len() < 2 { + continue + } + if types.TypeString(params.At(0).Type(), nil) == "context.Context" { + continue + } + for i := 1; i < params.Len(); i++ { + param := params.At(i) + if types.TypeString(param.Type(), nil) == "context.Context" { + j.Errorf(param, "context.Context should be the first argument of a function") + continue fnLoop + } + } + } +} + +func (c *Checker) CheckErrorStrings(j *lint.Job) { + fnNames := map[*ssa.Package]map[string]bool{} + for _, fn := range j.Program.InitialFunctions { + m := fnNames[fn.Package()] + if m == nil { + m = map[string]bool{} + fnNames[fn.Package()] = m + } + m[fn.Name()] = true + } + + for _, fn := range j.Program.InitialFunctions { + if IsInTest(j, fn) { + // We don't care about malformed error messages in tests; + // they're usually for direct human consumption, not part + // of an API + continue + } + for _, block := range fn.Blocks { + instrLoop: + for _, ins := range block.Instrs { + call, ok := ins.(*ssa.Call) + if !ok { + continue + } + if !IsCallTo(call.Common(), "errors.New") && !IsCallTo(call.Common(), "fmt.Errorf") { + continue + } + + k, ok := call.Common().Args[0].(*ssa.Const) + if !ok { + continue + } + + s := 
constant.StringVal(k.Value) + if len(s) == 0 { + continue + } + switch s[len(s)-1] { + case '.', ':', '!', '\n': + j.Errorf(call, "error strings should not end with punctuation or a newline") + } + idx := strings.IndexByte(s, ' ') + if idx == -1 { + // single word error message, probably not a real + // error but something used in tests or during + // debugging + continue + } + word := s[:idx] + first, n := utf8.DecodeRuneInString(word) + if !unicode.IsUpper(first) { + continue + } + for _, c := range word[n:] { + if unicode.IsUpper(c) { + // Word is probably an initialism or + // multi-word function name + continue instrLoop + } + } + + word = strings.TrimRightFunc(word, func(r rune) bool { return unicode.IsPunct(r) }) + if fnNames[fn.Package()][word] { + // Word is probably the name of a function in this package + continue + } + // First word in error starts with a capital + // letter, and the word doesn't contain any other + // capitals, making it unlikely to be an + // initialism or multi-word function name. + // + // It could still be a proper noun, though. 
+ + j.Errorf(call, "error strings should not be capitalized") + } + } + } +} + +func (c *Checker) CheckTimeNames(j *lint.Job) { + suffixes := []string{ + "Sec", "Secs", "Seconds", + "Msec", "Msecs", + "Milli", "Millis", "Milliseconds", + "Usec", "Usecs", "Microseconds", + "MS", "Ms", + } + fn := func(T types.Type, names []*ast.Ident) { + if !IsType(T, "time.Duration") && !IsType(T, "*time.Duration") { + return + } + for _, name := range names { + for _, suffix := range suffixes { + if strings.HasSuffix(name.Name, suffix) { + j.Errorf(name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix) + break + } + } + } + } + for _, f := range j.Program.Files { + ast.Inspect(f, func(node ast.Node) bool { + switch node := node.(type) { + case *ast.ValueSpec: + T := TypeOf(j, node.Type) + fn(T, node.Names) + case *ast.FieldList: + for _, field := range node.List { + T := TypeOf(j, field.Type) + fn(T, field.Names) + } + } + return true + }) + } +} + +func (c *Checker) CheckErrorVarNames(j *lint.Job) { + for _, f := range j.Program.Files { + for _, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.VAR { + continue + } + for _, spec := range gen.Specs { + spec := spec.(*ast.ValueSpec) + if len(spec.Names) != len(spec.Values) { + continue + } + + for i, name := range spec.Names { + val := spec.Values[i] + if !IsCallToAST(j, val, "errors.New") && !IsCallToAST(j, val, "fmt.Errorf") { + continue + } + + prefix := "err" + if name.IsExported() { + prefix = "Err" + } + if !strings.HasPrefix(name.Name, prefix) { + j.Errorf(name, "error var %s should have name of the form %sFoo", name.Name, prefix) + } + } + } + } + } +} + +var httpStatusCodes = map[int]string{ + 100: "StatusContinue", + 101: "StatusSwitchingProtocols", + 102: "StatusProcessing", + 200: "StatusOK", + 201: "StatusCreated", + 202: "StatusAccepted", + 203: "StatusNonAuthoritativeInfo", + 204: "StatusNoContent", + 205: "StatusResetContent", + 206: 
"StatusPartialContent", + 207: "StatusMultiStatus", + 208: "StatusAlreadyReported", + 226: "StatusIMUsed", + 300: "StatusMultipleChoices", + 301: "StatusMovedPermanently", + 302: "StatusFound", + 303: "StatusSeeOther", + 304: "StatusNotModified", + 305: "StatusUseProxy", + 307: "StatusTemporaryRedirect", + 308: "StatusPermanentRedirect", + 400: "StatusBadRequest", + 401: "StatusUnauthorized", + 402: "StatusPaymentRequired", + 403: "StatusForbidden", + 404: "StatusNotFound", + 405: "StatusMethodNotAllowed", + 406: "StatusNotAcceptable", + 407: "StatusProxyAuthRequired", + 408: "StatusRequestTimeout", + 409: "StatusConflict", + 410: "StatusGone", + 411: "StatusLengthRequired", + 412: "StatusPreconditionFailed", + 413: "StatusRequestEntityTooLarge", + 414: "StatusRequestURITooLong", + 415: "StatusUnsupportedMediaType", + 416: "StatusRequestedRangeNotSatisfiable", + 417: "StatusExpectationFailed", + 418: "StatusTeapot", + 422: "StatusUnprocessableEntity", + 423: "StatusLocked", + 424: "StatusFailedDependency", + 426: "StatusUpgradeRequired", + 428: "StatusPreconditionRequired", + 429: "StatusTooManyRequests", + 431: "StatusRequestHeaderFieldsTooLarge", + 451: "StatusUnavailableForLegalReasons", + 500: "StatusInternalServerError", + 501: "StatusNotImplemented", + 502: "StatusBadGateway", + 503: "StatusServiceUnavailable", + 504: "StatusGatewayTimeout", + 505: "StatusHTTPVersionNotSupported", + 506: "StatusVariantAlsoNegotiates", + 507: "StatusInsufficientStorage", + 508: "StatusLoopDetected", + 510: "StatusNotExtended", + 511: "StatusNetworkAuthenticationRequired", +} + +func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { + for _, pkg := range j.Program.InitialPackages { + whitelist := map[string]bool{} + for _, code := range pkg.Config.HTTPStatusCodeWhitelist { + whitelist[code] = true + } + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + + var arg int + switch CallNameAST(j, call) { + case "net/http.Error": + arg = 
2 + case "net/http.Redirect": + arg = 3 + case "net/http.StatusText": + arg = 0 + case "net/http.RedirectHandler": + arg = 1 + default: + return true + } + lit, ok := call.Args[arg].(*ast.BasicLit) + if !ok { + return true + } + if whitelist[lit.Value] { + return true + } + + n, err := strconv.Atoi(lit.Value) + if err != nil { + return true + } + s, ok := httpStatusCodes[n] + if !ok { + return true + } + j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n) + return true + } + for _, f := range pkg.Syntax { + ast.Inspect(f, fn) + } + } +} + +func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { + fn := func(node ast.Node) bool { + stmt, ok := node.(*ast.SwitchStmt) + if !ok { + return true + } + list := stmt.Body.List + for i, c := range list { + if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 { + j.Errorf(c, "default case should be first or last in switch statement") + break + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckYodaConditions(j *lint.Job) { + fn := func(node ast.Node) bool { + cond, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + if cond.Op != token.EQL && cond.Op != token.NEQ { + return true + } + if _, ok := cond.X.(*ast.BasicLit); !ok { + return true + } + if _, ok := cond.Y.(*ast.BasicLit); ok { + // Don't flag lit == lit conditions, just in case + return true + } + j.Errorf(cond, "don't use Yoda conditions") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/vendor/github.com/golangci/go-tools/stylecheck/names.go b/vendor/github.com/golangci/go-tools/stylecheck/names.go new file mode 100644 index 000000000000..342b708f1cd9 --- /dev/null +++ b/vendor/github.com/golangci/go-tools/stylecheck/names.go @@ -0,0 +1,263 @@ +// Copyright (c) 2013 The Go Authors. All rights reserved. +// Copyright (c) 2018 Dominik Honnef. All rights reserved. 
+ +package stylecheck + +import ( + "go/ast" + "go/token" + "strings" + "unicode" + + "github.com/golangci/go-tools/lint" + . "github.com/golangci/go-tools/lint/lintdsl" +) + +// knownNameExceptions is a set of names that are known to be exempt from naming checks. +// This is usually because they are constrained by having to match names in the +// standard library. +var knownNameExceptions = map[string]bool{ + "LastInsertId": true, // must match database/sql + "kWh": true, +} + +func (c *Checker) CheckNames(j *lint.Job) { + // A large part of this function is copied from + // github.com/golang/lint, Copyright (c) 2013 The Go Authors, + // licensed under the BSD 3-clause license. + + allCaps := func(s string) bool { + for _, r := range s { + if !((r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '_') { + return false + } + } + return true + } + + check := func(id *ast.Ident, thing string, initialisms map[string]bool) { + if id.Name == "_" { + return + } + if knownNameExceptions[id.Name] { + return + } + + // Handle two common styles from other languages that don't belong in Go. 
+ if len(id.Name) >= 5 && allCaps(id.Name) && strings.Contains(id.Name, "_") { + j.Errorf(id, "should not use ALL_CAPS in Go names; use CamelCase instead") + return + } + + should := lintName(id.Name, initialisms) + if id.Name == should { + return + } + + if len(id.Name) > 2 && strings.Contains(id.Name[1:len(id.Name)-1], "_") { + j.Errorf(id, "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should) + return + } + j.Errorf(id, "%s %s should be %s", thing, id.Name, should) + } + checkList := func(fl *ast.FieldList, thing string, initialisms map[string]bool) { + if fl == nil { + return + } + for _, f := range fl.List { + for _, id := range f.Names { + check(id, thing, initialisms) + } + } + } + + for _, pkg := range j.Program.InitialPackages { + initialisms := make(map[string]bool, len(pkg.Config.Initialisms)) + for _, word := range pkg.Config.Initialisms { + initialisms[word] = true + } + for _, f := range pkg.Syntax { + // Package names need slightly different handling than other names. + if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") { + j.Errorf(f, "should not use underscores in package names") + } + if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 { + j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) + } + + ast.Inspect(f, func(node ast.Node) bool { + switch v := node.(type) { + case *ast.AssignStmt: + if v.Tok != token.DEFINE { + return true + } + for _, exp := range v.Lhs { + if id, ok := exp.(*ast.Ident); ok { + check(id, "var", initialisms) + } + } + case *ast.FuncDecl: + // Functions with no body are defined elsewhere (in + // assembly, or via go:linkname). These are likely to + // be something very low level (such as the runtime), + // where our rules don't apply. 
+ if v.Body == nil {
+ return true
+ }
+
+ if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
+ return true
+ }
+
+ thing := "func"
+ if v.Recv != nil {
+ thing = "method"
+ }
+
+ if !isTechnicallyExported(v) {
+ check(v.Name, thing, initialisms)
+ }
+
+ checkList(v.Type.Params, thing+" parameter", initialisms)
+ checkList(v.Type.Results, thing+" result", initialisms)
+ case *ast.GenDecl:
+ if v.Tok == token.IMPORT {
+ return true
+ }
+ var thing string
+ switch v.Tok {
+ case token.CONST:
+ thing = "const"
+ case token.TYPE:
+ thing = "type"
+ case token.VAR:
+ thing = "var"
+ }
+ for _, spec := range v.Specs {
+ switch s := spec.(type) {
+ case *ast.TypeSpec:
+ check(s.Name, thing, initialisms)
+ case *ast.ValueSpec:
+ for _, id := range s.Names {
+ check(id, thing, initialisms)
+ }
+ }
+ }
+ case *ast.InterfaceType:
+ // Do not check interface method names.
+ // They are often constrained by the method names of concrete types.
+ for _, x := range v.Methods.List {
+ ft, ok := x.Type.(*ast.FuncType)
+ if !ok { // might be an embedded interface name
+ continue
+ }
+ checkList(ft.Params, "interface method parameter", initialisms)
+ checkList(ft.Results, "interface method result", initialisms)
+ }
+ case *ast.RangeStmt:
+ if v.Tok == token.ASSIGN {
+ return true
+ }
+ if id, ok := v.Key.(*ast.Ident); ok {
+ check(id, "range var", initialisms)
+ }
+ if id, ok := v.Value.(*ast.Ident); ok {
+ check(id, "range var", initialisms)
+ }
+ case *ast.StructType:
+ for _, f := range v.Fields.List {
+ for _, id := range f.Names {
+ check(id, "struct field", initialisms)
+ }
+ }
+ }
+ return true
+ })
+ }
+ }
+}
+
+// lintName returns a different name if it should be different. 
+func lintName(name string, initialisms map[string]bool) (should string) { + // A large part of this function is copied from + // github.com/golang/lint, Copyright (c) 2013 The Go Authors, + // licensed under the BSD 3-clause license. + + // Fast path for simple cases: "_" and all lowercase. + if name == "_" { + return name + } + if strings.IndexFunc(name, func(r rune) bool { return !unicode.IsLower(r) }) == -1 { + return name + } + + // Split camelCase at any lower->upper transition, and split on underscores. + // Check each word for common initialisms. + runes := []rune(name) + w, i := 0, 0 // index of start of word, scan + for i+1 <= len(runes) { + eow := false // whether we hit the end of a word + if i+1 == len(runes) { + eow = true + } else if runes[i+1] == '_' && i+1 != len(runes)-1 { + // underscore; shift the remainder forward over any run of underscores + eow = true + n := 1 + for i+n+1 < len(runes) && runes[i+n+1] == '_' { + n++ + } + + // Leave at most one underscore if the underscore is between two digits + if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { + n-- + } + + copy(runes[i+1:], runes[i+n+1:]) + runes = runes[:len(runes)-n] + } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { + // lower->non-lower + eow = true + } + i++ + if !eow { + continue + } + + // [w,i) is a word. + word := string(runes[w:i]) + if u := strings.ToUpper(word); initialisms[u] { + // Keep consistent case, which is lowercase only at the start. + if w == 0 && unicode.IsLower(runes[w]) { + u = strings.ToLower(u) + } + // All the common initialisms are ASCII, + // so we can replace the bytes exactly. + // TODO(dh): this won't be true once we allow custom initialisms + copy(runes[w:], []rune(u)) + } else if w > 0 && strings.ToLower(word) == word { + // already all lowercase, and not the first word, so uppercase the first character. 
+ runes[w] = unicode.ToUpper(runes[w]) + } + w = i + } + return string(runes) +} + +func isTechnicallyExported(f *ast.FuncDecl) bool { + if f.Recv != nil || f.Doc == nil { + return false + } + + const export = "//export " + const linkname = "//go:linkname " + for _, c := range f.Doc.List { + if strings.HasPrefix(c.Text, export) && len(c.Text) == len(export)+len(f.Name.Name) && c.Text[len(export):] == f.Name.Name { + return true + } + + if strings.HasPrefix(c.Text, linkname) { + return true + } + } + return false +} diff --git a/vendor/github.com/golangci/go-tools/unused/unused.go b/vendor/github.com/golangci/go-tools/unused/unused.go index d1c87674909e..180fa1a19ffe 100644 --- a/vendor/github.com/golangci/go-tools/unused/unused.go +++ b/vendor/github.com/golangci/go-tools/unused/unused.go @@ -10,8 +10,9 @@ import ( "strings" "github.com/golangci/go-tools/lint" + . "github.com/golangci/go-tools/lint/lintdsl" - "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/typeutil" ) @@ -30,9 +31,9 @@ func (*LintChecker) Name() string { return "unused" } func (*LintChecker) Prefix() string { return "U" } func (l *LintChecker) Init(*lint.Program) {} -func (l *LintChecker) Funcs() map[string]lint.Func { - return map[string]lint.Func{ - "U1000": l.Lint, +func (l *LintChecker) Checks() []lint.Check { + return []lint.Check{ + {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, } } @@ -56,7 +57,7 @@ func typString(obj types.Object) string { } func (l *LintChecker) Lint(j *lint.Job) { - unused := l.c.Check(j.Program.Prog) + unused := l.c.Check(j.Program) for _, u := range unused { name := u.Obj.Name() if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { @@ -157,7 +158,7 @@ type Checker struct { graph *graph msCache typeutil.MethodSetCache - lprog *loader.Program + prog *lint.Program topmostCache map[*types.Scope]*types.Scope interfaces []*types.Interface } @@ -198,13 +199,13 @@ func (e Error) Error() 
string { return fmt.Sprintf("errors in %d packages", len(e.Errors)) } -func (c *Checker) Check(lprog *loader.Program) []Unused { +func (c *Checker) Check(prog *lint.Program) []Unused { var unused []Unused - c.lprog = lprog + c.prog = prog if c.WholeProgram { c.findExportedInterfaces() } - for _, pkg := range c.lprog.InitialPackages() { + for _, pkg := range prog.InitialPackages { c.processDefs(pkg) c.processUses(pkg) c.processTypes(pkg) @@ -230,6 +231,7 @@ func (c *Checker) Check(lprog *loader.Program) []Unused { } markNodesUsed(roots) c.markNodesQuiet() + c.deduplicate() if c.Debug != nil { c.printDebugGraph(c.Debug) @@ -245,8 +247,8 @@ func (c *Checker) Check(lprog *loader.Program) []Unused { } found := false if !false { - for _, pkg := range c.lprog.InitialPackages() { - if pkg.Pkg == obj.Pkg() { + for _, pkg := range prog.InitialPackages { + if pkg.Types == obj.Pkg() { found = true break } @@ -256,23 +258,11 @@ func (c *Checker) Check(lprog *loader.Program) []Unused { continue } - pos := c.lprog.Fset.Position(obj.Pos()) + pos := c.prog.Fset().Position(obj.Pos()) if pos.Filename == "" || filepath.Base(pos.Filename) == "C" { continue } - generated := false - for _, file := range c.lprog.Package(obj.Pkg().Path()).Files { - if c.lprog.Fset.Position(file.Pos()).Filename != pos.Filename { - continue - } - if len(file.Comments) > 0 { - generated = isGenerated(file.Comments[0].Text()) - } - break - } - if generated { - continue - } + unused = append(unused, Unused{Obj: obj, Position: pos}) } return unused @@ -324,16 +314,24 @@ func (c *Checker) useNoCopyFields(typ types.Type) { } } -func (c *Checker) useExportedFields(typ types.Type) { +func (c *Checker) useExportedFields(typ types.Type, by types.Type) bool { + any := false if st, ok := typ.Underlying().(*types.Struct); ok { n := st.NumFields() for i := 0; i < n; i++ { field := st.Field(i) + if field.Anonymous() { + if c.useExportedFields(field.Type(), typ) { + c.graph.markUsedBy(field, typ) + } + } if field.Exported() 
{ - c.graph.markUsedBy(field, typ) + c.graph.markUsedBy(field, by) + any = true } } } + return any } func (c *Checker) useExportedMethods(typ types.Type) { @@ -369,8 +367,8 @@ func (c *Checker) useExportedMethods(typ types.Type) { } } -func (c *Checker) processDefs(pkg *loader.PackageInfo) { - for _, obj := range pkg.Defs { +func (c *Checker) processDefs(pkg *lint.Pkg) { + for _, obj := range pkg.TypesInfo.Defs { if obj == nil { continue } @@ -391,7 +389,7 @@ func (c *Checker) processDefs(pkg *loader.PackageInfo) { // mark them used if an instance of the type was // accessible via an interface value. if !c.WholeProgram || c.ConsiderReflection { - c.useExportedFields(obj.Type()) + c.useExportedFields(obj.Type(), obj.Type()) } // TODO(dh): Traditionally we have not marked all exported @@ -419,8 +417,8 @@ func (c *Checker) processDefs(pkg *loader.PackageInfo) { if obj.Name() == "_" { node := c.graph.getNode(obj) node.quiet = true - scope := c.topmostScope(pkg.Pkg.Scope().Innermost(obj.Pos()), pkg.Pkg) - if scope == pkg.Pkg.Scope() { + scope := c.topmostScope(pkg.Types.Scope().Innermost(obj.Pos()), pkg.Types) + if scope == pkg.Types.Scope() { c.graph.roots = append(c.graph.roots, node) } else { c.graph.markUsedBy(obj, scope) @@ -470,15 +468,15 @@ func (c *Checker) processDefs(pkg *loader.PackageInfo) { } } -func (c *Checker) processUses(pkg *loader.PackageInfo) { - for ident, usedObj := range pkg.Uses { +func (c *Checker) processUses(pkg *lint.Pkg) { + for ident, usedObj := range pkg.TypesInfo.Uses { if _, ok := usedObj.(*types.PkgName); ok { continue } pos := ident.Pos() - scope := pkg.Pkg.Scope().Innermost(pos) - scope = c.topmostScope(scope, pkg.Pkg) - if scope != pkg.Pkg.Scope() { + scope := pkg.Types.Scope().Innermost(pos) + scope = c.topmostScope(scope, pkg.Types) + if scope != pkg.Types.Scope() { c.graph.markUsedBy(usedObj, scope) } @@ -491,17 +489,17 @@ func (c *Checker) processUses(pkg *loader.PackageInfo) { func (c *Checker) findExportedInterfaces() { 
c.interfaces = []*types.Interface{types.Universe.Lookup("error").Type().(*types.Named).Underlying().(*types.Interface)} - var pkgs []*loader.PackageInfo + var pkgs []*packages.Package if c.WholeProgram { - for _, pkg := range c.lprog.AllPackages { - pkgs = append(pkgs, pkg) - } + pkgs = append(pkgs, c.prog.AllPackages...) } else { - pkgs = c.lprog.InitialPackages() + for _, pkg := range c.prog.InitialPackages { + pkgs = append(pkgs, pkg.Package) + } } for _, pkg := range pkgs { - for _, tv := range pkg.Types { + for _, tv := range pkg.TypesInfo.Types { iface, ok := tv.Type.(*types.Interface) if !ok { continue @@ -514,10 +512,10 @@ func (c *Checker) findExportedInterfaces() { } } -func (c *Checker) processTypes(pkg *loader.PackageInfo) { +func (c *Checker) processTypes(pkg *lint.Pkg) { named := map[*types.Named]*types.Pointer{} var interfaces []*types.Interface - for _, tv := range pkg.Types { + for _, tv := range pkg.TypesInfo.Types { if typ, ok := tv.Type.(interface { Elem() types.Type }); ok { @@ -535,8 +533,8 @@ func (c *Checker) processTypes(pkg *loader.PackageInfo) { } case *types.Struct: c.useNoCopyFields(obj) - if pkg.Pkg.Name() != "main" && !c.WholeProgram { - c.useExportedFields(obj) + if pkg.Types.Name() != "main" && !c.WholeProgram { + c.useExportedFields(obj, obj) } } } @@ -546,11 +544,14 @@ func (c *Checker) processTypes(pkg *loader.PackageInfo) { // // TODO(dh): For normal operations, that's the best we can do, as // we have no idea what external users will do with our types. In - // whole-program mode, we could be more conservative, in two ways: + // whole-program mode, we could be more precise, in two ways: // 1) Only consider interfaces if a type has been assigned to one // 2) Use SSA and flow analysis and determine the exact set of // interfaces that is relevant. 
fn := func(iface *types.Interface) { + for i := 0; i < iface.NumEmbeddeds(); i++ { + c.graph.markUsedBy(iface.Embedded(i), iface) + } for obj, objPtr := range named { if !types.Implements(obj, iface) && !types.Implements(objPtr, iface) { continue @@ -590,10 +591,10 @@ func (c *Checker) processTypes(pkg *loader.PackageInfo) { } } -func (c *Checker) processSelections(pkg *loader.PackageInfo) { +func (c *Checker) processSelections(pkg *lint.Pkg) { fn := func(expr *ast.SelectorExpr, sel *types.Selection, offset int) { - scope := pkg.Pkg.Scope().Innermost(expr.Pos()) - c.graph.markUsedBy(expr.X, c.topmostScope(scope, pkg.Pkg)) + scope := pkg.Types.Scope().Innermost(expr.Pos()) + c.graph.markUsedBy(expr.X, c.topmostScope(scope, pkg.Types)) c.graph.markUsedBy(sel.Obj(), expr.X) if len(sel.Index()) > 1 { typ := sel.Recv() @@ -606,7 +607,7 @@ func (c *Checker) processSelections(pkg *loader.PackageInfo) { } } - for expr, sel := range pkg.Selections { + for expr, sel := range pkg.TypesInfo.Selections { switch sel.Kind() { case types.FieldVal: fn(expr, sel, 0) @@ -624,9 +625,9 @@ func dereferenceType(typ types.Type) types.Type { } // processConversion marks fields as used if they're part of a type conversion. -func (c *Checker) processConversion(pkg *loader.PackageInfo, node ast.Node) { +func (c *Checker) processConversion(pkg *lint.Pkg, node ast.Node) { if node, ok := node.(*ast.CallExpr); ok { - callTyp := pkg.TypeOf(node.Fun) + callTyp := pkg.TypesInfo.TypeOf(node.Fun) var typDst *types.Struct var ok bool switch typ := callTyp.(type) { @@ -641,7 +642,7 @@ func (c *Checker) processConversion(pkg *loader.PackageInfo, node ast.Node) { return } - if typ, ok := pkg.TypeOf(node.Args[0]).(*types.Basic); ok && typ.Kind() == types.UnsafePointer { + if typ, ok := pkg.TypesInfo.TypeOf(node.Args[0]).(*types.Basic); ok && typ.Kind() == types.UnsafePointer { // This is an unsafe conversion. 
Assume that all the // fields are relevant (they are, because of memory // layout) @@ -652,7 +653,7 @@ func (c *Checker) processConversion(pkg *loader.PackageInfo, node ast.Node) { return } - typSrc, ok := dereferenceType(pkg.TypeOf(node.Args[0])).Underlying().(*types.Struct) + typSrc, ok := dereferenceType(pkg.TypesInfo.TypeOf(node.Args[0])).Underlying().(*types.Struct) if !ok { return } @@ -682,10 +683,10 @@ func (c *Checker) processConversion(pkg *loader.PackageInfo, node ast.Node) { // processCompositeLiteral marks fields as used if the struct is used // in a composite literal. -func (c *Checker) processCompositeLiteral(pkg *loader.PackageInfo, node ast.Node) { +func (c *Checker) processCompositeLiteral(pkg *lint.Pkg, node ast.Node) { // XXX how does this actually work? wouldn't it match t{}? if node, ok := node.(*ast.CompositeLit); ok { - typ := pkg.TypeOf(node) + typ := pkg.TypesInfo.TypeOf(node) if _, ok := typ.(*types.Named); ok { typ = typ.Underlying() } @@ -701,7 +702,7 @@ func (c *Checker) processCompositeLiteral(pkg *loader.PackageInfo, node ast.Node // processCgoExported marks functions as used if they're being // exported to cgo. 
-func (c *Checker) processCgoExported(pkg *loader.PackageInfo, node ast.Node) { +func (c *Checker) processCgoExported(pkg *lint.Pkg, node ast.Node) { if node, ok := node.(*ast.FuncDecl); ok { if node.Doc == nil { return @@ -710,13 +711,13 @@ func (c *Checker) processCgoExported(pkg *loader.PackageInfo, node ast.Node) { if !strings.HasPrefix(cmt.Text, "//go:cgo_export_") { return } - obj := pkg.ObjectOf(node.Name) + obj := pkg.TypesInfo.ObjectOf(node.Name) c.graph.roots = append(c.graph.roots, c.graph.getNode(obj)) } } } -func (c *Checker) processVariableDeclaration(pkg *loader.PackageInfo, node ast.Node) { +func (c *Checker) processVariableDeclaration(pkg *lint.Pkg, node ast.Node) { if decl, ok := node.(*ast.GenDecl); ok { for _, spec := range decl.Specs { spec, ok := spec.(*ast.ValueSpec) @@ -730,11 +731,11 @@ func (c *Checker) processVariableDeclaration(pkg *loader.PackageInfo, node ast.N value := spec.Values[i] fn := func(node ast.Node) bool { if node3, ok := node.(*ast.Ident); ok { - obj := pkg.ObjectOf(node3) + obj := pkg.TypesInfo.ObjectOf(node3) if _, ok := obj.(*types.PkgName); ok { return true } - c.graph.markUsedBy(obj, pkg.ObjectOf(name)) + c.graph.markUsedBy(obj, pkg.TypesInfo.ObjectOf(name)) } return true } @@ -744,17 +745,17 @@ func (c *Checker) processVariableDeclaration(pkg *loader.PackageInfo, node ast.N } } -func (c *Checker) processArrayConstants(pkg *loader.PackageInfo, node ast.Node) { +func (c *Checker) processArrayConstants(pkg *lint.Pkg, node ast.Node) { if decl, ok := node.(*ast.ArrayType); ok { ident, ok := decl.Len.(*ast.Ident) if !ok { return } - c.graph.markUsedBy(pkg.ObjectOf(ident), pkg.TypeOf(decl)) + c.graph.markUsedBy(pkg.TypesInfo.ObjectOf(ident), pkg.TypesInfo.TypeOf(decl)) } } -func (c *Checker) processKnownReflectMethodCallers(pkg *loader.PackageInfo, node ast.Node) { +func (c *Checker) processKnownReflectMethodCallers(pkg *lint.Pkg, node ast.Node) { call, ok := node.(*ast.CallExpr) if !ok { return @@ -763,12 +764,12 @@ func (c 
*Checker) processKnownReflectMethodCallers(pkg *loader.PackageInfo, node if !ok { return } - if types.TypeString(pkg.TypeOf(sel.X), nil) != "*net/rpc.Server" { + if !IsType(pkg.TypesInfo.TypeOf(sel.X), "*net/rpc.Server") { x, ok := sel.X.(*ast.Ident) if !ok { return } - pkgname, ok := pkg.ObjectOf(x).(*types.PkgName) + pkgname, ok := pkg.TypesInfo.ObjectOf(x).(*types.PkgName) if !ok { return } @@ -790,14 +791,14 @@ func (c *Checker) processKnownReflectMethodCallers(pkg *loader.PackageInfo, node } arg = call.Args[1] } - typ := pkg.TypeOf(arg) + typ := pkg.TypesInfo.TypeOf(arg) ms := types.NewMethodSet(typ) for i := 0; i < ms.Len(); i++ { c.graph.markUsedBy(ms.At(i).Obj(), typ) } } -func (c *Checker) processAST(pkg *loader.PackageInfo) { +func (c *Checker) processAST(pkg *lint.Pkg) { fn := func(node ast.Node) bool { c.processConversion(pkg, node) c.processKnownReflectMethodCallers(pkg, node) @@ -807,7 +808,7 @@ func (c *Checker) processAST(pkg *loader.PackageInfo) { c.processArrayConstants(pkg, node) return true } - for _, file := range pkg.Files { + for _, file := range pkg.Syntax { ast.Inspect(file, fn) } } @@ -913,7 +914,7 @@ func (c *Checker) isRoot(obj types.Object) bool { return true } if obj.Exported() { - f := c.lprog.Fset.Position(obj.Pos()).Filename + f := c.prog.Fset().Position(obj.Pos()).Filename if strings.HasSuffix(f, "_test.go") { return strings.HasPrefix(obj.Name(), "Test") || strings.HasPrefix(obj.Name(), "Benchmark") || @@ -938,6 +939,33 @@ func markNodesUsed(nodes map[*graphNode]struct{}) { } } +// deduplicate merges objects based on their positions. This is done +// to work around packages existing multiple times in go/packages. 
+func (c *Checker) deduplicate() { + m := map[token.Position]struct{ used, quiet bool }{} + for _, node := range c.graph.nodes { + obj, ok := node.obj.(types.Object) + if !ok { + continue + } + pos := c.prog.Fset().Position(obj.Pos()) + m[pos] = struct{ used, quiet bool }{ + m[pos].used || node.used, + m[pos].quiet || node.quiet, + } + } + + for _, node := range c.graph.nodes { + obj, ok := node.obj.(types.Object) + if !ok { + continue + } + pos := c.prog.Fset().Position(obj.Pos()) + node.used = m[pos].used + node.quiet = m[pos].quiet + } +} + func (c *Checker) markNodesQuiet() { for _, node := range c.graph.nodes { if node.used { @@ -1057,8 +1085,3 @@ func (c *Checker) printDebugGraph(w io.Writer) { } fmt.Fprintln(w, "}") } - -func isGenerated(comment string) bool { - return strings.Contains(comment, "Code generated by") || - strings.Contains(comment, "DO NOT EDIT") -} diff --git a/vendor/github.com/golangci/tools/AUTHORS b/vendor/github.com/golangci/tools/AUTHORS deleted file mode 100644 index 15167cd746c5..000000000000 --- a/vendor/github.com/golangci/tools/AUTHORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code refers to The Go Authors for copyright purposes. -# The master list of authors is in the main Go distribution, -# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/github.com/golangci/tools/CONTRIBUTORS b/vendor/github.com/golangci/tools/CONTRIBUTORS deleted file mode 100644 index 1c4577e96806..000000000000 --- a/vendor/github.com/golangci/tools/CONTRIBUTORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code was written by the Go contributors. -# The master list of contributors is in the main Go distribution, -# visible at http://tip.golang.org/CONTRIBUTORS. 
diff --git a/vendor/github.com/golangci/tools/LICENSE b/vendor/github.com/golangci/tools/LICENSE deleted file mode 100644 index 6a66aea5eafe..000000000000 --- a/vendor/github.com/golangci/tools/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/golangci/tools/PATENTS b/vendor/github.com/golangci/tools/PATENTS deleted file mode 100644 index 733099041f84..000000000000 --- a/vendor/github.com/golangci/tools/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/tools/go/callgraph/callgraph.go b/vendor/golang.org/x/tools/go/callgraph/callgraph.go new file mode 100644 index 000000000000..707a31931a70 --- /dev/null +++ b/vendor/golang.org/x/tools/go/callgraph/callgraph.go @@ -0,0 +1,129 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +/* + +Package callgraph defines the call graph and various algorithms +and utilities to operate on it. + +A call graph is a labelled directed graph whose nodes represent +functions and whose edge labels represent syntactic function call +sites. The presence of a labelled edge (caller, site, callee) +indicates that caller may call callee at the specified call site. + +A call graph is a multigraph: it may contain multiple edges (caller, +*, callee) connecting the same pair of nodes, so long as the edges +differ by label; this occurs when one function calls another function +from multiple call sites. Also, it may contain multiple edges +(caller, site, *) that differ only by callee; this indicates a +polymorphic call. + +A SOUND call graph is one that overapproximates the dynamic calling +behaviors of the program in all possible executions. One call graph +is more PRECISE than another if it is a smaller overapproximation of +the dynamic behavior. + +All call graphs have a synthetic root node which is responsible for +calling main() and init(). + +Calls to built-in functions (e.g. panic, println) are not represented +in the call graph; they are treated like built-in operators of the +language. + +*/ +package callgraph // import "golang.org/x/tools/go/callgraph" + +// TODO(adonovan): add a function to eliminate wrappers from the +// callgraph, preserving topology. +// More generally, we could eliminate "uninteresting" nodes such as +// nodes from packages we don't care about. + +import ( + "fmt" + "go/token" + + "golang.org/x/tools/go/ssa" +) + +// A Graph represents a call graph. +// +// A graph may contain nodes that are not reachable from the root. +// If the call graph is sound, such nodes indicate unreachable +// functions. +// +type Graph struct { + Root *Node // the distinguished root node + Nodes map[*ssa.Function]*Node // all nodes by function +} + +// New returns a new Graph with the specified root node. 
+func New(root *ssa.Function) *Graph { + g := &Graph{Nodes: make(map[*ssa.Function]*Node)} + g.Root = g.CreateNode(root) + return g +} + +// CreateNode returns the Node for fn, creating it if not present. +func (g *Graph) CreateNode(fn *ssa.Function) *Node { + n, ok := g.Nodes[fn] + if !ok { + n = &Node{Func: fn, ID: len(g.Nodes)} + g.Nodes[fn] = n + } + return n +} + +// A Node represents a node in a call graph. +type Node struct { + Func *ssa.Function // the function this node represents + ID int // 0-based sequence number + In []*Edge // unordered set of incoming call edges (n.In[*].Callee == n) + Out []*Edge // unordered set of outgoing call edges (n.Out[*].Caller == n) +} + +func (n *Node) String() string { + return fmt.Sprintf("n%d:%s", n.ID, n.Func) +} + +// A Edge represents an edge in the call graph. +// +// Site is nil for edges originating in synthetic or intrinsic +// functions, e.g. reflect.Call or the root of the call graph. +type Edge struct { + Caller *Node + Site ssa.CallInstruction + Callee *Node +} + +func (e Edge) String() string { + return fmt.Sprintf("%s --> %s", e.Caller, e.Callee) +} + +func (e Edge) Description() string { + var prefix string + switch e.Site.(type) { + case nil: + return "synthetic call" + case *ssa.Go: + prefix = "concurrent " + case *ssa.Defer: + prefix = "deferred " + } + return prefix + e.Site.Common().Description() +} + +func (e Edge) Pos() token.Pos { + if e.Site == nil { + return token.NoPos + } + return e.Site.Pos() +} + +// AddEdge adds the edge (caller, site, callee) to the call graph. +// Elimination of duplicate edges is the caller's responsibility. 
+func AddEdge(caller *Node, site ssa.CallInstruction, callee *Node) { + e := &Edge{caller, site, callee} + callee.In = append(callee.In, e) + caller.Out = append(caller.Out, e) +} diff --git a/vendor/github.com/golangci/tools/go/callgraph/cha/cha.go b/vendor/golang.org/x/tools/go/callgraph/cha/cha.go similarity index 76% rename from vendor/github.com/golangci/tools/go/callgraph/cha/cha.go rename to vendor/golang.org/x/tools/go/callgraph/cha/cha.go index 4940fe63a575..215ff173d958 100644 --- a/vendor/github.com/golangci/tools/go/callgraph/cha/cha.go +++ b/vendor/golang.org/x/tools/go/callgraph/cha/cha.go @@ -21,14 +21,14 @@ // and all concrete types are put into interfaces, it is sound to run on // partial programs, such as libraries without a main or test function. // -package cha // import "github.com/golangci/tools/go/callgraph/cha" +package cha // import "golang.org/x/tools/go/callgraph/cha" import ( "go/types" - "github.com/golangci/tools/go/callgraph" - "github.com/golangci/tools/go/ssa" - "github.com/golangci/tools/go/ssa/ssautil" + "golang.org/x/tools/go/callgraph" + "golang.org/x/tools/go/ssa" + "golang.org/x/tools/go/ssa/ssautil" "golang.org/x/tools/go/types/typeutil" ) @@ -47,23 +47,36 @@ func CallGraph(prog *ssa.Program) *callgraph.Graph { // methodsByName contains all methods, // grouped by name for efficient lookup. + // (methodsById would be better but not every SSA method has a go/types ID.) methodsByName := make(map[string][]*ssa.Function) - // methodsMemo records, for every abstract method call call I.f on - // interface type I, the set of concrete methods C.f of all + // An imethod represents an interface method I.m. + // (There's no go/types object for it; + // a *types.Func may be shared by many interfaces due to interface embedding.) 
+ type imethod struct { + I *types.Interface + id string + } + // methodsMemo records, for every abstract method call I.m on + // interface type I, the set of concrete methods C.m of all // types C that satisfy interface I. - methodsMemo := make(map[*types.Func][]*ssa.Function) - lookupMethods := func(m *types.Func) []*ssa.Function { - methods, ok := methodsMemo[m] + // + // Abstract methods may be shared by several interfaces, + // hence we must pass I explicitly, not guess from m. + // + // methodsMemo is just a cache, so it needn't be a typeutil.Map. + methodsMemo := make(map[imethod][]*ssa.Function) + lookupMethods := func(I *types.Interface, m *types.Func) []*ssa.Function { + id := m.Id() + methods, ok := methodsMemo[imethod{I, id}] if !ok { - I := m.Type().(*types.Signature).Recv().Type().Underlying().(*types.Interface) for _, f := range methodsByName[m.Name()] { C := f.Signature.Recv().Type() // named or *named if types.Implements(C, I) { methods = append(methods, f) } } - methodsMemo[m] = methods + methodsMemo[imethod{I, id}] = methods } return methods } @@ -109,7 +122,8 @@ func CallGraph(prog *ssa.Program) *callgraph.Graph { if site, ok := instr.(ssa.CallInstruction); ok { call := site.Common() if call.IsInvoke() { - addEdges(fnode, site, lookupMethods(call.Method)) + tiface := call.Value.Type().Underlying().(*types.Interface) + addEdges(fnode, site, lookupMethods(tiface, call.Method)) } else if g := call.StaticCallee(); g != nil { addEdge(fnode, site, g) } else if _, ok := call.Value.(*ssa.Builtin); !ok { diff --git a/vendor/github.com/golangci/tools/go/callgraph/rta/rta.go b/vendor/golang.org/x/tools/go/callgraph/rta/rta.go similarity index 98% rename from vendor/github.com/golangci/tools/go/callgraph/rta/rta.go rename to vendor/golang.org/x/tools/go/callgraph/rta/rta.go index c24e189b3653..e6b44606ae85 100644 --- a/vendor/github.com/golangci/tools/go/callgraph/rta/rta.go +++ b/vendor/golang.org/x/tools/go/callgraph/rta/rta.go @@ -40,7 +40,7 
@@ // cmd/callgraph tool on its own source takes ~2.1s for RTA and ~5.4s // for points-to analysis. // -package rta // import "github.com/golangci/tools/go/callgraph/rta" +package rta // import "golang.org/x/tools/go/callgraph/rta" // TODO(adonovan): test it by connecting it to the interpreter and // replacing all "unreachable" functions by a special intrinsic, and @@ -50,8 +50,8 @@ import ( "fmt" "go/types" - "github.com/golangci/tools/go/callgraph" - "github.com/golangci/tools/go/ssa" + "golang.org/x/tools/go/callgraph" + "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/types/typeutil" ) @@ -184,7 +184,7 @@ func (r *rta) visitDynCall(site ssa.CallInstruction) { r.dynCallSites.Set(S, append(sites, site)) // For each function of signature S that we know is address-taken, - // mark it reachable. We'll add the callgraph edges later. + // add an edge and mark it reachable. funcs, _ := r.addrTakenFuncsBySig.At(S).(map[*ssa.Function]bool) for g := range funcs { r.addEdge(site, g, true) diff --git a/vendor/golang.org/x/tools/go/callgraph/util.go b/vendor/golang.org/x/tools/go/callgraph/util.go new file mode 100644 index 000000000000..a8f89031c058 --- /dev/null +++ b/vendor/golang.org/x/tools/go/callgraph/util.go @@ -0,0 +1,181 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package callgraph + +import "golang.org/x/tools/go/ssa" + +// This file provides various utilities over call graphs, such as +// visitation and path search. + +// CalleesOf returns a new set containing all direct callees of the +// caller node. +// +func CalleesOf(caller *Node) map[*Node]bool { + callees := make(map[*Node]bool) + for _, e := range caller.Out { + callees[e.Callee] = true + } + return callees +} + +// GraphVisitEdges visits all the edges in graph g in depth-first order. +// The edge function is called for each edge in postorder. 
If it +// returns non-nil, visitation stops and GraphVisitEdges returns that +// value. +// +func GraphVisitEdges(g *Graph, edge func(*Edge) error) error { + seen := make(map[*Node]bool) + var visit func(n *Node) error + visit = func(n *Node) error { + if !seen[n] { + seen[n] = true + for _, e := range n.Out { + if err := visit(e.Callee); err != nil { + return err + } + if err := edge(e); err != nil { + return err + } + } + } + return nil + } + for _, n := range g.Nodes { + if err := visit(n); err != nil { + return err + } + } + return nil +} + +// PathSearch finds an arbitrary path starting at node start and +// ending at some node for which isEnd() returns true. On success, +// PathSearch returns the path as an ordered list of edges; on +// failure, it returns nil. +// +func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge { + stack := make([]*Edge, 0, 32) + seen := make(map[*Node]bool) + var search func(n *Node) []*Edge + search = func(n *Node) []*Edge { + if !seen[n] { + seen[n] = true + if isEnd(n) { + return stack + } + for _, e := range n.Out { + stack = append(stack, e) // push + if found := search(e.Callee); found != nil { + return found + } + stack = stack[:len(stack)-1] // pop + } + } + return nil + } + return search(start) +} + +// DeleteSyntheticNodes removes from call graph g all nodes for +// synthetic functions (except g.Root and package initializers), +// preserving the topology. In effect, calls to synthetic wrappers +// are "inlined". +// +func (g *Graph) DeleteSyntheticNodes() { + // Measurements on the standard library and go.tools show that + // resulting graph has ~15% fewer nodes and 4-8% fewer edges + // than the input. + // + // Inlining a wrapper of in-degree m, out-degree n adds m*n + // and removes m+n edges. Since most wrappers are monomorphic + // (n=1) this results in a slight reduction. Polymorphic + // wrappers (n>1), e.g. 
from embedding an interface value + // inside a struct to satisfy some interface, cause an + // increase in the graph, but they seem to be uncommon. + + // Hash all existing edges to avoid creating duplicates. + edges := make(map[Edge]bool) + for _, cgn := range g.Nodes { + for _, e := range cgn.Out { + edges[*e] = true + } + } + for fn, cgn := range g.Nodes { + if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) { + continue // keep + } + for _, eIn := range cgn.In { + for _, eOut := range cgn.Out { + newEdge := Edge{eIn.Caller, eIn.Site, eOut.Callee} + if edges[newEdge] { + continue // don't add duplicate + } + AddEdge(eIn.Caller, eIn.Site, eOut.Callee) + edges[newEdge] = true + } + } + g.DeleteNode(cgn) + } +} + +func isInit(fn *ssa.Function) bool { + return fn.Pkg != nil && fn.Pkg.Func("init") == fn +} + +// DeleteNode removes node n and its edges from the graph g. +// (NB: not efficient for batch deletion.) +func (g *Graph) DeleteNode(n *Node) { + n.deleteIns() + n.deleteOuts() + delete(g.Nodes, n.Func) +} + +// deleteIns deletes all incoming edges to n. +func (n *Node) deleteIns() { + for _, e := range n.In { + removeOutEdge(e) + } + n.In = nil +} + +// deleteOuts deletes all outgoing edges from n. +func (n *Node) deleteOuts() { + for _, e := range n.Out { + removeInEdge(e) + } + n.Out = nil +} + +// removeOutEdge removes edge.Caller's outgoing edge 'edge'. +func removeOutEdge(edge *Edge) { + caller := edge.Caller + n := len(caller.Out) + for i, e := range caller.Out { + if e == edge { + // Replace it with the final element and shrink the slice. + caller.Out[i] = caller.Out[n-1] + caller.Out[n-1] = nil // aid GC + caller.Out = caller.Out[:n-1] + return + } + } + panic("edge not found: " + edge.String()) +} + +// removeInEdge removes edge.Callee's incoming edge 'edge'. 
+func removeInEdge(edge *Edge) { + caller := edge.Callee + n := len(caller.In) + for i, e := range caller.In { + if e == edge { + // Replace it with the final element and shrink the slice. + caller.In[i] = caller.In[n-1] + caller.In[n-1] = nil // aid GC + caller.In = caller.In[:n-1] + return + } + } + panic("edge not found: " + edge.String()) +} diff --git a/vendor/golang.org/x/tools/go/ssa/blockopt.go b/vendor/golang.org/x/tools/go/ssa/blockopt.go new file mode 100644 index 000000000000..e79260a21a2e --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/blockopt.go @@ -0,0 +1,187 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// Simple block optimizations to simplify the control flow graph. + +// TODO(adonovan): opt: instead of creating several "unreachable" blocks +// per function in the Builder, reuse a single one (e.g. at Blocks[1]) +// to reduce garbage. + +import ( + "fmt" + "os" +) + +// If true, perform sanity checking and show progress at each +// successive iteration of optimizeBlocks. Very verbose. +const debugBlockOpt = false + +// markReachable sets Index=-1 for all blocks reachable from b. +func markReachable(b *BasicBlock) { + b.Index = -1 + for _, succ := range b.Succs { + if succ.Index == 0 { + markReachable(succ) + } + } +} + +// deleteUnreachableBlocks marks all reachable blocks of f and +// eliminates (nils) all others, including possibly cyclic subgraphs. +// +func deleteUnreachableBlocks(f *Function) { + const white, black = 0, -1 + // We borrow b.Index temporarily as the mark bit. 
+ for _, b := range f.Blocks { + b.Index = white + } + markReachable(f.Blocks[0]) + if f.Recover != nil { + markReachable(f.Recover) + } + for i, b := range f.Blocks { + if b.Index == white { + for _, c := range b.Succs { + if c.Index == black { + c.removePred(b) // delete white->black edge + } + } + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "unreachable", b) + } + f.Blocks[i] = nil // delete b + } + } + f.removeNilBlocks() +} + +// jumpThreading attempts to apply simple jump-threading to block b, +// in which a->b->c become a->c if b is just a Jump. +// The result is true if the optimization was applied. +// +func jumpThreading(f *Function, b *BasicBlock) bool { + if b.Index == 0 { + return false // don't apply to entry block + } + if b.Instrs == nil { + return false + } + if _, ok := b.Instrs[0].(*Jump); !ok { + return false // not just a jump + } + c := b.Succs[0] + if c == b { + return false // don't apply to degenerate jump-to-self. + } + if c.hasPhi() { + return false // not sound without more effort + } + for j, a := range b.Preds { + a.replaceSucc(b, c) + + // If a now has two edges to c, replace its degenerate If by Jump. + if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { + jump := new(Jump) + jump.setBlock(a) + a.Instrs[len(a.Instrs)-1] = jump + a.Succs = a.Succs[:1] + c.removePred(b) + } else { + if j == 0 { + c.replacePred(b, a) + } else { + c.Preds = append(c.Preds, a) + } + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) + } + } + f.Blocks[b.Index] = nil // delete b + return true +} + +// fuseBlocks attempts to apply the block fusion optimization to block +// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. +// The result is true if the optimization was applied. 
+// +func fuseBlocks(f *Function, a *BasicBlock) bool { + if len(a.Succs) != 1 { + return false + } + b := a.Succs[0] + if len(b.Preds) != 1 { + return false + } + + // Degenerate &&/|| ops may result in a straight-line CFG + // containing φ-nodes. (Ideally we'd replace such them with + // their sole operand but that requires Referrers, built later.) + if b.hasPhi() { + return false // not sound without further effort + } + + // Eliminate jump at end of A, then copy all of B across. + a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) + for _, instr := range b.Instrs { + instr.setBlock(a) + } + + // A inherits B's successors + a.Succs = append(a.succs2[:0], b.Succs...) + + // Fix up Preds links of all successors of B. + for _, c := range b.Succs { + c.replacePred(b, a) + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) + } + + f.Blocks[b.Index] = nil // delete b + return true +} + +// optimizeBlocks() performs some simple block optimizations on a +// completed function: dead block elimination, block fusion, jump +// threading. +// +func optimizeBlocks(f *Function) { + deleteUnreachableBlocks(f) + + // Loop until no further progress. + changed := true + for changed { + changed = false + + if debugBlockOpt { + f.WriteTo(os.Stderr) + mustSanityCheck(f, nil) + } + + for _, b := range f.Blocks { + // f.Blocks will temporarily contain nils to indicate + // deleted blocks; we remove them at the end. + if b == nil { + continue + } + + // Fuse blocks. b->c becomes bc. + if fuseBlocks(f, b) { + changed = true + } + + // a->b->c becomes a->c if b contains only a Jump. 
+ if jumpThreading(f, b) { + changed = true + continue // (b was disconnected) + } + } + } + f.removeNilBlocks() +} diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go new file mode 100644 index 000000000000..fb6638a406bc --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/builder.go @@ -0,0 +1,2379 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the BUILD phase of SSA construction. +// +// SSA construction has two phases, CREATE and BUILD. In the CREATE phase +// (create.go), all packages are constructed and type-checked and +// definitions of all package members are created, method-sets are +// computed, and wrapper methods are synthesized. +// ssa.Packages are created in arbitrary order. +// +// In the BUILD phase (builder.go), the builder traverses the AST of +// each Go source function and generates SSA instructions for the +// function body. Initializer expressions for package-level variables +// are emitted to the package's init() function in the order specified +// by go/types.Info.InitOrder, then code for each function in the +// package is generated in lexical order. +// The BUILD phases for distinct packages are independent and are +// executed in parallel. +// +// TODO(adonovan): indeed, building functions is now embarrassingly parallel. +// Audit for concurrency then benchmark using more goroutines. +// +// The builder's and Program's indices (maps) are populated and +// mutated during the CREATE phase, but during the BUILD phase they +// remain constant. The sole exception is Prog.methodSets and its +// related maps, which are protected by a dedicated mutex. 
+ +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "os" + "sync" +) + +type opaqueType struct { + types.Type + name string +} + +func (t *opaqueType) String() string { return t.name } + +var ( + varOk = newVar("ok", tBool) + varIndex = newVar("index", tInt) + + // Type constants. + tBool = types.Typ[types.Bool] + tByte = types.Typ[types.Byte] + tInt = types.Typ[types.Int] + tInvalid = types.Typ[types.Invalid] + tString = types.Typ[types.String] + tUntypedNil = types.Typ[types.UntypedNil] + tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators + tEface = types.NewInterface(nil, nil).Complete() + + // SSA Value constants. + vZero = intConst(0) + vOne = intConst(1) + vTrue = NewConst(constant.MakeBool(true), tBool) +) + +// builder holds state associated with the package currently being built. +// Its methods contain all the logic for AST-to-SSA conversion. +type builder struct{} + +// cond emits to fn code to evaluate boolean condition e and jump +// to t or f depending on its value, performing various simplifications. +// +// Postcondition: fn.currentBlock is nil. +// +func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { + switch e := e.(type) { + case *ast.ParenExpr: + b.cond(fn, e.X, t, f) + return + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND: + ltrue := fn.newBasicBlock("cond.true") + b.cond(fn, e.X, ltrue, f) + fn.currentBlock = ltrue + b.cond(fn, e.Y, t, f) + return + + case token.LOR: + lfalse := fn.newBasicBlock("cond.false") + b.cond(fn, e.X, t, lfalse) + fn.currentBlock = lfalse + b.cond(fn, e.Y, t, f) + return + } + + case *ast.UnaryExpr: + if e.Op == token.NOT { + b.cond(fn, e.X, f, t) + return + } + } + + // A traditional compiler would simplify "if false" (etc) here + // but we do not, for better fidelity to the source code. 
+ // + // The value of a constant condition may be platform-specific, + // and may cause blocks that are reachable in some configuration + // to be hidden from subsequent analyses such as bug-finding tools. + emitIf(fn, b.expr(fn, e), t, f) +} + +// logicalBinop emits code to fn to evaluate e, a &&- or +// ||-expression whose reified boolean value is wanted. +// The value is returned. +// +func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { + rhs := fn.newBasicBlock("binop.rhs") + done := fn.newBasicBlock("binop.done") + + // T(e) = T(e.X) = T(e.Y) after untyped constants have been + // eliminated. + // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. + t := fn.Pkg.typeOf(e) + + var short Value // value of the short-circuit path + switch e.Op { + case token.LAND: + b.cond(fn, e.X, rhs, done) + short = NewConst(constant.MakeBool(false), t) + + case token.LOR: + b.cond(fn, e.X, done, rhs) + short = NewConst(constant.MakeBool(true), t) + } + + // Is rhs unreachable? + if rhs.Preds == nil { + // Simplify false&&y to false, true||y to true. + fn.currentBlock = done + return short + } + + // Is done unreachable? + if done.Preds == nil { + // Simplify true&&y (or false||y) to y. + fn.currentBlock = rhs + return b.expr(fn, e.Y) + } + + // All edges from e.X to done carry the short-circuit value. + var edges []Value + for range done.Preds { + edges = append(edges, short) + } + + // The edge from e.Y to done carries the value of e.Y. + fn.currentBlock = rhs + edges = append(edges, b.expr(fn, e.Y)) + emitJump(fn, done) + fn.currentBlock = done + + phi := &Phi{Edges: edges, Comment: e.Op.String()} + phi.pos = e.OpPos + phi.typ = t + return done.emit(phi) +} + +// exprN lowers a multi-result expression e to SSA form, emitting code +// to fn and returning a single Value whose type is a *types.Tuple. +// The caller must access the components via Extract. 
+// +// Multi-result expressions include CallExprs in a multi-value +// assignment or return statement, and "value,ok" uses of +// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op +// is token.ARROW). +// +func (b *builder) exprN(fn *Function, e ast.Expr) Value { + typ := fn.Pkg.typeOf(e).(*types.Tuple) + switch e := e.(type) { + case *ast.ParenExpr: + return b.exprN(fn, e.X) + + case *ast.CallExpr: + // Currently, no built-in function nor type conversion + // has multiple results, so we can avoid some of the + // cases for single-valued CallExpr. + var c Call + b.setCall(fn, e, &c.Call) + c.typ = typ + return fn.emit(&c) + + case *ast.IndexExpr: + mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + lookup := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), + CommaOk: true, + } + lookup.setType(typ) + lookup.setPos(e.Lbrack) + return fn.emit(lookup) + + case *ast.TypeAssertExpr: + return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) + + case *ast.UnaryExpr: // must be receive <- + unop := &UnOp{ + Op: token.ARROW, + X: b.expr(fn, e.X), + CommaOk: true, + } + unop.setType(typ) + unop.setPos(e.OpPos) + return fn.emit(unop) + } + panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) +} + +// builtin emits to fn SSA instructions to implement a call to the +// built-in function obj with the specified arguments +// and return type. It returns the value defined by the result. +// +// The result is nil if no special handling was required; in this case +// the caller should treat this like an ordinary library function +// call. 
+// +func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { + switch obj.Name() { + case "make": + switch typ.Underlying().(type) { + case *types.Slice: + n := b.expr(fn, args[1]) + m := n + if len(args) == 3 { + m = b.expr(fn, args[2]) + } + if m, ok := m.(*Const); ok { + // treat make([]T, n, m) as new([m]T)[:n] + cap := m.Int64() + at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) + alloc := emitNew(fn, at, pos) + alloc.Comment = "makeslice" + v := &Slice{ + X: alloc, + High: n, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + v := &MakeSlice{ + Len: n, + Cap: m, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Map: + var res Value + if len(args) == 2 { + res = b.expr(fn, args[1]) + } + v := &MakeMap{Reserve: res} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Chan: + var sz Value = vZero + if len(args) == 2 { + sz = b.expr(fn, args[1]) + } + v := &MakeChan{Size: sz} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + + case "new": + alloc := emitNew(fn, deref(typ), pos) + alloc.Comment = "new" + return alloc + + case "len", "cap": + // Special case: len or cap of an array or *array is + // based on the type, not the value which may be nil. + // We must still evaluate the value, though. (If it + // was side-effect free, the whole call would have + // been constant-folded.) + t := deref(fn.Pkg.typeOf(args[0])).Underlying() + if at, ok := t.(*types.Array); ok { + b.expr(fn, args[0]) // for effects only + return intConst(at.Len()) + } + // Otherwise treat as normal. 
+ + case "panic": + fn.emit(&Panic{ + X: emitConv(fn, b.expr(fn, args[0]), tEface), + pos: pos, + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + return vTrue // any non-nil Value will do + } + return nil // treat all others as a regular function call +} + +// addr lowers a single-result addressable expression e to SSA form, +// emitting code to fn and returning the location (an lvalue) defined +// by the expression. +// +// If escaping is true, addr marks the base variable of the +// addressable expression e as being a potentially escaping pointer +// value. For example, in this code: +// +// a := A{ +// b: [1]B{B{c: 1}} +// } +// return &a.b[0].c +// +// the application of & causes a.b[0].c to have its address taken, +// which means that ultimately the local variable a must be +// heap-allocated. This is a simple but very conservative escape +// analysis. +// +// Operations forming potentially escaping pointers include: +// - &x, including when implicit in method call or composite literals. +// - a[:] iff a is an array (not *array) +// - references to variables in lexically enclosing functions. 
+// +func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { + switch e := e.(type) { + case *ast.Ident: + if isBlankIdent(e) { + return blank{} + } + obj := fn.Pkg.objectOf(e) + v := fn.Prog.packageLevelValue(obj) // var (address) + if v == nil { + v = fn.lookup(obj, escaping) + } + return &address{addr: v, pos: e.Pos(), expr: e} + + case *ast.CompositeLit: + t := deref(fn.Pkg.typeOf(e)) + var v *Alloc + if escaping { + v = emitNew(fn, t, e.Lbrace) + } else { + v = fn.addLocal(t, e.Lbrace) + } + v.Comment = "complit" + var sb storebuf + b.compLit(fn, v, e, true, &sb) + sb.emit(fn) + return &address{addr: v, pos: e.Lbrace, expr: e} + + case *ast.ParenExpr: + return b.addr(fn, e.X, escaping) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // qualified identifier + return b.addr(fn, e.Sel, escaping) + } + if sel.Kind() != types.FieldVal { + panic(sel) + } + wantAddr := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + last := len(sel.Index()) - 1 + return &address{ + addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), + pos: e.Sel.Pos(), + expr: e.Sel, + } + + case *ast.IndexExpr: + var x Value + var et types.Type + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + x = b.addr(fn, e.X, escaping).address(fn) + et = types.NewPointer(t.Elem()) + case *types.Pointer: // *array + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) + case *types.Slice: + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem()) + case *types.Map: + return &element{ + m: b.expr(fn, e.X), + k: emitConv(fn, b.expr(fn, e.Index), t.Key()), + t: t.Elem(), + pos: e.Lbrack, + } + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + v := &IndexAddr{ + X: x, + Index: emitConv(fn, b.expr(fn, e.Index), tInt), + } + v.setPos(e.Lbrack) + v.setType(et) + return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e} + + case *ast.StarExpr: + return 
&address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} + } + + panic(fmt.Sprintf("unexpected address expression: %T", e)) +} + +type store struct { + lhs lvalue + rhs Value +} + +type storebuf struct{ stores []store } + +func (sb *storebuf) store(lhs lvalue, rhs Value) { + sb.stores = append(sb.stores, store{lhs, rhs}) +} + +func (sb *storebuf) emit(fn *Function) { + for _, s := range sb.stores { + s.lhs.store(fn, s.rhs) + } +} + +// assign emits to fn code to initialize the lvalue loc with the value +// of expression e. If isZero is true, assign assumes that loc holds +// the zero value for its type. +// +// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate +// better code in some cases, e.g., for composite literals in an +// addressable location. +// +// If sb is not nil, assign generates code to evaluate expression e, but +// not to update loc. Instead, the necessary stores are appended to the +// storebuf sb so that they can be executed later. This allows correct +// in-place update of existing variables when the RHS is a composite +// literal that may reference parts of the LHS. +// +func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { + // Can we initialize it in place? + if e, ok := unparen(e).(*ast.CompositeLit); ok { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + if _, ok := loc.(blank); !ok { // avoid calling blank.typ() + if isPointer(loc.typ()) { + ptr := b.addr(fn, e, true).address(fn) + // copy address + if sb != nil { + sb.store(loc, ptr) + } else { + loc.store(fn, ptr) + } + return + } + } + + if _, ok := loc.(*address); ok { + if isInterface(loc.typ()) { + // e.g. var x interface{} = T{...} + // Can't in-place initialize an interface value. + // Fall back to copying. 
+ } else { + // x = T{...} or x := T{...} + addr := loc.address(fn) + if sb != nil { + b.compLit(fn, addr, e, isZero, sb) + } else { + var sb storebuf + b.compLit(fn, addr, e, isZero, &sb) + sb.emit(fn) + } + + // Subtle: emit debug ref for aggregate types only; + // slice and map are handled by store ops in compLit. + switch loc.typ().Underlying().(type) { + case *types.Struct, *types.Array: + emitDebugRef(fn, e, addr, true) + } + + return + } + } + } + + // simple case: just copy + rhs := b.expr(fn, e) + if sb != nil { + sb.store(loc, rhs) + } else { + loc.store(fn, rhs) + } +} + +// expr lowers a single-result expression e to SSA form, emitting code +// to fn and returning the Value defined by the expression. +// +func (b *builder) expr(fn *Function, e ast.Expr) Value { + e = unparen(e) + + tv := fn.Pkg.info.Types[e] + + // Is expression a constant? + if tv.Value != nil { + return NewConst(tv.Value, tv.Type) + } + + var v Value + if tv.Addressable() { + // Prefer pointer arithmetic ({Index,Field}Addr) followed + // by Load over subelement extraction (e.g. Index, Field), + // to avoid large copies. 
+ v = b.addr(fn, e, false).load(fn) + } else { + v = b.expr0(fn, e, tv) + } + if fn.debugInfo() { + emitDebugRef(fn, e, v, false) + } + return v +} + +func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { + switch e := e.(type) { + case *ast.BasicLit: + panic("non-constant BasicLit") // unreachable + + case *ast.FuncLit: + fn2 := &Function{ + name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), + Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), + pos: e.Type.Func, + parent: fn, + Pkg: fn.Pkg, + Prog: fn.Prog, + syntax: e, + } + fn.AnonFuncs = append(fn.AnonFuncs, fn2) + b.buildFunction(fn2) + if fn2.FreeVars == nil { + return fn2 + } + v := &MakeClosure{Fn: fn2} + v.setType(tv.Type) + for _, fv := range fn2.FreeVars { + v.Bindings = append(v.Bindings, fv.outer) + fv.outer = nil + } + return fn.emit(v) + + case *ast.TypeAssertExpr: // single-result form only + return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen) + + case *ast.CallExpr: + if fn.Pkg.info.Types[e.Fun].IsType() { + // Explicit type conversion, e.g. string(x) or big.Int(x) + x := b.expr(fn, e.Args[0]) + y := emitConv(fn, x, tv.Type) + if y != x { + switch y := y.(type) { + case *Convert: + y.pos = e.Lparen + case *ChangeType: + y.pos = e.Lparen + case *MakeInterface: + y.pos = e.Lparen + } + } + return y + } + // Call to "intrinsic" built-ins, e.g. new, make, panic. + if id, ok := unparen(e.Fun).(*ast.Ident); ok { + if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { + if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil { + return v + } + } + } + // Regular function call. + var v Call + b.setCall(fn, e, &v.Call) + v.setType(tv.Type) + return fn.emit(&v) + + case *ast.UnaryExpr: + switch e.Op { + case token.AND: // &X --- potentially escaping. + addr := b.addr(fn, e.X, true) + if _, ok := unparen(e.X).(*ast.StarExpr); ok { + // &*p must panic if p is nil (http://golang.org/s/go12nil). 
+ // For simplicity, we'll just (suboptimally) rely + // on the side effects of a load. + // TODO(adonovan): emit dedicated nilcheck. + addr.load(fn) + } + return addr.address(fn) + case token.ADD: + return b.expr(fn, e.X) + case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ + v := &UnOp{ + Op: e.Op, + X: b.expr(fn, e.X), + } + v.setPos(e.OpPos) + v.setType(tv.Type) + return fn.emit(v) + default: + panic(e.Op) + } + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND, token.LOR: + return b.logicalBinop(fn, e) + case token.SHL, token.SHR: + fallthrough + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos) + + case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: + cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) + // The type of x==y may be UntypedBool. + return emitConv(fn, cmp, DefaultType(tv.Type)) + default: + panic("illegal op in BinaryExpr: " + e.Op.String()) + } + + case *ast.SliceExpr: + var low, high, max Value + var x Value + switch fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Potentially escaping. + x = b.addr(fn, e.X, true).address(fn) + case *types.Basic, *types.Slice, *types.Pointer: // *array + x = b.expr(fn, e.X) + default: + panic("unreachable") + } + if e.High != nil { + high = b.expr(fn, e.High) + } + if e.Low != nil { + low = b.expr(fn, e.Low) + } + if e.Slice3 { + max = b.expr(fn, e.Max) + } + v := &Slice{ + X: x, + Low: low, + High: high, + Max: max, + } + v.setPos(e.Lbrack) + v.setType(tv.Type) + return fn.emit(v) + + case *ast.Ident: + obj := fn.Pkg.info.Uses[e] + // Universal built-in or nil? + switch obj := obj.(type) { + case *types.Builtin: + return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} + case *types.Nil: + return nilConst(tv.Type) + } + // Package-level func or var? 
+ if v := fn.Prog.packageLevelValue(obj); v != nil { + if _, ok := obj.(*types.Var); ok { + return emitLoad(fn, v) // var (address) + } + return v // (func) + } + // Local var. + return emitLoad(fn, fn.lookup(obj, false)) // var (address) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // qualified identifier + return b.expr(fn, e.Sel) + } + switch sel.Kind() { + case types.MethodExpr: + // (*T).f or T.f, the method f from the method-set of type T. + // The result is a "thunk". + return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type) + + case types.MethodVal: + // e.f where e is an expression and f is a method. + // The result is a "bound". + obj := sel.Obj().(*types.Func) + rt := recvType(obj) + wantAddr := isPointer(rt) + escaping := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + if isInterface(rt) { + // If v has interface type I, + // we must emit a check that v is non-nil. + // We use: typeassert v.(I). + emitTypeAssert(fn, v, rt, token.NoPos) + } + c := &MakeClosure{ + Fn: makeBound(fn.Prog, obj), + Bindings: []Value{v}, + } + c.setPos(e.Sel.Pos()) + c.setType(tv.Type) + return fn.emit(c) + + case types.FieldVal: + indices := sel.Index() + last := len(indices) - 1 + v := b.expr(fn, e.X) + v = emitImplicitSelections(fn, v, indices[:last]) + v = emitFieldSelection(fn, v, indices[last], false, e.Sel) + return v + } + + panic("unexpected expression-relative selector") + + case *ast.IndexExpr: + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Non-addressable array (in a register). + v := &Index{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), tInt), + } + v.setPos(e.Lbrack) + v.setType(t.Elem()) + return fn.emit(v) + + case *types.Map: + // Maps are not addressable. 
+ mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + v := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), + } + v.setPos(e.Lbrack) + v.setType(mapt.Elem()) + return fn.emit(v) + + case *types.Basic: // => string + // Strings are not addressable. + v := &Lookup{ + X: b.expr(fn, e.X), + Index: b.expr(fn, e.Index), + } + v.setPos(e.Lbrack) + v.setType(tByte) + return fn.emit(v) + + case *types.Slice, *types.Pointer: // *array + // Addressable slice/array; use IndexAddr and Load. + return b.addr(fn, e, false).load(fn) + + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + + case *ast.CompositeLit, *ast.StarExpr: + // Addressable types (lvalues) + return b.addr(fn, e, false).load(fn) + } + + panic(fmt.Sprintf("unexpected expr: %T", e)) +} + +// stmtList emits to fn code for all statements in list. +func (b *builder) stmtList(fn *Function, list []ast.Stmt) { + for _, s := range list { + b.stmt(fn, s) + } +} + +// receiver emits to fn code for expression e in the "receiver" +// position of selection e.f (where f may be a field or a method) and +// returns the effective receiver after applying the implicit field +// selections of sel. +// +// wantAddr requests that the result is an an address. If +// !sel.Indirect(), this may require that e be built in addr() mode; it +// must thus be addressable. +// +// escaping is defined as per builder.addr(). 
+// +func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value { + var v Value + if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { + v = b.addr(fn, e, escaping).address(fn) + } else { + v = b.expr(fn, e) + } + + last := len(sel.Index()) - 1 + v = emitImplicitSelections(fn, v, sel.Index()[:last]) + if !wantAddr && isPointer(v.Type()) { + v = emitLoad(fn, v) + } + return v +} + +// setCallFunc populates the function parts of a CallCommon structure +// (Func, Method, Recv, Args[0]) based on the kind of invocation +// occurring in e. +// +func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { + c.pos = e.Lparen + + // Is this a method call? + if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { + sel, ok := fn.Pkg.info.Selections[selector] + if ok && sel.Kind() == types.MethodVal { + obj := sel.Obj().(*types.Func) + recv := recvType(obj) + wantAddr := isPointer(recv) + escaping := true + v := b.receiver(fn, selector.X, wantAddr, escaping, sel) + if isInterface(recv) { + // Invoke-mode call. + c.Value = v + c.Method = obj + } else { + // "Call"-mode call. + c.Value = fn.Prog.declaredFunc(obj) + c.Args = append(c.Args, v) + } + return + } + + // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): + // a statically dispatched call to the method f in the + // method-set of T or *T. T may be an interface. + // + // e.Fun would evaluate to a concrete method, interface + // wrapper function, or promotion wrapper. + // + // For now, we evaluate it in the usual way. + // + // TODO(adonovan): opt: inline expr() here, to make the + // call static and to avoid generation of wrappers. + // It's somewhat tricky as it may consume the first + // actual parameter if the call is "invoke" mode. 
+ // + // Examples: + // type T struct{}; func (T) f() {} // "call" mode + // type T interface { f() } // "invoke" mode + // + // type S struct{ T } + // + // var s S + // S.f(s) + // (*S).f(&s) + // + // Suggested approach: + // - consume the first actual parameter expression + // and build it with b.expr(). + // - apply implicit field selections. + // - use MethodVal logic to populate fields of c. + } + + // Evaluate the function operand in the usual way. + c.Value = b.expr(fn, e.Fun) +} + +// emitCallArgs emits to f code for the actual parameters of call e to +// a (possibly built-in) function of effective type sig. +// The argument values are appended to args, which is then returned. +// +func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { + // f(x, y, z...): pass slice z straight through. + if e.Ellipsis != 0 { + for i, arg := range e.Args { + v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) + args = append(args, v) + } + return args + } + + offset := len(args) // 1 if call has receiver, 0 otherwise + + // Evaluate actual parameter expressions. + // + // If this is a chained call of the form f(g()) where g has + // multiple return values (MRV), they are flattened out into + // args; a suffix of them may end up in a varargs slice. + for _, arg := range e.Args { + v := b.expr(fn, arg) + if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain + for i, n := 0, ttuple.Len(); i < n; i++ { + args = append(args, emitExtract(fn, v, i)) + } + } else { + args = append(args, v) + } + } + + // Actual->formal assignability conversions for normal parameters. + np := sig.Params().Len() // number of normal parameters + if sig.Variadic() { + np-- + } + for i := 0; i < np; i++ { + args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) + } + + // Actual->formal assignability conversions for variadic parameter, + // and construction of slice. 
+ if sig.Variadic() { + varargs := args[offset+np:] + st := sig.Params().At(np).Type().(*types.Slice) + vt := st.Elem() + if len(varargs) == 0 { + args = append(args, nilConst(st)) + } else { + // Replace a suffix of args with a slice containing it. + at := types.NewArray(vt, int64(len(varargs))) + a := emitNew(fn, at, token.NoPos) + a.setPos(e.Rparen) + a.Comment = "varargs" + for i, arg := range varargs { + iaddr := &IndexAddr{ + X: a, + Index: intConst(int64(i)), + } + iaddr.setType(types.NewPointer(vt)) + fn.emit(iaddr) + emitStore(fn, iaddr, arg, arg.Pos()) + } + s := &Slice{X: a} + s.setType(st) + args[offset+np] = fn.emit(s) + args = args[:offset+np+1] + } + } + return args +} + +// setCall emits to fn code to evaluate all the parameters of a function +// call e, and populates *c with those values. +// +func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { + // First deal with the f(...) part and optional receiver. + b.setCallFunc(fn, e, c) + + // Then append the other actual parameters. + sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) + if sig == nil { + panic(fmt.Sprintf("no signature for call of %s", e.Fun)) + } + c.Args = b.emitCallArgs(fn, sig, e, c.Args) +} + +// assignOp emits to fn code to perform loc = val. +func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { + oldv := loc.load(fn) + loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos)) +} + +// localValueSpec emits to fn code to define all of the vars in the +// function-local ValueSpec, spec. +// +func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { + switch { + case len(spec.Values) == len(spec.Names): + // e.g. 
var x, y = 0, 1 + // 1:1 assignment + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + } + lval := b.addr(fn, id, false) // non-escaping + b.assign(fn, lval, spec.Values[i], true, nil) + } + + case len(spec.Values) == 0: + // e.g. var x, y int + // Locals are implicitly zero-initialized. + for _, id := range spec.Names { + if !isBlankIdent(id) { + lhs := fn.addLocalForIdent(id) + if fn.debugInfo() { + emitDebugRef(fn, id, lhs, true) + } + } + } + + default: + // e.g. var x, y = pos() + tuple := b.exprN(fn, spec.Values[0]) + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + lhs := b.addr(fn, id, false) // non-escaping + lhs.store(fn, emitExtract(fn, tuple, i)) + } + } + } +} + +// assignStmt emits code to fn for a parallel assignment of rhss to lhss. +// isDef is true if this is a short variable declaration (:=). +// +// Note the similarity with localValueSpec. +// +func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { + // Side effects of all LHSs and RHSs must occur in left-to-right order. + lvals := make([]lvalue, len(lhss)) + isZero := make([]bool, len(lhss)) + for i, lhs := range lhss { + var lval lvalue = blank{} + if !isBlankIdent(lhs) { + if isDef { + if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { + fn.addNamedLocal(obj) + isZero[i] = true + } + } + lval = b.addr(fn, lhs, false) // non-escaping + } + lvals[i] = lval + } + if len(lhss) == len(rhss) { + // Simple assignment: x = f() (!isDef) + // Parallel assignment: x, y = f(), g() (!isDef) + // or short var decl: x, y := f(), g() (isDef) + // + // In all cases, the RHSs may refer to the LHSs, + // so we need a storebuf. + var sb storebuf + for i := range rhss { + b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) + } + sb.emit(fn) + } else { + // e.g. 
x, y = pos() + tuple := b.exprN(fn, rhss[0]) + emitDebugRef(fn, rhss[0], tuple, false) + for i, lval := range lvals { + lval.store(fn, emitExtract(fn, tuple, i)) + } + } +} + +// arrayLen returns the length of the array whose composite literal elements are elts. +func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { + var max int64 = -1 + var i int64 = -1 + for _, e := range elts { + if kv, ok := e.(*ast.KeyValueExpr); ok { + i = b.expr(fn, kv.Key).(*Const).Int64() + } else { + i++ + } + if i > max { + max = i + } + } + return max + 1 +} + +// compLit emits to fn code to initialize a composite literal e at +// address addr with type typ. +// +// Nested composite literals are recursively initialized in place +// where possible. If isZero is true, compLit assumes that addr +// holds the zero value for typ. +// +// Because the elements of a composite literal may refer to the +// variables being updated, as in the second line below, +// x := T{a: 1} +// x = T{a: x.a} +// all the reads must occur before all the writes. Thus all stores to +// loc are emitted to the storebuf sb for later execution. +// +// A CompositeLit may have pointer type only in the recursive (nested) +// case when the type name is implicit. e.g. in []*T{{}}, the inner +// literal has type *T behaves like &T{}. +// In that case, addr must hold a T, not a *T. 
+// +func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { + typ := deref(fn.Pkg.typeOf(e)) + switch t := typ.Underlying().(type) { + case *types.Struct: + if !isZero && len(e.Elts) != t.NumFields() { + // memclear + sb.store(&address{addr, e.Lbrace, nil}, + zeroValue(fn, deref(addr.Type()))) + isZero = true + } + for i, e := range e.Elts { + fieldIndex := i + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + fname := kv.Key.(*ast.Ident).Name + for i, n := 0, t.NumFields(); i < n; i++ { + sf := t.Field(i) + if sf.Name() == fname { + fieldIndex = i + pos = kv.Colon + e = kv.Value + break + } + } + } + sf := t.Field(fieldIndex) + faddr := &FieldAddr{ + X: addr, + Field: fieldIndex, + } + faddr.setType(types.NewPointer(sf.Type())) + fn.emit(faddr) + b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) + } + + case *types.Array, *types.Slice: + var at *types.Array + var array Value + switch t := t.(type) { + case *types.Slice: + at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) + alloc := emitNew(fn, at, e.Lbrace) + alloc.Comment = "slicelit" + array = alloc + case *types.Array: + at = t + array = addr + + if !isZero && int64(len(e.Elts)) != at.Len() { + // memclear + sb.store(&address{array, e.Lbrace, nil}, + zeroValue(fn, deref(array.Type()))) + } + } + + var idx *Const + for _, e := range e.Elts { + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + idx = b.expr(fn, kv.Key).(*Const) + pos = kv.Colon + e = kv.Value + } else { + var idxval int64 + if idx != nil { + idxval = idx.Int64() + 1 + } + idx = intConst(idxval) + } + iaddr := &IndexAddr{ + X: array, + Index: idx, + } + iaddr.setType(types.NewPointer(at.Elem())) + fn.emit(iaddr) + if t != at { // slice + // backing array is unaliased => storebuf not needed. 
+ b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) + } else { + b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) + } + } + + if t != at { // slice + s := &Slice{X: array} + s.setPos(e.Lbrace) + s.setType(typ) + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) + } + + case *types.Map: + m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} + m.setPos(e.Lbrace) + m.setType(typ) + fn.emit(m) + for _, e := range e.Elts { + e := e.(*ast.KeyValueExpr) + + // If a key expression in a map literal is itself a + // composite literal, the type may be omitted. + // For example: + // map[*struct{}]bool{{}: true} + // An &-operation may be implied: + // map[*struct{}]bool{&struct{}{}: true} + var key Value + if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + key = b.addr(fn, e.Key, true).address(fn) + } else { + key = b.expr(fn, e.Key) + } + + loc := element{ + m: m, + k: emitConv(fn, key, t.Key()), + t: t.Elem(), + pos: e.Colon, + } + + // We call assign() only because it takes care + // of any &-operation required in the recursive + // case, e.g., + // map[int]*struct{}{0: {}} implies &struct{}{}. + // In-place update is of course impossible, + // and no storebuf is needed. + b.assign(fn, &loc, e.Value, true, nil) + } + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) + + default: + panic("unexpected CompositeLit type: " + t.String()) + } +} + +// switchStmt emits to fn code for the switch statement s, optionally +// labelled by label. +// +func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { + // We treat SwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches() + // to those cases that are free of side effects. 
+ if s.Init != nil { + b.stmt(fn, s.Init) + } + var tag Value = vTrue + if s.Tag != nil { + tag = b.expr(fn, s.Tag) + } + done := fn.newBasicBlock("switch.done") + if label != nil { + label._break = done + } + // We pull the default case (if present) down to the end. + // But each fallthrough label must point to the next + // body block in source order, so we preallocate a + // body block (fallthru) for the next case. + // Unfortunately this makes for a confusing block order. + var dfltBody *[]ast.Stmt + var dfltFallthrough *BasicBlock + var fallthru, dfltBlock *BasicBlock + ncases := len(s.Body.List) + for i, clause := range s.Body.List { + body := fallthru + if body == nil { + body = fn.newBasicBlock("switch.body") // first case only + } + + // Preallocate body block for the next case. + fallthru = done + if i+1 < ncases { + fallthru = fn.newBasicBlock("switch.body") + } + + cc := clause.(*ast.CaseClause) + if cc.List == nil { + // Default case. + dfltBody = &cc.Body + dfltFallthrough = fallthru + dfltBlock = body + continue + } + + var nextCond *BasicBlock + for _, cond := range cc.List { + nextCond = fn.newBasicBlock("switch.next") + // TODO(adonovan): opt: when tag==vTrue, we'd + // get better code if we use b.cond(cond) + // instead of BinOp(EQL, tag, b.expr(cond)) + // followed by If. Don't forget conversions + // though. 
+ cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) + emitIf(fn, cond, body, nextCond) + fn.currentBlock = nextCond + } + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: fallthru, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = nextCond + } + if dfltBlock != nil { + emitJump(fn, dfltBlock) + fn.currentBlock = dfltBlock + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: dfltFallthrough, + } + b.stmtList(fn, *dfltBody) + fn.targets = fn.targets.tail + } + emitJump(fn, done) + fn.currentBlock = done +} + +// typeSwitchStmt emits to fn code for the type switch statement s, optionally +// labelled by label. +// +func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { + // We treat TypeSwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches(). + + // Typeswitch lowering: + // + // var x X + // switch y := x.(type) { + // case T1, T2: S1 // >1 (y := x) + // case nil: SN // nil (y := x) + // default: SD // 0 types (y := x) + // case T3: S3 // 1 type (y := x.(T3)) + // } + // + // ...s.Init... + // x := eval x + // .caseT1: + // t1, ok1 := typeswitch,ok x + // if ok1 then goto S1 else goto .caseT2 + // .caseT2: + // t2, ok2 := typeswitch,ok x + // if ok2 then goto S1 else goto .caseNil + // .S1: + // y := x + // ...S1... + // goto done + // .caseNil: + // if t2, ok2 := typeswitch,ok x + // if x == nil then goto SN else goto .caseT3 + // .SN: + // y := x + // ...SN... + // goto done + // .caseT3: + // t3, ok3 := typeswitch,ok x + // if ok3 then goto S3 else goto default + // .S3: + // y := t3 + // ...S3... + // goto done + // .default: + // y := x + // ...SD... 
+ // goto done + // .done: + + if s.Init != nil { + b.stmt(fn, s.Init) + } + + var x Value + switch ass := s.Assign.(type) { + case *ast.ExprStmt: // x.(type) + x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) + case *ast.AssignStmt: // y := x.(type) + x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) + } + + done := fn.newBasicBlock("typeswitch.done") + if label != nil { + label._break = done + } + var default_ *ast.CaseClause + for _, clause := range s.Body.List { + cc := clause.(*ast.CaseClause) + if cc.List == nil { + default_ = cc + continue + } + body := fn.newBasicBlock("typeswitch.body") + var next *BasicBlock + var casetype types.Type + var ti Value // ti, ok := typeassert,ok x + for _, cond := range cc.List { + next = fn.newBasicBlock("typeswitch.next") + casetype = fn.Pkg.typeOf(cond) + var condv Value + if casetype == tUntypedNil { + condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos) + ti = x + } else { + yok := emitTypeTest(fn, x, casetype, cc.Case) + ti = emitExtract(fn, yok, 0) + condv = emitExtract(fn, yok, 1) + } + emitIf(fn, condv, body, next) + fn.currentBlock = next + } + if len(cc.List) != 1 { + ti = x + } + fn.currentBlock = body + b.typeCaseBody(fn, cc, ti, done) + fn.currentBlock = next + } + if default_ != nil { + b.typeCaseBody(fn, default_, x, done) + } else { + emitJump(fn, done) + } + fn.currentBlock = done +} + +func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { + if obj := fn.Pkg.info.Implicits[cc]; obj != nil { + // In a switch y := x.(type), each case clause + // implicitly declares a distinct object y. + // In a single-type case, y has that type. + // In multi-type cases, 'case nil' and default, + // y has the same type as the interface operand. 
+ emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos()) + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) +} + +// selectStmt emits to fn code for the select statement s, optionally +// labelled by label. +// +func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { + // A blocking select of a single case degenerates to a + // simple send or receive. + // TODO(adonovan): opt: is this optimization worth its weight? + if len(s.Body.List) == 1 { + clause := s.Body.List[0].(*ast.CommClause) + if clause.Comm != nil { + b.stmt(fn, clause.Comm) + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = done + return + } + } + + // First evaluate all channels in all cases, and find + // the directions of each state. 
+ var states []*SelectState + blocking := true + debugInfo := fn.debugInfo() + for _, clause := range s.Body.List { + var st *SelectState + switch comm := clause.(*ast.CommClause).Comm.(type) { + case nil: // default case + blocking = false + continue + + case *ast.SendStmt: // ch<- i + ch := b.expr(fn, comm.Chan) + st = &SelectState{ + Dir: types.SendOnly, + Chan: ch, + Send: emitConv(fn, b.expr(fn, comm.Value), + ch.Type().Underlying().(*types.Chan).Elem()), + Pos: comm.Arrow, + } + if debugInfo { + st.DebugNode = comm + } + + case *ast.AssignStmt: // x := <-ch + recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + + case *ast.ExprStmt: // <-ch + recv := unparen(comm.X).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + } + states = append(states, st) + } + + // We dispatch on the (fair) result of Select using a + // sequential if-else chain, in effect: + // + // idx, recvOk, r0...r_n-1 := select(...) + // if idx == 0 { // receive on channel 0 (first receive => r0) + // x, ok := r0, recvOk + // ...state0... + // } else if v == 1 { // send on channel 1 + // ...state1... + // } else { + // ...default... 
+ // } + sel := &Select{ + States: states, + Blocking: blocking, + } + sel.setPos(s.Select) + var vars []*types.Var + vars = append(vars, varIndex, varOk) + for _, st := range states { + if st.Dir == types.RecvOnly { + tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() + vars = append(vars, anonVar(tElem)) + } + } + sel.setType(types.NewTuple(vars...)) + + fn.emit(sel) + idx := emitExtract(fn, sel, 0) + + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + + var defaultBody *[]ast.Stmt + state := 0 + r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV + for _, cc := range s.Body.List { + clause := cc.(*ast.CommClause) + if clause.Comm == nil { + defaultBody = &clause.Body + continue + } + body := fn.newBasicBlock("select.body") + next := fn.newBasicBlock("select.next") + emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + switch comm := clause.Comm.(type) { + case *ast.ExprStmt: // <-ch + if debugInfo { + v := emitExtract(fn, sel, r) + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + r++ + + case *ast.AssignStmt: // x := <-states[state].Chan + if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) + } + x := b.addr(fn, comm.Lhs[0], false) // non-escaping + v := emitExtract(fn, sel, r) + if debugInfo { + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + x.store(fn, v) + + if len(comm.Lhs) == 2 { // x, ok := ... 
+ if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) + } + ok := b.addr(fn, comm.Lhs[1], false) // non-escaping + ok.store(fn, emitExtract(fn, sel, 1)) + } + r++ + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = next + state++ + } + if defaultBody != nil { + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, *defaultBody) + fn.targets = fn.targets.tail + } else { + // A blocking select must match some case. + // (This should really be a runtime.errorString, not a string.) + fn.emit(&Panic{ + X: emitConv(fn, stringConst("blocking select matched no case"), tEface), + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + } + emitJump(fn, done) + fn.currentBlock = done +} + +// forStmt emits to fn code for the for statement s, optionally +// labelled by label. +// +func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { + // ...init... + // jump loop + // loop: + // if cond goto body else done + // body: + // ...body... + // jump post + // post: (target of continue) + // ...post... 
+ // jump loop + // done: (target of break) + if s.Init != nil { + b.stmt(fn, s.Init) + } + body := fn.newBasicBlock("for.body") + done := fn.newBasicBlock("for.done") // target of 'break' + loop := body // target of back-edge + if s.Cond != nil { + loop = fn.newBasicBlock("for.loop") + } + cont := loop // target of 'continue' + if s.Post != nil { + cont = fn.newBasicBlock("for.post") + } + if label != nil { + label._break = done + label._continue = cont + } + emitJump(fn, loop) + fn.currentBlock = loop + if loop != body { + b.cond(fn, s.Cond, body, done) + fn.currentBlock = body + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: cont, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, cont) + + if s.Post != nil { + fn.currentBlock = cont + b.stmt(fn, s.Post) + emitJump(fn, loop) // back-edge + } + fn.currentBlock = done +} + +// rangeIndexed emits to fn the header for an integer-indexed loop +// over array, *array or slice value x. +// The v result is defined only if tv is non-nil. +// forPos is the position of the "for" token. +// +func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // length = len(x) + // index = -1 + // loop: (target of continue) + // index++ + // if index < length goto body else done + // body: + // k = index + // v = x[index] + // ...body... + // jump loop + // done: (target of break) + + // Determine number of iterations. + var length Value + if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { + // For array or *array, the number of iterations is + // known statically thanks to the type. We avoid a + // data dependence upon x, permitting later dead-code + // elimination if x is pure, static unrolling, etc. + // Ranging over a nil *array may have >0 iterations. + // We still generate code for x, in case it has effects. + length = intConst(arr.Len()) + } else { + // length = len(x). 
+ var c Call + c.Call.Value = makeLen(x.Type()) + c.Call.Args = []Value{x} + c.setType(tInt) + length = fn.emit(&c) + } + + index := fn.addLocal(tInt, token.NoPos) + emitStore(fn, index, intConst(-1), pos) + + loop = fn.newBasicBlock("rangeindex.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + incr := &BinOp{ + Op: token.ADD, + X: emitLoad(fn, index), + Y: vOne, + } + incr.setType(tInt) + emitStore(fn, index, fn.emit(incr), pos) + + body := fn.newBasicBlock("rangeindex.body") + done = fn.newBasicBlock("rangeindex.done") + emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) + fn.currentBlock = body + + k = emitLoad(fn, index) + if tv != nil { + switch t := x.Type().Underlying().(type) { + case *types.Array: + instr := &Index{ + X: x, + Index: k, + } + instr.setType(t.Elem()) + v = fn.emit(instr) + + case *types.Pointer: // *array + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) + v = emitLoad(fn, fn.emit(instr)) + + case *types.Slice: + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem())) + v = emitLoad(fn, fn.emit(instr)) + + default: + panic("rangeIndexed x:" + t.String()) + } + } + return +} + +// rangeIter emits to fn the header for a loop using +// Range/Next/Extract to iterate over map or string value x. +// tk and tv are the types of the key/value results k and v, or nil +// if the respective component is not wanted. +// +func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // it = range x + // loop: (target of continue) + // okv = next it (ok, key, value) + // ok = extract okv #0 + // if ok goto body else done + // body: + // k = extract okv #1 + // v = extract okv #2 + // ...body... 
+ // jump loop + // done: (target of break) + // + + if tk == nil { + tk = tInvalid + } + if tv == nil { + tv = tInvalid + } + + rng := &Range{X: x} + rng.setPos(pos) + rng.setType(tRangeIter) + it := fn.emit(rng) + + loop = fn.newBasicBlock("rangeiter.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + _, isString := x.Type().Underlying().(*types.Basic) + + okv := &Next{ + Iter: it, + IsString: isString, + } + okv.setType(types.NewTuple( + varOk, + newVar("k", tk), + newVar("v", tv), + )) + fn.emit(okv) + + body := fn.newBasicBlock("rangeiter.body") + done = fn.newBasicBlock("rangeiter.done") + emitIf(fn, emitExtract(fn, okv, 0), body, done) + fn.currentBlock = body + + if tk != tInvalid { + k = emitExtract(fn, okv, 1) + } + if tv != tInvalid { + v = emitExtract(fn, okv, 2) + } + return +} + +// rangeChan emits to fn the header for a loop that receives from +// channel x until it fails. +// tk is the channel's element type, or nil if the k result is +// not wanted +// pos is the position of the '=' or ':=' token. +// +func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { + // + // loop: (target of continue) + // ko = <-x (key, ok) + // ok = extract ko #1 + // if ok goto body else done + // body: + // k = extract ko #0 + // ... + // goto loop + // done: (target of break) + + loop = fn.newBasicBlock("rangechan.loop") + emitJump(fn, loop) + fn.currentBlock = loop + recv := &UnOp{ + Op: token.ARROW, + X: x, + CommaOk: true, + } + recv.setPos(pos) + recv.setType(types.NewTuple( + newVar("k", x.Type().Underlying().(*types.Chan).Elem()), + varOk, + )) + ko := fn.emit(recv) + body := fn.newBasicBlock("rangechan.body") + done = fn.newBasicBlock("rangechan.done") + emitIf(fn, emitExtract(fn, ko, 1), body, done) + fn.currentBlock = body + if tk != nil { + k = emitExtract(fn, ko, 0) + } + return +} + +// rangeStmt emits to fn code for the range statement s, optionally +// labelled by label. 
+// +func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { + var tk, tv types.Type + if s.Key != nil && !isBlankIdent(s.Key) { + tk = fn.Pkg.typeOf(s.Key) + } + if s.Value != nil && !isBlankIdent(s.Value) { + tv = fn.Pkg.typeOf(s.Value) + } + + // If iteration variables are defined (:=), this + // occurs once outside the loop. + // + // Unlike a short variable declaration, a RangeStmt + // using := never redeclares an existing variable; it + // always creates a new one. + if s.Tok == token.DEFINE { + if tk != nil { + fn.addLocalForIdent(s.Key.(*ast.Ident)) + } + if tv != nil { + fn.addLocalForIdent(s.Value.(*ast.Ident)) + } + } + + x := b.expr(fn, s.X) + + var k, v Value + var loop, done *BasicBlock + switch rt := x.Type().Underlying().(type) { + case *types.Slice, *types.Array, *types.Pointer: // *array + k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) + + case *types.Chan: + k, loop, done = b.rangeChan(fn, x, tk, s.For) + + case *types.Map, *types.Basic: // string + k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) + + default: + panic("Cannot range over: " + rt.String()) + } + + // Evaluate both LHS expressions before we update either. + var kl, vl lvalue + if tk != nil { + kl = b.addr(fn, s.Key, false) // non-escaping + } + if tv != nil { + vl = b.addr(fn, s.Value, false) // non-escaping + } + if tk != nil { + kl.store(fn, k) + } + if tv != nil { + vl.store(fn, v) + } + + if label != nil { + label._break = done + label._continue = loop + } + + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: loop, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, loop) // back-edge + fn.currentBlock = done +} + +// stmt lowers statement s to SSA form, emitting code to fn. +func (b *builder) stmt(fn *Function, _s ast.Stmt) { + // The label of the current statement. 
If non-nil, its _goto + // target is always set; its _break and _continue are set only + // within the body of switch/typeswitch/select/for/range. + // It is effectively an additional default-nil parameter of stmt(). + var label *lblock +start: + switch s := _s.(type) { + case *ast.EmptyStmt: + // ignore. (Usually removed by gofmt.) + + case *ast.DeclStmt: // Con, Var or Typ + d := s.Decl.(*ast.GenDecl) + if d.Tok == token.VAR { + for _, spec := range d.Specs { + if vs, ok := spec.(*ast.ValueSpec); ok { + b.localValueSpec(fn, vs) + } + } + } + + case *ast.LabeledStmt: + label = fn.labelledBlock(s.Label) + emitJump(fn, label._goto) + fn.currentBlock = label._goto + _s = s.Stmt + goto start // effectively: tailcall stmt(fn, s.Stmt, label) + + case *ast.ExprStmt: + b.expr(fn, s.X) + + case *ast.SendStmt: + fn.emit(&Send{ + Chan: b.expr(fn, s.Chan), + X: emitConv(fn, b.expr(fn, s.Value), + fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()), + pos: s.Arrow, + }) + + case *ast.IncDecStmt: + op := token.ADD + if s.Tok == token.DEC { + op = token.SUB + } + loc := b.addr(fn, s.X, false) + b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos()) + + case *ast.AssignStmt: + switch s.Tok { + case token.ASSIGN, token.DEFINE: + b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) + + default: // +=, etc. + op := s.Tok + token.ADD - token.ADD_ASSIGN + b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) + } + + case *ast.GoStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Go{pos: s.Go} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + case *ast.DeferStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Defer{pos: s.Defer} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + // A deferred call can cause recovery from panic, + // and control resumes at the Recover block. 
+ createRecoverBlock(fn) + + case *ast.ReturnStmt: + var results []Value + if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { + // Return of one expression in a multi-valued function. + tuple := b.exprN(fn, s.Results[0]) + ttuple := tuple.Type().(*types.Tuple) + for i, n := 0, ttuple.Len(); i < n; i++ { + results = append(results, + emitConv(fn, emitExtract(fn, tuple, i), + fn.Signature.Results().At(i).Type())) + } + } else { + // 1:1 return, or no-arg return in non-void function. + for i, r := range s.Results { + v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type()) + results = append(results, v) + } + } + if fn.namedResults != nil { + // Function has named result parameters (NRPs). + // Perform parallel assignment of return operands to NRPs. + for i, r := range results { + emitStore(fn, fn.namedResults[i], r, s.Return) + } + } + // Run function calls deferred in this + // function when explicitly returning from it. + fn.emit(new(RunDefers)) + if fn.namedResults != nil { + // Reload NRPs to form the result tuple. 
+ results = results[:0] + for _, r := range fn.namedResults { + results = append(results, emitLoad(fn, r)) + } + } + fn.emit(&Return{Results: results, pos: s.Return}) + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BranchStmt: + var block *BasicBlock + switch s.Tok { + case token.BREAK: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._break + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._break + } + } + + case token.CONTINUE: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._continue + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._continue + } + } + + case token.FALLTHROUGH: + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._fallthrough + } + + case token.GOTO: + block = fn.labelledBlock(s.Label)._goto + } + emitJump(fn, block) + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BlockStmt: + b.stmtList(fn, s.List) + + case *ast.IfStmt: + if s.Init != nil { + b.stmt(fn, s.Init) + } + then := fn.newBasicBlock("if.then") + done := fn.newBasicBlock("if.done") + els := done + if s.Else != nil { + els = fn.newBasicBlock("if.else") + } + b.cond(fn, s.Cond, then, els) + fn.currentBlock = then + b.stmt(fn, s.Body) + emitJump(fn, done) + + if s.Else != nil { + fn.currentBlock = els + b.stmt(fn, s.Else) + emitJump(fn, done) + } + + fn.currentBlock = done + + case *ast.SwitchStmt: + b.switchStmt(fn, s, label) + + case *ast.TypeSwitchStmt: + b.typeSwitchStmt(fn, s, label) + + case *ast.SelectStmt: + b.selectStmt(fn, s, label) + + case *ast.ForStmt: + b.forStmt(fn, s, label) + + case *ast.RangeStmt: + b.rangeStmt(fn, s, label) + + default: + panic(fmt.Sprintf("unexpected statement kind: %T", s)) + } +} + +// buildFunction builds SSA code for the body of function fn. Idempotent. 
+func (b *builder) buildFunction(fn *Function) { + if fn.Blocks != nil { + return // building already started + } + + var recvField *ast.FieldList + var body *ast.BlockStmt + var functype *ast.FuncType + switch n := fn.syntax.(type) { + case nil: + return // not a Go source function. (Synthetic, or from object file.) + case *ast.FuncDecl: + functype = n.Type + recvField = n.Recv + body = n.Body + case *ast.FuncLit: + functype = n.Type + body = n.Body + default: + panic(n) + } + + if body == nil { + // External function. + if fn.Params == nil { + // This condition ensures we add a non-empty + // params list once only, but we may attempt + // the degenerate empty case repeatedly. + // TODO(adonovan): opt: don't do that. + + // We set Function.Params even though there is no body + // code to reference them. This simplifies clients. + if recv := fn.Signature.Recv(); recv != nil { + fn.addParamObj(recv) + } + params := fn.Signature.Params() + for i, n := 0, params.Len(); i < n; i++ { + fn.addParamObj(params.At(i)) + } + } + return + } + if fn.Prog.mode&LogSource != 0 { + defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() + } + fn.startBody() + fn.createSyntacticParams(recvField, functype) + b.stmt(fn, body) + if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { + // Control fell off the end of the function's body block. + // + // Block optimizations eliminate the current block, if + // unreachable. It is a builder invariant that + // if this no-arg return is ill-typed for + // fn.Signature.Results, this block must be + // unreachable. The sanity checker checks this. + fn.emit(new(RunDefers)) + fn.emit(new(Return)) + } + fn.finishBody() +} + +// buildFuncDecl builds SSA code for the function or method declared +// by decl in package pkg. 
+// +func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { + id := decl.Name + if isBlankIdent(id) { + return // discard + } + fn := pkg.values[pkg.info.Defs[id]].(*Function) + if decl.Recv == nil && id.Name == "init" { + var v Call + v.Call.Value = fn + v.setType(types.NewTuple()) + pkg.init.emit(&v) + } + b.buildFunction(fn) +} + +// Build calls Package.Build for each package in prog. +// Building occurs in parallel unless the BuildSerially mode flag was set. +// +// Build is intended for whole-program analysis; a typical compiler +// need only build a single package. +// +// Build is idempotent and thread-safe. +// +func (prog *Program) Build() { + var wg sync.WaitGroup + for _, p := range prog.packages { + if prog.mode&BuildSerially != 0 { + p.Build() + } else { + wg.Add(1) + go func(p *Package) { + p.Build() + wg.Done() + }(p) + } + } + wg.Wait() +} + +// Build builds SSA code for all functions and vars in package p. +// +// Precondition: CreatePackage must have been called for all of p's +// direct imports (and hence its direct imports must have been +// error-free). +// +// Build is idempotent and thread-safe. +// +func (p *Package) Build() { p.buildOnce.Do(p.build) } + +func (p *Package) build() { + if p.info == nil { + return // synthetic package, e.g. "testmain" + } + + // Ensure we have runtime type info for all exported members. + // TODO(adonovan): ideally belongs in memberFromObject, but + // that would require package creation in topological order. + for name, mem := range p.Members { + if ast.IsExported(name) { + p.Prog.needMethodsOf(mem.Type()) + } + } + if p.Prog.mode&LogSource != 0 { + defer logStack("build %s", p)() + } + init := p.init + init.startBody() + + var done *BasicBlock + + if p.Prog.mode&BareInits == 0 { + // Make init() skip if package is already initialized. 
+ initguard := p.Var("init$guard") + doinit := init.newBasicBlock("init.start") + done = init.newBasicBlock("init.done") + emitIf(init, emitLoad(init, initguard), done, doinit) + init.currentBlock = doinit + emitStore(init, initguard, vTrue, token.NoPos) + + // Call the init() function of each package we import. + for _, pkg := range p.Pkg.Imports() { + prereq := p.Prog.packages[pkg] + if prereq == nil { + panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) + } + var v Call + v.Call.Value = prereq.init + v.Call.pos = init.pos + v.setType(types.NewTuple()) + init.emit(&v) + } + } + + var b builder + + // Initialize package-level vars in correct order. + for _, varinit := range p.info.InitOrder { + if init.Prog.mode&LogSource != 0 { + fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", + varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) + } + if len(varinit.Lhs) == 1 { + // 1:1 initialization: var x, y = a(), b() + var lval lvalue + if v := varinit.Lhs[0]; v.Name() != "_" { + lval = &address{addr: p.values[v].(*Global), pos: v.Pos()} + } else { + lval = blank{} + } + b.assign(init, lval, varinit.Rhs, true, nil) + } else { + // n:1 initialization: var x, y := f() + tuple := b.exprN(init, varinit.Rhs) + for i, v := range varinit.Lhs { + if v.Name() == "_" { + continue + } + emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos()) + } + } + } + + // Build all package-level functions, init functions + // and methods, including unreachable/blank ones. + // We build them in source order, but it's not significant. + for _, file := range p.files { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + b.buildFuncDecl(p, decl) + } + } + } + + // Finish up init(). 
+ if p.Prog.mode&BareInits == 0 { + emitJump(init, done) + init.currentBlock = done + } + init.emit(new(Return)) + init.finishBody() + + p.info = nil // We no longer need ASTs or go/types deductions. + + if p.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckPackage(p) + } +} + +// Like ObjectOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) objectOf(id *ast.Ident) types.Object { + if o := p.info.ObjectOf(id); o != nil { + return o + } + panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", + id.Name, p.Prog.Fset.Position(id.Pos()))) +} + +// Like TypeOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) typeOf(e ast.Expr) types.Type { + if T := p.info.TypeOf(e); T != nil { + return T + } + panic(fmt.Sprintf("no type for %T @ %s", + e, p.Prog.Fset.Position(e.Pos()))) +} diff --git a/vendor/github.com/golangci/tools/go/ssa/const15.go b/vendor/golang.org/x/tools/go/ssa/const.go similarity index 78% rename from vendor/github.com/golangci/tools/go/ssa/const15.go rename to vendor/golang.org/x/tools/go/ssa/const.go index 795cadaa08c5..f43792e7f378 100644 --- a/vendor/github.com/golangci/tools/go/ssa/const15.go +++ b/vendor/golang.org/x/tools/go/ssa/const.go @@ -2,15 +2,13 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5,!go1.6 - package ssa // This file defines the Const SSA value type. import ( "fmt" - exact "go/constant" + "go/constant" "go/token" "go/types" "strconv" @@ -19,14 +17,14 @@ import ( // NewConst returns a new constant of the specified value and type. // val must be valid according to the specification of Const.Value. // -func NewConst(val exact.Value, typ types.Type) *Const { +func NewConst(val constant.Value, typ types.Type) *Const { return &Const{typ, val} } // intConst returns an 'int' constant that evaluates to i. 
// (i is an int64 in case the host is narrower than the target.) func intConst(i int64) *Const { - return NewConst(exact.MakeInt64(i), tInt) + return NewConst(constant.MakeInt64(i), tInt) } // nilConst returns a nil constant of the specified type, which may @@ -38,7 +36,7 @@ func nilConst(typ types.Type) *Const { // stringConst returns a 'string' constant that evaluates to s. func stringConst(s string) *Const { - return NewConst(exact.MakeString(s), tString) + return NewConst(constant.MakeString(s), tString) } // zeroConst returns a new "zero" constant of the specified type, @@ -50,11 +48,11 @@ func zeroConst(t types.Type) *Const { case *types.Basic: switch { case t.Info()&types.IsBoolean != 0: - return NewConst(exact.MakeBool(false), t) + return NewConst(constant.MakeBool(false), t) case t.Info()&types.IsNumeric != 0: - return NewConst(exact.MakeInt64(0), t) + return NewConst(constant.MakeInt64(0), t) case t.Info()&types.IsString != 0: - return NewConst(exact.MakeString(""), t) + return NewConst(constant.MakeString(""), t) case t.Kind() == types.UnsafePointer: fallthrough case t.Kind() == types.UntypedNil: @@ -76,8 +74,8 @@ func (c *Const) RelString(from *types.Package) string { var s string if c.Value == nil { s = "nil" - } else if c.Value.Kind() == exact.String { - s = exact.StringVal(c.Value) + } else if c.Value.Kind() == constant.String { + s = constant.StringVal(c.Value) const max = 20 // TODO(adonovan): don't cut a rune in half. if len(s) > max { @@ -117,20 +115,20 @@ func (c *Const) IsNil() bool { return c.Value == nil } -// TODO(adonovan): move everything below into github.com/golangci/tools/go/ssa/interp. +// TODO(adonovan): move everything below into golang.org/x/tools/go/ssa/interp. // Int64 returns the numeric value of this constant truncated to fit // a signed 64-bit integer. 
// func (c *Const) Int64() int64 { - switch x := c.Value; x.Kind() { - case exact.Int: - if i, ok := exact.Int64Val(x); ok { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if i, ok := constant.Int64Val(x); ok { return i } return 0 - case exact.Float: - f, _ := exact.Float64Val(x) + case constant.Float: + f, _ := constant.Float64Val(x) return int64(f) } panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) @@ -140,14 +138,14 @@ func (c *Const) Int64() int64 { // an unsigned 64-bit integer. // func (c *Const) Uint64() uint64 { - switch x := c.Value; x.Kind() { - case exact.Int: - if u, ok := exact.Uint64Val(x); ok { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if u, ok := constant.Uint64Val(x); ok { return u } return 0 - case exact.Float: - f, _ := exact.Float64Val(x) + case constant.Float: + f, _ := constant.Float64Val(x) return uint64(f) } panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) @@ -157,7 +155,7 @@ func (c *Const) Uint64() uint64 { // a float64. // func (c *Const) Float64() float64 { - f, _ := exact.Float64Val(c.Value) + f, _ := constant.Float64Val(c.Value) return f } @@ -165,7 +163,7 @@ func (c *Const) Float64() float64 { // fit a complex128. // func (c *Const) Complex128() complex128 { - re, _ := exact.Float64Val(exact.Real(c.Value)) - im, _ := exact.Float64Val(exact.Imag(c.Value)) + re, _ := constant.Float64Val(constant.Real(c.Value)) + im, _ := constant.Float64Val(constant.Imag(c.Value)) return complex(re, im) } diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go new file mode 100644 index 000000000000..85163a0c5a74 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/create.go @@ -0,0 +1,270 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the CREATE phase of SSA construction. 
+// See builder.go for explanation. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "sync" + + "golang.org/x/tools/go/types/typeutil" +) + +// NewProgram returns a new SSA Program. +// +// mode controls diagnostics and checking during SSA construction. +// +func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { + prog := &Program{ + Fset: fset, + imported: make(map[string]*Package), + packages: make(map[*types.Package]*Package), + thunks: make(map[selectionKey]*Function), + bounds: make(map[*types.Func]*Function), + mode: mode, + } + + h := typeutil.MakeHasher() // protected by methodsMu, in effect + prog.methodSets.SetHasher(h) + prog.canon.SetHasher(h) + + return prog +} + +// memberFromObject populates package pkg with a member for the +// typechecker object obj. +// +// For objects from Go source code, syntax is the associated syntax +// tree (for funcs and vars only); it will be used during the build +// phase. +// +func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { + name := obj.Name() + switch obj := obj.(type) { + case *types.Builtin: + if pkg.Pkg != types.Unsafe { + panic("unexpected builtin object: " + obj.String()) + } + + case *types.TypeName: + pkg.Members[name] = &Type{ + object: obj, + pkg: pkg, + } + + case *types.Const: + c := &NamedConst{ + object: obj, + Value: NewConst(obj.Val(), obj.Type()), + pkg: pkg, + } + pkg.values[obj] = c.Value + pkg.Members[name] = c + + case *types.Var: + g := &Global{ + Pkg: pkg, + name: name, + object: obj, + typ: types.NewPointer(obj.Type()), // address + pos: obj.Pos(), + } + pkg.values[obj] = g + pkg.Members[name] = g + + case *types.Func: + sig := obj.Type().(*types.Signature) + if sig.Recv() == nil && name == "init" { + pkg.ninit++ + name = fmt.Sprintf("init#%d", pkg.ninit) + } + fn := &Function{ + name: name, + object: obj, + Signature: sig, + syntax: syntax, + pos: obj.Pos(), + Pkg: pkg, + Prog: pkg.Prog, + } + if syntax == nil { + fn.Synthetic = "loaded from 
gc object file" + } + + pkg.values[obj] = fn + if sig.Recv() == nil { + pkg.Members[name] = fn // package-level function + } + + default: // (incl. *types.Package) + panic("unexpected Object type: " + obj.String()) + } +} + +// membersFromDecl populates package pkg with members for each +// typechecker object (var, func, const or type) associated with the +// specified decl. +// +func membersFromDecl(pkg *Package, decl ast.Decl) { + switch decl := decl.(type) { + case *ast.GenDecl: // import, const, type or var + switch decl.Tok { + case token.CONST: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case token.VAR: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], spec) + } + } + } + + case token.TYPE: + for _, spec := range decl.Specs { + id := spec.(*ast.TypeSpec).Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case *ast.FuncDecl: + id := decl.Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], decl) + } + } +} + +// CreatePackage constructs and returns an SSA Package from the +// specified type-checked, error-free file ASTs, and populates its +// Members mapping. +// +// importable determines whether this package should be returned by a +// subsequent call to ImportedPackage(pkg.Path()). +// +// The real work of building SSA form for each function is not done +// until a subsequent call to Package.Build(). 
+// +func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { + p := &Package{ + Prog: prog, + Members: make(map[string]Member), + values: make(map[types.Object]Value), + Pkg: pkg, + info: info, // transient (CREATE and BUILD phases) + files: files, // transient (CREATE and BUILD phases) + } + + // Add init() function. + p.init = &Function{ + name: "init", + Signature: new(types.Signature), + Synthetic: "package initializer", + Pkg: p, + Prog: prog, + } + p.Members[p.init.name] = p.init + + // CREATE phase. + // Allocate all package members: vars, funcs, consts and types. + if len(files) > 0 { + // Go source package. + for _, file := range files { + for _, decl := range file.Decls { + membersFromDecl(p, decl) + } + } + } else { + // GC-compiled binary package (or "unsafe") + // No code. + // No position information. + scope := p.Pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + memberFromObject(p, obj, nil) + if obj, ok := obj.(*types.TypeName); ok { + if named, ok := obj.Type().(*types.Named); ok { + for i, n := 0, named.NumMethods(); i < n; i++ { + memberFromObject(p, named.Method(i), nil) + } + } + } + } + } + + if prog.mode&BareInits == 0 { + // Add initializer guard variable. + initguard := &Global{ + Pkg: p, + name: "init$guard", + typ: types.NewPointer(tBool), + } + p.Members[initguard.Name()] = initguard + } + + if prog.mode&GlobalDebug != 0 { + p.SetDebugMode(true) + } + + if prog.mode&PrintPackages != 0 { + printMu.Lock() + p.WriteTo(os.Stdout) + printMu.Unlock() + } + + if importable { + prog.imported[p.Pkg.Path()] = p + } + prog.packages[p.Pkg] = p + + return p +} + +// printMu serializes printing of Packages/Functions to stdout. +var printMu sync.Mutex + +// AllPackages returns a new slice containing all packages in the +// program prog in unspecified order. 
+// +func (prog *Program) AllPackages() []*Package { + pkgs := make([]*Package, 0, len(prog.packages)) + for _, pkg := range prog.packages { + pkgs = append(pkgs, pkg) + } + return pkgs +} + +// ImportedPackage returns the importable Package whose PkgPath +// is path, or nil if no such Package has been created. +// +// A parameter to CreatePackage determines whether a package should be +// considered importable. For example, no import declaration can resolve +// to the ad-hoc main package created by 'go build foo.go'. +// +// TODO(adonovan): rethink this function and the "importable" concept; +// most packages are importable. This function assumes that all +// types.Package.Path values are unique within the ssa.Program, which is +// false---yet this function remains very convenient. +// Clients should use (*Program).Package instead where possible. +// SSA doesn't really need a string-keyed map of packages. +// +func (prog *Program) ImportedPackage(path string) *Package { + return prog.imported[path] +} diff --git a/vendor/golang.org/x/tools/go/ssa/doc.go b/vendor/golang.org/x/tools/go/ssa/doc.go new file mode 100644 index 000000000000..1a13640f9d58 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/doc.go @@ -0,0 +1,125 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ssa defines a representation of the elements of Go programs +// (packages, types, functions, variables and constants) using a +// static single-assignment (SSA) form intermediate representation +// (IR) for the bodies of functions. +// +// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. +// +// For an introduction to SSA form, see +// http://en.wikipedia.org/wiki/Static_single_assignment_form. +// This page provides a broader reading list: +// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. 
+// +// The level of abstraction of the SSA form is intentionally close to +// the source language to facilitate construction of source analysis +// tools. It is not intended for machine code generation. +// +// All looping, branching and switching constructs are replaced with +// unstructured control flow. Higher-level control flow constructs +// such as multi-way branch can be reconstructed as needed; see +// ssautil.Switches() for an example. +// +// The simplest way to create the SSA representation of a package is +// to load typed syntax trees using golang.org/x/tools/go/packages, then +// invoke the ssautil.Packages helper function. See ExampleLoadPackages +// and ExampleWholeProgram for examples. +// The resulting ssa.Program contains all the packages and their +// members, but SSA code is not created for function bodies until a +// subsequent call to (*Package).Build or (*Program).Build. +// +// The builder initially builds a naive SSA form in which all local +// variables are addresses of stack locations with explicit loads and +// stores. Registerisation of eligible locals and φ-node insertion +// using dominance and dataflow are then performed as a second pass +// called "lifting" to improve the accuracy and performance of +// subsequent analyses; this pass can be skipped by setting the +// NaiveForm builder flag. +// +// The primary interfaces of this package are: +// +// - Member: a named member of a Go package. +// - Value: an expression that yields a value. +// - Instruction: a statement that consumes values and performs computation. +// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph) +// +// A computation that yields a result implements both the Value and +// Instruction interfaces. The following table shows for each +// concrete type which of these interfaces it implements. +// +// Value? Instruction? Member? 
+// *Alloc ✔ ✔ +// *BinOp ✔ ✔ +// *Builtin ✔ +// *Call ✔ ✔ +// *ChangeInterface ✔ ✔ +// *ChangeType ✔ ✔ +// *Const ✔ +// *Convert ✔ ✔ +// *DebugRef ✔ +// *Defer ✔ +// *Extract ✔ ✔ +// *Field ✔ ✔ +// *FieldAddr ✔ ✔ +// *FreeVar ✔ +// *Function ✔ ✔ (func) +// *Global ✔ ✔ (var) +// *Go ✔ +// *If ✔ +// *Index ✔ ✔ +// *IndexAddr ✔ ✔ +// *Jump ✔ +// *Lookup ✔ ✔ +// *MakeChan ✔ ✔ +// *MakeClosure ✔ ✔ +// *MakeInterface ✔ ✔ +// *MakeMap ✔ ✔ +// *MakeSlice ✔ ✔ +// *MapUpdate ✔ +// *NamedConst ✔ (const) +// *Next ✔ ✔ +// *Panic ✔ +// *Parameter ✔ +// *Phi ✔ ✔ +// *Range ✔ ✔ +// *Return ✔ +// *RunDefers ✔ +// *Select ✔ ✔ +// *Send ✔ +// *Slice ✔ ✔ +// *Store ✔ +// *Type ✔ (type) +// *TypeAssert ✔ ✔ +// *UnOp ✔ ✔ +// +// Other key types in this package include: Program, Package, Function +// and BasicBlock. +// +// The program representation constructed by this package is fully +// resolved internally, i.e. it does not rely on the names of Values, +// Packages, Functions, Types or BasicBlocks for the correct +// interpretation of the program. Only the identities of objects and +// the topology of the SSA and type graphs are semantically +// significant. (There is one exception: Ids, used to identify field +// and method names, contain strings.) Avoidance of name-based +// operations simplifies the implementation of subsequent passes and +// can make them very efficient. Many objects are nonetheless named +// to aid in debugging, but it is not essential that the names be +// either accurate or unambiguous. The public API exposes a number of +// name-based maps for client convenience. +// +// The ssa/ssautil package provides various utilities that depend only +// on the public API of this package. +// +// TODO(adonovan): Consider the exceptional control-flow implications +// of defer and recover(). 
+// +// TODO(adonovan): write a how-to document for all the various cases +// of trying to determine corresponding elements across the four +// domains of source locations, ast.Nodes, types.Objects, +// ssa.Values/Instructions. +// +package ssa // import "golang.org/x/tools/go/ssa" diff --git a/vendor/golang.org/x/tools/go/ssa/dom.go b/vendor/golang.org/x/tools/go/ssa/dom.go new file mode 100644 index 000000000000..12ef4308f3c0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/dom.go @@ -0,0 +1,341 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines algorithms related to dominance. + +// Dominator tree construction ---------------------------------------- +// +// We use the algorithm described in Lengauer & Tarjan. 1979. A fast +// algorithm for finding dominators in a flowgraph. +// http://doi.acm.org/10.1145/357062.357071 +// +// We also apply the optimizations to SLT described in Georgiadis et +// al, Finding Dominators in Practice, JGAA 2006, +// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf +// to avoid the need for buckets of size > 1. + +import ( + "bytes" + "fmt" + "math/big" + "os" + "sort" +) + +// Idom returns the block that immediately dominates b: +// its parent in the dominator tree, if any. +// Neither the entry node (b.Index==0) nor recover node +// (b==b.Parent().Recover()) have a parent. +// +func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } + +// Dominees returns the list of blocks that b immediately dominates: +// its children in the dominator tree. +// +func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } + +// Dominates reports whether b dominates c. 
+func (b *BasicBlock) Dominates(c *BasicBlock) bool { + return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post +} + +type byDomPreorder []*BasicBlock + +func (a byDomPreorder) Len() int { return len(a) } +func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre } + +// DomPreorder returns a new slice containing the blocks of f in +// dominator tree preorder. +// +func (f *Function) DomPreorder() []*BasicBlock { + n := len(f.Blocks) + order := make(byDomPreorder, n, n) + copy(order, f.Blocks) + sort.Sort(order) + return order +} + +// domInfo contains a BasicBlock's dominance information. +type domInfo struct { + idom *BasicBlock // immediate dominator (parent in domtree) + children []*BasicBlock // nodes immediately dominated by this one + pre, post int32 // pre- and post-order numbering within domtree +} + +// ltState holds the working state for Lengauer-Tarjan algorithm +// (during which domInfo.pre is repurposed for CFG DFS preorder number). +type ltState struct { + // Each slice is indexed by b.Index. + sdom []*BasicBlock // b's semidominator + parent []*BasicBlock // b's parent in DFS traversal of CFG + ancestor []*BasicBlock // b's ancestor with least sdom +} + +// dfs implements the depth-first search part of the LT algorithm. +func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 { + preorder[i] = v + v.dom.pre = i // For now: DFS preorder of spanning tree of CFG + i++ + lt.sdom[v.Index] = v + lt.link(nil, v) + for _, w := range v.Succs { + if lt.sdom[w.Index] == nil { + lt.parent[w.Index] = v + i = lt.dfs(w, i, preorder) + } + } + return i +} + +// eval implements the EVAL part of the LT algorithm. +func (lt *ltState) eval(v *BasicBlock) *BasicBlock { + // TODO(adonovan): opt: do path compression per simple LT. 
+ u := v + for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] { + if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre { + u = v + } + } + return u +} + +// link implements the LINK part of the LT algorithm. +func (lt *ltState) link(v, w *BasicBlock) { + lt.ancestor[w.Index] = v +} + +// buildDomTree computes the dominator tree of f using the LT algorithm. +// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). +// +func buildDomTree(f *Function) { + // The step numbers refer to the original LT paper; the + // reordering is due to Georgiadis. + + // Clear any previous domInfo. + for _, b := range f.Blocks { + b.dom = domInfo{} + } + + n := len(f.Blocks) + // Allocate space for 5 contiguous [n]*BasicBlock arrays: + // sdom, parent, ancestor, preorder, buckets. + space := make([]*BasicBlock, 5*n, 5*n) + lt := ltState{ + sdom: space[0:n], + parent: space[n : 2*n], + ancestor: space[2*n : 3*n], + } + + // Step 1. Number vertices by depth-first preorder. + preorder := space[3*n : 4*n] + root := f.Blocks[0] + prenum := lt.dfs(root, 0, preorder) + recover := f.Recover + if recover != nil { + lt.dfs(recover, prenum, preorder) + } + + buckets := space[4*n : 5*n] + copy(buckets, preorder) + + // In reverse preorder... + for i := int32(n) - 1; i > 0; i-- { + w := preorder[i] + + // Step 3. Implicitly define the immediate dominator of each node. + for v := buckets[i]; v != w; v = buckets[v.dom.pre] { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < i { + v.dom.idom = u + } else { + v.dom.idom = w + } + } + + // Step 2. Compute the semidominators of all nodes. 
+ lt.sdom[w.Index] = lt.parent[w.Index] + for _, v := range w.Preds { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre { + lt.sdom[w.Index] = lt.sdom[u.Index] + } + } + + lt.link(lt.parent[w.Index], w) + + if lt.parent[w.Index] == lt.sdom[w.Index] { + w.dom.idom = lt.parent[w.Index] + } else { + buckets[i] = buckets[lt.sdom[w.Index].dom.pre] + buckets[lt.sdom[w.Index].dom.pre] = w + } + } + + // The final 'Step 3' is now outside the loop. + for v := buckets[0]; v != root; v = buckets[v.dom.pre] { + v.dom.idom = root + } + + // Step 4. Explicitly define the immediate dominator of each + // node, in preorder. + for _, w := range preorder[1:] { + if w == root || w == recover { + w.dom.idom = nil + } else { + if w.dom.idom != lt.sdom[w.Index] { + w.dom.idom = w.dom.idom.dom.idom + } + // Calculate Children relation as inverse of Idom. + w.dom.idom.dom.children = append(w.dom.idom.dom.children, w) + } + } + + pre, post := numberDomTree(root, 0, 0) + if recover != nil { + numberDomTree(recover, pre, post) + } + + // printDomTreeDot(os.Stderr, f) // debugging + // printDomTreeText(os.Stderr, root, 0) // debugging + + if f.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckDomTree(f) + } +} + +// numberDomTree sets the pre- and post-order numbers of a depth-first +// traversal of the dominator tree rooted at v. These are used to +// answer dominance queries in constant time. +// +func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { + v.dom.pre = pre + pre++ + for _, child := range v.dom.children { + pre, post = numberDomTree(child, pre, post) + } + v.dom.post = post + post++ + return pre, post +} + +// Testing utilities ---------------------------------------- + +// sanityCheckDomTree checks the correctness of the dominator tree +// computed by the LT algorithm by comparing against the dominance +// relation computed by a naive Kildall-style forward dataflow +// analysis (Algorithm 10.16 from the "Dragon" book). 
+// +func sanityCheckDomTree(f *Function) { + n := len(f.Blocks) + + // D[i] is the set of blocks that dominate f.Blocks[i], + // represented as a bit-set of block indices. + D := make([]big.Int, n) + + one := big.NewInt(1) + + // all is the set of all blocks; constant. + var all big.Int + all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) + + // Initialization. + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + // A root is dominated only by itself. + D[i].SetBit(&D[0], 0, 1) + } else { + // All other blocks are (initially) dominated + // by every block. + D[i].Set(&all) + } + } + + // Iteration until fixed point. + for changed := true; changed; { + changed = false + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + continue + } + // Compute intersection across predecessors. + var x big.Int + x.Set(&all) + for _, pred := range b.Preds { + x.And(&x, &D[pred.Index]) + } + x.SetBit(&x, i, 1) // a block always dominates itself. + if D[i].Cmp(&x) != 0 { + D[i].Set(&x) + changed = true + } + } + } + + // Check the entire relation. O(n^2). + // The Recover block (if any) must be treated specially so we skip it. + ok := true + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + b, c := f.Blocks[i], f.Blocks[j] + if c == f.Recover { + continue + } + actual := b.Dominates(c) + expected := D[j].Bit(i) == 1 + if actual != expected { + fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) + ok = false + } + } + } + + preorder := f.DomPreorder() + for _, b := range f.Blocks { + if got := preorder[b.dom.pre]; got != b { + fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) + ok = false + } + } + + if !ok { + panic("sanityCheckDomTree failed for " + f.String()) + } + +} + +// Printing functions ---------------------------------------- + +// printDomTree prints the dominator tree as text, using indentation. 
+func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { + fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) + for _, child := range v.dom.children { + printDomTreeText(buf, child, indent+1) + } +} + +// printDomTreeDot prints the dominator tree of f in AT&T GraphViz +// (.dot) format. +func printDomTreeDot(buf *bytes.Buffer, f *Function) { + fmt.Fprintln(buf, "//", f) + fmt.Fprintln(buf, "digraph domtree {") + for i, b := range f.Blocks { + v := b.dom + fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) + // TODO(adonovan): improve appearance of edges + // belonging to both dominator tree and CFG. + + // Dominator tree edge. + if i != 0 { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) + } + // CFG edges. + for _, pred := range b.Preds { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) + } + } + fmt.Fprintln(buf, "}") +} diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go new file mode 100644 index 000000000000..1036988adcb8 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/emit.go @@ -0,0 +1,468 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// Helpers for emitting SSA instructions. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" +) + +// emitNew emits to f a new (heap Alloc) instruction allocating an +// object of type typ. pos is the optional source location. +// +func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc { + v := &Alloc{Heap: true} + v.setType(types.NewPointer(typ)) + v.setPos(pos) + f.emit(v) + return v +} + +// emitLoad emits to f an instruction to load the address addr into a +// new temporary, and returns the value so defined. 
+// +func emitLoad(f *Function, addr Value) *UnOp { + v := &UnOp{Op: token.MUL, X: addr} + v.setType(deref(addr.Type())) + f.emit(v) + return v +} + +// emitDebugRef emits to f a DebugRef pseudo-instruction associating +// expression e with value v. +// +func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { + if !f.debugInfo() { + return // debugging not enabled + } + if v == nil || e == nil { + panic("nil") + } + var obj types.Object + e = unparen(e) + if id, ok := e.(*ast.Ident); ok { + if isBlankIdent(id) { + return + } + obj = f.Pkg.objectOf(id) + switch obj.(type) { + case *types.Nil, *types.Const, *types.Builtin: + return + } + } + f.emit(&DebugRef{ + X: v, + Expr: e, + IsAddr: isAddr, + object: obj, + }) +} + +// emitArith emits to f code to compute the binary operation op(x, y) +// where op is an eager shift, logical or arithmetic operation. +// (Use emitCompare() for comparisons and Builder.logicalBinop() for +// non-eager operations.) +// +func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value { + switch op { + case token.SHL, token.SHR: + x = emitConv(f, x, t) + // y may be signed or an 'untyped' constant. + // TODO(adonovan): whence signed values? + if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 { + y = emitConv(f, y, types.Typ[types.Uint64]) + } + + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + x = emitConv(f, x, t) + y = emitConv(f, y, t) + + default: + panic("illegal op in emitArith: " + op.String()) + + } + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(t) + return f.emit(v) +} + +// emitCompare emits to f code compute the boolean result of +// comparison comparison 'x op y'. 
+// +func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { + xt := x.Type().Underlying() + yt := y.Type().Underlying() + + // Special case to optimise a tagless SwitchStmt so that + // these are equivalent + // switch { case e: ...} + // switch true { case e: ... } + // if e==true { ... } + // even in the case when e's type is an interface. + // TODO(adonovan): opt: generalise to x==true, false!=y, etc. + if x == vTrue && op == token.EQL { + if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { + return y + } + } + + if types.Identical(xt, yt) { + // no conversion necessary + } else if _, ok := xt.(*types.Interface); ok { + y = emitConv(f, y, x.Type()) + } else if _, ok := yt.(*types.Interface); ok { + x = emitConv(f, x, y.Type()) + } else if _, ok := x.(*Const); ok { + x = emitConv(f, x, y.Type()) + } else if _, ok := y.(*Const); ok { + y = emitConv(f, y, x.Type()) + } else { + // other cases, e.g. channels. No-op. + } + + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(tBool) + return f.emit(v) +} + +// isValuePreserving returns true if a conversion from ut_src to +// ut_dst is value-preserving, i.e. just a change of type. +// Precondition: neither argument is a named type. +// +func isValuePreserving(ut_src, ut_dst types.Type) bool { + // Identical underlying types? + if structTypesIdentical(ut_dst, ut_src) { + return true + } + + switch ut_dst.(type) { + case *types.Chan: + // Conversion between channel types? + _, ok := ut_src.(*types.Chan) + return ok + + case *types.Pointer: + // Conversion between pointers with identical base types? + _, ok := ut_src.(*types.Pointer) + return ok + } + return false +} + +// emitConv emits to f code to convert Value val to exactly type typ, +// and returns the converted value. Implicit conversions are required +// by language assignability rules in assignments, parameter passing, +// etc. Conversions cannot fail dynamically. 
+// +func emitConv(f *Function, val Value, typ types.Type) Value { + t_src := val.Type() + + // Identical types? Conversion is a no-op. + if types.Identical(t_src, typ) { + return val + } + + ut_dst := typ.Underlying() + ut_src := t_src.Underlying() + + // Just a change of type, but not value or representation? + if isValuePreserving(ut_src, ut_dst) { + c := &ChangeType{X: val} + c.setType(typ) + return f.emit(c) + } + + // Conversion to, or construction of a value of, an interface type? + if _, ok := ut_dst.(*types.Interface); ok { + // Assignment from one interface type to another? + if _, ok := ut_src.(*types.Interface); ok { + c := &ChangeInterface{X: val} + c.setType(typ) + return f.emit(c) + } + + // Untyped nil constant? Return interface-typed nil constant. + if ut_src == tUntypedNil { + return nilConst(typ) + } + + // Convert (non-nil) "untyped" literals to their default type. + if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { + val = emitConv(f, val, DefaultType(ut_src)) + } + + f.Pkg.Prog.needMethodsOf(val.Type()) + mi := &MakeInterface{X: val} + mi.setType(typ) + return f.emit(mi) + } + + // Conversion of a compile-time constant value? + if c, ok := val.(*Const); ok { + if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() { + // Conversion of a compile-time constant to + // another constant type results in a new + // constant of the destination type and + // (initially) the same abstract value. + // We don't truncate the value yet. + return NewConst(c.Value, typ) + } + + // We're converting from constant to non-constant type, + // e.g. string -> []byte/[]rune. + } + + // A representation-changing conversion? + // At least one of {ut_src,ut_dst} must be *Basic. + // (The other may be []byte or []rune.) 
+ _, ok1 := ut_src.(*types.Basic) + _, ok2 := ut_dst.(*types.Basic) + if ok1 || ok2 { + c := &Convert{X: val} + c.setType(typ) + return f.emit(c) + } + + panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) +} + +// emitStore emits to f an instruction to store value val at location +// addr, applying implicit conversions as required by assignability rules. +// +func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { + s := &Store{ + Addr: addr, + Val: emitConv(f, val, deref(addr.Type())), + pos: pos, + } + f.emit(s) + return s +} + +// emitJump emits to f a jump to target, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitJump(f *Function, target *BasicBlock) { + b := f.currentBlock + b.emit(new(Jump)) + addEdge(b, target) + f.currentBlock = nil +} + +// emitIf emits to f a conditional jump to tblock or fblock based on +// cond, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) { + b := f.currentBlock + b.emit(&If{Cond: cond}) + addEdge(b, tblock) + addEdge(b, fblock) + f.currentBlock = nil +} + +// emitExtract emits to f an instruction to extract the index'th +// component of tuple. It returns the extracted value. +// +func emitExtract(f *Function, tuple Value, index int) Value { + e := &Extract{Tuple: tuple, Index: index} + e.setType(tuple.Type().(*types.Tuple).At(index).Type()) + return f.emit(e) +} + +// emitTypeAssert emits to f a type assertion value := x.(t) and +// returns the value. x.Type() must be an interface. +// +func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{X: x, AssertedType: t} + a.setPos(pos) + a.setType(t) + return f.emit(a) +} + +// emitTypeTest emits to f a type test value,ok := x.(t) and returns +// a (value, ok) tuple. x.Type() must be an interface. 
+// +func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{ + X: x, + AssertedType: t, + CommaOk: true, + } + a.setPos(pos) + a.setType(types.NewTuple( + newVar("value", t), + varOk, + )) + return f.emit(a) +} + +// emitTailCall emits to f a function call in tail position. The +// caller is responsible for all fields of 'call' except its type. +// Intended for wrapper methods. +// Precondition: f does/will not use deferred procedure calls. +// Postcondition: f.currentBlock is nil. +// +func emitTailCall(f *Function, call *Call) { + tresults := f.Signature.Results() + nr := tresults.Len() + if nr == 1 { + call.typ = tresults.At(0).Type() + } else { + call.typ = tresults + } + tuple := f.emit(call) + var ret Return + switch nr { + case 0: + // no-op + case 1: + ret.Results = []Value{tuple} + default: + for i := 0; i < nr; i++ { + v := emitExtract(f, tuple, i) + // TODO(adonovan): in principle, this is required: + // v = emitConv(f, o.Type, f.Signature.Results[i].Type) + // but in practice emitTailCall is only used when + // the types exactly match. + ret.Results = append(ret.Results, v) + } + } + f.emit(&ret) + f.currentBlock = nil +} + +// emitImplicitSelections emits to f code to apply the sequence of +// implicit field selections specified by indices to base value v, and +// returns the selected value. +// +// If v is the address of a struct, the result will be the address of +// a field; if it is the value of a struct, the result will be the +// value of a field. +// +func emitImplicitSelections(f *Function, v Value, indices []int) Value { + for _, index := range indices { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff indirectly embedded. 
+ if isPointer(fld.Type()) { + v = emitLoad(f, v) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setType(fld.Type()) + v = f.emit(instr) + } + } + return v +} + +// emitFieldSelection emits to f code to select the index'th field of v. +// +// If wantAddr, the input must be a pointer-to-struct and the result +// will be the field's address; otherwise the result will be the +// field's value. +// Ident id is used for position and debug info. +// +func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff we don't want its address. + if !wantAddr { + v = emitLoad(f, v) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(fld.Type()) + v = f.emit(instr) + } + emitDebugRef(f, id, v, wantAddr) + return v +} + +// zeroValue emits to f code to produce a zero value of type t, +// and returns it. +// +func zeroValue(f *Function, t types.Type) Value { + switch t.Underlying().(type) { + case *types.Struct, *types.Array: + return emitLoad(f, f.addLocal(t, token.NoPos)) + default: + return zeroConst(t) + } +} + +// createRecoverBlock emits to f a block of code to return after a +// recovered panic, and sets f.Recover to it. +// +// If f's result parameters are named, the code loads and returns +// their current values, otherwise it returns the zero values of their +// type. +// +// Idempotent. +// +func createRecoverBlock(f *Function) { + if f.Recover != nil { + return // already created + } + saved := f.currentBlock + + f.Recover = f.newBasicBlock("recover") + f.currentBlock = f.Recover + + var results []Value + if f.namedResults != nil { + // Reload NRPs to form value tuple. 
+ for _, r := range f.namedResults { + results = append(results, emitLoad(f, r)) + } + } else { + R := f.Signature.Results() + for i, n := 0, R.Len(); i < n; i++ { + T := R.At(i).Type() + + // Return zero value of each result type. + results = append(results, zeroValue(f, T)) + } + } + f.emit(&Return{Results: results}) + + f.currentBlock = saved +} diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go new file mode 100644 index 000000000000..b21ff4e521ee --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/func.go @@ -0,0 +1,689 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the Function and BasicBlock types. + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + "strings" +) + +// addEdge adds a control-flow graph edge from from to to. +func addEdge(from, to *BasicBlock) { + from.Succs = append(from.Succs, to) + to.Preds = append(to.Preds, from) +} + +// Parent returns the function that contains block b. +func (b *BasicBlock) Parent() *Function { return b.parent } + +// String returns a human-readable label of this block. +// It is not guaranteed unique within the function. +// +func (b *BasicBlock) String() string { + return fmt.Sprintf("%d", b.Index) +} + +// emit appends an instruction to the current basic block. +// If the instruction defines a Value, it is returned. +// +func (b *BasicBlock) emit(i Instruction) Value { + i.setBlock(b) + b.Instrs = append(b.Instrs, i) + v, _ := i.(Value) + return v +} + +// predIndex returns the i such that b.Preds[i] == c or panics if +// there is none. +func (b *BasicBlock) predIndex(c *BasicBlock) int { + for i, pred := range b.Preds { + if pred == c { + return i + } + } + panic(fmt.Sprintf("no edge %s -> %s", c, b)) +} + +// hasPhi returns true if b.Instrs contains φ-nodes. 
+func (b *BasicBlock) hasPhi() bool { + _, ok := b.Instrs[0].(*Phi) + return ok +} + +// phis returns the prefix of b.Instrs containing all the block's φ-nodes. +func (b *BasicBlock) phis() []Instruction { + for i, instr := range b.Instrs { + if _, ok := instr.(*Phi); !ok { + return b.Instrs[:i] + } + } + return nil // unreachable in well-formed blocks +} + +// replacePred replaces all occurrences of p in b's predecessor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replacePred(p, q *BasicBlock) { + for i, pred := range b.Preds { + if pred == p { + b.Preds[i] = q + } + } +} + +// replaceSucc replaces all occurrences of p in b's successor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { + for i, succ := range b.Succs { + if succ == p { + b.Succs[i] = q + } + } +} + +// removePred removes all occurrences of p in b's +// predecessor list and φ-nodes. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) removePred(p *BasicBlock) { + phis := b.phis() + + // We must preserve edge order for φ-nodes. + j := 0 + for i, pred := range b.Preds { + if pred != p { + b.Preds[j] = b.Preds[i] + // Strike out φ-edge too. + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges[j] = phi.Edges[i] + } + j++ + } + } + // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. + for i := j; i < len(b.Preds); i++ { + b.Preds[i] = nil + for _, instr := range phis { + instr.(*Phi).Edges[i] = nil + } + } + b.Preds = b.Preds[:j] + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges = phi.Edges[:j] + } +} + +// Destinations associated with unlabelled for/switch/select stmts. +// We push/pop one of these as we enter/leave each construct and for +// each BranchStmt we scan for the innermost target of the right type. 
+// +type targets struct { + tail *targets // rest of stack + _break *BasicBlock + _continue *BasicBlock + _fallthrough *BasicBlock +} + +// Destinations associated with a labelled block. +// We populate these as labels are encountered in forward gotos or +// labelled statements. +// +type lblock struct { + _goto *BasicBlock + _break *BasicBlock + _continue *BasicBlock +} + +// labelledBlock returns the branch target associated with the +// specified label, creating it if needed. +// +func (f *Function) labelledBlock(label *ast.Ident) *lblock { + lb := f.lblocks[label.Obj] + if lb == nil { + lb = &lblock{_goto: f.newBasicBlock(label.Name)} + if f.lblocks == nil { + f.lblocks = make(map[*ast.Object]*lblock) + } + f.lblocks[label.Obj] = lb + } + return lb +} + +// addParam adds a (non-escaping) parameter to f.Params of the +// specified name, type and source position. +// +func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter { + v := &Parameter{ + name: name, + typ: typ, + pos: pos, + parent: f, + } + f.Params = append(f.Params, v) + return v +} + +func (f *Function) addParamObj(obj types.Object) *Parameter { + name := obj.Name() + if name == "" { + name = fmt.Sprintf("arg%d", len(f.Params)) + } + param := f.addParam(name, obj.Type(), obj.Pos()) + param.object = obj + return param +} + +// addSpilledParam declares a parameter that is pre-spilled to the +// stack; the function body will load/store the spilled location. +// Subsequent lifting will eliminate spills where possible. +// +func (f *Function) addSpilledParam(obj types.Object) { + param := f.addParamObj(obj) + spill := &Alloc{Comment: obj.Name()} + spill.setType(types.NewPointer(obj.Type())) + spill.setPos(obj.Pos()) + f.objects[obj] = spill + f.Locals = append(f.Locals, spill) + f.emit(spill) + f.emit(&Store{Addr: spill, Val: param}) +} + +// startBody initializes the function prior to generating SSA code for its body. +// Precondition: f.Type() already set. 
+// +func (f *Function) startBody() { + f.currentBlock = f.newBasicBlock("entry") + f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init +} + +// createSyntacticParams populates f.Params and generates code (spills +// and named result locals) for all the parameters declared in the +// syntax. In addition it populates the f.objects mapping. +// +// Preconditions: +// f.startBody() was called. +// Postcondition: +// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) +// +func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { + // Receiver (at most one inner iteration). + if recv != nil { + for _, field := range recv.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n]) + } + // Anonymous receiver? No need to spill. + if field.Names == nil { + f.addParamObj(f.Signature.Recv()) + } + } + } + + // Parameters. + if functype.Params != nil { + n := len(f.Params) // 1 if has recv, 0 otherwise + for _, field := range functype.Params.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n]) + } + // Anonymous parameter? No need to spill. + if field.Names == nil { + f.addParamObj(f.Signature.Params().At(len(f.Params) - n)) + } + } + } + + // Named results. + if functype.Results != nil { + for _, field := range functype.Results.List { + // Implicit "var" decl of locals for named results. + for _, n := range field.Names { + f.namedResults = append(f.namedResults, f.addLocalForIdent(n)) + } + } + } +} + +// numberRegisters assigns numbers to all SSA registers +// (value-defining Instructions) in f, to aid debugging. +// (Non-Instruction Values are named at construction.) 
+// +func numberRegisters(f *Function) { + v := 0 + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case Value: + instr.(interface { + setNum(int) + }).setNum(v) + v++ + } + } + } +} + +// buildReferrers populates the def/use information in all non-nil +// Value.Referrers slice. +// Precondition: all such slices are initially empty. +func buildReferrers(f *Function) { + var rands []*Value + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if r := *rand; r != nil { + if ref := r.Referrers(); ref != nil { + *ref = append(*ref, instr) + } + } + } + } + } +} + +// finishBody() finalizes the function after SSA code generation of its body. +func (f *Function) finishBody() { + f.objects = nil + f.currentBlock = nil + f.lblocks = nil + + // Don't pin the AST in memory (except in debug mode). + if n := f.syntax; n != nil && !f.debugInfo() { + f.syntax = extentNode{n.Pos(), n.End()} + } + + // Remove from f.Locals any Allocs that escape to the heap. + j := 0 + for _, l := range f.Locals { + if !l.Heap { + f.Locals[j] = l + j++ + } + } + // Nil out f.Locals[j:] to aid GC. + for i := j; i < len(f.Locals); i++ { + f.Locals[i] = nil + } + f.Locals = f.Locals[:j] + + optimizeBlocks(f) + + buildReferrers(f) + + buildDomTree(f) + + if f.Prog.mode&NaiveForm == 0 { + // For debugging pre-state of lifting pass: + // numberRegisters(f) + // f.WriteTo(os.Stderr) + lift(f) + } + + f.namedResults = nil // (used by lifting) + + numberRegisters(f) + + if f.Prog.mode&PrintFunctions != 0 { + printMu.Lock() + f.WriteTo(os.Stdout) + printMu.Unlock() + } + + if f.Prog.mode&SanityCheckFunctions != 0 { + mustSanityCheck(f, nil) + } +} + +// removeNilBlocks eliminates nils from f.Blocks and updates each +// BasicBlock.Index. Use this after any pass that may delete blocks. 
+// +func (f *Function) removeNilBlocks() { + j := 0 + for _, b := range f.Blocks { + if b != nil { + b.Index = j + f.Blocks[j] = b + j++ + } + } + // Nil out f.Blocks[j:] to aid GC. + for i := j; i < len(f.Blocks); i++ { + f.Blocks[i] = nil + } + f.Blocks = f.Blocks[:j] +} + +// SetDebugMode sets the debug mode for package pkg. If true, all its +// functions will include full debug info. This greatly increases the +// size of the instruction stream, and causes Functions to depend upon +// the ASTs, potentially keeping them live in memory for longer. +// +func (pkg *Package) SetDebugMode(debug bool) { + // TODO(adonovan): do we want ast.File granularity? + pkg.debug = debug +} + +// debugInfo reports whether debug info is wanted for this function. +func (f *Function) debugInfo() bool { + return f.Pkg != nil && f.Pkg.debug +} + +// addNamedLocal creates a local variable, adds it to function f and +// returns it. Its name and type are taken from obj. Subsequent +// calls to f.lookup(obj) will return the same local. +// +func (f *Function) addNamedLocal(obj types.Object) *Alloc { + l := f.addLocal(obj.Type(), obj.Pos()) + l.Comment = obj.Name() + f.objects[obj] = l + return l +} + +func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc { + return f.addNamedLocal(f.Pkg.info.Defs[id]) +} + +// addLocal creates an anonymous local variable of type typ, adds it +// to function f and returns it. pos is the optional source location. +// +func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc { + v := &Alloc{} + v.setType(types.NewPointer(typ)) + v.setPos(pos) + f.Locals = append(f.Locals, v) + f.emit(v) + return v +} + +// lookup returns the address of the named variable identified by obj +// that is local to function f or one of its enclosing functions. +// If escaping, the reference comes from a potentially escaping pointer +// expression and the referent must be heap-allocated. 
+// +func (f *Function) lookup(obj types.Object, escaping bool) Value { + if v, ok := f.objects[obj]; ok { + if alloc, ok := v.(*Alloc); ok && escaping { + alloc.Heap = true + } + return v // function-local var (address) + } + + // Definition must be in an enclosing function; + // plumb it through intervening closures. + if f.parent == nil { + panic("no ssa.Value for " + obj.String()) + } + outer := f.parent.lookup(obj, true) // escaping + v := &FreeVar{ + name: obj.Name(), + typ: outer.Type(), + pos: outer.Pos(), + outer: outer, + parent: f, + } + f.objects[obj] = v + f.FreeVars = append(f.FreeVars, v) + return v +} + +// emit emits the specified instruction to function f. +func (f *Function) emit(instr Instruction) Value { + return f.currentBlock.emit(instr) +} + +// RelString returns the full name of this function, qualified by +// package name, receiver type, etc. +// +// The specific formatting rules are not guaranteed and may change. +// +// Examples: +// "math.IsNaN" // a package-level function +// "(*bytes.Buffer).Bytes" // a declared method or a wrapper +// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) +// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) +// "main.main$1" // an anonymous function in main +// "main.init#1" // a declared init function +// "main.init" // the synthesized package initializer +// +// When these functions are referred to from within the same package +// (i.e. from == f.Pkg.Object), they are rendered without the package path. +// For example: "IsNaN", "(*Buffer).Bytes", etc. +// +// All non-synthetic functions have distinct package-qualified names. +// (But two methods may have the same name "(T).f" if one is a synthetic +// wrapper promoting a non-exported method "f" from another package; in +// that case, the strings are equal but the identifiers "f" are distinct.) +// +func (f *Function) RelString(from *types.Package) string { + // Anonymous? 
+ if f.parent != nil { + // An anonymous function's Name() looks like "parentName$1", + // but its String() should include the type/package/etc. + parent := f.parent.RelString(from) + for i, anon := range f.parent.AnonFuncs { + if anon == f { + return fmt.Sprintf("%s$%d", parent, 1+i) + } + } + + return f.name // should never happen + } + + // Method (declared or wrapper)? + if recv := f.Signature.Recv(); recv != nil { + return f.relMethod(from, recv.Type()) + } + + // Thunk? + if f.method != nil { + return f.relMethod(from, f.method.Recv()) + } + + // Bound? + if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { + return f.relMethod(from, f.FreeVars[0].Type()) + } + + // Package-level function? + // Prefix with package name for cross-package references only. + if p := f.pkg(); p != nil && p != from { + return fmt.Sprintf("%s.%s", p.Path(), f.name) + } + + // Unknown. + return f.name +} + +func (f *Function) relMethod(from *types.Package, recv types.Type) string { + return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) +} + +// writeSignature writes to buf the signature sig in declaration syntax. 
+func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) { + buf.WriteString("func ") + if recv := sig.Recv(); recv != nil { + buf.WriteString("(") + if n := params[0].Name(); n != "" { + buf.WriteString(n) + buf.WriteString(" ") + } + types.WriteType(buf, params[0].Type(), types.RelativeTo(from)) + buf.WriteString(") ") + } + buf.WriteString(name) + types.WriteSignature(buf, sig, types.RelativeTo(from)) +} + +func (f *Function) pkg() *types.Package { + if f.Pkg != nil { + return f.Pkg.Pkg + } + return nil +} + +var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer + +func (f *Function) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WriteFunction(&buf, f) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WriteFunction writes to buf a human-readable "disassembly" of f. +func WriteFunction(buf *bytes.Buffer, f *Function) { + fmt.Fprintf(buf, "# Name: %s\n", f.String()) + if f.Pkg != nil { + fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) + } + if syn := f.Synthetic; syn != "" { + fmt.Fprintln(buf, "# Synthetic:", syn) + } + if pos := f.Pos(); pos.IsValid() { + fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) + } + + if f.parent != nil { + fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) + } + + if f.Recover != nil { + fmt.Fprintf(buf, "# Recover: %s\n", f.Recover) + } + + from := f.pkg() + + if f.FreeVars != nil { + buf.WriteString("# Free variables:\n") + for i, fv := range f.FreeVars { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) + } + } + + if len(f.Locals) > 0 { + buf.WriteString("# Locals:\n") + for i, l := range f.Locals { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from)) + } + } + writeSignature(buf, from, f.Name(), f.Signature, f.Params) + buf.WriteString(":\n") + + if f.Blocks == nil { + buf.WriteString("\t(external)\n") + } + + // NB. 
column calculations are confused by non-ASCII + // characters and assume 8-space tabs. + const punchcard = 80 // for old time's sake. + const tabwidth = 8 + for _, b := range f.Blocks { + if b == nil { + // Corrupt CFG. + fmt.Fprintf(buf, ".nil:\n") + continue + } + n, _ := fmt.Fprintf(buf, "%d:", b.Index) + bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs)) + fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg) + + if false { // CFG debugging + fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) + } + for _, instr := range b.Instrs { + buf.WriteString("\t") + switch v := instr.(type) { + case Value: + l := punchcard - tabwidth + // Left-align the instruction. + if name := v.Name(); name != "" { + n, _ := fmt.Fprintf(buf, "%s = ", name) + l -= n + } + n, _ := buf.WriteString(instr.String()) + l -= n + // Right-align the type if there's space. + if t := v.Type(); t != nil { + buf.WriteByte(' ') + ts := relType(t, from) + l -= len(ts) + len(" ") // (spaces before and after type) + if l > 0 { + fmt.Fprintf(buf, "%*s", l, "") + } + buf.WriteString(ts) + } + case nil: + // Be robust against bad transforms. + buf.WriteString("") + default: + buf.WriteString(instr.String()) + } + buf.WriteString("\n") + } + } + fmt.Fprintf(buf, "\n") +} + +// newBasicBlock adds to f a new basic block and returns it. It does +// not automatically become the current block for subsequent calls to emit. +// comment is an optional string for more readable debugging output. +// +func (f *Function) newBasicBlock(comment string) *BasicBlock { + b := &BasicBlock{ + Index: len(f.Blocks), + Comment: comment, + parent: f, + } + b.Succs = b.succs2[:0] + f.Blocks = append(f.Blocks, b) + return b +} + +// NewFunction returns a new synthetic Function instance belonging to +// prog, with its name and signature fields set as specified. +// +// The caller is responsible for initializing the remaining fields of +// the function object, e.g. 
Pkg, Params, Blocks. +// +// It is practically impossible for clients to construct well-formed +// SSA functions/packages/programs directly, so we assume this is the +// job of the Builder alone. NewFunction exists to provide clients a +// little flexibility. For example, analysis tools may wish to +// construct fake Functions for the root of the callgraph, a fake +// "reflect" package, etc. +// +// TODO(adonovan): think harder about the API here. +// +func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function { + return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} +} + +type extentNode [2]token.Pos + +func (n extentNode) Pos() token.Pos { return n[0] } +func (n extentNode) End() token.Pos { return n[1] } + +// Syntax returns an ast.Node whose Pos/End methods provide the +// lexical extent of the function if it was defined by Go source code +// (f.Synthetic==""), or nil otherwise. +// +// If f was built with debug information (see Package.SetDebugRef), +// the result is the *ast.FuncDecl or *ast.FuncLit that declared the +// function. Otherwise, it is an opaque Node providing only position +// information; this avoids pinning the AST in memory. 
+// +func (f *Function) Syntax() ast.Node { return f.syntax } diff --git a/vendor/golang.org/x/tools/go/ssa/identical.go b/vendor/golang.org/x/tools/go/ssa/identical.go new file mode 100644 index 000000000000..53cbee107b6b --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/identical.go @@ -0,0 +1,7 @@ +// +build go1.8 + +package ssa + +import "go/types" + +var structTypesIdentical = types.IdenticalIgnoreTags diff --git a/vendor/golang.org/x/tools/go/ssa/identical_17.go b/vendor/golang.org/x/tools/go/ssa/identical_17.go new file mode 100644 index 000000000000..da89d3339a5d --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/identical_17.go @@ -0,0 +1,7 @@ +// +build !go1.8 + +package ssa + +import "go/types" + +var structTypesIdentical = types.Identical diff --git a/vendor/golang.org/x/tools/go/ssa/lift.go b/vendor/golang.org/x/tools/go/ssa/lift.go new file mode 100644 index 000000000000..048e9b03260b --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/lift.go @@ -0,0 +1,653 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the lifting pass which tries to "lift" Alloc +// cells (new/local variables) into SSA registers, replacing loads +// with the dominating stored value, eliminating loads and stores, and +// inserting φ-nodes as needed. + +// Cited papers and resources: +// +// Ron Cytron et al. 1991. Efficiently computing SSA form... +// http://doi.acm.org/10.1145/115372.115320 +// +// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. +// Software Practice and Experience 2001, 4:1-10. +// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf +// +// Daniel Berlin, llvmdev mailing list, 2012. +// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html +// (Be sure to expand the whole thread.) 
+ +// TODO(adonovan): opt: there are many optimizations worth evaluating, and +// the conventional wisdom for SSA construction is that a simple +// algorithm well engineered often beats those of better asymptotic +// complexity on all but the most egregious inputs. +// +// Danny Berlin suggests that the Cooper et al. algorithm for +// computing the dominance frontier is superior to Cytron et al. +// Furthermore he recommends that rather than computing the DF for the +// whole function then renaming all alloc cells, it may be cheaper to +// compute the DF for each alloc cell separately and throw it away. +// +// Consider exploiting liveness information to avoid creating dead +// φ-nodes which we then immediately remove. +// +// Also see many other "TODO: opt" suggestions in the code. + +import ( + "fmt" + "go/token" + "go/types" + "math/big" + "os" +) + +// If true, show diagnostic information at each step of lifting. +// Very verbose. +const debugLifting = false + +// domFrontier maps each block to the set of blocks in its dominance +// frontier. The outer slice is conceptually a map keyed by +// Block.Index. The inner slice is conceptually a set, possibly +// containing duplicates. +// +// TODO(adonovan): opt: measure impact of dups; consider a packed bit +// representation, e.g. big.Int, and bitwise parallel operations for +// the union step in the Children loop. +// +// domFrontier's methods mutate the slice's elements but not its +// length, so their receivers needn't be pointers. +// +type domFrontier [][]*BasicBlock + +func (df domFrontier) add(u, v *BasicBlock) { + p := &df[u.Index] + *p = append(*p, v) +} + +// build builds the dominance frontier df for the dominator (sub)tree +// rooted at u, using the Cytron et al. algorithm. +// +// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA +// by pruning the entire IDF computation, rather than merely pruning +// the DF -> IDF step. 
+func (df domFrontier) build(u *BasicBlock) { + // Encounter each node u in postorder of dom tree. + for _, child := range u.dom.children { + df.build(child) + } + for _, vb := range u.Succs { + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + for _, w := range u.dom.children { + for _, vb := range df[w.Index] { + // TODO(adonovan): opt: use word-parallel bitwise union. + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + } +} + +func buildDomFrontier(fn *Function) domFrontier { + df := make(domFrontier, len(fn.Blocks)) + df.build(fn.Blocks[0]) + if fn.Recover != nil { + df.build(fn.Recover) + } + return df +} + +func removeInstr(refs []Instruction, instr Instruction) []Instruction { + i := 0 + for _, ref := range refs { + if ref == instr { + continue + } + refs[i] = ref + i++ + } + for j := i; j != len(refs); j++ { + refs[j] = nil // aid GC + } + return refs[:i] +} + +// lift replaces local and new Allocs accessed only with +// load/store by SSA registers, inserting φ-nodes where necessary. +// The result is a program in classical pruned SSA form. +// +// Preconditions: +// - fn has no dead blocks (blockopt has run). +// - Def/use info (Operands and Referrers) is up-to-date. +// - The dominator tree is up-to-date. +// +func lift(fn *Function) { + // TODO(adonovan): opt: lots of little optimizations may be + // worthwhile here, especially if they cause us to avoid + // buildDomFrontier. For example: + // + // - Alloc never loaded? Eliminate. + // - Alloc never stored? Replace all loads with a zero constant. + // - Alloc stored once? Replace loads with dominating store; + // don't forget that an Alloc is itself an effective store + // of zero. + // - Alloc used only within a single block? + // Use degenerate algorithm avoiding φ-nodes. + // - Consider synergy with scalar replacement of aggregates (SRA). + // e.g. *(&x.f) where x is an Alloc. + // Perhaps we'd get better results if we generated this as x.f + // i.e. 
Field(x, .f) instead of Load(FieldIndex(x, .f)). + // Unclear. + // + // But we will start with the simplest correct code. + df := buildDomFrontier(fn) + + if debugLifting { + title := false + for i, blocks := range df { + if blocks != nil { + if !title { + fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) + title = true + } + fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) + } + } + } + + newPhis := make(newPhiMap) + + // During this pass we will replace some BasicBlock.Instrs + // (allocs, loads and stores) with nil, keeping a count in + // BasicBlock.gaps. At the end we will reset Instrs to the + // concatenation of all non-dead newPhis and non-nil Instrs + // for the block, reusing the original array if space permits. + + // While we're here, we also eliminate 'rundefers' + // instructions in functions that contain no 'defer' + // instructions. + usesDefer := false + + // A counter used to generate ~unique ids for Phi nodes, as an + // aid to debugging. We use large numbers to make them highly + // visible. All nodes are renumbered later. + fresh := 1000 + + // Determine which allocs we can lift and number them densely. + // The renaming phase uses this numbering for compact maps. + numAllocs := 0 + for _, b := range fn.Blocks { + b.gaps = 0 + b.rundefers = 0 + for _, instr := range b.Instrs { + switch instr := instr.(type) { + case *Alloc: + index := -1 + if liftAlloc(df, instr, newPhis, &fresh) { + index = numAllocs + numAllocs++ + } + instr.index = index + case *Defer: + usesDefer = true + case *RunDefers: + b.rundefers++ + } + } + } + + // renaming maps an alloc (keyed by index) to its replacement + // value. Initially the renaming contains nil, signifying the + // zero constant of the appropriate type; we construct the + // Const lazily at most once on each path through the domtree. + // TODO(adonovan): opt: cache per-function not per subtree. + renaming := make([]Value, numAllocs) + + // Renaming. 
+ rename(fn.Blocks[0], renaming, newPhis) + + // Eliminate dead φ-nodes. + removeDeadPhis(fn.Blocks, newPhis) + + // Prepend remaining live φ-nodes to each block. + for _, b := range fn.Blocks { + nps := newPhis[b] + j := len(nps) + + rundefersToKill := b.rundefers + if usesDefer { + rundefersToKill = 0 + } + + if j+b.gaps+rundefersToKill == 0 { + continue // fast path: no new phis or gaps + } + + // Compact nps + non-nil Instrs into a new slice. + // TODO(adonovan): opt: compact in situ (rightwards) + // if Instrs has sufficient space or slack. + dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill) + for i, np := range nps { + dst[i] = np.phi + } + for _, instr := range b.Instrs { + if instr == nil { + continue + } + if !usesDefer { + if _, ok := instr.(*RunDefers); ok { + continue + } + } + dst[j] = instr + j++ + } + b.Instrs = dst + } + + // Remove any fn.Locals that were lifted. + j := 0 + for _, l := range fn.Locals { + if l.index < 0 { + fn.Locals[j] = l + j++ + } + } + // Nil out fn.Locals[j:] to aid GC. + for i := j; i < len(fn.Locals); i++ { + fn.Locals[i] = nil + } + fn.Locals = fn.Locals[:j] +} + +// removeDeadPhis removes φ-nodes not transitively needed by a +// non-Phi, non-DebugRef instruction. +func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) { + // First pass: find the set of "live" φ-nodes: those reachable + // from some non-Phi instruction. + // + // We compute reachability in reverse, starting from each φ, + // rather than forwards, starting from each live non-Phi + // instruction, because this way visits much less of the + // Value graph. + livePhis := make(map[*Phi]bool) + for _, npList := range newPhis { + for _, np := range npList { + phi := np.phi + if !livePhis[phi] && phiHasDirectReferrer(phi) { + markLivePhi(livePhis, phi) + } + } + } + + // Existing φ-nodes due to && and || operators + // are all considered live (see Go issue 19622). 
+ for _, b := range blocks { + for _, phi := range b.phis() { + markLivePhi(livePhis, phi.(*Phi)) + } + } + + // Second pass: eliminate unused phis from newPhis. + for block, npList := range newPhis { + j := 0 + for _, np := range npList { + if livePhis[np.phi] { + npList[j] = np + j++ + } else { + // discard it, first removing it from referrers + for _, val := range np.phi.Edges { + if refs := val.Referrers(); refs != nil { + *refs = removeInstr(*refs, np.phi) + } + } + np.phi.block = nil + } + } + newPhis[block] = npList[:j] + } +} + +// markLivePhi marks phi, and all φ-nodes transitively reachable via +// its Operands, live. +func markLivePhi(livePhis map[*Phi]bool, phi *Phi) { + livePhis[phi] = true + for _, rand := range phi.Operands(nil) { + if q, ok := (*rand).(*Phi); ok { + if !livePhis[q] { + markLivePhi(livePhis, q) + } + } + } +} + +// phiHasDirectReferrer reports whether phi is directly referred to by +// a non-Phi instruction. Such instructions are the +// roots of the liveness traversal. +func phiHasDirectReferrer(phi *Phi) bool { + for _, instr := range *phi.Referrers() { + if _, ok := instr.(*Phi); !ok { + return true + } + } + return false +} + +type blockSet struct{ big.Int } // (inherit methods from Int) + +// add adds b to the set and returns true if the set changed. +func (s *blockSet) add(b *BasicBlock) bool { + i := b.Index + if s.Bit(i) != 0 { + return false + } + s.SetBit(&s.Int, i, 1) + return true +} + +// take removes an arbitrary element from a set s and +// returns its index, or returns -1 if empty. +func (s *blockSet) take() int { + l := s.BitLen() + for i := 0; i < l; i++ { + if s.Bit(i) == 1 { + s.SetBit(&s.Int, i, 0) + return i + } + } + return -1 +} + +// newPhi is a pair of a newly introduced φ-node and the lifted Alloc +// it replaces. +type newPhi struct { + phi *Phi + alloc *Alloc +} + +// newPhiMap records for each basic block, the set of newPhis that +// must be prepended to the block. 
+type newPhiMap map[*BasicBlock][]newPhi + +// liftAlloc determines whether alloc can be lifted into registers, +// and if so, it populates newPhis with all the φ-nodes it may require +// and returns true. +// +// fresh is a source of fresh ids for phi nodes. +// +func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool { + // Don't lift aggregates into registers, because we don't have + // a way to express their zero-constants. + switch deref(alloc.Type()).Underlying().(type) { + case *types.Array, *types.Struct: + return false + } + + // Don't lift named return values in functions that defer + // calls that may recover from panic. + if fn := alloc.Parent(); fn.Recover != nil { + for _, nr := range fn.namedResults { + if nr == alloc { + return false + } + } + } + + // Compute defblocks, the set of blocks containing a + // definition of the alloc cell. + var defblocks blockSet + for _, instr := range *alloc.Referrers() { + // Bail out if we discover the alloc is not liftable; + // the only operations permitted to use the alloc are + // loads/stores into the cell, and DebugRef. + switch instr := instr.(type) { + case *Store: + if instr.Val == alloc { + return false // address used as value + } + if instr.Addr != alloc { + panic("Alloc.Referrers is inconsistent") + } + defblocks.add(instr.Block()) + case *UnOp: + if instr.Op != token.MUL { + return false // not a load + } + if instr.X != alloc { + panic("Alloc.Referrers is inconsistent") + } + case *DebugRef: + // ok + default: + return false // some other instruction + } + } + // The Alloc itself counts as a (zero) definition of the cell. + defblocks.add(alloc.Block()) + + if debugLifting { + fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) + } + + fn := alloc.Parent() + + // Φ-insertion. 
+ // + // What follows is the body of the main loop of the insert-φ + // function described by Cytron et al, but instead of using + // counter tricks, we just reset the 'hasAlready' and 'work' + // sets each iteration. These are bitmaps so it's pretty cheap. + // + // TODO(adonovan): opt: recycle slice storage for W, + // hasAlready, defBlocks across liftAlloc calls. + var hasAlready blockSet + + // Initialize W and work to defblocks. + var work blockSet = defblocks // blocks seen + var W blockSet // blocks to do + W.Set(&defblocks.Int) + + // Traverse iterated dominance frontier, inserting φ-nodes. + for i := W.take(); i != -1; i = W.take() { + u := fn.Blocks[i] + for _, v := range df[u.Index] { + if hasAlready.add(v) { + // Create φ-node. + // It will be prepended to v.Instrs later, if needed. + phi := &Phi{ + Edges: make([]Value, len(v.Preds)), + Comment: alloc.Comment, + } + // This is merely a debugging aid: + phi.setNum(*fresh) + *fresh++ + + phi.pos = alloc.Pos() + phi.setType(deref(alloc.Type())) + phi.block = v + if debugLifting { + fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) + } + newPhis[v] = append(newPhis[v], newPhi{phi, alloc}) + + if work.add(v) { + W.add(v) + } + } + } + } + + return true +} + +// replaceAll replaces all intraprocedural uses of x with y, +// updating x.Referrers and y.Referrers. +// Precondition: x.Referrers() != nil, i.e. x must be local to some function. +// +func replaceAll(x, y Value) { + var rands []*Value + pxrefs := x.Referrers() + pyrefs := y.Referrers() + for _, instr := range *pxrefs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if *rand != nil { + if *rand == x { + *rand = y + } + } + } + if pyrefs != nil { + *pyrefs = append(*pyrefs, instr) // dups ok + } + } + *pxrefs = nil // x is now unreferenced +} + +// renamed returns the value to which alloc is being renamed, +// constructing it lazily if it's the implicit zero initialization. 
+// +func renamed(renaming []Value, alloc *Alloc) Value { + v := renaming[alloc.index] + if v == nil { + v = zeroConst(deref(alloc.Type())) + renaming[alloc.index] = v + } + return v +} + +// rename implements the (Cytron et al) SSA renaming algorithm, a +// preorder traversal of the dominator tree replacing all loads of +// Alloc cells with the value stored to that cell by the dominating +// store instruction. For lifting, we need only consider loads, +// stores and φ-nodes. +// +// renaming is a map from *Alloc (keyed by index number) to its +// dominating stored value; newPhis[x] is the set of new φ-nodes to be +// prepended to block x. +// +func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) { + // Each φ-node becomes the new name for its associated Alloc. + for _, np := range newPhis[u] { + phi := np.phi + alloc := np.alloc + renaming[alloc.index] = phi + } + + // Rename loads and stores of allocs. + for i, instr := range u.Instrs { + switch instr := instr.(type) { + case *Alloc: + if instr.index >= 0 { // store of zero to Alloc cell + // Replace dominated loads by the zero value. + renaming[instr.index] = nil + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) + } + // Delete the Alloc. + u.Instrs[i] = nil + u.gaps++ + } + + case *Store: + if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell + // Replace dominated loads by the stored value. + renaming[alloc.index] = instr.Val + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", + instr, instr.Val.Name()) + } + // Remove the store from the referrer list of the stored value. + if refs := instr.Val.Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + // Delete the Store. 
+ u.Instrs[i] = nil + u.gaps++ + } + + case *UnOp: + if instr.Op == token.MUL { + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", + instr.Name(), instr, newval.Name()) + } + // Replace all references to + // the loaded value by the + // dominating stored value. + replaceAll(instr, newval) + // Delete the Load. + u.Instrs[i] = nil + u.gaps++ + } + } + + case *DebugRef: + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell + if instr.IsAddr { + instr.X = renamed(renaming, alloc) + instr.IsAddr = false + + // Add DebugRef to instr.X's referrers. + if refs := instr.X.Referrers(); refs != nil { + *refs = append(*refs, instr) + } + } else { + // A source expression denotes the address + // of an Alloc that was optimized away. + instr.X = nil + + // Delete the DebugRef. + u.Instrs[i] = nil + u.gaps++ + } + } + } + } + + // For each φ-node in a CFG successor, rename the edge. + for _, v := range u.Succs { + phis := newPhis[v] + if len(phis) == 0 { + continue + } + i := v.predIndex(u) + for _, np := range phis { + phi := np.phi + alloc := np.alloc + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", + phi.Name(), u, v, i, alloc.Name(), newval.Name()) + } + phi.Edges[i] = newval + if prefs := newval.Referrers(); prefs != nil { + *prefs = append(*prefs, phi) + } + } + } + + // Continue depth-first recursion over domtree, pushing a + // fresh copy of the renaming map for each subtree. + for i, v := range u.dom.children { + r := renaming + if i < len(u.dom.children)-1 { + // On all but the final iteration, we must make + // a copy to avoid destructive update. 
+ r = make([]Value, len(renaming)) + copy(r, renaming) + } + rename(v, r, newPhis) + } + +} diff --git a/vendor/golang.org/x/tools/go/ssa/lvalue.go b/vendor/golang.org/x/tools/go/ssa/lvalue.go new file mode 100644 index 000000000000..4d85be3ec78e --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/lvalue.go @@ -0,0 +1,120 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// lvalues are the union of addressable expressions and map-index +// expressions. + +import ( + "go/ast" + "go/token" + "go/types" +) + +// An lvalue represents an assignable location that may appear on the +// left-hand side of an assignment. This is a generalization of a +// pointer to permit updates to elements of maps. +// +type lvalue interface { + store(fn *Function, v Value) // stores v into the location + load(fn *Function) Value // loads the contents of the location + address(fn *Function) Value // address of the location + typ() types.Type // returns the type of the location +} + +// An address is an lvalue represented by a true pointer. +type address struct { + addr Value + pos token.Pos // source position + expr ast.Expr // source syntax of the value (not address) [debug mode] +} + +func (a *address) load(fn *Function) Value { + load := emitLoad(fn, a.addr) + load.pos = a.pos + return load +} + +func (a *address) store(fn *Function, v Value) { + store := emitStore(fn, a.addr, v, a.pos) + if a.expr != nil { + // store.Val is v, converted for assignability. + emitDebugRef(fn, a.expr, store.Val, false) + } +} + +func (a *address) address(fn *Function) Value { + if a.expr != nil { + emitDebugRef(fn, a.expr, a.addr, true) + } + return a.addr +} + +func (a *address) typ() types.Type { + return deref(a.addr.Type()) +} + +// An element is an lvalue represented by m[k], the location of an +// element of a map or string. 
These locations are not addressable +// since pointers cannot be formed from them, but they do support +// load(), and in the case of maps, store(). +// +type element struct { + m, k Value // map or string + t types.Type // map element type or string byte type + pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v) +} + +func (e *element) load(fn *Function) Value { + l := &Lookup{ + X: e.m, + Index: e.k, + } + l.setPos(e.pos) + l.setType(e.t) + return fn.emit(l) +} + +func (e *element) store(fn *Function, v Value) { + up := &MapUpdate{ + Map: e.m, + Key: e.k, + Value: emitConv(fn, v, e.t), + } + up.pos = e.pos + fn.emit(up) +} + +func (e *element) address(fn *Function) Value { + panic("map/string elements are not addressable") +} + +func (e *element) typ() types.Type { + return e.t +} + +// A blank is a dummy variable whose name is "_". +// It is not reified: loads are illegal and stores are ignored. +// +type blank struct{} + +func (bl blank) load(fn *Function) Value { + panic("blank.load is illegal") +} + +func (bl blank) store(fn *Function, v Value) { + // no-op +} + +func (bl blank) address(fn *Function) Value { + panic("blank var is not addressable") +} + +func (bl blank) typ() types.Type { + // This should be the type of the blank Ident; the typechecker + // doesn't provide this yet, but fortunately, we don't need it + // yet either. + panic("blank.typ is unimplemented") +} diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go new file mode 100644 index 000000000000..9cf383916bbe --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/methods.go @@ -0,0 +1,239 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines utilities for population of method sets. 
+ +import ( + "fmt" + "go/types" +) + +// MethodValue returns the Function implementing method sel, building +// wrapper methods on demand. It returns nil if sel denotes an +// abstract (interface) method. +// +// Precondition: sel.Kind() == MethodVal. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) MethodValue(sel *types.Selection) *Function { + if sel.Kind() != types.MethodVal { + panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) + } + T := sel.Recv() + if isInterface(T) { + return nil // abstract method + } + if prog.mode&LogSource != 0 { + defer logStack("MethodValue %s %v", T, sel)() + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + return prog.addMethod(prog.createMethodSet(T), sel) +} + +// LookupMethod returns the implementation of the method of type T +// identified by (pkg, name). It returns nil if the method exists but +// is abstract, and panics if T has no such method. +// +func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { + sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) + if sel == nil { + panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) + } + return prog.MethodValue(sel) +} + +// methodSet contains the (concrete) methods of a non-interface type. +type methodSet struct { + mapping map[string]*Function // populated lazily + complete bool // mapping contains all methods +} + +// Precondition: !isInterface(T). 
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) createMethodSet(T types.Type) *methodSet { + mset, ok := prog.methodSets.At(T).(*methodSet) + if !ok { + mset = &methodSet{mapping: make(map[string]*Function)} + prog.methodSets.Set(T, mset) + } + return mset +} + +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function { + if sel.Kind() == types.MethodExpr { + panic(sel) + } + id := sel.Obj().Id() + fn := mset.mapping[id] + if fn == nil { + obj := sel.Obj().(*types.Func) + + needsPromotion := len(sel.Index()) > 1 + needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv()) + if needsPromotion || needsIndirection { + fn = makeWrapper(prog, sel) + } else { + fn = prog.declaredFunc(obj) + } + if fn.Signature.Recv() == nil { + panic(fn) // missing receiver + } + mset.mapping[id] = fn + } + return fn +} + +// RuntimeTypes returns a new unordered slice containing all +// concrete types in the program for which a complete (non-empty) +// method set is required at run-time. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) RuntimeTypes() []types.Type { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + var res []types.Type + prog.methodSets.Iterate(func(T types.Type, v interface{}) { + if v.(*methodSet).complete { + res = append(res, T) + } + }) + return res +} + +// declaredFunc returns the concrete function/method denoted by obj. +// Panic ensues if there is none. +// +func (prog *Program) declaredFunc(obj *types.Func) *Function { + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Function) + } + panic("no concrete method: " + obj.String()) +} + +// needMethodsOf ensures that runtime type information (including the +// complete method set) is available for the specified type T and all +// its subcomponents. 
+// +// needMethodsOf must be called for at least every type that is an +// operand of some MakeInterface instruction, and for the type of +// every exported package member. +// +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// +// Thread-safe. (Called via emitConv from multiple builder goroutines.) +// +// TODO(adonovan): make this faster. It accounts for 20% of SSA build time. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) needMethodsOf(T types.Type) { + prog.methodsMu.Lock() + prog.needMethods(T, false) + prog.methodsMu.Unlock() +} + +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// Recursive case: skip => don't create methods for T. +// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func (prog *Program) needMethods(T types.Type, skip bool) { + // Each package maintains its own set of types it has visited. + if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok { + // needMethods(T) was previously called + if !prevSkip || skip { + return // already seen, with same or false 'skip' value + } + } + prog.runtimeTypes.Set(T, skip) + + tmset := prog.MethodSets.MethodSet(T) + + if !skip && !isInterface(T) && tmset.Len() > 0 { + // Create methods of T. + mset := prog.createMethodSet(T) + if !mset.complete { + mset.complete = true + n := tmset.Len() + for i := 0; i < n; i++ { + prog.addMethod(mset, tmset.At(i)) + } + } + } + + // Recursion over signatures of each method. + for i := 0; i < tmset.Len(); i++ { + sig := tmset.At(i).Type().(*types.Signature) + prog.needMethods(sig.Params(), false) + prog.needMethods(sig.Results(), false) + } + + switch t := T.(type) { + case *types.Basic: + // nop + + case *types.Interface: + // nop---handled by recursion over method set. 
+ + case *types.Pointer: + prog.needMethods(t.Elem(), false) + + case *types.Slice: + prog.needMethods(t.Elem(), false) + + case *types.Chan: + prog.needMethods(t.Elem(), false) + + case *types.Map: + prog.needMethods(t.Key(), false) + prog.needMethods(t.Elem(), false) + + case *types.Signature: + if t.Recv() != nil { + panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv())) + } + prog.needMethods(t.Params(), false) + prog.needMethods(t.Results(), false) + + case *types.Named: + // A pointer-to-named type can be derived from a named + // type via reflection. It may have methods too. + prog.needMethods(types.NewPointer(T), false) + + // Consider 'type T struct{S}' where S has methods. + // Reflection provides no way to get from T to struct{S}, + // only to S, so the method set of struct{S} is unwanted, + // so set 'skip' flag during recursion. + prog.needMethods(t.Underlying(), true) + + case *types.Array: + prog.needMethods(t.Elem(), false) + + case *types.Struct: + for i, n := 0, t.NumFields(); i < n; i++ { + prog.needMethods(t.Field(i).Type(), false) + } + + case *types.Tuple: + for i, n := 0, t.Len(); i < n; i++ { + prog.needMethods(t.At(i).Type(), false) + } + + default: + panic(T) + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/mode.go b/vendor/golang.org/x/tools/go/ssa/mode.go new file mode 100644 index 000000000000..d2a269893a71 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/mode.go @@ -0,0 +1,100 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the BuilderMode type and its command-line flag. + +import ( + "bytes" + "fmt" +) + +// BuilderMode is a bitmask of options for diagnostics and checking. +// +// *BuilderMode satisfies the flag.Value interface. 
Example: +// +// var mode = ssa.BuilderMode(0) +// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) } +// +type BuilderMode uint + +const ( + PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout + PrintFunctions // Print function SSA code to stdout + LogSource // Log source locations as SSA builder progresses + SanityCheckFunctions // Perform sanity checking of function bodies + NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers + BuildSerially // Build packages serially, not in parallel. + GlobalDebug // Enable debug info for all packages + BareInits // Build init functions without guards or calls to dependent inits +) + +const BuilderModeDoc = `Options controlling the SSA builder. +The value is a sequence of zero or more of these letters: +C perform sanity [C]hecking of the SSA form. +D include [D]ebug info for every function. +P print [P]ackage inventory. +F print [F]unction SSA code. +S log [S]ource locations as SSA builder progresses. +L build distinct packages seria[L]ly instead of in parallel. +N build [N]aive SSA form: don't replace local loads/stores with registers. +I build bare [I]nit functions: no init guards or calls to dependent inits. +` + +func (m BuilderMode) String() string { + var buf bytes.Buffer + if m&GlobalDebug != 0 { + buf.WriteByte('D') + } + if m&PrintPackages != 0 { + buf.WriteByte('P') + } + if m&PrintFunctions != 0 { + buf.WriteByte('F') + } + if m&LogSource != 0 { + buf.WriteByte('S') + } + if m&SanityCheckFunctions != 0 { + buf.WriteByte('C') + } + if m&NaiveForm != 0 { + buf.WriteByte('N') + } + if m&BuildSerially != 0 { + buf.WriteByte('L') + } + return buf.String() +} + +// Set parses the flag characters in s and updates *m. 
+func (m *BuilderMode) Set(s string) error { + var mode BuilderMode + for _, c := range s { + switch c { + case 'D': + mode |= GlobalDebug + case 'P': + mode |= PrintPackages + case 'F': + mode |= PrintFunctions + case 'S': + mode |= LogSource | BuildSerially + case 'C': + mode |= SanityCheckFunctions + case 'N': + mode |= NaiveForm + case 'L': + mode |= BuildSerially + default: + return fmt.Errorf("unknown BuilderMode option: %q", c) + } + } + *m = mode + return nil +} + +// Get returns m. +func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go new file mode 100644 index 000000000000..3333ba41a00f --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/print.go @@ -0,0 +1,431 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the String() methods for all Value and +// Instruction types. + +import ( + "bytes" + "fmt" + "go/types" + "io" + "reflect" + "sort" + + "golang.org/x/tools/go/types/typeutil" +) + +// relName returns the name of v relative to i. +// In most cases, this is identical to v.Name(), but references to +// Functions (including methods) and Globals use RelString and +// all types are displayed with relType, so that only cross-package +// references are package-qualified. +// +func relName(v Value, i Instruction) string { + var from *types.Package + if i != nil { + from = i.Parent().pkg() + } + switch v := v.(type) { + case Member: // *Function or *Global + return v.RelString(from) + case *Const: + return v.RelString(from) + } + return v.Name() +} + +func relType(t types.Type, from *types.Package) string { + return types.TypeString(t, types.RelativeTo(from)) +} + +func relString(m Member, from *types.Package) string { + // NB: not all globals have an Object (e.g. 
init$guard), + // so use Package().Object not Object.Package(). + if pkg := m.Package().Pkg; pkg != nil && pkg != from { + return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) + } + return m.Name() +} + +// Value.String() +// +// This method is provided only for debugging. +// It never appears in disassembly, which uses Value.Name(). + +func (v *Parameter) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *FreeVar) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *Builtin) String() string { + return fmt.Sprintf("builtin %s", v.Name()) +} + +// Instruction.String() + +func (v *Alloc) String() string { + op := "local" + if v.Heap { + op = "new" + } + from := v.Parent().pkg() + return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment) +} + +func (v *Phi) String() string { + var b bytes.Buffer + b.WriteString("phi [") + for i, edge := range v.Edges { + if i > 0 { + b.WriteString(", ") + } + // Be robust against malformed CFG. 
+ if v.block == nil { + b.WriteString("??") + continue + } + block := -1 + if i < len(v.block.Preds) { + block = v.block.Preds[i].Index + } + fmt.Fprintf(&b, "%d: ", block) + edgeVal := "" // be robust + if edge != nil { + edgeVal = relName(edge, v) + } + b.WriteString(edgeVal) + } + b.WriteString("]") + if v.Comment != "" { + b.WriteString(" #") + b.WriteString(v.Comment) + } + return b.String() +} + +func printCall(v *CallCommon, prefix string, instr Instruction) string { + var b bytes.Buffer + b.WriteString(prefix) + if !v.IsInvoke() { + b.WriteString(relName(v.Value, instr)) + } else { + fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name()) + } + b.WriteString("(") + for i, arg := range v.Args { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(arg, instr)) + } + if v.Signature().Variadic() { + b.WriteString("...") + } + b.WriteString(")") + return b.String() +} + +func (c *CallCommon) String() string { + return printCall(c, "", nil) +} + +func (v *Call) String() string { + return printCall(&v.Call, "", v) +} + +func (v *BinOp) String() string { + return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v)) +} + +func (v *UnOp) String() string { + return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk)) +} + +func printConv(prefix string, v, x Value) string { + from := v.Parent().pkg() + return fmt.Sprintf("%s %s <- %s (%s)", + prefix, + relType(v.Type(), from), + relType(x.Type(), from), + relName(x, v.(Instruction))) +} + +func (v *ChangeType) String() string { return printConv("changetype", v, v.X) } +func (v *Convert) String() string { return printConv("convert", v, v.X) } +func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) } +func (v *MakeInterface) String() string { return printConv("make", v, v.X) } + +func (v *MakeClosure) String() string { + var b bytes.Buffer + fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v)) + if v.Bindings != nil { + 
b.WriteString(" [") + for i, c := range v.Bindings { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(c, v)) + } + b.WriteString("]") + } + return b.String() +} + +func (v *MakeSlice) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s %s", + relType(v.Type(), from), + relName(v.Len, v), + relName(v.Cap, v)) +} + +func (v *Slice) String() string { + var b bytes.Buffer + b.WriteString("slice ") + b.WriteString(relName(v.X, v)) + b.WriteString("[") + if v.Low != nil { + b.WriteString(relName(v.Low, v)) + } + b.WriteString(":") + if v.High != nil { + b.WriteString(relName(v.High, v)) + } + if v.Max != nil { + b.WriteString(":") + b.WriteString(relName(v.Max, v)) + } + b.WriteString("]") + return b.String() +} + +func (v *MakeMap) String() string { + res := "" + if v.Reserve != nil { + res = relName(v.Reserve, v) + } + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s", relType(v.Type(), from), res) +} + +func (v *MakeChan) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v)) +} + +func (v *FieldAddr) String() string { + st := deref(v.X.Type()).Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *Field) String() string { + st := v.X.Type().Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" 
+ if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *IndexAddr) String() string { + return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Index) String() string { + return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Lookup) String() string { + return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk)) +} + +func (v *Range) String() string { + return "range " + relName(v.X, v) +} + +func (v *Next) String() string { + return "next " + relName(v.Iter, v) +} + +func (v *TypeAssert) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from)) +} + +func (v *Extract) String() string { + return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index) +} + +func (s *Jump) String() string { + // Be robust against malformed CFG. + block := -1 + if s.block != nil && len(s.block.Succs) == 1 { + block = s.block.Succs[0].Index + } + return fmt.Sprintf("jump %d", block) +} + +func (s *If) String() string { + // Be robust against malformed CFG. 
+ tblock, fblock := -1, -1 + if s.block != nil && len(s.block.Succs) == 2 { + tblock = s.block.Succs[0].Index + fblock = s.block.Succs[1].Index + } + return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock) +} + +func (s *Go) String() string { + return printCall(&s.Call, "go ", s) +} + +func (s *Panic) String() string { + return "panic " + relName(s.X, s) +} + +func (s *Return) String() string { + var b bytes.Buffer + b.WriteString("return") + for i, r := range s.Results { + if i == 0 { + b.WriteString(" ") + } else { + b.WriteString(", ") + } + b.WriteString(relName(r, s)) + } + return b.String() +} + +func (*RunDefers) String() string { + return "rundefers" +} + +func (s *Send) String() string { + return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s)) +} + +func (s *Defer) String() string { + return printCall(&s.Call, "defer ", s) +} + +func (s *Select) String() string { + var b bytes.Buffer + for i, st := range s.States { + if i > 0 { + b.WriteString(", ") + } + if st.Dir == types.RecvOnly { + b.WriteString("<-") + b.WriteString(relName(st.Chan, s)) + } else { + b.WriteString(relName(st.Chan, s)) + b.WriteString("<-") + b.WriteString(relName(st.Send, s)) + } + } + non := "" + if !s.Blocking { + non = "non" + } + return fmt.Sprintf("select %sblocking [%s]", non, b.String()) +} + +func (s *Store) String() string { + return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s)) +} + +func (s *MapUpdate) String() string { + return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) +} + +func (s *DebugRef) String() string { + p := s.Parent().Prog.Fset.Position(s.Pos()) + var descr interface{} + if s.object != nil { + descr = s.object // e.g. "var x int" + } else { + descr = reflect.TypeOf(s.Expr) // e.g. 
"*ast.CallExpr" + } + var addr string + if s.IsAddr { + addr = "address of " + } + return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) +} + +func (p *Package) String() string { + return "package " + p.Pkg.Path() +} + +var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer + +func (p *Package) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WritePackage(&buf, p) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WritePackage writes to buf a human-readable summary of p. +func WritePackage(buf *bytes.Buffer, p *Package) { + fmt.Fprintf(buf, "%s:\n", p) + + var names []string + maxname := 0 + for name := range p.Members { + if l := len(name); l > maxname { + maxname = l + } + names = append(names, name) + } + + from := p.Pkg + sort.Strings(names) + for _, name := range names { + switch mem := p.Members[name].(type) { + case *NamedConst: + fmt.Fprintf(buf, " const %-*s %s = %s\n", + maxname, name, mem.Name(), mem.Value.RelString(from)) + + case *Function: + fmt.Fprintf(buf, " func %-*s %s\n", + maxname, name, relType(mem.Type(), from)) + + case *Type: + fmt.Fprintf(buf, " type %-*s %s\n", + maxname, name, relType(mem.Type().Underlying(), from)) + for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { + fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) + } + + case *Global: + fmt.Fprintf(buf, " var %-*s %s\n", + maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from)) + } + } + + fmt.Fprintf(buf, "\n") +} + +func commaOk(x bool) string { + if x { + return ",ok" + } + return "" +} diff --git a/vendor/golang.org/x/tools/go/ssa/sanity.go b/vendor/golang.org/x/tools/go/ssa/sanity.go new file mode 100644 index 000000000000..0a7abc5e98fc --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/sanity.go @@ -0,0 +1,532 @@ +// Copyright 2013 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// An optional pass for sanity-checking invariants of the SSA representation. +// Currently it checks CFG invariants but little at the instruction level. + +import ( + "fmt" + "go/types" + "io" + "os" + "strings" +) + +type sanity struct { + reporter io.Writer + fn *Function + block *BasicBlock + instrs map[Instruction]struct{} + insane bool +} + +// sanityCheck performs integrity checking of the SSA representation +// of the function fn and returns true if it was valid. Diagnostics +// are written to reporter if non-nil, os.Stderr otherwise. Some +// diagnostics are only warnings and do not imply a negative result. +// +// Sanity-checking is intended to facilitate the debugging of code +// transformation passes. +// +func sanityCheck(fn *Function, reporter io.Writer) bool { + if reporter == nil { + reporter = os.Stderr + } + return (&sanity{reporter: reporter}).checkFunction(fn) +} + +// mustSanityCheck is like sanityCheck but panics instead of returning +// a negative result. +// +func mustSanityCheck(fn *Function, reporter io.Writer) { + if !sanityCheck(fn, reporter) { + fn.WriteTo(os.Stderr) + panic("SanityCheck failed") + } +} + +func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { + fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) + if s.block != nil { + fmt.Fprintf(s.reporter, ", block %s", s.block) + } + io.WriteString(s.reporter, ": ") + fmt.Fprintf(s.reporter, format, args...) + io.WriteString(s.reporter, "\n") +} + +func (s *sanity) errorf(format string, args ...interface{}) { + s.insane = true + s.diagnostic("Error", format, args...) +} + +func (s *sanity) warnf(format string, args ...interface{}) { + s.diagnostic("Warning", format, args...) +} + +// findDuplicate returns an arbitrary basic block that appeared more +// than once in blocks, or nil if all were unique. 
+func findDuplicate(blocks []*BasicBlock) *BasicBlock { + if len(blocks) < 2 { + return nil + } + if blocks[0] == blocks[1] { + return blocks[0] + } + // Slow path: + m := make(map[*BasicBlock]bool) + for _, b := range blocks { + if m[b] { + return b + } + m[b] = true + } + return nil +} + +func (s *sanity) checkInstr(idx int, instr Instruction) { + switch instr := instr.(type) { + case *If, *Jump, *Return, *Panic: + s.errorf("control flow instruction not at end of block") + case *Phi: + if idx == 0 { + // It suffices to apply this check to just the first phi node. + if dup := findDuplicate(s.block.Preds); dup != nil { + s.errorf("phi node in block with duplicate predecessor %s", dup) + } + } else { + prev := s.block.Instrs[idx-1] + if _, ok := prev.(*Phi); !ok { + s.errorf("Phi instruction follows a non-Phi: %T", prev) + } + } + if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { + s.errorf("phi node has %d edges but %d predecessors", ne, np) + + } else { + for i, e := range instr.Edges { + if e == nil { + s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i]) + } + } + } + + case *Alloc: + if !instr.Heap { + found := false + for _, l := range s.fn.Locals { + if l == instr { + found = true + break + } + } + if !found { + s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) + } + } + + case *BinOp: + case *Call: + case *ChangeInterface: + case *ChangeType: + case *Convert: + if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok { + if _, ok := instr.Type().Underlying().(*types.Basic); !ok { + s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type()) + } + } + + case *Defer: + case *Extract: + case *Field: + case *FieldAddr: + case *Go: + case *Index: + case *IndexAddr: + case *Lookup: + case *MakeChan: + case *MakeClosure: + numFree := len(instr.Fn.(*Function).FreeVars) + numBind := len(instr.Bindings) + if numFree != numBind { + 
s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", + numBind, instr.Fn, numFree) + + } + if recv := instr.Type().(*types.Signature).Recv(); recv != nil { + s.errorf("MakeClosure's type includes receiver %s", recv.Type()) + } + + case *MakeInterface: + case *MakeMap: + case *MakeSlice: + case *MapUpdate: + case *Next: + case *Range: + case *RunDefers: + case *Select: + case *Send: + case *Slice: + case *Store: + case *TypeAssert: + case *UnOp: + case *DebugRef: + // TODO(adonovan): implement checks. + default: + panic(fmt.Sprintf("Unknown instruction type: %T", instr)) + } + + if call, ok := instr.(CallInstruction); ok { + if call.Common().Signature() == nil { + s.errorf("nil signature: %s", call) + } + } + + // Check that value-defining instructions have valid types + // and a valid referrer list. + if v, ok := instr.(Value); ok { + t := v.Type() + if t == nil { + s.errorf("no type: %s = %s", v.Name(), v) + } else if t == tRangeIter { + // not a proper type; ignore. + } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { + s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) + } + s.checkReferrerList(v) + } + + // Untyped constants are legal as instruction Operands(), + // for example: + // _ = "foo"[0] + // or: + // if wordsize==64 {...} + + // All other non-Instruction Values can be found via their + // enclosing Function or Package. 
+} + +func (s *sanity) checkFinalInstr(instr Instruction) { + switch instr := instr.(type) { + case *If: + if nsuccs := len(s.block.Succs); nsuccs != 2 { + s.errorf("If-terminated block has %d successors; expected 2", nsuccs) + return + } + if s.block.Succs[0] == s.block.Succs[1] { + s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) + return + } + + case *Jump: + if nsuccs := len(s.block.Succs); nsuccs != 1 { + s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) + return + } + + case *Return: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Return-terminated block has %d successors; expected none", nsuccs) + return + } + if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { + s.errorf("%d-ary return in %d-ary function", na, nf) + } + + case *Panic: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Panic-terminated block has %d successors; expected none", nsuccs) + return + } + + default: + s.errorf("non-control flow instruction at end of block") + } +} + +func (s *sanity) checkBlock(b *BasicBlock, index int) { + s.block = b + + if b.Index != index { + s.errorf("block has incorrect Index %d", b.Index) + } + if b.parent != s.fn { + s.errorf("block has incorrect parent %s", b.parent) + } + + // Check all blocks are reachable. + // (The entry block is always implicitly reachable, + // as is the Recover block, if any.) + if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 { + s.warnf("unreachable block") + if b.Instrs == nil { + // Since this block is about to be pruned, + // tolerating transient problems in it + // simplifies other optimizations. + return + } + } + + // Check predecessor and successor relations are dual, + // and that all blocks in CFG belong to same function. 
+ for _, a := range b.Preds { + found := false + for _, bb := range a.Succs { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) + } + if a.parent != s.fn { + s.errorf("predecessor %s belongs to different function %s", a, a.parent) + } + } + for _, c := range b.Succs { + found := false + for _, bb := range c.Preds { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) + } + if c.parent != s.fn { + s.errorf("successor %s belongs to different function %s", c, c.parent) + } + } + + // Check each instruction is sane. + n := len(b.Instrs) + if n == 0 { + s.errorf("basic block contains no instructions") + } + var rands [10]*Value // reuse storage + for j, instr := range b.Instrs { + if instr == nil { + s.errorf("nil instruction at index %d", j) + continue + } + if b2 := instr.Block(); b2 == nil { + s.errorf("nil Block() for instruction at index %d", j) + continue + } else if b2 != b { + s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) + continue + } + if j < n-1 { + s.checkInstr(j, instr) + } else { + s.checkFinalInstr(instr) + } + + // Check Instruction.Operands. + operands: + for i, op := range instr.Operands(rands[:0]) { + if op == nil { + s.errorf("nil operand pointer %d of %s", i, instr) + continue + } + val := *op + if val == nil { + continue // a nil operand is ok + } + + // Check that "untyped" types only appear on constant operands. + if _, ok := (*op).(*Const); !ok { + if basic, ok := (*op).Type().(*types.Basic); ok { + if basic.Info()&types.IsUntyped != 0 { + s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) + } + } + } + + // Check that Operands that are also Instructions belong to same function. + // TODO(adonovan): also check their block dominates block b. 
+ if val, ok := val.(Instruction); ok { + if val.Block() == nil { + s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val) + } else if val.Parent() != s.fn { + s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) + } + } + + // Check that each function-local operand of + // instr refers back to instr. (NB: quadratic) + switch val := val.(type) { + case *Const, *Global, *Builtin: + continue // not local + case *Function: + if val.parent == nil { + continue // only anon functions are local + } + } + + // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. + + if refs := val.Referrers(); refs != nil { + for _, ref := range *refs { + if ref == instr { + continue operands + } + } + s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) + } else { + s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) + } + } + } +} + +func (s *sanity) checkReferrerList(v Value) { + refs := v.Referrers() + if refs == nil { + s.errorf("%s has missing referrer list", v.Name()) + return + } + for i, ref := range *refs { + if _, ok := s.instrs[ref]; !ok { + s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) + } + } +} + +func (s *sanity) checkFunction(fn *Function) bool { + // TODO(adonovan): check Function invariants: + // - check params match signature + // - check transient fields are nil + // - warn if any fn.Locals do not appear among block instructions. + s.fn = fn + if fn.Prog == nil { + s.errorf("nil Prog") + } + + _ = fn.String() // must not crash + _ = fn.RelString(fn.pkg()) // must not crash + + // All functions have a package, except delegates (which are + // shared across packages, or duplicated as weak symbols in a + // separate-compilation model), and error.Error. 
+ if fn.Pkg == nil { + if strings.HasPrefix(fn.Synthetic, "wrapper ") || + strings.HasPrefix(fn.Synthetic, "bound ") || + strings.HasPrefix(fn.Synthetic, "thunk ") || + strings.HasSuffix(fn.name, "Error") { + // ok + } else { + s.errorf("nil Pkg") + } + } + if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn { + s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) + } + for i, l := range fn.Locals { + if l.Parent() != fn { + s.errorf("Local %s at index %d has wrong parent", l.Name(), i) + } + if l.Heap { + s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) + } + } + // Build the set of valid referrers. + s.instrs = make(map[Instruction]struct{}) + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + s.instrs[instr] = struct{}{} + } + } + for i, p := range fn.Params { + if p.Parent() != fn { + s.errorf("Param %s at index %d has wrong parent", p.Name(), i) + } + // Check common suffix of Signature and Params match type. + if sig := fn.Signature; sig != nil { + j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params + if j < 0 { + continue + } + if !types.Identical(p.Type(), sig.Params().At(j).Type()) { + s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type()) + + } + } + s.checkReferrerList(p) + } + for i, fv := range fn.FreeVars { + if fv.Parent() != fn { + s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) + } + s.checkReferrerList(fv) + } + + if fn.Blocks != nil && len(fn.Blocks) == 0 { + // Function _had_ blocks (so it's not external) but + // they were "optimized" away, even the entry block. 
+ s.errorf("Blocks slice is non-nil but empty") + } + for i, b := range fn.Blocks { + if b == nil { + s.warnf("nil *BasicBlock at f.Blocks[%d]", i) + continue + } + s.checkBlock(b, i) + } + if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover { + s.errorf("Recover block is not in Blocks slice") + } + + s.block = nil + for i, anon := range fn.AnonFuncs { + if anon.Parent() != fn { + s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) + } + } + s.fn = nil + return !s.insane +} + +// sanityCheckPackage checks invariants of packages upon creation. +// It does not require that the package is built. +// Unlike sanityCheck (for functions), it just panics at the first error. +func sanityCheckPackage(pkg *Package) { + if pkg.Pkg == nil { + panic(fmt.Sprintf("Package %s has no Object", pkg)) + } + _ = pkg.String() // must not crash + + for name, mem := range pkg.Members { + if name != mem.Name() { + panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", + pkg.Pkg.Path(), mem, mem.Name(), name)) + } + obj := mem.Object() + if obj == nil { + // This check is sound because fields + // {Global,Function}.object have type + // types.Object. (If they were declared as + // *types.{Var,Func}, we'd have a non-empty + // interface containing a nil pointer.) + + continue // not all members have typechecker objects + } + if obj.Name() != name { + if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { + // Ok. The name of a declared init function varies between + // its types.Func ("init") and its ssa.Function ("init#%d"). 
+ } else { + panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", + pkg.Pkg.Path(), mem, obj.Name(), name)) + } + } + if obj.Pos() != mem.Pos() { + panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos())) + } + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/source.go b/vendor/golang.org/x/tools/go/ssa/source.go new file mode 100644 index 000000000000..6d2223edaa36 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/source.go @@ -0,0 +1,293 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines utilities for working with source positions +// or source-level named entities ("objects"). + +// TODO(adonovan): test that {Value,Instruction}.Pos() positions match +// the originating syntax, as specified. + +import ( + "go/ast" + "go/token" + "go/types" +) + +// EnclosingFunction returns the function that contains the syntax +// node denoted by path. +// +// Syntax associated with package-level variable specifications is +// enclosed by the package's init() function. +// +// Returns nil if not found; reasons might include: +// - the node is not enclosed by any function. +// - the node is within an anonymous function (FuncLit) and +// its SSA function has not been created yet +// (pkg.Build() has not yet been called). +// +func EnclosingFunction(pkg *Package, path []ast.Node) *Function { + // Start with package-level function... + fn := findEnclosingPackageLevelFunction(pkg, path) + if fn == nil { + return nil // not in any function + } + + // ...then walk down the nested anonymous functions. 
+ n := len(path) +outer: + for i := range path { + if lit, ok := path[n-1-i].(*ast.FuncLit); ok { + for _, anon := range fn.AnonFuncs { + if anon.Pos() == lit.Type.Func { + fn = anon + continue outer + } + } + // SSA function not found: + // - package not yet built, or maybe + // - builder skipped FuncLit in dead block + // (in principle; but currently the Builder + // generates even dead FuncLits). + return nil + } + } + return fn +} + +// HasEnclosingFunction returns true if the AST node denoted by path +// is contained within the declaration of some function or +// package-level variable. +// +// Unlike EnclosingFunction, the behaviour of this function does not +// depend on whether SSA code for pkg has been built, so it can be +// used to quickly reject check inputs that will cause +// EnclosingFunction to fail, prior to SSA building. +// +func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { + return findEnclosingPackageLevelFunction(pkg, path) != nil +} + +// findEnclosingPackageLevelFunction returns the Function +// corresponding to the package-level function enclosing path. +// +func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { + if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] + switch decl := path[n-2].(type) { + case *ast.GenDecl: + if decl.Tok == token.VAR && n >= 3 { + // Package-level 'var' initializer. + return pkg.init + } + + case *ast.FuncDecl: + if decl.Recv == nil && decl.Name.Name == "init" { + // Explicit init() function. + for _, b := range pkg.init.Blocks { + for _, instr := range b.Instrs { + if instr, ok := instr.(*Call); ok { + if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos { + return callee + } + } + } + } + // Hack: return non-nil when SSA is not yet + // built so that HasEnclosingFunction works. + return pkg.init + } + // Declared function/method. 
+ return findNamedFunc(pkg, decl.Name.NamePos) + } + } + return nil // not in any function +} + +// findNamedFunc returns the named function whose FuncDecl.Ident is at +// position pos. +// +func findNamedFunc(pkg *Package, pos token.Pos) *Function { + // Look at all package members and method sets of named types. + // Not very efficient. + for _, mem := range pkg.Members { + switch mem := mem.(type) { + case *Function: + if mem.Pos() == pos { + return mem + } + case *Type: + mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type())) + for i, n := 0, mset.Len(); i < n; i++ { + // Don't call Program.Method: avoid creating wrappers. + obj := mset.At(i).Obj().(*types.Func) + if obj.Pos() == pos { + return pkg.values[obj].(*Function) + } + } + } + } + return nil +} + +// ValueForExpr returns the SSA Value that corresponds to non-constant +// expression e. +// +// It returns nil if no value was found, e.g. +// - the expression is not lexically contained within f; +// - f was not built with debug information; or +// - e is a constant expression. (For efficiency, no debug +// information is stored for constants. Use +// go/types.Info.Types[e].Value instead.) +// - e is a reference to nil or a built-in function. +// - the value was optimised away. +// +// If e is an addressable expression used in an lvalue context, +// value is the address denoted by e, and isAddr is true. +// +// The types of e (or &e, if isAddr) and the result are equal +// (modulo "untyped" bools resulting from comparisons). +// +// (Tip: to find the ssa.Value given a source position, use +// importer.PathEnclosingInterval to locate the ast.Node, then +// EnclosingFunction to locate the Function, then ValueForExpr to find +// the ssa.Value.) 
+// +func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { + if f.debugInfo() { // (opt) + e = unparen(e) + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if ref, ok := instr.(*DebugRef); ok { + if ref.Expr == e { + return ref.X, ref.IsAddr + } + } + } + } + } + return +} + +// --- Lookup functions for source-level named entities (types.Objects) --- + +// Package returns the SSA Package corresponding to the specified +// type-checker package object. +// It returns nil if no such SSA package has been created. +// +func (prog *Program) Package(obj *types.Package) *Package { + return prog.packages[obj] +} + +// packageLevelValue returns the package-level value corresponding to +// the specified named object, which may be a package-level const +// (*Const), var (*Global) or func (*Function) of some package in +// prog. It returns nil if the object is not found. +// +func (prog *Program) packageLevelValue(obj types.Object) Value { + if pkg, ok := prog.packages[obj.Pkg()]; ok { + return pkg.values[obj] + } + return nil +} + +// FuncValue returns the concrete Function denoted by the source-level +// named function obj, or nil if obj denotes an interface method. +// +// TODO(adonovan): check the invariant that obj.Type() matches the +// result's Signature, both in the params/results and in the receiver. +// +func (prog *Program) FuncValue(obj *types.Func) *Function { + fn, _ := prog.packageLevelValue(obj).(*Function) + return fn +} + +// ConstValue returns the SSA Value denoted by the source-level named +// constant obj. +// +func (prog *Program) ConstValue(obj *types.Const) *Const { + // TODO(adonovan): opt: share (don't reallocate) + // Consts for const objects and constant ast.Exprs. + + // Universal constant? {true,false,nil} + if obj.Parent() == types.Universe { + return NewConst(obj.Val(), obj.Type()) + } + // Package-level named constant? 
+ if v := prog.packageLevelValue(obj); v != nil { + return v.(*Const) + } + return NewConst(obj.Val(), obj.Type()) +} + +// VarValue returns the SSA Value that corresponds to a specific +// identifier denoting the source-level named variable obj. +// +// VarValue returns nil if a local variable was not found, perhaps +// because its package was not built, the debug information was not +// requested during SSA construction, or the value was optimized away. +// +// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), +// and that ident must resolve to obj. +// +// pkg is the package enclosing the reference. (A reference to a var +// always occurs within a function, so we need to know where to find it.) +// +// If the identifier is a field selector and its base expression is +// non-addressable, then VarValue returns the value of that field. +// For example: +// func f() struct {x int} +// f().x // VarValue(x) returns a *Field instruction of type int +// +// All other identifiers denote addressable locations (variables). +// For them, VarValue may return either the variable's address or its +// value, even when the expression is evaluated only for its value; the +// situation is reported by isAddr, the second component of the result. +// +// If !isAddr, the returned value is the one associated with the +// specific identifier. For example, +// var x int // VarValue(x) returns Const 0 here +// x = 1 // VarValue(x) returns Const 1 here +// +// It is not specified whether the value or the address is returned in +// any particular case, as it may depend upon optimizations performed +// during SSA code generation, such as registerization, constant +// folding, avoidance of materialization of subexpressions, etc. +// +func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { + // All references to a var are local to some function, possibly init. + fn := EnclosingFunction(pkg, ref) + if fn == nil { + return // e.g. 
def of struct field; SSA not built? + } + + id := ref[0].(*ast.Ident) + + // Defining ident of a parameter? + if id.Pos() == obj.Pos() { + for _, param := range fn.Params { + if param.Object() == obj { + return param, false + } + } + } + + // Other ident? + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + if dr, ok := instr.(*DebugRef); ok { + if dr.Pos() == id.Pos() { + return dr.X, dr.IsAddr + } + } + } + } + + // Defining ident of package-level var? + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Global), true + } + + return // e.g. debug info not requested, or var optimized away +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go new file mode 100644 index 000000000000..78272c546a1f --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssa.go @@ -0,0 +1,1695 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This package defines a high-level intermediate representation for +// Go programs using static single-assignment (SSA) form. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "sync" + + "golang.org/x/tools/go/types/typeutil" +) + +// A Program is a partial or complete Go program converted to SSA form. 
+type Program struct { + Fset *token.FileSet // position information for the files of this Program + imported map[string]*Package // all importable Packages, keyed by import path + packages map[*types.Package]*Package // all loaded Packages, keyed by object + mode BuilderMode // set of mode bits for SSA construction + MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets + + methodsMu sync.Mutex // guards the following maps: + methodSets typeutil.Map // maps type to its concrete methodSet + runtimeTypes typeutil.Map // types for which rtypes are needed + canon typeutil.Map // type canonicalization map + bounds map[*types.Func]*Function // bounds for curried x.Method closures + thunks map[selectionKey]*Function // thunks for T.Method expressions +} + +// A Package is a single analyzed Go package containing Members for +// all package-level functions, variables, constants and types it +// declares. These may be accessed directly via Members, or via the +// type-specific accessor methods Func, Type, Var and Const. +// +// Members also contains entries for "init" (the synthetic package +// initializer) and "init#%d", the nth declared init function, +// and unspecified other things too. +// +type Package struct { + Prog *Program // the owning program + Pkg *types.Package // the corresponding go/types.Package + Members map[string]Member // all package members keyed by name (incl. init and init#%d) + values map[types.Object]Value // package members (incl. types and methods), keyed by object + init *Function // Func("init"); the package's init function + debug bool // include full debug info in this package + + // The following fields are set transiently, then cleared + // after building. 
+ buildOnce sync.Once // ensures package building occurs once + ninit int32 // number of init functions + info *types.Info // package type information + files []*ast.File // package ASTs +} + +// A Member is a member of a Go package, implemented by *NamedConst, +// *Global, *Function, or *Type; they are created by package-level +// const, var, func and type declarations respectively. +// +type Member interface { + Name() string // declared name of the package member + String() string // package-qualified name of the package member + RelString(*types.Package) string // like String, but relative refs are unqualified + Object() types.Object // typechecker's object for this member, if any + Pos() token.Pos // position of member's declaration, if known + Type() types.Type // type of the package member + Token() token.Token // token.{VAR,FUNC,CONST,TYPE} + Package() *Package // the containing package +} + +// A Type is a Member of a Package representing a package-level named type. +type Type struct { + object *types.TypeName + pkg *Package +} + +// A NamedConst is a Member of a Package representing a package-level +// named constant. +// +// Pos() returns the position of the declaring ast.ValueSpec.Names[*] +// identifier. +// +// NB: a NamedConst is not a Value; it contains a constant Value, which +// it augments with the name and position of its 'const' declaration. +// +type NamedConst struct { + object *types.Const + Value *Const + pkg *Package +} + +// A Value is an SSA value that can be referenced by an instruction. +type Value interface { + // Name returns the name of this value, and determines how + // this Value appears when used as an operand of an + // Instruction. + // + // This is the same as the source name for Parameters, + // Builtins, Functions, FreeVars, Globals. + // For constants, it is a representation of the constant's value + // and type. For all other Values this is the name of the + // virtual register defined by the instruction. 
+ // + // The name of an SSA Value is not semantically significant, + // and may not even be unique within a function. + Name() string + + // If this value is an Instruction, String returns its + // disassembled form; otherwise it returns unspecified + // human-readable information about the Value, such as its + // kind, name and type. + String() string + + // Type returns the type of this value. Many instructions + // (e.g. IndexAddr) change their behaviour depending on the + // types of their operands. + Type() types.Type + + // Parent returns the function to which this Value belongs. + // It returns nil for named Functions, Builtin, Const and Global. + Parent() *Function + + // Referrers returns the list of instructions that have this + // value as one of their operands; it may contain duplicates + // if an instruction has a repeated operand. + // + // Referrers actually returns a pointer through which the + // caller may perform mutations to the object's state. + // + // Referrers is currently only defined if Parent()!=nil, + // i.e. for the function-local values FreeVar, Parameter, + // Functions (iff anonymous) and all value-defining instructions. + // It returns nil for named Functions, Builtin, Const and Global. + // + // Instruction.Operands contains the inverse of this relation. + Referrers() *[]Instruction + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this value, + // or token.NoPos if it was not explicit in the source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Lparen + // for an *ast.CallExpr. This permits a compact but + // approximate mapping from Values to source positions for use + // in diagnostic messages, for example. + // + // (Do not use this position to determine which Value + // corresponds to an ast.Expr; use Function.ValueForExpr + // instead. 
NB: it requires that the function was built with + // debug information.) + Pos() token.Pos +} + +// An Instruction is an SSA instruction that computes a new Value or +// has some effect. +// +// An Instruction that defines a value (e.g. BinOp) also implements +// the Value interface; an Instruction that only has an effect (e.g. Store) +// does not. +// +type Instruction interface { + // String returns the disassembled form of this value. + // + // Examples of Instructions that are Values: + // "x + y" (BinOp) + // "len([])" (Call) + // Note that the name of the Value is not printed. + // + // Examples of Instructions that are not Values: + // "return x" (Return) + // "*y = x" (Store) + // + // (The separation Value.Name() from Value.String() is useful + // for some analyses which distinguish the operation from the + // value it defines, e.g., 'y = local int' is both an allocation + // of memory 'local int' and a definition of a pointer y.) + String() string + + // Parent returns the function to which this instruction + // belongs. + Parent() *Function + + // Block returns the basic block to which this instruction + // belongs. + Block() *BasicBlock + + // setBlock sets the basic block to which this instruction belongs. + setBlock(*BasicBlock) + + // Operands returns the operands of this instruction: the + // set of Values it references. + // + // Specifically, it appends their addresses to rands, a + // user-provided slice, and returns the resulting slice, + // permitting avoidance of memory allocation. + // + // The operands are appended in undefined order, but the order + // is consistent for a given Instruction; the addresses are + // always non-nil but may point to a nil Value. Clients may + // store through the pointers, e.g. to effect a value + // renaming. + // + // Value.Referrers is a subset of the inverse of this + // relation. (Referrers are not tracked for all types of + // Values.) 
+ Operands(rands []*Value) []*Value + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this + // instruction, or token.NoPos if it was not explicit in the + // source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Go token + // for an *ast.GoStmt. This permits a compact but approximate + // mapping from Instructions to source positions for use in + // diagnostic messages, for example. + // + // (Do not use this position to determine which Instruction + // corresponds to an ast.Expr; see the notes for Value.Pos. + // This position may be used to determine which non-Value + // Instruction corresponds to some ast.Stmts, but not all: If + // and Jump instructions have no Pos(), for example.) + Pos() token.Pos +} + +// A Node is a node in the SSA value graph. Every concrete type that +// implements Node is also either a Value, an Instruction, or both. +// +// Node contains the methods common to Value and Instruction, plus the +// Operands and Referrers methods generalized to return nil for +// non-Instructions and non-Values, respectively. +// +// Node is provided to simplify SSA graph algorithms. Clients should +// use the more specific and informative Value or Instruction +// interfaces where appropriate. +// +type Node interface { + // Common methods: + String() string + Pos() token.Pos + Parent() *Function + + // Partial methods: + Operands(rands []*Value) []*Value // nil for non-Instructions + Referrers() *[]Instruction // nil for non-Values +} + +// Function represents the parameters, results, and code of a function +// or method. +// +// If Blocks is nil, this indicates an external function for which no +// Go source code is available. In this case, FreeVars and Locals +// are nil too. Clients performing whole-program analysis must +// handle external functions specially. 
+// +// Blocks contains the function's control-flow graph (CFG). +// Blocks[0] is the function entry point; block order is not otherwise +// semantically significant, though it may affect the readability of +// the disassembly. +// To iterate over the blocks in dominance order, use DomPreorder(). +// +// Recover is an optional second entry point to which control resumes +// after a recovered panic. The Recover block may contain only a return +// statement, preceded by a load of the function's named return +// parameters, if any. +// +// A nested function (Parent()!=nil) that refers to one or more +// lexically enclosing local variables ("free variables") has FreeVars. +// Such functions cannot be called directly but require a +// value created by MakeClosure which, via its Bindings, supplies +// values for these parameters. +// +// If the function is a method (Signature.Recv() != nil) then the first +// element of Params is the receiver parameter. +// +// A Go package may declare many functions called "init". +// For each one, Object().Name() returns "init" but Name() returns +// "init#1", etc, in declaration order. +// +// Pos() returns the declaring ast.FuncLit.Type.Func or the position +// of the ast.FuncDecl.Name, if the function was explicit in the +// source. Synthetic wrappers, for which Synthetic != "", may share +// the same position as the function they wrap. +// Syntax.Pos() always returns the position of the declaring "func" token. +// +// Type() returns the function's Signature. 
+// +type Function struct { + name string + object types.Object // a declared *types.Func or one of its wrappers + method *types.Selection // info about provenance of synthetic methods + Signature *types.Signature + pos token.Pos + + Synthetic string // provenance of synthetic function; "" for true source functions + syntax ast.Node // *ast.Func{Decl,Lit}; replaced with simple ast.Node after build, unless debug mode + parent *Function // enclosing function if anon; nil if global + Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) + Prog *Program // enclosing program + Params []*Parameter // function parameters; for methods, includes receiver + FreeVars []*FreeVar // free variables whose values must be supplied by closure + Locals []*Alloc // local variables of this function + Blocks []*BasicBlock // basic blocks of the function; nil => external + Recover *BasicBlock // optional; control transfers here after recovered panic + AnonFuncs []*Function // anonymous functions directly beneath this one + referrers []Instruction // referring instructions (iff Parent() != nil) + + // The following fields are set transiently during building, + // then cleared. + currentBlock *BasicBlock // where to emit code + objects map[types.Object]Value // addresses of local variables + namedResults []*Alloc // tuple of named results + targets *targets // linked stack of branch targets + lblocks map[*ast.Object]*lblock // labelled blocks +} + +// BasicBlock represents an SSA basic block. +// +// The final element of Instrs is always an explicit transfer of +// control (If, Jump, Return, or Panic). +// +// A block may contain no Instructions only if it is unreachable, +// i.e., Preds is nil. Empty blocks are typically pruned. +// +// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) +// graph independent of the SSA Value graph: the control-flow graph or +// CFG. It is illegal for multiple edges to exist between the same +// pair of blocks. 
+// +// Each BasicBlock is also a node in the dominator tree of the CFG. +// The tree may be navigated using Idom()/Dominees() and queried using +// Dominates(). +// +// The order of Preds and Succs is significant (to Phi and If +// instructions, respectively). +// +type BasicBlock struct { + Index int // index of this block within Parent().Blocks + Comment string // optional label; no semantic significance + parent *Function // parent function + Instrs []Instruction // instructions in order + Preds, Succs []*BasicBlock // predecessors and successors + succs2 [2]*BasicBlock // initial space for Succs + dom domInfo // dominator tree info + gaps int // number of nil Instrs (transient) + rundefers int // number of rundefers (transient) +} + +// Pure values ---------------------------------------- + +// A FreeVar represents a free variable of the function to which it +// belongs. +// +// FreeVars are used to implement anonymous functions, whose free +// variables are lexically captured in a closure formed by +// MakeClosure. The value of such a free var is an Alloc or another +// FreeVar and is considered a potentially escaping heap address, with +// pointer type. +// +// FreeVars are also used to implement bound method closures. Such a +// free var represents the receiver value and may be of any type that +// has concrete methods. +// +// Pos() returns the position of the value that was captured, which +// belongs to an enclosing function. +// +type FreeVar struct { + name string + typ types.Type + pos token.Pos + parent *Function + referrers []Instruction + + // Transiently needed during building. + outer Value // the Value captured from the enclosing context. +} + +// A Parameter represents an input parameter of a function. 
+// +type Parameter struct { + name string + object types.Object // a *types.Var; nil for non-source locals + typ types.Type + pos token.Pos + parent *Function + referrers []Instruction +} + +// A Const represents the value of a constant expression. +// +// The underlying type of a constant may be any boolean, numeric, or +// string type. In addition, a Const may represent the nil value of +// any reference type---interface, map, channel, pointer, slice, or +// function---but not "untyped nil". +// +// All source-level constant expressions are represented by a Const +// of the same type and value. +// +// Value holds the value of the constant, independent of its Type(), +// using go/constant representation, or nil for a typed nil value. +// +// Pos() returns token.NoPos. +// +// Example printed form: +// 42:int +// "hello":untyped string +// 3+4i:MyComplex +// +type Const struct { + typ types.Type + Value constant.Value +} + +// A Global is a named Value holding the address of a package-level +// variable. +// +// Pos() returns the position of the ast.ValueSpec.Names[*] +// identifier. +// +type Global struct { + name string + object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard + typ types.Type + pos token.Pos + + Pkg *Package +} + +// A Builtin represents a specific use of a built-in function, e.g. len. +// +// Builtins are immutable values. Builtins do not have addresses. +// Builtins can only appear in CallCommon.Func. +// +// Name() indicates the function: one of the built-in functions from the +// Go spec (excluding "make" and "new") or one of these ssa-defined +// intrinsics: +// +// // wrapnilchk returns ptr if non-nil, panics otherwise. +// // (For use in indirection wrappers.) +// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T +// +// Object() returns a *types.Builtin for built-ins defined by the spec, +// nil for others. 
+// +// Type() returns a *types.Signature representing the effective +// signature of the built-in for this call. +// +type Builtin struct { + name string + sig *types.Signature +} + +// Value-defining instructions ---------------------------------------- + +// The Alloc instruction reserves space for a variable of the given type, +// zero-initializes it, and yields its address. +// +// Alloc values are always addresses, and have pointer types, so the +// type of the allocated variable is actually +// Type().Underlying().(*types.Pointer).Elem(). +// +// If Heap is false, Alloc allocates space in the function's +// activation record (frame); we refer to an Alloc(Heap=false) as a +// "local" alloc. Each local Alloc returns the same address each time +// it is executed within the same activation; the space is +// re-initialized to zero. +// +// If Heap is true, Alloc allocates space in the heap; we +// refer to an Alloc(Heap=true) as a "new" alloc. Each new Alloc +// returns a different address each time it is executed. +// +// When Alloc is applied to a channel, map or slice type, it returns +// the address of an uninitialized (nil) reference of that kind; store +// the result of MakeSlice, MakeMap or MakeChan in that location to +// instantiate these types. +// +// Pos() returns the ast.CompositeLit.Lbrace for a composite literal, +// or the ast.CallExpr.Rparen for a call to new() or for a call that +// allocates a varargs slice. +// +// Example printed form: +// t0 = local int +// t1 = new int +// +type Alloc struct { + register + Comment string + Heap bool + index int // dense numbering; for lifting +} + +// The Phi instruction represents an SSA φ-node, which combines values +// that differ across incoming control-flow edges and yields a new +// value. Within a block, all φ-nodes must appear before all non-φ +// nodes. 
+// +// Pos() returns the position of the && or || for short-circuit +// control-flow joins, or that of the *Alloc for φ-nodes inserted +// during SSA renaming. +// +// Example printed form: +// t2 = phi [0: t0, 1: t1] +// +type Phi struct { + register + Comment string // a hint as to its purpose + Edges []Value // Edges[i] is value for Block().Preds[i] +} + +// The Call instruction represents a function or method call. +// +// The Call instruction yields the function result if there is exactly +// one. Otherwise it returns a tuple, the components of which are +// accessed via Extract. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.CallExpr.Lparen, if explicit in the source. +// +// Example printed form: +// t2 = println(t0, t1) +// t4 = t3() +// t7 = invoke t5.Println(...t6) +// +type Call struct { + register + Call CallCommon +} + +// The BinOp instruction yields the result of binary operation X Op Y. +// +// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. +// +// Example printed form: +// t1 = t0 + 1:int +// +type BinOp struct { + register + // One of: + // ADD SUB MUL QUO REM + - * / % + // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ + // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= + Op token.Token + X, Y Value +} + +// The UnOp instruction yields the result of Op X. +// ARROW is channel receive. +// MUL is pointer indirection (load). +// XOR is bitwise complement. +// SUB is negation. +// NOT is logical negation. +// +// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above +// and a boolean indicating the success of the receive. The +// components of the tuple are accessed using Extract. +// +// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source. +// For receive operations (ARROW) implicit in ranging over a channel, +// Pos() returns the ast.RangeStmt.For. 
+// For implicit memory loads (STAR), Pos() returns the position of the +// most closely associated source-level construct; the details are not +// specified. +// +// Example printed form: +// t0 = *x +// t2 = <-t1,ok +// +type UnOp struct { + register + Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^ + X Value + CommaOk bool +} + +// The ChangeType instruction applies to X a value-preserving type +// change to Type(). +// +// Type changes are permitted: +// - between a named type and its underlying type. +// - between two named types of the same underlying type. +// - between (possibly named) pointers to identical base types. +// - from a bidirectional channel to a read- or write-channel, +// optionally adding/removing a name. +// +// This operation cannot fail dynamically. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = changetype *int <- IntPtr (t0) +// +type ChangeType struct { + register + X Value +} + +// The Convert instruction yields the conversion of value X to type +// Type(). One or both of those types is basic (but possibly named). +// +// A conversion may change the value and representation of its operand. +// Conversions are permitted: +// - between real numeric types. +// - between complex numeric types. +// - between string and []byte or []rune. +// - between pointers and unsafe.Pointer. +// - between unsafe.Pointer and uintptr. +// - from (Unicode) integer to (UTF-8) string. +// A conversion may imply a type name change also. +// +// This operation cannot fail dynamically. +// +// Conversions of untyped string/number/bool constants to a specific +// representation are eliminated during SSA construction. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. 
+// +// Example printed form: +// t1 = convert []byte <- string (t0) +// +type Convert struct { + register + X Value +} + +// ChangeInterface constructs a value of one interface type from a +// value of another interface type known to be assignable to it. +// This operation cannot fail. +// +// Pos() returns the ast.CallExpr.Lparen if the instruction arose from +// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the +// instruction arose from an explicit e.(T) operation; or token.NoPos +// otherwise. +// +// Example printed form: +// t1 = change interface interface{} <- I (t0) +// +type ChangeInterface struct { + register + X Value +} + +// MakeInterface constructs an instance of an interface type from a +// value of a concrete type. +// +// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set +// of X, and Program.MethodValue(m) to find the implementation of a method. +// +// To construct the zero value of an interface type T, use: +// NewConst(constant.MakeNil(), T, pos) +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = make interface{} <- int (42:int) +// t2 = make Stringer <- t0 +// +type MakeInterface struct { + register + X Value +} + +// The MakeClosure instruction yields a closure value whose code is +// Fn and whose free variables' values are supplied by Bindings. +// +// Type() returns a (possibly named) *types.Signature. +// +// Pos() returns the ast.FuncLit.Type.Func for a function literal +// closure or the ast.SelectorExpr.Sel for a bound method closure. +// +// Example printed form: +// t0 = make closure anon@1.2 [x y z] +// t1 = make closure bound$(main.I).add [i] +// +type MakeClosure struct { + register + Fn Value // always a *Function + Bindings []Value // values for each free variable in Fn.FreeVars +} + +// The MakeMap instruction creates a new hash-table-based map object +// and yields a value of kind map. 
+// +// Type() returns a (possibly named) *types.Map. +// +// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or +// the ast.CompositeLit.Lbrack if created by a literal. +// +// Example printed form: +// t1 = make map[string]int t0 +// t1 = make StringIntMap t0 +// +type MakeMap struct { + register + Reserve Value // initial space reservation; nil => default +} + +// The MakeChan instruction creates a new channel object and yields a +// value of kind chan. +// +// Type() returns a (possibly named) *types.Chan. +// +// Pos() returns the ast.CallExpr.Lparen for the make(chan) that +// created it. +// +// Example printed form: +// t0 = make chan int 0 +// t0 = make IntChan 0 +// +type MakeChan struct { + register + Size Value // int; size of buffer; zero => synchronous. +} + +// The MakeSlice instruction yields a slice of length Len backed by a +// newly allocated array of length Cap. +// +// Both Len and Cap must be non-nil Values of integer type. +// +// (Alloc(types.Array) followed by Slice will not suffice because +// Alloc can only create arrays of constant length.) +// +// Type() returns a (possibly named) *types.Slice. +// +// Pos() returns the ast.CallExpr.Lparen for the make([]T) that +// created it. +// +// Example printed form: +// t1 = make []string 1:int t0 +// t1 = make StringSlice 1:int t0 +// +type MakeSlice struct { + register + Len Value + Cap Value +} + +// The Slice instruction yields a slice of an existing string, slice +// or *array X between optional integer bounds Low and High. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns string if the type of X was string, otherwise a +// *types.Slice with the same element type as X. +// +// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice +// operation, the ast.CompositeLit.Lbrace if created by a literal, or +// NoPos if not explicit in the source (e.g. a variadic argument slice). 
+// +// Example printed form: +// t1 = slice t0[1:] +// +type Slice struct { + register + X Value // slice, string, or *array + Low, High, Max Value // each may be nil +} + +// The FieldAddr instruction yields the address of Field of *struct X. +// +// The field is identified by its index within the field list of the +// struct type of X. +// +// Dynamically, this instruction panics if X evaluates to a nil +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t1 = &t0.name [#1] +// +type FieldAddr struct { + register + X Value // *struct + Field int // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field) +} + +// The Field instruction yields the Field of struct X. +// +// The field is identified by its index within the field list of the +// struct type of X; by using numeric indices we avoid ambiguity of +// package-local identifiers and permit compact representations. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t1 = t0.name [#1] +// +type Field struct { + register + X Value // struct + Field int // index into X.Type().(*types.Struct).Fields +} + +// The IndexAddr instruction yields the address of the element at +// index Index of collection X. Index is an integer expression. +// +// The elements of maps and strings are not addressable; use Lookup or +// MapUpdate instead. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if +// explicit in the source. 
+// +// Example printed form: +// t2 = &t0[t1] +// +type IndexAddr struct { + register + X Value // slice or *array, + Index Value // numeric index +} + +// The Index instruction yields element Index of array X. +// +// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if +// explicit in the source. +// +// Example printed form: +// t2 = t0[t1] +// +type Index struct { + register + X Value // array + Index Value // integer index +} + +// The Lookup instruction yields element Index of collection X, a map +// or string. Index is an integer expression if X is a string or the +// appropriate key type if X is a map. +// +// If CommaOk, the result is a 2-tuple of the value above and a +// boolean indicating the result of a map membership test for the key. +// The components of the tuple are accessed using Extract. +// +// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. +// +// Example printed form: +// t2 = t0[t1] +// t5 = t3[t4],ok +// +type Lookup struct { + register + X Value // string or map + Index Value // numeric or key-typed index + CommaOk bool // return a value,ok pair +} + +// SelectState is a helper for Select. +// It represents one goal state and its corresponding communication. +// +type SelectState struct { + Dir types.ChanDir // direction of case (SendOnly or RecvOnly) + Chan Value // channel to use (for send or receive) + Send Value // value to send (for send) + Pos token.Pos // position of token.ARROW + DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode] +} + +// The Select instruction tests whether (or blocks until) one +// of the specified sent or received states is entered. +// +// Let n be the number of States for which Dir==RECV and T_i (0<=i string iterator; false => map iterator. +} + +// The TypeAssert instruction tests whether interface value X has type +// AssertedType. +// +// If !CommaOk, on success it returns v, the result of the conversion +// (defined below); on failure it panics. 
+// +// If CommaOk: on success it returns a pair (v, true) where v is the +// result of the conversion; on failure it returns (z, false) where z +// is AssertedType's zero value. The components of the pair must be +// accessed using the Extract instruction. +// +// If AssertedType is a concrete type, TypeAssert checks whether the +// dynamic type in interface X is equal to it, and if so, the result +// of the conversion is a copy of the value in the interface. +// +// If AssertedType is an interface, TypeAssert checks whether the +// dynamic type of the interface is assignable to it, and if so, the +// result of the conversion is a copy of the interface value X. +// If AssertedType is a superinterface of X.Type(), the operation will +// fail iff the operand is nil. (Contrast with ChangeInterface, which +// performs no nil-check.) +// +// Type() reflects the actual type of the result, possibly a +// 2-types.Tuple; AssertedType is the asserted type. +// +// Pos() returns the ast.CallExpr.Lparen if the instruction arose from +// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the +// instruction arose from an explicit e.(T) operation; or the +// ast.CaseClause.Case if the instruction arose from a case of a +// type-switch statement. +// +// Example printed form: +// t1 = typeassert t0.(int) +// t3 = typeassert,ok t2.(T) +// +type TypeAssert struct { + register + X Value + AssertedType types.Type + CommaOk bool +} + +// The Extract instruction yields component Index of Tuple. +// +// This is used to access the results of instructions with multiple +// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and +// IndexExpr(Map). +// +// Example printed form: +// t1 = extract t0 #1 +// +type Extract struct { + register + Tuple Value + Index int +} + +// Instructions executed for effect. They do not yield a value. -------------------- + +// The Jump instruction transfers control to the sole successor of its +// owning block. 
+// +// A Jump must be the last instruction of its containing BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// jump done +// +type Jump struct { + anInstruction +} + +// The If instruction transfers control to one of the two successors +// of its owning block, depending on the boolean Cond: the first if +// true, the second if false. +// +// An If instruction must be the last instruction of its containing +// BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// if t0 goto done else body +// +type If struct { + anInstruction + Cond Value +} + +// The Return instruction returns values and control back to the calling +// function. +// +// len(Results) is always equal to the number of results in the +// function's signature. +// +// If len(Results) > 1, Return returns a tuple value with the specified +// components which the caller must access using Extract instructions. +// +// There is no instruction to return a ready-made tuple like those +// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or +// a tail-call to a function with multiple result parameters. +// +// Return must be the last instruction of its containing BasicBlock. +// Such a block has no successors. +// +// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. +// +// Example printed form: +// return +// return nil:I, 2:int +// +type Return struct { + anInstruction + Results []Value + pos token.Pos +} + +// The RunDefers instruction pops and invokes the entire stack of +// procedure calls pushed by Defer instructions in this function. +// +// It is legal to encounter multiple 'rundefers' instructions in a +// single control-flow path through a function; this is useful in +// the combined init() function, for example. +// +// Pos() returns NoPos. +// +// Example printed form: +// rundefers +// +type RunDefers struct { + anInstruction +} + +// The Panic instruction initiates a panic with value X. 
+// +// A Panic instruction must be the last instruction of its containing +// BasicBlock, which must have no successors. +// +// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; +// they are treated as calls to a built-in function. +// +// Pos() returns the ast.CallExpr.Lparen if this panic was explicit +// in the source. +// +// Example printed form: +// panic t0 +// +type Panic struct { + anInstruction + X Value // an interface{} + pos token.Pos +} + +// The Go instruction creates a new goroutine and calls the specified +// function within it. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.GoStmt.Go. +// +// Example printed form: +// go println(t0, t1) +// go t3() +// go invoke t5.Println(...t6) +// +type Go struct { + anInstruction + Call CallCommon + pos token.Pos +} + +// The Defer instruction pushes the specified call onto a stack of +// functions to be called by a RunDefers instruction or by a panic. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.DeferStmt.Defer. +// +// Example printed form: +// defer println(t0, t1) +// defer t3() +// defer invoke t5.Println(...t6) +// +type Defer struct { + anInstruction + Call CallCommon + pos token.Pos +} + +// The Send instruction sends X on channel Chan. +// +// Pos() returns the ast.SendStmt.Arrow, if explicit in the source. +// +// Example printed form: +// send t0 <- t1 +// +type Send struct { + anInstruction + Chan, X Value + pos token.Pos +} + +// The Store instruction stores Val at address Addr. +// Stores can be of arbitrary types. +// +// Pos() returns the position of the source-level construct most closely +// associated with the memory store operation. +// Since implicit memory stores are numerous and varied and depend upon +// implementation choices, the details are not specified. 
+// +// Example printed form: +// *x = y +// +type Store struct { + anInstruction + Addr Value + Val Value + pos token.Pos +} + +// The MapUpdate instruction updates the association of Map[Key] to +// Value. +// +// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack, +// if explicit in the source. +// +// Example printed form: +// t0[t1] = t2 +// +type MapUpdate struct { + anInstruction + Map Value + Key Value + Value Value + pos token.Pos +} + +// A DebugRef instruction maps a source-level expression Expr to the +// SSA value X that represents the value (!IsAddr) or address (IsAddr) +// of that expression. +// +// DebugRef is a pseudo-instruction: it has no dynamic effect. +// +// Pos() returns Expr.Pos(), the start position of the source-level +// expression. This is not the same as the "designated" token as +// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the +// position of the ("designated") Lparen token. +// +// If Expr is an *ast.Ident denoting a var or func, Object() returns +// the object; though this information can be obtained from the type +// checker, including it here greatly facilitates debugging. +// For non-Ident expressions, Object() returns nil. +// +// DebugRefs are generated only for functions built with debugging +// enabled; see Package.SetDebugMode() and the GlobalDebug builder +// mode flag. +// +// DebugRefs are not emitted for ast.Idents referring to constants or +// predeclared identifiers, since they are trivial and numerous. +// Nor are they emitted for ast.ParenExprs. +// +// (By representing these as instructions, rather than out-of-band, +// consistency is maintained during transformation passes by the +// ordinary SSA renaming machinery.) 
+// +// Example printed form: +// ; *ast.CallExpr @ 102:9 is t5 +// ; var x float64 @ 109:72 is x +// ; address of *ast.CompositeLit @ 216:10 is t0 +// +type DebugRef struct { + anInstruction + Expr ast.Expr // the referring expression (never *ast.ParenExpr) + object types.Object // the identity of the source var/func + IsAddr bool // Expr is addressable and X is the address it denotes + X Value // the value or address of Expr +} + +// Embeddable mix-ins and helpers for common parts of other structs. ----------- + +// register is a mix-in embedded by all SSA values that are also +// instructions, i.e. virtual registers, and provides a uniform +// implementation of most of the Value interface: Value.Name() is a +// numbered register (e.g. "t0"); the other methods are field accessors. +// +// Temporary names are automatically assigned to each register on +// completion of building a function in SSA form. +// +// Clients must not assume that the 'id' value (and the Name() derived +// from it) is unique within a function. As always in this API, +// semantics are determined only by identity; names exist only to +// facilitate debugging. +// +type register struct { + anInstruction + num int // "name" of virtual register, e.g. "t0". Not guaranteed unique. + typ types.Type // type of virtual register + pos token.Pos // position of source expression, or NoPos + referrers []Instruction +} + +// anInstruction is a mix-in embedded by all Instructions. +// It provides the implementations of the Block and setBlock methods. +type anInstruction struct { + block *BasicBlock // the basic block of this instruction +} + +// CallCommon is contained by Go, Defer and Call to hold the +// common parts of a function or method call. +// +// Each CallCommon exists in one of two modes, function call and +// interface method invocation, or "call" and "invoke" for short. +// +// 1. 
"call" mode: when Method is nil (!IsInvoke), a CallCommon +// represents an ordinary function call of the value in Value, +// which may be a *Builtin, a *Function or any other value of kind +// 'func'. +// +// Value may be one of: +// (a) a *Function, indicating a statically dispatched call +// to a package-level function, an anonymous function, or +// a method of a named type. +// (b) a *MakeClosure, indicating an immediately applied +// function literal with free variables. +// (c) a *Builtin, indicating a statically dispatched call +// to a built-in function. +// (d) any other value, indicating a dynamically dispatched +// function call. +// StaticCallee returns the identity of the callee in cases +// (a) and (b), nil otherwise. +// +// Args contains the arguments to the call. If Value is a method, +// Args[0] contains the receiver parameter. +// +// Example printed form: +// t2 = println(t0, t1) +// go t3() +// defer t5(...t6) +// +// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon +// represents a dynamically dispatched call to an interface method. +// In this mode, Value is the interface value and Method is the +// interface's abstract method. Note: an abstract method may be +// shared by multiple interfaces due to embedding; Value.Type() +// provides the specific interface used for this call. +// +// Value is implicitly supplied to the concrete method implementation +// as the receiver parameter; in other words, Args[0] holds not the +// receiver but the first true argument. +// +// Example printed form: +// t1 = invoke t0.String() +// go invoke t3.Run(t2) +// defer invoke t4.Handle(...t5) +// +// For all calls to variadic functions (Signature().Variadic()), +// the last element of Args is a slice. 
+// +type CallCommon struct { + Value Value // receiver (invoke mode) or func value (call mode) + Method *types.Func // abstract method (invoke mode) + Args []Value // actual parameters (in static method call, includes receiver) + pos token.Pos // position of CallExpr.Lparen, iff explicit in source +} + +// IsInvoke returns true if this call has "invoke" (not "call") mode. +func (c *CallCommon) IsInvoke() bool { + return c.Method != nil +} + +func (c *CallCommon) Pos() token.Pos { return c.pos } + +// Signature returns the signature of the called function. +// +// For an "invoke"-mode call, the signature of the interface method is +// returned. +// +// In either "call" or "invoke" mode, if the callee is a method, its +// receiver is represented by sig.Recv, not sig.Params().At(0). +// +func (c *CallCommon) Signature() *types.Signature { + if c.Method != nil { + return c.Method.Type().(*types.Signature) + } + return c.Value.Type().Underlying().(*types.Signature) +} + +// StaticCallee returns the callee if this is a trivially static +// "call"-mode call to a function. +func (c *CallCommon) StaticCallee() *Function { + switch fn := c.Value.(type) { + case *Function: + return fn + case *MakeClosure: + return fn.Fn.(*Function) + } + return nil +} + +// Description returns a description of the mode of this call suitable +// for a user interface, e.g., "static method call". 
+func (c *CallCommon) Description() string { + switch fn := c.Value.(type) { + case *Builtin: + return "built-in function call" + case *MakeClosure: + return "static function closure call" + case *Function: + if fn.Signature.Recv() != nil { + return "static method call" + } + return "static function call" + } + if c.IsInvoke() { + return "dynamic method call" // ("invoke" mode) + } + return "dynamic function call" +} + +// The CallInstruction interface, implemented by *Go, *Defer and *Call, +// exposes the common parts of function-calling instructions, +// yet provides a way back to the Value defined by *Call alone. +// +type CallInstruction interface { + Instruction + Common() *CallCommon // returns the common parts of the call + Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer) +} + +func (s *Call) Common() *CallCommon { return &s.Call } +func (s *Defer) Common() *CallCommon { return &s.Call } +func (s *Go) Common() *CallCommon { return &s.Call } + +func (s *Call) Value() *Call { return s } +func (s *Defer) Value() *Call { return nil } +func (s *Go) Value() *Call { return nil } + +func (v *Builtin) Type() types.Type { return v.sig } +func (v *Builtin) Name() string { return v.name } +func (*Builtin) Referrers() *[]Instruction { return nil } +func (v *Builtin) Pos() token.Pos { return token.NoPos } +func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } +func (v *Builtin) Parent() *Function { return nil } + +func (v *FreeVar) Type() types.Type { return v.typ } +func (v *FreeVar) Name() string { return v.name } +func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } +func (v *FreeVar) Pos() token.Pos { return v.pos } +func (v *FreeVar) Parent() *Function { return v.parent } + +func (v *Global) Type() types.Type { return v.typ } +func (v *Global) Name() string { return v.name } +func (v *Global) Parent() *Function { return nil } +func (v *Global) Pos() token.Pos { return v.pos } +func (v 
*Global) Referrers() *[]Instruction { return nil } +func (v *Global) Token() token.Token { return token.VAR } +func (v *Global) Object() types.Object { return v.object } +func (v *Global) String() string { return v.RelString(nil) } +func (v *Global) Package() *Package { return v.Pkg } +func (v *Global) RelString(from *types.Package) string { return relString(v, from) } + +func (v *Function) Name() string { return v.name } +func (v *Function) Type() types.Type { return v.Signature } +func (v *Function) Pos() token.Pos { return v.pos } +func (v *Function) Token() token.Token { return token.FUNC } +func (v *Function) Object() types.Object { return v.object } +func (v *Function) String() string { return v.RelString(nil) } +func (v *Function) Package() *Package { return v.Pkg } +func (v *Function) Parent() *Function { return v.parent } +func (v *Function) Referrers() *[]Instruction { + if v.parent != nil { + return &v.referrers + } + return nil +} + +func (v *Parameter) Type() types.Type { return v.typ } +func (v *Parameter) Name() string { return v.name } +func (v *Parameter) Object() types.Object { return v.object } +func (v *Parameter) Referrers() *[]Instruction { return &v.referrers } +func (v *Parameter) Pos() token.Pos { return v.pos } +func (v *Parameter) Parent() *Function { return v.parent } + +func (v *Alloc) Type() types.Type { return v.typ } +func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } +func (v *Alloc) Pos() token.Pos { return v.pos } + +func (v *register) Type() types.Type { return v.typ } +func (v *register) setType(typ types.Type) { v.typ = typ } +func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) } +func (v *register) setNum(num int) { v.num = num } +func (v *register) Referrers() *[]Instruction { return &v.referrers } +func (v *register) Pos() token.Pos { return v.pos } +func (v *register) setPos(pos token.Pos) { v.pos = pos } + +func (v *anInstruction) Parent() *Function { return v.block.parent } +func (v 
*anInstruction) Block() *BasicBlock { return v.block } +func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } +func (v *anInstruction) Referrers() *[]Instruction { return nil } + +func (t *Type) Name() string { return t.object.Name() } +func (t *Type) Pos() token.Pos { return t.object.Pos() } +func (t *Type) Type() types.Type { return t.object.Type() } +func (t *Type) Token() token.Token { return token.TYPE } +func (t *Type) Object() types.Object { return t.object } +func (t *Type) String() string { return t.RelString(nil) } +func (t *Type) Package() *Package { return t.pkg } +func (t *Type) RelString(from *types.Package) string { return relString(t, from) } + +func (c *NamedConst) Name() string { return c.object.Name() } +func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } +func (c *NamedConst) String() string { return c.RelString(nil) } +func (c *NamedConst) Type() types.Type { return c.object.Type() } +func (c *NamedConst) Token() token.Token { return token.CONST } +func (c *NamedConst) Object() types.Object { return c.object } +func (c *NamedConst) Package() *Package { return c.pkg } +func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } + +// Func returns the package-level function of the specified name, +// or nil if not found. +// +func (p *Package) Func(name string) (f *Function) { + f, _ = p.Members[name].(*Function) + return +} + +// Var returns the package-level variable of the specified name, +// or nil if not found. +// +func (p *Package) Var(name string) (g *Global) { + g, _ = p.Members[name].(*Global) + return +} + +// Const returns the package-level constant of the specified name, +// or nil if not found. +// +func (p *Package) Const(name string) (c *NamedConst) { + c, _ = p.Members[name].(*NamedConst) + return +} + +// Type returns the package-level type of the specified name, +// or nil if not found. 
+// +func (p *Package) Type(name string) (t *Type) { + t, _ = p.Members[name].(*Type) + return +} + +func (v *Call) Pos() token.Pos { return v.Call.pos } +func (s *Defer) Pos() token.Pos { return s.pos } +func (s *Go) Pos() token.Pos { return s.pos } +func (s *MapUpdate) Pos() token.Pos { return s.pos } +func (s *Panic) Pos() token.Pos { return s.pos } +func (s *Return) Pos() token.Pos { return s.pos } +func (s *Send) Pos() token.Pos { return s.pos } +func (s *Store) Pos() token.Pos { return s.pos } +func (s *If) Pos() token.Pos { return token.NoPos } +func (s *Jump) Pos() token.Pos { return token.NoPos } +func (s *RunDefers) Pos() token.Pos { return token.NoPos } +func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } + +// Operands. + +func (v *Alloc) Operands(rands []*Value) []*Value { + return rands +} + +func (v *BinOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Y) +} + +func (c *CallCommon) Operands(rands []*Value) []*Value { + rands = append(rands, &c.Value) + for i := range c.Args { + rands = append(rands, &c.Args[i]) + } + return rands +} + +func (s *Go) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Call) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Defer) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (v *ChangeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *ChangeType) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Convert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *DebugRef) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Extract) Operands(rands []*Value) []*Value { + return append(rands, &v.Tuple) +} + +func (v *Field) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *FieldAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + 
+func (s *If) Operands(rands []*Value) []*Value { + return append(rands, &s.Cond) +} + +func (v *Index) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *IndexAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (*Jump) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Lookup) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *MakeChan) Operands(rands []*Value) []*Value { + return append(rands, &v.Size) +} + +func (v *MakeClosure) Operands(rands []*Value) []*Value { + rands = append(rands, &v.Fn) + for i := range v.Bindings { + rands = append(rands, &v.Bindings[i]) + } + return rands +} + +func (v *MakeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *MakeMap) Operands(rands []*Value) []*Value { + return append(rands, &v.Reserve) +} + +func (v *MakeSlice) Operands(rands []*Value) []*Value { + return append(rands, &v.Len, &v.Cap) +} + +func (v *MapUpdate) Operands(rands []*Value) []*Value { + return append(rands, &v.Map, &v.Key, &v.Value) +} + +func (v *Next) Operands(rands []*Value) []*Value { + return append(rands, &v.Iter) +} + +func (s *Panic) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Phi) Operands(rands []*Value) []*Value { + for i := range v.Edges { + rands = append(rands, &v.Edges[i]) + } + return rands +} + +func (v *Range) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *Return) Operands(rands []*Value) []*Value { + for i := range s.Results { + rands = append(rands, &s.Results[i]) + } + return rands +} + +func (*RunDefers) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Select) Operands(rands []*Value) []*Value { + for i := range v.States { + rands = append(rands, &v.States[i].Chan, &v.States[i].Send) + } + return rands +} + +func (s *Send) Operands(rands []*Value) []*Value { + return append(rands, 
&s.Chan, &s.X) +} + +func (v *Slice) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Low, &v.High, &v.Max) +} + +func (s *Store) Operands(rands []*Value) []*Value { + return append(rands, &s.Addr, &s.Val) +} + +func (v *TypeAssert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *UnOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +// Non-Instruction Values: +func (v *Builtin) Operands(rands []*Value) []*Value { return rands } +func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } +func (v *Const) Operands(rands []*Value) []*Value { return rands } +func (v *Function) Operands(rands []*Value) []*Value { return rands } +func (v *Global) Operands(rands []*Value) []*Value { return rands } +func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go new file mode 100644 index 000000000000..72710becf8f3 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go @@ -0,0 +1,175 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil + +// This file defines utility functions for constructing programs in SSA form. + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ssa" +) + +// Packages creates an SSA program for a set of packages. +// +// The packages must have been loaded from source syntax using the +// golang.org/x/tools/go/packages.Load function in LoadSyntax or +// LoadAllSyntax mode. +// +// Packages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. 
The resulting list of +// packages corresponds to the list of initial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding initial +// package due to type errors. +// +// Code for bodies of functions is not built until Build is called on +// the resulting Program. SSA code is constructed only for the initial +// packages with well-typed syntax trees. +// +// The mode parameter controls diagnostics and checking during SSA construction. +// +func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + return doPackages(initial, mode, false) +} + +// AllPackages creates an SSA program for a set of packages plus all +// their dependencies. +// +// The packages must have been loaded from source syntax using the +// golang.org/x/tools/go/packages.Load function in LoadAllSyntax mode. +// +// AllPackages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. The resulting list of +// packages corresponds to the list of initial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding +// initial package due to type errors. +// +// Code for bodies of functions is not built until Build is called on +// the resulting Program. SSA code is constructed for all packages with +// well-typed syntax trees. +// +// The mode parameter controls diagnostics and checking during SSA construction. 
+// +func AllPackages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + return doPackages(initial, mode, true) +} + +func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*ssa.Program, []*ssa.Package) { + + var fset *token.FileSet + if len(initial) > 0 { + fset = initial[0].Fset + } + + prog := ssa.NewProgram(fset, mode) + + isInitial := make(map[*packages.Package]bool, len(initial)) + for _, p := range initial { + isInitial[p] = true + } + + ssamap := make(map[*packages.Package]*ssa.Package) + packages.Visit(initial, nil, func(p *packages.Package) { + if p.Types != nil && !p.IllTyped { + var files []*ast.File + if deps || isInitial[p] { + files = p.Syntax + } + ssamap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true) + } + }) + + var ssapkgs []*ssa.Package + for _, p := range initial { + ssapkgs = append(ssapkgs, ssamap[p]) // may be nil + } + return prog, ssapkgs +} + +// CreateProgram returns a new program in SSA form, given a program +// loaded from source. An SSA package is created for each transitively +// error-free package of lprog. +// +// Code for bodies of functions is not built until Build is called +// on the result. +// +// The mode parameter controls diagnostics and checking during SSA construction. +// +// Deprecated: use golang.org/x/tools/go/packages and the Packages +// function instead; see ssa.ExampleLoadPackages. +// +func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { + prog := ssa.NewProgram(lprog.Fset, mode) + + for _, info := range lprog.AllPackages { + if info.TransitivelyErrorFree { + prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) + } + } + + return prog +} + +// BuildPackage builds an SSA program with IR for a single package. +// +// It populates pkg by type-checking the specified file ASTs. 
All +// dependencies are loaded using the importer specified by tc, which +// typically loads compiler export data; SSA code cannot be built for +// those packages. BuildPackage then constructs an ssa.Program with all +// dependency packages created, and builds and returns the SSA package +// corresponding to pkg. +// +// The caller must have set pkg.Path() to the import path. +// +// The operation fails if there were any type-checking or import errors. +// +// See ../ssa/example_test.go for an example. +// +func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) { + if fset == nil { + panic("no token.FileSet") + } + if pkg.Path() == "" { + panic("package has no import path") + } + + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { + return nil, nil, err + } + + prog := ssa.NewProgram(fset, mode) + + // Create SSA packages for all imports. + // Order is not significant. + created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pkg.Imports()) + + // Create and build the primary package. 
+ ssapkg := prog.CreatePackage(pkg, files, info, false) + ssapkg.Build() + return ssapkg, info, nil +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go new file mode 100644 index 000000000000..db03bf555900 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go @@ -0,0 +1,234 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil + +// This file implements discovery of switch and type-switch constructs +// from low-level control flow. +// +// Many techniques exist for compiling a high-level switch with +// constant cases to efficient machine code. The optimal choice will +// depend on the data type, the specific case values, the code in the +// body of each case, and the hardware. +// Some examples: +// - a lookup table (for a switch that maps constants to constants) +// - a computed goto +// - a binary tree +// - a perfect hash +// - a two-level switch (to partition constant strings by their first byte). + +import ( + "bytes" + "fmt" + "go/token" + "go/types" + + "golang.org/x/tools/go/ssa" +) + +// A ConstCase represents a single constant comparison. +// It is part of a Switch. +type ConstCase struct { + Block *ssa.BasicBlock // block performing the comparison + Body *ssa.BasicBlock // body of the case + Value *ssa.Const // case comparand +} + +// A TypeCase represents a single type assertion. +// It is part of a Switch. +type TypeCase struct { + Block *ssa.BasicBlock // block performing the type assert + Body *ssa.BasicBlock // body of the case + Type types.Type // case type + Binding ssa.Value // value bound by this case +} + +// A Switch is a logical high-level control flow operation +// (a multiway branch) discovered by analysis of a CFG containing +// only if/else chains. It is not part of the ssa.Instruction set. 
+// +// One of ConstCases and TypeCases has length >= 2; +// the other is nil. +// +// In a value switch, the list of cases may contain duplicate constants. +// A type switch may contain duplicate types, or types assignable +// to an interface type also in the list. +// TODO(adonovan): eliminate such duplicates. +// +type Switch struct { + Start *ssa.BasicBlock // block containing start of if/else chain + X ssa.Value // the switch operand + ConstCases []ConstCase // ordered list of constant comparisons + TypeCases []TypeCase // ordered list of type assertions + Default *ssa.BasicBlock // successor if all comparisons fail +} + +func (sw *Switch) String() string { + // We represent each block by the String() of its + // first Instruction, e.g. "print(42:int)". + var buf bytes.Buffer + if sw.ConstCases != nil { + fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) + for _, c := range sw.ConstCases { + fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0]) + } + } else { + fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) + for _, c := range sw.TypeCases { + fmt.Fprintf(&buf, "case %s %s: %s\n", + c.Binding.Name(), c.Type, c.Body.Instrs[0]) + } + } + if sw.Default != nil { + fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) + } + fmt.Fprintf(&buf, "}") + return buf.String() +} + +// Switches examines the control-flow graph of fn and returns the +// set of inferred value and type switches. A value switch tests an +// ssa.Value for equality against two or more compile-time constant +// values. Switches involving link-time constants (addresses) are +// ignored. A type switch type-asserts an ssa.Value against two or +// more types. +// +// The switches are returned in dominance order. 
+// +// The resulting switches do not necessarily correspond to uses of the +// 'switch' keyword in the source: for example, a single source-level +// switch statement with non-constant cases may result in zero, one or +// many Switches, one per plural sequence of constant cases. +// Switches may even be inferred from if/else- or goto-based control flow. +// (In general, the control flow constructs of the source program +// cannot be faithfully reproduced from the SSA representation.) +// +func Switches(fn *ssa.Function) []Switch { + // Traverse the CFG in dominance order, so we don't + // enter an if/else-chain in the middle. + var switches []Switch + seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet + for _, b := range fn.DomPreorder() { + if x, k := isComparisonBlock(b); x != nil { + // Block b starts a switch. + sw := Switch{Start: b, X: x} + valueSwitch(&sw, k, seen) + if len(sw.ConstCases) > 1 { + switches = append(switches, sw) + } + } + + if y, x, T := isTypeAssertBlock(b); y != nil { + // Block b starts a type switch. + sw := Switch{Start: b, X: x} + typeSwitch(&sw, y, T, seen) + if len(sw.TypeCases) > 1 { + switches = append(switches, sw) + } + } + } + return switches +} + +func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.ConstCases = append(sw.ConstCases, ConstCase{ + Block: b, + Body: b.Succs[0], + Value: k, + }) + b = b.Succs[1] + if len(b.Instrs) > 2 { + // Block b contains not just 'if x == k', + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. 
+ break + } + x, k = isComparisonBlock(b) + } + sw.Default = b +} + +func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.TypeCases = append(sw.TypeCases, TypeCase{ + Block: b, + Body: b.Succs[0], + Type: T, + Binding: y, + }) + b = b.Succs[1] + if len(b.Instrs) > 4 { + // Block b contains not just + // {TypeAssert; Extract #0; Extract #1; If} + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. + break + } + y, x, T = isTypeAssertBlock(b) + } + sw.Default = b +} + +// isComparisonBlock returns the operands (v, k) if a block ends with +// a comparison v==k, where k is a compile-time constant. +// +func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) { + if n := len(b.Instrs); n >= 2 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { + if k, ok := binop.Y.(*ssa.Const); ok { + return binop.X, k + } + if k, ok := binop.X.(*ssa.Const); ok { + return binop.Y, k + } + } + } + } + return +} + +// isTypeAssertBlock returns the operands (y, x, T) if a block ends with +// a type assertion "if y, ok := x.(T); ok {". +// +func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) { + if n := len(b.Instrs); n >= 4 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 { + if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b { + // hack: relies upon instruction ordering. 
+ if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok { + return ext0, ta.X, ta.AssertedType + } + } + } + } + } + return +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go new file mode 100644 index 000000000000..3424e8a30868 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go @@ -0,0 +1,79 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil // import "golang.org/x/tools/go/ssa/ssautil" + +import "golang.org/x/tools/go/ssa" + +// This file defines utilities for visiting the SSA representation of +// a Program. +// +// TODO(adonovan): test coverage. + +// AllFunctions finds and returns the set of functions potentially +// needed by program prog, as determined by a simple linker-style +// reachability algorithm starting from the members and method-sets of +// each package. The result may include anonymous functions and +// synthetic wrappers. +// +// Precondition: all packages are built. 
+// +func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool { + visit := visitor{ + prog: prog, + seen: make(map[*ssa.Function]bool), + } + visit.program() + return visit.seen +} + +type visitor struct { + prog *ssa.Program + seen map[*ssa.Function]bool +} + +func (visit *visitor) program() { + for _, pkg := range visit.prog.AllPackages() { + for _, mem := range pkg.Members { + if fn, ok := mem.(*ssa.Function); ok { + visit.function(fn) + } + } + } + for _, T := range visit.prog.RuntimeTypes() { + mset := visit.prog.MethodSets.MethodSet(T) + for i, n := 0, mset.Len(); i < n; i++ { + visit.function(visit.prog.MethodValue(mset.At(i))) + } + } +} + +func (visit *visitor) function(fn *ssa.Function) { + if !visit.seen[fn] { + visit.seen[fn] = true + var buf [10]*ssa.Value // avoid alloc in common case + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + for _, op := range instr.Operands(buf[:0]) { + if fn, ok := (*op).(*ssa.Function); ok { + visit.function(fn) + } + } + } + } + } +} + +// MainPackages returns the subset of the specified packages +// named "main" that define a main function. +// The result may include synthetic "testmain" packages. +func MainPackages(pkgs []*ssa.Package) []*ssa.Package { + var mains []*ssa.Package + for _, pkg := range pkgs { + if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { + mains = append(mains, pkg) + } + } + return mains +} diff --git a/vendor/golang.org/x/tools/go/ssa/testmain.go b/vendor/golang.org/x/tools/go/ssa/testmain.go new file mode 100644 index 000000000000..8ec15ba50513 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/testmain.go @@ -0,0 +1,271 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// CreateTestMainPackage synthesizes a main package that runs all the +// tests of the supplied packages. 
+// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing. +// +// TODO(adonovan): throws this all away now that x/tools/go/packages +// provides access to the actual synthetic test main files. + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/types" + "log" + "os" + "strings" + "text/template" +) + +// FindTests returns the Test, Benchmark, and Example functions +// (as defined by "go test") defined in the specified package, +// and its TestMain function, if any. +// +// Deprecated: use x/tools/go/packages to access synthetic testmain packages. +func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) { + prog := pkg.Prog + + // The first two of these may be nil: if the program doesn't import "testing", + // it can't contain any tests, but it may yet contain Examples. + var testSig *types.Signature // func(*testing.T) + var benchmarkSig *types.Signature // func(*testing.B) + var exampleSig = types.NewSignature(nil, nil, nil, false) // func() + + // Obtain the types from the parameters of testing.MainStart. + if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { + mainStart := testingPkg.Func("MainStart") + params := mainStart.Signature.Params() + testSig = funcField(params.At(1).Type()) + benchmarkSig = funcField(params.At(2).Type()) + + // Does the package define this function? + // func TestMain(*testing.M) + if f := pkg.Func("TestMain"); f != nil { + sig := f.Type().(*types.Signature) + starM := mainStart.Signature.Results().At(0).Type() // *testing.M + if sig.Results().Len() == 0 && + sig.Params().Len() == 1 && + types.Identical(sig.Params().At(0).Type(), starM) { + main = f + } + } + } + + // TODO(adonovan): use a stable order, e.g. lexical. 
+ for _, mem := range pkg.Members { + if f, ok := mem.(*Function); ok && + ast.IsExported(f.Name()) && + strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") { + + switch { + case testSig != nil && isTestSig(f, "Test", testSig): + tests = append(tests, f) + case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig): + benchmarks = append(benchmarks, f) + case isTestSig(f, "Example", exampleSig): + examples = append(examples, f) + default: + continue + } + } + } + return +} + +// Like isTest, but checks the signature too. +func isTestSig(f *Function, prefix string, sig *types.Signature) bool { + return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig) +} + +// Given the type of one of the three slice parameters of testing.Main, +// returns the function type. +func funcField(slice types.Type) *types.Signature { + return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature) +} + +// isTest tells whether name looks like a test (or benchmark, according to prefix). +// It is a Test (say) if there is a character after Test that is not a lower-case letter. +// We don't want TesticularCancer. +// Plundered from $GOROOT/src/cmd/go/test.go +func isTest(name, prefix string) bool { + if !strings.HasPrefix(name, prefix) { + return false + } + if len(name) == len(prefix) { // "Test" is ok + return true + } + return ast.IsExported(name[len(prefix):]) +} + +// CreateTestMainPackage creates and returns a synthetic "testmain" +// package for the specified package if it defines tests, benchmarks or +// executable examples, or nil otherwise. The new package is named +// "main" and provides a function named "main" that runs the tests, +// similar to the one that would be created by the 'go test' tool. +// +// Subsequent calls to prog.AllPackages include the new package. +// The package pkg must belong to the program prog. +// +// Deprecated: use x/tools/go/packages to access synthetic testmain packages. 
+func (prog *Program) CreateTestMainPackage(pkg *Package) *Package { + if pkg.Prog != prog { + log.Fatal("Package does not belong to Program") + } + + // Template data + var data struct { + Pkg *Package + Tests, Benchmarks, Examples []*Function + Main *Function + Go18 bool + } + data.Pkg = pkg + + // Enumerate tests. + data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg) + if data.Main == nil && + data.Tests == nil && data.Benchmarks == nil && data.Examples == nil { + return nil + } + + // Synthesize source for testmain package. + path := pkg.Pkg.Path() + "$testmain" + tmpl := testmainTmpl + if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { + // In Go 1.8, testing.MainStart's first argument is an interface, not a func. + data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type()) + } else { + // The program does not import "testing", but FindTests + // returned non-nil, which must mean there were Examples + // but no Test, Benchmark, or TestMain functions. + + // We'll simply call them from testmain.main; this will + // ensure they don't panic, but will not check any + // "Output:" comments. + // (We should not execute an Example that has no + // "Output:" comment, but it's impossible to tell here.) + tmpl = examplesOnlyTmpl + } + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + log.Fatalf("internal error expanding template for %s: %v", path, err) + } + if false { // debugging + fmt.Fprintln(os.Stderr, buf.String()) + } + + // Parse and type-check the testmain package. 
+ f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0)) + if err != nil { + log.Fatalf("internal error parsing %s: %v", path, err) + } + conf := types.Config{ + DisableUnusedImportCheck: true, + Importer: importer{pkg}, + } + files := []*ast.File{f} + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + testmainPkg, err := conf.Check(path, prog.Fset, files, info) + if err != nil { + log.Fatalf("internal error type-checking %s: %v", path, err) + } + + // Create and build SSA code. + testmain := prog.CreatePackage(testmainPkg, files, info, false) + testmain.SetDebugMode(false) + testmain.Build() + testmain.Func("main").Synthetic = "test main function" + testmain.Func("init").Synthetic = "package initializer" + return testmain +} + +// An implementation of types.Importer for an already loaded SSA program. 
+type importer struct { + pkg *Package // package under test; may be non-importable +} + +func (imp importer) Import(path string) (*types.Package, error) { + if p := imp.pkg.Prog.ImportedPackage(path); p != nil { + return p.Pkg, nil + } + if path == imp.pkg.Pkg.Path() { + return imp.pkg.Pkg, nil + } + return nil, fmt.Errorf("not found") // can't happen +} + +var testmainTmpl = template.Must(template.New("testmain").Parse(` +package main + +import "io" +import "os" +import "testing" +import p {{printf "%q" .Pkg.Pkg.Path}} + +{{if .Go18}} +type deps struct{} + +func (deps) ImportPath() string { return "" } +func (deps) MatchString(pat, str string) (bool, error) { return true, nil } +func (deps) StartCPUProfile(io.Writer) error { return nil } +func (deps) StartTestLog(io.Writer) {} +func (deps) StopCPUProfile() {} +func (deps) StopTestLog() error { return nil } +func (deps) WriteHeapProfile(io.Writer) error { return nil } +func (deps) WriteProfileTo(string, io.Writer, int) error { return nil } + +var match deps +{{else}} +func match(_, _ string) (bool, error) { return true, nil } +{{end}} + +func main() { + tests := []testing.InternalTest{ +{{range .Tests}} + { {{printf "%q" .Name}}, p.{{.Name}} }, +{{end}} + } + benchmarks := []testing.InternalBenchmark{ +{{range .Benchmarks}} + { {{printf "%q" .Name}}, p.{{.Name}} }, +{{end}} + } + examples := []testing.InternalExample{ +{{range .Examples}} + {Name: {{printf "%q" .Name}}, F: p.{{.Name}}}, +{{end}} + } + m := testing.MainStart(match, tests, benchmarks, examples) +{{with .Main}} + p.{{.Name}}(m) +{{else}} + os.Exit(m.Run()) +{{end}} +} + +`)) + +var examplesOnlyTmpl = template.Must(template.New("examples").Parse(` +package main + +import p {{printf "%q" .Pkg.Pkg.Path}} + +func main() { +{{range .Examples}} + p.{{.Name}}() +{{end}} +} +`)) diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go new file mode 100644 index 000000000000..ddb118460969 --- /dev/null +++ 
b/vendor/golang.org/x/tools/go/ssa/util.go @@ -0,0 +1,119 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines a number of miscellaneous utility functions. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + + "golang.org/x/tools/go/ast/astutil" +) + +//// AST utilities + +func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } + +// isBlankIdent returns true iff e is an Ident with name "_". +// They have no associated types.Object, and thus no type. +// +func isBlankIdent(e ast.Expr) bool { + id, ok := e.(*ast.Ident) + return ok && id.Name == "_" +} + +//// Type utilities. Some of these belong in go/types. + +// isPointer returns true for types whose underlying type is a pointer. +func isPointer(typ types.Type) bool { + _, ok := typ.Underlying().(*types.Pointer) + return ok +} + +func isInterface(T types.Type) bool { return types.IsInterface(T) } + +// deref returns a pointer's element type; otherwise it returns typ. +func deref(typ types.Type) types.Type { + if p, ok := typ.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return typ +} + +// recvType returns the receiver type of method obj. +func recvType(obj *types.Func) types.Type { + return obj.Type().(*types.Signature).Recv().Type() +} + +// DefaultType returns the default "typed" type for an "untyped" type; +// it returns the incoming type for all other types. The default type +// for untyped nil is untyped nil. +// +// Exported to ssa/interp. +// +// TODO(adonovan): use go/types.DefaultType after 1.8. 
+// +func DefaultType(typ types.Type) types.Type { + if t, ok := typ.(*types.Basic); ok { + k := t.Kind() + switch k { + case types.UntypedBool: + k = types.Bool + case types.UntypedInt: + k = types.Int + case types.UntypedRune: + k = types.Rune + case types.UntypedFloat: + k = types.Float64 + case types.UntypedComplex: + k = types.Complex128 + case types.UntypedString: + k = types.String + } + typ = types.Typ[k] + } + return typ +} + +// logStack prints the formatted "start" message to stderr and +// returns a closure that prints the corresponding "end" message. +// Call using 'defer logStack(...)()' to show builder stack on panic. +// Don't forget trailing parens! +// +func logStack(format string, args ...interface{}) func() { + msg := fmt.Sprintf(format, args...) + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, "\n") + return func() { + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, " end\n") + } +} + +// newVar creates a 'var' for use in a types.Tuple. +func newVar(name string, typ types.Type) *types.Var { + return types.NewParam(token.NoPos, nil, name, typ) +} + +// anonVar creates an anonymous 'var' for use in a types.Tuple. +func anonVar(typ types.Type) *types.Var { + return newVar("", typ) +} + +var lenResults = types.NewTuple(anonVar(tInt)) + +// makeLen returns the len builtin specialized to type func(T)int. +func makeLen(T types.Type) *Builtin { + lenParams := types.NewTuple(anonVar(T)) + return &Builtin{ + name: "len", + sig: types.NewSignature(nil, lenParams, lenResults, false), + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/wrappers.go b/vendor/golang.org/x/tools/go/ssa/wrappers.go new file mode 100644 index 000000000000..a4ae71d8cfcf --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/wrappers.go @@ -0,0 +1,290 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ssa + +// This file defines synthesis of Functions that delegate to declared +// methods; they come in three kinds: +// +// (1) wrappers: methods that wrap declared methods, performing +// implicit pointer indirections and embedded field selections. +// +// (2) thunks: funcs that wrap declared methods. Like wrappers, +// thunks perform indirections and field selections. The thunk's +// first parameter is used as the receiver for the method call. +// +// (3) bounds: funcs that wrap declared methods. The bound's sole +// free variable, supplied by a closure, is used as the receiver +// for the method call. No indirections or field selections are +// performed since they can be done before the call. + +import ( + "fmt" + + "go/types" +) + +// -- wrappers ----------------------------------------------------------- + +// makeWrapper returns a synthetic method that delegates to the +// declared method denoted by meth.Obj(), first performing any +// necessary pointer indirections or field selections implied by meth. +// +// The resulting method's receiver type is meth.Recv(). +// +// This function is versatile but quite subtle! Consider the +// following axes of variation when making changes: +// - optional receiver indirection +// - optional implicit field selections +// - meth.Obj() may denote a concrete or an interface method +// - the result may be a thunk or a wrapper. 
+// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func makeWrapper(prog *Program, sel *types.Selection) *Function { + obj := sel.Obj().(*types.Func) // the declared function + sig := sel.Type().(*types.Signature) // type of this wrapper + + var recv *types.Var // wrapper's receiver or thunk's params[0] + name := obj.Name() + var description string + var start int // first regular param + if sel.Kind() == types.MethodExpr { + name += "$thunk" + description = "thunk" + recv = sig.Params().At(0) + start = 1 + } else { + description = "wrapper" + recv = sig.Recv() + } + + description = fmt.Sprintf("%s for %s", description, sel.Obj()) + if prog.mode&LogSource != 0 { + defer logStack("make %s to (%s)", description, recv.Type())() + } + fn := &Function{ + name: name, + method: sel, + object: obj, + Signature: sig, + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + } + fn.startBody() + fn.addSpilledParam(recv) + createParams(fn, start) + + indices := sel.Index() + + var v Value = fn.Locals[0] // spilled receiver + if isPointer(sel.Recv()) { + v = emitLoad(fn, v) + + // For simple indirection wrappers, perform an informative nil-check: + // "value method (T).f called using nil *T pointer" + if len(indices) == 1 && !isPointer(recvType(obj)) { + var c Call + c.Call.Value = &Builtin{ + name: "ssa:wrapnilchk", + sig: types.NewSignature(nil, + types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)), + types.NewTuple(anonVar(sel.Recv())), false), + } + c.Call.Args = []Value{ + v, + stringConst(deref(sel.Recv()).String()), + stringConst(sel.Obj().Name()), + } + c.setType(v.Type()) + v = fn.emit(&c) + } + } + + // Invariant: v is a pointer, either + // value of *A receiver param, or + // address of A spilled receiver. + + // We use pointer arithmetic (FieldAddr possibly followed by + // Load) in preference to value extraction (Field possibly + // preceded by Load). 
+ + v = emitImplicitSelections(fn, v, indices[:len(indices)-1]) + + // Invariant: v is a pointer, either + // value of implicit *C field, or + // address of implicit C field. + + var c Call + if r := recvType(obj); !isInterface(r) { // concrete method + if !isPointer(r) { + v = emitLoad(fn, v) + } + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = append(c.Call.Args, v) + } else { + c.Call.Method = obj + c.Call.Value = emitLoad(fn, v) + } + for _, arg := range fn.Params[1:] { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() + return fn +} + +// createParams creates parameters for wrapper method fn based on its +// Signature.Params, which do not include the receiver. +// start is the index of the first regular parameter to use. +// +func createParams(fn *Function, start int) { + tparams := fn.Signature.Params() + for i, n := start, tparams.Len(); i < n; i++ { + fn.addParamObj(tparams.At(i)) + } +} + +// -- bounds ----------------------------------------------------------- + +// makeBound returns a bound method wrapper (or "bound"), a synthetic +// function that delegates to a concrete or interface method denoted +// by obj. The resulting function has no receiver, but has one free +// variable which will be used as the method's receiver in the +// tail-call. +// +// Use MakeClosure with such a wrapper to construct a bound method +// closure. e.g.: +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// var t T +// f := t.meth +// f() // calls t.meth() +// +// f is a closure of a synthetic wrapper defined as if by: +// +// f := func() { return t.meth() } +// +// Unlike makeWrapper, makeBound need perform no indirection or field +// selections because that can be done before the closure is +// constructed. 
+// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeBound(prog *Program, obj *types.Func) *Function { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + fn, ok := prog.bounds[obj] + if !ok { + description := fmt.Sprintf("bound method wrapper for %s", obj) + if prog.mode&LogSource != 0 { + defer logStack("%s", description)() + } + fn = &Function{ + name: obj.Name() + "$bound", + object: obj, + Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + } + + fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn} + fn.FreeVars = []*FreeVar{fv} + fn.startBody() + createParams(fn, 0) + var c Call + + if !isInterface(recvType(obj)) { // concrete + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = []Value{fv} + } else { + c.Call.Value = fv + c.Call.Method = obj + } + for _, arg := range fn.Params { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() + + prog.bounds[obj] = fn + } + return fn +} + +// -- thunks ----------------------------------------------------------- + +// makeThunk returns a thunk, a synthetic function that delegates to a +// concrete or interface method denoted by sel.Obj(). The resulting +// function has no receiver, but has an additional (first) regular +// parameter. +// +// Precondition: sel.Kind() == types.MethodExpr. +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// f := T.meth +// var t T +// f(t) // calls t.meth() +// +// f is a synthetic wrapper defined as if by: +// +// f := func(t T) { return t.meth() } +// +// TODO(adonovan): opt: currently the stub is created even when used +// directly in a function call: C.f(i, 0). This is less efficient +// than inlining the stub. 
+// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeThunk(prog *Program, sel *types.Selection) *Function { + if sel.Kind() != types.MethodExpr { + panic(sel) + } + + key := selectionKey{ + kind: sel.Kind(), + recv: sel.Recv(), + obj: sel.Obj(), + index: fmt.Sprint(sel.Index()), + indirect: sel.Indirect(), + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + // Canonicalize key.recv to avoid constructing duplicate thunks. + canonRecv, ok := prog.canon.At(key.recv).(types.Type) + if !ok { + canonRecv = key.recv + prog.canon.Set(key.recv, canonRecv) + } + key.recv = canonRecv + + fn, ok := prog.thunks[key] + if !ok { + fn = makeWrapper(prog, sel) + if fn.Signature.Recv() != nil { + panic(fn) // unexpected receiver + } + prog.thunks[key] = fn + } + return fn +} + +func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { + return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) +} + +// selectionKey is like types.Selection but a usable map key. 
+type selectionKey struct { + kind types.SelectionKind + recv types.Type // canonicalized via Program.canon + obj types.Object + index string + indirect bool +} diff --git a/vendor/modules.txt b/vendor/modules.txt index e777a2d3ecb4..f3fae97e6954 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,3 +1,5 @@ +# github.com/BurntSushi/toml v0.3.1 +github.com/BurntSushi/toml # github.com/OpenPeeDeeP/depguard v0.0.0-20180806142446-a69c782687b2 github.com/OpenPeeDeeP/depguard # github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6 @@ -60,17 +62,27 @@ github.com/golangci/errcheck/golangci github.com/golangci/errcheck/internal/errcheck # github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 github.com/golangci/go-misc/deadcode -# github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b +# github.com/golangci/go-tools v0.0.0-20180109140146-be4842e24b95 +github.com/golangci/go-tools/config github.com/golangci/go-tools/lint github.com/golangci/go-tools/lint/lintutil github.com/golangci/go-tools/simple github.com/golangci/go-tools/staticcheck +github.com/golangci/go-tools/stylecheck github.com/golangci/go-tools/unused +github.com/golangci/go-tools/ssa +github.com/golangci/go-tools/ssa/ssautil +github.com/golangci/go-tools/lint/lintutil/format github.com/golangci/go-tools/version +github.com/golangci/go-tools/arg github.com/golangci/go-tools/internal/sharedcheck +github.com/golangci/go-tools/lint/lintdsl github.com/golangci/go-tools/deprecated github.com/golangci/go-tools/functions +github.com/golangci/go-tools/ssautil github.com/golangci/go-tools/staticcheck/vrp +github.com/golangci/go-tools/callgraph +github.com/golangci/go-tools/callgraph/static # github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3 github.com/golangci/goconst # github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee @@ 
-87,10 +99,6 @@ github.com/golangci/govet/lib/cfg github.com/golangci/govet/lib/whitelist # github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde github.com/golangci/ineffassign -# github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec -github.com/golangci/interfacer/check -# github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5 -github.com/golangci/lint # github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1 github.com/golangci/lint-1 # github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca @@ -101,17 +109,8 @@ github.com/golangci/misspell github.com/golangci/prealloc # github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 github.com/golangci/revgrep -# github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b -github.com/golangci/tools/go/ssa -github.com/golangci/tools/go/ssa/ssautil -github.com/golangci/tools/go/callgraph -github.com/golangci/tools/go/callgraph/cha -github.com/golangci/tools/go/callgraph/rta -github.com/golangci/tools/go/callgraph/static # github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 github.com/golangci/unconvert -# github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16 -github.com/golangci/unparam/check # github.com/hashicorp/hcl v0.0.0-20180404174102-ef8a98b0bbce github.com/hashicorp/hcl github.com/hashicorp/hcl/hcl/printer @@ -198,6 +197,8 @@ golang.org/x/text/unicode/norm golang.org/x/tools/go/loader golang.org/x/tools/go/packages golang.org/x/tools/imports +golang.org/x/tools/go/ssa +golang.org/x/tools/go/ssa/ssautil golang.org/x/tools/go/ast/astutil golang.org/x/tools/go/types/typeutil golang.org/x/tools/go/gcexportdata @@ -206,10 +207,19 @@ golang.org/x/tools/go/internal/cgo golang.org/x/tools/go/internal/packagesdriver golang.org/x/tools/internal/gopathwalk golang.org/x/tools/internal/semver +golang.org/x/tools/go/callgraph 
+golang.org/x/tools/go/callgraph/cha +golang.org/x/tools/go/callgraph/rta golang.org/x/tools/go/internal/gcimporter golang.org/x/tools/internal/fastwalk # gopkg.in/yaml.v2 v2.2.1 gopkg.in/yaml.v2 +# mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed +mvdan.cc/interfacer/check +# mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b +mvdan.cc/lint +# mvdan.cc/unparam v0.0.0-20181201214637-68701730a1d7 +mvdan.cc/unparam/check # sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec sourcegraph.com/sourcegraph/go-diff/diff # sourcegraph.com/sqs/pbtypes v0.0.0-20160107090929-4d1b9dc7ffc3 diff --git a/vendor/github.com/golangci/interfacer/LICENSE b/vendor/mvdan.cc/interfacer/LICENSE similarity index 100% rename from vendor/github.com/golangci/interfacer/LICENSE rename to vendor/mvdan.cc/interfacer/LICENSE diff --git a/vendor/github.com/golangci/interfacer/check/cache.go b/vendor/mvdan.cc/interfacer/check/cache.go similarity index 100% rename from vendor/github.com/golangci/interfacer/check/cache.go rename to vendor/mvdan.cc/interfacer/check/cache.go diff --git a/vendor/github.com/golangci/interfacer/check/check.go b/vendor/mvdan.cc/interfacer/check/check.go similarity index 98% rename from vendor/github.com/golangci/interfacer/check/check.go rename to vendor/mvdan.cc/interfacer/check/check.go index 623fd57b575d..f4d3b4037b08 100644 --- a/vendor/github.com/golangci/interfacer/check/check.go +++ b/vendor/mvdan.cc/interfacer/check/check.go @@ -1,7 +1,7 @@ // Copyright (c) 2015, Daniel Martí // See LICENSE for licensing information -package check // import "github.com/golangci/interfacer/check" +package check // import "mvdan.cc/interfacer/check" import ( "fmt" @@ -12,11 +12,11 @@ import ( "strings" "golang.org/x/tools/go/loader" - "github.com/golangci/tools/go/ssa" - "github.com/golangci/tools/go/ssa/ssautil" + "golang.org/x/tools/go/ssa" + "golang.org/x/tools/go/ssa/ssautil" "github.com/kisielk/gotool" - 
"github.com/golangci/lint" + "mvdan.cc/lint" ) func toDiscard(usage *varUsage) bool { diff --git a/vendor/github.com/golangci/interfacer/check/types.go b/vendor/mvdan.cc/interfacer/check/types.go similarity index 100% rename from vendor/github.com/golangci/interfacer/check/types.go rename to vendor/mvdan.cc/interfacer/check/types.go diff --git a/vendor/github.com/golangci/lint/.travis.yml b/vendor/mvdan.cc/lint/.travis.yml similarity index 100% rename from vendor/github.com/golangci/lint/.travis.yml rename to vendor/mvdan.cc/lint/.travis.yml diff --git a/vendor/github.com/golangci/lint/LICENSE b/vendor/mvdan.cc/lint/LICENSE similarity index 100% rename from vendor/github.com/golangci/lint/LICENSE rename to vendor/mvdan.cc/lint/LICENSE diff --git a/vendor/github.com/golangci/lint/README.md b/vendor/mvdan.cc/lint/README.md similarity index 100% rename from vendor/github.com/golangci/lint/README.md rename to vendor/mvdan.cc/lint/README.md diff --git a/vendor/github.com/golangci/lint/lint.go b/vendor/mvdan.cc/lint/lint.go similarity index 85% rename from vendor/github.com/golangci/lint/lint.go rename to vendor/mvdan.cc/lint/lint.go index f0d454368bf9..a16789fad568 100644 --- a/vendor/github.com/golangci/lint/lint.go +++ b/vendor/mvdan.cc/lint/lint.go @@ -2,13 +2,13 @@ // See LICENSE for licensing information // Package lint defines common interfaces for Go code checkers. -package lint // import "github.com/golangci/lint" +package lint // import "mvdan.cc/lint" import ( "go/token" "golang.org/x/tools/go/loader" - "github.com/golangci/tools/go/ssa" + "golang.org/x/tools/go/ssa" ) // A Checker points out issues in a program. 
diff --git a/vendor/github.com/golangci/unparam/LICENSE b/vendor/mvdan.cc/unparam/LICENSE similarity index 100% rename from vendor/github.com/golangci/unparam/LICENSE rename to vendor/mvdan.cc/unparam/LICENSE diff --git a/vendor/github.com/golangci/unparam/check/check.go b/vendor/mvdan.cc/unparam/check/check.go similarity index 85% rename from vendor/github.com/golangci/unparam/check/check.go rename to vendor/mvdan.cc/unparam/check/check.go index 2d144c4f1dfd..fc22b4041eba 100644 --- a/vendor/github.com/golangci/unparam/check/check.go +++ b/vendor/mvdan.cc/unparam/check/check.go @@ -21,15 +21,12 @@ import ( "sort" "strings" - "github.com/golangci/tools/go/callgraph" - "github.com/golangci/tools/go/callgraph/cha" - "github.com/golangci/tools/go/callgraph/rta" - "golang.org/x/tools/go/loader" - "github.com/golangci/tools/go/ssa" - "github.com/golangci/tools/go/ssa/ssautil" - - "github.com/kisielk/gotool" - "github.com/golangci/lint" + "golang.org/x/tools/go/callgraph" + "golang.org/x/tools/go/callgraph/cha" + "golang.org/x/tools/go/callgraph/rta" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ssa" + "golang.org/x/tools/go/ssa/ssautil" ) // UnusedParams returns a list of human-readable issues that point out unused @@ -55,7 +52,7 @@ func UnusedParams(tests bool, algo string, exported, debug bool, args ...string) // UnusedParams instead, unless you want to use a *loader.Program and // *ssa.Program directly. type Checker struct { - lprog *loader.Program + pkgs []*packages.Package prog *ssa.Program graph *callgraph.Graph @@ -66,48 +63,58 @@ type Checker struct { exported bool debugLog io.Writer - issues []lint.Issue + issues []Issue cachedDeclCounts map[string]map[string]int + // callByPos maps from a call site position to its CallExpr. 
callByPos map[token.Pos]*ast.CallExpr -} -var ( - _ lint.Checker = (*Checker)(nil) - _ lint.WithSSA = (*Checker)(nil) + // funcBodyByPos maps from a function position to its body. We can't map + // to the declaration, as that could be either a FuncDecl or FuncLit. + funcBodyByPos map[token.Pos]*ast.BlockStmt +} - errorType = types.Universe.Lookup("error").Type() -) +var errorType = types.Universe.Lookup("error").Type() // lines runs the checker and returns the list of readable issues. func (c *Checker) lines(args ...string) ([]string, error) { - paths := gotool.ImportPaths(args) - conf := loader.Config{ - ParserMode: parser.ParseComments, - } - if _, err := conf.FromArgs(paths, c.tests); err != nil { - return nil, err + cfg := &packages.Config{ + Mode: packages.LoadSyntax, + Tests: c.tests, } - lprog, err := conf.Load() + pkgs, err := packages.Load(cfg, args...) if err != nil { return nil, err } - prog := ssautil.CreateProgram(lprog, 0) + if packages.PrintErrors(pkgs) > 0 { + return nil, fmt.Errorf("encountered errors") + } + + prog, _ := ssautil.Packages(pkgs, 0) prog.Build() - c.Program(lprog) + c.Packages(pkgs) c.ProgramSSA(prog) issues, err := c.Check() if err != nil { return nil, err } - lines := make([]string, len(issues)) - for i, issue := range issues { + lines := make([]string, 0, len(issues)) + prevLine := "" + for _, issue := range issues { fpos := prog.Fset.Position(issue.Pos()).String() if strings.HasPrefix(fpos, c.wd) { fpos = fpos[len(c.wd)+1:] } - lines[i] = fmt.Sprintf("%s: %s", fpos, issue.Message()) + line := fmt.Sprintf("%s: %s", fpos, issue.Message()) + if line == prevLine { + // Deduplicate lines, since we may look at the same + // package multiple times if tests are involved. + // TODO: is there a better way to handle this? 
+ continue + } + prevLine = line + lines = append(lines, fmt.Sprintf("%s: %s", fpos, issue.Message())) } return lines, nil } @@ -123,8 +130,8 @@ func (i Issue) Pos() token.Pos { return i.pos } func (i Issue) Message() string { return i.fname + " - " + i.msg } // Program supplies Checker with the needed *loader.Program. -func (c *Checker) Program(lprog *loader.Program) { - c.lprog = lprog +func (c *Checker) Packages(pkgs []*packages.Package) { + c.pkgs = pkgs } // ProgramSSA supplies Checker with the needed *ssa.Program. @@ -173,21 +180,30 @@ var stdSizes = types.SizesFor("gc", "amd64") // Check runs the unused parameter check and returns the list of found issues, // and any error encountered. -func (c *Checker) Check() ([]lint.Issue, error) { +func (c *Checker) Check() ([]Issue, error) { c.cachedDeclCounts = make(map[string]map[string]int) c.callByPos = make(map[token.Pos]*ast.CallExpr) - wantPkg := make(map[*types.Package]*loader.PackageInfo) + c.funcBodyByPos = make(map[token.Pos]*ast.BlockStmt) + wantPkg := make(map[*types.Package]*packages.Package) genFiles := make(map[string]bool) - for _, info := range c.lprog.InitialPackages() { - wantPkg[info.Pkg] = info - for _, f := range info.Files { + for _, pkg := range c.pkgs { + wantPkg[pkg.Types] = pkg + for _, f := range pkg.Syntax { if len(f.Comments) > 0 && generatedDoc(f.Comments[0].Text()) { fname := c.prog.Fset.Position(f.Pos()).Filename genFiles[fname] = true } ast.Inspect(f, func(node ast.Node) bool { - if ce, ok := node.(*ast.CallExpr); ok { - c.callByPos[ce.Lparen] = ce + switch node := node.(type) { + case *ast.CallExpr: + c.callByPos[node.Lparen] = node + // ssa.Function.Pos returns the declaring + // FuncLit.Type.Func or the position of the + // FuncDecl.Name. 
+ case *ast.FuncDecl: + c.funcBodyByPos[node.Name.Pos()] = node.Body + case *ast.FuncLit: + c.funcBodyByPos[node.Pos()] = node.Body } return true }) @@ -221,8 +237,8 @@ func (c *Checker) Check() ([]lint.Issue, error) { case len(fn.Blocks) == 0: // stub continue } - pkgInfo := wantPkg[fn.Pkg.Pkg] - if pkgInfo == nil { // not part of given pkgs + pkg := wantPkg[fn.Pkg.Pkg] + if pkg == nil { // not part of given pkgs continue } if c.exported || fn.Pkg.Pkg.Name() == "main" { @@ -238,7 +254,7 @@ func (c *Checker) Check() ([]lint.Issue, error) { continue // generated file } - c.checkFunc(fn, pkgInfo) + c.checkFunc(fn, pkg) } sort.Slice(c.issues, func(i, j int) bool { p1 := c.prog.Fset.Position(c.issues[i].Pos()) @@ -269,7 +285,7 @@ func constValueString(cnst *ssa.Const) string { } // checkFunc checks a single function for unused parameters. -func (c *Checker) checkFunc(fn *ssa.Function, pkgInfo *loader.PackageInfo) { +func (c *Checker) checkFunc(fn *ssa.Function, pkg *packages.Package) { c.debug("func %s\n", fn.RelString(fn.Package().Pkg)) if dummyImpl(fn.Blocks[0]) { // panic implementation c.debug(" skip - dummy implementation\n") @@ -283,7 +299,7 @@ func (c *Checker) checkFunc(fn *ssa.Function, pkgInfo *loader.PackageInfo) { c.debug(" skip - type is required via call\n") return } - if c.multipleImpls(pkgInfo, fn) { + if c.multipleImpls(pkg, fn) { c.debug(" skip - multiple implementations via build tags\n") return } @@ -336,8 +352,8 @@ resLoop: } res := results.At(i) if res.Type() == errorType { - // "error is never unused" is less useful, and it's up - // to tools like errcheck anyway. + // "error is never used" is less useful, and it's up to + // tools like errcheck anyway. 
continue } count := 0 @@ -386,7 +402,7 @@ resLoop: constStr := c.alwaysReceivedConst(inboundCalls, par, i) if constStr != "" { reason = fmt.Sprintf("always receives %s", constStr) - } else if anyRealUse(par, i) { + } else if c.anyRealUse(par, i, pkg) { c.debug(" skip - used somewhere in the func body\n") continue } @@ -395,7 +411,7 @@ resLoop: } // mainPackages returns the subset of main packages within pkgSet. -func mainPackages(prog *ssa.Program, pkgSet map[*types.Package]*loader.PackageInfo) ([]*ssa.Package, error) { +func mainPackages(prog *ssa.Program, pkgSet map[*types.Package]*packages.Package) ([]*ssa.Package, error) { mains := make([]*ssa.Package, 0, len(pkgSet)) for tpkg := range pkgSet { pkg := prog.Package(tpkg) @@ -524,9 +540,30 @@ func constValue(value ssa.Value) *ssa.Const { // anyRealUse reports whether a parameter has any relevant use within its // function body. Certain uses are ignored, such as recursive calls where the // parameter is re-used as itself. -func anyRealUse(par *ssa.Parameter, pos int) bool { +func (c *Checker) anyRealUse(par *ssa.Parameter, pos int, pkg *packages.Package) bool { + refs := *par.Referrers() + if len(refs) == 0 { + // Look for any uses like "_ = par", which are the developer's + // way to tell they want to keep the parameter. SSA does not + // keep that kind of statement around. 
+ body := c.funcBodyByPos[par.Parent().Pos()] + any := false + ast.Inspect(body, func(node ast.Node) bool { + if any { + return false + } + if id, ok := node.(*ast.Ident); ok { + obj := pkg.TypesInfo.Uses[id] + if obj != nil && obj.Pos() == par.Pos() { + any = true + } + } + return true + }) + return any + } refLoop: - for _, ref := range *par.Referrers() { + for _, ref := range refs { switch x := ref.(type) { case *ssa.Call: if x.Call.Value != par.Parent() { @@ -641,6 +678,11 @@ func (c *Checker) declCounts(pkgDir string, pkgName string) map[string]int { c.cachedDeclCounts[pkgDir] = nil return nil } + if len(pkgs) == 0 { + // TODO: investigate why this started happening after switching + // to go/packages + return nil + } pkg := pkgs[pkgName] count := make(map[string]int) for _, file := range pkg.Files { @@ -682,12 +724,12 @@ func recvPrefix(recv *ast.FieldList) string { // source code. For example, if there are different function bodies depending on // the operating system or architecture. That tends to mean that an unused // parameter in one implementation may not be unused in another. -func (c *Checker) multipleImpls(info *loader.PackageInfo, fn *ssa.Function) bool { +func (c *Checker) multipleImpls(pkg *packages.Package, fn *ssa.Function) bool { if fn.Parent() != nil { // nested func return false } path := c.prog.Fset.Position(fn.Pos()).Filename - count := c.declCounts(filepath.Dir(path), info.Pkg.Name()) + count := c.declCounts(filepath.Dir(path), pkg.Types.Name()) name := fn.Name() if fn.Signature.Recv() != nil { tp := fn.Params[0].Type()