aboutsummaryrefslogtreecommitdiffstats
path: root/vendor
diff options
context:
space:
mode:
authorDmitry Vyukov <dvyukov@google.com>2020-07-04 10:38:29 +0200
committerDmitry Vyukov <dvyukov@google.com>2020-07-04 15:05:30 +0200
commitdcff124efb2ea4a834b74ac0974aa2f2fd000b40 (patch)
tree8d49a1e0849baa283d09c7227ec9d2311a34258a /vendor
parent4f739670f77d37168a44be2139f4005b748a825d (diff)
go.mod: switch to modules for dependency management
Godep is long deprecated and modules is the future. Updating dependencies with godep is painful and non-transparent. This will hopefully help to create custom golangci-lint linters. The change was created with: go mod init rm -rf vendor go mod vendor Fixes #1247
Diffstat (limited to 'vendor')
-rw-r--r--vendor/cloud.google.com/go/compute/metadata/metadata.go2
-rw-r--r--vendor/cloud.google.com/go/doc.go2
-rw-r--r--[-rwxr-xr-x]vendor/cloud.google.com/go/internal/version/update_version.sh0
-rw-r--r--vendor/cloud.google.com/go/storage/CHANGES.md45
-rw-r--r--vendor/cloud.google.com/go/storage/LICENSE202
-rw-r--r--vendor/cloud.google.com/go/storage/bucket.go143
-rw-r--r--vendor/cloud.google.com/go/storage/doc.go29
-rw-r--r--vendor/cloud.google.com/go/storage/go.mod17
-rw-r--r--vendor/cloud.google.com/go/storage/go.sum110
-rw-r--r--vendor/cloud.google.com/go/storage/hmac.go66
-rw-r--r--vendor/cloud.google.com/go/storage/storage.go124
-rw-r--r--vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go89
-rw-r--r--vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go207
-rw-r--r--vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go147
-rw-r--r--vendor/github.com/google/go-cmp/cmp/cmpopts/struct_filter.go182
-rw-r--r--vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go35
-rw-r--r--vendor/github.com/google/go-cmp/cmp/compare.go83
-rw-r--r--vendor/github.com/google/go-cmp/cmp/export_panic.go4
-rw-r--r--vendor/github.com/google/go-cmp/cmp/export_unsafe.go6
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/testprotos/protos.go116
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/teststructs/project1.go267
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/teststructs/project2.go74
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/teststructs/project3.go82
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/teststructs/project4.go142
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/teststructs/structs.go197
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/value/sort.go4
-rw-r--r--vendor/github.com/google/go-cmp/cmp/internal/value/zero.go9
-rw-r--r--vendor/github.com/google/go-cmp/cmp/options.go55
-rw-r--r--vendor/github.com/google/go-cmp/cmp/path.go71
-rw-r--r--vendor/github.com/google/go-cmp/cmp/report_compare.go2
-rw-r--r--vendor/github.com/google/go-cmp/cmp/report_reflect.go1
-rw-r--r--vendor/github.com/google/go-cmp/cmp/report_slices.go4
-rw-r--r--vendor/github.com/google/go-cmp/cmp/report_text.go7
-rw-r--r--vendor/github.com/googleapis/gax-go/.gitignore1
-rw-r--r--vendor/github.com/googleapis/gax-go/CODE_OF_CONDUCT.md43
-rw-r--r--vendor/github.com/googleapis/gax-go/CONTRIBUTING.md32
-rw-r--r--vendor/github.com/googleapis/gax-go/README.md27
-rw-r--r--vendor/github.com/googleapis/gax-go/RELEASING.md30
-rw-r--r--vendor/github.com/googleapis/gax-go/call_option.go71
-rw-r--r--vendor/github.com/googleapis/gax-go/gax.go39
-rw-r--r--vendor/github.com/googleapis/gax-go/go.mod13
-rw-r--r--vendor/github.com/googleapis/gax-go/go.sum35
-rw-r--r--vendor/github.com/googleapis/gax-go/header.go40
-rw-r--r--vendor/github.com/googleapis/gax-go/invoke.go52
-rw-r--r--vendor/github.com/googleapis/gax-go/tools.go33
-rw-r--r--vendor/github.com/googleapis/gax-go/v2/LICENSE (renamed from vendor/github.com/googleapis/gax-go/LICENSE)0
-rw-r--r--vendor/github.com/googleapis/gax-go/v2/call_option.go19
-rw-r--r--vendor/github.com/googleapis/gax-go/v2/gax.go2
-rw-r--r--vendor/github.com/googleapis/gax-go/v2/go.mod2
-rw-r--r--vendor/github.com/googleapis/gax-go/v2/invoke.go4
-rw-r--r--vendor/github.com/ianlancetaylor/demangle/c++filt.go144
-rw-r--r--vendor/go.opencensus.io/internal/internal.go2
-rw-r--r--vendor/go.opencensus.io/internal/tagencoding/tagencoding.go2
-rw-r--r--vendor/go.opencensus.io/metric/metricdata/doc.go2
-rw-r--r--vendor/go.opencensus.io/opencensus.go2
-rw-r--r--vendor/go.opencensus.io/plugin/ochttp/doc.go2
-rw-r--r--vendor/go.opencensus.io/plugin/ochttp/propagation/b3/b3.go2
-rw-r--r--vendor/go.opencensus.io/stats/doc.go2
-rw-r--r--vendor/go.opencensus.io/stats/view/doc.go2
-rw-r--r--vendor/go.opencensus.io/tag/doc.go2
-rw-r--r--vendor/go.opencensus.io/trace/doc.go2
-rw-r--r--vendor/go.opencensus.io/trace/propagation/propagation.go2
-rw-r--r--vendor/golang.org/x/lint/golint/importcomment.go2
-rw-r--r--vendor/golang.org/x/lint/lint.go2
-rw-r--r--vendor/golang.org/x/net/context/context.go2
-rw-r--r--vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go2
-rw-r--r--vendor/golang.org/x/net/http2/http2.go2
-rw-r--r--vendor/golang.org/x/net/idna/idna10.0.0.go2
-rw-r--r--vendor/golang.org/x/net/idna/idna9.0.0.go2
-rw-r--r--vendor/golang.org/x/net/internal/timeseries/timeseries.go2
-rw-r--r--vendor/golang.org/x/net/trace/trace.go2
-rw-r--r--vendor/golang.org/x/oauth2/google/doc.go2
-rw-r--r--vendor/golang.org/x/oauth2/jws/jws.go2
-rw-r--r--vendor/golang.org/x/oauth2/oauth2.go2
-rw-r--r--[-rwxr-xr-x]vendor/golang.org/x/sys/unix/mkall.sh0
-rw-r--r--vendor/golang.org/x/sys/unix/mkasm_darwin.go78
-rw-r--r--[-rwxr-xr-x]vendor/golang.org/x/sys/unix/mkerrors.sh0
-rw-r--r--vendor/golang.org/x/sys/unix/mkmerge.go521
-rw-r--r--vendor/golang.org/x/sys/unix/mkpost.go127
-rw-r--r--vendor/golang.org/x/sys/unix/mksyscall.go402
-rw-r--r--vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go415
-rw-r--r--vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go614
-rw-r--r--vendor/golang.org/x/sys/unix/mksyscall_solaris.go341
-rw-r--r--vendor/golang.org/x/sys/unix/mksysctl_openbsd.go355
-rw-r--r--vendor/golang.org/x/sys/unix/mksysnum.go190
-rw-r--r--vendor/golang.org/x/sys/unix/syscall.go2
-rw-r--r--vendor/golang.org/x/sys/unix/types_aix.go237
-rw-r--r--vendor/golang.org/x/sys/unix/types_darwin.go283
-rw-r--r--vendor/golang.org/x/sys/unix/types_dragonfly.go269
-rw-r--r--vendor/golang.org/x/sys/unix/types_freebsd.go406
-rw-r--r--vendor/golang.org/x/sys/unix/types_netbsd.go300
-rw-r--r--vendor/golang.org/x/sys/unix/types_openbsd.go283
-rw-r--r--vendor/golang.org/x/sys/unix/types_solaris.go269
-rw-r--r--vendor/golang.org/x/text/transform/transform.go2
-rw-r--r--vendor/golang.org/x/text/unicode/bidi/bidi.go2
-rw-r--r--vendor/golang.org/x/text/unicode/bidi/gen.go133
-rw-r--r--vendor/golang.org/x/text/unicode/bidi/gen_ranges.go57
-rw-r--r--vendor/golang.org/x/text/unicode/bidi/gen_trieval.go64
-rw-r--r--vendor/golang.org/x/text/unicode/norm/maketables.go986
-rw-r--r--vendor/golang.org/x/text/unicode/norm/normalize.go2
-rw-r--r--vendor/golang.org/x/text/unicode/norm/triegen.go117
-rw-r--r--vendor/golang.org/x/tools/cmd/goimports/doc.go2
-rw-r--r--vendor/golang.org/x/tools/go/analysis/doc.go33
-rw-r--r--vendor/golang.org/x/tools/go/ast/astutil/imports.go7
-rw-r--r--vendor/golang.org/x/tools/go/ast/inspector/inspector.go4
-rw-r--r--vendor/golang.org/x/tools/go/buildutil/allpackages.go2
-rw-r--r--vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go2
-rw-r--r--vendor/golang.org/x/tools/go/gcexportdata/main.go99
-rw-r--r--vendor/golang.org/x/tools/go/internal/cgo/cgo.go220
-rw-r--r--vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go39
-rw-r--r--vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go2
-rw-r--r--vendor/golang.org/x/tools/go/loader/doc.go204
-rw-r--r--vendor/golang.org/x/tools/go/loader/loader.go1086
-rw-r--r--vendor/golang.org/x/tools/go/loader/util.go124
-rw-r--r--vendor/golang.org/x/tools/go/packages/doc.go2
-rw-r--r--vendor/golang.org/x/tools/go/packages/golist.go576
-rw-r--r--vendor/golang.org/x/tools/go/packages/golist_overlay.go176
-rw-r--r--vendor/golang.org/x/tools/go/packages/packages.go38
-rw-r--r--vendor/golang.org/x/tools/go/types/typeutil/map.go2
-rw-r--r--vendor/golang.org/x/tools/internal/imports/fix.go583
-rw-r--r--vendor/golang.org/x/tools/internal/imports/imports.go21
-rw-r--r--vendor/golang.org/x/tools/internal/imports/mkindex.go173
-rw-r--r--vendor/golang.org/x/tools/internal/imports/mkstdlib.go128
-rw-r--r--vendor/golang.org/x/tools/internal/imports/mod.go271
-rw-r--r--vendor/golang.org/x/tools/internal/imports/mod_cache.go94
-rw-r--r--vendor/golang.org/x/tools/internal/packagesinternal/packages.go4
-rw-r--r--vendor/google.golang.org/api/compute/v0.beta/compute-gen.go2
-rw-r--r--vendor/google.golang.org/api/googleapi/googleapi.go2
-rw-r--r--vendor/google.golang.org/api/storage/v1/storage-gen.go2
-rw-r--r--vendor/google.golang.org/appengine/appengine.go2
-rw-r--r--vendor/google.golang.org/appengine/datastore/doc.go2
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto0
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/appengine/internal/regen.sh0
-rw-r--r--vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go527
-rw-r--r--vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto64
-rw-r--r--vendor/google.golang.org/appengine/log/log.go2
-rw-r--r--vendor/google.golang.org/appengine/mail/mail.go2
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/appengine/travis_install.sh0
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/appengine/travis_test.sh0
-rw-r--r--vendor/google.golang.org/appengine/urlfetch/urlfetch.go210
-rw-r--r--vendor/google.golang.org/appengine/user/user.go2
-rw-r--r--vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go2
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/grpc/codegen.sh0
-rw-r--r--vendor/google.golang.org/grpc/codes/codes.go2
-rw-r--r--vendor/google.golang.org/grpc/credentials/credentials.go2
-rw-r--r--vendor/google.golang.org/grpc/doc.go2
-rw-r--r--vendor/google.golang.org/grpc/grpclog/grpclog.go2
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/grpc/install_gae.sh0
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/grpc/internal/binarylog/regenerate.sh0
-rw-r--r--vendor/google.golang.org/grpc/metadata/metadata.go2
-rw-r--r--vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go36
-rw-r--r--vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go26
-rw-r--r--vendor/google.golang.org/grpc/stats/stats.go2
-rw-r--r--[-rwxr-xr-x]vendor/google.golang.org/grpc/vet.sh0
-rw-r--r--vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY60
-rw-r--r--vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go2
-rw-r--r--vendor/honnef.co/go/tools/code/code.go481
-rw-r--r--vendor/honnef.co/go/tools/config/config.go79
-rw-r--r--vendor/honnef.co/go/tools/edit/edit.go67
-rw-r--r--vendor/honnef.co/go/tools/facts/generated.go4
-rw-r--r--vendor/honnef.co/go/tools/facts/purity.go78
-rw-r--r--vendor/honnef.co/go/tools/functions/loops.go12
-rw-r--r--vendor/honnef.co/go/tools/functions/pure.go46
-rw-r--r--vendor/honnef.co/go/tools/functions/stub.go32
-rw-r--r--vendor/honnef.co/go/tools/functions/terminates.go60
-rw-r--r--vendor/honnef.co/go/tools/internal/cache/cache.go32
-rw-r--r--vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go113
-rw-r--r--vendor/honnef.co/go/tools/internal/passes/buildssa/buildssa.go116
-rw-r--r--vendor/honnef.co/go/tools/internal/renameio/renameio.go56
-rw-r--r--vendor/honnef.co/go/tools/internal/robustio/robustio.go53
-rw-r--r--vendor/honnef.co/go/tools/internal/robustio/robustio_other.go28
-rw-r--r--vendor/honnef.co/go/tools/internal/robustio/robustio_windows.go112
-rw-r--r--vendor/honnef.co/go/tools/internal/sharedcheck/lint.go21
-rw-r--r--vendor/honnef.co/go/tools/ir/exits.go271
-rw-r--r--vendor/honnef.co/go/tools/ir/html.go1130
-rw-r--r--vendor/honnef.co/go/tools/ir/irutil/load.go183
-rw-r--r--vendor/honnef.co/go/tools/ir/irutil/switch.go264
-rw-r--r--vendor/honnef.co/go/tools/ir/irutil/visit.go79
-rw-r--r--vendor/honnef.co/go/tools/ir/write.go5
-rw-r--r--vendor/honnef.co/go/tools/lint/lint.go60
-rw-r--r--vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go402
-rw-r--r--vendor/honnef.co/go/tools/lint/lintutil/format/format.go43
-rw-r--r--vendor/honnef.co/go/tools/lint/lintutil/util.go72
-rw-r--r--vendor/honnef.co/go/tools/lint/runner.go318
-rw-r--r--vendor/honnef.co/go/tools/lint/stats.go18
-rw-r--r--vendor/honnef.co/go/tools/loader/loader.go43
-rw-r--r--vendor/honnef.co/go/tools/pattern/convert.go242
-rw-r--r--vendor/honnef.co/go/tools/pattern/doc.go273
-rw-r--r--vendor/honnef.co/go/tools/pattern/fuzz.go50
-rw-r--r--vendor/honnef.co/go/tools/pattern/lexer.go221
-rw-r--r--vendor/honnef.co/go/tools/pattern/match.go513
-rw-r--r--vendor/honnef.co/go/tools/pattern/parser.go455
-rw-r--r--vendor/honnef.co/go/tools/pattern/pattern.go497
-rw-r--r--vendor/honnef.co/go/tools/report/report.go184
-rw-r--r--vendor/honnef.co/go/tools/simple/CONTRIBUTING.md15
-rw-r--r--vendor/honnef.co/go/tools/simple/analysis.go183
-rw-r--r--vendor/honnef.co/go/tools/simple/doc.go116
-rw-r--r--vendor/honnef.co/go/tools/simple/lint.go1520
-rw-r--r--vendor/honnef.co/go/tools/ssa/LICENSE (renamed from vendor/honnef.co/go/tools/ir/LICENSE)0
-rw-r--r--vendor/honnef.co/go/tools/ssa/blockopt.go (renamed from vendor/honnef.co/go/tools/ir/blockopt.go)50
-rw-r--r--vendor/honnef.co/go/tools/ssa/builder.go (renamed from vendor/honnef.co/go/tools/ir/builder.go)1075
-rw-r--r--vendor/honnef.co/go/tools/ssa/const.go (renamed from vendor/honnef.co/go/tools/ir/const.go)48
-rw-r--r--vendor/honnef.co/go/tools/ssa/create.go (renamed from vendor/honnef.co/go/tools/ir/create.go)51
-rw-r--r--vendor/honnef.co/go/tools/ssa/doc.go (renamed from vendor/honnef.co/go/tools/ir/doc.go)58
-rw-r--r--vendor/honnef.co/go/tools/ssa/dom.go (renamed from vendor/honnef.co/go/tools/ir/dom.go)350
-rw-r--r--vendor/honnef.co/go/tools/ssa/emit.go (renamed from vendor/honnef.co/go/tools/ir/emit.go)181
-rw-r--r--vendor/honnef.co/go/tools/ssa/func.go (renamed from vendor/honnef.co/go/tools/ir/func.go)514
-rw-r--r--vendor/honnef.co/go/tools/ssa/identical.go (renamed from vendor/honnef.co/go/tools/ir/identical.go)2
-rw-r--r--vendor/honnef.co/go/tools/ssa/identical_17.go (renamed from vendor/honnef.co/go/tools/ir/identical_17.go)2
-rw-r--r--vendor/honnef.co/go/tools/ssa/lift.go (renamed from vendor/honnef.co/go/tools/ir/lift.go)568
-rw-r--r--vendor/honnef.co/go/tools/ssa/lvalue.go (renamed from vendor/honnef.co/go/tools/ir/lvalue.go)55
-rw-r--r--vendor/honnef.co/go/tools/ssa/methods.go (renamed from vendor/honnef.co/go/tools/ir/methods.go)2
-rw-r--r--vendor/honnef.co/go/tools/ssa/mode.go (renamed from vendor/honnef.co/go/tools/ir/mode.go)40
-rw-r--r--vendor/honnef.co/go/tools/ssa/print.go (renamed from vendor/honnef.co/go/tools/ir/print.go)245
-rw-r--r--vendor/honnef.co/go/tools/ssa/sanity.go (renamed from vendor/honnef.co/go/tools/ir/sanity.go)68
-rw-r--r--vendor/honnef.co/go/tools/ssa/source.go (renamed from vendor/honnef.co/go/tools/ir/source.go)69
-rw-r--r--vendor/honnef.co/go/tools/ssa/ssa.go (renamed from vendor/honnef.co/go/tools/ir/ssa.go)632
-rw-r--r--vendor/honnef.co/go/tools/ssa/staticcheck.conf (renamed from vendor/honnef.co/go/tools/ir/staticcheck.conf)0
-rw-r--r--vendor/honnef.co/go/tools/ssa/testmain.go271
-rw-r--r--vendor/honnef.co/go/tools/ssa/util.go (renamed from vendor/honnef.co/go/tools/ir/util.go)32
-rw-r--r--vendor/honnef.co/go/tools/ssa/wrappers.go (renamed from vendor/honnef.co/go/tools/ir/wrappers.go)30
-rw-r--r--vendor/honnef.co/go/tools/ssa/write.go5
-rw-r--r--vendor/honnef.co/go/tools/ssautil/ssautil.go (renamed from vendor/honnef.co/go/tools/ir/irutil/util.go)24
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/CONTRIBUTING.md15
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/analysis.go380
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/buildtag.go4
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/doc.go224
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/knowledge.go25
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/lint.go1599
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/rules.go72
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/channel.go73
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/int.go476
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/slice.go273
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/string.go258
-rw-r--r--vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go1056
-rw-r--r--vendor/honnef.co/go/tools/stylecheck/analysis.go90
-rw-r--r--vendor/honnef.co/go/tools/stylecheck/doc.go82
-rw-r--r--vendor/honnef.co/go/tools/stylecheck/lint.go495
-rw-r--r--vendor/honnef.co/go/tools/stylecheck/names.go180
-rw-r--r--vendor/honnef.co/go/tools/unused/unused.go302
-rw-r--r--vendor/honnef.co/go/tools/version/version.go2
-rw-r--r--vendor/modules.txt229
242 files changed, 9710 insertions, 23503 deletions
diff --git a/vendor/cloud.google.com/go/compute/metadata/metadata.go b/vendor/cloud.google.com/go/compute/metadata/metadata.go
index f84e41420..9b1afb5cc 100644
--- a/vendor/cloud.google.com/go/compute/metadata/metadata.go
+++ b/vendor/cloud.google.com/go/compute/metadata/metadata.go
@@ -17,7 +17,7 @@
//
// This package is a wrapper around the GCE metadata service,
// as documented at https://developers.google.com/compute/docs/metadata.
-package metadata
+package metadata // import "cloud.google.com/go/compute/metadata"
import (
"context"
diff --git a/vendor/cloud.google.com/go/doc.go b/vendor/cloud.google.com/go/doc.go
index f3293c094..237d84561 100644
--- a/vendor/cloud.google.com/go/doc.go
+++ b/vendor/cloud.google.com/go/doc.go
@@ -97,4 +97,4 @@ making breaking changes, including removal.
These parts of the surface will be labeled with the note, "It is EXPERIMENTAL
and subject to change or removal without notice."
*/
-package cloud
+package cloud // import "cloud.google.com/go"
diff --git a/vendor/cloud.google.com/go/internal/version/update_version.sh b/vendor/cloud.google.com/go/internal/version/update_version.sh
index d7c5a3e21..d7c5a3e21 100755..100644
--- a/vendor/cloud.google.com/go/internal/version/update_version.sh
+++ b/vendor/cloud.google.com/go/internal/version/update_version.sh
diff --git a/vendor/cloud.google.com/go/storage/CHANGES.md b/vendor/cloud.google.com/go/storage/CHANGES.md
index 6b570f785..952fff68e 100644
--- a/vendor/cloud.google.com/go/storage/CHANGES.md
+++ b/vendor/cloud.google.com/go/storage/CHANGES.md
@@ -1,49 +1,6 @@
# Changes
-## v1.5.0
-
-- Honor WithEndpoint client option for reads as well as writes.
-- Add archive storage class to docs.
-- Make fixes to storage benchwrapper.
-
-## v1.4.0
-
-- When listing objects in a bucket, allow callers to specify which attributes
- are queried. This allows for performance optimization.
-
-## v1.3.0
-
-- Use `storage.googleapis.com/storage/v1` by default for GCS requests
- instead of `www.googleapis.com/storage/v1`.
-
-## v1.2.1
-
-- Fixed a bug where UniformBucketLevelAccess and BucketPolicyOnly were not
- being sent in all cases.
-
-## v1.2.0
-
-- Add support for UniformBucketLevelAccess. This configures access checks
- to use only bucket-level IAM policies.
- See: https://godoc.org/cloud.google.com/go/storage#UniformBucketLevelAccess.
-- Fix userAgent to use correct version.
-
-## v1.1.2
-
-- Fix memory leak in BucketIterator and ObjectIterator.
-
-## v1.1.1
-
-- Send BucketPolicyOnly even when it's disabled.
-
-## v1.1.0
-
-- Performance improvements for ObjectIterator and BucketIterator.
-- Fix Bucket.ObjectIterator size calculation checks.
-- Added HMACKeyOptions to all the methods which allows for options such as
- UserProject to be set per invocation and optionally be used.
-
## v1.0.0
This is the first tag to carve out storage as its own module. See:
-https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository.
+https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository. \ No newline at end of file
diff --git a/vendor/cloud.google.com/go/storage/LICENSE b/vendor/cloud.google.com/go/storage/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/cloud.google.com/go/storage/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/cloud.google.com/go/storage/bucket.go b/vendor/cloud.google.com/go/storage/bucket.go
index dae11ff96..0ba45e8f8 100644
--- a/vendor/cloud.google.com/go/storage/bucket.go
+++ b/vendor/cloud.google.com/go/storage/bucket.go
@@ -232,18 +232,10 @@ type BucketAttrs struct {
// ACL is the list of access control rules on the bucket.
ACL []ACLRule
- // BucketPolicyOnly is an alias for UniformBucketLevelAccess. Use of
- // UniformBucketLevelAccess is recommended above the use of this field.
- // Setting BucketPolicyOnly.Enabled OR UniformBucketLevelAccess.Enabled to
- // true, will enable UniformBucketLevelAccess.
+ // BucketPolicyOnly configures access checks to use only bucket-level IAM
+ // policies.
BucketPolicyOnly BucketPolicyOnly
- // UniformBucketLevelAccess configures access checks to use only bucket-level IAM
- // policies and ignore any ACL rules for the bucket.
- // See https://cloud.google.com/storage/docs/uniform-bucket-level-access
- // for more information.
- UniformBucketLevelAccess UniformBucketLevelAccess
-
// DefaultObjectACL is the list of access controls to
// apply to new objects when no object ACL is provided.
DefaultObjectACL []ACLRule
@@ -275,10 +267,8 @@ type BucketAttrs struct {
// StorageClass is the default storage class of the bucket. This defines
// how objects in the bucket are stored and determines the SLA
- // and the cost of storage. Typical values are "STANDARD", "NEARLINE",
- // "COLDLINE" and "ARCHIVE". Defaults to "STANDARD".
- // See https://cloud.google.com/storage/docs/storage-classes for all
- // valid values.
+ // and the cost of storage. Typical values are "NEARLINE", "COLDLINE" and
+ // "STANDARD". Defaults to "STANDARD".
StorageClass string
// Created is the creation time of the bucket.
@@ -331,20 +321,9 @@ type BucketAttrs struct {
LocationType string
}
-// BucketPolicyOnly is an alias for UniformBucketLevelAccess.
-// Use of UniformBucketLevelAccess is preferred above BucketPolicyOnly.
-type BucketPolicyOnly struct {
- // Enabled specifies whether access checks use only bucket-level IAM
- // policies. Enabled may be disabled until the locked time.
- Enabled bool
- // LockedTime specifies the deadline for changing Enabled from true to
- // false.
- LockedTime time.Time
-}
-
-// UniformBucketLevelAccess configures access checks to use only bucket-level IAM
+// BucketPolicyOnly configures access checks to use only bucket-level IAM
// policies.
-type UniformBucketLevelAccess struct {
+type BucketPolicyOnly struct {
// Enabled specifies whether access checks use only bucket-level IAM
// policies. Enabled may be disabled until the locked time.
Enabled bool
@@ -461,7 +440,7 @@ type LifecycleCondition struct {
// MatchesStorageClasses is the condition matching the object's storage
// class.
//
- // Values include "STANDARD", "NEARLINE", "COLDLINE" and "ARCHIVE".
+ // Values include "NEARLINE", "COLDLINE" and "STANDARD".
MatchesStorageClasses []string
// NumNewerVersions is the condition matching objects with a number of newer versions.
@@ -509,27 +488,26 @@ func newBucket(b *raw.Bucket) (*BucketAttrs, error) {
return nil, err
}
return &BucketAttrs{
- Name: b.Name,
- Location: b.Location,
- MetaGeneration: b.Metageneration,
- DefaultEventBasedHold: b.DefaultEventBasedHold,
- StorageClass: b.StorageClass,
- Created: convertTime(b.TimeCreated),
- VersioningEnabled: b.Versioning != nil && b.Versioning.Enabled,
- ACL: toBucketACLRules(b.Acl),
- DefaultObjectACL: toObjectACLRules(b.DefaultObjectAcl),
- Labels: b.Labels,
- RequesterPays: b.Billing != nil && b.Billing.RequesterPays,
- Lifecycle: toLifecycle(b.Lifecycle),
- RetentionPolicy: rp,
- CORS: toCORS(b.Cors),
- Encryption: toBucketEncryption(b.Encryption),
- Logging: toBucketLogging(b.Logging),
- Website: toBucketWebsite(b.Website),
- BucketPolicyOnly: toBucketPolicyOnly(b.IamConfiguration),
- UniformBucketLevelAccess: toUniformBucketLevelAccess(b.IamConfiguration),
- Etag: b.Etag,
- LocationType: b.LocationType,
+ Name: b.Name,
+ Location: b.Location,
+ MetaGeneration: b.Metageneration,
+ DefaultEventBasedHold: b.DefaultEventBasedHold,
+ StorageClass: b.StorageClass,
+ Created: convertTime(b.TimeCreated),
+ VersioningEnabled: b.Versioning != nil && b.Versioning.Enabled,
+ ACL: toBucketACLRules(b.Acl),
+ DefaultObjectACL: toObjectACLRules(b.DefaultObjectAcl),
+ Labels: b.Labels,
+ RequesterPays: b.Billing != nil && b.Billing.RequesterPays,
+ Lifecycle: toLifecycle(b.Lifecycle),
+ RetentionPolicy: rp,
+ CORS: toCORS(b.Cors),
+ Encryption: toBucketEncryption(b.Encryption),
+ Logging: toBucketLogging(b.Logging),
+ Website: toBucketWebsite(b.Website),
+ BucketPolicyOnly: toBucketPolicyOnly(b.IamConfiguration),
+ Etag: b.Etag,
+ LocationType: b.LocationType,
}, nil
}
@@ -555,9 +533,9 @@ func (b *BucketAttrs) toRawBucket() *raw.Bucket {
bb = &raw.BucketBilling{RequesterPays: true}
}
var bktIAM *raw.BucketIamConfiguration
- if b.UniformBucketLevelAccess.Enabled || b.BucketPolicyOnly.Enabled {
+ if b.BucketPolicyOnly.Enabled {
bktIAM = &raw.BucketIamConfiguration{
- UniformBucketLevelAccess: &raw.BucketIamConfigurationUniformBucketLevelAccess{
+ BucketPolicyOnly: &raw.BucketIamConfigurationBucketPolicyOnly{
Enabled: true,
},
}
@@ -624,20 +602,10 @@ type BucketAttrsToUpdate struct {
// newly created objects in this bucket.
DefaultEventBasedHold optional.Bool
- // BucketPolicyOnly is an alias for UniformBucketLevelAccess. Use of
- // UniformBucketLevelAccess is recommended above the use of this field.
- // Setting BucketPolicyOnly.Enabled OR UniformBucketLevelAccess.Enabled to
- // true, will enable UniformBucketLevelAccess. If both BucketPolicyOnly and
- // UniformBucketLevelAccess are set, the value of UniformBucketLevelAccess
- // will take precedence.
+ // BucketPolicyOnly configures access checks to use only bucket-level IAM
+ // policies.
BucketPolicyOnly *BucketPolicyOnly
- // UniformBucketLevelAccess configures access checks to use only bucket-level IAM
- // policies and ignore any ACL rules for the bucket.
- // See https://cloud.google.com/storage/docs/uniform-bucket-level-access
- // for more information.
- UniformBucketLevelAccess *UniformBucketLevelAccess
-
// If set, updates the retention policy of the bucket. Using
// RetentionPolicy.RetentionPeriod = 0 will delete the existing policy.
//
@@ -726,17 +694,8 @@ func (ua *BucketAttrsToUpdate) toRawBucket() *raw.Bucket {
}
if ua.BucketPolicyOnly != nil {
rb.IamConfiguration = &raw.BucketIamConfiguration{
- UniformBucketLevelAccess: &raw.BucketIamConfigurationUniformBucketLevelAccess{
- Enabled: ua.BucketPolicyOnly.Enabled,
- ForceSendFields: []string{"Enabled"},
- },
- }
- }
- if ua.UniformBucketLevelAccess != nil {
- rb.IamConfiguration = &raw.BucketIamConfiguration{
- UniformBucketLevelAccess: &raw.BucketIamConfigurationUniformBucketLevelAccess{
- Enabled: ua.UniformBucketLevelAccess.Enabled,
- ForceSendFields: []string{"Enabled"},
+ BucketPolicyOnly: &raw.BucketIamConfigurationBucketPolicyOnly{
+ Enabled: ua.BucketPolicyOnly.Enabled,
},
}
}
@@ -1075,26 +1034,8 @@ func toBucketPolicyOnly(b *raw.BucketIamConfiguration) BucketPolicyOnly {
}
}
-func toUniformBucketLevelAccess(b *raw.BucketIamConfiguration) UniformBucketLevelAccess {
- if b == nil || b.UniformBucketLevelAccess == nil || !b.UniformBucketLevelAccess.Enabled {
- return UniformBucketLevelAccess{}
- }
- lt, err := time.Parse(time.RFC3339, b.UniformBucketLevelAccess.LockedTime)
- if err != nil {
- return UniformBucketLevelAccess{
- Enabled: true,
- }
- }
- return UniformBucketLevelAccess{
- Enabled: true,
- LockedTime: lt,
- }
-}
-
// Objects returns an iterator over the objects in the bucket that match the Query q.
// If q is nil, no filtering is done.
-//
-// Note: The returned iterator is not safe for concurrent operations without explicit synchronization.
func (b *BucketHandle) Objects(ctx context.Context, q *Query) *ObjectIterator {
it := &ObjectIterator{
ctx: ctx,
@@ -1111,8 +1052,6 @@ func (b *BucketHandle) Objects(ctx context.Context, q *Query) *ObjectIterator {
}
// An ObjectIterator is an iterator over ObjectAttrs.
-//
-// Note: This iterator is not safe for concurrent operations without explicit synchronization.
type ObjectIterator struct {
ctx context.Context
bucket *BucketHandle
@@ -1123,8 +1062,6 @@ type ObjectIterator struct {
}
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
-//
-// Note: This method is not safe for concurrent operations without explicit synchronization.
func (it *ObjectIterator) PageInfo() *iterator.PageInfo { return it.pageInfo }
// Next returns the next result. Its second return value is iterator.Done if
@@ -1134,8 +1071,6 @@ func (it *ObjectIterator) PageInfo() *iterator.PageInfo { return it.pageInfo }
// If Query.Delimiter is non-empty, some of the ObjectAttrs returned by Next will
// have a non-empty Prefix field, and a zero value for all other fields. These
// represent prefixes.
-//
-// Note: This method is not safe for concurrent operations without explicit synchronization.
func (it *ObjectIterator) Next() (*ObjectAttrs, error) {
if err := it.nextFunc(); err != nil {
return nil, err
@@ -1152,9 +1087,6 @@ func (it *ObjectIterator) fetch(pageSize int, pageToken string) (string, error)
req.Delimiter(it.query.Delimiter)
req.Prefix(it.query.Prefix)
req.Versions(it.query.Versions)
- if len(it.query.fieldSelection) > 0 {
- req.Fields("nextPageToken", googleapi.Field(it.query.fieldSelection))
- }
req.PageToken(pageToken)
if it.bucket.userProject != "" {
req.UserProject(it.bucket.userProject)
@@ -1187,8 +1119,6 @@ func (it *ObjectIterator) fetch(pageSize int, pageToken string) (string, error)
// optionally set the iterator's Prefix field to restrict the list to buckets
// whose names begin with the prefix. By default, all buckets in the project
// are returned.
-//
-// Note: The returned iterator is not safe for concurrent operations without explicit synchronization.
func (c *Client) Buckets(ctx context.Context, projectID string) *BucketIterator {
it := &BucketIterator{
ctx: ctx,
@@ -1199,13 +1129,10 @@ func (c *Client) Buckets(ctx context.Context, projectID string) *BucketIterator
it.fetch,
func() int { return len(it.buckets) },
func() interface{} { b := it.buckets; it.buckets = nil; return b })
-
return it
}
// A BucketIterator is an iterator over BucketAttrs.
-//
-// Note: This iterator is not safe for concurrent operations without explicit synchronization.
type BucketIterator struct {
// Prefix restricts the iterator to buckets whose names begin with it.
Prefix string
@@ -1221,8 +1148,6 @@ type BucketIterator struct {
// Next returns the next result. Its second return value is iterator.Done if
// there are no more results. Once Next returns iterator.Done, all subsequent
// calls will return iterator.Done.
-//
-// Note: This method is not safe for concurrent operations without explicit synchronization.
func (it *BucketIterator) Next() (*BucketAttrs, error) {
if err := it.nextFunc(); err != nil {
return nil, err
@@ -1233,8 +1158,6 @@ func (it *BucketIterator) Next() (*BucketAttrs, error) {
}
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
-//
-// Note: This method is not safe for concurrent operations without explicit synchronization.
func (it *BucketIterator) PageInfo() *iterator.PageInfo { return it.pageInfo }
func (it *BucketIterator) fetch(pageSize int, pageToken string) (token string, err error) {
diff --git a/vendor/cloud.google.com/go/storage/doc.go b/vendor/cloud.google.com/go/storage/doc.go
index d33454ff6..88f645904 100644
--- a/vendor/cloud.google.com/go/storage/doc.go
+++ b/vendor/cloud.google.com/go/storage/doc.go
@@ -117,33 +117,6 @@ Objects also have attributes, which you can fetch with Attrs:
fmt.Printf("object %s has size %d and can be read using %s\n",
objAttrs.Name, objAttrs.Size, objAttrs.MediaLink)
-Listing objects
-
-Listing objects in a bucket is done with the Bucket.Objects method:
-
- query := &storage.Query{Prefix: ""}
-
- var names []string
- it := bkt.Objects(ctx, query)
- for {
- attrs, err := it.Next()
- if err == iterator.Done {
- break
- }
- if err != nil {
- log.Fatal(err)
- }
- names = append(names, attrs.Name)
- }
-
-If only a subset of object attributes is needed when listing, specifying this
-subset using Query.SetAttrSelection may speed up the listing process:
-
- query := &storage.Query{Prefix: ""}
- query.SetAttrSelection([]string{"Name"})
-
- // ... as before
-
ACLs
Both objects and buckets have ACLs (Access Control Lists). An ACL is a list of
@@ -200,4 +173,4 @@ These errors can be introspected for more information by type asserting to the r
if e.Code == 409 { ... }
}
*/
-package storage
+package storage // import "cloud.google.com/go/storage"
diff --git a/vendor/cloud.google.com/go/storage/go.mod b/vendor/cloud.google.com/go/storage/go.mod
index a82500e92..ce68c9daa 100644
--- a/vendor/cloud.google.com/go/storage/go.mod
+++ b/vendor/cloud.google.com/go/storage/go.mod
@@ -1,17 +1,14 @@
module cloud.google.com/go/storage
-go 1.11
+go 1.9
require (
- cloud.google.com/go v0.52.0
- cloud.google.com/go/bigquery v1.4.0 // indirect
+ cloud.google.com/go v0.46.3
github.com/golang/protobuf v1.3.2
- github.com/google/go-cmp v0.4.0
+ github.com/google/go-cmp v0.3.0
github.com/googleapis/gax-go/v2 v2.0.5
- golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d
- golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9 // indirect
- golang.org/x/tools v0.0.0-20200128002243-345141a36859 // indirect
- google.golang.org/api v0.15.0
- google.golang.org/genproto v0.0.0-20200128133413-58ce757ed39b
- google.golang.org/grpc v1.26.0
+ golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
+ google.golang.org/api v0.9.0
+ google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51
+ google.golang.org/grpc v1.21.1
)
diff --git a/vendor/cloud.google.com/go/storage/go.sum b/vendor/cloud.google.com/go/storage/go.sum
index 6fc00554c..96d9ee7c0 100644
--- a/vendor/cloud.google.com/go/storage/go.sum
+++ b/vendor/cloud.google.com/go/storage/go.sum
@@ -4,46 +4,20 @@ cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSR
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
-cloud.google.com/go v0.46.3 h1:AVXDdKsrtX33oR9fbCMu/+c1o8Ofjq6Ku/MInaLVg5Y=
-cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
-cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
-cloud.google.com/go v0.52.0 h1:GGslhk/BU052LPlnI1vpp3fcbUs+hQ3E+Doti/3/vF8=
-cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
+cloud.google.com/go v0.46.1/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go/bigquery v1.0.1 h1:hL+ycaJpVE9M7nLoiXb/Pn10ENE2u+oddxbD8uu0ZVU=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
-cloud.google.com/go/bigquery v1.3.0 h1:sAbMqjY1PEQKZBWfbu6Y6bsupJ9c4QdHnzg/VvYTLcE=
-cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
-cloud.google.com/go/bigquery v1.4.0 h1:xE3CPsOgttP4ACBePh79zTKALtXwn/Edhcr16R5hMWU=
-cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
cloud.google.com/go/datastore v1.0.0 h1:Kt+gOPPp2LEPWp8CSfxhsM8ik9CcyE/gYu+0r+RnZvM=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
+cloud.google.com/go/firestore v1.0.0/go.mod h1:SdFEKccng5n2jTXm5x01uXEvi4MBzxWFR6YI781XSJI=
cloud.google.com/go/pubsub v1.0.1 h1:W9tAK3E57P75u0XLLR82LZyw8VpAnhmyTOxW9qzmyj8=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
-cloud.google.com/go/pubsub v1.1.0 h1:9/vpR43S4aJaROxqQHQ3nH9lfyKKV0dC3vOmnw8ebQQ=
-cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
-cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
-cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
-dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
-github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
-github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
-github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
-github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
-github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7 h1:5ZkaAPbicIKTF2I64qf5Fh8Aa83Q/dnOafMYV0OMwjA=
-github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
-github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
@@ -56,14 +30,10 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
-github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
-github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=
@@ -71,40 +41,24 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024 h1:rBMNdlhTLzJjJSDIjNEXX1Pz3Hmwmz91v+zycvx9PJc=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
-github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
-github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0 h1:C9hSCOW830chIVkdja34wa6Ky+IzWllkUinR+BtRZd4=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
-go.opencensus.io v0.22.2 h1:75k/FF0Q2YM8QYo07VPddOLBslDt1MZOdEslOHvmzAs=
-go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979 h1:Agxu5KLo8o7Bb634SVDnhIfpTvxmzUwhbYAzBvXt6h4=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
-golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
-golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20191227195350-da58074b4299 h1:zQpM52jfKHG6II1ISZY1ZcpygvuSFZpLwfluuF89XOg=
-golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a h1:7Wlg8L54In96HTWOaI4sreLJ6qfyGuvSau5el3fK41Y=
-golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -114,14 +68,10 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac h1:8R1esu+8QioDxo4E4mX6bFztO+dMTM49DNAaWfO5OeY=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f h1:J5lckAjkw6qYlOZNj90mLYNTEKDvWeuc1yieZ8qUzUE=
-golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
-golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -133,25 +83,16 @@ golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa h1:F+8P+gmewFQYRk6JoLQLwjBCTu3mcIURZfNkVweuRKA=
-golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY=
-golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -161,15 +102,6 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0 h1:HyfiK1WMnHj5FXFXatD+Qs1A/xC2Run6RzeW1SyHxpc=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200113162924-86b910548bc1 h1:gZpLHxUX5BdYLA08Lj4YCJNN/jk7KtquiArPoeX0WvA=
-golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200122134326-e047566fdf82 h1:ywK/j/KkyTHcdyYSZNXGjMwgmDSfjglYZ3vStQ/gSCU=
-golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9 h1:1/DFK4b7JH8DmkqhUk48onnSfrPzImPoVxuomtbT2nk=
-golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
@@ -184,45 +116,23 @@ golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff h1:On1qIo75ByTwFJ4/W2bIqHcwJ9XAqtSWUs8GwRrIhtc=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200117161641-43d50277825c h1:2EA2K0k9bcvvEDlqD8xdlOhCOqq+O/p9Voqi4x9W1YU=
-golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a h1:7YaEqUc1tUg0yDwvdX+3U5bwrBg7u3FFAZ5D8gUs4/c=
-golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200128002243-345141a36859 h1:xIszjAtlVeHg9hhv6Zhntvwqowji1k2rrgoOhj/aaKw=
-golang.org/x/tools v0.0.0-20200128002243-345141a36859/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
-golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0 h1:jbyannxz0XFD3zdjgrSUsaJbgpH4eTrkdhRChkHPfO8=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
-google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.15.0 h1:yzlyyDW/J0w8yNFJIhiAJy4kq74S+1DOLdawELNxFMA=
-google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
-google.golang.org/appengine v1.6.5 h1:tycE03LOZYQNhDpS27tcQdAzLCVMaj7QT2SXxebnpCM=
-google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@@ -232,31 +142,15 @@ google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51 h1:Ex1mq5jaJof+kRnYi3SlYJ8KKa9Ao3NHyIT5XJ1gF6U=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
-google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba h1:pRj9OXZbwNtbtZtOB4dLwfK4u+EVRMvP+e9zKkg2grM=
-google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150 h1:VPpdpQkGvFicX9yo4G5oxZPi9ALBnEOZblPSa/Wa2m4=
-google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200128133413-58ce757ed39b h1:c8OBoXP3kTbDWWB/oVE3FkR851p4iZ3MPadz7zXEIPU=
-google.golang.org/genproto v0.0.0-20200128133413-58ce757ed39b/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1 h1:j6XxA85m/6txkUCHvzlV5f+HBNl/1r5cZ2A/3IEFOO8=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
-google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
-google.golang.org/grpc v1.26.0 h1:2dTRdpdFEEhJYQD8EMLB61nnrzSCTbG38PhqdhvOltg=
-google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
diff --git a/vendor/cloud.google.com/go/storage/hmac.go b/vendor/cloud.google.com/go/storage/hmac.go
index 7d8185f37..c39632740 100644
--- a/vendor/cloud.google.com/go/storage/hmac.go
+++ b/vendor/cloud.google.com/go/storage/hmac.go
@@ -25,8 +25,6 @@ import (
)
// HMACState is the state of the HMAC key.
-//
-// This type is EXPERIMENTAL and subject to change or removal without notice.
type HMACState string
const (
@@ -107,21 +105,9 @@ func (c *Client) HMACKeyHandle(projectID, accessID string) *HMACKeyHandle {
// Get invokes an RPC to retrieve the HMAC key referenced by the
// HMACKeyHandle's accessID.
//
-// Options such as UserProjectForHMACKeys can be used to set the
-// userProject to be billed against for operations.
-//
// This method is EXPERIMENTAL and subject to change or removal without notice.
-func (hkh *HMACKeyHandle) Get(ctx context.Context, opts ...HMACKeyOption) (*HMACKey, error) {
+func (hkh *HMACKeyHandle) Get(ctx context.Context) (*HMACKey, error) {
call := hkh.raw.Get(hkh.projectID, hkh.accessID)
-
- desc := new(hmacKeyDesc)
- for _, opt := range opts {
- opt.withHMACKeyDesc(desc)
- }
- if desc.userProjectID != "" {
- call = call.UserProject(desc.userProjectID)
- }
-
setClientHeader(call.Header())
var metadata *raw.HmacKeyMetadata
@@ -145,15 +131,8 @@ func (hkh *HMACKeyHandle) Get(ctx context.Context, opts ...HMACKeyOption) (*HMAC
// After deletion, a key cannot be used to authenticate requests.
//
// This method is EXPERIMENTAL and subject to change or removal without notice.
-func (hkh *HMACKeyHandle) Delete(ctx context.Context, opts ...HMACKeyOption) error {
+func (hkh *HMACKeyHandle) Delete(ctx context.Context) error {
delCall := hkh.raw.Delete(hkh.projectID, hkh.accessID)
- desc := new(hmacKeyDesc)
- for _, opt := range opts {
- opt.withHMACKeyDesc(desc)
- }
- if desc.userProjectID != "" {
- delCall = delCall.UserProject(desc.userProjectID)
- }
setClientHeader(delCall.Header())
return runWithRetry(ctx, func() error {
@@ -194,7 +173,7 @@ func pbHmacKeyToHMACKey(pb *raw.HmacKey, updatedTimeCanBeNil bool) (*HMACKey, er
// CreateHMACKey invokes an RPC for Google Cloud Storage to create a new HMACKey.
//
// This method is EXPERIMENTAL and subject to change or removal without notice.
-func (c *Client) CreateHMACKey(ctx context.Context, projectID, serviceAccountEmail string, opts ...HMACKeyOption) (*HMACKey, error) {
+func (c *Client) CreateHMACKey(ctx context.Context, projectID, serviceAccountEmail string) (*HMACKey, error) {
if projectID == "" {
return nil, errors.New("storage: expecting a non-blank projectID")
}
@@ -204,14 +183,6 @@ func (c *Client) CreateHMACKey(ctx context.Context, projectID, serviceAccountEma
svc := raw.NewProjectsHmacKeysService(c.raw)
call := svc.Create(projectID, serviceAccountEmail)
- desc := new(hmacKeyDesc)
- for _, opt := range opts {
- opt.withHMACKeyDesc(desc)
- }
- if desc.userProjectID != "" {
- call = call.UserProject(desc.userProjectID)
- }
-
setClientHeader(call.Header())
var hkPb *raw.HmacKey
@@ -241,7 +212,7 @@ type HMACKeyAttrsToUpdate struct {
// Update mutates the HMACKey referred to by accessID.
//
// This method is EXPERIMENTAL and subject to change or removal without notice.
-func (h *HMACKeyHandle) Update(ctx context.Context, au HMACKeyAttrsToUpdate, opts ...HMACKeyOption) (*HMACKey, error) {
+func (h *HMACKeyHandle) Update(ctx context.Context, au HMACKeyAttrsToUpdate) (*HMACKey, error) {
if au.State != Active && au.State != Inactive {
return nil, fmt.Errorf("storage: invalid state %q for update, must be either %q or %q", au.State, Active, Inactive)
}
@@ -250,14 +221,6 @@ func (h *HMACKeyHandle) Update(ctx context.Context, au HMACKeyAttrsToUpdate, opt
Etag: au.Etag,
State: string(au.State),
})
-
- desc := new(hmacKeyDesc)
- for _, opt := range opts {
- opt.withHMACKeyDesc(desc)
- }
- if desc.userProjectID != "" {
- call = call.UserProject(desc.userProjectID)
- }
setClientHeader(call.Header())
var metadata *raw.HmacKeyMetadata
@@ -278,8 +241,6 @@ func (h *HMACKeyHandle) Update(ctx context.Context, au HMACKeyAttrsToUpdate, opt
// An HMACKeysIterator is an iterator over HMACKeys.
//
-// Note: This iterator is not safe for concurrent operations without explicit synchronization.
-//
// This type is EXPERIMENTAL and subject to change or removal without notice.
type HMACKeysIterator struct {
ctx context.Context
@@ -294,8 +255,6 @@ type HMACKeysIterator struct {
// ListHMACKeys returns an iterator for listing HMACKeys.
//
-// Note: This iterator is not safe for concurrent operations without explicit synchronization.
-//
// This method is EXPERIMENTAL and subject to change or removal without notice.
func (c *Client) ListHMACKeys(ctx context.Context, projectID string, opts ...HMACKeyOption) *HMACKeysIterator {
it := &HMACKeysIterator{
@@ -324,8 +283,6 @@ func (c *Client) ListHMACKeys(ctx context.Context, projectID string, opts ...HMA
// there are no more results. Once Next returns iterator.Done, all subsequent
// calls will return iterator.Done.
//
-// Note: This iterator is not safe for concurrent operations without explicit synchronization.
-//
// This method is EXPERIMENTAL and subject to change or removal without notice.
func (it *HMACKeysIterator) Next() (*HMACKey, error) {
if err := it.nextFunc(); err != nil {
@@ -340,8 +297,6 @@ func (it *HMACKeysIterator) Next() (*HMACKey, error) {
// PageInfo supports pagination. See the google.golang.org/api/iterator package for details.
//
-// Note: This iterator is not safe for concurrent operations without explicit synchronization.
-//
// This method is EXPERIMENTAL and subject to change or removal without notice.
func (it *HMACKeysIterator) PageInfo() *iterator.PageInfo { return it.pageInfo }
@@ -394,8 +349,6 @@ type hmacKeyDesc struct {
}
// HMACKeyOption configures the behavior of HMACKey related methods and actions.
-//
-// This interface is EXPERIMENTAL and subject to change or removal without notice.
type HMACKeyOption interface {
withHMACKeyDesc(*hmacKeyDesc)
}
@@ -411,8 +364,6 @@ func (hkdf hmacKeyDescFunc) withHMACKeyDesc(hkd *hmacKeyDesc) {
//
// Only one service account email can be used as a filter, so if multiple
// of these options are applied, the last email to be set will be used.
-//
-// This option is EXPERIMENTAL and subject to change or removal without notice.
func ForHMACKeyServiceAccountEmail(serviceAccountEmail string) HMACKeyOption {
return hmacKeyDescFunc(func(hkd *hmacKeyDesc) {
hkd.forServiceAccountEmail = serviceAccountEmail
@@ -420,21 +371,16 @@ func ForHMACKeyServiceAccountEmail(serviceAccountEmail string) HMACKeyOption {
}
// ShowDeletedHMACKeys will also list keys whose state is "DELETED".
-//
-// This option is EXPERIMENTAL and subject to change or removal without notice.
func ShowDeletedHMACKeys() HMACKeyOption {
return hmacKeyDescFunc(func(hkd *hmacKeyDesc) {
hkd.showDeletedKeys = true
})
}
-// UserProjectForHMACKeys will bill the request against userProjectID
-// if userProjectID is non-empty.
+// HMACKeysForUserProject will bill the request against userProjectID.
//
// Note: This is a noop right now and only provided for API compatibility.
-//
-// This option is EXPERIMENTAL and subject to change or removal without notice.
-func UserProjectForHMACKeys(userProjectID string) HMACKeyOption {
+func HMACKeysForUserProject(userProjectID string) HMACKeyOption {
return hmacKeyDescFunc(func(hkd *hmacKeyDesc) {
hkd.userProjectID = userProjectID
})
diff --git a/vendor/cloud.google.com/go/storage/storage.go b/vendor/cloud.google.com/go/storage/storage.go
index c0bf509da..1ffb10f64 100644
--- a/vendor/cloud.google.com/go/storage/storage.go
+++ b/vendor/cloud.google.com/go/storage/storage.go
@@ -54,7 +54,7 @@ var (
ErrObjectNotExist = errors.New("storage: object doesn't exist")
)
-var userAgent = fmt.Sprintf("gcloud-golang-storage/%s", version.Repo)
+const userAgent = "gcloud-golang-storage/20151204"
const (
// ScopeFullControl grants permissions to manage your
@@ -94,20 +94,11 @@ type Client struct {
// NewClient creates a new Google Cloud Storage client.
// The default scope is ScopeFullControl. To use a different scope, like ScopeReadOnly, use option.WithScopes.
func NewClient(ctx context.Context, opts ...option.ClientOption) (*Client, error) {
- var host, readHost, scheme string
-
- if host = os.Getenv("STORAGE_EMULATOR_HOST"); host == "" {
- scheme = "https"
- readHost = "storage.googleapis.com"
-
- opts = append(opts, option.WithScopes(ScopeFullControl), option.WithUserAgent(userAgent))
- } else {
- scheme = "http"
- readHost = host
-
- opts = append(opts, option.WithoutAuthentication())
+ o := []option.ClientOption{
+ option.WithScopes(ScopeFullControl),
+ option.WithUserAgent(userAgent),
}
-
+ opts = append(o, opts...)
hc, ep, err := htransport.NewClient(ctx, opts...)
if err != nil {
return nil, fmt.Errorf("dialing: %v", err)
@@ -116,21 +107,17 @@ func NewClient(ctx context.Context, opts ...option.ClientOption) (*Client, error
if err != nil {
return nil, fmt.Errorf("storage client: %v", err)
}
- if ep == "" {
- // Override the default value for BasePath from the raw client.
- // TODO: remove when the raw client uses this endpoint as its default (~end of 2020)
- rawService.BasePath = "https://storage.googleapis.com/storage/v1/"
- } else {
- // If the endpoint has been set explicitly, use this for the BasePath
- // as well as readHost
+ if ep != "" {
rawService.BasePath = ep
- u, err := url.Parse(ep)
- if err != nil {
- return nil, fmt.Errorf("supplied endpoint %v is not valid: %v", ep, err)
- }
- readHost = u.Host
}
-
+ scheme := "https"
+ var host, readHost string
+ if host = os.Getenv("STORAGE_EMULATOR_HOST"); host != "" {
+ scheme = "http"
+ readHost = host
+ } else {
+ readHost = "storage.googleapis.com"
+ }
return &Client{
hc: hc,
raw: rawService,
@@ -1002,12 +989,11 @@ type ObjectAttrs struct {
// of a particular object. This field is read-only.
Metageneration int64
- // StorageClass is the storage class of the object. This defines
- // how objects are stored and determines the SLA and the cost of storage.
- // Typical values are "STANDARD", "NEARLINE", "COLDLINE" and "ARCHIVE".
- // Defaults to "STANDARD".
- // See https://cloud.google.com/storage/docs/storage-classes for all
- // valid values.
+ // StorageClass is the storage class of the object.
+ // This value defines how objects in the bucket are stored and
+ // determines the SLA and the cost of storage. Typical values are
+ // "NEARLINE", "COLDLINE" and "STANDARD".
+ // It defaults to "STANDARD".
StorageClass string
// Created is the time the object was created. This field is read-only.
@@ -1139,78 +1125,6 @@ type Query struct {
// Versions indicates whether multiple versions of the same
// object will be included in the results.
Versions bool
-
- // fieldSelection is used to select only specific fields to be returned by
- // the query. It's used internally and is populated for the user by
- // calling Query.SetAttrSelection
- fieldSelection string
-}
-
-// attrToFieldMap maps the field names of ObjectAttrs to the underlying field
-// names in the API call. Only the ObjectAttrs field names are visible to users
-// because they are already part of the public API of the package.
-var attrToFieldMap = map[string]string{
- "Bucket": "bucket",
- "Name": "name",
- "ContentType": "contentType",
- "ContentLanguage": "contentLanguage",
- "CacheControl": "cacheControl",
- "EventBasedHold": "eventBasedHold",
- "TemporaryHold": "temporaryHold",
- "RetentionExpirationTime": "retentionExpirationTime",
- "ACL": "acl",
- "Owner": "owner",
- "ContentEncoding": "contentEncoding",
- "ContentDisposition": "contentDisposition",
- "Size": "size",
- "MD5": "md5Hash",
- "CRC32C": "crc32c",
- "MediaLink": "mediaLink",
- "Metadata": "metadata",
- "Generation": "generation",
- "Metageneration": "metageneration",
- "StorageClass": "storageClass",
- "CustomerKeySHA256": "customerEncryption",
- "KMSKeyName": "kmsKeyName",
- "Created": "timeCreated",
- "Deleted": "timeDeleted",
- "Updated": "updated",
- "Etag": "etag",
-}
-
-// SetAttrSelection makes the query populate only specific attributes of
-// objects. When iterating over objects, if you only need each object's name
-// and size, pass []string{"Name", "Size"} to this method. Only these fields
-// will be fetched for each object across the network; the other fields of
-// ObjectAttr will remain at their default values. This is a performance
-// optimization; for more information, see
-// https://cloud.google.com/storage/docs/json_api/v1/how-tos/performance
-func (q *Query) SetAttrSelection(attrs []string) error {
- fieldSet := make(map[string]bool)
-
- for _, attr := range attrs {
- field, ok := attrToFieldMap[attr]
- if !ok {
- return fmt.Errorf("storage: attr %v is not valid", attr)
- }
- fieldSet[field] = true
- }
-
- if len(fieldSet) > 0 {
- var b strings.Builder
- b.WriteString("items(")
- first := true
- for field := range fieldSet {
- if !first {
- b.WriteString(",")
- }
- first = false
- b.WriteString(field)
- }
- b.WriteString(")")
- q.fieldSelection = b.String()
- }
- return nil
}
// Conditions constrain methods to act on specific generations of
diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go
deleted file mode 100644
index 41bbddc61..000000000
--- a/vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-// Package cmpopts provides common options for the cmp package.
-package cmpopts
-
-import (
- "math"
- "reflect"
-
- "github.com/google/go-cmp/cmp"
-)
-
-func equateAlways(_, _ interface{}) bool { return true }
-
-// EquateEmpty returns a Comparer option that determines all maps and slices
-// with a length of zero to be equal, regardless of whether they are nil.
-//
-// EquateEmpty can be used in conjunction with SortSlices and SortMaps.
-func EquateEmpty() cmp.Option {
- return cmp.FilterValues(isEmpty, cmp.Comparer(equateAlways))
-}
-
-func isEmpty(x, y interface{}) bool {
- vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
- return (x != nil && y != nil && vx.Type() == vy.Type()) &&
- (vx.Kind() == reflect.Slice || vx.Kind() == reflect.Map) &&
- (vx.Len() == 0 && vy.Len() == 0)
-}
-
-// EquateApprox returns a Comparer option that determines float32 or float64
-// values to be equal if they are within a relative fraction or absolute margin.
-// This option is not used when either x or y is NaN or infinite.
-//
-// The fraction determines that the difference of two values must be within the
-// smaller fraction of the two values, while the margin determines that the two
-// values must be within some absolute margin.
-// To express only a fraction or only a margin, use 0 for the other parameter.
-// The fraction and margin must be non-negative.
-//
-// The mathematical expression used is equivalent to:
-// |x-y| ≤ max(fraction*min(|x|, |y|), margin)
-//
-// EquateApprox can be used in conjunction with EquateNaNs.
-func EquateApprox(fraction, margin float64) cmp.Option {
- if margin < 0 || fraction < 0 || math.IsNaN(margin) || math.IsNaN(fraction) {
- panic("margin or fraction must be a non-negative number")
- }
- a := approximator{fraction, margin}
- return cmp.Options{
- cmp.FilterValues(areRealF64s, cmp.Comparer(a.compareF64)),
- cmp.FilterValues(areRealF32s, cmp.Comparer(a.compareF32)),
- }
-}
-
-type approximator struct{ frac, marg float64 }
-
-func areRealF64s(x, y float64) bool {
- return !math.IsNaN(x) && !math.IsNaN(y) && !math.IsInf(x, 0) && !math.IsInf(y, 0)
-}
-func areRealF32s(x, y float32) bool {
- return areRealF64s(float64(x), float64(y))
-}
-func (a approximator) compareF64(x, y float64) bool {
- relMarg := a.frac * math.Min(math.Abs(x), math.Abs(y))
- return math.Abs(x-y) <= math.Max(a.marg, relMarg)
-}
-func (a approximator) compareF32(x, y float32) bool {
- return a.compareF64(float64(x), float64(y))
-}
-
-// EquateNaNs returns a Comparer option that determines float32 and float64
-// NaN values to be equal.
-//
-// EquateNaNs can be used in conjunction with EquateApprox.
-func EquateNaNs() cmp.Option {
- return cmp.Options{
- cmp.FilterValues(areNaNsF64s, cmp.Comparer(equateAlways)),
- cmp.FilterValues(areNaNsF32s, cmp.Comparer(equateAlways)),
- }
-}
-
-func areNaNsF64s(x, y float64) bool {
- return math.IsNaN(x) && math.IsNaN(y)
-}
-func areNaNsF32s(x, y float32) bool {
- return areNaNsF64s(float64(x), float64(y))
-}
diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go
deleted file mode 100644
index ff8e785d4..000000000
--- a/vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go
+++ /dev/null
@@ -1,207 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package cmpopts
-
-import (
- "fmt"
- "reflect"
- "unicode"
- "unicode/utf8"
-
- "github.com/google/go-cmp/cmp"
- "github.com/google/go-cmp/cmp/internal/function"
-)
-
-// IgnoreFields returns an Option that ignores exported fields of the
-// given names on a single struct type.
-// The struct type is specified by passing in a value of that type.
-//
-// The name may be a dot-delimited string (e.g., "Foo.Bar") to ignore a
-// specific sub-field that is embedded or nested within the parent struct.
-//
-// This does not handle unexported fields; use IgnoreUnexported instead.
-func IgnoreFields(typ interface{}, names ...string) cmp.Option {
- sf := newStructFilter(typ, names...)
- return cmp.FilterPath(sf.filter, cmp.Ignore())
-}
-
-// IgnoreTypes returns an Option that ignores all values assignable to
-// certain types, which are specified by passing in a value of each type.
-func IgnoreTypes(typs ...interface{}) cmp.Option {
- tf := newTypeFilter(typs...)
- return cmp.FilterPath(tf.filter, cmp.Ignore())
-}
-
-type typeFilter []reflect.Type
-
-func newTypeFilter(typs ...interface{}) (tf typeFilter) {
- for _, typ := range typs {
- t := reflect.TypeOf(typ)
- if t == nil {
- // This occurs if someone tries to pass in sync.Locker(nil)
- panic("cannot determine type; consider using IgnoreInterfaces")
- }
- tf = append(tf, t)
- }
- return tf
-}
-func (tf typeFilter) filter(p cmp.Path) bool {
- if len(p) < 1 {
- return false
- }
- t := p.Last().Type()
- for _, ti := range tf {
- if t.AssignableTo(ti) {
- return true
- }
- }
- return false
-}
-
-// IgnoreInterfaces returns an Option that ignores all values or references of
-// values assignable to certain interface types. These interfaces are specified
-// by passing in an anonymous struct with the interface types embedded in it.
-// For example, to ignore sync.Locker, pass in struct{sync.Locker}{}.
-func IgnoreInterfaces(ifaces interface{}) cmp.Option {
- tf := newIfaceFilter(ifaces)
- return cmp.FilterPath(tf.filter, cmp.Ignore())
-}
-
-type ifaceFilter []reflect.Type
-
-func newIfaceFilter(ifaces interface{}) (tf ifaceFilter) {
- t := reflect.TypeOf(ifaces)
- if ifaces == nil || t.Name() != "" || t.Kind() != reflect.Struct {
- panic("input must be an anonymous struct")
- }
- for i := 0; i < t.NumField(); i++ {
- fi := t.Field(i)
- switch {
- case !fi.Anonymous:
- panic("struct cannot have named fields")
- case fi.Type.Kind() != reflect.Interface:
- panic("embedded field must be an interface type")
- case fi.Type.NumMethod() == 0:
- // This matches everything; why would you ever want this?
- panic("cannot ignore empty interface")
- default:
- tf = append(tf, fi.Type)
- }
- }
- return tf
-}
-func (tf ifaceFilter) filter(p cmp.Path) bool {
- if len(p) < 1 {
- return false
- }
- t := p.Last().Type()
- for _, ti := range tf {
- if t.AssignableTo(ti) {
- return true
- }
- if t.Kind() != reflect.Ptr && reflect.PtrTo(t).AssignableTo(ti) {
- return true
- }
- }
- return false
-}
-
-// IgnoreUnexported returns an Option that only ignores the immediate unexported
-// fields of a struct, including anonymous fields of unexported types.
-// In particular, unexported fields within the struct's exported fields
-// of struct types, including anonymous fields, will not be ignored unless the
-// type of the field itself is also passed to IgnoreUnexported.
-//
-// Avoid ignoring unexported fields of a type which you do not control (i.e. a
-// type from another repository), as changes to the implementation of such types
-// may change how the comparison behaves. Prefer a custom Comparer instead.
-func IgnoreUnexported(typs ...interface{}) cmp.Option {
- ux := newUnexportedFilter(typs...)
- return cmp.FilterPath(ux.filter, cmp.Ignore())
-}
-
-type unexportedFilter struct{ m map[reflect.Type]bool }
-
-func newUnexportedFilter(typs ...interface{}) unexportedFilter {
- ux := unexportedFilter{m: make(map[reflect.Type]bool)}
- for _, typ := range typs {
- t := reflect.TypeOf(typ)
- if t == nil || t.Kind() != reflect.Struct {
- panic(fmt.Sprintf("invalid struct type: %T", typ))
- }
- ux.m[t] = true
- }
- return ux
-}
-func (xf unexportedFilter) filter(p cmp.Path) bool {
- sf, ok := p.Index(-1).(cmp.StructField)
- if !ok {
- return false
- }
- return xf.m[p.Index(-2).Type()] && !isExported(sf.Name())
-}
-
-// isExported reports whether the identifier is exported.
-func isExported(id string) bool {
- r, _ := utf8.DecodeRuneInString(id)
- return unicode.IsUpper(r)
-}
-
-// IgnoreSliceElements returns an Option that ignores elements of []V.
-// The discard function must be of the form "func(T) bool" which is used to
-// ignore slice elements of type V, where V is assignable to T.
-// Elements are ignored if the function reports true.
-func IgnoreSliceElements(discardFunc interface{}) cmp.Option {
- vf := reflect.ValueOf(discardFunc)
- if !function.IsType(vf.Type(), function.ValuePredicate) || vf.IsNil() {
- panic(fmt.Sprintf("invalid discard function: %T", discardFunc))
- }
- return cmp.FilterPath(func(p cmp.Path) bool {
- si, ok := p.Index(-1).(cmp.SliceIndex)
- if !ok {
- return false
- }
- if !si.Type().AssignableTo(vf.Type().In(0)) {
- return false
- }
- vx, vy := si.Values()
- if vx.IsValid() && vf.Call([]reflect.Value{vx})[0].Bool() {
- return true
- }
- if vy.IsValid() && vf.Call([]reflect.Value{vy})[0].Bool() {
- return true
- }
- return false
- }, cmp.Ignore())
-}
-
-// IgnoreMapEntries returns an Option that ignores entries of map[K]V.
-// The discard function must be of the form "func(T, R) bool" which is used to
-// ignore map entries of type K and V, where K and V are assignable to T and R.
-// Entries are ignored if the function reports true.
-func IgnoreMapEntries(discardFunc interface{}) cmp.Option {
- vf := reflect.ValueOf(discardFunc)
- if !function.IsType(vf.Type(), function.KeyValuePredicate) || vf.IsNil() {
- panic(fmt.Sprintf("invalid discard function: %T", discardFunc))
- }
- return cmp.FilterPath(func(p cmp.Path) bool {
- mi, ok := p.Index(-1).(cmp.MapIndex)
- if !ok {
- return false
- }
- if !mi.Key().Type().AssignableTo(vf.Type().In(0)) || !mi.Type().AssignableTo(vf.Type().In(1)) {
- return false
- }
- k := mi.Key()
- vx, vy := mi.Values()
- if vx.IsValid() && vf.Call([]reflect.Value{k, vx})[0].Bool() {
- return true
- }
- if vy.IsValid() && vf.Call([]reflect.Value{k, vy})[0].Bool() {
- return true
- }
- return false
- }, cmp.Ignore())
-}
diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go
deleted file mode 100644
index 3a4804621..000000000
--- a/vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go
+++ /dev/null
@@ -1,147 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package cmpopts
-
-import (
- "fmt"
- "reflect"
- "sort"
-
- "github.com/google/go-cmp/cmp"
- "github.com/google/go-cmp/cmp/internal/function"
-)
-
-// SortSlices returns a Transformer option that sorts all []V.
-// The less function must be of the form "func(T, T) bool" which is used to
-// sort any slice with element type V that is assignable to T.
-//
-// The less function must be:
-// • Deterministic: less(x, y) == less(x, y)
-// • Irreflexive: !less(x, x)
-// • Transitive: if !less(x, y) and !less(y, z), then !less(x, z)
-//
-// The less function does not have to be "total". That is, if !less(x, y) and
-// !less(y, x) for two elements x and y, their relative order is maintained.
-//
-// SortSlices can be used in conjunction with EquateEmpty.
-func SortSlices(lessFunc interface{}) cmp.Option {
- vf := reflect.ValueOf(lessFunc)
- if !function.IsType(vf.Type(), function.Less) || vf.IsNil() {
- panic(fmt.Sprintf("invalid less function: %T", lessFunc))
- }
- ss := sliceSorter{vf.Type().In(0), vf}
- return cmp.FilterValues(ss.filter, cmp.Transformer("cmpopts.SortSlices", ss.sort))
-}
-
-type sliceSorter struct {
- in reflect.Type // T
- fnc reflect.Value // func(T, T) bool
-}
-
-func (ss sliceSorter) filter(x, y interface{}) bool {
- vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
- if !(x != nil && y != nil && vx.Type() == vy.Type()) ||
- !(vx.Kind() == reflect.Slice && vx.Type().Elem().AssignableTo(ss.in)) ||
- (vx.Len() <= 1 && vy.Len() <= 1) {
- return false
- }
- // Check whether the slices are already sorted to avoid an infinite
- // recursion cycle applying the same transform to itself.
- ok1 := sort.SliceIsSorted(x, func(i, j int) bool { return ss.less(vx, i, j) })
- ok2 := sort.SliceIsSorted(y, func(i, j int) bool { return ss.less(vy, i, j) })
- return !ok1 || !ok2
-}
-func (ss sliceSorter) sort(x interface{}) interface{} {
- src := reflect.ValueOf(x)
- dst := reflect.MakeSlice(src.Type(), src.Len(), src.Len())
- for i := 0; i < src.Len(); i++ {
- dst.Index(i).Set(src.Index(i))
- }
- sort.SliceStable(dst.Interface(), func(i, j int) bool { return ss.less(dst, i, j) })
- ss.checkSort(dst)
- return dst.Interface()
-}
-func (ss sliceSorter) checkSort(v reflect.Value) {
- start := -1 // Start of a sequence of equal elements.
- for i := 1; i < v.Len(); i++ {
- if ss.less(v, i-1, i) {
- // Check that first and last elements in v[start:i] are equal.
- if start >= 0 && (ss.less(v, start, i-1) || ss.less(v, i-1, start)) {
- panic(fmt.Sprintf("incomparable values detected: want equal elements: %v", v.Slice(start, i)))
- }
- start = -1
- } else if start == -1 {
- start = i
- }
- }
-}
-func (ss sliceSorter) less(v reflect.Value, i, j int) bool {
- vx, vy := v.Index(i), v.Index(j)
- return ss.fnc.Call([]reflect.Value{vx, vy})[0].Bool()
-}
-
-// SortMaps returns a Transformer option that flattens map[K]V types to be a
-// sorted []struct{K, V}. The less function must be of the form
-// "func(T, T) bool" which is used to sort any map with key K that is
-// assignable to T.
-//
-// Flattening the map into a slice has the property that cmp.Equal is able to
-// use Comparers on K or the K.Equal method if it exists.
-//
-// The less function must be:
-// • Deterministic: less(x, y) == less(x, y)
-// • Irreflexive: !less(x, x)
-// • Transitive: if !less(x, y) and !less(y, z), then !less(x, z)
-// • Total: if x != y, then either less(x, y) or less(y, x)
-//
-// SortMaps can be used in conjunction with EquateEmpty.
-func SortMaps(lessFunc interface{}) cmp.Option {
- vf := reflect.ValueOf(lessFunc)
- if !function.IsType(vf.Type(), function.Less) || vf.IsNil() {
- panic(fmt.Sprintf("invalid less function: %T", lessFunc))
- }
- ms := mapSorter{vf.Type().In(0), vf}
- return cmp.FilterValues(ms.filter, cmp.Transformer("cmpopts.SortMaps", ms.sort))
-}
-
-type mapSorter struct {
- in reflect.Type // T
- fnc reflect.Value // func(T, T) bool
-}
-
-func (ms mapSorter) filter(x, y interface{}) bool {
- vx, vy := reflect.ValueOf(x), reflect.ValueOf(y)
- return (x != nil && y != nil && vx.Type() == vy.Type()) &&
- (vx.Kind() == reflect.Map && vx.Type().Key().AssignableTo(ms.in)) &&
- (vx.Len() != 0 || vy.Len() != 0)
-}
-func (ms mapSorter) sort(x interface{}) interface{} {
- src := reflect.ValueOf(x)
- outType := reflect.StructOf([]reflect.StructField{
- {Name: "K", Type: src.Type().Key()},
- {Name: "V", Type: src.Type().Elem()},
- })
- dst := reflect.MakeSlice(reflect.SliceOf(outType), src.Len(), src.Len())
- for i, k := range src.MapKeys() {
- v := reflect.New(outType).Elem()
- v.Field(0).Set(k)
- v.Field(1).Set(src.MapIndex(k))
- dst.Index(i).Set(v)
- }
- sort.Slice(dst.Interface(), func(i, j int) bool { return ms.less(dst, i, j) })
- ms.checkSort(dst)
- return dst.Interface()
-}
-func (ms mapSorter) checkSort(v reflect.Value) {
- for i := 1; i < v.Len(); i++ {
- if !ms.less(v, i-1, i) {
- panic(fmt.Sprintf("partial order detected: want %v < %v", v.Index(i-1), v.Index(i)))
- }
- }
-}
-func (ms mapSorter) less(v reflect.Value, i, j int) bool {
- vx, vy := v.Index(i).Field(0), v.Index(j).Field(0)
- return ms.fnc.Call([]reflect.Value{vx, vy})[0].Bool()
-}
diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/struct_filter.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/struct_filter.go
deleted file mode 100644
index 97f707983..000000000
--- a/vendor/github.com/google/go-cmp/cmp/cmpopts/struct_filter.go
+++ /dev/null
@@ -1,182 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package cmpopts
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- "github.com/google/go-cmp/cmp"
-)
-
-// filterField returns a new Option where opt is only evaluated on paths that
-// include a specific exported field on a single struct type.
-// The struct type is specified by passing in a value of that type.
-//
-// The name may be a dot-delimited string (e.g., "Foo.Bar") to select a
-// specific sub-field that is embedded or nested within the parent struct.
-func filterField(typ interface{}, name string, opt cmp.Option) cmp.Option {
- // TODO: This is currently unexported over concerns of how helper filters
- // can be composed together easily.
- // TODO: Add tests for FilterField.
-
- sf := newStructFilter(typ, name)
- return cmp.FilterPath(sf.filter, opt)
-}
-
-type structFilter struct {
- t reflect.Type // The root struct type to match on
- ft fieldTree // Tree of fields to match on
-}
-
-func newStructFilter(typ interface{}, names ...string) structFilter {
- // TODO: Perhaps allow * as a special identifier to allow ignoring any
- // number of path steps until the next field match?
- // This could be useful when a concrete struct gets transformed into
- // an anonymous struct where it is not possible to specify that by type,
- // but the transformer happens to provide guarantees about the names of
- // the transformed fields.
-
- t := reflect.TypeOf(typ)
- if t == nil || t.Kind() != reflect.Struct {
- panic(fmt.Sprintf("%T must be a struct", typ))
- }
- var ft fieldTree
- for _, name := range names {
- cname, err := canonicalName(t, name)
- if err != nil {
- panic(fmt.Sprintf("%s: %v", strings.Join(cname, "."), err))
- }
- ft.insert(cname)
- }
- return structFilter{t, ft}
-}
-
-func (sf structFilter) filter(p cmp.Path) bool {
- for i, ps := range p {
- if ps.Type().AssignableTo(sf.t) && sf.ft.matchPrefix(p[i+1:]) {
- return true
- }
- }
- return false
-}
-
-// fieldTree represents a set of dot-separated identifiers.
-//
-// For example, inserting the following selectors:
-// Foo
-// Foo.Bar.Baz
-// Foo.Buzz
-// Nuka.Cola.Quantum
-//
-// Results in a tree of the form:
-// {sub: {
-// "Foo": {ok: true, sub: {
-// "Bar": {sub: {
-// "Baz": {ok: true},
-// }},
-// "Buzz": {ok: true},
-// }},
-// "Nuka": {sub: {
-// "Cola": {sub: {
-// "Quantum": {ok: true},
-// }},
-// }},
-// }}
-type fieldTree struct {
- ok bool // Whether this is a specified node
- sub map[string]fieldTree // The sub-tree of fields under this node
-}
-
-// insert inserts a sequence of field accesses into the tree.
-func (ft *fieldTree) insert(cname []string) {
- if ft.sub == nil {
- ft.sub = make(map[string]fieldTree)
- }
- if len(cname) == 0 {
- ft.ok = true
- return
- }
- sub := ft.sub[cname[0]]
- sub.insert(cname[1:])
- ft.sub[cname[0]] = sub
-}
-
-// matchPrefix reports whether any selector in the fieldTree matches
-// the start of path p.
-func (ft fieldTree) matchPrefix(p cmp.Path) bool {
- for _, ps := range p {
- switch ps := ps.(type) {
- case cmp.StructField:
- ft = ft.sub[ps.Name()]
- if ft.ok {
- return true
- }
- if len(ft.sub) == 0 {
- return false
- }
- case cmp.Indirect:
- default:
- return false
- }
- }
- return false
-}
-
-// canonicalName returns a list of identifiers where any struct field access
-// through an embedded field is expanded to include the names of the embedded
-// types themselves.
-//
-// For example, suppose field "Foo" is not directly in the parent struct,
-// but actually from an embedded struct of type "Bar". Then, the canonical name
-// of "Foo" is actually "Bar.Foo".
-//
-// Suppose field "Foo" is not directly in the parent struct, but actually
-// a field in two different embedded structs of types "Bar" and "Baz".
-// Then the selector "Foo" causes a panic since it is ambiguous which one it
-// refers to. The user must specify either "Bar.Foo" or "Baz.Foo".
-func canonicalName(t reflect.Type, sel string) ([]string, error) {
- var name string
- sel = strings.TrimPrefix(sel, ".")
- if sel == "" {
- return nil, fmt.Errorf("name must not be empty")
- }
- if i := strings.IndexByte(sel, '.'); i < 0 {
- name, sel = sel, ""
- } else {
- name, sel = sel[:i], sel[i:]
- }
-
- // Type must be a struct or pointer to struct.
- if t.Kind() == reflect.Ptr {
- t = t.Elem()
- }
- if t.Kind() != reflect.Struct {
- return nil, fmt.Errorf("%v must be a struct", t)
- }
-
- // Find the canonical name for this current field name.
- // If the field exists in an embedded struct, then it will be expanded.
- if !isExported(name) {
- // Disallow unexported fields:
- // * To discourage people from actually touching unexported fields
- // * FieldByName is buggy (https://golang.org/issue/4876)
- return []string{name}, fmt.Errorf("name must be exported")
- }
- sf, ok := t.FieldByName(name)
- if !ok {
- return []string{name}, fmt.Errorf("does not exist")
- }
- var ss []string
- for i := range sf.Index {
- ss = append(ss, t.FieldByIndex(sf.Index[:i+1]).Name)
- }
- if sel == "" {
- return ss, nil
- }
- ssPost, err := canonicalName(sf.Type, sel)
- return append(ss, ssPost...), err
-}
diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go
deleted file mode 100644
index 9d651553d..000000000
--- a/vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2018, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package cmpopts
-
-import (
- "github.com/google/go-cmp/cmp"
-)
-
-type xformFilter struct{ xform cmp.Option }
-
-func (xf xformFilter) filter(p cmp.Path) bool {
- for _, ps := range p {
- if t, ok := ps.(cmp.Transform); ok && t.Option() == xf.xform {
- return false
- }
- }
- return true
-}
-
-// AcyclicTransformer returns a Transformer with a filter applied that ensures
-// that the transformer cannot be recursively applied upon its own output.
-//
-// An example use case is a transformer that splits a string by lines:
-// AcyclicTransformer("SplitLines", func(s string) []string{
-// return strings.Split(s, "\n")
-// })
-//
-// Had this been an unfiltered Transformer instead, this would result in an
-// infinite cycle converting a string to []string to [][]string and so on.
-func AcyclicTransformer(name string, xformFunc interface{}) cmp.Option {
- xf := xformFilter{cmp.Transformer(name, xformFunc)}
- return cmp.FilterPath(xf.filter, xf.xform)
-}
diff --git a/vendor/github.com/google/go-cmp/cmp/compare.go b/vendor/github.com/google/go-cmp/cmp/compare.go
index 2133562b0..c9a63ceda 100644
--- a/vendor/github.com/google/go-cmp/cmp/compare.go
+++ b/vendor/github.com/google/go-cmp/cmp/compare.go
@@ -22,8 +22,8 @@
// equality is determined by recursively comparing the primitive kinds on both
// values, much like reflect.DeepEqual. Unlike reflect.DeepEqual, unexported
// fields are not compared by default; they result in panics unless suppressed
-// by using an Ignore option (see cmpopts.IgnoreUnexported) or explicitly compared
-// using the AllowUnexported option.
+// by using an Ignore option (see cmpopts.IgnoreUnexported) or explicitly
+// compared using the Exporter option.
package cmp
import (
@@ -62,8 +62,8 @@ import (
//
// Structs are equal if recursively calling Equal on all fields report equal.
// If a struct contains unexported fields, Equal panics unless an Ignore option
-// (e.g., cmpopts.IgnoreUnexported) ignores that field or the AllowUnexported
-// option explicitly permits comparing the unexported field.
+// (e.g., cmpopts.IgnoreUnexported) ignores that field or the Exporter option
+// explicitly permits comparing the unexported field.
//
// Slices are equal if they are both nil or both non-nil, where recursively
// calling Equal on all non-ignored slice or array elements report equal.
@@ -80,6 +80,11 @@ import (
// Pointers and interfaces are equal if they are both nil or both non-nil,
// where they have the same underlying concrete type and recursively
// calling Equal on the underlying values reports equal.
+//
+// Before recursing into a pointer, slice element, or map, the current path
+// is checked to detect whether the address has already been visited.
+// If there is a cycle, then the pointed at values are considered equal
+// only if both addresses were previously visited in the same path step.
func Equal(x, y interface{}, opts ...Option) bool {
vx := reflect.ValueOf(x)
vy := reflect.ValueOf(y)
@@ -137,6 +142,7 @@ type state struct {
// Calling statelessCompare must not result in observable changes to these.
result diff.Result // The current result of comparison
curPath Path // The current path in the value tree
+ curPtrs pointerPath // The current set of visited pointers
reporters []reporter // Optional reporters
// recChecker checks for infinite cycles applying the same set of
@@ -148,13 +154,14 @@ type state struct {
dynChecker dynChecker
// These fields, once set by processOption, will not change.
- exporters map[reflect.Type]bool // Set of structs with unexported field visibility
- opts Options // List of all fundamental and filter options
+ exporters []exporter // List of exporters for structs with unexported fields
+ opts Options // List of all fundamental and filter options
}
func newState(opts []Option) *state {
// Always ensure a validator option exists to validate the inputs.
s := &state{opts: Options{validator{}}}
+ s.curPtrs.Init()
s.processOption(Options(opts))
return s
}
@@ -174,13 +181,8 @@ func (s *state) processOption(opt Option) {
panic(fmt.Sprintf("cannot use an unfiltered option: %v", opt))
}
s.opts = append(s.opts, opt)
- case visibleStructs:
- if s.exporters == nil {
- s.exporters = make(map[reflect.Type]bool)
- }
- for t := range opt {
- s.exporters[t] = true
- }
+ case exporter:
+ s.exporters = append(s.exporters, opt)
case reporter:
s.reporters = append(s.reporters, opt)
default:
@@ -192,9 +194,9 @@ func (s *state) processOption(opt Option) {
// This function is stateless in that it does not alter the current result,
// or output to any registered reporters.
func (s *state) statelessCompare(step PathStep) diff.Result {
- // We do not save and restore the curPath because all of the compareX
- // methods should properly push and pop from the path.
- // It is an implementation bug if the contents of curPath differs from
+ // We do not save and restore curPath and curPtrs because all of the
+ // compareX methods should properly push and pop from them.
+ // It is an implementation bug if the contents of the paths differ from
// when calling this function to when returning from it.
oldResult, oldReporters := s.result, s.reporters
@@ -216,9 +218,17 @@ func (s *state) compareAny(step PathStep) {
}
s.recChecker.Check(s.curPath)
- // Obtain the current type and values.
+ // Cycle-detection for slice elements (see NOTE in compareSlice).
t := step.Type()
vx, vy := step.Values()
+ if si, ok := step.(SliceIndex); ok && si.isSlice && vx.IsValid() && vy.IsValid() {
+ px, py := vx.Addr(), vy.Addr()
+ if eq, visited := s.curPtrs.Push(px, py); visited {
+ s.report(eq, reportByCycle)
+ return
+ }
+ defer s.curPtrs.Pop(px, py)
+ }
// Rule 1: Check whether an option applies on this node in the value tree.
if s.tryOptions(t, vx, vy) {
@@ -354,6 +364,7 @@ func sanitizeValue(v reflect.Value, t reflect.Type) reflect.Value {
func (s *state) compareStruct(t reflect.Type, vx, vy reflect.Value) {
var vax, vay reflect.Value // Addressable versions of vx and vy
+ var mayForce, mayForceInit bool
step := StructField{&structField{}}
for i := 0; i < t.NumField(); i++ {
step.typ = t.Field(i).Type
@@ -375,7 +386,13 @@ func (s *state) compareStruct(t reflect.Type, vx, vy reflect.Value) {
vax = makeAddressable(vx)
vay = makeAddressable(vy)
}
- step.mayForce = s.exporters[t]
+ if !mayForceInit {
+ for _, xf := range s.exporters {
+ mayForce = mayForce || xf(t)
+ }
+ mayForceInit = true
+ }
+ step.mayForce = mayForce
step.pvx = vax
step.pvy = vay
step.field = t.Field(i)
@@ -391,9 +408,21 @@ func (s *state) compareSlice(t reflect.Type, vx, vy reflect.Value) {
return
}
- // TODO: Support cyclic data structures.
+ // NOTE: It is incorrect to call curPtrs.Push on the slice header pointer
+ // since slices represents a list of pointers, rather than a single pointer.
+ // The pointer checking logic must be handled on a per-element basis
+ // in compareAny.
+ //
+ // A slice header (see reflect.SliceHeader) in Go is a tuple of a starting
+ // pointer P, a length N, and a capacity C. Supposing each slice element has
+ // a memory size of M, then the slice is equivalent to the list of pointers:
+ // [P+i*M for i in range(N)]
+ //
+ // For example, v[:0] and v[:1] are slices with the same starting pointer,
+ // but they are clearly different values. Using the slice pointer alone
+ // violates the assumption that equal pointers implies equal values.
- step := SliceIndex{&sliceIndex{pathStep: pathStep{typ: t.Elem()}}}
+ step := SliceIndex{&sliceIndex{pathStep: pathStep{typ: t.Elem()}, isSlice: isSlice}}
withIndexes := func(ix, iy int) SliceIndex {
if ix >= 0 {
step.vx, step.xkey = vx.Index(ix), ix
@@ -470,7 +499,12 @@ func (s *state) compareMap(t reflect.Type, vx, vy reflect.Value) {
return
}
- // TODO: Support cyclic data structures.
+ // Cycle-detection for maps.
+ if eq, visited := s.curPtrs.Push(vx, vy); visited {
+ s.report(eq, reportByCycle)
+ return
+ }
+ defer s.curPtrs.Pop(vx, vy)
// We combine and sort the two map keys so that we can perform the
// comparisons in a deterministic order.
@@ -507,7 +541,12 @@ func (s *state) comparePtr(t reflect.Type, vx, vy reflect.Value) {
return
}
- // TODO: Support cyclic data structures.
+ // Cycle-detection for pointers.
+ if eq, visited := s.curPtrs.Push(vx, vy); visited {
+ s.report(eq, reportByCycle)
+ return
+ }
+ defer s.curPtrs.Pop(vx, vy)
vx, vy = vx.Elem(), vy.Elem()
s.compareAny(Indirect{&indirect{pathStep{t.Elem(), vx, vy}}})
diff --git a/vendor/github.com/google/go-cmp/cmp/export_panic.go b/vendor/github.com/google/go-cmp/cmp/export_panic.go
index abc3a1c3e..dd032354f 100644
--- a/vendor/github.com/google/go-cmp/cmp/export_panic.go
+++ b/vendor/github.com/google/go-cmp/cmp/export_panic.go
@@ -8,8 +8,8 @@ package cmp
import "reflect"
-const supportAllowUnexported = false
+const supportExporters = false
func retrieveUnexportedField(reflect.Value, reflect.StructField) reflect.Value {
- panic("retrieveUnexportedField is not implemented")
+ panic("no support for forcibly accessing unexported fields")
}
diff --git a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go
index 59d4ee91b..57020e26c 100644
--- a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go
+++ b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go
@@ -11,7 +11,7 @@ import (
"unsafe"
)
-const supportAllowUnexported = true
+const supportExporters = true
// retrieveUnexportedField uses unsafe to forcibly retrieve any field from
// a struct such that the value has read-write permissions.
@@ -19,5 +19,7 @@ const supportAllowUnexported = true
// The parent struct, v, must be addressable, while f must be a StructField
// describing the field to retrieve.
func retrieveUnexportedField(v reflect.Value, f reflect.StructField) reflect.Value {
- return reflect.NewAt(f.Type, unsafe.Pointer(v.UnsafeAddr()+f.Offset)).Elem()
+ // See https://github.com/google/go-cmp/issues/167 for discussion of the
+ // following expression.
+ return reflect.NewAt(f.Type, unsafe.Pointer(uintptr(unsafe.Pointer(v.UnsafeAddr()))+f.Offset)).Elem()
}
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/testprotos/protos.go b/vendor/github.com/google/go-cmp/cmp/internal/testprotos/protos.go
deleted file mode 100644
index 120c8b0e8..000000000
--- a/vendor/github.com/google/go-cmp/cmp/internal/testprotos/protos.go
+++ /dev/null
@@ -1,116 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package testprotos
-
-func Equal(x, y Message) bool {
- if x == nil || y == nil {
- return x == nil && y == nil
- }
- return x.String() == y.String()
-}
-
-type Message interface {
- Proto()
- String() string
-}
-
-type proto interface {
- Proto()
-}
-
-type notComparable struct {
- unexportedField func()
-}
-
-type Stringer struct{ X string }
-
-func (s *Stringer) String() string { return s.X }
-
-// Project1 protocol buffers
-type (
- Eagle_States int
- Eagle_MissingCalls int
- Dreamer_States int
- Dreamer_MissingCalls int
- Slap_States int
- Goat_States int
- Donkey_States int
- SummerType int
-
- Eagle struct {
- proto
- notComparable
- Stringer
- }
- Dreamer struct {
- proto
- notComparable
- Stringer
- }
- Slap struct {
- proto
- notComparable
- Stringer
- }
- Goat struct {
- proto
- notComparable
- Stringer
- }
- Donkey struct {
- proto
- notComparable
- Stringer
- }
-)
-
-// Project2 protocol buffers
-type (
- Germ struct {
- proto
- notComparable
- Stringer
- }
- Dish struct {
- proto
- notComparable
- Stringer
- }
-)
-
-// Project3 protocol buffers
-type (
- Dirt struct {
- proto
- notComparable
- Stringer
- }
- Wizard struct {
- proto
- notComparable
- Stringer
- }
- Sadistic struct {
- proto
- notComparable
- Stringer
- }
-)
-
-// Project4 protocol buffers
-type (
- HoneyStatus int
- PoisonType int
- MetaData struct {
- proto
- notComparable
- Stringer
- }
- Restrictions struct {
- proto
- notComparable
- Stringer
- }
-)
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project1.go b/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project1.go
deleted file mode 100644
index 1999e38fd..000000000
--- a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project1.go
+++ /dev/null
@@ -1,267 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package teststructs
-
-import (
- "time"
-
- pb "github.com/google/go-cmp/cmp/internal/testprotos"
-)
-
-// This is an sanitized example of equality from a real use-case.
-// The original equality function was as follows:
-/*
-func equalEagle(x, y Eagle) bool {
- if x.Name != y.Name &&
- !reflect.DeepEqual(x.Hounds, y.Hounds) &&
- x.Desc != y.Desc &&
- x.DescLong != y.DescLong &&
- x.Prong != y.Prong &&
- x.StateGoverner != y.StateGoverner &&
- x.PrankRating != y.PrankRating &&
- x.FunnyPrank != y.FunnyPrank &&
- !pb.Equal(x.Immutable.Proto(), y.Immutable.Proto()) {
- return false
- }
-
- if len(x.Dreamers) != len(y.Dreamers) {
- return false
- }
- for i := range x.Dreamers {
- if !equalDreamer(x.Dreamers[i], y.Dreamers[i]) {
- return false
- }
- }
- if len(x.Slaps) != len(y.Slaps) {
- return false
- }
- for i := range x.Slaps {
- if !equalSlap(x.Slaps[i], y.Slaps[i]) {
- return false
- }
- }
- return true
-}
-func equalDreamer(x, y Dreamer) bool {
- if x.Name != y.Name ||
- x.Desc != y.Desc ||
- x.DescLong != y.DescLong ||
- x.ContSlapsInterval != y.ContSlapsInterval ||
- x.Ornamental != y.Ornamental ||
- x.Amoeba != y.Amoeba ||
- x.Heroes != y.Heroes ||
- x.FloppyDisk != y.FloppyDisk ||
- x.MightiestDuck != y.MightiestDuck ||
- x.FunnyPrank != y.FunnyPrank ||
- !pb.Equal(x.Immutable.Proto(), y.Immutable.Proto()) {
-
- return false
- }
- if len(x.Animal) != len(y.Animal) {
- return false
- }
- for i := range x.Animal {
- vx := x.Animal[i]
- vy := y.Animal[i]
- if reflect.TypeOf(x.Animal) != reflect.TypeOf(y.Animal) {
- return false
- }
- switch vx.(type) {
- case Goat:
- if !equalGoat(vx.(Goat), vy.(Goat)) {
- return false
- }
- case Donkey:
- if !equalDonkey(vx.(Donkey), vy.(Donkey)) {
- return false
- }
- default:
- panic(fmt.Sprintf("unknown type: %T", vx))
- }
- }
- if len(x.PreSlaps) != len(y.PreSlaps) {
- return false
- }
- for i := range x.PreSlaps {
- if !equalSlap(x.PreSlaps[i], y.PreSlaps[i]) {
- return false
- }
- }
- if len(x.ContSlaps) != len(y.ContSlaps) {
- return false
- }
- for i := range x.ContSlaps {
- if !equalSlap(x.ContSlaps[i], y.ContSlaps[i]) {
- return false
- }
- }
- return true
-}
-func equalSlap(x, y Slap) bool {
- return x.Name == y.Name &&
- x.Desc == y.Desc &&
- x.DescLong == y.DescLong &&
- pb.Equal(x.Args, y.Args) &&
- x.Tense == y.Tense &&
- x.Interval == y.Interval &&
- x.Homeland == y.Homeland &&
- x.FunnyPrank == y.FunnyPrank &&
- pb.Equal(x.Immutable.Proto(), y.Immutable.Proto())
-}
-func equalGoat(x, y Goat) bool {
- if x.Target != y.Target ||
- x.FunnyPrank != y.FunnyPrank ||
- !pb.Equal(x.Immutable.Proto(), y.Immutable.Proto()) {
- return false
- }
- if len(x.Slaps) != len(y.Slaps) {
- return false
- }
- for i := range x.Slaps {
- if !equalSlap(x.Slaps[i], y.Slaps[i]) {
- return false
- }
- }
- return true
-}
-func equalDonkey(x, y Donkey) bool {
- return x.Pause == y.Pause &&
- x.Sleep == y.Sleep &&
- x.FunnyPrank == y.FunnyPrank &&
- pb.Equal(x.Immutable.Proto(), y.Immutable.Proto())
-}
-*/
-
-type Eagle struct {
- Name string
- Hounds []string
- Desc string
- DescLong string
- Dreamers []Dreamer
- Prong int64
- Slaps []Slap
- StateGoverner string
- PrankRating string
- FunnyPrank string
- Immutable *EagleImmutable
-}
-
-type EagleImmutable struct {
- ID string
- State *pb.Eagle_States
- MissingCall *pb.Eagle_MissingCalls
- Birthday time.Time
- Death time.Time
- Started time.Time
- LastUpdate time.Time
- Creator string
- empty bool
-}
-
-type Dreamer struct {
- Name string
- Desc string
- DescLong string
- PreSlaps []Slap
- ContSlaps []Slap
- ContSlapsInterval int32
- Animal []interface{} // Could be either Goat or Donkey
- Ornamental bool
- Amoeba int64
- Heroes int32
- FloppyDisk int32
- MightiestDuck bool
- FunnyPrank string
- Immutable *DreamerImmutable
-}
-
-type DreamerImmutable struct {
- ID string
- State *pb.Dreamer_States
- MissingCall *pb.Dreamer_MissingCalls
- Calls int32
- Started time.Time
- Stopped time.Time
- LastUpdate time.Time
- empty bool
-}
-
-type Slap struct {
- Name string
- Desc string
- DescLong string
- Args pb.Message
- Tense int32
- Interval int32
- Homeland uint32
- FunnyPrank string
- Immutable *SlapImmutable
-}
-
-type SlapImmutable struct {
- ID string
- Out pb.Message
- MildSlap bool
- PrettyPrint string
- State *pb.Slap_States
- Started time.Time
- Stopped time.Time
- LastUpdate time.Time
- LoveRadius *LoveRadius
- empty bool
-}
-
-type Goat struct {
- Target string
- Slaps []Slap
- FunnyPrank string
- Immutable *GoatImmutable
-}
-
-type GoatImmutable struct {
- ID string
- State *pb.Goat_States
- Started time.Time
- Stopped time.Time
- LastUpdate time.Time
- empty bool
-}
-type Donkey struct {
- Pause bool
- Sleep int32
- FunnyPrank string
- Immutable *DonkeyImmutable
-}
-
-type DonkeyImmutable struct {
- ID string
- State *pb.Donkey_States
- Started time.Time
- Stopped time.Time
- LastUpdate time.Time
- empty bool
-}
-
-type LoveRadius struct {
- Summer *SummerLove
- empty bool
-}
-
-type SummerLove struct {
- Summary *SummerLoveSummary
- empty bool
-}
-
-type SummerLoveSummary struct {
- Devices []string
- ChangeType []pb.SummerType
- empty bool
-}
-
-func (EagleImmutable) Proto() *pb.Eagle { return nil }
-func (DreamerImmutable) Proto() *pb.Dreamer { return nil }
-func (SlapImmutable) Proto() *pb.Slap { return nil }
-func (GoatImmutable) Proto() *pb.Goat { return nil }
-func (DonkeyImmutable) Proto() *pb.Donkey { return nil }
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project2.go b/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project2.go
deleted file mode 100644
index 536592bbe..000000000
--- a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project2.go
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package teststructs
-
-import (
- "time"
-
- pb "github.com/google/go-cmp/cmp/internal/testprotos"
-)
-
-// This is an sanitized example of equality from a real use-case.
-// The original equality function was as follows:
-/*
-func equalBatch(b1, b2 *GermBatch) bool {
- for _, b := range []*GermBatch{b1, b2} {
- for _, l := range b.DirtyGerms {
- sort.Slice(l, func(i, j int) bool { return l[i].String() < l[j].String() })
- }
- for _, l := range b.CleanGerms {
- sort.Slice(l, func(i, j int) bool { return l[i].String() < l[j].String() })
- }
- }
- if !pb.DeepEqual(b1.DirtyGerms, b2.DirtyGerms) ||
- !pb.DeepEqual(b1.CleanGerms, b2.CleanGerms) ||
- !pb.DeepEqual(b1.GermMap, b2.GermMap) {
- return false
- }
- if len(b1.DishMap) != len(b2.DishMap) {
- return false
- }
- for id := range b1.DishMap {
- kpb1, err1 := b1.DishMap[id].Proto()
- kpb2, err2 := b2.DishMap[id].Proto()
- if !pb.Equal(kpb1, kpb2) || !reflect.DeepEqual(err1, err2) {
- return false
- }
- }
- return b1.HasPreviousResult == b2.HasPreviousResult &&
- b1.DirtyID == b2.DirtyID &&
- b1.CleanID == b2.CleanID &&
- b1.GermStrain == b2.GermStrain &&
- b1.TotalDirtyGerms == b2.TotalDirtyGerms &&
- b1.InfectedAt.Equal(b2.InfectedAt)
-}
-*/
-
-type GermBatch struct {
- DirtyGerms, CleanGerms map[int32][]*pb.Germ
- GermMap map[int32]*pb.Germ
- DishMap map[int32]*Dish
- HasPreviousResult bool
- DirtyID, CleanID int32
- GermStrain int32
- TotalDirtyGerms int
- InfectedAt time.Time
-}
-
-type Dish struct {
- pb *pb.Dish
- err error
-}
-
-func CreateDish(m *pb.Dish, err error) *Dish {
- return &Dish{pb: m, err: err}
-}
-
-func (d *Dish) Proto() (*pb.Dish, error) {
- if d.err != nil {
- return nil, d.err
- }
- return d.pb, nil
-}
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project3.go b/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project3.go
deleted file mode 100644
index 957d093c7..000000000
--- a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project3.go
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package teststructs
-
-import (
- "sync"
-
- pb "github.com/google/go-cmp/cmp/internal/testprotos"
-)
-
-// This is an sanitized example of equality from a real use-case.
-// The original equality function was as follows:
-/*
-func equalDirt(x, y *Dirt) bool {
- if !reflect.DeepEqual(x.table, y.table) ||
- !reflect.DeepEqual(x.ts, y.ts) ||
- x.Discord != y.Discord ||
- !pb.Equal(&x.Proto, &y.Proto) ||
- len(x.wizard) != len(y.wizard) ||
- len(x.sadistic) != len(y.sadistic) ||
- x.lastTime != y.lastTime {
- return false
- }
- for k, vx := range x.wizard {
- vy, ok := y.wizard[k]
- if !ok || !pb.Equal(vx, vy) {
- return false
- }
- }
- for k, vx := range x.sadistic {
- vy, ok := y.sadistic[k]
- if !ok || !pb.Equal(vx, vy) {
- return false
- }
- }
- return true
-}
-*/
-
-type FakeMutex struct {
- sync.Locker
- x struct{}
-}
-
-type Dirt struct {
- table Table // Always concrete type of MockTable
- ts Timestamp
- Discord DiscordState
- Proto pb.Dirt
- wizard map[string]*pb.Wizard
- sadistic map[string]*pb.Sadistic
- lastTime int64
- mu FakeMutex
-}
-
-type DiscordState int
-
-type Timestamp int64
-
-func (d *Dirt) SetTable(t Table) { d.table = t }
-func (d *Dirt) SetTimestamp(t Timestamp) { d.ts = t }
-func (d *Dirt) SetWizard(m map[string]*pb.Wizard) { d.wizard = m }
-func (d *Dirt) SetSadistic(m map[string]*pb.Sadistic) { d.sadistic = m }
-func (d *Dirt) SetLastTime(t int64) { d.lastTime = t }
-
-type Table interface {
- Operation1() error
- Operation2() error
- Operation3() error
-}
-
-type MockTable struct {
- state []string
-}
-
-func CreateMockTable(s []string) *MockTable { return &MockTable{s} }
-func (mt *MockTable) Operation1() error { return nil }
-func (mt *MockTable) Operation2() error { return nil }
-func (mt *MockTable) Operation3() error { return nil }
-func (mt *MockTable) State() []string { return mt.state }
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project4.go b/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project4.go
deleted file mode 100644
index 49920f237..000000000
--- a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/project4.go
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package teststructs
-
-import (
- "time"
-
- pb "github.com/google/go-cmp/cmp/internal/testprotos"
-)
-
-// This is an sanitized example of equality from a real use-case.
-// The original equality function was as follows:
-/*
-func equalCartel(x, y Cartel) bool {
- if !(equalHeadquarter(x.Headquarter, y.Headquarter) &&
- x.Source() == y.Source() &&
- x.CreationDate().Equal(y.CreationDate()) &&
- x.Boss() == y.Boss() &&
- x.LastCrimeDate().Equal(y.LastCrimeDate())) {
- return false
- }
- if len(x.Poisons()) != len(y.Poisons()) {
- return false
- }
- for i := range x.Poisons() {
- if !equalPoison(*x.Poisons()[i], *y.Poisons()[i]) {
- return false
- }
- }
- return true
-}
-func equalHeadquarter(x, y Headquarter) bool {
- xr, yr := x.Restrictions(), y.Restrictions()
- return x.ID() == y.ID() &&
- x.Location() == y.Location() &&
- reflect.DeepEqual(x.SubDivisions(), y.SubDivisions()) &&
- x.IncorporatedDate().Equal(y.IncorporatedDate()) &&
- pb.Equal(x.MetaData(), y.MetaData()) &&
- bytes.Equal(x.PrivateMessage(), y.PrivateMessage()) &&
- bytes.Equal(x.PublicMessage(), y.PublicMessage()) &&
- x.HorseBack() == y.HorseBack() &&
- x.Rattle() == y.Rattle() &&
- x.Convulsion() == y.Convulsion() &&
- x.Expansion() == y.Expansion() &&
- x.Status() == y.Status() &&
- pb.Equal(&xr, &yr) &&
- x.CreationTime().Equal(y.CreationTime())
-}
-func equalPoison(x, y Poison) bool {
- return x.PoisonType() == y.PoisonType() &&
- x.Expiration().Equal(y.Expiration()) &&
- x.Manufacturer() == y.Manufacturer() &&
- x.Potency() == y.Potency()
-}
-*/
-
-type Cartel struct {
- Headquarter
- source string
- creationDate time.Time
- boss string
- lastCrimeDate time.Time
- poisons []*Poison
-}
-
-func (p Cartel) Source() string { return p.source }
-func (p Cartel) CreationDate() time.Time { return p.creationDate }
-func (p Cartel) Boss() string { return p.boss }
-func (p Cartel) LastCrimeDate() time.Time { return p.lastCrimeDate }
-func (p Cartel) Poisons() []*Poison { return p.poisons }
-
-func (p *Cartel) SetSource(x string) { p.source = x }
-func (p *Cartel) SetCreationDate(x time.Time) { p.creationDate = x }
-func (p *Cartel) SetBoss(x string) { p.boss = x }
-func (p *Cartel) SetLastCrimeDate(x time.Time) { p.lastCrimeDate = x }
-func (p *Cartel) SetPoisons(x []*Poison) { p.poisons = x }
-
-type Headquarter struct {
- id uint64
- location string
- subDivisions []string
- incorporatedDate time.Time
- metaData *pb.MetaData
- privateMessage []byte
- publicMessage []byte
- horseBack string
- rattle string
- convulsion bool
- expansion uint64
- status pb.HoneyStatus
- restrictions pb.Restrictions
- creationTime time.Time
-}
-
-func (hq Headquarter) ID() uint64 { return hq.id }
-func (hq Headquarter) Location() string { return hq.location }
-func (hq Headquarter) SubDivisions() []string { return hq.subDivisions }
-func (hq Headquarter) IncorporatedDate() time.Time { return hq.incorporatedDate }
-func (hq Headquarter) MetaData() *pb.MetaData { return hq.metaData }
-func (hq Headquarter) PrivateMessage() []byte { return hq.privateMessage }
-func (hq Headquarter) PublicMessage() []byte { return hq.publicMessage }
-func (hq Headquarter) HorseBack() string { return hq.horseBack }
-func (hq Headquarter) Rattle() string { return hq.rattle }
-func (hq Headquarter) Convulsion() bool { return hq.convulsion }
-func (hq Headquarter) Expansion() uint64 { return hq.expansion }
-func (hq Headquarter) Status() pb.HoneyStatus { return hq.status }
-func (hq Headquarter) Restrictions() pb.Restrictions { return hq.restrictions }
-func (hq Headquarter) CreationTime() time.Time { return hq.creationTime }
-
-func (hq *Headquarter) SetID(x uint64) { hq.id = x }
-func (hq *Headquarter) SetLocation(x string) { hq.location = x }
-func (hq *Headquarter) SetSubDivisions(x []string) { hq.subDivisions = x }
-func (hq *Headquarter) SetIncorporatedDate(x time.Time) { hq.incorporatedDate = x }
-func (hq *Headquarter) SetMetaData(x *pb.MetaData) { hq.metaData = x }
-func (hq *Headquarter) SetPrivateMessage(x []byte) { hq.privateMessage = x }
-func (hq *Headquarter) SetPublicMessage(x []byte) { hq.publicMessage = x }
-func (hq *Headquarter) SetHorseBack(x string) { hq.horseBack = x }
-func (hq *Headquarter) SetRattle(x string) { hq.rattle = x }
-func (hq *Headquarter) SetConvulsion(x bool) { hq.convulsion = x }
-func (hq *Headquarter) SetExpansion(x uint64) { hq.expansion = x }
-func (hq *Headquarter) SetStatus(x pb.HoneyStatus) { hq.status = x }
-func (hq *Headquarter) SetRestrictions(x pb.Restrictions) { hq.restrictions = x }
-func (hq *Headquarter) SetCreationTime(x time.Time) { hq.creationTime = x }
-
-type Poison struct {
- poisonType pb.PoisonType
- expiration time.Time
- manufacturer string
- potency int
-}
-
-func (p Poison) PoisonType() pb.PoisonType { return p.poisonType }
-func (p Poison) Expiration() time.Time { return p.expiration }
-func (p Poison) Manufacturer() string { return p.manufacturer }
-func (p Poison) Potency() int { return p.potency }
-
-func (p *Poison) SetPoisonType(x pb.PoisonType) { p.poisonType = x }
-func (p *Poison) SetExpiration(x time.Time) { p.expiration = x }
-func (p *Poison) SetManufacturer(x string) { p.manufacturer = x }
-func (p *Poison) SetPotency(x int) { p.potency = x }
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/structs.go b/vendor/github.com/google/go-cmp/cmp/internal/teststructs/structs.go
deleted file mode 100644
index 6b4d2a725..000000000
--- a/vendor/github.com/google/go-cmp/cmp/internal/teststructs/structs.go
+++ /dev/null
@@ -1,197 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-package teststructs
-
-type InterfaceA interface {
- InterfaceA()
-}
-
-type (
- StructA struct{ X string } // Equal method on value receiver
- StructB struct{ X string } // Equal method on pointer receiver
- StructC struct{ X string } // Equal method (with interface argument) on value receiver
- StructD struct{ X string } // Equal method (with interface argument) on pointer receiver
- StructE struct{ X string } // Equal method (with interface argument on value receiver) on pointer receiver
- StructF struct{ X string } // Equal method (with interface argument on pointer receiver) on value receiver
-
- // These embed the above types as a value.
- StructA1 struct {
- StructA
- X string
- }
- StructB1 struct {
- StructB
- X string
- }
- StructC1 struct {
- StructC
- X string
- }
- StructD1 struct {
- StructD
- X string
- }
- StructE1 struct {
- StructE
- X string
- }
- StructF1 struct {
- StructF
- X string
- }
-
- // These embed the above types as a pointer.
- StructA2 struct {
- *StructA
- X string
- }
- StructB2 struct {
- *StructB
- X string
- }
- StructC2 struct {
- *StructC
- X string
- }
- StructD2 struct {
- *StructD
- X string
- }
- StructE2 struct {
- *StructE
- X string
- }
- StructF2 struct {
- *StructF
- X string
- }
-
- StructNo struct{ X string } // Equal method (with interface argument) on non-satisfying receiver
-
- AssignA func() int
- AssignB struct{ A int }
- AssignC chan bool
- AssignD <-chan bool
-)
-
-func (x StructA) Equal(y StructA) bool { return true }
-func (x *StructB) Equal(y *StructB) bool { return true }
-func (x StructC) Equal(y InterfaceA) bool { return true }
-func (x StructC) InterfaceA() {}
-func (x *StructD) Equal(y InterfaceA) bool { return true }
-func (x *StructD) InterfaceA() {}
-func (x *StructE) Equal(y InterfaceA) bool { return true }
-func (x StructE) InterfaceA() {}
-func (x StructF) Equal(y InterfaceA) bool { return true }
-func (x *StructF) InterfaceA() {}
-func (x StructNo) Equal(y InterfaceA) bool { return true }
-
-func (x AssignA) Equal(y func() int) bool { return true }
-func (x AssignB) Equal(y struct{ A int }) bool { return true }
-func (x AssignC) Equal(y chan bool) bool { return true }
-func (x AssignD) Equal(y <-chan bool) bool { return true }
-
-var _ = func(
- a StructA, b StructB, c StructC, d StructD, e StructE, f StructF,
- ap *StructA, bp *StructB, cp *StructC, dp *StructD, ep *StructE, fp *StructF,
- a1 StructA1, b1 StructB1, c1 StructC1, d1 StructD1, e1 StructE1, f1 StructF1,
- a2 StructA2, b2 StructB2, c2 StructC2, d2 StructD2, e2 StructE2, f2 StructF1,
-) {
- a.Equal(a)
- b.Equal(&b)
- c.Equal(c)
- d.Equal(&d)
- e.Equal(e)
- f.Equal(&f)
-
- ap.Equal(*ap)
- bp.Equal(bp)
- cp.Equal(*cp)
- dp.Equal(dp)
- ep.Equal(*ep)
- fp.Equal(fp)
-
- a1.Equal(a1.StructA)
- b1.Equal(&b1.StructB)
- c1.Equal(c1)
- d1.Equal(&d1)
- e1.Equal(e1)
- f1.Equal(&f1)
-
- a2.Equal(*a2.StructA)
- b2.Equal(b2.StructB)
- c2.Equal(c2)
- d2.Equal(&d2)
- e2.Equal(e2)
- f2.Equal(&f2)
-}
-
-type (
- privateStruct struct{ Public, private int }
- PublicStruct struct{ Public, private int }
- ParentStructA struct{ privateStruct }
- ParentStructB struct{ PublicStruct }
- ParentStructC struct {
- privateStruct
- Public, private int
- }
- ParentStructD struct {
- PublicStruct
- Public, private int
- }
- ParentStructE struct {
- privateStruct
- PublicStruct
- }
- ParentStructF struct {
- privateStruct
- PublicStruct
- Public, private int
- }
- ParentStructG struct {
- *privateStruct
- }
- ParentStructH struct {
- *PublicStruct
- }
- ParentStructI struct {
- *privateStruct
- *PublicStruct
- }
- ParentStructJ struct {
- *privateStruct
- *PublicStruct
- Public PublicStruct
- private privateStruct
- }
-)
-
-func NewParentStructG() *ParentStructG {
- return &ParentStructG{new(privateStruct)}
-}
-func NewParentStructH() *ParentStructH {
- return &ParentStructH{new(PublicStruct)}
-}
-func NewParentStructI() *ParentStructI {
- return &ParentStructI{new(privateStruct), new(PublicStruct)}
-}
-func NewParentStructJ() *ParentStructJ {
- return &ParentStructJ{
- privateStruct: new(privateStruct), PublicStruct: new(PublicStruct),
- }
-}
-func (s *privateStruct) SetPrivate(i int) { s.private = i }
-func (s *PublicStruct) SetPrivate(i int) { s.private = i }
-func (s *ParentStructC) SetPrivate(i int) { s.private = i }
-func (s *ParentStructD) SetPrivate(i int) { s.private = i }
-func (s *ParentStructF) SetPrivate(i int) { s.private = i }
-func (s *ParentStructA) PrivateStruct() *privateStruct { return &s.privateStruct }
-func (s *ParentStructC) PrivateStruct() *privateStruct { return &s.privateStruct }
-func (s *ParentStructE) PrivateStruct() *privateStruct { return &s.privateStruct }
-func (s *ParentStructF) PrivateStruct() *privateStruct { return &s.privateStruct }
-func (s *ParentStructG) PrivateStruct() *privateStruct { return s.privateStruct }
-func (s *ParentStructI) PrivateStruct() *privateStruct { return s.privateStruct }
-func (s *ParentStructJ) PrivateStruct() *privateStruct { return s.privateStruct }
-func (s *ParentStructJ) Private() *privateStruct { return &s.private }
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go
index 938f646f0..24fbae6e3 100644
--- a/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go
+++ b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go
@@ -19,7 +19,7 @@ func SortKeys(vs []reflect.Value) []reflect.Value {
}
// Sort the map keys.
- sort.Slice(vs, func(i, j int) bool { return isLess(vs[i], vs[j]) })
+ sort.SliceStable(vs, func(i, j int) bool { return isLess(vs[i], vs[j]) })
// Deduplicate keys (fails for NaNs).
vs2 := vs[:1]
@@ -42,6 +42,8 @@ func isLess(x, y reflect.Value) bool {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return x.Uint() < y.Uint()
case reflect.Float32, reflect.Float64:
+ // NOTE: This does not sort -0 as less than +0
+ // since Go maps treat -0 and +0 as equal keys.
fx, fy := x.Float(), y.Float()
return fx < fy || math.IsNaN(fx) && !math.IsNaN(fy)
case reflect.Complex64, reflect.Complex128:
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go
index d13a12ccf..06a8ffd03 100644
--- a/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go
+++ b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go
@@ -4,7 +4,10 @@
package value
-import "reflect"
+import (
+ "math"
+ "reflect"
+)
// IsZero reports whether v is the zero value.
// This does not rely on Interface and so can be used on unexported fields.
@@ -17,9 +20,9 @@ func IsZero(v reflect.Value) bool {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return v.Uint() == 0
case reflect.Float32, reflect.Float64:
- return v.Float() == 0
+ return math.Float64bits(v.Float()) == 0
case reflect.Complex64, reflect.Complex128:
- return v.Complex() == 0
+ return math.Float64bits(real(v.Complex())) == 0 && math.Float64bits(imag(v.Complex())) == 0
case reflect.String:
return v.String() == ""
case reflect.UnsafePointer:
diff --git a/vendor/github.com/google/go-cmp/cmp/options.go b/vendor/github.com/google/go-cmp/cmp/options.go
index 793448160..abbd2a63b 100644
--- a/vendor/github.com/google/go-cmp/cmp/options.go
+++ b/vendor/github.com/google/go-cmp/cmp/options.go
@@ -225,8 +225,20 @@ func (validator) apply(s *state, vx, vy reflect.Value) {
// Unable to Interface implies unexported field without visibility access.
if !vx.CanInterface() || !vy.CanInterface() {
- const help = "consider using a custom Comparer; if you control the implementation of type, you can also consider AllowUnexported or cmpopts.IgnoreUnexported"
- panic(fmt.Sprintf("cannot handle unexported field: %#v\n%s", s.curPath, help))
+ const help = "consider using a custom Comparer; if you control the implementation of type, you can also consider using an Exporter, AllowUnexported, or cmpopts.IgnoreUnexported"
+ var name string
+ if t := s.curPath.Index(-2).Type(); t.Name() != "" {
+ // Named type with unexported fields.
+ name = fmt.Sprintf("%q.%v", t.PkgPath(), t.Name()) // e.g., "path/to/package".MyType
+ } else {
+ // Unnamed type with unexported fields. Derive PkgPath from field.
+ var pkgPath string
+ for i := 0; i < t.NumField() && pkgPath == ""; i++ {
+ pkgPath = t.Field(i).PkgPath
+ }
+ name = fmt.Sprintf("%q.(%v)", pkgPath, t.String()) // e.g., "path/to/package".(struct { a int })
+ }
+ panic(fmt.Sprintf("cannot handle unexported field at %#v:\n\t%v\n%s", s.curPath, name, help))
}
panic("not reachable")
@@ -360,9 +372,8 @@ func (cm comparer) String() string {
return fmt.Sprintf("Comparer(%s)", function.NameOf(cm.fnc))
}
-// AllowUnexported returns an Option that forcibly allows operations on
-// unexported fields in certain structs, which are specified by passing in a
-// value of each struct type.
+// Exporter returns an Option that specifies whether Equal is allowed to
+// introspect into the unexported fields of certain struct types.
//
// Users of this option must understand that comparing on unexported fields
// from external packages is not safe since changes in the internal
@@ -386,10 +397,24 @@ func (cm comparer) String() string {
//
// In other cases, the cmpopts.IgnoreUnexported option can be used to ignore
// all unexported fields on specified struct types.
-func AllowUnexported(types ...interface{}) Option {
- if !supportAllowUnexported {
- panic("AllowUnexported is not supported on purego builds, Google App Engine Standard, or GopherJS")
+func Exporter(f func(reflect.Type) bool) Option {
+ if !supportExporters {
+ panic("Exporter is not supported on purego builds")
}
+ return exporter(f)
+}
+
+type exporter func(reflect.Type) bool
+
+func (exporter) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption {
+ panic("not implemented")
+}
+
+// AllowUnexported returns an Options that allows Equal to forcibly introspect
+// unexported fields of the specified struct types.
+//
+// See Exporter for the proper use of this option.
+func AllowUnexported(types ...interface{}) Option {
m := make(map[reflect.Type]bool)
for _, typ := range types {
t := reflect.TypeOf(typ)
@@ -398,13 +423,7 @@ func AllowUnexported(types ...interface{}) Option {
}
m[t] = true
}
- return visibleStructs(m)
-}
-
-type visibleStructs map[reflect.Type]bool
-
-func (visibleStructs) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption {
- panic("not implemented")
+ return exporter(func(t reflect.Type) bool { return m[t] })
}
// Result represents the comparison result for a single node and
@@ -436,6 +455,11 @@ func (r Result) ByFunc() bool {
return r.flags&reportByFunc != 0
}
+// ByCycle reports whether a reference cycle was detected.
+func (r Result) ByCycle() bool {
+ return r.flags&reportByCycle != 0
+}
+
type resultFlags uint
const (
@@ -446,6 +470,7 @@ const (
reportByIgnore
reportByMethod
reportByFunc
+ reportByCycle
)
// Reporter is an Option that can be passed to Equal. When Equal traverses
diff --git a/vendor/github.com/google/go-cmp/cmp/path.go b/vendor/github.com/google/go-cmp/cmp/path.go
index 96fffd291..509d6b852 100644
--- a/vendor/github.com/google/go-cmp/cmp/path.go
+++ b/vendor/github.com/google/go-cmp/cmp/path.go
@@ -10,6 +10,8 @@ import (
"strings"
"unicode"
"unicode/utf8"
+
+ "github.com/google/go-cmp/cmp/internal/value"
)
// Path is a list of PathSteps describing the sequence of operations to get
@@ -41,7 +43,7 @@ type PathStep interface {
// In some cases, one or both may be invalid or have restrictions:
// • For StructField, both are not interface-able if the current field
// is unexported and the struct type is not explicitly permitted by
- // AllowUnexported to traverse unexported fields.
+ // an Exporter to traverse unexported fields.
// • For SliceIndex, one may be invalid if an element is missing from
// either the x or y slice.
// • For MapIndex, one may be invalid if an entry is missing from
@@ -207,6 +209,7 @@ type SliceIndex struct{ *sliceIndex }
type sliceIndex struct {
pathStep
xkey, ykey int
+ isSlice bool // False for reflect.Array
}
func (si SliceIndex) Type() reflect.Type { return si.typ }
@@ -301,6 +304,72 @@ func (tf Transform) Func() reflect.Value { return tf.trans.fnc }
// The == operator can be used to detect the exact option used.
func (tf Transform) Option() Option { return tf.trans }
+// pointerPath represents a dual-stack of pointers encountered when
+// recursively traversing the x and y values. This data structure supports
+// detection of cycles and determining whether the cycles are equal.
+// In Go, cycles can occur via pointers, slices, and maps.
+//
+// The pointerPath uses a map to represent a stack; where descension into a
+// pointer pushes the address onto the stack, and ascension from a pointer
+// pops the address from the stack. Thus, when traversing into a pointer from
+// reflect.Ptr, reflect.Slice element, or reflect.Map, we can detect cycles
+// by checking whether the pointer has already been visited. The cycle detection
+// uses a seperate stack for the x and y values.
+//
+// If a cycle is detected we need to determine whether the two pointers
+// should be considered equal. The definition of equality chosen by Equal
+// requires two graphs to have the same structure. To determine this, both the
+// x and y values must have a cycle where the previous pointers were also
+// encountered together as a pair.
+//
+// Semantically, this is equivalent to augmenting Indirect, SliceIndex, and
+// MapIndex with pointer information for the x and y values.
+// Suppose px and py are two pointers to compare, we then search the
+// Path for whether px was ever encountered in the Path history of x, and
+// similarly so with py. If either side has a cycle, the comparison is only
+// equal if both px and py have a cycle resulting from the same PathStep.
+//
+// Using a map as a stack is more performant as we can perform cycle detection
+// in O(1) instead of O(N) where N is len(Path).
+type pointerPath struct {
+ // mx is keyed by x pointers, where the value is the associated y pointer.
+ mx map[value.Pointer]value.Pointer
+ // my is keyed by y pointers, where the value is the associated x pointer.
+ my map[value.Pointer]value.Pointer
+}
+
+func (p *pointerPath) Init() {
+ p.mx = make(map[value.Pointer]value.Pointer)
+ p.my = make(map[value.Pointer]value.Pointer)
+}
+
+// Push indicates intent to descend into pointers vx and vy where
+// visited reports whether either has been seen before. If visited before,
+// equal reports whether both pointers were encountered together.
+// Pop must be called if and only if the pointers were never visited.
+//
+// The pointers vx and vy must be a reflect.Ptr, reflect.Slice, or reflect.Map
+// and be non-nil.
+func (p pointerPath) Push(vx, vy reflect.Value) (equal, visited bool) {
+ px := value.PointerOf(vx)
+ py := value.PointerOf(vy)
+ _, ok1 := p.mx[px]
+ _, ok2 := p.my[py]
+ if ok1 || ok2 {
+ equal = p.mx[px] == py && p.my[py] == px // Pointers paired together
+ return equal, true
+ }
+ p.mx[px] = py
+ p.my[py] = px
+ return false, false
+}
+
+// Pop ascends from pointers vx and vy.
+func (p pointerPath) Pop(vx, vy reflect.Value) {
+ delete(p.mx, value.PointerOf(vx))
+ delete(p.my, value.PointerOf(vy))
+}
+
// isExported reports whether the identifier is exported.
func isExported(id string) bool {
r, _ := utf8.DecodeRuneInString(id)
diff --git a/vendor/github.com/google/go-cmp/cmp/report_compare.go b/vendor/github.com/google/go-cmp/cmp/report_compare.go
index 05efb992c..17a05eede 100644
--- a/vendor/github.com/google/go-cmp/cmp/report_compare.go
+++ b/vendor/github.com/google/go-cmp/cmp/report_compare.go
@@ -168,7 +168,7 @@ func (opts formatOptions) formatDiffList(recs []reportRecord, k reflect.Kind) te
var isZero bool
switch opts.DiffMode {
case diffIdentical:
- isZero = value.IsZero(r.Value.ValueX) || value.IsZero(r.Value.ValueX)
+ isZero = value.IsZero(r.Value.ValueX) || value.IsZero(r.Value.ValueY)
case diffRemoved:
isZero = value.IsZero(r.Value.ValueX)
case diffInserted:
diff --git a/vendor/github.com/google/go-cmp/cmp/report_reflect.go b/vendor/github.com/google/go-cmp/cmp/report_reflect.go
index 5521c604c..2761b6289 100644
--- a/vendor/github.com/google/go-cmp/cmp/report_reflect.go
+++ b/vendor/github.com/google/go-cmp/cmp/report_reflect.go
@@ -208,7 +208,6 @@ func (opts formatOptions) FormatValue(v reflect.Value, m visitedPointers) (out t
func formatMapKey(v reflect.Value) string {
var opts formatOptions
opts.TypeMode = elideType
- opts.AvoidStringer = true
opts.ShallowPointers = true
s := opts.FormatValue(v, visitedPointers{}).String()
return strings.TrimSpace(s)
diff --git a/vendor/github.com/google/go-cmp/cmp/report_slices.go b/vendor/github.com/google/go-cmp/cmp/report_slices.go
index 8cb3265e7..eafcf2e4c 100644
--- a/vendor/github.com/google/go-cmp/cmp/report_slices.go
+++ b/vendor/github.com/google/go-cmp/cmp/report_slices.go
@@ -90,7 +90,7 @@ func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode {
}
if r == '\n' {
if maxLineLen < i-lastLineIdx {
- lastLineIdx = i - lastLineIdx
+ maxLineLen = i - lastLineIdx
}
lastLineIdx = i + 1
numLines++
@@ -322,7 +322,7 @@ func coalesceInterveningIdentical(groups []diffStats, windowSize int) []diffStat
hadX, hadY := prev.NumRemoved > 0, prev.NumInserted > 0
hasX, hasY := next.NumRemoved > 0, next.NumInserted > 0
if ((hadX || hasX) && (hadY || hasY)) && curr.NumIdentical <= windowSize {
- *prev = (*prev).Append(*curr).Append(*next)
+ *prev = prev.Append(*curr).Append(*next)
groups = groups[:len(groups)-1] // Truncate off equal group
continue
}
diff --git a/vendor/github.com/google/go-cmp/cmp/report_text.go b/vendor/github.com/google/go-cmp/cmp/report_text.go
index 80605d0e4..8b8fcab7b 100644
--- a/vendor/github.com/google/go-cmp/cmp/report_text.go
+++ b/vendor/github.com/google/go-cmp/cmp/report_text.go
@@ -19,6 +19,11 @@ var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0
type indentMode int
func (n indentMode) appendIndent(b []byte, d diffMode) []byte {
+ // The output of Diff is documented as being unstable to provide future
+ // flexibility in changing the output for more humanly readable reports.
+ // This logic intentionally introduces instability to the exact output
+ // so that users can detect accidental reliance on stability early on,
+ // rather than much later when an actual change to the format occurs.
if flags.Deterministic || randBool {
// Use regular spaces (U+0020).
switch d {
@@ -360,7 +365,7 @@ func (s diffStats) String() string {
// Pluralize the name (adjusting for some obscure English grammar rules).
name := s.Name
if sum > 1 {
- name = name + "s"
+ name += "s"
if strings.HasSuffix(name, "ys") {
name = name[:len(name)-2] + "ies" // e.g., "entrys" => "entries"
}
diff --git a/vendor/github.com/googleapis/gax-go/.gitignore b/vendor/github.com/googleapis/gax-go/.gitignore
deleted file mode 100644
index 289bf1eb7..000000000
--- a/vendor/github.com/googleapis/gax-go/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-*.cover
diff --git a/vendor/github.com/googleapis/gax-go/CODE_OF_CONDUCT.md b/vendor/github.com/googleapis/gax-go/CODE_OF_CONDUCT.md
deleted file mode 100644
index 46b2a08ea..000000000
--- a/vendor/github.com/googleapis/gax-go/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,43 +0,0 @@
-# Contributor Code of Conduct
-
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
-
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
-
-Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
diff --git a/vendor/github.com/googleapis/gax-go/CONTRIBUTING.md b/vendor/github.com/googleapis/gax-go/CONTRIBUTING.md
deleted file mode 100644
index b0404fb64..000000000
--- a/vendor/github.com/googleapis/gax-go/CONTRIBUTING.md
+++ /dev/null
@@ -1,32 +0,0 @@
-Want to contribute? Great! First, read this page (including the small print at the end).
-
-### Before you contribute
-Before we can use your code, you must sign the
-[Google Individual Contributor License Agreement]
-(https://cla.developers.google.com/about/google-individual)
-(CLA), which you can do online. The CLA is necessary mainly because you own the
-copyright to your changes, even after your contribution becomes part of our
-codebase, so we need your permission to use and distribute your code. We also
-need to be sure of various other things—for instance that you'll tell us if you
-know that your code infringes on other people's patents. You don't have to sign
-the CLA until after you've submitted your code for review and a member has
-approved it, but you must do it before we can put your code into our codebase.
-Before you start working on a larger contribution, you should get in touch with
-us first through the issue tracker with your idea so that we can help out and
-possibly guide you. Coordinating up front makes it much easier to avoid
-frustration later on.
-
-### Code reviews
-All submissions, including submissions by project members, require review. We
-use Github pull requests for this purpose.
-
-### Breaking code changes
-When a breaking change is added, CI/CD will fail. If the change is expected,
-add a BREAKING_CHANGE_ACCEPTABLE=<reason> line to the CL description. This will
-cause CI/CD to skip checking breaking changes.
-
-### The small print
-Contributions made by corporations are covered by a different agreement than
-the one above, the
-[Software Grant and Corporate Contributor License Agreement]
-(https://cla.developers.google.com/about/google-corporate).
diff --git a/vendor/github.com/googleapis/gax-go/README.md b/vendor/github.com/googleapis/gax-go/README.md
deleted file mode 100644
index aae170fb6..000000000
--- a/vendor/github.com/googleapis/gax-go/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-Google API Extensions for Go
-============================
-
-[![GoDoc](https://godoc.org/github.com/googleapis/gax-go?status.svg)](https://godoc.org/github.com/googleapis/gax-go)
-
-Google API Extensions for Go (gax-go) is a set of modules which aids the
-development of APIs for clients and servers based on `gRPC` and Google API
-conventions.
-
-To install the API extensions, use:
-
-```
-go get -u github.com/googleapis/gax-go/v2
-```
-
-**Note:** Application code will rarely need to use this library directly,
-but the code generated automatically from API definition files can use it
-to simplify code generation and to provide more convenient and idiomatic API surface.
-
-Go Versions
-===========
-This library requires Go 1.6 or above.
-
-License
-=======
-BSD - please see [LICENSE](https://github.com/googleapis/gax-go/blob/master/LICENSE)
-for more information.
diff --git a/vendor/github.com/googleapis/gax-go/RELEASING.md b/vendor/github.com/googleapis/gax-go/RELEASING.md
deleted file mode 100644
index 0557654b2..000000000
--- a/vendor/github.com/googleapis/gax-go/RELEASING.md
+++ /dev/null
@@ -1,30 +0,0 @@
-# How to release v1
-
-1. Determine the current release version with `git tag -l`. It should look
- something like `vX.Y.Z`. We'll call the current version `$CV` and the new
- version `$NV`.
-1. On master, run `git log $CV..` to list all the changes since the last
- release.
- a. NOTE: Some commits may pertain to only v1 or v2. Manually introspect
- each commit to figure which occurred in v1.
-1. Edit `CHANGES.md` to include a summary of the changes.
-1. Mail the CL containing the `CHANGES.md` changes. When the CL is approved,
- submit it.
-1. Without submitting any other CLs:
- a. Switch to master.
- b. `git pull`
- c. Tag the repo with the next version: `git tag $NV`. It should be of the
- form `v1.Y.Z`.
- d. Push the tag: `git push origin $NV`.
-1. Update [the releases page](https://github.com/googleapis/google-cloud-go/releases)
- with the new release, copying the contents of the CHANGES.md.
-
-# How to release v2
-
-Same process as v1, once again noting that the commit list may include v1
-commits (which should be pruned out). Note also whilst v1 tags are `v1.Y.Z`, v2
-tags are `v2.Y.Z`.
-
-# On releasing multiple major versions
-
-Please see https://github.com/golang/go/wiki/Modules#releasing-modules-v2-or-higher.
diff --git a/vendor/github.com/googleapis/gax-go/call_option.go b/vendor/github.com/googleapis/gax-go/call_option.go
deleted file mode 100644
index 5bd48972b..000000000
--- a/vendor/github.com/googleapis/gax-go/call_option.go
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright 2016, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package gax
-
-import (
- v2 "github.com/googleapis/gax-go/v2"
- "google.golang.org/grpc"
- "google.golang.org/grpc/codes"
-)
-
-// CallOption is an option used by Invoke to control behaviors of RPC calls.
-// CallOption works by modifying relevant fields of CallSettings.
-type CallOption = v2.CallOption
-
-// Retryer is used by Invoke to determine retry behavior.
-type Retryer = v2.Retryer
-
-// WithRetry sets CallSettings.Retry to fn.
-func WithRetry(fn func() Retryer) CallOption {
- return v2.WithRetry(fn)
-}
-
-// OnCodes returns a Retryer that retries if and only if
-// the previous attempt returns a GRPC error whose error code is stored in cc.
-// Pause times between retries are specified by bo.
-//
-// bo is only used for its parameters; each Retryer has its own copy.
-func OnCodes(cc []codes.Code, bo Backoff) Retryer {
- return v2.OnCodes(cc, bo)
-}
-
-// Backoff implements exponential backoff.
-// The wait time between retries is a random value between 0 and the "retry envelope".
-// The envelope starts at Initial and increases by the factor of Multiplier every retry,
-// but is capped at Max.
-type Backoff = v2.Backoff
-
-// WithGRPCOptions allows passing gRPC call options during client creation.
-func WithGRPCOptions(opt ...grpc.CallOption) CallOption {
- return v2.WithGRPCOptions(opt...)
-}
-
-// CallSettings allow fine-grained control over how calls are made.
-type CallSettings = v2.CallSettings
diff --git a/vendor/github.com/googleapis/gax-go/gax.go b/vendor/github.com/googleapis/gax-go/gax.go
deleted file mode 100644
index 006739804..000000000
--- a/vendor/github.com/googleapis/gax-go/gax.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2016, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Package gax contains a set of modules which aid the development of APIs
-// for clients and servers based on gRPC and Google API conventions.
-//
-// Application code will rarely need to use this library directly.
-// However, code generated automatically from API definition files can use it
-// to simplify code generation and to provide more convenient and idiomatic API surfaces.
-package gax
-
-// Version specifies the gax version.
-const Version = "1.0.1"
diff --git a/vendor/github.com/googleapis/gax-go/go.mod b/vendor/github.com/googleapis/gax-go/go.mod
deleted file mode 100644
index 8b088e52b..000000000
--- a/vendor/github.com/googleapis/gax-go/go.mod
+++ /dev/null
@@ -1,13 +0,0 @@
-module github.com/googleapis/gax-go
-
-go 1.11
-
-require (
- github.com/golang/protobuf v1.3.1
- github.com/googleapis/gax-go/v2 v2.0.5
- golang.org/x/exp v0.0.0-20190221220918-438050ddec5e
- golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3
- golang.org/x/tools v0.0.0-20190114222345-bf090417da8b
- google.golang.org/grpc v1.19.0
- honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099
-)
diff --git a/vendor/github.com/googleapis/gax-go/go.sum b/vendor/github.com/googleapis/gax-go/go.sum
deleted file mode 100644
index 8fd8a46c1..000000000
--- a/vendor/github.com/googleapis/gax-go/go.sum
+++ /dev/null
@@ -1,35 +0,0 @@
-cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=
-github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
-golang.org/x/exp v0.0.0-20190221220918-438050ddec5e h1:dVreTP5bOOWt5GFwwvgTE2iU0TkIqi2x3r0b8qGlp6k=
-golang.org/x/exp v0.0.0-20190221220918-438050ddec5e/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3 h1:x/bBzNauLQAlE3fLku/xy92Y8QwKX5HZymrMz2IiKFc=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d h1:g9qWBGx4puODJTMVyoPrpoxPFgVGd+z1DZwjfRu4d0I=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA=
-golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522 h1:Ve1ORMCxvRmSXBwJK+t3Oy+V2vRW2OetUQBq4rJIkZE=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b h1:qMK98NmNCRVDIYFycQ5yVRkvgDUFfdP8Ip4KqmDEB7g=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/grpc v1.19.0 h1:cfg4PD8YEdSFnm7qLV4++93WcmhH2nIUhMjhdCvl3j8=
-google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099 h1:XJP7lxbSxWLOMNdBE4B/STaqVy6L73o0knwj2vIlxnw=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/vendor/github.com/googleapis/gax-go/header.go b/vendor/github.com/googleapis/gax-go/header.go
deleted file mode 100644
index e1a0af1ba..000000000
--- a/vendor/github.com/googleapis/gax-go/header.go
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2018, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package gax
-
-import v2 "github.com/googleapis/gax-go/v2"
-
-// XGoogHeader is for use by the Google Cloud Libraries only.
-//
-// XGoogHeader formats key-value pairs.
-// The resulting string is suitable for x-goog-api-client header.
-func XGoogHeader(keyval ...string) string {
- return v2.XGoogHeader(keyval...)
-}
diff --git a/vendor/github.com/googleapis/gax-go/invoke.go b/vendor/github.com/googleapis/gax-go/invoke.go
deleted file mode 100644
index 6422d3f73..000000000
--- a/vendor/github.com/googleapis/gax-go/invoke.go
+++ /dev/null
@@ -1,52 +0,0 @@
-// Copyright 2016, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package gax
-
-import (
- "context"
- "time"
-
- v2 "github.com/googleapis/gax-go/v2"
-)
-
-// APICall is a user defined call stub.
-type APICall = v2.APICall
-
-// Invoke calls the given APICall,
-// performing retries as specified by opts, if any.
-func Invoke(ctx context.Context, call APICall, opts ...CallOption) error {
- return v2.Invoke(ctx, call, opts...)
-}
-
-// Sleep is similar to time.Sleep, but it can be interrupted by ctx.Done() closing.
-// If interrupted, Sleep returns ctx.Err().
-func Sleep(ctx context.Context, d time.Duration) error {
- return v2.Sleep(ctx, d)
-}
diff --git a/vendor/github.com/googleapis/gax-go/tools.go b/vendor/github.com/googleapis/gax-go/tools.go
deleted file mode 100644
index ffb3ad482..000000000
--- a/vendor/github.com/googleapis/gax-go/tools.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// +build tools
-
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// This package exists to cause `go mod` and `go get` to believe these tools
-// are dependencies, even though they are not runtime dependencies of any
-// package (these are tools used by our CI builds). This means they will appear
-// in our `go.mod` file, but will not be a part of the build. Also, since the
-// build target is something non-existent, these should not be included in any
-// binaries.
-
-package gax
-
-import (
- _ "github.com/golang/protobuf/proto"
- _ "github.com/golang/protobuf/protoc-gen-go"
- _ "golang.org/x/exp/cmd/apidiff"
- _ "golang.org/x/lint/golint"
- _ "golang.org/x/tools/cmd/goimports"
- _ "honnef.co/go/tools/cmd/staticcheck"
-)
diff --git a/vendor/github.com/googleapis/gax-go/LICENSE b/vendor/github.com/googleapis/gax-go/v2/LICENSE
index 6d16b6578..6d16b6578 100644
--- a/vendor/github.com/googleapis/gax-go/LICENSE
+++ b/vendor/github.com/googleapis/gax-go/v2/LICENSE
diff --git a/vendor/github.com/googleapis/gax-go/v2/call_option.go b/vendor/github.com/googleapis/gax-go/v2/call_option.go
index 8b7753969..b1d53dd19 100644
--- a/vendor/github.com/googleapis/gax-go/v2/call_option.go
+++ b/vendor/github.com/googleapis/gax-go/v2/call_option.go
@@ -94,25 +94,22 @@ func (r *boRetryer) Retry(err error) (time.Duration, bool) {
return 0, false
}
-// Backoff implements exponential backoff. The wait time between retries is a
-// random value between 0 and the "retry period" - the time between retries. The
-// retry period starts at Initial and increases by the factor of Multiplier
-// every retry, but is capped at Max.
-//
-// Note: MaxNumRetries / RPCDeadline is specifically not provided. These should
-// be built on top of Backoff.
+// Backoff implements exponential backoff.
+// The wait time between retries is a random value between 0 and the "retry envelope".
+// The envelope starts at Initial and increases by the factor of Multiplier every retry,
+// but is capped at Max.
type Backoff struct {
- // Initial is the initial value of the retry period, defaults to 1 second.
+ // Initial is the initial value of the retry envelope, defaults to 1 second.
Initial time.Duration
- // Max is the maximum value of the retry period, defaults to 30 seconds.
+ // Max is the maximum value of the retry envelope, defaults to 30 seconds.
Max time.Duration
- // Multiplier is the factor by which the retry period increases.
+ // Multiplier is the factor by which the retry envelope increases.
// It should be greater than 1 and defaults to 2.
Multiplier float64
- // cur is the current retry period.
+ // cur is the current retry envelope
cur time.Duration
}
diff --git a/vendor/github.com/googleapis/gax-go/v2/gax.go b/vendor/github.com/googleapis/gax-go/v2/gax.go
index dfc4beb28..3fd1b0b84 100644
--- a/vendor/github.com/googleapis/gax-go/v2/gax.go
+++ b/vendor/github.com/googleapis/gax-go/v2/gax.go
@@ -36,4 +36,4 @@
package gax
// Version specifies the gax-go version being used.
-const Version = "2.0.5"
+const Version = "2.0.4"
diff --git a/vendor/github.com/googleapis/gax-go/v2/go.mod b/vendor/github.com/googleapis/gax-go/v2/go.mod
index e83e87b8b..9cdfaf447 100644
--- a/vendor/github.com/googleapis/gax-go/v2/go.mod
+++ b/vendor/github.com/googleapis/gax-go/v2/go.mod
@@ -1,5 +1,3 @@
module github.com/googleapis/gax-go/v2
-go 1.11
-
require google.golang.org/grpc v1.19.0
diff --git a/vendor/github.com/googleapis/gax-go/v2/invoke.go b/vendor/github.com/googleapis/gax-go/v2/invoke.go
index 749b9d234..fe31dd004 100644
--- a/vendor/github.com/googleapis/gax-go/v2/invoke.go
+++ b/vendor/github.com/googleapis/gax-go/v2/invoke.go
@@ -38,8 +38,8 @@ import (
// APICall is a user defined call stub.
type APICall func(context.Context, CallSettings) error
-// Invoke calls the given APICall, performing retries as specified by opts, if
-// any.
+// Invoke calls the given APICall,
+// performing retries as specified by opts, if any.
func Invoke(ctx context.Context, call APICall, opts ...CallOption) error {
var settings CallSettings
for _, opt := range opts {
diff --git a/vendor/github.com/ianlancetaylor/demangle/c++filt.go b/vendor/github.com/ianlancetaylor/demangle/c++filt.go
deleted file mode 100644
index 7ba817c9f..000000000
--- a/vendor/github.com/ianlancetaylor/demangle/c++filt.go
+++ /dev/null
@@ -1,144 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// This is a program that works like the GNU c++filt program.
-// It's here for testing purposes and as an example.
-
-package main
-
-import (
- "bufio"
- "flag"
- "fmt"
- "io"
- "os"
- "strings"
- "unicode"
-
- "github.com/ianlancetaylor/demangle"
-)
-
-func flagUsage() {
- usage(os.Stderr, 2)
-}
-
-func usage(w io.Writer, status int) {
- fmt.Fprintf(w, "Usage: %s [options] [mangled names]\n", os.Args[0])
- flag.CommandLine.SetOutput(w)
- flag.PrintDefaults()
- fmt.Fprintln(w, `Demangled names are displayed to stdout
-If a name cannot be demangled it is just echoed to stdout.
-If no names are provided on the command line, stdin is read.`)
- os.Exit(status)
-}
-
-var stripUnderscore = flag.Bool("_", false, "Ignore first leading underscore")
-var noParams = flag.Bool("p", false, "Do not display function argument types")
-var noVerbose = flag.Bool("i", false, "Do not show implementation details (if any)")
-var help = flag.Bool("h", false, "Display help information")
-var debug = flag.Bool("d", false, "Display debugging information for strings on command line")
-
-// Unimplemented c++filt flags:
-// -n (opposite of -_)
-// -t (demangle types)
-// -s (set demangling style)
-// -V (print version information)
-
-// Characters considered to be part of a symbol.
-const symbolChars = "_$."
-
-func main() {
- flag.Usage = func() { usage(os.Stderr, 1) }
- flag.Parse()
-
- if *help {
- usage(os.Stdout, 0)
- }
-
- out := bufio.NewWriter(os.Stdout)
-
- if flag.NArg() > 0 {
- for _, f := range flag.Args() {
- if *debug {
- a, err := demangle.ToAST(f, options()...)
- if err != nil {
- fmt.Fprintf(os.Stderr, "%s: %v\n", f, err)
- } else {
- fmt.Fprintf(out, "%#v\n", a)
- }
- } else {
- doDemangle(out, f)
- }
- out.WriteByte('\n')
- }
- if err := out.Flush(); err != nil {
- fmt.Fprintln(os.Stderr, err)
- os.Exit(2)
- }
- return
- }
-
- scanner := bufio.NewScanner(bufio.NewReader(os.Stdin))
- for scanner.Scan() {
- line := scanner.Text()
- start := -1
- for i, c := range line {
- if unicode.IsLetter(c) || unicode.IsNumber(c) || strings.ContainsRune(symbolChars, c) {
- if start < 0 {
- start = i
- }
- } else {
- if start >= 0 {
- doDemangle(out, line[start:i])
- }
- out.WriteRune(c)
- start = -1
- }
- }
- if start >= 0 {
- doDemangle(out, line[start:])
- start = -1
- }
- out.WriteByte('\n')
- if err := out.Flush(); err != nil {
- fmt.Fprintln(os.Stderr, err)
- os.Exit(2)
- }
- }
-}
-
-// Demangle a string just as the GNU c++filt program does.
-func doDemangle(out *bufio.Writer, name string) {
- skip := 0
- if name[0] == '.' || name[0] == '$' {
- skip++
- }
- if *stripUnderscore && name[skip] == '_' {
- skip++
- }
- result := demangle.Filter(name[skip:], options()...)
- if result == name[skip:] {
- out.WriteString(name)
- } else {
- if name[0] == '.' {
- out.WriteByte('.')
- }
- out.WriteString(result)
- }
-}
-
-// options returns the demangling options to use based on the command
-// line flags.
-func options() []demangle.Option {
- var options []demangle.Option
- if *noParams {
- options = append(options, demangle.NoParams)
- }
- if !*noVerbose {
- options = append(options, demangle.Verbose)
- }
- return options
-}
diff --git a/vendor/go.opencensus.io/internal/internal.go b/vendor/go.opencensus.io/internal/internal.go
index 62d1ef957..81dc7183e 100644
--- a/vendor/go.opencensus.io/internal/internal.go
+++ b/vendor/go.opencensus.io/internal/internal.go
@@ -12,7 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package internal
+package internal // import "go.opencensus.io/internal"
import (
"fmt"
diff --git a/vendor/go.opencensus.io/internal/tagencoding/tagencoding.go b/vendor/go.opencensus.io/internal/tagencoding/tagencoding.go
index d4151c854..41b2c3fc0 100644
--- a/vendor/go.opencensus.io/internal/tagencoding/tagencoding.go
+++ b/vendor/go.opencensus.io/internal/tagencoding/tagencoding.go
@@ -15,7 +15,7 @@
// Package tagencoding contains the tag encoding
// used interally by the stats collector.
-package tagencoding
+package tagencoding // import "go.opencensus.io/internal/tagencoding"
// Values represent the encoded buffer for the values.
type Values struct {
diff --git a/vendor/go.opencensus.io/metric/metricdata/doc.go b/vendor/go.opencensus.io/metric/metricdata/doc.go
index 2ccdb10d6..52a7b3bf8 100644
--- a/vendor/go.opencensus.io/metric/metricdata/doc.go
+++ b/vendor/go.opencensus.io/metric/metricdata/doc.go
@@ -16,4 +16,4 @@
//
// This is an EXPERIMENTAL package, and may change in arbitrary ways without
// notice.
-package metricdata
+package metricdata // import "go.opencensus.io/metric/metricdata"
diff --git a/vendor/go.opencensus.io/opencensus.go b/vendor/go.opencensus.io/opencensus.go
index d264058dc..e5e4b4368 100644
--- a/vendor/go.opencensus.io/opencensus.go
+++ b/vendor/go.opencensus.io/opencensus.go
@@ -13,7 +13,7 @@
// limitations under the License.
// Package opencensus contains Go support for OpenCensus.
-package opencensus
+package opencensus // import "go.opencensus.io"
// Version is the current release version of OpenCensus in use.
func Version() string {
diff --git a/vendor/go.opencensus.io/plugin/ochttp/doc.go b/vendor/go.opencensus.io/plugin/ochttp/doc.go
index 4e9b56813..10e626b16 100644
--- a/vendor/go.opencensus.io/plugin/ochttp/doc.go
+++ b/vendor/go.opencensus.io/plugin/ochttp/doc.go
@@ -16,4 +16,4 @@
//
// For server instrumentation, see Handler. For client-side instrumentation,
// see Transport.
-package ochttp
+package ochttp // import "go.opencensus.io/plugin/ochttp"
diff --git a/vendor/go.opencensus.io/plugin/ochttp/propagation/b3/b3.go b/vendor/go.opencensus.io/plugin/ochttp/propagation/b3/b3.go
index 486659c25..2f1c7f006 100644
--- a/vendor/go.opencensus.io/plugin/ochttp/propagation/b3/b3.go
+++ b/vendor/go.opencensus.io/plugin/ochttp/propagation/b3/b3.go
@@ -15,7 +15,7 @@
// Package b3 contains a propagation.HTTPFormat implementation
// for B3 propagation. See https://github.com/openzipkin/b3-propagation
// for more details.
-package b3
+package b3 // import "go.opencensus.io/plugin/ochttp/propagation/b3"
import (
"encoding/hex"
diff --git a/vendor/go.opencensus.io/stats/doc.go b/vendor/go.opencensus.io/stats/doc.go
index 8b99283ac..00d473ee0 100644
--- a/vendor/go.opencensus.io/stats/doc.go
+++ b/vendor/go.opencensus.io/stats/doc.go
@@ -66,4 +66,4 @@ When exported to a supporting back end, you should be able to easily navigate
to example traces that fell into each bucket in the Distribution.
*/
-package stats
+package stats // import "go.opencensus.io/stats"
diff --git a/vendor/go.opencensus.io/stats/view/doc.go b/vendor/go.opencensus.io/stats/view/doc.go
index 3e0fde45c..7bbedfe1f 100644
--- a/vendor/go.opencensus.io/stats/view/doc.go
+++ b/vendor/go.opencensus.io/stats/view/doc.go
@@ -41,7 +41,7 @@
//
// Multiple exporters can be registered to upload the data to various
// different back ends.
-package view
+package view // import "go.opencensus.io/stats/view"
// TODO(acetechnologist): Add a link to the language independent OpenCensus
// spec when it is available.
diff --git a/vendor/go.opencensus.io/tag/doc.go b/vendor/go.opencensus.io/tag/doc.go
index bfeb77f51..da16b74e4 100644
--- a/vendor/go.opencensus.io/tag/doc.go
+++ b/vendor/go.opencensus.io/tag/doc.go
@@ -23,4 +23,4 @@ Tags can be propagated on the wire and in the same
process via context.Context. Encode and Decode should be
used to represent tags into their binary propagation form.
*/
-package tag
+package tag // import "go.opencensus.io/tag"
diff --git a/vendor/go.opencensus.io/trace/doc.go b/vendor/go.opencensus.io/trace/doc.go
index d83f111a2..04b1ee4f3 100644
--- a/vendor/go.opencensus.io/trace/doc.go
+++ b/vendor/go.opencensus.io/trace/doc.go
@@ -50,4 +50,4 @@ lines to the top of the function:
StartSpan will create a new top-level span if the context
doesn't contain another span, otherwise it will create a child span.
*/
-package trace
+package trace // import "go.opencensus.io/trace"
diff --git a/vendor/go.opencensus.io/trace/propagation/propagation.go b/vendor/go.opencensus.io/trace/propagation/propagation.go
index a3b00f474..1eb190a96 100644
--- a/vendor/go.opencensus.io/trace/propagation/propagation.go
+++ b/vendor/go.opencensus.io/trace/propagation/propagation.go
@@ -13,7 +13,7 @@
// limitations under the License.
// Package propagation implements the binary trace context format.
-package propagation
+package propagation // import "go.opencensus.io/trace/propagation"
// TODO: link to external spec document.
diff --git a/vendor/golang.org/x/lint/golint/importcomment.go b/vendor/golang.org/x/lint/golint/importcomment.go
index f1dce7706..d5b32f734 100644
--- a/vendor/golang.org/x/lint/golint/importcomment.go
+++ b/vendor/golang.org/x/lint/golint/importcomment.go
@@ -10,4 +10,4 @@
// any breakages coincide with people updating their CI configs or
// whatnot.
-package main
+package main // import "golang.org/x/lint/golint"
diff --git a/vendor/golang.org/x/lint/lint.go b/vendor/golang.org/x/lint/lint.go
index 7d2e765ac..532a75ad2 100644
--- a/vendor/golang.org/x/lint/lint.go
+++ b/vendor/golang.org/x/lint/lint.go
@@ -5,7 +5,7 @@
// https://developers.google.com/open-source/licenses/bsd.
// Package lint contains a linter for Go source code.
-package lint
+package lint // import "golang.org/x/lint"
import (
"bufio"
diff --git a/vendor/golang.org/x/net/context/context.go b/vendor/golang.org/x/net/context/context.go
index 839e3a64b..a3c021d3f 100644
--- a/vendor/golang.org/x/net/context/context.go
+++ b/vendor/golang.org/x/net/context/context.go
@@ -36,7 +36,7 @@
//
// See http://blog.golang.org/context for example code for a server that uses
// Contexts.
-package context
+package context // import "golang.org/x/net/context"
// Background returns a non-nil, empty Context. It is never canceled, has no
// values, and has no deadline. It is typically used by the main function,
diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go
index e0df203ce..37dc0cfdb 100644
--- a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go
+++ b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go
@@ -3,7 +3,7 @@
// license that can be found in the LICENSE file.
// Package ctxhttp provides helper functions for performing context-aware HTTP requests.
-package ctxhttp
+package ctxhttp // import "golang.org/x/net/context/ctxhttp"
import (
"context"
diff --git a/vendor/golang.org/x/net/http2/http2.go b/vendor/golang.org/x/net/http2/http2.go
index 349999818..bdaba1d46 100644
--- a/vendor/golang.org/x/net/http2/http2.go
+++ b/vendor/golang.org/x/net/http2/http2.go
@@ -14,7 +14,7 @@
//
// See https://http2.golang.org/ for a test server running this code.
//
-package http2
+package http2 // import "golang.org/x/net/http2"
import (
"bufio"
diff --git a/vendor/golang.org/x/net/idna/idna10.0.0.go b/vendor/golang.org/x/net/idna/idna10.0.0.go
index 29a27795b..a98a31f40 100644
--- a/vendor/golang.org/x/net/idna/idna10.0.0.go
+++ b/vendor/golang.org/x/net/idna/idna10.0.0.go
@@ -15,7 +15,7 @@
// UTS #46 is defined in https://www.unicode.org/reports/tr46.
// See https://unicode.org/cldr/utility/idna.jsp for a visualization of the
// differences between these two standards.
-package idna
+package idna // import "golang.org/x/net/idna"
import (
"fmt"
diff --git a/vendor/golang.org/x/net/idna/idna9.0.0.go b/vendor/golang.org/x/net/idna/idna9.0.0.go
index 9adc1599f..8842146b5 100644
--- a/vendor/golang.org/x/net/idna/idna9.0.0.go
+++ b/vendor/golang.org/x/net/idna/idna9.0.0.go
@@ -15,7 +15,7 @@
// UTS #46 is defined in https://www.unicode.org/reports/tr46.
// See https://unicode.org/cldr/utility/idna.jsp for a visualization of the
// differences between these two standards.
-package idna
+package idna // import "golang.org/x/net/idna"
import (
"fmt"
diff --git a/vendor/golang.org/x/net/internal/timeseries/timeseries.go b/vendor/golang.org/x/net/internal/timeseries/timeseries.go
index 5f6f53cdd..dc5225b6d 100644
--- a/vendor/golang.org/x/net/internal/timeseries/timeseries.go
+++ b/vendor/golang.org/x/net/internal/timeseries/timeseries.go
@@ -3,7 +3,7 @@
// license that can be found in the LICENSE file.
// Package timeseries implements a time series structure for stats collection.
-package timeseries
+package timeseries // import "golang.org/x/net/internal/timeseries"
import (
"fmt"
diff --git a/vendor/golang.org/x/net/trace/trace.go b/vendor/golang.org/x/net/trace/trace.go
index e548dada2..3ebf6f2da 100644
--- a/vendor/golang.org/x/net/trace/trace.go
+++ b/vendor/golang.org/x/net/trace/trace.go
@@ -60,7 +60,7 @@ The /debug/events HTTP endpoint organizes the event logs by family and
by time since the last error. The expanded view displays recent log
entries and the log's call stack.
*/
-package trace
+package trace // import "golang.org/x/net/trace"
import (
"bytes"
diff --git a/vendor/golang.org/x/oauth2/google/doc.go b/vendor/golang.org/x/oauth2/google/doc.go
index 7a961ee27..73be62903 100644
--- a/vendor/golang.org/x/oauth2/google/doc.go
+++ b/vendor/golang.org/x/oauth2/google/doc.go
@@ -37,4 +37,4 @@
// same as the one obtained from the oauth2.Config returned from ConfigFromJSON or
// JWTConfigFromJSON, but the Credentials may contain additional information
// that is useful is some circumstances.
-package google
+package google // import "golang.org/x/oauth2/google"
diff --git a/vendor/golang.org/x/oauth2/jws/jws.go b/vendor/golang.org/x/oauth2/jws/jws.go
index 8bcecb46b..683d2d271 100644
--- a/vendor/golang.org/x/oauth2/jws/jws.go
+++ b/vendor/golang.org/x/oauth2/jws/jws.go
@@ -12,7 +12,7 @@
// removed in the future. It exists for internal use only.
// Please switch to another JWS package or copy this package into your own
// source tree.
-package jws
+package jws // import "golang.org/x/oauth2/jws"
import (
"bytes"
diff --git a/vendor/golang.org/x/oauth2/oauth2.go b/vendor/golang.org/x/oauth2/oauth2.go
index d852d193d..291df5c83 100644
--- a/vendor/golang.org/x/oauth2/oauth2.go
+++ b/vendor/golang.org/x/oauth2/oauth2.go
@@ -6,7 +6,7 @@
// OAuth2 authorized and authenticated HTTP requests,
// as specified in RFC 6749.
// It can additionally grant authorization with Bearer JWT.
-package oauth2
+package oauth2 // import "golang.org/x/oauth2"
import (
"bytes"
diff --git a/vendor/golang.org/x/sys/unix/mkall.sh b/vendor/golang.org/x/sys/unix/mkall.sh
index ece31e9dc..ece31e9dc 100755..100644
--- a/vendor/golang.org/x/sys/unix/mkall.sh
+++ b/vendor/golang.org/x/sys/unix/mkall.sh
diff --git a/vendor/golang.org/x/sys/unix/mkasm_darwin.go b/vendor/golang.org/x/sys/unix/mkasm_darwin.go
deleted file mode 100644
index 6f7bb6edf..000000000
--- a/vendor/golang.org/x/sys/unix/mkasm_darwin.go
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// mkasm_darwin.go generates assembly trampolines to call libSystem routines from Go.
-//This program must be run after mksyscall.go.
-package main
-
-import (
- "bytes"
- "fmt"
- "io/ioutil"
- "log"
- "os"
- "strings"
-)
-
-func writeASMFile(in string, fileName string, buildTags string) {
- trampolines := map[string]bool{}
-
- var out bytes.Buffer
-
- fmt.Fprintf(&out, "// go run mkasm_darwin.go %s\n", strings.Join(os.Args[1:], " "))
- fmt.Fprintf(&out, "// Code generated by the command above; DO NOT EDIT.\n")
- fmt.Fprintf(&out, "\n")
- fmt.Fprintf(&out, "// +build %s\n", buildTags)
- fmt.Fprintf(&out, "\n")
- fmt.Fprintf(&out, "#include \"textflag.h\"\n")
- for _, line := range strings.Split(in, "\n") {
- if !strings.HasPrefix(line, "func ") || !strings.HasSuffix(line, "_trampoline()") {
- continue
- }
- fn := line[5 : len(line)-13]
- if !trampolines[fn] {
- trampolines[fn] = true
- fmt.Fprintf(&out, "TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n", fn)
- fmt.Fprintf(&out, "\tJMP\t%s(SB)\n", fn)
- }
- }
- err := ioutil.WriteFile(fileName, out.Bytes(), 0644)
- if err != nil {
- log.Fatalf("can't write %s: %s", fileName, err)
- }
-}
-
-func main() {
- in1, err := ioutil.ReadFile("syscall_darwin.go")
- if err != nil {
- log.Fatalf("can't open syscall_darwin.go: %s", err)
- }
- arch := os.Args[1]
- in2, err := ioutil.ReadFile(fmt.Sprintf("syscall_darwin_%s.go", arch))
- if err != nil {
- log.Fatalf("can't open syscall_darwin_%s.go: %s", arch, err)
- }
- in3, err := ioutil.ReadFile(fmt.Sprintf("zsyscall_darwin_%s.go", arch))
- if err != nil {
- log.Fatalf("can't open zsyscall_darwin_%s.go: %s", arch, err)
- }
- in := string(in1) + string(in2) + string(in3)
-
- writeASMFile(in, fmt.Sprintf("zsyscall_darwin_%s.s", arch), "go1.12")
-
- in1, err = ioutil.ReadFile("syscall_darwin.1_13.go")
- if err != nil {
- log.Fatalf("can't open syscall_darwin.1_13.go: %s", err)
- }
- in2, err = ioutil.ReadFile(fmt.Sprintf("zsyscall_darwin_%s.1_13.go", arch))
- if err != nil {
- log.Fatalf("can't open zsyscall_darwin_%s.1_13.go: %s", arch, err)
- }
-
- in = string(in1) + string(in2)
-
- writeASMFile(in, fmt.Sprintf("zsyscall_darwin_%s.1_13.s", arch), "go1.13")
-}
diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh
index 08f8230d6..08f8230d6 100755..100644
--- a/vendor/golang.org/x/sys/unix/mkerrors.sh
+++ b/vendor/golang.org/x/sys/unix/mkerrors.sh
diff --git a/vendor/golang.org/x/sys/unix/mkmerge.go b/vendor/golang.org/x/sys/unix/mkmerge.go
deleted file mode 100644
index 8bde45016..000000000
--- a/vendor/golang.org/x/sys/unix/mkmerge.go
+++ /dev/null
@@ -1,521 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// mkmerge.go parses generated source files and merges common
-// consts, funcs, and types into a common source file, per GOOS.
-//
-// Usage:
-// $ go run mkmerge.go -out MERGED FILE [FILE ...]
-//
-// Example:
-// # Remove all common consts, funcs, and types from zerrors_linux_*.go
-// # and write the common code into zerrors_linux.go
-// $ go run mkmerge.go -out zerrors_linux.go zerrors_linux_*.go
-//
-// mkmerge.go performs the merge in the following steps:
-// 1. Construct the set of common code that is idential in all
-// architecture-specific files.
-// 2. Write this common code to the merged file.
-// 3. Remove the common code from all architecture-specific files.
-package main
-
-import (
- "bufio"
- "bytes"
- "flag"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "io"
- "io/ioutil"
- "log"
- "os"
- "path"
- "path/filepath"
- "regexp"
- "strconv"
- "strings"
-)
-
-const validGOOS = "aix|darwin|dragonfly|freebsd|linux|netbsd|openbsd|solaris"
-
-// getValidGOOS returns GOOS, true if filename ends with a valid "_GOOS.go"
-func getValidGOOS(filename string) (string, bool) {
- matches := regexp.MustCompile(`_(` + validGOOS + `)\.go$`).FindStringSubmatch(filename)
- if len(matches) != 2 {
- return "", false
- }
- return matches[1], true
-}
-
-// codeElem represents an ast.Decl in a comparable way.
-type codeElem struct {
- tok token.Token // e.g. token.CONST, token.TYPE, or token.FUNC
- src string // the declaration formatted as source code
-}
-
-// newCodeElem returns a codeElem based on tok and node, or an error is returned.
-func newCodeElem(tok token.Token, node ast.Node) (codeElem, error) {
- var b strings.Builder
- err := format.Node(&b, token.NewFileSet(), node)
- if err != nil {
- return codeElem{}, err
- }
- return codeElem{tok, b.String()}, nil
-}
-
-// codeSet is a set of codeElems
-type codeSet struct {
- set map[codeElem]bool // true for all codeElems in the set
-}
-
-// newCodeSet returns a new codeSet
-func newCodeSet() *codeSet { return &codeSet{make(map[codeElem]bool)} }
-
-// add adds elem to c
-func (c *codeSet) add(elem codeElem) { c.set[elem] = true }
-
-// has returns true if elem is in c
-func (c *codeSet) has(elem codeElem) bool { return c.set[elem] }
-
-// isEmpty returns true if the set is empty
-func (c *codeSet) isEmpty() bool { return len(c.set) == 0 }
-
-// intersection returns a new set which is the intersection of c and a
-func (c *codeSet) intersection(a *codeSet) *codeSet {
- res := newCodeSet()
-
- for elem := range c.set {
- if a.has(elem) {
- res.add(elem)
- }
- }
- return res
-}
-
-// keepCommon is a filterFn for filtering the merged file with common declarations.
-func (c *codeSet) keepCommon(elem codeElem) bool {
- switch elem.tok {
- case token.VAR:
- // Remove all vars from the merged file
- return false
- case token.CONST, token.TYPE, token.FUNC, token.COMMENT:
- // Remove arch-specific consts, types, functions, and file-level comments from the merged file
- return c.has(elem)
- case token.IMPORT:
- // Keep imports, they are handled by filterImports
- return true
- }
-
- log.Fatalf("keepCommon: invalid elem %v", elem)
- return true
-}
-
-// keepArchSpecific is a filterFn for filtering the GOARC-specific files.
-func (c *codeSet) keepArchSpecific(elem codeElem) bool {
- switch elem.tok {
- case token.CONST, token.TYPE, token.FUNC:
- // Remove common consts, types, or functions from the arch-specific file
- return !c.has(elem)
- }
- return true
-}
-
-// srcFile represents a source file
-type srcFile struct {
- name string
- src []byte
-}
-
-// filterFn is a helper for filter
-type filterFn func(codeElem) bool
-
-// filter parses and filters Go source code from src, removing top
-// level declarations using keep as predicate.
-// For src parameter, please see docs for parser.ParseFile.
-func filter(src interface{}, keep filterFn) ([]byte, error) {
- // Parse the src into an ast
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
- if err != nil {
- return nil, err
- }
- cmap := ast.NewCommentMap(fset, f, f.Comments)
-
- // Group const/type specs on adjacent lines
- var groups specGroups = make(map[string]int)
- var groupID int
-
- decls := f.Decls
- f.Decls = f.Decls[:0]
- for _, decl := range decls {
- switch decl := decl.(type) {
- case *ast.GenDecl:
- // Filter imports, consts, types, vars
- specs := decl.Specs
- decl.Specs = decl.Specs[:0]
- for i, spec := range specs {
- elem, err := newCodeElem(decl.Tok, spec)
- if err != nil {
- return nil, err
- }
-
- // Create new group if there are empty lines between this and the previous spec
- if i > 0 && fset.Position(specs[i-1].End()).Line < fset.Position(spec.Pos()).Line-1 {
- groupID++
- }
-
- // Check if we should keep this spec
- if keep(elem) {
- decl.Specs = append(decl.Specs, spec)
- groups.add(elem.src, groupID)
- }
- }
- // Check if we should keep this decl
- if len(decl.Specs) > 0 {
- f.Decls = append(f.Decls, decl)
- }
- case *ast.FuncDecl:
- // Filter funcs
- elem, err := newCodeElem(token.FUNC, decl)
- if err != nil {
- return nil, err
- }
- if keep(elem) {
- f.Decls = append(f.Decls, decl)
- }
- }
- }
-
- // Filter file level comments
- if cmap[f] != nil {
- commentGroups := cmap[f]
- cmap[f] = cmap[f][:0]
- for _, cGrp := range commentGroups {
- if keep(codeElem{token.COMMENT, cGrp.Text()}) {
- cmap[f] = append(cmap[f], cGrp)
- }
- }
- }
- f.Comments = cmap.Filter(f).Comments()
-
- // Generate code for the filtered ast
- var buf bytes.Buffer
- if err = format.Node(&buf, fset, f); err != nil {
- return nil, err
- }
-
- groupedSrc, err := groups.filterEmptyLines(&buf)
- if err != nil {
- return nil, err
- }
-
- return filterImports(groupedSrc)
-}
-
-// getCommonSet returns the set of consts, types, and funcs that are present in every file.
-func getCommonSet(files []srcFile) (*codeSet, error) {
- if len(files) == 0 {
- return nil, fmt.Errorf("no files provided")
- }
- // Use the first architecture file as the baseline
- baseSet, err := getCodeSet(files[0].src)
- if err != nil {
- return nil, err
- }
-
- // Compare baseline set with other architecture files: discard any element,
- // that doesn't exist in other architecture files.
- for _, f := range files[1:] {
- set, err := getCodeSet(f.src)
- if err != nil {
- return nil, err
- }
-
- baseSet = baseSet.intersection(set)
- }
- return baseSet, nil
-}
-
-// getCodeSet returns the set of all top-level consts, types, and funcs from src.
-// src must be string, []byte, or io.Reader (see go/parser.ParseFile docs)
-func getCodeSet(src interface{}) (*codeSet, error) {
- set := newCodeSet()
-
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
- if err != nil {
- return nil, err
- }
-
- for _, decl := range f.Decls {
- switch decl := decl.(type) {
- case *ast.GenDecl:
- // Add const, and type declarations
- if !(decl.Tok == token.CONST || decl.Tok == token.TYPE) {
- break
- }
-
- for _, spec := range decl.Specs {
- elem, err := newCodeElem(decl.Tok, spec)
- if err != nil {
- return nil, err
- }
-
- set.add(elem)
- }
- case *ast.FuncDecl:
- // Add func declarations
- elem, err := newCodeElem(token.FUNC, decl)
- if err != nil {
- return nil, err
- }
-
- set.add(elem)
- }
- }
-
- // Add file level comments
- cmap := ast.NewCommentMap(fset, f, f.Comments)
- for _, cGrp := range cmap[f] {
- set.add(codeElem{token.COMMENT, cGrp.Text()})
- }
-
- return set, nil
-}
-
-// importName returns the identifier (PackageName) for an imported package
-func importName(iSpec *ast.ImportSpec) (string, error) {
- if iSpec.Name == nil {
- name, err := strconv.Unquote(iSpec.Path.Value)
- if err != nil {
- return "", err
- }
- return path.Base(name), nil
- }
- return iSpec.Name.Name, nil
-}
-
-// specGroups tracks grouped const/type specs with a map of line: groupID pairs
-type specGroups map[string]int
-
-// add spec source to group
-func (s specGroups) add(src string, groupID int) error {
- srcBytes, err := format.Source(bytes.TrimSpace([]byte(src)))
- if err != nil {
- return err
- }
- s[string(srcBytes)] = groupID
- return nil
-}
-
-// filterEmptyLines removes empty lines within groups of const/type specs.
-// Returns the filtered source.
-func (s specGroups) filterEmptyLines(src io.Reader) ([]byte, error) {
- scanner := bufio.NewScanner(src)
- var out bytes.Buffer
-
- var emptyLines bytes.Buffer
- prevGroupID := -1 // Initialize to invalid group
- for scanner.Scan() {
- line := bytes.TrimSpace(scanner.Bytes())
-
- if len(line) == 0 {
- fmt.Fprintf(&emptyLines, "%s\n", scanner.Bytes())
- continue
- }
-
- // Discard emptyLines if previous non-empty line belonged to the same
- // group as this line
- if src, err := format.Source(line); err == nil {
- groupID, ok := s[string(src)]
- if ok && groupID == prevGroupID {
- emptyLines.Reset()
- }
- prevGroupID = groupID
- }
-
- emptyLines.WriteTo(&out)
- fmt.Fprintf(&out, "%s\n", scanner.Bytes())
- }
- if err := scanner.Err(); err != nil {
- return nil, err
- }
- return out.Bytes(), nil
-}
-
-// filterImports removes unused imports from fileSrc, and returns a formatted src.
-func filterImports(fileSrc []byte) ([]byte, error) {
- fset := token.NewFileSet()
- file, err := parser.ParseFile(fset, "", fileSrc, parser.ParseComments)
- if err != nil {
- return nil, err
- }
- cmap := ast.NewCommentMap(fset, file, file.Comments)
-
- // create set of references to imported identifiers
- keepImport := make(map[string]bool)
- for _, u := range file.Unresolved {
- keepImport[u.Name] = true
- }
-
- // filter import declarations
- decls := file.Decls
- file.Decls = file.Decls[:0]
- for _, decl := range decls {
- importDecl, ok := decl.(*ast.GenDecl)
-
- // Keep non-import declarations
- if !ok || importDecl.Tok != token.IMPORT {
- file.Decls = append(file.Decls, decl)
- continue
- }
-
- // Filter the import specs
- specs := importDecl.Specs
- importDecl.Specs = importDecl.Specs[:0]
- for _, spec := range specs {
- iSpec := spec.(*ast.ImportSpec)
- name, err := importName(iSpec)
- if err != nil {
- return nil, err
- }
-
- if keepImport[name] {
- importDecl.Specs = append(importDecl.Specs, iSpec)
- }
- }
- if len(importDecl.Specs) > 0 {
- file.Decls = append(file.Decls, importDecl)
- }
- }
-
- // filter file.Imports
- imports := file.Imports
- file.Imports = file.Imports[:0]
- for _, spec := range imports {
- name, err := importName(spec)
- if err != nil {
- return nil, err
- }
-
- if keepImport[name] {
- file.Imports = append(file.Imports, spec)
- }
- }
- file.Comments = cmap.Filter(file).Comments()
-
- var buf bytes.Buffer
- err = format.Node(&buf, fset, file)
- if err != nil {
- return nil, err
- }
-
- return buf.Bytes(), nil
-}
-
-// merge extracts duplicate code from archFiles and merges it to mergeFile.
-// 1. Construct commonSet: the set of code that is idential in all archFiles.
-// 2. Write the code in commonSet to mergedFile.
-// 3. Remove the commonSet code from all archFiles.
-func merge(mergedFile string, archFiles ...string) error {
- // extract and validate the GOOS part of the merged filename
- goos, ok := getValidGOOS(mergedFile)
- if !ok {
- return fmt.Errorf("invalid GOOS in merged file name %s", mergedFile)
- }
-
- // Read architecture files
- var inSrc []srcFile
- for _, file := range archFiles {
- src, err := ioutil.ReadFile(file)
- if err != nil {
- return fmt.Errorf("cannot read archfile %s: %w", file, err)
- }
-
- inSrc = append(inSrc, srcFile{file, src})
- }
-
- // 1. Construct the set of top-level declarations common for all files
- commonSet, err := getCommonSet(inSrc)
- if err != nil {
- return err
- }
- if commonSet.isEmpty() {
- // No common code => do not modify any files
- return nil
- }
-
- // 2. Write the merged file
- mergedSrc, err := filter(inSrc[0].src, commonSet.keepCommon)
- if err != nil {
- return err
- }
-
- f, err := os.Create(mergedFile)
- if err != nil {
- return err
- }
-
- buf := bufio.NewWriter(f)
- fmt.Fprintln(buf, "// Code generated by mkmerge.go; DO NOT EDIT.")
- fmt.Fprintln(buf)
- fmt.Fprintf(buf, "// +build %s\n", goos)
- fmt.Fprintln(buf)
- buf.Write(mergedSrc)
-
- err = buf.Flush()
- if err != nil {
- return err
- }
- err = f.Close()
- if err != nil {
- return err
- }
-
- // 3. Remove duplicate declarations from the architecture files
- for _, inFile := range inSrc {
- src, err := filter(inFile.src, commonSet.keepArchSpecific)
- if err != nil {
- return err
- }
- err = ioutil.WriteFile(inFile.name, src, 0644)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-func main() {
- var mergedFile string
- flag.StringVar(&mergedFile, "out", "", "Write merged code to `FILE`")
- flag.Parse()
-
- // Expand wildcards
- var filenames []string
- for _, arg := range flag.Args() {
- matches, err := filepath.Glob(arg)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Invalid command line argument %q: %v\n", arg, err)
- os.Exit(1)
- }
- filenames = append(filenames, matches...)
- }
-
- if len(filenames) < 2 {
- // No need to merge
- return
- }
-
- err := merge(mergedFile, filenames...)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Merge failed with error: %v\n", err)
- os.Exit(1)
- }
-}
diff --git a/vendor/golang.org/x/sys/unix/mkpost.go b/vendor/golang.org/x/sys/unix/mkpost.go
deleted file mode 100644
index 5ee1a16b9..000000000
--- a/vendor/golang.org/x/sys/unix/mkpost.go
+++ /dev/null
@@ -1,127 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// mkpost processes the output of cgo -godefs to
-// modify the generated types. It is used to clean up
-// the sys API in an architecture specific manner.
-//
-// mkpost is run after cgo -godefs; see README.md.
-package main
-
-import (
- "bytes"
- "fmt"
- "go/format"
- "io/ioutil"
- "log"
- "os"
- "regexp"
-)
-
-func main() {
- // Get the OS and architecture (using GOARCH_TARGET if it exists)
- goos := os.Getenv("GOOS")
- goarch := os.Getenv("GOARCH_TARGET")
- if goarch == "" {
- goarch = os.Getenv("GOARCH")
- }
- // Check that we are using the Docker-based build system if we should be.
- if goos == "linux" {
- if os.Getenv("GOLANG_SYS_BUILD") != "docker" {
- os.Stderr.WriteString("In the Docker-based build system, mkpost should not be called directly.\n")
- os.Stderr.WriteString("See README.md\n")
- os.Exit(1)
- }
- }
-
- b, err := ioutil.ReadAll(os.Stdin)
- if err != nil {
- log.Fatal(err)
- }
-
- if goos == "aix" {
- // Replace type of Atim, Mtim and Ctim by Timespec in Stat_t
- // to avoid having both StTimespec and Timespec.
- sttimespec := regexp.MustCompile(`_Ctype_struct_st_timespec`)
- b = sttimespec.ReplaceAll(b, []byte("Timespec"))
- }
-
- // Intentionally export __val fields in Fsid and Sigset_t
- valRegex := regexp.MustCompile(`type (Fsid|Sigset_t) struct {(\s+)X__(bits|val)(\s+\S+\s+)}`)
- b = valRegex.ReplaceAll(b, []byte("type $1 struct {${2}Val$4}"))
-
- // Intentionally export __fds_bits field in FdSet
- fdSetRegex := regexp.MustCompile(`type (FdSet) struct {(\s+)X__fds_bits(\s+\S+\s+)}`)
- b = fdSetRegex.ReplaceAll(b, []byte("type $1 struct {${2}Bits$3}"))
-
- // If we have empty Ptrace structs, we should delete them. Only s390x emits
- // nonempty Ptrace structs.
- ptraceRexexp := regexp.MustCompile(`type Ptrace((Psw|Fpregs|Per) struct {\s*})`)
- b = ptraceRexexp.ReplaceAll(b, nil)
-
- // Replace the control_regs union with a blank identifier for now.
- controlRegsRegex := regexp.MustCompile(`(Control_regs)\s+\[0\]uint64`)
- b = controlRegsRegex.ReplaceAll(b, []byte("_ [0]uint64"))
-
- // Remove fields that are added by glibc
- // Note that this is unstable as the identifers are private.
- removeFieldsRegex := regexp.MustCompile(`X__glibc\S*`)
- b = removeFieldsRegex.ReplaceAll(b, []byte("_"))
-
- // Convert [65]int8 to [65]byte in Utsname members to simplify
- // conversion to string; see golang.org/issue/20753
- convertUtsnameRegex := regexp.MustCompile(`((Sys|Node|Domain)name|Release|Version|Machine)(\s+)\[(\d+)\]u?int8`)
- b = convertUtsnameRegex.ReplaceAll(b, []byte("$1$3[$4]byte"))
-
- // Convert [n]int8 to [n]byte in Statvfs_t members to simplify
- // conversion to string.
- convertStatvfsRegex := regexp.MustCompile(`((Fstype|Mnton|Mntfrom)name)(\s+)\[(\d+)\]int8`)
- b = convertStatvfsRegex.ReplaceAll(b, []byte("$1$3[$4]byte"))
-
- // Convert [1024]int8 to [1024]byte in Ptmget members
- convertPtmget := regexp.MustCompile(`([SC]n)(\s+)\[(\d+)\]u?int8`)
- b = convertPtmget.ReplaceAll(b, []byte("$1[$3]byte"))
-
- // Remove spare fields (e.g. in Statx_t)
- spareFieldsRegex := regexp.MustCompile(`X__spare\S*`)
- b = spareFieldsRegex.ReplaceAll(b, []byte("_"))
-
- // Remove cgo padding fields
- removePaddingFieldsRegex := regexp.MustCompile(`Pad_cgo_\d+`)
- b = removePaddingFieldsRegex.ReplaceAll(b, []byte("_"))
-
- // Remove padding, hidden, or unused fields
- removeFieldsRegex = regexp.MustCompile(`\b(X_\S+|Padding)`)
- b = removeFieldsRegex.ReplaceAll(b, []byte("_"))
-
- // Remove the first line of warning from cgo
- b = b[bytes.IndexByte(b, '\n')+1:]
- // Modify the command in the header to include:
- // mkpost, our own warning, and a build tag.
- replacement := fmt.Sprintf(`$1 | go run mkpost.go
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s,%s`, goarch, goos)
- cgoCommandRegex := regexp.MustCompile(`(cgo -godefs .*)`)
- b = cgoCommandRegex.ReplaceAll(b, []byte(replacement))
-
- // Rename Stat_t time fields
- if goos == "freebsd" && goarch == "386" {
- // Hide Stat_t.[AMCB]tim_ext fields
- renameStatTimeExtFieldsRegex := regexp.MustCompile(`[AMCB]tim_ext`)
- b = renameStatTimeExtFieldsRegex.ReplaceAll(b, []byte("_"))
- }
- renameStatTimeFieldsRegex := regexp.MustCompile(`([AMCB])(?:irth)?time?(?:spec)?\s+(Timespec|StTimespec)`)
- b = renameStatTimeFieldsRegex.ReplaceAll(b, []byte("${1}tim ${2}"))
-
- // gofmt
- b, err = format.Source(b)
- if err != nil {
- log.Fatal(err)
- }
-
- os.Stdout.Write(b)
-}
diff --git a/vendor/golang.org/x/sys/unix/mksyscall.go b/vendor/golang.org/x/sys/unix/mksyscall.go
deleted file mode 100644
index 9e540cc89..000000000
--- a/vendor/golang.org/x/sys/unix/mksyscall.go
+++ /dev/null
@@ -1,402 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-This program reads a file containing function prototypes
-(like syscall_darwin.go) and generates system call bodies.
-The prototypes are marked by lines beginning with "//sys"
-and read like func declarations if //sys is replaced by func, but:
- * The parameter lists must give a name for each argument.
- This includes return parameters.
- * The parameter lists must give a type for each argument:
- the (x, y, z int) shorthand is not allowed.
- * If the return parameter is an error number, it must be named errno.
-
-A line beginning with //sysnb is like //sys, except that the
-goroutine will not be suspended during the execution of the system
-call. This must only be used for system calls which can never
-block, as otherwise the system call could cause all goroutines to
-hang.
-*/
-package main
-
-import (
- "bufio"
- "flag"
- "fmt"
- "os"
- "regexp"
- "strings"
-)
-
-var (
- b32 = flag.Bool("b32", false, "32bit big-endian")
- l32 = flag.Bool("l32", false, "32bit little-endian")
- plan9 = flag.Bool("plan9", false, "plan9")
- openbsd = flag.Bool("openbsd", false, "openbsd")
- netbsd = flag.Bool("netbsd", false, "netbsd")
- dragonfly = flag.Bool("dragonfly", false, "dragonfly")
- arm = flag.Bool("arm", false, "arm") // 64-bit value should use (even, odd)-pair
- tags = flag.String("tags", "", "build tags")
- filename = flag.String("output", "", "output file name (standard output if omitted)")
-)
-
-// cmdLine returns this programs's commandline arguments
-func cmdLine() string {
- return "go run mksyscall.go " + strings.Join(os.Args[1:], " ")
-}
-
-// buildTags returns build tags
-func buildTags() string {
- return *tags
-}
-
-// Param is function parameter
-type Param struct {
- Name string
- Type string
-}
-
-// usage prints the program usage
-func usage() {
- fmt.Fprintf(os.Stderr, "usage: go run mksyscall.go [-b32 | -l32] [-tags x,y] [file ...]\n")
- os.Exit(1)
-}
-
-// parseParamList parses parameter list and returns a slice of parameters
-func parseParamList(list string) []string {
- list = strings.TrimSpace(list)
- if list == "" {
- return []string{}
- }
- return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
-}
-
-// parseParam splits a parameter into name and type
-func parseParam(p string) Param {
- ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
- if ps == nil {
- fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
- os.Exit(1)
- }
- return Param{ps[1], ps[2]}
-}
-
-func main() {
- // Get the OS and architecture (using GOARCH_TARGET if it exists)
- goos := os.Getenv("GOOS")
- if goos == "" {
- fmt.Fprintln(os.Stderr, "GOOS not defined in environment")
- os.Exit(1)
- }
- goarch := os.Getenv("GOARCH_TARGET")
- if goarch == "" {
- goarch = os.Getenv("GOARCH")
- }
-
- // Check that we are using the Docker-based build system if we should
- if goos == "linux" {
- if os.Getenv("GOLANG_SYS_BUILD") != "docker" {
- fmt.Fprintf(os.Stderr, "In the Docker-based build system, mksyscall should not be called directly.\n")
- fmt.Fprintf(os.Stderr, "See README.md\n")
- os.Exit(1)
- }
- }
-
- flag.Usage = usage
- flag.Parse()
- if len(flag.Args()) <= 0 {
- fmt.Fprintf(os.Stderr, "no files to parse provided\n")
- usage()
- }
-
- endianness := ""
- if *b32 {
- endianness = "big-endian"
- } else if *l32 {
- endianness = "little-endian"
- }
-
- libc := false
- if goos == "darwin" && (strings.Contains(buildTags(), ",go1.12") || strings.Contains(buildTags(), ",go1.13")) {
- libc = true
- }
- trampolines := map[string]bool{}
-
- text := ""
- for _, path := range flag.Args() {
- file, err := os.Open(path)
- if err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- s := bufio.NewScanner(file)
- for s.Scan() {
- t := s.Text()
- t = strings.TrimSpace(t)
- t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
- nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
- if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
- continue
- }
-
- // Line must be of the form
- // func Open(path string, mode int, perm int) (fd int, errno error)
- // Split into name, in params, out params.
- f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*((?i)SYS_[A-Z0-9_]+))?$`).FindStringSubmatch(t)
- if f == nil {
- fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
- os.Exit(1)
- }
- funct, inps, outps, sysname := f[2], f[3], f[4], f[5]
-
- // ClockGettime doesn't have a syscall number on Darwin, only generate libc wrappers.
- if goos == "darwin" && !libc && funct == "ClockGettime" {
- continue
- }
-
- // Split argument lists on comma.
- in := parseParamList(inps)
- out := parseParamList(outps)
-
- // Try in vain to keep people from editing this file.
- // The theory is that they jump into the middle of the file
- // without reading the header.
- text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
-
- // Go function header.
- outDecl := ""
- if len(out) > 0 {
- outDecl = fmt.Sprintf(" (%s)", strings.Join(out, ", "))
- }
- text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outDecl)
-
- // Check if err return available
- errvar := ""
- for _, param := range out {
- p := parseParam(param)
- if p.Type == "error" {
- errvar = p.Name
- break
- }
- }
-
- // Prepare arguments to Syscall.
- var args []string
- n := 0
- for _, param := range in {
- p := parseParam(param)
- if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
- args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))")
- } else if p.Type == "string" && errvar != "" {
- text += fmt.Sprintf("\tvar _p%d *byte\n", n)
- text += fmt.Sprintf("\t_p%d, %s = BytePtrFromString(%s)\n", n, errvar, p.Name)
- text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
- args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
- n++
- } else if p.Type == "string" {
- fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
- text += fmt.Sprintf("\tvar _p%d *byte\n", n)
- text += fmt.Sprintf("\t_p%d, _ = BytePtrFromString(%s)\n", n, p.Name)
- args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
- n++
- } else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
- // Convert slice into pointer, length.
- // Have to be careful not to take address of &a[0] if len == 0:
- // pass dummy pointer in that case.
- // Used to pass nil, but some OSes or simulators reject write(fd, nil, 0).
- text += fmt.Sprintf("\tvar _p%d unsafe.Pointer\n", n)
- text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = unsafe.Pointer(&%s[0])\n\t}", p.Name, n, p.Name)
- text += fmt.Sprintf(" else {\n\t\t_p%d = unsafe.Pointer(&_zero)\n\t}\n", n)
- args = append(args, fmt.Sprintf("uintptr(_p%d)", n), fmt.Sprintf("uintptr(len(%s))", p.Name))
- n++
- } else if p.Type == "int64" && (*openbsd || *netbsd) {
- args = append(args, "0")
- if endianness == "big-endian" {
- args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
- } else if endianness == "little-endian" {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
- } else {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
- }
- } else if p.Type == "int64" && *dragonfly {
- if regexp.MustCompile(`^(?i)extp(read|write)`).FindStringSubmatch(funct) == nil {
- args = append(args, "0")
- }
- if endianness == "big-endian" {
- args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
- } else if endianness == "little-endian" {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
- } else {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
- }
- } else if (p.Type == "int64" || p.Type == "uint64") && endianness != "" {
- if len(args)%2 == 1 && *arm {
- // arm abi specifies 64-bit argument uses
- // (even, odd) pair
- args = append(args, "0")
- }
- if endianness == "big-endian" {
- args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
- } else {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
- }
- } else {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
- }
- }
-
- // Determine which form to use; pad args with zeros.
- asm := "Syscall"
- if nonblock != nil {
- if errvar == "" && goos == "linux" {
- asm = "RawSyscallNoError"
- } else {
- asm = "RawSyscall"
- }
- } else {
- if errvar == "" && goos == "linux" {
- asm = "SyscallNoError"
- }
- }
- if len(args) <= 3 {
- for len(args) < 3 {
- args = append(args, "0")
- }
- } else if len(args) <= 6 {
- asm += "6"
- for len(args) < 6 {
- args = append(args, "0")
- }
- } else if len(args) <= 9 {
- asm += "9"
- for len(args) < 9 {
- args = append(args, "0")
- }
- } else {
- fmt.Fprintf(os.Stderr, "%s:%s too many arguments to system call\n", path, funct)
- }
-
- // System call number.
- if sysname == "" {
- sysname = "SYS_" + funct
- sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
- sysname = strings.ToUpper(sysname)
- }
-
- var libcFn string
- if libc {
- asm = "syscall_" + strings.ToLower(asm[:1]) + asm[1:] // internal syscall call
- sysname = strings.TrimPrefix(sysname, "SYS_") // remove SYS_
- sysname = strings.ToLower(sysname) // lowercase
- libcFn = sysname
- sysname = "funcPC(libc_" + sysname + "_trampoline)"
- }
-
- // Actual call.
- arglist := strings.Join(args, ", ")
- call := fmt.Sprintf("%s(%s, %s)", asm, sysname, arglist)
-
- // Assign return values.
- body := ""
- ret := []string{"_", "_", "_"}
- doErrno := false
- for i := 0; i < len(out); i++ {
- p := parseParam(out[i])
- reg := ""
- if p.Name == "err" && !*plan9 {
- reg = "e1"
- ret[2] = reg
- doErrno = true
- } else if p.Name == "err" && *plan9 {
- ret[0] = "r0"
- ret[2] = "e1"
- break
- } else {
- reg = fmt.Sprintf("r%d", i)
- ret[i] = reg
- }
- if p.Type == "bool" {
- reg = fmt.Sprintf("%s != 0", reg)
- }
- if p.Type == "int64" && endianness != "" {
- // 64-bit number in r1:r0 or r0:r1.
- if i+2 > len(out) {
- fmt.Fprintf(os.Stderr, "%s:%s not enough registers for int64 return\n", path, funct)
- }
- if endianness == "big-endian" {
- reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1)
- } else {
- reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i)
- }
- ret[i] = fmt.Sprintf("r%d", i)
- ret[i+1] = fmt.Sprintf("r%d", i+1)
- }
- if reg != "e1" || *plan9 {
- body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
- }
- }
- if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" {
- text += fmt.Sprintf("\t%s\n", call)
- } else {
- if errvar == "" && goos == "linux" {
- // raw syscall without error on Linux, see golang.org/issue/22924
- text += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], call)
- } else {
- text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call)
- }
- }
- text += body
-
- if *plan9 && ret[2] == "e1" {
- text += "\tif int32(r0) == -1 {\n"
- text += "\t\terr = e1\n"
- text += "\t}\n"
- } else if doErrno {
- text += "\tif e1 != 0 {\n"
- text += "\t\terr = errnoErr(e1)\n"
- text += "\t}\n"
- }
- text += "\treturn\n"
- text += "}\n\n"
-
- if libc && !trampolines[libcFn] {
- // some system calls share a trampoline, like read and readlen.
- trampolines[libcFn] = true
- // Declare assembly trampoline.
- text += fmt.Sprintf("func libc_%s_trampoline()\n", libcFn)
- // Assembly trampoline calls the libc_* function, which this magic
- // redirects to use the function from libSystem.
- text += fmt.Sprintf("//go:linkname libc_%s libc_%s\n", libcFn, libcFn)
- text += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"/usr/lib/libSystem.B.dylib\"\n", libcFn, libcFn)
- text += "\n"
- }
- }
- if err := s.Err(); err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- file.Close()
- }
- fmt.Printf(srcTemplate, cmdLine(), buildTags(), text)
-}
-
-const srcTemplate = `// %s
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s
-
-package unix
-
-import (
- "syscall"
- "unsafe"
-)
-
-var _ syscall.Errno
-
-%s
-`
diff --git a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go b/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go
deleted file mode 100644
index 3be3cdfc3..000000000
--- a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go
+++ /dev/null
@@ -1,415 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-This program reads a file containing function prototypes
-(like syscall_aix.go) and generates system call bodies.
-The prototypes are marked by lines beginning with "//sys"
-and read like func declarations if //sys is replaced by func, but:
- * The parameter lists must give a name for each argument.
- This includes return parameters.
- * The parameter lists must give a type for each argument:
- the (x, y, z int) shorthand is not allowed.
- * If the return parameter is an error number, it must be named err.
- * If go func name needs to be different than its libc name,
- * or the function is not in libc, name could be specified
- * at the end, after "=" sign, like
- //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
-*/
-package main
-
-import (
- "bufio"
- "flag"
- "fmt"
- "os"
- "regexp"
- "strings"
-)
-
-var (
- b32 = flag.Bool("b32", false, "32bit big-endian")
- l32 = flag.Bool("l32", false, "32bit little-endian")
- aix = flag.Bool("aix", false, "aix")
- tags = flag.String("tags", "", "build tags")
-)
-
-// cmdLine returns this programs's commandline arguments
-func cmdLine() string {
- return "go run mksyscall_aix_ppc.go " + strings.Join(os.Args[1:], " ")
-}
-
-// buildTags returns build tags
-func buildTags() string {
- return *tags
-}
-
-// Param is function parameter
-type Param struct {
- Name string
- Type string
-}
-
-// usage prints the program usage
-func usage() {
- fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc.go [-b32 | -l32] [-tags x,y] [file ...]\n")
- os.Exit(1)
-}
-
-// parseParamList parses parameter list and returns a slice of parameters
-func parseParamList(list string) []string {
- list = strings.TrimSpace(list)
- if list == "" {
- return []string{}
- }
- return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
-}
-
-// parseParam splits a parameter into name and type
-func parseParam(p string) Param {
- ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
- if ps == nil {
- fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
- os.Exit(1)
- }
- return Param{ps[1], ps[2]}
-}
-
-func main() {
- flag.Usage = usage
- flag.Parse()
- if len(flag.Args()) <= 0 {
- fmt.Fprintf(os.Stderr, "no files to parse provided\n")
- usage()
- }
-
- endianness := ""
- if *b32 {
- endianness = "big-endian"
- } else if *l32 {
- endianness = "little-endian"
- }
-
- pack := ""
- text := ""
- cExtern := "/*\n#include <stdint.h>\n#include <stddef.h>\n"
- for _, path := range flag.Args() {
- file, err := os.Open(path)
- if err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- s := bufio.NewScanner(file)
- for s.Scan() {
- t := s.Text()
- t = strings.TrimSpace(t)
- t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
- if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
- pack = p[1]
- }
- nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
- if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
- continue
- }
-
- // Line must be of the form
- // func Open(path string, mode int, perm int) (fd int, err error)
- // Split into name, in params, out params.
- f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
- if f == nil {
- fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
- os.Exit(1)
- }
- funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
-
- // Split argument lists on comma.
- in := parseParamList(inps)
- out := parseParamList(outps)
-
- inps = strings.Join(in, ", ")
- outps = strings.Join(out, ", ")
-
- // Try in vain to keep people from editing this file.
- // The theory is that they jump into the middle of the file
- // without reading the header.
- text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
-
- // Check if value return, err return available
- errvar := ""
- retvar := ""
- rettype := ""
- for _, param := range out {
- p := parseParam(param)
- if p.Type == "error" {
- errvar = p.Name
- } else {
- retvar = p.Name
- rettype = p.Type
- }
- }
-
- // System call name.
- if sysname == "" {
- sysname = funct
- }
- sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
- sysname = strings.ToLower(sysname) // All libc functions are lowercase.
-
- cRettype := ""
- if rettype == "unsafe.Pointer" {
- cRettype = "uintptr_t"
- } else if rettype == "uintptr" {
- cRettype = "uintptr_t"
- } else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil {
- cRettype = "uintptr_t"
- } else if rettype == "int" {
- cRettype = "int"
- } else if rettype == "int32" {
- cRettype = "int"
- } else if rettype == "int64" {
- cRettype = "long long"
- } else if rettype == "uint32" {
- cRettype = "unsigned int"
- } else if rettype == "uint64" {
- cRettype = "unsigned long long"
- } else {
- cRettype = "int"
- }
- if sysname == "exit" {
- cRettype = "void"
- }
-
- // Change p.Types to c
- var cIn []string
- for _, param := range in {
- p := parseParam(param)
- if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
- cIn = append(cIn, "uintptr_t")
- } else if p.Type == "string" {
- cIn = append(cIn, "uintptr_t")
- } else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
- cIn = append(cIn, "uintptr_t", "size_t")
- } else if p.Type == "unsafe.Pointer" {
- cIn = append(cIn, "uintptr_t")
- } else if p.Type == "uintptr" {
- cIn = append(cIn, "uintptr_t")
- } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
- cIn = append(cIn, "uintptr_t")
- } else if p.Type == "int" {
- cIn = append(cIn, "int")
- } else if p.Type == "int32" {
- cIn = append(cIn, "int")
- } else if p.Type == "int64" {
- cIn = append(cIn, "long long")
- } else if p.Type == "uint32" {
- cIn = append(cIn, "unsigned int")
- } else if p.Type == "uint64" {
- cIn = append(cIn, "unsigned long long")
- } else {
- cIn = append(cIn, "int")
- }
- }
-
- if funct != "fcntl" && funct != "FcntlInt" && funct != "readlen" && funct != "writelen" {
- if sysname == "select" {
- // select is a keyword of Go. Its name is
- // changed to c_select.
- cExtern += "#define c_select select\n"
- }
- // Imports of system calls from libc
- cExtern += fmt.Sprintf("%s %s", cRettype, sysname)
- cIn := strings.Join(cIn, ", ")
- cExtern += fmt.Sprintf("(%s);\n", cIn)
- }
-
- // So file name.
- if *aix {
- if modname == "" {
- modname = "libc.a/shr_64.o"
- } else {
- fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct)
- os.Exit(1)
- }
- }
-
- strconvfunc := "C.CString"
-
- // Go function header.
- if outps != "" {
- outps = fmt.Sprintf(" (%s)", outps)
- }
- if text != "" {
- text += "\n"
- }
-
- text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps)
-
- // Prepare arguments to Syscall.
- var args []string
- n := 0
- argN := 0
- for _, param := range in {
- p := parseParam(param)
- if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
- args = append(args, "C.uintptr_t(uintptr(unsafe.Pointer("+p.Name+")))")
- } else if p.Type == "string" && errvar != "" {
- text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name)
- args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n))
- n++
- } else if p.Type == "string" {
- fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
- text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name)
- args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n))
- n++
- } else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil {
- // Convert slice into pointer, length.
- // Have to be careful not to take address of &a[0] if len == 0:
- // pass nil in that case.
- text += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1])
- text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
- args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(unsafe.Pointer(_p%d)))", n))
- n++
- text += fmt.Sprintf("\tvar _p%d int\n", n)
- text += fmt.Sprintf("\t_p%d = len(%s)\n", n, p.Name)
- args = append(args, fmt.Sprintf("C.size_t(_p%d)", n))
- n++
- } else if p.Type == "int64" && endianness != "" {
- if endianness == "big-endian" {
- args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
- } else {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
- }
- n++
- } else if p.Type == "bool" {
- text += fmt.Sprintf("\tvar _p%d uint32\n", n)
- text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n)
- args = append(args, fmt.Sprintf("_p%d", n))
- } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
- args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name))
- } else if p.Type == "unsafe.Pointer" {
- args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name))
- } else if p.Type == "int" {
- if (argN == 2) && ((funct == "readlen") || (funct == "writelen")) {
- args = append(args, fmt.Sprintf("C.size_t(%s)", p.Name))
- } else if argN == 0 && funct == "fcntl" {
- args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
- } else if (argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt")) {
- args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
- } else {
- args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
- }
- } else if p.Type == "int32" {
- args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
- } else if p.Type == "int64" {
- args = append(args, fmt.Sprintf("C.longlong(%s)", p.Name))
- } else if p.Type == "uint32" {
- args = append(args, fmt.Sprintf("C.uint(%s)", p.Name))
- } else if p.Type == "uint64" {
- args = append(args, fmt.Sprintf("C.ulonglong(%s)", p.Name))
- } else if p.Type == "uintptr" {
- args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
- } else {
- args = append(args, fmt.Sprintf("C.int(%s)", p.Name))
- }
- argN++
- }
-
- // Actual call.
- arglist := strings.Join(args, ", ")
- call := ""
- if sysname == "exit" {
- if errvar != "" {
- call += "er :="
- } else {
- call += ""
- }
- } else if errvar != "" {
- call += "r0,er :="
- } else if retvar != "" {
- call += "r0,_ :="
- } else {
- call += ""
- }
- if sysname == "select" {
- // select is a keyword of Go. Its name is
- // changed to c_select.
- call += fmt.Sprintf("C.c_%s(%s)", sysname, arglist)
- } else {
- call += fmt.Sprintf("C.%s(%s)", sysname, arglist)
- }
-
- // Assign return values.
- body := ""
- for i := 0; i < len(out); i++ {
- p := parseParam(out[i])
- reg := ""
- if p.Name == "err" {
- reg = "e1"
- } else {
- reg = "r0"
- }
- if reg != "e1" {
- body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
- }
- }
-
- // verify return
- if sysname != "exit" && errvar != "" {
- if regexp.MustCompile(`^uintptr`).FindStringSubmatch(cRettype) != nil {
- body += "\tif (uintptr(r0) ==^uintptr(0) && er != nil) {\n"
- body += fmt.Sprintf("\t\t%s = er\n", errvar)
- body += "\t}\n"
- } else {
- body += "\tif (r0 ==-1 && er != nil) {\n"
- body += fmt.Sprintf("\t\t%s = er\n", errvar)
- body += "\t}\n"
- }
- } else if errvar != "" {
- body += "\tif (er != nil) {\n"
- body += fmt.Sprintf("\t\t%s = er\n", errvar)
- body += "\t}\n"
- }
-
- text += fmt.Sprintf("\t%s\n", call)
- text += body
-
- text += "\treturn\n"
- text += "}\n"
- }
- if err := s.Err(); err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- file.Close()
- }
- imp := ""
- if pack != "unix" {
- imp = "import \"golang.org/x/sys/unix\"\n"
-
- }
- fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, cExtern, imp, text)
-}
-
-const srcTemplate = `// %s
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s
-
-package %s
-
-
-%s
-*/
-import "C"
-import (
- "unsafe"
-)
-
-
-%s
-
-%s
-`
diff --git a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go b/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go
deleted file mode 100644
index c96009951..000000000
--- a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go
+++ /dev/null
@@ -1,614 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-This program reads a file containing function prototypes
-(like syscall_aix.go) and generates system call bodies.
-The prototypes are marked by lines beginning with "//sys"
-and read like func declarations if //sys is replaced by func, but:
- * The parameter lists must give a name for each argument.
- This includes return parameters.
- * The parameter lists must give a type for each argument:
- the (x, y, z int) shorthand is not allowed.
- * If the return parameter is an error number, it must be named err.
- * If go func name needs to be different than its libc name,
- * or the function is not in libc, name could be specified
- * at the end, after "=" sign, like
- //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
-
-
-This program will generate three files and handle both gc and gccgo implementation:
- - zsyscall_aix_ppc64.go: the common part of each implementation (error handler, pointer creation)
- - zsyscall_aix_ppc64_gc.go: gc part with //go_cgo_import_dynamic and a call to syscall6
- - zsyscall_aix_ppc64_gccgo.go: gccgo part with C function and conversion to C type.
-
- The generated code looks like this
-
-zsyscall_aix_ppc64.go
-func asyscall(...) (n int, err error) {
- // Pointer Creation
- r1, e1 := callasyscall(...)
- // Type Conversion
- // Error Handler
- return
-}
-
-zsyscall_aix_ppc64_gc.go
-//go:cgo_import_dynamic libc_asyscall asyscall "libc.a/shr_64.o"
-//go:linkname libc_asyscall libc_asyscall
-var asyscall syscallFunc
-
-func callasyscall(...) (r1 uintptr, e1 Errno) {
- r1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_asyscall)), "nb_args", ... )
- return
-}
-
-zsyscall_aix_ppc64_ggcgo.go
-
-// int asyscall(...)
-
-import "C"
-
-func callasyscall(...) (r1 uintptr, e1 Errno) {
- r1 = uintptr(C.asyscall(...))
- e1 = syscall.GetErrno()
- return
-}
-*/
-
-package main
-
-import (
- "bufio"
- "flag"
- "fmt"
- "io/ioutil"
- "os"
- "regexp"
- "strings"
-)
-
-var (
- b32 = flag.Bool("b32", false, "32bit big-endian")
- l32 = flag.Bool("l32", false, "32bit little-endian")
- aix = flag.Bool("aix", false, "aix")
- tags = flag.String("tags", "", "build tags")
-)
-
-// cmdLine returns this programs's commandline arguments
-func cmdLine() string {
- return "go run mksyscall_aix_ppc64.go " + strings.Join(os.Args[1:], " ")
-}
-
-// buildTags returns build tags
-func buildTags() string {
- return *tags
-}
-
-// Param is function parameter
-type Param struct {
- Name string
- Type string
-}
-
-// usage prints the program usage
-func usage() {
- fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc64.go [-b32 | -l32] [-tags x,y] [file ...]\n")
- os.Exit(1)
-}
-
-// parseParamList parses parameter list and returns a slice of parameters
-func parseParamList(list string) []string {
- list = strings.TrimSpace(list)
- if list == "" {
- return []string{}
- }
- return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
-}
-
-// parseParam splits a parameter into name and type
-func parseParam(p string) Param {
- ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
- if ps == nil {
- fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
- os.Exit(1)
- }
- return Param{ps[1], ps[2]}
-}
-
-func main() {
- flag.Usage = usage
- flag.Parse()
- if len(flag.Args()) <= 0 {
- fmt.Fprintf(os.Stderr, "no files to parse provided\n")
- usage()
- }
-
- endianness := ""
- if *b32 {
- endianness = "big-endian"
- } else if *l32 {
- endianness = "little-endian"
- }
-
- pack := ""
- // GCCGO
- textgccgo := ""
- cExtern := "/*\n#include <stdint.h>\n"
- // GC
- textgc := ""
- dynimports := ""
- linknames := ""
- var vars []string
- // COMMON
- textcommon := ""
- for _, path := range flag.Args() {
- file, err := os.Open(path)
- if err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- s := bufio.NewScanner(file)
- for s.Scan() {
- t := s.Text()
- t = strings.TrimSpace(t)
- t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
- if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
- pack = p[1]
- }
- nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
- if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
- continue
- }
-
- // Line must be of the form
- // func Open(path string, mode int, perm int) (fd int, err error)
- // Split into name, in params, out params.
- f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
- if f == nil {
- fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
- os.Exit(1)
- }
- funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
-
- // Split argument lists on comma.
- in := parseParamList(inps)
- out := parseParamList(outps)
-
- inps = strings.Join(in, ", ")
- outps = strings.Join(out, ", ")
-
- if sysname == "" {
- sysname = funct
- }
-
- onlyCommon := false
- if funct == "readlen" || funct == "writelen" || funct == "FcntlInt" || funct == "FcntlFlock" {
- // This function call another syscall which is already implemented.
- // Therefore, the gc and gccgo part must not be generated.
- onlyCommon = true
- }
-
- // Try in vain to keep people from editing this file.
- // The theory is that they jump into the middle of the file
- // without reading the header.
-
- textcommon += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
- if !onlyCommon {
- textgccgo += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
- textgc += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
- }
-
- // Check if value return, err return available
- errvar := ""
- rettype := ""
- for _, param := range out {
- p := parseParam(param)
- if p.Type == "error" {
- errvar = p.Name
- } else {
- rettype = p.Type
- }
- }
-
- sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
- sysname = strings.ToLower(sysname) // All libc functions are lowercase.
-
- // GCCGO Prototype return type
- cRettype := ""
- if rettype == "unsafe.Pointer" {
- cRettype = "uintptr_t"
- } else if rettype == "uintptr" {
- cRettype = "uintptr_t"
- } else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil {
- cRettype = "uintptr_t"
- } else if rettype == "int" {
- cRettype = "int"
- } else if rettype == "int32" {
- cRettype = "int"
- } else if rettype == "int64" {
- cRettype = "long long"
- } else if rettype == "uint32" {
- cRettype = "unsigned int"
- } else if rettype == "uint64" {
- cRettype = "unsigned long long"
- } else {
- cRettype = "int"
- }
- if sysname == "exit" {
- cRettype = "void"
- }
-
- // GCCGO Prototype arguments type
- var cIn []string
- for i, param := range in {
- p := parseParam(param)
- if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
- cIn = append(cIn, "uintptr_t")
- } else if p.Type == "string" {
- cIn = append(cIn, "uintptr_t")
- } else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil {
- cIn = append(cIn, "uintptr_t", "size_t")
- } else if p.Type == "unsafe.Pointer" {
- cIn = append(cIn, "uintptr_t")
- } else if p.Type == "uintptr" {
- cIn = append(cIn, "uintptr_t")
- } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil {
- cIn = append(cIn, "uintptr_t")
- } else if p.Type == "int" {
- if (i == 0 || i == 2) && funct == "fcntl" {
- // These fcntl arguments needs to be uintptr to be able to call FcntlInt and FcntlFlock
- cIn = append(cIn, "uintptr_t")
- } else {
- cIn = append(cIn, "int")
- }
-
- } else if p.Type == "int32" {
- cIn = append(cIn, "int")
- } else if p.Type == "int64" {
- cIn = append(cIn, "long long")
- } else if p.Type == "uint32" {
- cIn = append(cIn, "unsigned int")
- } else if p.Type == "uint64" {
- cIn = append(cIn, "unsigned long long")
- } else {
- cIn = append(cIn, "int")
- }
- }
-
- if !onlyCommon {
- // GCCGO Prototype Generation
- // Imports of system calls from libc
- if sysname == "select" {
- // select is a keyword of Go. Its name is
- // changed to c_select.
- cExtern += "#define c_select select\n"
- }
- cExtern += fmt.Sprintf("%s %s", cRettype, sysname)
- cIn := strings.Join(cIn, ", ")
- cExtern += fmt.Sprintf("(%s);\n", cIn)
- }
- // GC Library name
- if modname == "" {
- modname = "libc.a/shr_64.o"
- } else {
- fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct)
- os.Exit(1)
- }
- sysvarname := fmt.Sprintf("libc_%s", sysname)
-
- if !onlyCommon {
- // GC Runtime import of function to allow cross-platform builds.
- dynimports += fmt.Sprintf("//go:cgo_import_dynamic %s %s \"%s\"\n", sysvarname, sysname, modname)
- // GC Link symbol to proc address variable.
- linknames += fmt.Sprintf("//go:linkname %s %s\n", sysvarname, sysvarname)
- // GC Library proc address variable.
- vars = append(vars, sysvarname)
- }
-
- strconvfunc := "BytePtrFromString"
- strconvtype := "*byte"
-
- // Go function header.
- if outps != "" {
- outps = fmt.Sprintf(" (%s)", outps)
- }
- if textcommon != "" {
- textcommon += "\n"
- }
-
- textcommon += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps)
-
- // Prepare arguments tocall.
- var argscommon []string // Arguments in the common part
- var argscall []string // Arguments for call prototype
- var argsgc []string // Arguments for gc call (with syscall6)
- var argsgccgo []string // Arguments for gccgo call (with C.name_of_syscall)
- n := 0
- argN := 0
- for _, param := range in {
- p := parseParam(param)
- if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
- argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(%s))", p.Name))
- argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
- argsgc = append(argsgc, p.Name)
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
- } else if p.Type == "string" && errvar != "" {
- textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
- textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
- textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
-
- argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
- argscall = append(argscall, fmt.Sprintf("_p%d uintptr ", n))
- argsgc = append(argsgc, fmt.Sprintf("_p%d", n))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n))
- n++
- } else if p.Type == "string" {
- fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
- textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
- textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
- textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
-
- argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
- argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n))
- argsgc = append(argsgc, fmt.Sprintf("_p%d", n))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n))
- n++
- } else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil {
- // Convert slice into pointer, length.
- // Have to be careful not to take address of &a[0] if len == 0:
- // pass nil in that case.
- textcommon += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1])
- textcommon += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
- argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("len(%s)", p.Name))
- argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n), fmt.Sprintf("_lenp%d int", n))
- argsgc = append(argsgc, fmt.Sprintf("_p%d", n), fmt.Sprintf("uintptr(_lenp%d)", n))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n), fmt.Sprintf("C.size_t(_lenp%d)", n))
- n++
- } else if p.Type == "int64" && endianness != "" {
- fmt.Fprintf(os.Stderr, path+":"+funct+" uses int64 with 32 bits mode. Case not yet implemented\n")
- } else if p.Type == "bool" {
- fmt.Fprintf(os.Stderr, path+":"+funct+" uses bool. Case not yet implemented\n")
- } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil || p.Type == "unsafe.Pointer" {
- argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name))
- argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
- argsgc = append(argsgc, p.Name)
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
- } else if p.Type == "int" {
- if (argN == 0 || argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt") || (funct == "FcntlFlock")) {
- // These fcntl arguments need to be uintptr to be able to call FcntlInt and FcntlFlock
- argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name))
- argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
- argsgc = append(argsgc, p.Name)
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
-
- } else {
- argscommon = append(argscommon, p.Name)
- argscall = append(argscall, fmt.Sprintf("%s int", p.Name))
- argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
- }
- } else if p.Type == "int32" {
- argscommon = append(argscommon, p.Name)
- argscall = append(argscall, fmt.Sprintf("%s int32", p.Name))
- argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
- } else if p.Type == "int64" {
- argscommon = append(argscommon, p.Name)
- argscall = append(argscall, fmt.Sprintf("%s int64", p.Name))
- argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.longlong(%s)", p.Name))
- } else if p.Type == "uint32" {
- argscommon = append(argscommon, p.Name)
- argscall = append(argscall, fmt.Sprintf("%s uint32", p.Name))
- argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uint(%s)", p.Name))
- } else if p.Type == "uint64" {
- argscommon = append(argscommon, p.Name)
- argscall = append(argscall, fmt.Sprintf("%s uint64", p.Name))
- argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.ulonglong(%s)", p.Name))
- } else if p.Type == "uintptr" {
- argscommon = append(argscommon, p.Name)
- argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name))
- argsgc = append(argsgc, p.Name)
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name))
- } else {
- argscommon = append(argscommon, fmt.Sprintf("int(%s)", p.Name))
- argscall = append(argscall, fmt.Sprintf("%s int", p.Name))
- argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name))
- argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name))
- }
- argN++
- }
- nargs := len(argsgc)
-
- // COMMON function generation
- argscommonlist := strings.Join(argscommon, ", ")
- callcommon := fmt.Sprintf("call%s(%s)", sysname, argscommonlist)
- ret := []string{"_", "_"}
- body := ""
- doErrno := false
- for i := 0; i < len(out); i++ {
- p := parseParam(out[i])
- reg := ""
- if p.Name == "err" {
- reg = "e1"
- ret[1] = reg
- doErrno = true
- } else {
- reg = "r0"
- ret[0] = reg
- }
- if p.Type == "bool" {
- reg = fmt.Sprintf("%s != 0", reg)
- }
- if reg != "e1" {
- body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
- }
- }
- if ret[0] == "_" && ret[1] == "_" {
- textcommon += fmt.Sprintf("\t%s\n", callcommon)
- } else {
- textcommon += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], callcommon)
- }
- textcommon += body
-
- if doErrno {
- textcommon += "\tif e1 != 0 {\n"
- textcommon += "\t\terr = errnoErr(e1)\n"
- textcommon += "\t}\n"
- }
- textcommon += "\treturn\n"
- textcommon += "}\n"
-
- if onlyCommon {
- continue
- }
-
- // CALL Prototype
- callProto := fmt.Sprintf("func call%s(%s) (r1 uintptr, e1 Errno) {\n", sysname, strings.Join(argscall, ", "))
-
- // GC function generation
- asm := "syscall6"
- if nonblock != nil {
- asm = "rawSyscall6"
- }
-
- if len(argsgc) <= 6 {
- for len(argsgc) < 6 {
- argsgc = append(argsgc, "0")
- }
- } else {
- fmt.Fprintf(os.Stderr, "%s: too many arguments to system call", funct)
- os.Exit(1)
- }
- argsgclist := strings.Join(argsgc, ", ")
- callgc := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, argsgclist)
-
- textgc += callProto
- textgc += fmt.Sprintf("\tr1, _, e1 = %s\n", callgc)
- textgc += "\treturn\n}\n"
-
- // GCCGO function generation
- argsgccgolist := strings.Join(argsgccgo, ", ")
- var callgccgo string
- if sysname == "select" {
- // select is a keyword of Go. Its name is
- // changed to c_select.
- callgccgo = fmt.Sprintf("C.c_%s(%s)", sysname, argsgccgolist)
- } else {
- callgccgo = fmt.Sprintf("C.%s(%s)", sysname, argsgccgolist)
- }
- textgccgo += callProto
- textgccgo += fmt.Sprintf("\tr1 = uintptr(%s)\n", callgccgo)
- textgccgo += "\te1 = syscall.GetErrno()\n"
- textgccgo += "\treturn\n}\n"
- }
- if err := s.Err(); err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- file.Close()
- }
- imp := ""
- if pack != "unix" {
- imp = "import \"golang.org/x/sys/unix\"\n"
-
- }
-
- // Print zsyscall_aix_ppc64.go
- err := ioutil.WriteFile("zsyscall_aix_ppc64.go",
- []byte(fmt.Sprintf(srcTemplate1, cmdLine(), buildTags(), pack, imp, textcommon)),
- 0644)
- if err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
-
- // Print zsyscall_aix_ppc64_gc.go
- vardecls := "\t" + strings.Join(vars, ",\n\t")
- vardecls += " syscallFunc"
- err = ioutil.WriteFile("zsyscall_aix_ppc64_gc.go",
- []byte(fmt.Sprintf(srcTemplate2, cmdLine(), buildTags(), pack, imp, dynimports, linknames, vardecls, textgc)),
- 0644)
- if err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
-
- // Print zsyscall_aix_ppc64_gccgo.go
- err = ioutil.WriteFile("zsyscall_aix_ppc64_gccgo.go",
- []byte(fmt.Sprintf(srcTemplate3, cmdLine(), buildTags(), pack, cExtern, imp, textgccgo)),
- 0644)
- if err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
-}
-
-const srcTemplate1 = `// %s
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s
-
-package %s
-
-import (
- "unsafe"
-)
-
-
-%s
-
-%s
-`
-const srcTemplate2 = `// %s
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s
-// +build !gccgo
-
-package %s
-
-import (
- "unsafe"
-)
-%s
-%s
-%s
-type syscallFunc uintptr
-
-var (
-%s
-)
-
-// Implemented in runtime/syscall_aix.go.
-func rawSyscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno)
-func syscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno)
-
-%s
-`
-const srcTemplate3 = `// %s
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s
-// +build gccgo
-
-package %s
-
-%s
-*/
-import "C"
-import (
- "syscall"
-)
-
-
-%s
-
-%s
-`
diff --git a/vendor/golang.org/x/sys/unix/mksyscall_solaris.go b/vendor/golang.org/x/sys/unix/mksyscall_solaris.go
deleted file mode 100644
index 675597e44..000000000
--- a/vendor/golang.org/x/sys/unix/mksyscall_solaris.go
+++ /dev/null
@@ -1,341 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
- This program reads a file containing function prototypes
- (like syscall_solaris.go) and generates system call bodies.
- The prototypes are marked by lines beginning with "//sys"
- and read like func declarations if //sys is replaced by func, but:
- * The parameter lists must give a name for each argument.
- This includes return parameters.
- * The parameter lists must give a type for each argument:
- the (x, y, z int) shorthand is not allowed.
- * If the return parameter is an error number, it must be named err.
- * If go func name needs to be different than its libc name,
- * or the function is not in libc, name could be specified
- * at the end, after "=" sign, like
- //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt
-*/
-
-package main
-
-import (
- "bufio"
- "flag"
- "fmt"
- "os"
- "regexp"
- "strings"
-)
-
-var (
- b32 = flag.Bool("b32", false, "32bit big-endian")
- l32 = flag.Bool("l32", false, "32bit little-endian")
- tags = flag.String("tags", "", "build tags")
- illumos = flag.Bool("illumos", false, "illumos specific code generation")
-)
-
-// cmdLine returns this programs's commandline arguments
-func cmdLine() string {
- return "go run mksyscall_solaris.go " + strings.Join(os.Args[1:], " ")
-}
-
-// buildTags returns build tags
-func buildTags() string {
- return *tags
-}
-
-// Param is function parameter
-type Param struct {
- Name string
- Type string
-}
-
-// usage prints the program usage
-func usage() {
- fmt.Fprintf(os.Stderr, "usage: go run mksyscall_solaris.go [-b32 | -l32] [-tags x,y] [file ...]\n")
- os.Exit(1)
-}
-
-// parseParamList parses parameter list and returns a slice of parameters
-func parseParamList(list string) []string {
- list = strings.TrimSpace(list)
- if list == "" {
- return []string{}
- }
- return regexp.MustCompile(`\s*,\s*`).Split(list, -1)
-}
-
-// parseParam splits a parameter into name and type
-func parseParam(p string) Param {
- ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p)
- if ps == nil {
- fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p)
- os.Exit(1)
- }
- return Param{ps[1], ps[2]}
-}
-
-func main() {
- flag.Usage = usage
- flag.Parse()
- if len(flag.Args()) <= 0 {
- fmt.Fprintf(os.Stderr, "no files to parse provided\n")
- usage()
- }
-
- endianness := ""
- if *b32 {
- endianness = "big-endian"
- } else if *l32 {
- endianness = "little-endian"
- }
-
- pack := ""
- text := ""
- dynimports := ""
- linknames := ""
- var vars []string
- for _, path := range flag.Args() {
- file, err := os.Open(path)
- if err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- s := bufio.NewScanner(file)
- for s.Scan() {
- t := s.Text()
- t = strings.TrimSpace(t)
- t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `)
- if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" {
- pack = p[1]
- }
- nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t)
- if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil {
- continue
- }
-
- // Line must be of the form
- // func Open(path string, mode int, perm int) (fd int, err error)
- // Split into name, in params, out params.
- f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t)
- if f == nil {
- fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t)
- os.Exit(1)
- }
- funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6]
-
- // Split argument lists on comma.
- in := parseParamList(inps)
- out := parseParamList(outps)
-
- inps = strings.Join(in, ", ")
- outps = strings.Join(out, ", ")
-
- // Try in vain to keep people from editing this file.
- // The theory is that they jump into the middle of the file
- // without reading the header.
- text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n"
-
- // So file name.
- if modname == "" {
- modname = "libc"
- }
-
- // System call name.
- if sysname == "" {
- sysname = funct
- }
-
- // System call pointer variable name.
- sysvarname := fmt.Sprintf("proc%s", sysname)
-
- strconvfunc := "BytePtrFromString"
- strconvtype := "*byte"
-
- sysname = strings.ToLower(sysname) // All libc functions are lowercase.
-
- // Runtime import of function to allow cross-platform builds.
- dynimports += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"%s.so\"\n", sysname, sysname, modname)
- // Link symbol to proc address variable.
- linknames += fmt.Sprintf("//go:linkname %s libc_%s\n", sysvarname, sysname)
- // Library proc address variable.
- vars = append(vars, sysvarname)
-
- // Go function header.
- outlist := strings.Join(out, ", ")
- if outlist != "" {
- outlist = fmt.Sprintf(" (%s)", outlist)
- }
- if text != "" {
- text += "\n"
- }
- text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outlist)
-
- // Check if err return available
- errvar := ""
- for _, param := range out {
- p := parseParam(param)
- if p.Type == "error" {
- errvar = p.Name
- continue
- }
- }
-
- // Prepare arguments to Syscall.
- var args []string
- n := 0
- for _, param := range in {
- p := parseParam(param)
- if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil {
- args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))")
- } else if p.Type == "string" && errvar != "" {
- text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
- text += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name)
- text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar)
- args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
- n++
- } else if p.Type == "string" {
- fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n")
- text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype)
- text += fmt.Sprintf("\t_p%d, _ = %s(%s)\n", n, strconvfunc, p.Name)
- args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n))
- n++
- } else if s := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); s != nil {
- // Convert slice into pointer, length.
- // Have to be careful not to take address of &a[0] if len == 0:
- // pass nil in that case.
- text += fmt.Sprintf("\tvar _p%d *%s\n", n, s[1])
- text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name)
- args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("uintptr(len(%s))", p.Name))
- n++
- } else if p.Type == "int64" && endianness != "" {
- if endianness == "big-endian" {
- args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name))
- } else {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name))
- }
- } else if p.Type == "bool" {
- text += fmt.Sprintf("\tvar _p%d uint32\n", n)
- text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n)
- args = append(args, fmt.Sprintf("uintptr(_p%d)", n))
- n++
- } else {
- args = append(args, fmt.Sprintf("uintptr(%s)", p.Name))
- }
- }
- nargs := len(args)
-
- // Determine which form to use; pad args with zeros.
- asm := "sysvicall6"
- if nonblock != nil {
- asm = "rawSysvicall6"
- }
- if len(args) <= 6 {
- for len(args) < 6 {
- args = append(args, "0")
- }
- } else {
- fmt.Fprintf(os.Stderr, "%s: too many arguments to system call\n", path)
- os.Exit(1)
- }
-
- // Actual call.
- arglist := strings.Join(args, ", ")
- call := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, arglist)
-
- // Assign return values.
- body := ""
- ret := []string{"_", "_", "_"}
- doErrno := false
- for i := 0; i < len(out); i++ {
- p := parseParam(out[i])
- reg := ""
- if p.Name == "err" {
- reg = "e1"
- ret[2] = reg
- doErrno = true
- } else {
- reg = fmt.Sprintf("r%d", i)
- ret[i] = reg
- }
- if p.Type == "bool" {
- reg = fmt.Sprintf("%d != 0", reg)
- }
- if p.Type == "int64" && endianness != "" {
- // 64-bit number in r1:r0 or r0:r1.
- if i+2 > len(out) {
- fmt.Fprintf(os.Stderr, "%s: not enough registers for int64 return\n", path)
- os.Exit(1)
- }
- if endianness == "big-endian" {
- reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1)
- } else {
- reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i)
- }
- ret[i] = fmt.Sprintf("r%d", i)
- ret[i+1] = fmt.Sprintf("r%d", i+1)
- }
- if reg != "e1" {
- body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg)
- }
- }
- if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" {
- text += fmt.Sprintf("\t%s\n", call)
- } else {
- text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call)
- }
- text += body
-
- if doErrno {
- text += "\tif e1 != 0 {\n"
- text += "\t\terr = e1\n"
- text += "\t}\n"
- }
- text += "\treturn\n"
- text += "}\n"
- }
- if err := s.Err(); err != nil {
- fmt.Fprintf(os.Stderr, err.Error())
- os.Exit(1)
- }
- file.Close()
- }
- imp := ""
- if pack != "unix" {
- imp = "import \"golang.org/x/sys/unix\"\n"
- }
-
- syscallimp := ""
- if !*illumos {
- syscallimp = "\"syscall\""
- }
-
- vardecls := "\t" + strings.Join(vars, ",\n\t")
- vardecls += " syscallFunc"
- fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, syscallimp, imp, dynimports, linknames, vardecls, text)
-}
-
-const srcTemplate = `// %s
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s
-
-package %s
-
-import (
- "unsafe"
- %s
-)
-%s
-%s
-%s
-var (
-%s
-)
-
-%s
-`
diff --git a/vendor/golang.org/x/sys/unix/mksysctl_openbsd.go b/vendor/golang.org/x/sys/unix/mksysctl_openbsd.go
deleted file mode 100644
index b6b409909..000000000
--- a/vendor/golang.org/x/sys/unix/mksysctl_openbsd.go
+++ /dev/null
@@ -1,355 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// Parse the header files for OpenBSD and generate a Go usable sysctl MIB.
-//
-// Build a MIB with each entry being an array containing the level, type and
-// a hash that will contain additional entries if the current entry is a node.
-// We then walk this MIB and create a flattened sysctl name to OID hash.
-
-package main
-
-import (
- "bufio"
- "fmt"
- "os"
- "path/filepath"
- "regexp"
- "sort"
- "strings"
-)
-
-var (
- goos, goarch string
-)
-
-// cmdLine returns this programs's commandline arguments.
-func cmdLine() string {
- return "go run mksysctl_openbsd.go " + strings.Join(os.Args[1:], " ")
-}
-
-// buildTags returns build tags.
-func buildTags() string {
- return fmt.Sprintf("%s,%s", goarch, goos)
-}
-
-// reMatch performs regular expression match and stores the substring slice to value pointed by m.
-func reMatch(re *regexp.Regexp, str string, m *[]string) bool {
- *m = re.FindStringSubmatch(str)
- if *m != nil {
- return true
- }
- return false
-}
-
-type nodeElement struct {
- n int
- t string
- pE *map[string]nodeElement
-}
-
-var (
- debugEnabled bool
- mib map[string]nodeElement
- node *map[string]nodeElement
- nodeMap map[string]string
- sysCtl []string
-)
-
-var (
- ctlNames1RE = regexp.MustCompile(`^#define\s+(CTL_NAMES)\s+{`)
- ctlNames2RE = regexp.MustCompile(`^#define\s+(CTL_(.*)_NAMES)\s+{`)
- ctlNames3RE = regexp.MustCompile(`^#define\s+((.*)CTL_NAMES)\s+{`)
- netInetRE = regexp.MustCompile(`^netinet/`)
- netInet6RE = regexp.MustCompile(`^netinet6/`)
- netRE = regexp.MustCompile(`^net/`)
- bracesRE = regexp.MustCompile(`{.*}`)
- ctlTypeRE = regexp.MustCompile(`{\s+"(\w+)",\s+(CTLTYPE_[A-Z]+)\s+}`)
- fsNetKernRE = regexp.MustCompile(`^(fs|net|kern)_`)
-)
-
-func debug(s string) {
- if debugEnabled {
- fmt.Fprintln(os.Stderr, s)
- }
-}
-
-// Walk the MIB and build a sysctl name to OID mapping.
-func buildSysctl(pNode *map[string]nodeElement, name string, oid []int) {
- lNode := pNode // local copy of pointer to node
- var keys []string
- for k := range *lNode {
- keys = append(keys, k)
- }
- sort.Strings(keys)
-
- for _, key := range keys {
- nodename := name
- if name != "" {
- nodename += "."
- }
- nodename += key
-
- nodeoid := append(oid, (*pNode)[key].n)
-
- if (*pNode)[key].t == `CTLTYPE_NODE` {
- if _, ok := nodeMap[nodename]; ok {
- lNode = &mib
- ctlName := nodeMap[nodename]
- for _, part := range strings.Split(ctlName, ".") {
- lNode = ((*lNode)[part]).pE
- }
- } else {
- lNode = (*pNode)[key].pE
- }
- buildSysctl(lNode, nodename, nodeoid)
- } else if (*pNode)[key].t != "" {
- oidStr := []string{}
- for j := range nodeoid {
- oidStr = append(oidStr, fmt.Sprintf("%d", nodeoid[j]))
- }
- text := "\t{ \"" + nodename + "\", []_C_int{ " + strings.Join(oidStr, ", ") + " } }, \n"
- sysCtl = append(sysCtl, text)
- }
- }
-}
-
-func main() {
- // Get the OS (using GOOS_TARGET if it exist)
- goos = os.Getenv("GOOS_TARGET")
- if goos == "" {
- goos = os.Getenv("GOOS")
- }
- // Get the architecture (using GOARCH_TARGET if it exists)
- goarch = os.Getenv("GOARCH_TARGET")
- if goarch == "" {
- goarch = os.Getenv("GOARCH")
- }
- // Check if GOOS and GOARCH environment variables are defined
- if goarch == "" || goos == "" {
- fmt.Fprintf(os.Stderr, "GOARCH or GOOS not defined in environment\n")
- os.Exit(1)
- }
-
- mib = make(map[string]nodeElement)
- headers := [...]string{
- `sys/sysctl.h`,
- `sys/socket.h`,
- `sys/tty.h`,
- `sys/malloc.h`,
- `sys/mount.h`,
- `sys/namei.h`,
- `sys/sem.h`,
- `sys/shm.h`,
- `sys/vmmeter.h`,
- `uvm/uvmexp.h`,
- `uvm/uvm_param.h`,
- `uvm/uvm_swap_encrypt.h`,
- `ddb/db_var.h`,
- `net/if.h`,
- `net/if_pfsync.h`,
- `net/pipex.h`,
- `netinet/in.h`,
- `netinet/icmp_var.h`,
- `netinet/igmp_var.h`,
- `netinet/ip_ah.h`,
- `netinet/ip_carp.h`,
- `netinet/ip_divert.h`,
- `netinet/ip_esp.h`,
- `netinet/ip_ether.h`,
- `netinet/ip_gre.h`,
- `netinet/ip_ipcomp.h`,
- `netinet/ip_ipip.h`,
- `netinet/pim_var.h`,
- `netinet/tcp_var.h`,
- `netinet/udp_var.h`,
- `netinet6/in6.h`,
- `netinet6/ip6_divert.h`,
- `netinet6/pim6_var.h`,
- `netinet/icmp6.h`,
- `netmpls/mpls.h`,
- }
-
- ctls := [...]string{
- `kern`,
- `vm`,
- `fs`,
- `net`,
- //debug /* Special handling required */
- `hw`,
- //machdep /* Arch specific */
- `user`,
- `ddb`,
- //vfs /* Special handling required */
- `fs.posix`,
- `kern.forkstat`,
- `kern.intrcnt`,
- `kern.malloc`,
- `kern.nchstats`,
- `kern.seminfo`,
- `kern.shminfo`,
- `kern.timecounter`,
- `kern.tty`,
- `kern.watchdog`,
- `net.bpf`,
- `net.ifq`,
- `net.inet`,
- `net.inet.ah`,
- `net.inet.carp`,
- `net.inet.divert`,
- `net.inet.esp`,
- `net.inet.etherip`,
- `net.inet.gre`,
- `net.inet.icmp`,
- `net.inet.igmp`,
- `net.inet.ip`,
- `net.inet.ip.ifq`,
- `net.inet.ipcomp`,
- `net.inet.ipip`,
- `net.inet.mobileip`,
- `net.inet.pfsync`,
- `net.inet.pim`,
- `net.inet.tcp`,
- `net.inet.udp`,
- `net.inet6`,
- `net.inet6.divert`,
- `net.inet6.ip6`,
- `net.inet6.icmp6`,
- `net.inet6.pim6`,
- `net.inet6.tcp6`,
- `net.inet6.udp6`,
- `net.mpls`,
- `net.mpls.ifq`,
- `net.key`,
- `net.pflow`,
- `net.pfsync`,
- `net.pipex`,
- `net.rt`,
- `vm.swapencrypt`,
- //vfsgenctl /* Special handling required */
- }
-
- // Node name "fixups"
- ctlMap := map[string]string{
- "ipproto": "net.inet",
- "net.inet.ipproto": "net.inet",
- "net.inet6.ipv6proto": "net.inet6",
- "net.inet6.ipv6": "net.inet6.ip6",
- "net.inet.icmpv6": "net.inet6.icmp6",
- "net.inet6.divert6": "net.inet6.divert",
- "net.inet6.tcp6": "net.inet.tcp",
- "net.inet6.udp6": "net.inet.udp",
- "mpls": "net.mpls",
- "swpenc": "vm.swapencrypt",
- }
-
- // Node mappings
- nodeMap = map[string]string{
- "net.inet.ip.ifq": "net.ifq",
- "net.inet.pfsync": "net.pfsync",
- "net.mpls.ifq": "net.ifq",
- }
-
- mCtls := make(map[string]bool)
- for _, ctl := range ctls {
- mCtls[ctl] = true
- }
-
- for _, header := range headers {
- debug("Processing " + header)
- file, err := os.Open(filepath.Join("/usr/include", header))
- if err != nil {
- fmt.Fprintf(os.Stderr, "%v\n", err)
- os.Exit(1)
- }
- s := bufio.NewScanner(file)
- for s.Scan() {
- var sub []string
- if reMatch(ctlNames1RE, s.Text(), &sub) ||
- reMatch(ctlNames2RE, s.Text(), &sub) ||
- reMatch(ctlNames3RE, s.Text(), &sub) {
- if sub[1] == `CTL_NAMES` {
- // Top level.
- node = &mib
- } else {
- // Node.
- nodename := strings.ToLower(sub[2])
- ctlName := ""
- if reMatch(netInetRE, header, &sub) {
- ctlName = "net.inet." + nodename
- } else if reMatch(netInet6RE, header, &sub) {
- ctlName = "net.inet6." + nodename
- } else if reMatch(netRE, header, &sub) {
- ctlName = "net." + nodename
- } else {
- ctlName = nodename
- ctlName = fsNetKernRE.ReplaceAllString(ctlName, `$1.`)
- }
-
- if val, ok := ctlMap[ctlName]; ok {
- ctlName = val
- }
- if _, ok := mCtls[ctlName]; !ok {
- debug("Ignoring " + ctlName + "...")
- continue
- }
-
- // Walk down from the top of the MIB.
- node = &mib
- for _, part := range strings.Split(ctlName, ".") {
- if _, ok := (*node)[part]; !ok {
- debug("Missing node " + part)
- (*node)[part] = nodeElement{n: 0, t: "", pE: &map[string]nodeElement{}}
- }
- node = (*node)[part].pE
- }
- }
-
- // Populate current node with entries.
- i := -1
- for !strings.HasPrefix(s.Text(), "}") {
- s.Scan()
- if reMatch(bracesRE, s.Text(), &sub) {
- i++
- }
- if !reMatch(ctlTypeRE, s.Text(), &sub) {
- continue
- }
- (*node)[sub[1]] = nodeElement{n: i, t: sub[2], pE: &map[string]nodeElement{}}
- }
- }
- }
- err = s.Err()
- if err != nil {
- fmt.Fprintf(os.Stderr, "%v\n", err)
- os.Exit(1)
- }
- file.Close()
- }
- buildSysctl(&mib, "", []int{})
-
- sort.Strings(sysCtl)
- text := strings.Join(sysCtl, "")
-
- fmt.Printf(srcTemplate, cmdLine(), buildTags(), text)
-}
-
-const srcTemplate = `// %s
-// Code generated by the command above; DO NOT EDIT.
-
-// +build %s
-
-package unix
-
-type mibentry struct {
- ctlname string
- ctloid []_C_int
-}
-
-var sysctlMib = []mibentry {
-%s
-}
-`
diff --git a/vendor/golang.org/x/sys/unix/mksysnum.go b/vendor/golang.org/x/sys/unix/mksysnum.go
deleted file mode 100644
index baa6ecd85..000000000
--- a/vendor/golang.org/x/sys/unix/mksysnum.go
+++ /dev/null
@@ -1,190 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// Generate system call table for DragonFly, NetBSD,
-// FreeBSD, OpenBSD or Darwin from master list
-// (for example, /usr/src/sys/kern/syscalls.master or
-// sys/syscall.h).
-package main
-
-import (
- "bufio"
- "fmt"
- "io"
- "io/ioutil"
- "net/http"
- "os"
- "regexp"
- "strings"
-)
-
-var (
- goos, goarch string
-)
-
-// cmdLine returns this programs's commandline arguments
-func cmdLine() string {
- return "go run mksysnum.go " + strings.Join(os.Args[1:], " ")
-}
-
-// buildTags returns build tags
-func buildTags() string {
- return fmt.Sprintf("%s,%s", goarch, goos)
-}
-
-func checkErr(err error) {
- if err != nil {
- fmt.Fprintf(os.Stderr, "%v\n", err)
- os.Exit(1)
- }
-}
-
-// source string and substring slice for regexp
-type re struct {
- str string // source string
- sub []string // matched sub-string
-}
-
-// Match performs regular expression match
-func (r *re) Match(exp string) bool {
- r.sub = regexp.MustCompile(exp).FindStringSubmatch(r.str)
- if r.sub != nil {
- return true
- }
- return false
-}
-
-// fetchFile fetches a text file from URL
-func fetchFile(URL string) io.Reader {
- resp, err := http.Get(URL)
- checkErr(err)
- defer resp.Body.Close()
- body, err := ioutil.ReadAll(resp.Body)
- checkErr(err)
- return strings.NewReader(string(body))
-}
-
-// readFile reads a text file from path
-func readFile(path string) io.Reader {
- file, err := os.Open(os.Args[1])
- checkErr(err)
- return file
-}
-
-func format(name, num, proto string) string {
- name = strings.ToUpper(name)
- // There are multiple entries for enosys and nosys, so comment them out.
- nm := re{str: name}
- if nm.Match(`^SYS_E?NOSYS$`) {
- name = fmt.Sprintf("// %s", name)
- }
- if name == `SYS_SYS_EXIT` {
- name = `SYS_EXIT`
- }
- return fmt.Sprintf(" %s = %s; // %s\n", name, num, proto)
-}
-
-func main() {
- // Get the OS (using GOOS_TARGET if it exist)
- goos = os.Getenv("GOOS_TARGET")
- if goos == "" {
- goos = os.Getenv("GOOS")
- }
- // Get the architecture (using GOARCH_TARGET if it exists)
- goarch = os.Getenv("GOARCH_TARGET")
- if goarch == "" {
- goarch = os.Getenv("GOARCH")
- }
- // Check if GOOS and GOARCH environment variables are defined
- if goarch == "" || goos == "" {
- fmt.Fprintf(os.Stderr, "GOARCH or GOOS not defined in environment\n")
- os.Exit(1)
- }
-
- file := strings.TrimSpace(os.Args[1])
- var syscalls io.Reader
- if strings.HasPrefix(file, "https://") || strings.HasPrefix(file, "http://") {
- // Download syscalls.master file
- syscalls = fetchFile(file)
- } else {
- syscalls = readFile(file)
- }
-
- var text, line string
- s := bufio.NewScanner(syscalls)
- for s.Scan() {
- t := re{str: line}
- if t.Match(`^(.*)\\$`) {
- // Handle continuation
- line = t.sub[1]
- line += strings.TrimLeft(s.Text(), " \t")
- } else {
- // New line
- line = s.Text()
- }
- t = re{str: line}
- if t.Match(`\\$`) {
- continue
- }
- t = re{str: line}
-
- switch goos {
- case "dragonfly":
- if t.Match(`^([0-9]+)\s+STD\s+({ \S+\s+(\w+).*)$`) {
- num, proto := t.sub[1], t.sub[2]
- name := fmt.Sprintf("SYS_%s", t.sub[3])
- text += format(name, num, proto)
- }
- case "freebsd":
- if t.Match(`^([0-9]+)\s+\S+\s+(?:(?:NO)?STD|COMPAT10)\s+({ \S+\s+(\w+).*)$`) {
- num, proto := t.sub[1], t.sub[2]
- name := fmt.Sprintf("SYS_%s", t.sub[3])
- text += format(name, num, proto)
- }
- case "openbsd":
- if t.Match(`^([0-9]+)\s+STD\s+(NOLOCK\s+)?({ \S+\s+\*?(\w+).*)$`) {
- num, proto, name := t.sub[1], t.sub[3], t.sub[4]
- text += format(name, num, proto)
- }
- case "netbsd":
- if t.Match(`^([0-9]+)\s+((STD)|(NOERR))\s+(RUMP\s+)?({\s+\S+\s*\*?\s*\|(\S+)\|(\S*)\|(\w+).*\s+})(\s+(\S+))?$`) {
- num, proto, compat := t.sub[1], t.sub[6], t.sub[8]
- name := t.sub[7] + "_" + t.sub[9]
- if t.sub[11] != "" {
- name = t.sub[7] + "_" + t.sub[11]
- }
- name = strings.ToUpper(name)
- if compat == "" || compat == "13" || compat == "30" || compat == "50" {
- text += fmt.Sprintf(" %s = %s; // %s\n", name, num, proto)
- }
- }
- case "darwin":
- if t.Match(`^#define\s+SYS_(\w+)\s+([0-9]+)`) {
- name, num := t.sub[1], t.sub[2]
- name = strings.ToUpper(name)
- text += fmt.Sprintf(" SYS_%s = %s;\n", name, num)
- }
- default:
- fmt.Fprintf(os.Stderr, "unrecognized GOOS=%s\n", goos)
- os.Exit(1)
-
- }
- }
- err := s.Err()
- checkErr(err)
-
- fmt.Printf(template, cmdLine(), buildTags(), text)
-}
-
-const template = `// %s
-// Code generated by the command above; see README.md. DO NOT EDIT.
-
-// +build %s
-
-package unix
-
-const(
-%s)`
diff --git a/vendor/golang.org/x/sys/unix/syscall.go b/vendor/golang.org/x/sys/unix/syscall.go
index 4d682026b..fd4ee8ebe 100644
--- a/vendor/golang.org/x/sys/unix/syscall.go
+++ b/vendor/golang.org/x/sys/unix/syscall.go
@@ -22,7 +22,7 @@
// These calls return err == nil to indicate success; otherwise
// err represents an operating system error describing the failure and
// holds a value of type syscall.Errno.
-package unix
+package unix // import "golang.org/x/sys/unix"
import "strings"
diff --git a/vendor/golang.org/x/sys/unix/types_aix.go b/vendor/golang.org/x/sys/unix/types_aix.go
deleted file mode 100644
index 40d2beede..000000000
--- a/vendor/golang.org/x/sys/unix/types_aix.go
+++ /dev/null
@@ -1,237 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-// +build aix
-
-/*
-Input to cgo -godefs. See also mkerrors.sh and mkall.sh
-*/
-
-// +godefs map struct_in_addr [4]byte /* in_addr */
-// +godefs map struct_in6_addr [16]byte /* in6_addr */
-
-package unix
-
-/*
-#include <sys/types.h>
-#include <sys/time.h>
-#include <sys/limits.h>
-#include <sys/un.h>
-#include <utime.h>
-#include <sys/utsname.h>
-#include <sys/poll.h>
-#include <sys/resource.h>
-#include <sys/stat.h>
-#include <sys/statfs.h>
-#include <sys/termio.h>
-#include <sys/ioctl.h>
-
-#include <termios.h>
-
-#include <net/if.h>
-#include <net/if_dl.h>
-#include <netinet/in.h>
-#include <netinet/icmp6.h>
-
-
-#include <dirent.h>
-#include <fcntl.h>
-
-enum {
- sizeofPtr = sizeof(void*),
-};
-
-union sockaddr_all {
- struct sockaddr s1; // this one gets used for fields
- struct sockaddr_in s2; // these pad it out
- struct sockaddr_in6 s3;
- struct sockaddr_un s4;
- struct sockaddr_dl s5;
-};
-
-struct sockaddr_any {
- struct sockaddr addr;
- char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
-};
-
-*/
-import "C"
-
-// Machine characteristics
-
-const (
- SizeofPtr = C.sizeofPtr
- SizeofShort = C.sizeof_short
- SizeofInt = C.sizeof_int
- SizeofLong = C.sizeof_long
- SizeofLongLong = C.sizeof_longlong
- PathMax = C.PATH_MAX
-)
-
-// Basic types
-
-type (
- _C_short C.short
- _C_int C.int
- _C_long C.long
- _C_long_long C.longlong
-)
-
-type off64 C.off64_t
-type off C.off_t
-type Mode_t C.mode_t
-
-// Time
-
-type Timespec C.struct_timespec
-
-type Timeval C.struct_timeval
-
-type Timeval32 C.struct_timeval32
-
-type Timex C.struct_timex
-
-type Time_t C.time_t
-
-type Tms C.struct_tms
-
-type Utimbuf C.struct_utimbuf
-
-type Timezone C.struct_timezone
-
-// Processes
-
-type Rusage C.struct_rusage
-
-type Rlimit C.struct_rlimit64
-
-type Pid_t C.pid_t
-
-type _Gid_t C.gid_t
-
-type dev_t C.dev_t
-
-// Files
-
-type Stat_t C.struct_stat
-
-type StatxTimestamp C.struct_statx_timestamp
-
-type Statx_t C.struct_statx
-
-type Dirent C.struct_dirent
-
-// Sockets
-
-type RawSockaddrInet4 C.struct_sockaddr_in
-
-type RawSockaddrInet6 C.struct_sockaddr_in6
-
-type RawSockaddrUnix C.struct_sockaddr_un
-
-type RawSockaddrDatalink C.struct_sockaddr_dl
-
-type RawSockaddr C.struct_sockaddr
-
-type RawSockaddrAny C.struct_sockaddr_any
-
-type _Socklen C.socklen_t
-
-type Cmsghdr C.struct_cmsghdr
-
-type ICMPv6Filter C.struct_icmp6_filter
-
-type Iovec C.struct_iovec
-
-type IPMreq C.struct_ip_mreq
-
-type IPv6Mreq C.struct_ipv6_mreq
-
-type IPv6MTUInfo C.struct_ip6_mtuinfo
-
-type Linger C.struct_linger
-
-type Msghdr C.struct_msghdr
-
-const (
- SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
- SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
- SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
- SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
- SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
- SizeofLinger = C.sizeof_struct_linger
- SizeofIPMreq = C.sizeof_struct_ip_mreq
- SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
- SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
- SizeofMsghdr = C.sizeof_struct_msghdr
- SizeofCmsghdr = C.sizeof_struct_cmsghdr
- SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
-)
-
-// Routing and interface messages
-
-const (
- SizeofIfMsghdr = C.sizeof_struct_if_msghdr
-)
-
-type IfMsgHdr C.struct_if_msghdr
-
-// Misc
-
-type FdSet C.fd_set
-
-type Utsname C.struct_utsname
-
-type Ustat_t C.struct_ustat
-
-type Sigset_t C.sigset_t
-
-const (
- AT_FDCWD = C.AT_FDCWD
- AT_REMOVEDIR = C.AT_REMOVEDIR
- AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
-)
-
-// Terminal handling
-
-type Termios C.struct_termios
-
-type Termio C.struct_termio
-
-type Winsize C.struct_winsize
-
-//poll
-
-type PollFd struct {
- Fd int32
- Events uint16
- Revents uint16
-}
-
-const (
- POLLERR = C.POLLERR
- POLLHUP = C.POLLHUP
- POLLIN = C.POLLIN
- POLLNVAL = C.POLLNVAL
- POLLOUT = C.POLLOUT
- POLLPRI = C.POLLPRI
- POLLRDBAND = C.POLLRDBAND
- POLLRDNORM = C.POLLRDNORM
- POLLWRBAND = C.POLLWRBAND
- POLLWRNORM = C.POLLWRNORM
-)
-
-//flock_t
-
-type Flock_t C.struct_flock64
-
-// Statfs
-
-type Fsid_t C.struct_fsid_t
-type Fsid64_t C.struct_fsid64_t
-
-type Statfs_t C.struct_statfs
-
-const RNDGETENTCNT = 0x80045200
diff --git a/vendor/golang.org/x/sys/unix/types_darwin.go b/vendor/golang.org/x/sys/unix/types_darwin.go
deleted file mode 100644
index 155c2e692..000000000
--- a/vendor/golang.org/x/sys/unix/types_darwin.go
+++ /dev/null
@@ -1,283 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-Input to cgo -godefs. See README.md
-*/
-
-// +godefs map struct_in_addr [4]byte /* in_addr */
-// +godefs map struct_in6_addr [16]byte /* in6_addr */
-
-package unix
-
-/*
-#define __DARWIN_UNIX03 0
-#define KERNEL
-#define _DARWIN_USE_64_BIT_INODE
-#include <dirent.h>
-#include <fcntl.h>
-#include <poll.h>
-#include <signal.h>
-#include <termios.h>
-#include <unistd.h>
-#include <mach/mach.h>
-#include <mach/message.h>
-#include <sys/event.h>
-#include <sys/mman.h>
-#include <sys/mount.h>
-#include <sys/param.h>
-#include <sys/ptrace.h>
-#include <sys/resource.h>
-#include <sys/select.h>
-#include <sys/signal.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/time.h>
-#include <sys/types.h>
-#include <sys/uio.h>
-#include <sys/un.h>
-#include <sys/utsname.h>
-#include <sys/wait.h>
-#include <net/bpf.h>
-#include <net/if.h>
-#include <net/if_dl.h>
-#include <net/if_var.h>
-#include <net/route.h>
-#include <netinet/in.h>
-#include <netinet/icmp6.h>
-#include <netinet/tcp.h>
-
-enum {
- sizeofPtr = sizeof(void*),
-};
-
-union sockaddr_all {
- struct sockaddr s1; // this one gets used for fields
- struct sockaddr_in s2; // these pad it out
- struct sockaddr_in6 s3;
- struct sockaddr_un s4;
- struct sockaddr_dl s5;
-};
-
-struct sockaddr_any {
- struct sockaddr addr;
- char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
-};
-
-*/
-import "C"
-
-// Machine characteristics
-
-const (
- SizeofPtr = C.sizeofPtr
- SizeofShort = C.sizeof_short
- SizeofInt = C.sizeof_int
- SizeofLong = C.sizeof_long
- SizeofLongLong = C.sizeof_longlong
-)
-
-// Basic types
-
-type (
- _C_short C.short
- _C_int C.int
- _C_long C.long
- _C_long_long C.longlong
-)
-
-// Time
-
-type Timespec C.struct_timespec
-
-type Timeval C.struct_timeval
-
-type Timeval32 C.struct_timeval32
-
-// Processes
-
-type Rusage C.struct_rusage
-
-type Rlimit C.struct_rlimit
-
-type _Gid_t C.gid_t
-
-// Files
-
-type Stat_t C.struct_stat64
-
-type Statfs_t C.struct_statfs64
-
-type Flock_t C.struct_flock
-
-type Fstore_t C.struct_fstore
-
-type Radvisory_t C.struct_radvisory
-
-type Fbootstraptransfer_t C.struct_fbootstraptransfer
-
-type Log2phys_t C.struct_log2phys
-
-type Fsid C.struct_fsid
-
-type Dirent C.struct_dirent
-
-// Sockets
-
-type RawSockaddrInet4 C.struct_sockaddr_in
-
-type RawSockaddrInet6 C.struct_sockaddr_in6
-
-type RawSockaddrUnix C.struct_sockaddr_un
-
-type RawSockaddrDatalink C.struct_sockaddr_dl
-
-type RawSockaddr C.struct_sockaddr
-
-type RawSockaddrAny C.struct_sockaddr_any
-
-type _Socklen C.socklen_t
-
-type Linger C.struct_linger
-
-type Iovec C.struct_iovec
-
-type IPMreq C.struct_ip_mreq
-
-type IPv6Mreq C.struct_ipv6_mreq
-
-type Msghdr C.struct_msghdr
-
-type Cmsghdr C.struct_cmsghdr
-
-type Inet4Pktinfo C.struct_in_pktinfo
-
-type Inet6Pktinfo C.struct_in6_pktinfo
-
-type IPv6MTUInfo C.struct_ip6_mtuinfo
-
-type ICMPv6Filter C.struct_icmp6_filter
-
-const (
- SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
- SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
- SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
- SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
- SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
- SizeofLinger = C.sizeof_struct_linger
- SizeofIPMreq = C.sizeof_struct_ip_mreq
- SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
- SizeofMsghdr = C.sizeof_struct_msghdr
- SizeofCmsghdr = C.sizeof_struct_cmsghdr
- SizeofInet4Pktinfo = C.sizeof_struct_in_pktinfo
- SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
- SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
- SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
-)
-
-// Ptrace requests
-
-const (
- PTRACE_TRACEME = C.PT_TRACE_ME
- PTRACE_CONT = C.PT_CONTINUE
- PTRACE_KILL = C.PT_KILL
-)
-
-// Events (kqueue, kevent)
-
-type Kevent_t C.struct_kevent
-
-// Select
-
-type FdSet C.fd_set
-
-// Routing and interface messages
-
-const (
- SizeofIfMsghdr = C.sizeof_struct_if_msghdr
- SizeofIfData = C.sizeof_struct_if_data
- SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
- SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr
- SizeofIfmaMsghdr2 = C.sizeof_struct_ifma_msghdr2
- SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
- SizeofRtMetrics = C.sizeof_struct_rt_metrics
-)
-
-type IfMsghdr C.struct_if_msghdr
-
-type IfData C.struct_if_data
-
-type IfaMsghdr C.struct_ifa_msghdr
-
-type IfmaMsghdr C.struct_ifma_msghdr
-
-type IfmaMsghdr2 C.struct_ifma_msghdr2
-
-type RtMsghdr C.struct_rt_msghdr
-
-type RtMetrics C.struct_rt_metrics
-
-// Berkeley packet filter
-
-const (
- SizeofBpfVersion = C.sizeof_struct_bpf_version
- SizeofBpfStat = C.sizeof_struct_bpf_stat
- SizeofBpfProgram = C.sizeof_struct_bpf_program
- SizeofBpfInsn = C.sizeof_struct_bpf_insn
- SizeofBpfHdr = C.sizeof_struct_bpf_hdr
-)
-
-type BpfVersion C.struct_bpf_version
-
-type BpfStat C.struct_bpf_stat
-
-type BpfProgram C.struct_bpf_program
-
-type BpfInsn C.struct_bpf_insn
-
-type BpfHdr C.struct_bpf_hdr
-
-// Terminal handling
-
-type Termios C.struct_termios
-
-type Winsize C.struct_winsize
-
-// fchmodat-like syscalls.
-
-const (
- AT_FDCWD = C.AT_FDCWD
- AT_REMOVEDIR = C.AT_REMOVEDIR
- AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
- AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
-)
-
-// poll
-
-type PollFd C.struct_pollfd
-
-const (
- POLLERR = C.POLLERR
- POLLHUP = C.POLLHUP
- POLLIN = C.POLLIN
- POLLNVAL = C.POLLNVAL
- POLLOUT = C.POLLOUT
- POLLPRI = C.POLLPRI
- POLLRDBAND = C.POLLRDBAND
- POLLRDNORM = C.POLLRDNORM
- POLLWRBAND = C.POLLWRBAND
- POLLWRNORM = C.POLLWRNORM
-)
-
-// uname
-
-type Utsname C.struct_utsname
-
-// Clockinfo
-
-const SizeofClockinfo = C.sizeof_struct_clockinfo
-
-type Clockinfo C.struct_clockinfo
diff --git a/vendor/golang.org/x/sys/unix/types_dragonfly.go b/vendor/golang.org/x/sys/unix/types_dragonfly.go
deleted file mode 100644
index 6574f6b6a..000000000
--- a/vendor/golang.org/x/sys/unix/types_dragonfly.go
+++ /dev/null
@@ -1,269 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-Input to cgo -godefs. See README.md
-*/
-
-// +godefs map struct_in_addr [4]byte /* in_addr */
-// +godefs map struct_in6_addr [16]byte /* in6_addr */
-
-package unix
-
-/*
-#define KERNEL
-#include <dirent.h>
-#include <fcntl.h>
-#include <poll.h>
-#include <signal.h>
-#include <termios.h>
-#include <stdio.h>
-#include <unistd.h>
-#include <sys/event.h>
-#include <sys/mman.h>
-#include <sys/mount.h>
-#include <sys/param.h>
-#include <sys/ptrace.h>
-#include <sys/resource.h>
-#include <sys/select.h>
-#include <sys/signal.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/time.h>
-#include <sys/types.h>
-#include <sys/un.h>
-#include <sys/utsname.h>
-#include <sys/wait.h>
-#include <net/bpf.h>
-#include <net/if.h>
-#include <net/if_dl.h>
-#include <net/route.h>
-#include <netinet/in.h>
-#include <netinet/icmp6.h>
-#include <netinet/tcp.h>
-
-enum {
- sizeofPtr = sizeof(void*),
-};
-
-union sockaddr_all {
- struct sockaddr s1; // this one gets used for fields
- struct sockaddr_in s2; // these pad it out
- struct sockaddr_in6 s3;
- struct sockaddr_un s4;
- struct sockaddr_dl s5;
-};
-
-struct sockaddr_any {
- struct sockaddr addr;
- char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
-};
-
-*/
-import "C"
-
-// Machine characteristics
-
-const (
- SizeofPtr = C.sizeofPtr
- SizeofShort = C.sizeof_short
- SizeofInt = C.sizeof_int
- SizeofLong = C.sizeof_long
- SizeofLongLong = C.sizeof_longlong
-)
-
-// Basic types
-
-type (
- _C_short C.short
- _C_int C.int
- _C_long C.long
- _C_long_long C.longlong
-)
-
-// Time
-
-type Timespec C.struct_timespec
-
-type Timeval C.struct_timeval
-
-// Processes
-
-type Rusage C.struct_rusage
-
-type Rlimit C.struct_rlimit
-
-type _Gid_t C.gid_t
-
-// Files
-
-type Stat_t C.struct_stat
-
-type Statfs_t C.struct_statfs
-
-type Flock_t C.struct_flock
-
-type Dirent C.struct_dirent
-
-type Fsid C.struct_fsid
-
-// File system limits
-
-const (
- PathMax = C.PATH_MAX
-)
-
-// Sockets
-
-type RawSockaddrInet4 C.struct_sockaddr_in
-
-type RawSockaddrInet6 C.struct_sockaddr_in6
-
-type RawSockaddrUnix C.struct_sockaddr_un
-
-type RawSockaddrDatalink C.struct_sockaddr_dl
-
-type RawSockaddr C.struct_sockaddr
-
-type RawSockaddrAny C.struct_sockaddr_any
-
-type _Socklen C.socklen_t
-
-type Linger C.struct_linger
-
-type Iovec C.struct_iovec
-
-type IPMreq C.struct_ip_mreq
-
-type IPv6Mreq C.struct_ipv6_mreq
-
-type Msghdr C.struct_msghdr
-
-type Cmsghdr C.struct_cmsghdr
-
-type Inet6Pktinfo C.struct_in6_pktinfo
-
-type IPv6MTUInfo C.struct_ip6_mtuinfo
-
-type ICMPv6Filter C.struct_icmp6_filter
-
-const (
- SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
- SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
- SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
- SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
- SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
- SizeofLinger = C.sizeof_struct_linger
- SizeofIPMreq = C.sizeof_struct_ip_mreq
- SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
- SizeofMsghdr = C.sizeof_struct_msghdr
- SizeofCmsghdr = C.sizeof_struct_cmsghdr
- SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
- SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
- SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
-)
-
-// Ptrace requests
-
-const (
- PTRACE_TRACEME = C.PT_TRACE_ME
- PTRACE_CONT = C.PT_CONTINUE
- PTRACE_KILL = C.PT_KILL
-)
-
-// Events (kqueue, kevent)
-
-type Kevent_t C.struct_kevent
-
-// Select
-
-type FdSet C.fd_set
-
-// Routing and interface messages
-
-const (
- SizeofIfMsghdr = C.sizeof_struct_if_msghdr
- SizeofIfData = C.sizeof_struct_if_data
- SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
- SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr
- SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
- SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
- SizeofRtMetrics = C.sizeof_struct_rt_metrics
-)
-
-type IfMsghdr C.struct_if_msghdr
-
-type IfData C.struct_if_data
-
-type IfaMsghdr C.struct_ifa_msghdr
-
-type IfmaMsghdr C.struct_ifma_msghdr
-
-type IfAnnounceMsghdr C.struct_if_announcemsghdr
-
-type RtMsghdr C.struct_rt_msghdr
-
-type RtMetrics C.struct_rt_metrics
-
-// Berkeley packet filter
-
-const (
- SizeofBpfVersion = C.sizeof_struct_bpf_version
- SizeofBpfStat = C.sizeof_struct_bpf_stat
- SizeofBpfProgram = C.sizeof_struct_bpf_program
- SizeofBpfInsn = C.sizeof_struct_bpf_insn
- SizeofBpfHdr = C.sizeof_struct_bpf_hdr
-)
-
-type BpfVersion C.struct_bpf_version
-
-type BpfStat C.struct_bpf_stat
-
-type BpfProgram C.struct_bpf_program
-
-type BpfInsn C.struct_bpf_insn
-
-type BpfHdr C.struct_bpf_hdr
-
-// Terminal handling
-
-type Termios C.struct_termios
-
-type Winsize C.struct_winsize
-
-// fchmodat-like syscalls.
-
-const (
- AT_FDCWD = C.AT_FDCWD
- AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
-)
-
-// poll
-
-type PollFd C.struct_pollfd
-
-const (
- POLLERR = C.POLLERR
- POLLHUP = C.POLLHUP
- POLLIN = C.POLLIN
- POLLNVAL = C.POLLNVAL
- POLLOUT = C.POLLOUT
- POLLPRI = C.POLLPRI
- POLLRDBAND = C.POLLRDBAND
- POLLRDNORM = C.POLLRDNORM
- POLLWRBAND = C.POLLWRBAND
- POLLWRNORM = C.POLLWRNORM
-)
-
-// Uname
-
-type Utsname C.struct_utsname
-
-// Clockinfo
-
-const SizeofClockinfo = C.sizeof_struct_clockinfo
-
-type Clockinfo C.struct_clockinfo
diff --git a/vendor/golang.org/x/sys/unix/types_freebsd.go b/vendor/golang.org/x/sys/unix/types_freebsd.go
deleted file mode 100644
index c6fde4249..000000000
--- a/vendor/golang.org/x/sys/unix/types_freebsd.go
+++ /dev/null
@@ -1,406 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-Input to cgo -godefs. See README.md
-*/
-
-// +godefs map struct_in_addr [4]byte /* in_addr */
-// +godefs map struct_in6_addr [16]byte /* in6_addr */
-
-package unix
-
-/*
-#define _WANT_FREEBSD11_STAT 1
-#define _WANT_FREEBSD11_STATFS 1
-#define _WANT_FREEBSD11_DIRENT 1
-#define _WANT_FREEBSD11_KEVENT 1
-
-#include <dirent.h>
-#include <fcntl.h>
-#include <poll.h>
-#include <signal.h>
-#include <termios.h>
-#include <stdio.h>
-#include <unistd.h>
-#include <sys/capsicum.h>
-#include <sys/event.h>
-#include <sys/mman.h>
-#include <sys/mount.h>
-#include <sys/param.h>
-#include <sys/ptrace.h>
-#include <sys/resource.h>
-#include <sys/select.h>
-#include <sys/signal.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/time.h>
-#include <sys/types.h>
-#include <sys/un.h>
-#include <sys/utsname.h>
-#include <sys/wait.h>
-#include <net/bpf.h>
-#include <net/if.h>
-#include <net/if_dl.h>
-#include <net/route.h>
-#include <netinet/in.h>
-#include <netinet/icmp6.h>
-#include <netinet/tcp.h>
-
-enum {
- sizeofPtr = sizeof(void*),
-};
-
-union sockaddr_all {
- struct sockaddr s1; // this one gets used for fields
- struct sockaddr_in s2; // these pad it out
- struct sockaddr_in6 s3;
- struct sockaddr_un s4;
- struct sockaddr_dl s5;
-};
-
-struct sockaddr_any {
- struct sockaddr addr;
- char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
-};
-
-// This structure is a duplicate of if_data on FreeBSD 8-STABLE.
-// See /usr/include/net/if.h.
-struct if_data8 {
- u_char ifi_type;
- u_char ifi_physical;
- u_char ifi_addrlen;
- u_char ifi_hdrlen;
- u_char ifi_link_state;
- u_char ifi_spare_char1;
- u_char ifi_spare_char2;
- u_char ifi_datalen;
- u_long ifi_mtu;
- u_long ifi_metric;
- u_long ifi_baudrate;
- u_long ifi_ipackets;
- u_long ifi_ierrors;
- u_long ifi_opackets;
- u_long ifi_oerrors;
- u_long ifi_collisions;
- u_long ifi_ibytes;
- u_long ifi_obytes;
- u_long ifi_imcasts;
- u_long ifi_omcasts;
- u_long ifi_iqdrops;
- u_long ifi_noproto;
- u_long ifi_hwassist;
-// FIXME: these are now unions, so maybe need to change definitions?
-#undef ifi_epoch
- time_t ifi_epoch;
-#undef ifi_lastchange
- struct timeval ifi_lastchange;
-};
-
-// This structure is a duplicate of if_msghdr on FreeBSD 8-STABLE.
-// See /usr/include/net/if.h.
-struct if_msghdr8 {
- u_short ifm_msglen;
- u_char ifm_version;
- u_char ifm_type;
- int ifm_addrs;
- int ifm_flags;
- u_short ifm_index;
- struct if_data8 ifm_data;
-};
-*/
-import "C"
-
-// Machine characteristics
-
-const (
- SizeofPtr = C.sizeofPtr
- SizeofShort = C.sizeof_short
- SizeofInt = C.sizeof_int
- SizeofLong = C.sizeof_long
- SizeofLongLong = C.sizeof_longlong
-)
-
-// Basic types
-
-type (
- _C_short C.short
- _C_int C.int
- _C_long C.long
- _C_long_long C.longlong
-)
-
-// Time
-
-type Timespec C.struct_timespec
-
-type Timeval C.struct_timeval
-
-// Processes
-
-type Rusage C.struct_rusage
-
-type Rlimit C.struct_rlimit
-
-type _Gid_t C.gid_t
-
-// Files
-
-const (
- _statfsVersion = C.STATFS_VERSION
- _dirblksiz = C.DIRBLKSIZ
-)
-
-type Stat_t C.struct_stat
-
-type stat_freebsd11_t C.struct_freebsd11_stat
-
-type Statfs_t C.struct_statfs
-
-type statfs_freebsd11_t C.struct_freebsd11_statfs
-
-type Flock_t C.struct_flock
-
-type Dirent C.struct_dirent
-
-type dirent_freebsd11 C.struct_freebsd11_dirent
-
-type Fsid C.struct_fsid
-
-// File system limits
-
-const (
- PathMax = C.PATH_MAX
-)
-
-// Advice to Fadvise
-
-const (
- FADV_NORMAL = C.POSIX_FADV_NORMAL
- FADV_RANDOM = C.POSIX_FADV_RANDOM
- FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL
- FADV_WILLNEED = C.POSIX_FADV_WILLNEED
- FADV_DONTNEED = C.POSIX_FADV_DONTNEED
- FADV_NOREUSE = C.POSIX_FADV_NOREUSE
-)
-
-// Sockets
-
-type RawSockaddrInet4 C.struct_sockaddr_in
-
-type RawSockaddrInet6 C.struct_sockaddr_in6
-
-type RawSockaddrUnix C.struct_sockaddr_un
-
-type RawSockaddrDatalink C.struct_sockaddr_dl
-
-type RawSockaddr C.struct_sockaddr
-
-type RawSockaddrAny C.struct_sockaddr_any
-
-type _Socklen C.socklen_t
-
-type Linger C.struct_linger
-
-type Iovec C.struct_iovec
-
-type IPMreq C.struct_ip_mreq
-
-type IPMreqn C.struct_ip_mreqn
-
-type IPv6Mreq C.struct_ipv6_mreq
-
-type Msghdr C.struct_msghdr
-
-type Cmsghdr C.struct_cmsghdr
-
-type Inet6Pktinfo C.struct_in6_pktinfo
-
-type IPv6MTUInfo C.struct_ip6_mtuinfo
-
-type ICMPv6Filter C.struct_icmp6_filter
-
-const (
- SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
- SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
- SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
- SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
- SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
- SizeofLinger = C.sizeof_struct_linger
- SizeofIPMreq = C.sizeof_struct_ip_mreq
- SizeofIPMreqn = C.sizeof_struct_ip_mreqn
- SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
- SizeofMsghdr = C.sizeof_struct_msghdr
- SizeofCmsghdr = C.sizeof_struct_cmsghdr
- SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
- SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
- SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
-)
-
-// Ptrace requests
-
-const (
- PTRACE_ATTACH = C.PT_ATTACH
- PTRACE_CONT = C.PT_CONTINUE
- PTRACE_DETACH = C.PT_DETACH
- PTRACE_GETFPREGS = C.PT_GETFPREGS
- PTRACE_GETFSBASE = C.PT_GETFSBASE
- PTRACE_GETLWPLIST = C.PT_GETLWPLIST
- PTRACE_GETNUMLWPS = C.PT_GETNUMLWPS
- PTRACE_GETREGS = C.PT_GETREGS
- PTRACE_GETXSTATE = C.PT_GETXSTATE
- PTRACE_IO = C.PT_IO
- PTRACE_KILL = C.PT_KILL
- PTRACE_LWPEVENTS = C.PT_LWP_EVENTS
- PTRACE_LWPINFO = C.PT_LWPINFO
- PTRACE_SETFPREGS = C.PT_SETFPREGS
- PTRACE_SETREGS = C.PT_SETREGS
- PTRACE_SINGLESTEP = C.PT_STEP
- PTRACE_TRACEME = C.PT_TRACE_ME
-)
-
-const (
- PIOD_READ_D = C.PIOD_READ_D
- PIOD_WRITE_D = C.PIOD_WRITE_D
- PIOD_READ_I = C.PIOD_READ_I
- PIOD_WRITE_I = C.PIOD_WRITE_I
-)
-
-const (
- PL_FLAG_BORN = C.PL_FLAG_BORN
- PL_FLAG_EXITED = C.PL_FLAG_EXITED
- PL_FLAG_SI = C.PL_FLAG_SI
-)
-
-const (
- TRAP_BRKPT = C.TRAP_BRKPT
- TRAP_TRACE = C.TRAP_TRACE
-)
-
-type PtraceLwpInfoStruct C.struct_ptrace_lwpinfo
-
-type __Siginfo C.struct___siginfo
-
-type Sigset_t C.sigset_t
-
-type Reg C.struct_reg
-
-type FpReg C.struct_fpreg
-
-type PtraceIoDesc C.struct_ptrace_io_desc
-
-// Events (kqueue, kevent)
-
-type Kevent_t C.struct_kevent_freebsd11
-
-// Select
-
-type FdSet C.fd_set
-
-// Routing and interface messages
-
-const (
- sizeofIfMsghdr = C.sizeof_struct_if_msghdr
- SizeofIfMsghdr = C.sizeof_struct_if_msghdr8
- sizeofIfData = C.sizeof_struct_if_data
- SizeofIfData = C.sizeof_struct_if_data8
- SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
- SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr
- SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
- SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
- SizeofRtMetrics = C.sizeof_struct_rt_metrics
-)
-
-type ifMsghdr C.struct_if_msghdr
-
-type IfMsghdr C.struct_if_msghdr8
-
-type ifData C.struct_if_data
-
-type IfData C.struct_if_data8
-
-type IfaMsghdr C.struct_ifa_msghdr
-
-type IfmaMsghdr C.struct_ifma_msghdr
-
-type IfAnnounceMsghdr C.struct_if_announcemsghdr
-
-type RtMsghdr C.struct_rt_msghdr
-
-type RtMetrics C.struct_rt_metrics
-
-// Berkeley packet filter
-
-const (
- SizeofBpfVersion = C.sizeof_struct_bpf_version
- SizeofBpfStat = C.sizeof_struct_bpf_stat
- SizeofBpfZbuf = C.sizeof_struct_bpf_zbuf
- SizeofBpfProgram = C.sizeof_struct_bpf_program
- SizeofBpfInsn = C.sizeof_struct_bpf_insn
- SizeofBpfHdr = C.sizeof_struct_bpf_hdr
- SizeofBpfZbufHeader = C.sizeof_struct_bpf_zbuf_header
-)
-
-type BpfVersion C.struct_bpf_version
-
-type BpfStat C.struct_bpf_stat
-
-type BpfZbuf C.struct_bpf_zbuf
-
-type BpfProgram C.struct_bpf_program
-
-type BpfInsn C.struct_bpf_insn
-
-type BpfHdr C.struct_bpf_hdr
-
-type BpfZbufHeader C.struct_bpf_zbuf_header
-
-// Terminal handling
-
-type Termios C.struct_termios
-
-type Winsize C.struct_winsize
-
-// fchmodat-like syscalls.
-
-const (
- AT_FDCWD = C.AT_FDCWD
- AT_REMOVEDIR = C.AT_REMOVEDIR
- AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
- AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
-)
-
-// poll
-
-type PollFd C.struct_pollfd
-
-const (
- POLLERR = C.POLLERR
- POLLHUP = C.POLLHUP
- POLLIN = C.POLLIN
- POLLINIGNEOF = C.POLLINIGNEOF
- POLLNVAL = C.POLLNVAL
- POLLOUT = C.POLLOUT
- POLLPRI = C.POLLPRI
- POLLRDBAND = C.POLLRDBAND
- POLLRDNORM = C.POLLRDNORM
- POLLWRBAND = C.POLLWRBAND
- POLLWRNORM = C.POLLWRNORM
-)
-
-// Capabilities
-
-type CapRights C.struct_cap_rights
-
-// Uname
-
-type Utsname C.struct_utsname
-
-// Clockinfo
-
-const SizeofClockinfo = C.sizeof_struct_clockinfo
-
-type Clockinfo C.struct_clockinfo
diff --git a/vendor/golang.org/x/sys/unix/types_netbsd.go b/vendor/golang.org/x/sys/unix/types_netbsd.go
deleted file mode 100644
index 0a81aadb8..000000000
--- a/vendor/golang.org/x/sys/unix/types_netbsd.go
+++ /dev/null
@@ -1,300 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-Input to cgo -godefs. See README.md
-*/
-
-// +godefs map struct_in_addr [4]byte /* in_addr */
-// +godefs map struct_in6_addr [16]byte /* in6_addr */
-
-package unix
-
-/*
-#define KERNEL
-#include <dirent.h>
-#include <fcntl.h>
-#include <poll.h>
-#include <signal.h>
-#include <termios.h>
-#include <stdio.h>
-#include <unistd.h>
-#include <sys/param.h>
-#include <sys/types.h>
-#include <sys/event.h>
-#include <sys/mman.h>
-#include <sys/mount.h>
-#include <sys/ptrace.h>
-#include <sys/resource.h>
-#include <sys/select.h>
-#include <sys/signal.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/statvfs.h>
-#include <sys/sysctl.h>
-#include <sys/time.h>
-#include <sys/uio.h>
-#include <sys/un.h>
-#include <sys/utsname.h>
-#include <sys/wait.h>
-#include <net/bpf.h>
-#include <net/if.h>
-#include <net/if_dl.h>
-#include <net/route.h>
-#include <netinet/in.h>
-#include <netinet/icmp6.h>
-#include <netinet/tcp.h>
-
-enum {
- sizeofPtr = sizeof(void*),
-};
-
-union sockaddr_all {
- struct sockaddr s1; // this one gets used for fields
- struct sockaddr_in s2; // these pad it out
- struct sockaddr_in6 s3;
- struct sockaddr_un s4;
- struct sockaddr_dl s5;
-};
-
-struct sockaddr_any {
- struct sockaddr addr;
- char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
-};
-
-*/
-import "C"
-
-// Machine characteristics
-
-const (
- SizeofPtr = C.sizeofPtr
- SizeofShort = C.sizeof_short
- SizeofInt = C.sizeof_int
- SizeofLong = C.sizeof_long
- SizeofLongLong = C.sizeof_longlong
-)
-
-// Basic types
-
-type (
- _C_short C.short
- _C_int C.int
- _C_long C.long
- _C_long_long C.longlong
-)
-
-// Time
-
-type Timespec C.struct_timespec
-
-type Timeval C.struct_timeval
-
-// Processes
-
-type Rusage C.struct_rusage
-
-type Rlimit C.struct_rlimit
-
-type _Gid_t C.gid_t
-
-// Files
-
-type Stat_t C.struct_stat
-
-type Statfs_t C.struct_statfs
-
-type Statvfs_t C.struct_statvfs
-
-type Flock_t C.struct_flock
-
-type Dirent C.struct_dirent
-
-type Fsid C.fsid_t
-
-// File system limits
-
-const (
- PathMax = C.PATH_MAX
-)
-
-// Fstatvfs/Statvfs flags
-
-const (
- ST_WAIT = C.ST_WAIT
- ST_NOWAIT = C.ST_NOWAIT
-)
-
-// Advice to Fadvise
-
-const (
- FADV_NORMAL = C.POSIX_FADV_NORMAL
- FADV_RANDOM = C.POSIX_FADV_RANDOM
- FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL
- FADV_WILLNEED = C.POSIX_FADV_WILLNEED
- FADV_DONTNEED = C.POSIX_FADV_DONTNEED
- FADV_NOREUSE = C.POSIX_FADV_NOREUSE
-)
-
-// Sockets
-
-type RawSockaddrInet4 C.struct_sockaddr_in
-
-type RawSockaddrInet6 C.struct_sockaddr_in6
-
-type RawSockaddrUnix C.struct_sockaddr_un
-
-type RawSockaddrDatalink C.struct_sockaddr_dl
-
-type RawSockaddr C.struct_sockaddr
-
-type RawSockaddrAny C.struct_sockaddr_any
-
-type _Socklen C.socklen_t
-
-type Linger C.struct_linger
-
-type Iovec C.struct_iovec
-
-type IPMreq C.struct_ip_mreq
-
-type IPv6Mreq C.struct_ipv6_mreq
-
-type Msghdr C.struct_msghdr
-
-type Cmsghdr C.struct_cmsghdr
-
-type Inet6Pktinfo C.struct_in6_pktinfo
-
-type IPv6MTUInfo C.struct_ip6_mtuinfo
-
-type ICMPv6Filter C.struct_icmp6_filter
-
-const (
- SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
- SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
- SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
- SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
- SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
- SizeofLinger = C.sizeof_struct_linger
- SizeofIPMreq = C.sizeof_struct_ip_mreq
- SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
- SizeofMsghdr = C.sizeof_struct_msghdr
- SizeofCmsghdr = C.sizeof_struct_cmsghdr
- SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
- SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
- SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
-)
-
-// Ptrace requests
-
-const (
- PTRACE_TRACEME = C.PT_TRACE_ME
- PTRACE_CONT = C.PT_CONTINUE
- PTRACE_KILL = C.PT_KILL
-)
-
-// Events (kqueue, kevent)
-
-type Kevent_t C.struct_kevent
-
-// Select
-
-type FdSet C.fd_set
-
-// Routing and interface messages
-
-const (
- SizeofIfMsghdr = C.sizeof_struct_if_msghdr
- SizeofIfData = C.sizeof_struct_if_data
- SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
- SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
- SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
- SizeofRtMetrics = C.sizeof_struct_rt_metrics
-)
-
-type IfMsghdr C.struct_if_msghdr
-
-type IfData C.struct_if_data
-
-type IfaMsghdr C.struct_ifa_msghdr
-
-type IfAnnounceMsghdr C.struct_if_announcemsghdr
-
-type RtMsghdr C.struct_rt_msghdr
-
-type RtMetrics C.struct_rt_metrics
-
-type Mclpool C.struct_mclpool
-
-// Berkeley packet filter
-
-const (
- SizeofBpfVersion = C.sizeof_struct_bpf_version
- SizeofBpfStat = C.sizeof_struct_bpf_stat
- SizeofBpfProgram = C.sizeof_struct_bpf_program
- SizeofBpfInsn = C.sizeof_struct_bpf_insn
- SizeofBpfHdr = C.sizeof_struct_bpf_hdr
-)
-
-type BpfVersion C.struct_bpf_version
-
-type BpfStat C.struct_bpf_stat
-
-type BpfProgram C.struct_bpf_program
-
-type BpfInsn C.struct_bpf_insn
-
-type BpfHdr C.struct_bpf_hdr
-
-type BpfTimeval C.struct_bpf_timeval
-
-// Terminal handling
-
-type Termios C.struct_termios
-
-type Winsize C.struct_winsize
-
-type Ptmget C.struct_ptmget
-
-// fchmodat-like syscalls.
-
-const (
- AT_FDCWD = C.AT_FDCWD
- AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
- AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
-)
-
-// poll
-
-type PollFd C.struct_pollfd
-
-const (
- POLLERR = C.POLLERR
- POLLHUP = C.POLLHUP
- POLLIN = C.POLLIN
- POLLNVAL = C.POLLNVAL
- POLLOUT = C.POLLOUT
- POLLPRI = C.POLLPRI
- POLLRDBAND = C.POLLRDBAND
- POLLRDNORM = C.POLLRDNORM
- POLLWRBAND = C.POLLWRBAND
- POLLWRNORM = C.POLLWRNORM
-)
-
-// Sysctl
-
-type Sysctlnode C.struct_sysctlnode
-
-// Uname
-
-type Utsname C.struct_utsname
-
-// Clockinfo
-
-const SizeofClockinfo = C.sizeof_struct_clockinfo
-
-type Clockinfo C.struct_clockinfo
diff --git a/vendor/golang.org/x/sys/unix/types_openbsd.go b/vendor/golang.org/x/sys/unix/types_openbsd.go
deleted file mode 100644
index 775cb57dc..000000000
--- a/vendor/golang.org/x/sys/unix/types_openbsd.go
+++ /dev/null
@@ -1,283 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-Input to cgo -godefs. See README.md
-*/
-
-// +godefs map struct_in_addr [4]byte /* in_addr */
-// +godefs map struct_in6_addr [16]byte /* in6_addr */
-
-package unix
-
-/*
-#define KERNEL
-#include <dirent.h>
-#include <fcntl.h>
-#include <poll.h>
-#include <signal.h>
-#include <termios.h>
-#include <stdio.h>
-#include <unistd.h>
-#include <sys/param.h>
-#include <sys/types.h>
-#include <sys/event.h>
-#include <sys/mman.h>
-#include <sys/mount.h>
-#include <sys/ptrace.h>
-#include <sys/resource.h>
-#include <sys/select.h>
-#include <sys/signal.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/time.h>
-#include <sys/uio.h>
-#include <sys/un.h>
-#include <sys/utsname.h>
-#include <sys/wait.h>
-#include <uvm/uvmexp.h>
-#include <net/bpf.h>
-#include <net/if.h>
-#include <net/if_dl.h>
-#include <net/route.h>
-#include <netinet/in.h>
-#include <netinet/icmp6.h>
-#include <netinet/tcp.h>
-
-enum {
- sizeofPtr = sizeof(void*),
-};
-
-union sockaddr_all {
- struct sockaddr s1; // this one gets used for fields
- struct sockaddr_in s2; // these pad it out
- struct sockaddr_in6 s3;
- struct sockaddr_un s4;
- struct sockaddr_dl s5;
-};
-
-struct sockaddr_any {
- struct sockaddr addr;
- char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
-};
-
-*/
-import "C"
-
-// Machine characteristics
-
-const (
- SizeofPtr = C.sizeofPtr
- SizeofShort = C.sizeof_short
- SizeofInt = C.sizeof_int
- SizeofLong = C.sizeof_long
- SizeofLongLong = C.sizeof_longlong
-)
-
-// Basic types
-
-type (
- _C_short C.short
- _C_int C.int
- _C_long C.long
- _C_long_long C.longlong
-)
-
-// Time
-
-type Timespec C.struct_timespec
-
-type Timeval C.struct_timeval
-
-// Processes
-
-type Rusage C.struct_rusage
-
-type Rlimit C.struct_rlimit
-
-type _Gid_t C.gid_t
-
-// Files
-
-type Stat_t C.struct_stat
-
-type Statfs_t C.struct_statfs
-
-type Flock_t C.struct_flock
-
-type Dirent C.struct_dirent
-
-type Fsid C.fsid_t
-
-// File system limits
-
-const (
- PathMax = C.PATH_MAX
-)
-
-// Sockets
-
-type RawSockaddrInet4 C.struct_sockaddr_in
-
-type RawSockaddrInet6 C.struct_sockaddr_in6
-
-type RawSockaddrUnix C.struct_sockaddr_un
-
-type RawSockaddrDatalink C.struct_sockaddr_dl
-
-type RawSockaddr C.struct_sockaddr
-
-type RawSockaddrAny C.struct_sockaddr_any
-
-type _Socklen C.socklen_t
-
-type Linger C.struct_linger
-
-type Iovec C.struct_iovec
-
-type IPMreq C.struct_ip_mreq
-
-type IPv6Mreq C.struct_ipv6_mreq
-
-type Msghdr C.struct_msghdr
-
-type Cmsghdr C.struct_cmsghdr
-
-type Inet6Pktinfo C.struct_in6_pktinfo
-
-type IPv6MTUInfo C.struct_ip6_mtuinfo
-
-type ICMPv6Filter C.struct_icmp6_filter
-
-const (
- SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
- SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
- SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
- SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
- SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
- SizeofLinger = C.sizeof_struct_linger
- SizeofIPMreq = C.sizeof_struct_ip_mreq
- SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
- SizeofMsghdr = C.sizeof_struct_msghdr
- SizeofCmsghdr = C.sizeof_struct_cmsghdr
- SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
- SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
- SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
-)
-
-// Ptrace requests
-
-const (
- PTRACE_TRACEME = C.PT_TRACE_ME
- PTRACE_CONT = C.PT_CONTINUE
- PTRACE_KILL = C.PT_KILL
-)
-
-// Events (kqueue, kevent)
-
-type Kevent_t C.struct_kevent
-
-// Select
-
-type FdSet C.fd_set
-
-// Routing and interface messages
-
-const (
- SizeofIfMsghdr = C.sizeof_struct_if_msghdr
- SizeofIfData = C.sizeof_struct_if_data
- SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
- SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr
- SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
- SizeofRtMetrics = C.sizeof_struct_rt_metrics
-)
-
-type IfMsghdr C.struct_if_msghdr
-
-type IfData C.struct_if_data
-
-type IfaMsghdr C.struct_ifa_msghdr
-
-type IfAnnounceMsghdr C.struct_if_announcemsghdr
-
-type RtMsghdr C.struct_rt_msghdr
-
-type RtMetrics C.struct_rt_metrics
-
-type Mclpool C.struct_mclpool
-
-// Berkeley packet filter
-
-const (
- SizeofBpfVersion = C.sizeof_struct_bpf_version
- SizeofBpfStat = C.sizeof_struct_bpf_stat
- SizeofBpfProgram = C.sizeof_struct_bpf_program
- SizeofBpfInsn = C.sizeof_struct_bpf_insn
- SizeofBpfHdr = C.sizeof_struct_bpf_hdr
-)
-
-type BpfVersion C.struct_bpf_version
-
-type BpfStat C.struct_bpf_stat
-
-type BpfProgram C.struct_bpf_program
-
-type BpfInsn C.struct_bpf_insn
-
-type BpfHdr C.struct_bpf_hdr
-
-type BpfTimeval C.struct_bpf_timeval
-
-// Terminal handling
-
-type Termios C.struct_termios
-
-type Winsize C.struct_winsize
-
-// fchmodat-like syscalls.
-
-const (
- AT_FDCWD = C.AT_FDCWD
- AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
- AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
-)
-
-// poll
-
-type PollFd C.struct_pollfd
-
-const (
- POLLERR = C.POLLERR
- POLLHUP = C.POLLHUP
- POLLIN = C.POLLIN
- POLLNVAL = C.POLLNVAL
- POLLOUT = C.POLLOUT
- POLLPRI = C.POLLPRI
- POLLRDBAND = C.POLLRDBAND
- POLLRDNORM = C.POLLRDNORM
- POLLWRBAND = C.POLLWRBAND
- POLLWRNORM = C.POLLWRNORM
-)
-
-// Signal Sets
-
-type Sigset_t C.sigset_t
-
-// Uname
-
-type Utsname C.struct_utsname
-
-// Uvmexp
-
-const SizeofUvmexp = C.sizeof_struct_uvmexp
-
-type Uvmexp C.struct_uvmexp
-
-// Clockinfo
-
-const SizeofClockinfo = C.sizeof_struct_clockinfo
-
-type Clockinfo C.struct_clockinfo
diff --git a/vendor/golang.org/x/sys/unix/types_solaris.go b/vendor/golang.org/x/sys/unix/types_solaris.go
deleted file mode 100644
index d713f09e0..000000000
--- a/vendor/golang.org/x/sys/unix/types_solaris.go
+++ /dev/null
@@ -1,269 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-/*
-Input to cgo -godefs. See README.md
-*/
-
-// +godefs map struct_in_addr [4]byte /* in_addr */
-// +godefs map struct_in6_addr [16]byte /* in6_addr */
-
-package unix
-
-/*
-#define KERNEL
-// These defines ensure that builds done on newer versions of Solaris are
-// backwards-compatible with older versions of Solaris and
-// OpenSolaris-based derivatives.
-#define __USE_SUNOS_SOCKETS__ // msghdr
-#define __USE_LEGACY_PROTOTYPES__ // iovec
-#include <dirent.h>
-#include <fcntl.h>
-#include <netdb.h>
-#include <limits.h>
-#include <poll.h>
-#include <signal.h>
-#include <termios.h>
-#include <termio.h>
-#include <stdio.h>
-#include <unistd.h>
-#include <sys/mman.h>
-#include <sys/mount.h>
-#include <sys/param.h>
-#include <sys/resource.h>
-#include <sys/select.h>
-#include <sys/signal.h>
-#include <sys/socket.h>
-#include <sys/stat.h>
-#include <sys/statvfs.h>
-#include <sys/time.h>
-#include <sys/times.h>
-#include <sys/types.h>
-#include <sys/utsname.h>
-#include <sys/un.h>
-#include <sys/wait.h>
-#include <net/bpf.h>
-#include <net/if.h>
-#include <net/if_dl.h>
-#include <net/route.h>
-#include <netinet/in.h>
-#include <netinet/icmp6.h>
-#include <netinet/tcp.h>
-#include <ustat.h>
-#include <utime.h>
-
-enum {
- sizeofPtr = sizeof(void*),
-};
-
-union sockaddr_all {
- struct sockaddr s1; // this one gets used for fields
- struct sockaddr_in s2; // these pad it out
- struct sockaddr_in6 s3;
- struct sockaddr_un s4;
- struct sockaddr_dl s5;
-};
-
-struct sockaddr_any {
- struct sockaddr addr;
- char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)];
-};
-
-*/
-import "C"
-
-// Machine characteristics
-
-const (
- SizeofPtr = C.sizeofPtr
- SizeofShort = C.sizeof_short
- SizeofInt = C.sizeof_int
- SizeofLong = C.sizeof_long
- SizeofLongLong = C.sizeof_longlong
- PathMax = C.PATH_MAX
- MaxHostNameLen = C.MAXHOSTNAMELEN
-)
-
-// Basic types
-
-type (
- _C_short C.short
- _C_int C.int
- _C_long C.long
- _C_long_long C.longlong
-)
-
-// Time
-
-type Timespec C.struct_timespec
-
-type Timeval C.struct_timeval
-
-type Timeval32 C.struct_timeval32
-
-type Tms C.struct_tms
-
-type Utimbuf C.struct_utimbuf
-
-// Processes
-
-type Rusage C.struct_rusage
-
-type Rlimit C.struct_rlimit
-
-type _Gid_t C.gid_t
-
-// Files
-
-type Stat_t C.struct_stat
-
-type Flock_t C.struct_flock
-
-type Dirent C.struct_dirent
-
-// Filesystems
-
-type _Fsblkcnt_t C.fsblkcnt_t
-
-type Statvfs_t C.struct_statvfs
-
-// Sockets
-
-type RawSockaddrInet4 C.struct_sockaddr_in
-
-type RawSockaddrInet6 C.struct_sockaddr_in6
-
-type RawSockaddrUnix C.struct_sockaddr_un
-
-type RawSockaddrDatalink C.struct_sockaddr_dl
-
-type RawSockaddr C.struct_sockaddr
-
-type RawSockaddrAny C.struct_sockaddr_any
-
-type _Socklen C.socklen_t
-
-type Linger C.struct_linger
-
-type Iovec C.struct_iovec
-
-type IPMreq C.struct_ip_mreq
-
-type IPv6Mreq C.struct_ipv6_mreq
-
-type Msghdr C.struct_msghdr
-
-type Cmsghdr C.struct_cmsghdr
-
-type Inet4Pktinfo C.struct_in_pktinfo
-
-type Inet6Pktinfo C.struct_in6_pktinfo
-
-type IPv6MTUInfo C.struct_ip6_mtuinfo
-
-type ICMPv6Filter C.struct_icmp6_filter
-
-const (
- SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in
- SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6
- SizeofSockaddrAny = C.sizeof_struct_sockaddr_any
- SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un
- SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl
- SizeofLinger = C.sizeof_struct_linger
- SizeofIPMreq = C.sizeof_struct_ip_mreq
- SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq
- SizeofMsghdr = C.sizeof_struct_msghdr
- SizeofCmsghdr = C.sizeof_struct_cmsghdr
- SizeofInet4Pktinfo = C.sizeof_struct_in_pktinfo
- SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo
- SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo
- SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter
-)
-
-// Select
-
-type FdSet C.fd_set
-
-// Misc
-
-type Utsname C.struct_utsname
-
-type Ustat_t C.struct_ustat
-
-const (
- AT_FDCWD = C.AT_FDCWD
- AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW
- AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW
- AT_REMOVEDIR = C.AT_REMOVEDIR
- AT_EACCESS = C.AT_EACCESS
-)
-
-// Routing and interface messages
-
-const (
- SizeofIfMsghdr = C.sizeof_struct_if_msghdr
- SizeofIfData = C.sizeof_struct_if_data
- SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr
- SizeofRtMsghdr = C.sizeof_struct_rt_msghdr
- SizeofRtMetrics = C.sizeof_struct_rt_metrics
-)
-
-type IfMsghdr C.struct_if_msghdr
-
-type IfData C.struct_if_data
-
-type IfaMsghdr C.struct_ifa_msghdr
-
-type RtMsghdr C.struct_rt_msghdr
-
-type RtMetrics C.struct_rt_metrics
-
-// Berkeley packet filter
-
-const (
- SizeofBpfVersion = C.sizeof_struct_bpf_version
- SizeofBpfStat = C.sizeof_struct_bpf_stat
- SizeofBpfProgram = C.sizeof_struct_bpf_program
- SizeofBpfInsn = C.sizeof_struct_bpf_insn
- SizeofBpfHdr = C.sizeof_struct_bpf_hdr
-)
-
-type BpfVersion C.struct_bpf_version
-
-type BpfStat C.struct_bpf_stat
-
-type BpfProgram C.struct_bpf_program
-
-type BpfInsn C.struct_bpf_insn
-
-type BpfTimeval C.struct_bpf_timeval
-
-type BpfHdr C.struct_bpf_hdr
-
-// Terminal handling
-
-type Termios C.struct_termios
-
-type Termio C.struct_termio
-
-type Winsize C.struct_winsize
-
-// poll
-
-type PollFd C.struct_pollfd
-
-const (
- POLLERR = C.POLLERR
- POLLHUP = C.POLLHUP
- POLLIN = C.POLLIN
- POLLNVAL = C.POLLNVAL
- POLLOUT = C.POLLOUT
- POLLPRI = C.POLLPRI
- POLLRDBAND = C.POLLRDBAND
- POLLRDNORM = C.POLLRDNORM
- POLLWRBAND = C.POLLWRBAND
- POLLWRNORM = C.POLLWRNORM
-)
diff --git a/vendor/golang.org/x/text/transform/transform.go b/vendor/golang.org/x/text/transform/transform.go
index 277e6e343..520b9ada0 100644
--- a/vendor/golang.org/x/text/transform/transform.go
+++ b/vendor/golang.org/x/text/transform/transform.go
@@ -6,7 +6,7 @@
// bytes passing through as well as various transformations. Example
// transformations provided by other packages include normalization and
// conversion between character sets.
-package transform
+package transform // import "golang.org/x/text/transform"
import (
"bytes"
diff --git a/vendor/golang.org/x/text/unicode/bidi/bidi.go b/vendor/golang.org/x/text/unicode/bidi/bidi.go
index fd53d0712..e8edc54cc 100644
--- a/vendor/golang.org/x/text/unicode/bidi/bidi.go
+++ b/vendor/golang.org/x/text/unicode/bidi/bidi.go
@@ -10,7 +10,7 @@
//
// NOTE: UNDER CONSTRUCTION. This API may change in backwards incompatible ways
// and without notice.
-package bidi
+package bidi // import "golang.org/x/text/unicode/bidi"
// TODO:
// The following functionality would not be hard to implement, but hinges on
diff --git a/vendor/golang.org/x/text/unicode/bidi/gen.go b/vendor/golang.org/x/text/unicode/bidi/gen.go
deleted file mode 100644
index 987fc169c..000000000
--- a/vendor/golang.org/x/text/unicode/bidi/gen.go
+++ /dev/null
@@ -1,133 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-package main
-
-import (
- "flag"
- "log"
-
- "golang.org/x/text/internal/gen"
- "golang.org/x/text/internal/triegen"
- "golang.org/x/text/internal/ucd"
-)
-
-var outputFile = flag.String("out", "tables.go", "output file")
-
-func main() {
- gen.Init()
- gen.Repackage("gen_trieval.go", "trieval.go", "bidi")
- gen.Repackage("gen_ranges.go", "ranges_test.go", "bidi")
-
- genTables()
-}
-
-// bidiClass names and codes taken from class "bc" in
-// https://www.unicode.org/Public/8.0.0/ucd/PropertyValueAliases.txt
-var bidiClass = map[string]Class{
- "AL": AL, // ArabicLetter
- "AN": AN, // ArabicNumber
- "B": B, // ParagraphSeparator
- "BN": BN, // BoundaryNeutral
- "CS": CS, // CommonSeparator
- "EN": EN, // EuropeanNumber
- "ES": ES, // EuropeanSeparator
- "ET": ET, // EuropeanTerminator
- "L": L, // LeftToRight
- "NSM": NSM, // NonspacingMark
- "ON": ON, // OtherNeutral
- "R": R, // RightToLeft
- "S": S, // SegmentSeparator
- "WS": WS, // WhiteSpace
-
- "FSI": Control,
- "PDF": Control,
- "PDI": Control,
- "LRE": Control,
- "LRI": Control,
- "LRO": Control,
- "RLE": Control,
- "RLI": Control,
- "RLO": Control,
-}
-
-func genTables() {
- if numClass > 0x0F {
- log.Fatalf("Too many Class constants (%#x > 0x0F).", numClass)
- }
- w := gen.NewCodeWriter()
- defer w.WriteVersionedGoFile(*outputFile, "bidi")
-
- gen.WriteUnicodeVersion(w)
-
- t := triegen.NewTrie("bidi")
-
- // Build data about bracket mapping. These bits need to be or-ed with
- // any other bits.
- orMask := map[rune]uint64{}
-
- xorMap := map[rune]int{}
- xorMasks := []rune{0} // First value is no-op.
-
- ucd.Parse(gen.OpenUCDFile("BidiBrackets.txt"), func(p *ucd.Parser) {
- r1 := p.Rune(0)
- r2 := p.Rune(1)
- xor := r1 ^ r2
- if _, ok := xorMap[xor]; !ok {
- xorMap[xor] = len(xorMasks)
- xorMasks = append(xorMasks, xor)
- }
- entry := uint64(xorMap[xor]) << xorMaskShift
- switch p.String(2) {
- case "o":
- entry |= openMask
- case "c", "n":
- default:
- log.Fatalf("Unknown bracket class %q.", p.String(2))
- }
- orMask[r1] = entry
- })
-
- w.WriteComment(`
- xorMasks contains masks to be xor-ed with brackets to get the reverse
- version.`)
- w.WriteVar("xorMasks", xorMasks)
-
- done := map[rune]bool{}
-
- insert := func(r rune, c Class) {
- if !done[r] {
- t.Insert(r, orMask[r]|uint64(c))
- done[r] = true
- }
- }
-
- // Insert the derived BiDi properties.
- ucd.Parse(gen.OpenUCDFile("extracted/DerivedBidiClass.txt"), func(p *ucd.Parser) {
- r := p.Rune(0)
- class, ok := bidiClass[p.String(1)]
- if !ok {
- log.Fatalf("%U: Unknown BiDi class %q", r, p.String(1))
- }
- insert(r, class)
- })
- visitDefaults(insert)
-
- // TODO: use sparse blocks. This would reduce table size considerably
- // from the looks of it.
-
- sz, err := t.Gen(w)
- if err != nil {
- log.Fatal(err)
- }
- w.Size += sz
-}
-
-// dummy values to make methods in gen_common compile. The real versions
-// will be generated by this file to tables.go.
-var (
- xorMasks []rune
-)
diff --git a/vendor/golang.org/x/text/unicode/bidi/gen_ranges.go b/vendor/golang.org/x/text/unicode/bidi/gen_ranges.go
deleted file mode 100644
index 02c3b505d..000000000
--- a/vendor/golang.org/x/text/unicode/bidi/gen_ranges.go
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-package main
-
-import (
- "unicode"
-
- "golang.org/x/text/internal/gen"
- "golang.org/x/text/internal/ucd"
- "golang.org/x/text/unicode/rangetable"
-)
-
-// These tables are hand-extracted from:
-// https://www.unicode.org/Public/8.0.0/ucd/extracted/DerivedBidiClass.txt
-func visitDefaults(fn func(r rune, c Class)) {
- // first write default values for ranges listed above.
- visitRunes(fn, AL, []rune{
- 0x0600, 0x07BF, // Arabic
- 0x08A0, 0x08FF, // Arabic Extended-A
- 0xFB50, 0xFDCF, // Arabic Presentation Forms
- 0xFDF0, 0xFDFF,
- 0xFE70, 0xFEFF,
- 0x0001EE00, 0x0001EEFF, // Arabic Mathematical Alpha Symbols
- })
- visitRunes(fn, R, []rune{
- 0x0590, 0x05FF, // Hebrew
- 0x07C0, 0x089F, // Nko et al.
- 0xFB1D, 0xFB4F,
- 0x00010800, 0x00010FFF, // Cypriot Syllabary et. al.
- 0x0001E800, 0x0001EDFF,
- 0x0001EF00, 0x0001EFFF,
- })
- visitRunes(fn, ET, []rune{ // European Terminator
- 0x20A0, 0x20Cf, // Currency symbols
- })
- rangetable.Visit(unicode.Noncharacter_Code_Point, func(r rune) {
- fn(r, BN) // Boundary Neutral
- })
- ucd.Parse(gen.OpenUCDFile("DerivedCoreProperties.txt"), func(p *ucd.Parser) {
- if p.String(1) == "Default_Ignorable_Code_Point" {
- fn(p.Rune(0), BN) // Boundary Neutral
- }
- })
-}
-
-func visitRunes(fn func(r rune, c Class), c Class, runes []rune) {
- for i := 0; i < len(runes); i += 2 {
- lo, hi := runes[i], runes[i+1]
- for j := lo; j <= hi; j++ {
- fn(j, c)
- }
- }
-}
diff --git a/vendor/golang.org/x/text/unicode/bidi/gen_trieval.go b/vendor/golang.org/x/text/unicode/bidi/gen_trieval.go
deleted file mode 100644
index 9cb994289..000000000
--- a/vendor/golang.org/x/text/unicode/bidi/gen_trieval.go
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-package main
-
-// Class is the Unicode BiDi class. Each rune has a single class.
-type Class uint
-
-const (
- L Class = iota // LeftToRight
- R // RightToLeft
- EN // EuropeanNumber
- ES // EuropeanSeparator
- ET // EuropeanTerminator
- AN // ArabicNumber
- CS // CommonSeparator
- B // ParagraphSeparator
- S // SegmentSeparator
- WS // WhiteSpace
- ON // OtherNeutral
- BN // BoundaryNeutral
- NSM // NonspacingMark
- AL // ArabicLetter
- Control // Control LRO - PDI
-
- numClass
-
- LRO // LeftToRightOverride
- RLO // RightToLeftOverride
- LRE // LeftToRightEmbedding
- RLE // RightToLeftEmbedding
- PDF // PopDirectionalFormat
- LRI // LeftToRightIsolate
- RLI // RightToLeftIsolate
- FSI // FirstStrongIsolate
- PDI // PopDirectionalIsolate
-
- unknownClass = ^Class(0)
-)
-
-var controlToClass = map[rune]Class{
- 0x202D: LRO, // LeftToRightOverride,
- 0x202E: RLO, // RightToLeftOverride,
- 0x202A: LRE, // LeftToRightEmbedding,
- 0x202B: RLE, // RightToLeftEmbedding,
- 0x202C: PDF, // PopDirectionalFormat,
- 0x2066: LRI, // LeftToRightIsolate,
- 0x2067: RLI, // RightToLeftIsolate,
- 0x2068: FSI, // FirstStrongIsolate,
- 0x2069: PDI, // PopDirectionalIsolate,
-}
-
-// A trie entry has the following bits:
-// 7..5 XOR mask for brackets
-// 4 1: Bracket open, 0: Bracket close
-// 3..0 Class type
-
-const (
- openMask = 0x10
- xorMaskShift = 5
-)
diff --git a/vendor/golang.org/x/text/unicode/norm/maketables.go b/vendor/golang.org/x/text/unicode/norm/maketables.go
deleted file mode 100644
index 30a3aa933..000000000
--- a/vendor/golang.org/x/text/unicode/norm/maketables.go
+++ /dev/null
@@ -1,986 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// Normalization table generator.
-// Data read from the web.
-// See forminfo.go for a description of the trie values associated with each rune.
-
-package main
-
-import (
- "bytes"
- "encoding/binary"
- "flag"
- "fmt"
- "io"
- "log"
- "sort"
- "strconv"
- "strings"
-
- "golang.org/x/text/internal/gen"
- "golang.org/x/text/internal/triegen"
- "golang.org/x/text/internal/ucd"
-)
-
-func main() {
- gen.Init()
- loadUnicodeData()
- compactCCC()
- loadCompositionExclusions()
- completeCharFields(FCanonical)
- completeCharFields(FCompatibility)
- computeNonStarterCounts()
- verifyComputed()
- printChars()
- testDerived()
- printTestdata()
- makeTables()
-}
-
-var (
- tablelist = flag.String("tables",
- "all",
- "comma-separated list of which tables to generate; "+
- "can be 'decomp', 'recomp', 'info' and 'all'")
- test = flag.Bool("test",
- false,
- "test existing tables against DerivedNormalizationProps and generate test data for regression testing")
- verbose = flag.Bool("verbose",
- false,
- "write data to stdout as it is parsed")
-)
-
-const MaxChar = 0x10FFFF // anything above this shouldn't exist
-
-// Quick Check properties of runes allow us to quickly
-// determine whether a rune may occur in a normal form.
-// For a given normal form, a rune may be guaranteed to occur
-// verbatim (QC=Yes), may or may not combine with another
-// rune (QC=Maybe), or may not occur (QC=No).
-type QCResult int
-
-const (
- QCUnknown QCResult = iota
- QCYes
- QCNo
- QCMaybe
-)
-
-func (r QCResult) String() string {
- switch r {
- case QCYes:
- return "Yes"
- case QCNo:
- return "No"
- case QCMaybe:
- return "Maybe"
- }
- return "***UNKNOWN***"
-}
-
-const (
- FCanonical = iota // NFC or NFD
- FCompatibility // NFKC or NFKD
- FNumberOfFormTypes
-)
-
-const (
- MComposed = iota // NFC or NFKC
- MDecomposed // NFD or NFKD
- MNumberOfModes
-)
-
-// This contains only the properties we're interested in.
-type Char struct {
- name string
- codePoint rune // if zero, this index is not a valid code point.
- ccc uint8 // canonical combining class
- origCCC uint8
- excludeInComp bool // from CompositionExclusions.txt
- compatDecomp bool // it has a compatibility expansion
-
- nTrailingNonStarters uint8
- nLeadingNonStarters uint8 // must be equal to trailing if non-zero
-
- forms [FNumberOfFormTypes]FormInfo // For FCanonical and FCompatibility
-
- state State
-}
-
-var chars = make([]Char, MaxChar+1)
-var cccMap = make(map[uint8]uint8)
-
-func (c Char) String() string {
- buf := new(bytes.Buffer)
-
- fmt.Fprintf(buf, "%U [%s]:\n", c.codePoint, c.name)
- fmt.Fprintf(buf, " ccc: %v\n", c.ccc)
- fmt.Fprintf(buf, " excludeInComp: %v\n", c.excludeInComp)
- fmt.Fprintf(buf, " compatDecomp: %v\n", c.compatDecomp)
- fmt.Fprintf(buf, " state: %v\n", c.state)
- fmt.Fprintf(buf, " NFC:\n")
- fmt.Fprint(buf, c.forms[FCanonical])
- fmt.Fprintf(buf, " NFKC:\n")
- fmt.Fprint(buf, c.forms[FCompatibility])
-
- return buf.String()
-}
-
-// In UnicodeData.txt, some ranges are marked like this:
-// 3400;<CJK Ideograph Extension A, First>;Lo;0;L;;;;;N;;;;;
-// 4DB5;<CJK Ideograph Extension A, Last>;Lo;0;L;;;;;N;;;;;
-// parseCharacter keeps a state variable indicating the weirdness.
-type State int
-
-const (
- SNormal State = iota // known to be zero for the type
- SFirst
- SLast
- SMissing
-)
-
-var lastChar = rune('\u0000')
-
-func (c Char) isValid() bool {
- return c.codePoint != 0 && c.state != SMissing
-}
-
-type FormInfo struct {
- quickCheck [MNumberOfModes]QCResult // index: MComposed or MDecomposed
- verified [MNumberOfModes]bool // index: MComposed or MDecomposed
-
- combinesForward bool // May combine with rune on the right
- combinesBackward bool // May combine with rune on the left
- isOneWay bool // Never appears in result
- inDecomp bool // Some decompositions result in this char.
- decomp Decomposition
- expandedDecomp Decomposition
-}
-
-func (f FormInfo) String() string {
- buf := bytes.NewBuffer(make([]byte, 0))
-
- fmt.Fprintf(buf, " quickCheck[C]: %v\n", f.quickCheck[MComposed])
- fmt.Fprintf(buf, " quickCheck[D]: %v\n", f.quickCheck[MDecomposed])
- fmt.Fprintf(buf, " cmbForward: %v\n", f.combinesForward)
- fmt.Fprintf(buf, " cmbBackward: %v\n", f.combinesBackward)
- fmt.Fprintf(buf, " isOneWay: %v\n", f.isOneWay)
- fmt.Fprintf(buf, " inDecomp: %v\n", f.inDecomp)
- fmt.Fprintf(buf, " decomposition: %X\n", f.decomp)
- fmt.Fprintf(buf, " expandedDecomp: %X\n", f.expandedDecomp)
-
- return buf.String()
-}
-
-type Decomposition []rune
-
-func parseDecomposition(s string, skipfirst bool) (a []rune, err error) {
- decomp := strings.Split(s, " ")
- if len(decomp) > 0 && skipfirst {
- decomp = decomp[1:]
- }
- for _, d := range decomp {
- point, err := strconv.ParseUint(d, 16, 64)
- if err != nil {
- return a, err
- }
- a = append(a, rune(point))
- }
- return a, nil
-}
-
-func loadUnicodeData() {
- f := gen.OpenUCDFile("UnicodeData.txt")
- defer f.Close()
- p := ucd.New(f)
- for p.Next() {
- r := p.Rune(ucd.CodePoint)
- char := &chars[r]
-
- char.ccc = uint8(p.Uint(ucd.CanonicalCombiningClass))
- decmap := p.String(ucd.DecompMapping)
-
- exp, err := parseDecomposition(decmap, false)
- isCompat := false
- if err != nil {
- if len(decmap) > 0 {
- exp, err = parseDecomposition(decmap, true)
- if err != nil {
- log.Fatalf(`%U: bad decomp |%v|: "%s"`, r, decmap, err)
- }
- isCompat = true
- }
- }
-
- char.name = p.String(ucd.Name)
- char.codePoint = r
- char.forms[FCompatibility].decomp = exp
- if !isCompat {
- char.forms[FCanonical].decomp = exp
- } else {
- char.compatDecomp = true
- }
- if len(decmap) > 0 {
- char.forms[FCompatibility].decomp = exp
- }
- }
- if err := p.Err(); err != nil {
- log.Fatal(err)
- }
-}
-
-// compactCCC converts the sparse set of CCC values to a continguous one,
-// reducing the number of bits needed from 8 to 6.
-func compactCCC() {
- m := make(map[uint8]uint8)
- for i := range chars {
- c := &chars[i]
- m[c.ccc] = 0
- }
- cccs := []int{}
- for v, _ := range m {
- cccs = append(cccs, int(v))
- }
- sort.Ints(cccs)
- for i, c := range cccs {
- cccMap[uint8(i)] = uint8(c)
- m[uint8(c)] = uint8(i)
- }
- for i := range chars {
- c := &chars[i]
- c.origCCC = c.ccc
- c.ccc = m[c.ccc]
- }
- if len(m) >= 1<<6 {
- log.Fatalf("too many difference CCC values: %d >= 64", len(m))
- }
-}
-
-// CompositionExclusions.txt has form:
-// 0958 # ...
-// See https://unicode.org/reports/tr44/ for full explanation
-func loadCompositionExclusions() {
- f := gen.OpenUCDFile("CompositionExclusions.txt")
- defer f.Close()
- p := ucd.New(f)
- for p.Next() {
- c := &chars[p.Rune(0)]
- if c.excludeInComp {
- log.Fatalf("%U: Duplicate entry in exclusions.", c.codePoint)
- }
- c.excludeInComp = true
- }
- if e := p.Err(); e != nil {
- log.Fatal(e)
- }
-}
-
-// hasCompatDecomp returns true if any of the recursive
-// decompositions contains a compatibility expansion.
-// In this case, the character may not occur in NFK*.
-func hasCompatDecomp(r rune) bool {
- c := &chars[r]
- if c.compatDecomp {
- return true
- }
- for _, d := range c.forms[FCompatibility].decomp {
- if hasCompatDecomp(d) {
- return true
- }
- }
- return false
-}
-
-// Hangul related constants.
-const (
- HangulBase = 0xAC00
- HangulEnd = 0xD7A4 // hangulBase + Jamo combinations (19 * 21 * 28)
-
- JamoLBase = 0x1100
- JamoLEnd = 0x1113
- JamoVBase = 0x1161
- JamoVEnd = 0x1176
- JamoTBase = 0x11A8
- JamoTEnd = 0x11C3
-
- JamoLVTCount = 19 * 21 * 28
- JamoTCount = 28
-)
-
-func isHangul(r rune) bool {
- return HangulBase <= r && r < HangulEnd
-}
-
-func isHangulWithoutJamoT(r rune) bool {
- if !isHangul(r) {
- return false
- }
- r -= HangulBase
- return r < JamoLVTCount && r%JamoTCount == 0
-}
-
-func ccc(r rune) uint8 {
- return chars[r].ccc
-}
-
-// Insert a rune in a buffer, ordered by Canonical Combining Class.
-func insertOrdered(b Decomposition, r rune) Decomposition {
- n := len(b)
- b = append(b, 0)
- cc := ccc(r)
- if cc > 0 {
- // Use bubble sort.
- for ; n > 0; n-- {
- if ccc(b[n-1]) <= cc {
- break
- }
- b[n] = b[n-1]
- }
- }
- b[n] = r
- return b
-}
-
-// Recursively decompose.
-func decomposeRecursive(form int, r rune, d Decomposition) Decomposition {
- dcomp := chars[r].forms[form].decomp
- if len(dcomp) == 0 {
- return insertOrdered(d, r)
- }
- for _, c := range dcomp {
- d = decomposeRecursive(form, c, d)
- }
- return d
-}
-
-func completeCharFields(form int) {
- // Phase 0: pre-expand decomposition.
- for i := range chars {
- f := &chars[i].forms[form]
- if len(f.decomp) == 0 {
- continue
- }
- exp := make(Decomposition, 0)
- for _, c := range f.decomp {
- exp = decomposeRecursive(form, c, exp)
- }
- f.expandedDecomp = exp
- }
-
- // Phase 1: composition exclusion, mark decomposition.
- for i := range chars {
- c := &chars[i]
- f := &c.forms[form]
-
- // Marks script-specific exclusions and version restricted.
- f.isOneWay = c.excludeInComp
-
- // Singletons
- f.isOneWay = f.isOneWay || len(f.decomp) == 1
-
- // Non-starter decompositions
- if len(f.decomp) > 1 {
- chk := c.ccc != 0 || chars[f.decomp[0]].ccc != 0
- f.isOneWay = f.isOneWay || chk
- }
-
- // Runes that decompose into more than two runes.
- f.isOneWay = f.isOneWay || len(f.decomp) > 2
-
- if form == FCompatibility {
- f.isOneWay = f.isOneWay || hasCompatDecomp(c.codePoint)
- }
-
- for _, r := range f.decomp {
- chars[r].forms[form].inDecomp = true
- }
- }
-
- // Phase 2: forward and backward combining.
- for i := range chars {
- c := &chars[i]
- f := &c.forms[form]
-
- if !f.isOneWay && len(f.decomp) == 2 {
- f0 := &chars[f.decomp[0]].forms[form]
- f1 := &chars[f.decomp[1]].forms[form]
- if !f0.isOneWay {
- f0.combinesForward = true
- }
- if !f1.isOneWay {
- f1.combinesBackward = true
- }
- }
- if isHangulWithoutJamoT(rune(i)) {
- f.combinesForward = true
- }
- }
-
- // Phase 3: quick check values.
- for i := range chars {
- c := &chars[i]
- f := &c.forms[form]
-
- switch {
- case len(f.decomp) > 0:
- f.quickCheck[MDecomposed] = QCNo
- case isHangul(rune(i)):
- f.quickCheck[MDecomposed] = QCNo
- default:
- f.quickCheck[MDecomposed] = QCYes
- }
- switch {
- case f.isOneWay:
- f.quickCheck[MComposed] = QCNo
- case (i & 0xffff00) == JamoLBase:
- f.quickCheck[MComposed] = QCYes
- if JamoLBase <= i && i < JamoLEnd {
- f.combinesForward = true
- }
- if JamoVBase <= i && i < JamoVEnd {
- f.quickCheck[MComposed] = QCMaybe
- f.combinesBackward = true
- f.combinesForward = true
- }
- if JamoTBase <= i && i < JamoTEnd {
- f.quickCheck[MComposed] = QCMaybe
- f.combinesBackward = true
- }
- case !f.combinesBackward:
- f.quickCheck[MComposed] = QCYes
- default:
- f.quickCheck[MComposed] = QCMaybe
- }
- }
-}
-
-func computeNonStarterCounts() {
- // Phase 4: leading and trailing non-starter count
- for i := range chars {
- c := &chars[i]
-
- runes := []rune{rune(i)}
- // We always use FCompatibility so that the CGJ insertion points do not
- // change for repeated normalizations with different forms.
- if exp := c.forms[FCompatibility].expandedDecomp; len(exp) > 0 {
- runes = exp
- }
- // We consider runes that combine backwards to be non-starters for the
- // purpose of Stream-Safe Text Processing.
- for _, r := range runes {
- if cr := &chars[r]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
- break
- }
- c.nLeadingNonStarters++
- }
- for i := len(runes) - 1; i >= 0; i-- {
- if cr := &chars[runes[i]]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward {
- break
- }
- c.nTrailingNonStarters++
- }
- if c.nTrailingNonStarters > 3 {
- log.Fatalf("%U: Decomposition with more than 3 (%d) trailing modifiers (%U)", i, c.nTrailingNonStarters, runes)
- }
-
- if isHangul(rune(i)) {
- c.nTrailingNonStarters = 2
- if isHangulWithoutJamoT(rune(i)) {
- c.nTrailingNonStarters = 1
- }
- }
-
- if l, t := c.nLeadingNonStarters, c.nTrailingNonStarters; l > 0 && l != t {
- log.Fatalf("%U: number of leading and trailing non-starters should be equal (%d vs %d)", i, l, t)
- }
- if t := c.nTrailingNonStarters; t > 3 {
- log.Fatalf("%U: number of trailing non-starters is %d > 3", t)
- }
- }
-}
-
-func printBytes(w io.Writer, b []byte, name string) {
- fmt.Fprintf(w, "// %s: %d bytes\n", name, len(b))
- fmt.Fprintf(w, "var %s = [...]byte {", name)
- for i, c := range b {
- switch {
- case i%64 == 0:
- fmt.Fprintf(w, "\n// Bytes %x - %x\n", i, i+63)
- case i%8 == 0:
- fmt.Fprintf(w, "\n")
- }
- fmt.Fprintf(w, "0x%.2X, ", c)
- }
- fmt.Fprint(w, "\n}\n\n")
-}
-
-// See forminfo.go for format.
-func makeEntry(f *FormInfo, c *Char) uint16 {
- e := uint16(0)
- if r := c.codePoint; HangulBase <= r && r < HangulEnd {
- e |= 0x40
- }
- if f.combinesForward {
- e |= 0x20
- }
- if f.quickCheck[MDecomposed] == QCNo {
- e |= 0x4
- }
- switch f.quickCheck[MComposed] {
- case QCYes:
- case QCNo:
- e |= 0x10
- case QCMaybe:
- e |= 0x18
- default:
- log.Fatalf("Illegal quickcheck value %v.", f.quickCheck[MComposed])
- }
- e |= uint16(c.nTrailingNonStarters)
- return e
-}
-
-// decompSet keeps track of unique decompositions, grouped by whether
-// the decomposition is followed by a trailing and/or leading CCC.
-type decompSet [7]map[string]bool
-
-const (
- normalDecomp = iota
- firstMulti
- firstCCC
- endMulti
- firstLeadingCCC
- firstCCCZeroExcept
- firstStarterWithNLead
- lastDecomp
-)
-
-var cname = []string{"firstMulti", "firstCCC", "endMulti", "firstLeadingCCC", "firstCCCZeroExcept", "firstStarterWithNLead", "lastDecomp"}
-
-func makeDecompSet() decompSet {
- m := decompSet{}
- for i := range m {
- m[i] = make(map[string]bool)
- }
- return m
-}
-func (m *decompSet) insert(key int, s string) {
- m[key][s] = true
-}
-
-func printCharInfoTables(w io.Writer) int {
- mkstr := func(r rune, f *FormInfo) (int, string) {
- d := f.expandedDecomp
- s := string([]rune(d))
- if max := 1 << 6; len(s) >= max {
- const msg = "%U: too many bytes in decomposition: %d >= %d"
- log.Fatalf(msg, r, len(s), max)
- }
- head := uint8(len(s))
- if f.quickCheck[MComposed] != QCYes {
- head |= 0x40
- }
- if f.combinesForward {
- head |= 0x80
- }
- s = string([]byte{head}) + s
-
- lccc := ccc(d[0])
- tccc := ccc(d[len(d)-1])
- cc := ccc(r)
- if cc != 0 && lccc == 0 && tccc == 0 {
- log.Fatalf("%U: trailing and leading ccc are 0 for non-zero ccc %d", r, cc)
- }
- if tccc < lccc && lccc != 0 {
- const msg = "%U: lccc (%d) must be <= tcc (%d)"
- log.Fatalf(msg, r, lccc, tccc)
- }
- index := normalDecomp
- nTrail := chars[r].nTrailingNonStarters
- nLead := chars[r].nLeadingNonStarters
- if tccc > 0 || lccc > 0 || nTrail > 0 {
- tccc <<= 2
- tccc |= nTrail
- s += string([]byte{tccc})
- index = endMulti
- for _, r := range d[1:] {
- if ccc(r) == 0 {
- index = firstCCC
- }
- }
- if lccc > 0 || nLead > 0 {
- s += string([]byte{lccc})
- if index == firstCCC {
- log.Fatalf("%U: multi-segment decomposition not supported for decompositions with leading CCC != 0", r)
- }
- index = firstLeadingCCC
- }
- if cc != lccc {
- if cc != 0 {
- log.Fatalf("%U: for lccc != ccc, expected ccc to be 0; was %d", r, cc)
- }
- index = firstCCCZeroExcept
- }
- } else if len(d) > 1 {
- index = firstMulti
- }
- return index, s
- }
-
- decompSet := makeDecompSet()
- const nLeadStr = "\x00\x01" // 0-byte length and tccc with nTrail.
- decompSet.insert(firstStarterWithNLead, nLeadStr)
-
- // Store the uniqued decompositions in a byte buffer,
- // preceded by their byte length.
- for _, c := range chars {
- for _, f := range c.forms {
- if len(f.expandedDecomp) == 0 {
- continue
- }
- if f.combinesBackward {
- log.Fatalf("%U: combinesBackward and decompose", c.codePoint)
- }
- index, s := mkstr(c.codePoint, &f)
- decompSet.insert(index, s)
- }
- }
-
- decompositions := bytes.NewBuffer(make([]byte, 0, 10000))
- size := 0
- positionMap := make(map[string]uint16)
- decompositions.WriteString("\000")
- fmt.Fprintln(w, "const (")
- for i, m := range decompSet {
- sa := []string{}
- for s := range m {
- sa = append(sa, s)
- }
- sort.Strings(sa)
- for _, s := range sa {
- p := decompositions.Len()
- decompositions.WriteString(s)
- positionMap[s] = uint16(p)
- }
- if cname[i] != "" {
- fmt.Fprintf(w, "%s = 0x%X\n", cname[i], decompositions.Len())
- }
- }
- fmt.Fprintln(w, "maxDecomp = 0x8000")
- fmt.Fprintln(w, ")")
- b := decompositions.Bytes()
- printBytes(w, b, "decomps")
- size += len(b)
-
- varnames := []string{"nfc", "nfkc"}
- for i := 0; i < FNumberOfFormTypes; i++ {
- trie := triegen.NewTrie(varnames[i])
-
- for r, c := range chars {
- f := c.forms[i]
- d := f.expandedDecomp
- if len(d) != 0 {
- _, key := mkstr(c.codePoint, &f)
- trie.Insert(rune(r), uint64(positionMap[key]))
- if c.ccc != ccc(d[0]) {
- // We assume the lead ccc of a decomposition !=0 in this case.
- if ccc(d[0]) == 0 {
- log.Fatalf("Expected leading CCC to be non-zero; ccc is %d", c.ccc)
- }
- }
- } else if c.nLeadingNonStarters > 0 && len(f.expandedDecomp) == 0 && c.ccc == 0 && !f.combinesBackward {
- // Handle cases where it can't be detected that the nLead should be equal
- // to nTrail.
- trie.Insert(c.codePoint, uint64(positionMap[nLeadStr]))
- } else if v := makeEntry(&f, &c)<<8 | uint16(c.ccc); v != 0 {
- trie.Insert(c.codePoint, uint64(0x8000|v))
- }
- }
- sz, err := trie.Gen(w, triegen.Compact(&normCompacter{name: varnames[i]}))
- if err != nil {
- log.Fatal(err)
- }
- size += sz
- }
- return size
-}
-
-func contains(sa []string, s string) bool {
- for _, a := range sa {
- if a == s {
- return true
- }
- }
- return false
-}
-
-func makeTables() {
- w := &bytes.Buffer{}
-
- size := 0
- if *tablelist == "" {
- return
- }
- list := strings.Split(*tablelist, ",")
- if *tablelist == "all" {
- list = []string{"recomp", "info"}
- }
-
- // Compute maximum decomposition size.
- max := 0
- for _, c := range chars {
- if n := len(string(c.forms[FCompatibility].expandedDecomp)); n > max {
- max = n
- }
- }
- fmt.Fprintln(w, `import "sync"`)
- fmt.Fprintln(w)
-
- fmt.Fprintln(w, "const (")
- fmt.Fprintln(w, "\t// Version is the Unicode edition from which the tables are derived.")
- fmt.Fprintf(w, "\tVersion = %q\n", gen.UnicodeVersion())
- fmt.Fprintln(w)
- fmt.Fprintln(w, "\t// MaxTransformChunkSize indicates the maximum number of bytes that Transform")
- fmt.Fprintln(w, "\t// may need to write atomically for any Form. Making a destination buffer at")
- fmt.Fprintln(w, "\t// least this size ensures that Transform can always make progress and that")
- fmt.Fprintln(w, "\t// the user does not need to grow the buffer on an ErrShortDst.")
- fmt.Fprintf(w, "\tMaxTransformChunkSize = %d+maxNonStarters*4\n", len(string(0x034F))+max)
- fmt.Fprintln(w, ")\n")
-
- // Print the CCC remap table.
- size += len(cccMap)
- fmt.Fprintf(w, "var ccc = [%d]uint8{", len(cccMap))
- for i := 0; i < len(cccMap); i++ {
- if i%8 == 0 {
- fmt.Fprintln(w)
- }
- fmt.Fprintf(w, "%3d, ", cccMap[uint8(i)])
- }
- fmt.Fprintln(w, "\n}\n")
-
- if contains(list, "info") {
- size += printCharInfoTables(w)
- }
-
- if contains(list, "recomp") {
- // Note that we use 32 bit keys, instead of 64 bit.
- // This clips the bits of three entries, but we know
- // this won't cause a collision. The compiler will catch
- // any changes made to UnicodeData.txt that introduces
- // a collision.
- // Note that the recomposition map for NFC and NFKC
- // are identical.
-
- // Recomposition map
- nrentries := 0
- for _, c := range chars {
- f := c.forms[FCanonical]
- if !f.isOneWay && len(f.decomp) > 0 {
- nrentries++
- }
- }
- sz := nrentries * 8
- size += sz
- fmt.Fprintf(w, "// recompMap: %d bytes (entries only)\n", sz)
- fmt.Fprintln(w, "var recompMap map[uint32]rune")
- fmt.Fprintln(w, "var recompMapOnce sync.Once\n")
- fmt.Fprintln(w, `const recompMapPacked = "" +`)
- var buf [8]byte
- for i, c := range chars {
- f := c.forms[FCanonical]
- d := f.decomp
- if !f.isOneWay && len(d) > 0 {
- key := uint32(uint16(d[0]))<<16 + uint32(uint16(d[1]))
- binary.BigEndian.PutUint32(buf[:4], key)
- binary.BigEndian.PutUint32(buf[4:], uint32(i))
- fmt.Fprintf(w, "\t\t%q + // 0x%.8X: 0x%.8X\n", string(buf[:]), key, uint32(i))
- }
- }
- // hack so we don't have to special case the trailing plus sign
- fmt.Fprintf(w, ` ""`)
- fmt.Fprintln(w)
- }
-
- fmt.Fprintf(w, "// Total size of tables: %dKB (%d bytes)\n", (size+512)/1024, size)
- gen.WriteVersionedGoFile("tables.go", "norm", w.Bytes())
-}
-
-func printChars() {
- if *verbose {
- for _, c := range chars {
- if !c.isValid() || c.state == SMissing {
- continue
- }
- fmt.Println(c)
- }
- }
-}
-
-// verifyComputed does various consistency tests.
-func verifyComputed() {
- for i, c := range chars {
- for _, f := range c.forms {
- isNo := (f.quickCheck[MDecomposed] == QCNo)
- if (len(f.decomp) > 0) != isNo && !isHangul(rune(i)) {
- log.Fatalf("%U: NF*D QC must be No if rune decomposes", i)
- }
-
- isMaybe := f.quickCheck[MComposed] == QCMaybe
- if f.combinesBackward != isMaybe {
- log.Fatalf("%U: NF*C QC must be Maybe if combinesBackward", i)
- }
- if len(f.decomp) > 0 && f.combinesForward && isMaybe {
- log.Fatalf("%U: NF*C QC must be Yes or No if combinesForward and decomposes", i)
- }
-
- if len(f.expandedDecomp) != 0 {
- continue
- }
- if a, b := c.nLeadingNonStarters > 0, (c.ccc > 0 || f.combinesBackward); a != b {
- // We accept these runes to be treated differently (it only affects
- // segment breaking in iteration, most likely on improper use), but
- // reconsider if more characters are added.
- // U+FF9E HALFWIDTH KATAKANA VOICED SOUND MARK;Lm;0;L;<narrow> 3099;;;;N;;;;;
- // U+FF9F HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK;Lm;0;L;<narrow> 309A;;;;N;;;;;
- // U+3133 HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<compat> 11AA;;;;N;HANGUL LETTER GIYEOG SIOS;;;;
- // U+318E HANGUL LETTER ARAEAE;Lo;0;L;<compat> 11A1;;;;N;HANGUL LETTER ALAE AE;;;;
- // U+FFA3 HALFWIDTH HANGUL LETTER KIYEOK-SIOS;Lo;0;L;<narrow> 3133;;;;N;HALFWIDTH HANGUL LETTER GIYEOG SIOS;;;;
- // U+FFDC HALFWIDTH HANGUL LETTER I;Lo;0;L;<narrow> 3163;;;;N;;;;;
- if i != 0xFF9E && i != 0xFF9F && !(0x3133 <= i && i <= 0x318E) && !(0xFFA3 <= i && i <= 0xFFDC) {
- log.Fatalf("%U: nLead was %v; want %v", i, a, b)
- }
- }
- }
- nfc := c.forms[FCanonical]
- nfkc := c.forms[FCompatibility]
- if nfc.combinesBackward != nfkc.combinesBackward {
- log.Fatalf("%U: Cannot combine combinesBackward\n", c.codePoint)
- }
- }
-}
-
-// Use values in DerivedNormalizationProps.txt to compare against the
-// values we computed.
-// DerivedNormalizationProps.txt has form:
-// 00C0..00C5 ; NFD_QC; N # ...
-// 0374 ; NFD_QC; N # ...
-// See https://unicode.org/reports/tr44/ for full explanation
-func testDerived() {
- f := gen.OpenUCDFile("DerivedNormalizationProps.txt")
- defer f.Close()
- p := ucd.New(f)
- for p.Next() {
- r := p.Rune(0)
- c := &chars[r]
-
- var ftype, mode int
- qt := p.String(1)
- switch qt {
- case "NFC_QC":
- ftype, mode = FCanonical, MComposed
- case "NFD_QC":
- ftype, mode = FCanonical, MDecomposed
- case "NFKC_QC":
- ftype, mode = FCompatibility, MComposed
- case "NFKD_QC":
- ftype, mode = FCompatibility, MDecomposed
- default:
- continue
- }
- var qr QCResult
- switch p.String(2) {
- case "Y":
- qr = QCYes
- case "N":
- qr = QCNo
- case "M":
- qr = QCMaybe
- default:
- log.Fatalf(`Unexpected quick check value "%s"`, p.String(2))
- }
- if got := c.forms[ftype].quickCheck[mode]; got != qr {
- log.Printf("%U: FAILED %s (was %v need %v)\n", r, qt, got, qr)
- }
- c.forms[ftype].verified[mode] = true
- }
- if err := p.Err(); err != nil {
- log.Fatal(err)
- }
- // Any unspecified value must be QCYes. Verify this.
- for i, c := range chars {
- for j, fd := range c.forms {
- for k, qr := range fd.quickCheck {
- if !fd.verified[k] && qr != QCYes {
- m := "%U: FAIL F:%d M:%d (was %v need Yes) %s\n"
- log.Printf(m, i, j, k, qr, c.name)
- }
- }
- }
- }
-}
-
-var testHeader = `const (
- Yes = iota
- No
- Maybe
-)
-
-type formData struct {
- qc uint8
- combinesForward bool
- decomposition string
-}
-
-type runeData struct {
- r rune
- ccc uint8
- nLead uint8
- nTrail uint8
- f [2]formData // 0: canonical; 1: compatibility
-}
-
-func f(qc uint8, cf bool, dec string) [2]formData {
- return [2]formData{{qc, cf, dec}, {qc, cf, dec}}
-}
-
-func g(qc, qck uint8, cf, cfk bool, d, dk string) [2]formData {
- return [2]formData{{qc, cf, d}, {qck, cfk, dk}}
-}
-
-var testData = []runeData{
-`
-
-func printTestdata() {
- type lastInfo struct {
- ccc uint8
- nLead uint8
- nTrail uint8
- f string
- }
-
- last := lastInfo{}
- w := &bytes.Buffer{}
- fmt.Fprintf(w, testHeader)
- for r, c := range chars {
- f := c.forms[FCanonical]
- qc, cf, d := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
- f = c.forms[FCompatibility]
- qck, cfk, dk := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp)
- s := ""
- if d == dk && qc == qck && cf == cfk {
- s = fmt.Sprintf("f(%s, %v, %q)", qc, cf, d)
- } else {
- s = fmt.Sprintf("g(%s, %s, %v, %v, %q, %q)", qc, qck, cf, cfk, d, dk)
- }
- current := lastInfo{c.ccc, c.nLeadingNonStarters, c.nTrailingNonStarters, s}
- if last != current {
- fmt.Fprintf(w, "\t{0x%x, %d, %d, %d, %s},\n", r, c.origCCC, c.nLeadingNonStarters, c.nTrailingNonStarters, s)
- last = current
- }
- }
- fmt.Fprintln(w, "}")
- gen.WriteVersionedGoFile("data_test.go", "norm", w.Bytes())
-}
diff --git a/vendor/golang.org/x/text/unicode/norm/normalize.go b/vendor/golang.org/x/text/unicode/norm/normalize.go
index af522aa7c..95efcf26e 100644
--- a/vendor/golang.org/x/text/unicode/norm/normalize.go
+++ b/vendor/golang.org/x/text/unicode/norm/normalize.go
@@ -7,7 +7,7 @@
//go:generate go test -tags test
// Package norm contains types and functions for normalizing Unicode strings.
-package norm
+package norm // import "golang.org/x/text/unicode/norm"
import (
"unicode/utf8"
diff --git a/vendor/golang.org/x/text/unicode/norm/triegen.go b/vendor/golang.org/x/text/unicode/norm/triegen.go
deleted file mode 100644
index 45d711900..000000000
--- a/vendor/golang.org/x/text/unicode/norm/triegen.go
+++ /dev/null
@@ -1,117 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// Trie table generator.
-// Used by make*tables tools to generate a go file with trie data structures
-// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte
-// sequence are used to lookup offsets in the index table to be used for the
-// next byte. The last byte is used to index into a table with 16-bit values.
-
-package main
-
-import (
- "fmt"
- "io"
-)
-
-const maxSparseEntries = 16
-
-type normCompacter struct {
- sparseBlocks [][]uint64
- sparseOffset []uint16
- sparseCount int
- name string
-}
-
-func mostFrequentStride(a []uint64) int {
- counts := make(map[int]int)
- var v int
- for _, x := range a {
- if stride := int(x) - v; v != 0 && stride >= 0 {
- counts[stride]++
- }
- v = int(x)
- }
- var maxs, maxc int
- for stride, cnt := range counts {
- if cnt > maxc || (cnt == maxc && stride < maxs) {
- maxs, maxc = stride, cnt
- }
- }
- return maxs
-}
-
-func countSparseEntries(a []uint64) int {
- stride := mostFrequentStride(a)
- var v, count int
- for _, tv := range a {
- if int(tv)-v != stride {
- if tv != 0 {
- count++
- }
- }
- v = int(tv)
- }
- return count
-}
-
-func (c *normCompacter) Size(v []uint64) (sz int, ok bool) {
- if n := countSparseEntries(v); n <= maxSparseEntries {
- return (n+1)*4 + 2, true
- }
- return 0, false
-}
-
-func (c *normCompacter) Store(v []uint64) uint32 {
- h := uint32(len(c.sparseOffset))
- c.sparseBlocks = append(c.sparseBlocks, v)
- c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount))
- c.sparseCount += countSparseEntries(v) + 1
- return h
-}
-
-func (c *normCompacter) Handler() string {
- return c.name + "Sparse.lookup"
-}
-
-func (c *normCompacter) Print(w io.Writer) (retErr error) {
- p := func(f string, x ...interface{}) {
- if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil {
- retErr = err
- }
- }
-
- ls := len(c.sparseBlocks)
- p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2)
- p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset)
-
- ns := c.sparseCount
- p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4)
- p("var %sSparseValues = [%d]valueRange {", c.name, ns)
- for i, b := range c.sparseBlocks {
- p("\n// Block %#x, offset %#x", i, c.sparseOffset[i])
- var v int
- stride := mostFrequentStride(b)
- n := countSparseEntries(b)
- p("\n{value:%#04x,lo:%#02x},", stride, uint8(n))
- for i, nv := range b {
- if int(nv)-v != stride {
- if v != 0 {
- p(",hi:%#02x},", 0x80+i-1)
- }
- if nv != 0 {
- p("\n{value:%#04x,lo:%#02x", nv, 0x80+i)
- }
- }
- v = int(nv)
- }
- if v != 0 {
- p(",hi:%#02x},", 0x80+len(b)-1)
- }
- }
- p("\n}\n\n")
- return
-}
diff --git a/vendor/golang.org/x/tools/cmd/goimports/doc.go b/vendor/golang.org/x/tools/cmd/goimports/doc.go
index 7809b163b..7033e4d4c 100644
--- a/vendor/golang.org/x/tools/cmd/goimports/doc.go
+++ b/vendor/golang.org/x/tools/cmd/goimports/doc.go
@@ -40,4 +40,4 @@ File bugs or feature requests at:
Happy hacking!
*/
-package main
+package main // import "golang.org/x/tools/cmd/goimports"
diff --git a/vendor/golang.org/x/tools/go/analysis/doc.go b/vendor/golang.org/x/tools/go/analysis/doc.go
index 1b7b7ed5a..8fa4a8531 100644
--- a/vendor/golang.org/x/tools/go/analysis/doc.go
+++ b/vendor/golang.org/x/tools/go/analysis/doc.go
@@ -70,39 +70,6 @@ A driver may use the name, flags, and documentation to provide on-line
help that describes the analyses it performs.
The doc comment contains a brief one-line summary,
optionally followed by paragraphs of explanation.
-The vet command, shown below, is an example of a driver that runs
-multiple analyzers. It is based on the multichecker package
-(see the "Standalone commands" section for details).
-
- $ go build golang.org/x/tools/go/analysis/cmd/vet
- $ ./vet help
- vet is a tool for static analysis of Go programs.
-
- Usage: vet [-flag] [package]
-
- Registered analyzers:
-
- asmdecl report mismatches between assembly files and Go declarations
- assign check for useless assignments
- atomic check for common mistakes using the sync/atomic package
- ...
- unusedresult check for unused results of calls to some functions
-
- $ ./vet help unusedresult
- unusedresult: check for unused results of calls to some functions
-
- Analyzer flags:
-
- -unusedresult.funcs value
- comma-separated list of functions whose results must be used (default Error,String)
- -unusedresult.stringmethods value
- comma-separated list of names of methods of type func() string whose results must be used
-
- Some functions like fmt.Errorf return a result and have no side effects,
- so it is always a mistake to discard the result. This analyzer reports
- calls to certain functions in which the result of the call is ignored.
-
- The set of functions may be controlled using flags.
The Analyzer type has more fields besides those shown above:
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
index 46f9ad184..2087ceec9 100644
--- a/vendor/golang.org/x/tools/go/ast/astutil/imports.go
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -3,7 +3,7 @@
// license that can be found in the LICENSE file.
// Package astutil contains common utilities for working with the Go AST.
-package astutil
+package astutil // import "golang.org/x/tools/go/ast/astutil"
import (
"fmt"
@@ -275,9 +275,10 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
// We deleted an entry but now there may be
// a blank line-sized hole where the import was.
- if line-lastLine > 1 {
+ if line-lastLine > 1 || !gen.Rparen.IsValid() {
// There was a blank line immediately preceding the deleted import,
- // so there's no need to close the hole.
+ // so there's no need to close the hole. The right parenthesis is
+ // invalid after AddImport to an import statement without parenthesis.
// Do nothing.
} else if line != fset.File(gen.Rparen).LineCount() {
// There was no blank line. Close the hole.
diff --git a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go
index ddbdd3f08..3084508b5 100644
--- a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go
+++ b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go
@@ -90,7 +90,7 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
// The types argument, if non-empty, enables type-based filtering of
// events. The function f if is called only for nodes whose type
// matches an element of the types slice.
-func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prune bool)) {
+func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (proceed bool)) {
mask := maskOf(types)
for i := 0; i < len(in.events); {
ev := in.events[i]
@@ -114,7 +114,7 @@ func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prun
// supplies each call to f an additional argument, the current
// traversal stack. The stack's first element is the outermost node,
// an *ast.File; its last is the innermost, n.
-func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (prune bool)) {
+func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (proceed bool)) {
mask := maskOf(types)
var stack []ast.Node
for i := 0; i < len(in.events); {
diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
index 3d29cf3b8..c0cb03e7b 100644
--- a/vendor/golang.org/x/tools/go/buildutil/allpackages.go
+++ b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
@@ -7,7 +7,7 @@
//
// All I/O is done via the build.Context file system interface, which must
// be concurrency-safe.
-package buildutil
+package buildutil // import "golang.org/x/tools/go/buildutil"
import (
"go/build"
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
index 3aa0c9317..f8363d8fa 100644
--- a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -18,7 +18,7 @@
// Go 1.8 export data files, so they will work before and after the
// Go update. (See discussion at https://golang.org/issue/15651.)
//
-package gcexportdata
+package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
import (
"bufio"
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/main.go b/vendor/golang.org/x/tools/go/gcexportdata/main.go
deleted file mode 100644
index 2713dce64..000000000
--- a/vendor/golang.org/x/tools/go/gcexportdata/main.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// The gcexportdata command is a diagnostic tool that displays the
-// contents of gc export data files.
-package main
-
-import (
- "flag"
- "fmt"
- "go/token"
- "go/types"
- "log"
- "os"
-
- "golang.org/x/tools/go/gcexportdata"
- "golang.org/x/tools/go/types/typeutil"
-)
-
-var packageFlag = flag.String("package", "", "alternative package to print")
-
-func main() {
- log.SetPrefix("gcexportdata: ")
- log.SetFlags(0)
- flag.Usage = func() {
- fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
- }
- flag.Parse()
- if flag.NArg() != 1 {
- flag.Usage()
- os.Exit(2)
- }
- filename := flag.Args()[0]
-
- f, err := os.Open(filename)
- if err != nil {
- log.Fatal(err)
- }
-
- r, err := gcexportdata.NewReader(f)
- if err != nil {
- log.Fatalf("%s: %s", filename, err)
- }
-
- // Decode the package.
- const primary = "<primary>"
- imports := make(map[string]*types.Package)
- fset := token.NewFileSet()
- pkg, err := gcexportdata.Read(r, fset, imports, primary)
- if err != nil {
- log.Fatalf("%s: %s", filename, err)
- }
-
- // Optionally select an indirectly mentioned package.
- if *packageFlag != "" {
- pkg = imports[*packageFlag]
- if pkg == nil {
- fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
- filename, *packageFlag)
- for p := range imports {
- if p != primary {
- fmt.Fprintf(os.Stderr, "\t%s\n", p)
- }
- }
- os.Exit(1)
- }
- }
-
- // Print all package-level declarations, including non-exported ones.
- fmt.Printf("package %s\n", pkg.Name())
- for _, imp := range pkg.Imports() {
- fmt.Printf("import %q\n", imp.Path())
- }
- qual := func(p *types.Package) string {
- if pkg == p {
- return ""
- }
- return p.Name()
- }
- scope := pkg.Scope()
- for _, name := range scope.Names() {
- obj := scope.Lookup(name)
- fmt.Printf("%s: %s\n",
- fset.Position(obj.Pos()),
- types.ObjectString(obj, qual))
-
- // For types, print each method.
- if _, ok := obj.(*types.TypeName); ok {
- for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
- fmt.Printf("%s: %s\n",
- fset.Position(method.Obj().Pos()),
- types.SelectionString(method, qual))
- }
- }
- }
-}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
deleted file mode 100644
index 0f652ea6f..000000000
--- a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
+++ /dev/null
@@ -1,220 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package cgo
-
-// This file handles cgo preprocessing of files containing `import "C"`.
-//
-// DESIGN
-//
-// The approach taken is to run the cgo processor on the package's
-// CgoFiles and parse the output, faking the filenames of the
-// resulting ASTs so that the synthetic file containing the C types is
-// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
-// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
-// not the names of the actual temporary files.
-//
-// The advantage of this approach is its fidelity to 'go build'. The
-// downside is that the token.Position.Offset for each AST node is
-// incorrect, being an offset within the temporary file. Line numbers
-// should still be correct because of the //line comments.
-//
-// The logic of this file is mostly plundered from the 'go build'
-// tool, which also invokes the cgo preprocessor.
-//
-//
-// REJECTED ALTERNATIVE
-//
-// An alternative approach that we explored is to extend go/types'
-// Importer mechanism to provide the identity of the importing package
-// so that each time `import "C"` appears it resolves to a different
-// synthetic package containing just the objects needed in that case.
-// The loader would invoke cgo but parse only the cgo_types.go file
-// defining the package-level objects, discarding the other files
-// resulting from preprocessing.
-//
-// The benefit of this approach would have been that source-level
-// syntax information would correspond exactly to the original cgo
-// file, with no preprocessing involved, making source tools like
-// godoc, guru, and eg happy. However, the approach was rejected
-// due to the additional complexity it would impose on go/types. (It
-// made for a beautiful demo, though.)
-//
-// cgo files, despite their *.go extension, are not legal Go source
-// files per the specification since they may refer to unexported
-// members of package "C" such as C.int. Also, a function such as
-// C.getpwent has in effect two types, one matching its C type and one
-// which additionally returns (errno C.int). The cgo preprocessor
-// uses name mangling to distinguish these two functions in the
-// processed code, but go/types would need to duplicate this logic in
-// its handling of function calls, analogous to the treatment of map
-// lookups in which y=m[k] and y,ok=m[k] are both legal.
-
-import (
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io/ioutil"
- "log"
- "os"
- "os/exec"
- "path/filepath"
- "regexp"
- "strings"
-)
-
-// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
-// the output and returns the resulting ASTs.
-//
-func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
- tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
- if err != nil {
- return nil, err
- }
- defer os.RemoveAll(tmpdir)
-
- pkgdir := bp.Dir
- if DisplayPath != nil {
- pkgdir = DisplayPath(pkgdir)
- }
-
- cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
- if err != nil {
- return nil, err
- }
- var files []*ast.File
- for i := range cgoFiles {
- rd, err := os.Open(cgoFiles[i])
- if err != nil {
- return nil, err
- }
- display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
- f, err := parser.ParseFile(fset, display, rd, mode)
- rd.Close()
- if err != nil {
- return nil, err
- }
- files = append(files, f)
- }
- return files, nil
-}
-
-var cgoRe = regexp.MustCompile(`[/\\:]`)
-
-// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
-// lists of files: the resulting processed files (in temporary
-// directory tmpdir) and the corresponding names of the unprocessed files.
-//
-// Run is adapted from (*builder).cgo in
-// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
-// Objective C, CGOPKGPATH, CGO_FLAGS.
-//
-// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
-// to the cgo preprocessor. This in turn will set the // line comments
-// referring to those files to use absolute paths. This is needed for
-// go/packages using the legacy go list support so it is able to find
-// the original files.
-func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
- cgoCPPFLAGS, _, _, _ := cflags(bp, true)
- _, cgoexeCFLAGS, _, _ := cflags(bp, false)
-
- if len(bp.CgoPkgConfig) > 0 {
- pcCFLAGS, err := pkgConfigFlags(bp)
- if err != nil {
- return nil, nil, err
- }
- cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
- }
-
- // Allows including _cgo_export.h from .[ch] files in the package.
- cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
-
- // _cgo_gotypes.go (displayed "C") contains the type definitions.
- files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
- displayFiles = append(displayFiles, "C")
- for _, fn := range bp.CgoFiles {
- // "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
- f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
- files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
- displayFiles = append(displayFiles, fn)
- }
-
- var cgoflags []string
- if bp.Goroot && bp.ImportPath == "runtime/cgo" {
- cgoflags = append(cgoflags, "-import_runtime_cgo=false")
- }
- if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
- cgoflags = append(cgoflags, "-import_syscall=false")
- }
-
- var cgoFiles []string = bp.CgoFiles
- if useabs {
- cgoFiles = make([]string, len(bp.CgoFiles))
- for i := range cgoFiles {
- cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
- }
- }
-
- args := stringList(
- "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
- cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
- )
- if false {
- log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
- }
- cmd := exec.Command(args[0], args[1:]...)
- cmd.Dir = pkgdir
- cmd.Stdout = os.Stderr
- cmd.Stderr = os.Stderr
- if err := cmd.Run(); err != nil {
- return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
- }
-
- return files, displayFiles, nil
-}
-
-// -- unmodified from 'go build' ---------------------------------------
-
-// Return the flags to use when invoking the C or C++ compilers, or cgo.
-func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
- var defaults string
- if def {
- defaults = "-g -O2"
- }
-
- cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
- cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
- cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
- ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
- return
-}
-
-// envList returns the value of the given environment variable broken
-// into fields, using the default value when the variable is empty.
-func envList(key, def string) []string {
- v := os.Getenv(key)
- if v == "" {
- v = def
- }
- return strings.Fields(v)
-}
-
-// stringList's arguments should be a sequence of string or []string values.
-// stringList flattens them into a single []string.
-func stringList(args ...interface{}) []string {
- var x []string
- for _, arg := range args {
- switch arg := arg.(type) {
- case []string:
- x = append(x, arg...)
- case string:
- x = append(x, arg)
- default:
- panic("stringList: invalid argument")
- }
- }
- return x
-}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
deleted file mode 100644
index b5bb95a63..000000000
--- a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package cgo
-
-import (
- "errors"
- "fmt"
- "go/build"
- "os/exec"
- "strings"
-)
-
-// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
-func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
- cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
- out, err := cmd.CombinedOutput()
- if err != nil {
- s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
- if len(out) > 0 {
- s = fmt.Sprintf("%s: %s", s, out)
- }
- return nil, errors.New(s)
- }
- if len(out) > 0 {
- flags = strings.Fields(string(out))
- }
- return
-}
-
-// pkgConfigFlags calls pkg-config if needed and returns the cflags
-// needed to build the package.
-func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
- if len(p.CgoPkgConfig) == 0 {
- return nil, nil
- }
- return pkgConfig("--cflags", p.CgoPkgConfig)
-}
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
index 31238f0df..9cf186605 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
@@ -9,7 +9,7 @@
// Package gcimporter provides various functions for reading
// gc-generated object files that can be used to implement the
// Importer interface defined by the Go 1.5 standard library package.
-package gcimporter
+package gcimporter // import "golang.org/x/tools/go/internal/gcimporter"
import (
"bufio"
diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go
deleted file mode 100644
index c5aa31c1a..000000000
--- a/vendor/golang.org/x/tools/go/loader/doc.go
+++ /dev/null
@@ -1,204 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package loader loads a complete Go program from source code, parsing
-// and type-checking the initial packages plus their transitive closure
-// of dependencies. The ASTs and the derived facts are retained for
-// later use.
-//
-// Deprecated: This is an older API and does not have support
-// for modules. Use golang.org/x/tools/go/packages instead.
-//
-// The package defines two primary types: Config, which specifies a
-// set of initial packages to load and various other options; and
-// Program, which is the result of successfully loading the packages
-// specified by a configuration.
-//
-// The configuration can be set directly, but *Config provides various
-// convenience methods to simplify the common cases, each of which can
-// be called any number of times. Finally, these are followed by a
-// call to Load() to actually load and type-check the program.
-//
-// var conf loader.Config
-//
-// // Use the command-line arguments to specify
-// // a set of initial packages to load from source.
-// // See FromArgsUsage for help.
-// rest, err := conf.FromArgs(os.Args[1:], wantTests)
-//
-// // Parse the specified files and create an ad hoc package with path "foo".
-// // All files must have the same 'package' declaration.
-// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
-//
-// // Create an ad hoc package with path "foo" from
-// // the specified already-parsed files.
-// // All ASTs must have the same 'package' declaration.
-// conf.CreateFromFiles("foo", parsedFiles)
-//
-// // Add "runtime" to the set of packages to be loaded.
-// conf.Import("runtime")
-//
-// // Adds "fmt" and "fmt_test" to the set of packages
-// // to be loaded. "fmt" will include *_test.go files.
-// conf.ImportWithTests("fmt")
-//
-// // Finally, load all the packages specified by the configuration.
-// prog, err := conf.Load()
-//
-// See examples_test.go for examples of API usage.
-//
-//
-// CONCEPTS AND TERMINOLOGY
-//
-// The WORKSPACE is the set of packages accessible to the loader. The
-// workspace is defined by Config.Build, a *build.Context. The
-// default context treats subdirectories of $GOROOT and $GOPATH as
-// packages, but this behavior may be overridden.
-//
-// An AD HOC package is one specified as a set of source files on the
-// command line. In the simplest case, it may consist of a single file
-// such as $GOROOT/src/net/http/triv.go.
-//
-// EXTERNAL TEST packages are those comprised of a set of *_test.go
-// files all with the same 'package foo_test' declaration, all in the
-// same directory. (go/build.Package calls these files XTestFiles.)
-//
-// An IMPORTABLE package is one that can be referred to by some import
-// spec. Every importable package is uniquely identified by its
-// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json",
-// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path
-// typically denotes a subdirectory of the workspace.
-//
-// An import declaration uses an IMPORT PATH to refer to a package.
-// Most import declarations use the package path as the import path.
-//
-// Due to VENDORING (https://golang.org/s/go15vendor), the
-// interpretation of an import path may depend on the directory in which
-// it appears. To resolve an import path to a package path, go/build
-// must search the enclosing directories for a subdirectory named
-// "vendor".
-//
-// ad hoc packages and external test packages are NON-IMPORTABLE. The
-// path of an ad hoc package is inferred from the package
-// declarations of its files and is therefore not a unique package key.
-// For example, Config.CreatePkgs may specify two initial ad hoc
-// packages, both with path "main".
-//
-// An AUGMENTED package is an importable package P plus all the
-// *_test.go files with same 'package foo' declaration as P.
-// (go/build.Package calls these files TestFiles.)
-//
-// The INITIAL packages are those specified in the configuration. A
-// DEPENDENCY is a package loaded to satisfy an import in an initial
-// package or another dependency.
-//
-package loader
-
-// IMPLEMENTATION NOTES
-//
-// 'go test', in-package test files, and import cycles
-// ---------------------------------------------------
-//
-// An external test package may depend upon members of the augmented
-// package that are not in the unaugmented package, such as functions
-// that expose internals. (See bufio/export_test.go for an example.)
-// So, the loader must ensure that for each external test package
-// it loads, it also augments the corresponding non-test package.
-//
-// The import graph over n unaugmented packages must be acyclic; the
-// import graph over n-1 unaugmented packages plus one augmented
-// package must also be acyclic. ('go test' relies on this.) But the
-// import graph over n augmented packages may contain cycles.
-//
-// First, all the (unaugmented) non-test packages and their
-// dependencies are imported in the usual way; the loader reports an
-// error if it detects an import cycle.
-//
-// Then, each package P for which testing is desired is augmented by
-// the list P' of its in-package test files, by calling
-// (*types.Checker).Files. This arrangement ensures that P' may
-// reference definitions within P, but P may not reference definitions
-// within P'. Furthermore, P' may import any other package, including
-// ones that depend upon P, without an import cycle error.
-//
-// Consider two packages A and B, both of which have lists of
-// in-package test files we'll call A' and B', and which have the
-// following import graph edges:
-// B imports A
-// B' imports A
-// A' imports B
-// This last edge would be expected to create an error were it not
-// for the special type-checking discipline above.
-// Cycles of size greater than two are possible. For example:
-// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil"
-// io/ioutil/tempfile_test.go (package ioutil) imports "regexp"
-// regexp/exec_test.go (package regexp) imports "compress/bzip2"
-//
-//
-// Concurrency
-// -----------
-//
-// Let us define the import dependency graph as follows. Each node is a
-// list of files passed to (Checker).Files at once. Many of these lists
-// are the production code of an importable Go package, so those nodes
-// are labelled by the package's path. The remaining nodes are
-// ad hoc packages and lists of in-package *_test.go files that augment
-// an importable package; those nodes have no label.
-//
-// The edges of the graph represent import statements appearing within a
-// file. An edge connects a node (a list of files) to the node it
-// imports, which is importable and thus always labelled.
-//
-// Loading is controlled by this dependency graph.
-//
-// To reduce I/O latency, we start loading a package's dependencies
-// asynchronously as soon as we've parsed its files and enumerated its
-// imports (scanImports). This performs a preorder traversal of the
-// import dependency graph.
-//
-// To exploit hardware parallelism, we type-check unrelated packages in
-// parallel, where "unrelated" means not ordered by the partial order of
-// the import dependency graph.
-//
-// We use a concurrency-safe non-blocking cache (importer.imported) to
-// record the results of type-checking, whether success or failure. An
-// entry is created in this cache by startLoad the first time the
-// package is imported. The first goroutine to request an entry becomes
-// responsible for completing the task and broadcasting completion to
-// subsequent requestors, which block until then.
-//
-// Type checking occurs in (parallel) postorder: we cannot type-check a
-// set of files until we have loaded and type-checked all of their
-// immediate dependencies (and thus all of their transitive
-// dependencies). If the input were guaranteed free of import cycles,
-// this would be trivial: we could simply wait for completion of the
-// dependencies and then invoke the typechecker.
-//
-// But as we saw in the 'go test' section above, some cycles in the
-// import graph over packages are actually legal, so long as the
-// cycle-forming edge originates in the in-package test files that
-// augment the package. This explains why the nodes of the import
-// dependency graph are not packages, but lists of files: the unlabelled
-// nodes avoid the cycles. Consider packages A and B where B imports A
-// and A's in-package tests AT import B. The naively constructed import
-// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but
-// the graph over lists of files is AT --> B --> A, where AT is an
-// unlabelled node.
-//
-// Awaiting completion of the dependencies in a cyclic graph would
-// deadlock, so we must materialize the import dependency graph (as
-// importer.graph) and check whether each import edge forms a cycle. If
-// x imports y, and the graph already contains a path from y to x, then
-// there is an import cycle, in which case the processing of x must not
-// wait for the completion of processing of y.
-//
-// When the type-checker makes a callback (doImport) to the loader for a
-// given import edge, there are two possible cases. In the normal case,
-// the dependency has already been completely type-checked; doImport
-// does a cache lookup and returns it. In the cyclic case, the entry in
-// the cache is still necessarily incomplete, indicating a cycle. We
-// perform the cycle check again to obtain the error message, and return
-// the error.
-//
-// The result of using concurrency is about a 2.5x speedup for stdlib_test.
diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go
deleted file mode 100644
index bc12ca33d..000000000
--- a/vendor/golang.org/x/tools/go/loader/loader.go
+++ /dev/null
@@ -1,1086 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package loader
-
-// See doc.go for package documentation and implementation notes.
-
-import (
- "errors"
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "go/types"
- "os"
- "path/filepath"
- "sort"
- "strings"
- "sync"
- "time"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/go/internal/cgo"
-)
-
-var ignoreVendor build.ImportMode
-
-const trace = false // show timing info for type-checking
-
-// Config specifies the configuration for loading a whole program from
-// Go source code.
-// The zero value for Config is a ready-to-use default configuration.
-type Config struct {
- // Fset is the file set for the parser to use when loading the
- // program. If nil, it may be lazily initialized by any
- // method of Config.
- Fset *token.FileSet
-
- // ParserMode specifies the mode to be used by the parser when
- // loading source packages.
- ParserMode parser.Mode
-
- // TypeChecker contains options relating to the type checker.
- //
- // The supplied IgnoreFuncBodies is not used; the effective
- // value comes from the TypeCheckFuncBodies func below.
- // The supplied Import function is not used either.
- TypeChecker types.Config
-
- // TypeCheckFuncBodies is a predicate over package paths.
- // A package for which the predicate is false will
- // have its package-level declarations type checked, but not
- // its function bodies; this can be used to quickly load
- // dependencies from source. If nil, all func bodies are type
- // checked.
- TypeCheckFuncBodies func(path string) bool
-
- // If Build is non-nil, it is used to locate source packages.
- // Otherwise &build.Default is used.
- //
- // By default, cgo is invoked to preprocess Go files that
- // import the fake package "C". This behaviour can be
- // disabled by setting CGO_ENABLED=0 in the environment prior
- // to startup, or by setting Build.CgoEnabled=false.
- Build *build.Context
-
- // The current directory, used for resolving relative package
- // references such as "./go/loader". If empty, os.Getwd will be
- // used instead.
- Cwd string
-
- // If DisplayPath is non-nil, it is used to transform each
- // file name obtained from Build.Import(). This can be used
- // to prevent a virtualized build.Config's file names from
- // leaking into the user interface.
- DisplayPath func(path string) string
-
- // If AllowErrors is true, Load will return a Program even
- // if some of the its packages contained I/O, parser or type
- // errors; such errors are accessible via PackageInfo.Errors. If
- // false, Load will fail if any package had an error.
- AllowErrors bool
-
- // CreatePkgs specifies a list of non-importable initial
- // packages to create. The resulting packages will appear in
- // the corresponding elements of the Program.Created slice.
- CreatePkgs []PkgSpec
-
- // ImportPkgs specifies a set of initial packages to load.
- // The map keys are package paths.
- //
- // The map value indicates whether to load tests. If true, Load
- // will add and type-check two lists of files to the package:
- // non-test files followed by in-package *_test.go files. In
- // addition, it will append the external test package (if any)
- // to Program.Created.
- ImportPkgs map[string]bool
-
- // FindPackage is called during Load to create the build.Package
- // for a given import path from a given directory.
- // If FindPackage is nil, (*build.Context).Import is used.
- // A client may use this hook to adapt to a proprietary build
- // system that does not follow the "go build" layout
- // conventions, for example.
- //
- // It must be safe to call concurrently from multiple goroutines.
- FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error)
-
- // AfterTypeCheck is called immediately after a list of files
- // has been type-checked and appended to info.Files.
- //
- // This optional hook function is the earliest opportunity for
- // the client to observe the output of the type checker,
- // which may be useful to reduce analysis latency when loading
- // a large program.
- //
- // The function is permitted to modify info.Info, for instance
- // to clear data structures that are no longer needed, which can
- // dramatically reduce peak memory consumption.
- //
- // The function may be called twice for the same PackageInfo:
- // once for the files of the package and again for the
- // in-package test files.
- //
- // It must be safe to call concurrently from multiple goroutines.
- AfterTypeCheck func(info *PackageInfo, files []*ast.File)
-}
-
-// A PkgSpec specifies a non-importable package to be created by Load.
-// Files are processed first, but typically only one of Files and
-// Filenames is provided. The path needn't be globally unique.
-//
-// For vendoring purposes, the package's directory is the one that
-// contains the first file.
-type PkgSpec struct {
- Path string // package path ("" => use package declaration)
- Files []*ast.File // ASTs of already-parsed files
- Filenames []string // names of files to be parsed
-}
-
-// A Program is a Go program loaded from source as specified by a Config.
-type Program struct {
- Fset *token.FileSet // the file set for this program
-
- // Created[i] contains the initial package whose ASTs or
- // filenames were supplied by Config.CreatePkgs[i], followed by
- // the external test package, if any, of each package in
- // Config.ImportPkgs ordered by ImportPath.
- //
- // NOTE: these files must not import "C". Cgo preprocessing is
- // only performed on imported packages, not ad hoc packages.
- //
- // TODO(adonovan): we need to copy and adapt the logic of
- // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make
- // Config.Import and Config.Create methods return the same kind
- // of entity, essentially a build.Package.
- // Perhaps we can even reuse that type directly.
- Created []*PackageInfo
-
- // Imported contains the initially imported packages,
- // as specified by Config.ImportPkgs.
- Imported map[string]*PackageInfo
-
- // AllPackages contains the PackageInfo of every package
- // encountered by Load: all initial packages and all
- // dependencies, including incomplete ones.
- AllPackages map[*types.Package]*PackageInfo
-
- // importMap is the canonical mapping of package paths to
- // packages. It contains all Imported initial packages, but not
- // Created ones, and all imported dependencies.
- importMap map[string]*types.Package
-}
-
-// PackageInfo holds the ASTs and facts derived by the type-checker
-// for a single package.
-//
-// Not mutated once exposed via the API.
-//
-type PackageInfo struct {
- Pkg *types.Package
- Importable bool // true if 'import "Pkg.Path()"' would resolve to this
- TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors
- Files []*ast.File // syntax trees for the package's files
- Errors []error // non-nil if the package had errors
- types.Info // type-checker deductions.
- dir string // package directory
-
- checker *types.Checker // transient type-checker state
- errorFunc func(error)
-}
-
-func (info *PackageInfo) String() string { return info.Pkg.Path() }
-
-func (info *PackageInfo) appendError(err error) {
- if info.errorFunc != nil {
- info.errorFunc(err)
- } else {
- fmt.Fprintln(os.Stderr, err)
- }
- info.Errors = append(info.Errors, err)
-}
-
-func (conf *Config) fset() *token.FileSet {
- if conf.Fset == nil {
- conf.Fset = token.NewFileSet()
- }
- return conf.Fset
-}
-
-// ParseFile is a convenience function (intended for testing) that invokes
-// the parser using the Config's FileSet, which is initialized if nil.
-//
-// src specifies the parser input as a string, []byte, or io.Reader, and
-// filename is its apparent name. If src is nil, the contents of
-// filename are read from the file system.
-//
-func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
- // TODO(adonovan): use conf.build() etc like parseFiles does.
- return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
-}
-
-// FromArgsUsage is a partial usage message that applications calling
-// FromArgs may wish to include in their -help output.
-const FromArgsUsage = `
-<args> is a list of arguments denoting a set of initial packages.
-It may take one of two forms:
-
-1. A list of *.go source files.
-
- All of the specified files are loaded, parsed and type-checked
- as a single package. All the files must belong to the same directory.
-
-2. A list of import paths, each denoting a package.
-
- The package's directory is found relative to the $GOROOT and
- $GOPATH using similar logic to 'go build', and the *.go files in
- that directory are loaded, parsed and type-checked as a single
- package.
-
- In addition, all *_test.go files in the directory are then loaded
- and parsed. Those files whose package declaration equals that of
- the non-*_test.go files are included in the primary package. Test
- files whose package declaration ends with "_test" are type-checked
- as another package, the 'external' test package, so that a single
- import path may denote two packages. (Whether this behaviour is
- enabled is tool-specific, and may depend on additional flags.)
-
-A '--' argument terminates the list of packages.
-`
-
-// FromArgs interprets args as a set of initial packages to load from
-// source and updates the configuration. It returns the list of
-// unconsumed arguments.
-//
-// It is intended for use in command-line interfaces that require a
-// set of initial packages to be specified; see FromArgsUsage message
-// for details.
-//
-// Only superficial errors are reported at this stage; errors dependent
-// on I/O are detected during Load.
-//
-func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
- var rest []string
- for i, arg := range args {
- if arg == "--" {
- rest = args[i+1:]
- args = args[:i]
- break // consume "--" and return the remaining args
- }
- }
-
- if len(args) > 0 && strings.HasSuffix(args[0], ".go") {
- // Assume args is a list of a *.go files
- // denoting a single ad hoc package.
- for _, arg := range args {
- if !strings.HasSuffix(arg, ".go") {
- return nil, fmt.Errorf("named files must be .go files: %s", arg)
- }
- }
- conf.CreateFromFilenames("", args...)
- } else {
- // Assume args are directories each denoting a
- // package and (perhaps) an external test, iff xtest.
- for _, arg := range args {
- if xtest {
- conf.ImportWithTests(arg)
- } else {
- conf.Import(arg)
- }
- }
- }
-
- return rest, nil
-}
-
-// CreateFromFilenames is a convenience function that adds
-// a conf.CreatePkgs entry to create a package of the specified *.go
-// files.
-//
-func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
- conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
-}
-
-// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
-// entry to create package of the specified path and parsed files.
-//
-func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
- conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
-}
-
-// ImportWithTests is a convenience function that adds path to
-// ImportPkgs, the set of initial source packages located relative to
-// $GOPATH. The package will be augmented by any *_test.go files in
-// its directory that contain a "package x" (not "package x_test")
-// declaration.
-//
-// In addition, if any *_test.go files contain a "package x_test"
-// declaration, an additional package comprising just those files will
-// be added to CreatePkgs.
-//
-func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
-
-// Import is a convenience function that adds path to ImportPkgs, the
-// set of initial packages that will be imported from source.
-//
-func (conf *Config) Import(path string) { conf.addImport(path, false) }
-
-func (conf *Config) addImport(path string, tests bool) {
- if path == "C" {
- return // ignore; not a real package
- }
- if conf.ImportPkgs == nil {
- conf.ImportPkgs = make(map[string]bool)
- }
- conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests
-}
-
-// PathEnclosingInterval returns the PackageInfo and ast.Node that
-// contain source interval [start, end), and all the node's ancestors
-// up to the AST root. It searches all ast.Files of all packages in prog.
-// exact is defined as for astutil.PathEnclosingInterval.
-//
-// The zero value is returned if not found.
-//
-func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
- for _, info := range prog.AllPackages {
- for _, f := range info.Files {
- if f.Pos() == token.NoPos {
- // This can happen if the parser saw
- // too many errors and bailed out.
- // (Use parser.AllErrors to prevent that.)
- continue
- }
- if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) {
- continue
- }
- if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
- return info, path, exact
- }
- }
- }
- return nil, nil, false
-}
-
-// InitialPackages returns a new slice containing the set of initial
-// packages (Created + Imported) in unspecified order.
-//
-func (prog *Program) InitialPackages() []*PackageInfo {
- infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
- infos = append(infos, prog.Created...)
- for _, info := range prog.Imported {
- infos = append(infos, info)
- }
- return infos
-}
-
-// Package returns the ASTs and results of type checking for the
-// specified package.
-func (prog *Program) Package(path string) *PackageInfo {
- if info, ok := prog.AllPackages[prog.importMap[path]]; ok {
- return info
- }
- for _, info := range prog.Created {
- if path == info.Pkg.Path() {
- return info
- }
- }
- return nil
-}
-
-// ---------- Implementation ----------
-
-// importer holds the working state of the algorithm.
-type importer struct {
- conf *Config // the client configuration
- start time.Time // for logging
-
- progMu sync.Mutex // guards prog
- prog *Program // the resulting program
-
- // findpkg is a memoization of FindPackage.
- findpkgMu sync.Mutex // guards findpkg
- findpkg map[findpkgKey]*findpkgValue
-
- importedMu sync.Mutex // guards imported
- imported map[string]*importInfo // all imported packages (incl. failures) by import path
-
- // import dependency graph: graph[x][y] => x imports y
- //
- // Since non-importable packages cannot be cyclic, we ignore
- // their imports, thus we only need the subgraph over importable
- // packages. Nodes are identified by their import paths.
- graphMu sync.Mutex
- graph map[string]map[string]bool
-}
-
-type findpkgKey struct {
- importPath string
- fromDir string
- mode build.ImportMode
-}
-
-type findpkgValue struct {
- ready chan struct{} // closed to broadcast readiness
- bp *build.Package
- err error
-}
-
-// importInfo tracks the success or failure of a single import.
-//
-// Upon completion, exactly one of info and err is non-nil:
-// info on successful creation of a package, err otherwise.
-// A successful package may still contain type errors.
-//
-type importInfo struct {
- path string // import path
- info *PackageInfo // results of typechecking (including errors)
- complete chan struct{} // closed to broadcast that info is set.
-}
-
-// awaitCompletion blocks until ii is complete,
-// i.e. the info field is safe to inspect.
-func (ii *importInfo) awaitCompletion() {
- <-ii.complete // wait for close
-}
-
-// Complete marks ii as complete.
-// Its info and err fields will not be subsequently updated.
-func (ii *importInfo) Complete(info *PackageInfo) {
- if info == nil {
- panic("info == nil")
- }
- ii.info = info
- close(ii.complete)
-}
-
-type importError struct {
- path string // import path
- err error // reason for failure to create a package
-}
-
-// Load creates the initial packages specified by conf.{Create,Import}Pkgs,
-// loading their dependencies packages as needed.
-//
-// On success, Load returns a Program containing a PackageInfo for
-// each package. On failure, it returns an error.
-//
-// If AllowErrors is true, Load will return a Program even if some
-// packages contained I/O, parser or type errors, or if dependencies
-// were missing. (Such errors are accessible via PackageInfo.Errors. If
-// false, Load will fail if any package had an error.
-//
-// It is an error if no packages were loaded.
-//
-func (conf *Config) Load() (*Program, error) {
- // Create a simple default error handler for parse/type errors.
- if conf.TypeChecker.Error == nil {
- conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) }
- }
-
- // Set default working directory for relative package references.
- if conf.Cwd == "" {
- var err error
- conf.Cwd, err = os.Getwd()
- if err != nil {
- return nil, err
- }
- }
-
- // Install default FindPackage hook using go/build logic.
- if conf.FindPackage == nil {
- conf.FindPackage = (*build.Context).Import
- }
-
- prog := &Program{
- Fset: conf.fset(),
- Imported: make(map[string]*PackageInfo),
- importMap: make(map[string]*types.Package),
- AllPackages: make(map[*types.Package]*PackageInfo),
- }
-
- imp := importer{
- conf: conf,
- prog: prog,
- findpkg: make(map[findpkgKey]*findpkgValue),
- imported: make(map[string]*importInfo),
- start: time.Now(),
- graph: make(map[string]map[string]bool),
- }
-
- // -- loading proper (concurrent phase) --------------------------------
-
- var errpkgs []string // packages that contained errors
-
- // Load the initially imported packages and their dependencies,
- // in parallel.
- // No vendor check on packages imported from the command line.
- infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor)
- for _, ie := range importErrors {
- conf.TypeChecker.Error(ie.err) // failed to create package
- errpkgs = append(errpkgs, ie.path)
- }
- for _, info := range infos {
- prog.Imported[info.Pkg.Path()] = info
- }
-
- // Augment the designated initial packages by their tests.
- // Dependencies are loaded in parallel.
- var xtestPkgs []*build.Package
- for importPath, augment := range conf.ImportPkgs {
- if !augment {
- continue
- }
-
- // No vendor check on packages imported from command line.
- bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor)
- if err != nil {
- // Package not found, or can't even parse package declaration.
- // Already reported by previous loop; ignore it.
- continue
- }
-
- // Needs external test package?
- if len(bp.XTestGoFiles) > 0 {
- xtestPkgs = append(xtestPkgs, bp)
- }
-
- // Consult the cache using the canonical package path.
- path := bp.ImportPath
- imp.importedMu.Lock() // (unnecessary, we're sequential here)
- ii, ok := imp.imported[path]
- // Paranoid checks added due to issue #11012.
- if !ok {
- // Unreachable.
- // The previous loop called importAll and thus
- // startLoad for each path in ImportPkgs, which
- // populates imp.imported[path] with a non-zero value.
- panic(fmt.Sprintf("imported[%q] not found", path))
- }
- if ii == nil {
- // Unreachable.
- // The ii values in this loop are the same as in
- // the previous loop, which enforced the invariant
- // that at least one of ii.err and ii.info is non-nil.
- panic(fmt.Sprintf("imported[%q] == nil", path))
- }
- if ii.info == nil {
- // Unreachable.
- // awaitCompletion has the postcondition
- // ii.info != nil.
- panic(fmt.Sprintf("imported[%q].info = nil", path))
- }
- info := ii.info
- imp.importedMu.Unlock()
-
- // Parse the in-package test files.
- files, errs := imp.conf.parsePackageFiles(bp, 't')
- for _, err := range errs {
- info.appendError(err)
- }
-
- // The test files augmenting package P cannot be imported,
- // but may import packages that import P,
- // so we must disable the cycle check.
- imp.addFiles(info, files, false)
- }
-
- createPkg := func(path, dir string, files []*ast.File, errs []error) {
- info := imp.newPackageInfo(path, dir)
- for _, err := range errs {
- info.appendError(err)
- }
-
- // Ad hoc packages are non-importable,
- // so no cycle check is needed.
- // addFiles loads dependencies in parallel.
- imp.addFiles(info, files, false)
- prog.Created = append(prog.Created, info)
- }
-
- // Create packages specified by conf.CreatePkgs.
- for _, cp := range conf.CreatePkgs {
- files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode)
- files = append(files, cp.Files...)
-
- path := cp.Path
- if path == "" {
- if len(files) > 0 {
- path = files[0].Name.Name
- } else {
- path = "(unnamed)"
- }
- }
-
- dir := conf.Cwd
- if len(files) > 0 && files[0].Pos().IsValid() {
- dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name())
- }
- createPkg(path, dir, files, errs)
- }
-
- // Create external test packages.
- sort.Sort(byImportPath(xtestPkgs))
- for _, bp := range xtestPkgs {
- files, errs := imp.conf.parsePackageFiles(bp, 'x')
- createPkg(bp.ImportPath+"_test", bp.Dir, files, errs)
- }
-
- // -- finishing up (sequential) ----------------------------------------
-
- if len(prog.Imported)+len(prog.Created) == 0 {
- return nil, errors.New("no initial packages were loaded")
- }
-
- // Create infos for indirectly imported packages.
- // e.g. incomplete packages without syntax, loaded from export data.
- for _, obj := range prog.importMap {
- info := prog.AllPackages[obj]
- if info == nil {
- prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true}
- } else {
- // finished
- info.checker = nil
- info.errorFunc = nil
- }
- }
-
- if !conf.AllowErrors {
- // Report errors in indirectly imported packages.
- for _, info := range prog.AllPackages {
- if len(info.Errors) > 0 {
- errpkgs = append(errpkgs, info.Pkg.Path())
- }
- }
- if errpkgs != nil {
- var more string
- if len(errpkgs) > 3 {
- more = fmt.Sprintf(" and %d more", len(errpkgs)-3)
- errpkgs = errpkgs[:3]
- }
- return nil, fmt.Errorf("couldn't load packages due to errors: %s%s",
- strings.Join(errpkgs, ", "), more)
- }
- }
-
- markErrorFreePackages(prog.AllPackages)
-
- return prog, nil
-}
-
-type byImportPath []*build.Package
-
-func (b byImportPath) Len() int { return len(b) }
-func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath }
-func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
-
-// markErrorFreePackages sets the TransitivelyErrorFree flag on all
-// applicable packages.
-func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) {
- // Build the transpose of the import graph.
- importedBy := make(map[*types.Package]map[*types.Package]bool)
- for P := range allPackages {
- for _, Q := range P.Imports() {
- clients, ok := importedBy[Q]
- if !ok {
- clients = make(map[*types.Package]bool)
- importedBy[Q] = clients
- }
- clients[P] = true
- }
- }
-
- // Find all packages reachable from some error package.
- reachable := make(map[*types.Package]bool)
- var visit func(*types.Package)
- visit = func(p *types.Package) {
- if !reachable[p] {
- reachable[p] = true
- for q := range importedBy[p] {
- visit(q)
- }
- }
- }
- for _, info := range allPackages {
- if len(info.Errors) > 0 {
- visit(info.Pkg)
- }
- }
-
- // Mark the others as "transitively error-free".
- for _, info := range allPackages {
- if !reachable[info.Pkg] {
- info.TransitivelyErrorFree = true
- }
- }
-}
-
-// build returns the effective build context.
-func (conf *Config) build() *build.Context {
- if conf.Build != nil {
- return conf.Build
- }
- return &build.Default
-}
-
-// parsePackageFiles enumerates the files belonging to package path,
-// then loads, parses and returns them, plus a list of I/O or parse
-// errors that were encountered.
-//
-// 'which' indicates which files to include:
-// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
-// 't': include in-package *_test.go source files (TestGoFiles)
-// 'x': include external *_test.go source files. (XTestGoFiles)
-//
-func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
- if bp.ImportPath == "unsafe" {
- return nil, nil
- }
- var filenames []string
- switch which {
- case 'g':
- filenames = bp.GoFiles
- case 't':
- filenames = bp.TestGoFiles
- case 'x':
- filenames = bp.XTestGoFiles
- default:
- panic(which)
- }
-
- files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode)
-
- // Preprocess CgoFiles and parse the outputs (sequentially).
- if which == 'g' && bp.CgoFiles != nil {
- cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode)
- if err != nil {
- errs = append(errs, err)
- } else {
- files = append(files, cgofiles...)
- }
- }
-
- return files, errs
-}
-
-// doImport imports the package denoted by path.
-// It implements the types.Importer signature.
-//
-// It returns an error if a package could not be created
-// (e.g. go/build or parse error), but type errors are reported via
-// the types.Config.Error callback (the first of which is also saved
-// in the package's PackageInfo).
-//
-// Idempotent.
-//
-func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
- if to == "C" {
- // This should be unreachable, but ad hoc packages are
- // not currently subject to cgo preprocessing.
- // See https://golang.org/issue/11627.
- return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
- from.Pkg.Path())
- }
-
- bp, err := imp.findPackage(to, from.dir, 0)
- if err != nil {
- return nil, err
- }
-
- // The standard unsafe package is handled specially,
- // and has no PackageInfo.
- if bp.ImportPath == "unsafe" {
- return types.Unsafe, nil
- }
-
- // Look for the package in the cache using its canonical path.
- path := bp.ImportPath
- imp.importedMu.Lock()
- ii := imp.imported[path]
- imp.importedMu.Unlock()
- if ii == nil {
- panic("internal error: unexpected import: " + path)
- }
- if ii.info != nil {
- return ii.info.Pkg, nil
- }
-
- // Import of incomplete package: this indicates a cycle.
- fromPath := from.Pkg.Path()
- if cycle := imp.findPath(path, fromPath); cycle != nil {
- // Normalize cycle: start from alphabetically largest node.
- pos, start := -1, ""
- for i, s := range cycle {
- if pos < 0 || s > start {
- pos, start = i, s
- }
- }
- cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest
- cycle = append(cycle, cycle[0]) // add start node to end to show cycliness
- return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
- }
-
- panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
-}
-
-// findPackage locates the package denoted by the importPath in the
-// specified directory.
-func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
- // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
- // to avoid holding the lock around FindPackage.
- key := findpkgKey{importPath, fromDir, mode}
- imp.findpkgMu.Lock()
- v, ok := imp.findpkg[key]
- if ok {
- // cache hit
- imp.findpkgMu.Unlock()
-
- <-v.ready // wait for entry to become ready
- } else {
- // Cache miss: this goroutine becomes responsible for
- // populating the map entry and broadcasting its readiness.
- v = &findpkgValue{ready: make(chan struct{})}
- imp.findpkg[key] = v
- imp.findpkgMu.Unlock()
-
- ioLimit <- true
- v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
- <-ioLimit
-
- if _, ok := v.err.(*build.NoGoError); ok {
- v.err = nil // empty directory is not an error
- }
-
- close(v.ready) // broadcast ready condition
- }
- return v.bp, v.err
-}
-
-// importAll loads, parses, and type-checks the specified packages in
-// parallel and returns their completed importInfos in unspecified order.
-//
-// fromPath is the package path of the importing package, if it is
-// importable, "" otherwise. It is used for cycle detection.
-//
-// fromDir is the directory containing the import declaration that
-// caused these imports.
-//
-func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
- // TODO(adonovan): opt: do the loop in parallel once
- // findPackage is non-blocking.
- var pending []*importInfo
- for importPath := range imports {
- bp, err := imp.findPackage(importPath, fromDir, mode)
- if err != nil {
- errors = append(errors, importError{
- path: importPath,
- err: err,
- })
- continue
- }
- pending = append(pending, imp.startLoad(bp))
- }
-
- if fromPath != "" {
- // We're loading a set of imports.
- //
- // We must record graph edges from the importing package
- // to its dependencies, and check for cycles.
- imp.graphMu.Lock()
- deps, ok := imp.graph[fromPath]
- if !ok {
- deps = make(map[string]bool)
- imp.graph[fromPath] = deps
- }
- for _, ii := range pending {
- deps[ii.path] = true
- }
- imp.graphMu.Unlock()
- }
-
- for _, ii := range pending {
- if fromPath != "" {
- if cycle := imp.findPath(ii.path, fromPath); cycle != nil {
- // Cycle-forming import: we must not await its
- // completion since it would deadlock.
- //
- // We don't record the error in ii since
- // the error is really associated with the
- // cycle-forming edge, not the package itself.
- // (Also it would complicate the
- // invariants of importPath completion.)
- if trace {
- fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle)
- }
- continue
- }
- }
- ii.awaitCompletion()
- infos = append(infos, ii.info)
- }
-
- return infos, errors
-}
-
-// findPath returns an arbitrary path from 'from' to 'to' in the import
-// graph, or nil if there was none.
-func (imp *importer) findPath(from, to string) []string {
- imp.graphMu.Lock()
- defer imp.graphMu.Unlock()
-
- seen := make(map[string]bool)
- var search func(stack []string, importPath string) []string
- search = func(stack []string, importPath string) []string {
- if !seen[importPath] {
- seen[importPath] = true
- stack = append(stack, importPath)
- if importPath == to {
- return stack
- }
- for x := range imp.graph[importPath] {
- if p := search(stack, x); p != nil {
- return p
- }
- }
- }
- return nil
- }
- return search(make([]string, 0, 20), from)
-}
-
-// startLoad initiates the loading, parsing and type-checking of the
-// specified package and its dependencies, if it has not already begun.
-//
-// It returns an importInfo, not necessarily in a completed state. The
-// caller must call awaitCompletion() before accessing its info field.
-//
-// startLoad is concurrency-safe and idempotent.
-//
-func (imp *importer) startLoad(bp *build.Package) *importInfo {
- path := bp.ImportPath
- imp.importedMu.Lock()
- ii, ok := imp.imported[path]
- if !ok {
- ii = &importInfo{path: path, complete: make(chan struct{})}
- imp.imported[path] = ii
- go func() {
- info := imp.load(bp)
- ii.Complete(info)
- }()
- }
- imp.importedMu.Unlock()
-
- return ii
-}
-
-// load implements package loading by parsing Go source files
-// located by go/build.
-func (imp *importer) load(bp *build.Package) *PackageInfo {
- info := imp.newPackageInfo(bp.ImportPath, bp.Dir)
- info.Importable = true
- files, errs := imp.conf.parsePackageFiles(bp, 'g')
- for _, err := range errs {
- info.appendError(err)
- }
-
- imp.addFiles(info, files, true)
-
- imp.progMu.Lock()
- imp.prog.importMap[bp.ImportPath] = info.Pkg
- imp.progMu.Unlock()
-
- return info
-}
-
-// addFiles adds and type-checks the specified files to info, loading
-// their dependencies if needed. The order of files determines the
-// package initialization order. It may be called multiple times on the
-// same package. Errors are appended to the info.Errors field.
-//
-// cycleCheck determines whether the imports within files create
-// dependency edges that should be checked for potential cycles.
-//
-func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
- // Ensure the dependencies are loaded, in parallel.
- var fromPath string
- if cycleCheck {
- fromPath = info.Pkg.Path()
- }
- // TODO(adonovan): opt: make the caller do scanImports.
- // Callers with a build.Package can skip it.
- imp.importAll(fromPath, info.dir, scanImports(files), 0)
-
- if trace {
- fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n",
- time.Since(imp.start), info.Pkg.Path(), len(files))
- }
-
- // Don't call checker.Files on Unsafe, even with zero files,
- // because it would mutate the package, which is a global.
- if info.Pkg == types.Unsafe {
- if len(files) > 0 {
- panic(`"unsafe" package contains unexpected files`)
- }
- } else {
- // Ignore the returned (first) error since we
- // already collect them all in the PackageInfo.
- info.checker.Files(files)
- info.Files = append(info.Files, files...)
- }
-
- if imp.conf.AfterTypeCheck != nil {
- imp.conf.AfterTypeCheck(info, files)
- }
-
- if trace {
- fmt.Fprintf(os.Stderr, "%s: stop %q\n",
- time.Since(imp.start), info.Pkg.Path())
- }
-}
-
-func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
- var pkg *types.Package
- if path == "unsafe" {
- pkg = types.Unsafe
- } else {
- pkg = types.NewPackage(path, "")
- }
- info := &PackageInfo{
- Pkg: pkg,
- Info: types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
- },
- errorFunc: imp.conf.TypeChecker.Error,
- dir: dir,
- }
-
- // Copy the types.Config so we can vary it across PackageInfos.
- tc := imp.conf.TypeChecker
- tc.IgnoreFuncBodies = false
- if f := imp.conf.TypeCheckFuncBodies; f != nil {
- tc.IgnoreFuncBodies = !f(path)
- }
- tc.Importer = closure{imp, info}
- tc.Error = info.appendError // appendError wraps the user's Error function
-
- info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info)
- imp.progMu.Lock()
- imp.prog.AllPackages[pkg] = info
- imp.progMu.Unlock()
- return info
-}
-
-type closure struct {
- imp *importer
- info *PackageInfo
-}
-
-func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) }
diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go
deleted file mode 100644
index 7f38dd740..000000000
--- a/vendor/golang.org/x/tools/go/loader/util.go
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package loader
-
-import (
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "io"
- "os"
- "strconv"
- "sync"
-
- "golang.org/x/tools/go/buildutil"
-)
-
-// We use a counting semaphore to limit
-// the number of parallel I/O calls per process.
-var ioLimit = make(chan bool, 10)
-
-// parseFiles parses the Go source files within directory dir and
-// returns the ASTs of the ones that could be at least partially parsed,
-// along with a list of I/O and parse errors encountered.
-//
-// I/O is done via ctxt, which may specify a virtual file system.
-// displayPath is used to transform the filenames attached to the ASTs.
-//
-func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
- if displayPath == nil {
- displayPath = func(path string) string { return path }
- }
- var wg sync.WaitGroup
- n := len(files)
- parsed := make([]*ast.File, n)
- errors := make([]error, n)
- for i, file := range files {
- if !buildutil.IsAbsPath(ctxt, file) {
- file = buildutil.JoinPath(ctxt, dir, file)
- }
- wg.Add(1)
- go func(i int, file string) {
- ioLimit <- true // wait
- defer func() {
- wg.Done()
- <-ioLimit // signal
- }()
- var rd io.ReadCloser
- var err error
- if ctxt.OpenFile != nil {
- rd, err = ctxt.OpenFile(file)
- } else {
- rd, err = os.Open(file)
- }
- if err != nil {
- errors[i] = err // open failed
- return
- }
-
- // ParseFile may return both an AST and an error.
- parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
- rd.Close()
- }(i, file)
- }
- wg.Wait()
-
- // Eliminate nils, preserving order.
- var o int
- for _, f := range parsed {
- if f != nil {
- parsed[o] = f
- o++
- }
- }
- parsed = parsed[:o]
-
- o = 0
- for _, err := range errors {
- if err != nil {
- errors[o] = err
- o++
- }
- }
- errors = errors[:o]
-
- return parsed, errors
-}
-
-// scanImports returns the set of all import paths from all
-// import specs in the specified files.
-func scanImports(files []*ast.File) map[string]bool {
- imports := make(map[string]bool)
- for _, f := range files {
- for _, decl := range f.Decls {
- if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
- for _, spec := range decl.Specs {
- spec := spec.(*ast.ImportSpec)
-
- // NB: do not assume the program is well-formed!
- path, err := strconv.Unquote(spec.Path.Value)
- if err != nil {
- continue // quietly ignore the error
- }
- if path == "C" {
- continue // skip pseudopackage
- }
- imports[path] = true
- }
- }
- }
- }
- return imports
-}
-
-// ---------- Internal helpers ----------
-
-// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
-func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
- p := int(pos)
- base := f.Base()
- return base <= p && p < base+f.Size()
-}
diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go
index a1ba6a9eb..4bfe28a51 100644
--- a/vendor/golang.org/x/tools/go/packages/doc.go
+++ b/vendor/golang.org/x/tools/go/packages/doc.go
@@ -69,7 +69,7 @@ according to the conventions of the underlying build system.
See the Example function for typical usage.
*/
-package packages
+package packages // import "golang.org/x/tools/go/packages"
/*
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
index 9c895b389..308971132 100644
--- a/vendor/golang.org/x/tools/go/packages/golist.go
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -6,17 +6,16 @@ package packages
import (
"bytes"
+ "context"
"encoding/json"
"fmt"
"go/types"
- "io/ioutil"
"log"
"os"
"os/exec"
"path"
"path/filepath"
"reflect"
- "regexp"
"strconv"
"strings"
"sync"
@@ -24,8 +23,6 @@ import (
"unicode"
"golang.org/x/tools/go/internal/packagesdriver"
- "golang.org/x/tools/internal/gopathwalk"
- "golang.org/x/tools/internal/semver"
)
// debug controls verbose logging.
@@ -44,16 +41,21 @@ type responseDeduper struct {
dr *driverResponse
}
-// init fills in r with a driverResponse.
-func (r *responseDeduper) init(dr *driverResponse) {
- r.dr = dr
- r.seenRoots = map[string]bool{}
- r.seenPackages = map[string]*Package{}
+func newDeduper() *responseDeduper {
+ return &responseDeduper{
+ dr: &driverResponse{},
+ seenRoots: map[string]bool{},
+ seenPackages: map[string]*Package{},
+ }
+}
+
+// addAll fills in r with a driverResponse.
+func (r *responseDeduper) addAll(dr *driverResponse) {
for _, pkg := range dr.Packages {
- r.seenPackages[pkg.ID] = pkg
+ r.addPackage(pkg)
}
for _, root := range dr.Roots {
- r.seenRoots[root] = true
+ r.addRoot(root)
}
}
@@ -73,25 +75,47 @@ func (r *responseDeduper) addRoot(id string) {
r.dr.Roots = append(r.dr.Roots, id)
}
-// goInfo contains global information from the go tool.
-type goInfo struct {
- rootDirs map[string]string
- env goEnv
+type golistState struct {
+ cfg *Config
+ ctx context.Context
+
+ envOnce sync.Once
+ goEnvError error
+ goEnv map[string]string
+
+ rootsOnce sync.Once
+ rootDirsError error
+ rootDirs map[string]string
+
+ // vendorDirs caches the (non)existence of vendor directories.
+ vendorDirs map[string]bool
}
-type goEnv struct {
- modulesOn bool
+// getEnv returns Go environment variables. Only specific variables are
+// populated -- computing all of them is slow.
+func (state *golistState) getEnv() (map[string]string, error) {
+ state.envOnce.Do(func() {
+ var b *bytes.Buffer
+ b, state.goEnvError = state.invokeGo("env", "-json", "GOMOD", "GOPATH")
+ if state.goEnvError != nil {
+ return
+ }
+
+ state.goEnv = make(map[string]string)
+ decoder := json.NewDecoder(b)
+ if state.goEnvError = decoder.Decode(&state.goEnv); state.goEnvError != nil {
+ return
+ }
+ })
+ return state.goEnv, state.goEnvError
}
-func determineEnv(cfg *Config) goEnv {
- buf, err := invokeGo(cfg, "env", "GOMOD")
+// mustGetEnv is a convenience function that can be used if getEnv has already succeeded.
+func (state *golistState) mustGetEnv() map[string]string {
+ env, err := state.getEnv()
if err != nil {
- return goEnv{}
+ panic(fmt.Sprintf("mustGetEnv: %v", err))
}
- gomod := bytes.TrimSpace(buf.Bytes())
-
- env := goEnv{}
- env.modulesOn = len(gomod) > 0
return env
}
@@ -99,47 +123,38 @@ func determineEnv(cfg *Config) goEnv {
// the build system package structure.
// See driver for more details.
func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
- var sizes types.Sizes
+ // Make sure that any asynchronous go commands are killed when we return.
+ parentCtx := cfg.Context
+ if parentCtx == nil {
+ parentCtx = context.Background()
+ }
+ ctx, cancel := context.WithCancel(parentCtx)
+ defer cancel()
+
+ response := newDeduper()
+
+ // Fill in response.Sizes asynchronously if necessary.
var sizeserr error
var sizeswg sync.WaitGroup
if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 {
sizeswg.Add(1)
go func() {
- sizes, sizeserr = getSizes(cfg)
+ var sizes types.Sizes
+ sizes, sizeserr = packagesdriver.GetSizesGolist(ctx, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
+ // types.SizesFor always returns nil or a *types.StdSizes.
+ response.dr.Sizes, _ = sizes.(*types.StdSizes)
sizeswg.Done()
}()
}
- defer sizeswg.Wait()
-
- // start fetching rootDirs
- var info goInfo
- var rootDirsReady, envReady = make(chan struct{}), make(chan struct{})
- go func() {
- info.rootDirs = determineRootDirs(cfg)
- close(rootDirsReady)
- }()
- go func() {
- info.env = determineEnv(cfg)
- close(envReady)
- }()
- getGoInfo := func() *goInfo {
- <-rootDirsReady
- <-envReady
- return &info
- }
- // Ensure that we don't leak goroutines: Load is synchronous, so callers will
- // not expect it to access the fields of cfg after the call returns.
- defer getGoInfo()
-
- // always pass getGoInfo to golistDriver
- golistDriver := func(cfg *Config, patterns ...string) (*driverResponse, error) {
- return golistDriver(cfg, getGoInfo, patterns...)
+ state := &golistState{
+ cfg: cfg,
+ ctx: ctx,
+ vendorDirs: map[string]bool{},
}
// Determine files requested in contains patterns
var containFiles []string
- var packagesNamed []string
restPatterns := make([]string, 0, len(patterns))
// Extract file= and other [querytype]= patterns. Report an error if querytype
// doesn't exist.
@@ -155,8 +170,6 @@ extractQueries:
containFiles = append(containFiles, value)
case "pattern":
restPatterns = append(restPatterns, value)
- case "iamashamedtousethedisabledqueryname":
- packagesNamed = append(packagesNamed, value)
case "": // not a reserved query
restPatterns = append(restPatterns, pattern)
default:
@@ -172,52 +185,34 @@ extractQueries:
}
}
- response := &responseDeduper{}
- var err error
-
// See if we have any patterns to pass through to go list. Zero initial
// patterns also requires a go list call, since it's the equivalent of
// ".".
if len(restPatterns) > 0 || len(patterns) == 0 {
- dr, err := golistDriver(cfg, restPatterns...)
+ dr, err := state.createDriverResponse(restPatterns...)
if err != nil {
return nil, err
}
- response.init(dr)
- } else {
- response.init(&driverResponse{})
+ response.addAll(dr)
}
- sizeswg.Wait()
- if sizeserr != nil {
- return nil, sizeserr
- }
- // types.SizesFor always returns nil or a *types.StdSizes
- response.dr.Sizes, _ = sizes.(*types.StdSizes)
-
- var containsCandidates []string
-
if len(containFiles) != 0 {
- if err := runContainsQueries(cfg, golistDriver, response, containFiles, getGoInfo); err != nil {
- return nil, err
- }
- }
-
- if len(packagesNamed) != 0 {
- if err := runNamedQueries(cfg, golistDriver, response, packagesNamed); err != nil {
+ if err := state.runContainsQueries(response, containFiles); err != nil {
return nil, err
}
}
- modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
+ modifiedPkgs, needPkgs, err := state.processGolistOverlay(response)
if err != nil {
return nil, err
}
+
+ var containsCandidates []string
if len(containFiles) > 0 {
containsCandidates = append(containsCandidates, modifiedPkgs...)
containsCandidates = append(containsCandidates, needPkgs...)
}
- if err := addNeededOverlayPackages(cfg, golistDriver, response, needPkgs, getGoInfo); err != nil {
+ if err := state.addNeededOverlayPackages(response, needPkgs); err != nil {
return nil, err
}
// Check candidate packages for containFiles.
@@ -246,28 +241,32 @@ extractQueries:
}
}
+ sizeswg.Wait()
+ if sizeserr != nil {
+ return nil, sizeserr
+ }
return response.dr, nil
}
-func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string, getGoInfo func() *goInfo) error {
+func (state *golistState) addNeededOverlayPackages(response *responseDeduper, pkgs []string) error {
if len(pkgs) == 0 {
return nil
}
- dr, err := driver(cfg, pkgs...)
+ dr, err := state.createDriverResponse(pkgs...)
if err != nil {
return err
}
for _, pkg := range dr.Packages {
response.addPackage(pkg)
}
- _, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
+ _, needPkgs, err := state.processGolistOverlay(response)
if err != nil {
return err
}
- return addNeededOverlayPackages(cfg, driver, response, needPkgs, getGoInfo)
+ return state.addNeededOverlayPackages(response, needPkgs)
}
-func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, queries []string, goInfo func() *goInfo) error {
+func (state *golistState) runContainsQueries(response *responseDeduper, queries []string) error {
for _, query := range queries {
// TODO(matloob): Do only one query per directory.
fdir := filepath.Dir(query)
@@ -277,44 +276,17 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
if err != nil {
return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
}
- dirResponse, err := driver(cfg, pattern)
- if err != nil || (len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].Errors) == 1) {
- // There was an error loading the package. Try to load the file as an ad-hoc package.
- // Usually the error will appear in a returned package, but may not if we're in modules mode
- // and the ad-hoc is located outside a module.
+ dirResponse, err := state.createDriverResponse(pattern)
+
+ // If there was an error loading the package, or the package is returned
+ // with errors, try to load the file as an ad-hoc package.
+ // Usually the error will appear in a returned package, but may not if we're
+ // in module mode and the ad-hoc is located outside a module.
+ if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
+ len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error
- dirResponse, queryErr = driver(cfg, query)
- if queryErr != nil {
- // Return the original error if the attempt to fall back failed.
- return err
- }
- // If we get nothing back from `go list`, try to make this file into its own ad-hoc package.
- if len(dirResponse.Packages) == 0 && queryErr == nil {
- dirResponse.Packages = append(dirResponse.Packages, &Package{
- ID: "command-line-arguments",
- PkgPath: query,
- GoFiles: []string{query},
- CompiledGoFiles: []string{query},
- Imports: make(map[string]*Package),
- })
- dirResponse.Roots = append(dirResponse.Roots, "command-line-arguments")
- }
- // Special case to handle issue #33482:
- // If this is a file= query for ad-hoc packages where the file only exists on an overlay,
- // and exists outside of a module, add the file in for the package.
- if len(dirResponse.Packages) == 1 && (dirResponse.Packages[0].ID == "command-line-arguments" ||
- filepath.ToSlash(dirResponse.Packages[0].PkgPath) == filepath.ToSlash(query)) {
- if len(dirResponse.Packages[0].GoFiles) == 0 {
- filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
- // TODO(matloob): check if the file is outside of a root dir?
- for path := range cfg.Overlay {
- if path == filename {
- dirResponse.Packages[0].Errors = nil
- dirResponse.Packages[0].GoFiles = []string{path}
- dirResponse.Packages[0].CompiledGoFiles = []string{path}
- }
- }
- }
+ if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
+ return err // return the original error
}
}
isRoot := make(map[string]bool, len(dirResponse.Roots))
@@ -342,276 +314,47 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
return nil
}
-// modCacheRegexp splits a path in a module cache into module, module version, and package.
-var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
-
-func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error {
- // calling `go env` isn't free; bail out if there's nothing to do.
- if len(queries) == 0 {
- return nil
- }
- // Determine which directories are relevant to scan.
- roots, modRoot, err := roots(cfg)
- if err != nil {
- return err
- }
-
- // Scan the selected directories. Simple matches, from GOPATH/GOROOT
- // or the local module, can simply be "go list"ed. Matches from the
- // module cache need special treatment.
- var matchesMu sync.Mutex
- var simpleMatches, modCacheMatches []string
- add := func(root gopathwalk.Root, dir string) {
- // Walk calls this concurrently; protect the result slices.
- matchesMu.Lock()
- defer matchesMu.Unlock()
-
- path := dir
- if dir != root.Path {
- path = dir[len(root.Path)+1:]
- }
- if pathMatchesQueries(path, queries) {
- switch root.Type {
- case gopathwalk.RootModuleCache:
- modCacheMatches = append(modCacheMatches, path)
- case gopathwalk.RootCurrentModule:
- // We'd need to read go.mod to find the full
- // import path. Relative's easier.
- rel, err := filepath.Rel(cfg.Dir, dir)
- if err != nil {
- // This ought to be impossible, since
- // we found dir in the current module.
- panic(err)
- }
- simpleMatches = append(simpleMatches, "./"+rel)
- case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT:
- simpleMatches = append(simpleMatches, path)
- }
- }
- }
-
- startWalk := time.Now()
- gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
- cfg.Logf("%v for walk", time.Since(startWalk))
-
- // Weird special case: the top-level package in a module will be in
- // whatever directory the user checked the repository out into. It's
- // more reasonable for that to not match the package name. So, if there
- // are any Go files in the mod root, query it just to be safe.
- if modRoot != "" {
- rel, err := filepath.Rel(cfg.Dir, modRoot)
- if err != nil {
- panic(err) // See above.
- }
-
- files, err := ioutil.ReadDir(modRoot)
- if err != nil {
- panic(err) // See above.
- }
-
- for _, f := range files {
- if strings.HasSuffix(f.Name(), ".go") {
- simpleMatches = append(simpleMatches, rel)
- break
- }
- }
- }
-
- addResponse := func(r *driverResponse) {
- for _, pkg := range r.Packages {
- response.addPackage(pkg)
- for _, name := range queries {
- if pkg.Name == name {
- response.addRoot(pkg.ID)
- break
- }
- }
- }
- }
-
- if len(simpleMatches) != 0 {
- resp, err := driver(cfg, simpleMatches...)
- if err != nil {
- return err
- }
- addResponse(resp)
- }
-
- // Module cache matches are tricky. We want to avoid downloading new
- // versions of things, so we need to use the ones present in the cache.
- // go list doesn't accept version specifiers, so we have to write out a
- // temporary module, and do the list in that module.
- if len(modCacheMatches) != 0 {
- // Collect all the matches, deduplicating by major version
- // and preferring the newest.
- type modInfo struct {
- mod string
- major string
- }
- mods := make(map[modInfo]string)
- var imports []string
- for _, modPath := range modCacheMatches {
- matches := modCacheRegexp.FindStringSubmatch(modPath)
- mod, ver := filepath.ToSlash(matches[1]), matches[2]
- importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3]))
-
- major := semver.Major(ver)
- if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 {
- mods[modInfo{mod, major}] = ver
- }
-
- imports = append(imports, importPath)
- }
-
- // Build the temporary module.
- var gomod bytes.Buffer
- gomod.WriteString("module modquery\nrequire (\n")
- for mod, version := range mods {
- gomod.WriteString("\t" + mod.mod + " " + version + "\n")
- }
- gomod.WriteString(")\n")
-
- tmpCfg := *cfg
-
- // We're only trying to look at stuff in the module cache, so
- // disable the network. This should speed things up, and has
- // prevented errors in at least one case, #28518.
- tmpCfg.Env = append([]string{"GOPROXY=off"}, cfg.Env...)
-
- var err error
- tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery")
- if err != nil {
- return err
- }
- defer os.RemoveAll(tmpCfg.Dir)
-
- if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil {
- return fmt.Errorf("writing go.mod for module cache query: %v", err)
- }
-
- // Run the query, using the import paths calculated from the matches above.
- resp, err := driver(&tmpCfg, imports...)
- if err != nil {
- return fmt.Errorf("querying module cache matches: %v", err)
- }
- addResponse(resp)
- }
-
- return nil
-}
-
-func getSizes(cfg *Config) (types.Sizes, error) {
- return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
-}
-
-// roots selects the appropriate paths to walk based on the passed-in configuration,
-// particularly the environment and the presence of a go.mod in cfg.Dir's parents.
-func roots(cfg *Config) ([]gopathwalk.Root, string, error) {
- stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD")
+// adhocPackage attempts to load or construct an ad-hoc package for a given
+// query, if the original call to the driver produced inadequate results.
+func (state *golistState) adhocPackage(pattern, query string) (*driverResponse, error) {
+ response, err := state.createDriverResponse(query)
if err != nil {
- return nil, "", err
- }
-
- fields := strings.Split(stdout.String(), "\n")
- if len(fields) != 4 || len(fields[3]) != 0 {
- return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String())
- }
- goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2]
- var modDir string
- if gomod != "" {
- modDir = filepath.Dir(gomod)
+ return nil, err
}
-
- var roots []gopathwalk.Root
- // Always add GOROOT.
- roots = append(roots, gopathwalk.Root{
- Path: filepath.Join(goroot, "/src"),
- Type: gopathwalk.RootGOROOT,
- })
- // If modules are enabled, scan the module dir.
- if modDir != "" {
- roots = append(roots, gopathwalk.Root{
- Path: modDir,
- Type: gopathwalk.RootCurrentModule,
+ // If we get nothing back from `go list`,
+ // try to make this file into its own ad-hoc package.
+ // TODO(rstambler): Should this check against the original response?
+ if len(response.Packages) == 0 {
+ response.Packages = append(response.Packages, &Package{
+ ID: "command-line-arguments",
+ PkgPath: query,
+ GoFiles: []string{query},
+ CompiledGoFiles: []string{query},
+ Imports: make(map[string]*Package),
})
- }
- // Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode.
- for _, p := range gopath {
- if modDir != "" {
- roots = append(roots, gopathwalk.Root{
- Path: filepath.Join(p, "/pkg/mod"),
- Type: gopathwalk.RootModuleCache,
- })
- } else {
- roots = append(roots, gopathwalk.Root{
- Path: filepath.Join(p, "/src"),
- Type: gopathwalk.RootGOPATH,
- })
- }
- }
-
- return roots, modDir, nil
-}
-
-// These functions were copied from goimports. See further documentation there.
-
-// pathMatchesQueries is adapted from pkgIsCandidate.
-// TODO: is it reasonable to do Contains here, rather than an exact match on a path component?
-func pathMatchesQueries(path string, queries []string) bool {
- lastTwo := lastTwoComponents(path)
- for _, query := range queries {
- if strings.Contains(lastTwo, query) {
- return true
- }
- if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) {
- lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
- if strings.Contains(lastTwo, query) {
- return true
- }
- }
- }
- return false
-}
-
-// lastTwoComponents returns at most the last two path components
-// of v, using either / or \ as the path separator.
-func lastTwoComponents(v string) string {
- nslash := 0
- for i := len(v) - 1; i >= 0; i-- {
- if v[i] == '/' || v[i] == '\\' {
- nslash++
- if nslash == 2 {
- return v[i:]
+ response.Roots = append(response.Roots, "command-line-arguments")
+ }
+ // Handle special cases.
+ if len(response.Packages) == 1 {
+ // golang/go#33482: If this is a file= query for ad-hoc packages where
+ // the file only exists on an overlay, and exists outside of a module,
+ // add the file to the package and remove the errors.
+ if response.Packages[0].ID == "command-line-arguments" ||
+ filepath.ToSlash(response.Packages[0].PkgPath) == filepath.ToSlash(query) {
+ if len(response.Packages[0].GoFiles) == 0 {
+ filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
+ // TODO(matloob): check if the file is outside of a root dir?
+ for path := range state.cfg.Overlay {
+ if path == filename {
+ response.Packages[0].Errors = nil
+ response.Packages[0].GoFiles = []string{path}
+ response.Packages[0].CompiledGoFiles = []string{path}
+ }
+ }
}
}
}
- return v
-}
-
-func hasHyphenOrUpperASCII(s string) bool {
- for i := 0; i < len(s); i++ {
- b := s[i]
- if b == '-' || ('A' <= b && b <= 'Z') {
- return true
- }
- }
- return false
-}
-
-func lowerASCIIAndRemoveHyphen(s string) (ret string) {
- buf := make([]byte, 0, len(s))
- for i := 0; i < len(s); i++ {
- b := s[i]
- switch {
- case b == '-':
- continue
- case 'A' <= b && b <= 'Z':
- buf = append(buf, b+('a'-'A'))
- default:
- buf = append(buf, b)
- }
- }
- return string(buf)
+ return response, nil
}
// Fields must match go list;
@@ -656,10 +399,9 @@ func otherFiles(p *jsonPackage) [][]string {
return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
}
-// golistDriver uses the "go list" command to expand the pattern
-// words and return metadata for the specified packages. dir may be
-// "" and env may be nil, as per os/exec.Command.
-func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driverResponse, error) {
+// createDriverResponse uses the "go list" command to expand the pattern
+// words and return a response for the specified packages.
+func (state *golistState) createDriverResponse(words ...string) (*driverResponse, error) {
// go list uses the following identifiers in ImportPath and Imports:
//
// "p" -- importable package or main (command)
@@ -673,7 +415,7 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
// Run "go list" for complete
// information on the specified packages.
- buf, err := invokeGo(cfg, golistargs(cfg, words)...)
+ buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
if err != nil {
return nil, err
}
@@ -708,7 +450,10 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
// contained in a known module or GOPATH entry. This will allow the package to be
// properly "reclaimed" when overlays are processed.
if filepath.IsAbs(p.ImportPath) && p.Error != nil {
- pkgPath, ok := getPkgPath(cfg, p.ImportPath, rootsDirs)
+ pkgPath, ok, err := state.getPkgPath(p.ImportPath)
+ if err != nil {
+ return nil, err
+ }
if ok {
p.ImportPath = pkgPath
}
@@ -729,6 +474,7 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ forTest: p.ForTest,
}
// Work around https://golang.org/issue/28749:
@@ -811,8 +557,9 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
msg += fmt.Sprintf(": import stack: %v", p.Error.ImportStack)
}
pkg.Errors = append(pkg.Errors, Error{
- Pos: p.Error.Pos,
- Msg: msg,
+ Pos: p.Error.Pos,
+ Msg: msg,
+ Kind: ListError,
})
}
@@ -823,22 +570,20 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
}
// getPkgPath finds the package path of a directory if it's relative to a root directory.
-func getPkgPath(cfg *Config, dir string, goInfo func() *goInfo) (string, bool) {
+func (state *golistState) getPkgPath(dir string) (string, bool, error) {
absDir, err := filepath.Abs(dir)
if err != nil {
- cfg.Logf("error getting absolute path of %s: %v", dir, err)
- return "", false
+ return "", false, err
}
- for rdir, rpath := range goInfo().rootDirs {
- absRdir, err := filepath.Abs(rdir)
- if err != nil {
- cfg.Logf("error getting absolute path of %s: %v", rdir, err)
- continue
- }
+ roots, err := state.determineRootDirs()
+ if err != nil {
+ return "", false, err
+ }
+
+ for rdir, rpath := range roots {
// Make sure that the directory is in the module,
// to avoid creating a path relative to another module.
- if !strings.HasPrefix(absDir, absRdir) {
- cfg.Logf("%s does not have prefix %s", absDir, absRdir)
+ if !strings.HasPrefix(absDir, rdir) {
continue
}
// TODO(matloob): This doesn't properly handle symlinks.
@@ -853,11 +598,11 @@ func getPkgPath(cfg *Config, dir string, goInfo func() *goInfo) (string, bool) {
// Once the file is saved, gopls, or the next invocation of the tool will get the correct
// result straight from golist.
// TODO(matloob): Implement module tiebreaking?
- return path.Join(rpath, filepath.ToSlash(r)), true
+ return path.Join(rpath, filepath.ToSlash(r)), true, nil
}
- return filepath.ToSlash(r), true
+ return filepath.ToSlash(r), true, nil
}
- return "", false
+ return "", false, nil
}
// absJoin absolutizes and flattens the lists of files.
@@ -876,8 +621,8 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
func golistargs(cfg *Config, words []string) []string {
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
fullargs := []string{
- "list", "-e", "-json",
- fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypesInfo|NeedTypesSizes) != 0),
+ "-e", "-json",
+ fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
@@ -892,10 +637,17 @@ func golistargs(cfg *Config, words []string) []string {
}
// invokeGo returns the stdout of a go command invocation.
-func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
+func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, error) {
+ cfg := state.cfg
+
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
- cmd := exec.CommandContext(cfg.Context, "go", args...)
+ goArgs := []string{verb}
+ if verb != "env" {
+ goArgs = append(goArgs, cfg.BuildFlags...)
+ }
+ goArgs = append(goArgs, args...)
+ cmd := exec.CommandContext(state.ctx, "go", goArgs...)
// On darwin the cwd gets resolved to the real path, which breaks anything that
// expects the working directory to keep the original path, including the
// go command when dealing with modules.
@@ -907,7 +659,7 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
cmd.Stdout = stdout
cmd.Stderr = stderr
defer func(start time.Time) {
- cfg.Logf("%s for %v, stderr: <<%s>> stdout: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr, stdout)
+ cfg.Logf("%s for %v, stderr: <<%s>> stdout: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, goArgs...), stderr, stdout)
}(time.Now())
if err := cmd.Run(); err != nil {
diff --git a/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
index a7de62299..c6925c87b 100644
--- a/vendor/golang.org/x/tools/go/packages/golist_overlay.go
+++ b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
@@ -1,11 +1,11 @@
package packages
import (
- "bytes"
"encoding/json"
"fmt"
"go/parser"
"go/token"
+ "os"
"path/filepath"
"strconv"
"strings"
@@ -16,7 +16,7 @@ import (
// sometimes incorrect.
// TODO(matloob): Handle unsupported cases, including the following:
// - determining the correct package to add given a new import path
-func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func() *goInfo) (modifiedPkgs, needPkgs []string, err error) {
+func (state *golistState) processGolistOverlay(response *responseDeduper) (modifiedPkgs, needPkgs []string, err error) {
havePkgs := make(map[string]string) // importPath -> non-test package ID
needPkgsSet := make(map[string]bool)
modifiedPkgsSet := make(map[string]bool)
@@ -34,7 +34,7 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
// potentially modifying the transitive set of dependencies).
var overlayAddsImports bool
- for opath, contents := range cfg.Overlay {
+ for opath, contents := range state.cfg.Overlay {
base := filepath.Base(opath)
dir := filepath.Dir(opath)
var pkg *Package // if opath belongs to both a package and its test variant, this will be the test variant
@@ -86,7 +86,10 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if pkg == nil {
// Try to find the module or gopath dir the file is contained in.
// Then for modules, add the module opath to the beginning.
- pkgPath, ok := getPkgPath(cfg, dir, rootDirs)
+ pkgPath, ok, err := state.getPkgPath(dir)
+ if err != nil {
+ return nil, nil, err
+ }
if !ok {
break
}
@@ -114,6 +117,11 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if isTestFile && !isXTest && testVariantOf != nil {
pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...)
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...)
+ // Add the package under test and its imports to the test variant.
+ pkg.forTest = testVariantOf.PkgPath
+ for k, v := range testVariantOf.Imports {
+ pkg.Imports[k] = &Package{ID: v.ID}
+ }
}
}
}
@@ -130,42 +138,45 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
continue
}
for _, imp := range imports {
- _, found := pkg.Imports[imp]
- if !found {
- overlayAddsImports = true
- // TODO(matloob): Handle cases when the following block isn't correct.
- // These include imports of vendored packages, etc.
- id, ok := havePkgs[imp]
- if !ok {
- id = imp
- }
- pkg.Imports[imp] = &Package{ID: id}
- // Add dependencies to the non-test variant version of this package as wel.
- if testVariantOf != nil {
- testVariantOf.Imports[imp] = &Package{ID: id}
+ if _, found := pkg.Imports[imp]; found {
+ continue
+ }
+ overlayAddsImports = true
+ id, ok := havePkgs[imp]
+ if !ok {
+ var err error
+ id, err = state.resolveImport(dir, imp)
+ if err != nil {
+ return nil, nil, err
}
}
+ pkg.Imports[imp] = &Package{ID: id}
+ // Add dependencies to the non-test variant version of this package as well.
+ if testVariantOf != nil {
+ testVariantOf.Imports[imp] = &Package{ID: id}
+ }
}
- continue
}
- // toPkgPath tries to guess the package path given the id.
- // This isn't always correct -- it's certainly wrong for
- // vendored packages' paths.
- toPkgPath := func(id string) string {
- // TODO(matloob): Handle vendor paths.
- i := strings.IndexByte(id, ' ')
- if i >= 0 {
- return id[:i]
+ // toPkgPath guesses the package path given the id.
+ toPkgPath := func(sourceDir, id string) (string, error) {
+ if i := strings.IndexByte(id, ' '); i >= 0 {
+ return state.resolveImport(sourceDir, id[:i])
}
- return id
+ return state.resolveImport(sourceDir, id)
}
- // Do another pass now that new packages have been created to determine the
- // set of missing packages.
+ // Now that new packages have been created, do another pass to determine
+ // the new set of missing packages.
for _, pkg := range response.dr.Packages {
for _, imp := range pkg.Imports {
- pkgPath := toPkgPath(imp.ID)
+ if len(pkg.GoFiles) == 0 {
+ return nil, nil, fmt.Errorf("cannot resolve imports for package %q with no Go files", pkg.PkgPath)
+ }
+ pkgPath, err := toPkgPath(filepath.Dir(pkg.GoFiles[0]), imp.ID)
+ if err != nil {
+ return nil, nil, err
+ }
if _, ok := havePkgs[pkgPath]; !ok {
needPkgsSet[pkgPath] = true
}
@@ -185,6 +196,52 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
return modifiedPkgs, needPkgs, err
}
+// resolveImport finds the the ID of a package given its import path.
+// In particular, it will find the right vendored copy when in GOPATH mode.
+func (state *golistState) resolveImport(sourceDir, importPath string) (string, error) {
+ env, err := state.getEnv()
+ if err != nil {
+ return "", err
+ }
+ if env["GOMOD"] != "" {
+ return importPath, nil
+ }
+
+ searchDir := sourceDir
+ for {
+ vendorDir := filepath.Join(searchDir, "vendor")
+ exists, ok := state.vendorDirs[vendorDir]
+ if !ok {
+ info, err := os.Stat(vendorDir)
+ exists = err == nil && info.IsDir()
+ state.vendorDirs[vendorDir] = exists
+ }
+
+ if exists {
+ vendoredPath := filepath.Join(vendorDir, importPath)
+ if info, err := os.Stat(vendoredPath); err == nil && info.IsDir() {
+ // We should probably check for .go files here, but shame on anyone who fools us.
+ path, ok, err := state.getPkgPath(vendoredPath)
+ if err != nil {
+ return "", err
+ }
+ if ok {
+ return path, nil
+ }
+ }
+ }
+
+ // We know we've hit the top of the filesystem when we Dir / and get /,
+ // or C:\ and get C:\, etc.
+ next := filepath.Dir(searchDir)
+ if next == searchDir {
+ break
+ }
+ searchDir = next
+ }
+ return importPath, nil
+}
+
func hasTestFiles(p *Package) bool {
for _, f := range p.GoFiles {
if strings.HasSuffix(f, "_test.go") {
@@ -194,44 +251,59 @@ func hasTestFiles(p *Package) bool {
return false
}
-// determineRootDirs returns a mapping from directories code can be contained in to the
-// corresponding import path prefixes of those directories.
-// Its result is used to try to determine the import path for a package containing
-// an overlay file.
-func determineRootDirs(cfg *Config) map[string]string {
- // Assume modules first:
- out, err := invokeGo(cfg, "list", "-m", "-json", "all")
+// determineRootDirs returns a mapping from absolute directories that could
+// contain code to their corresponding import path prefixes.
+func (state *golistState) determineRootDirs() (map[string]string, error) {
+ env, err := state.getEnv()
if err != nil {
- return determineRootDirsGOPATH(cfg)
+ return nil, err
+ }
+ if env["GOMOD"] != "" {
+ state.rootsOnce.Do(func() {
+ state.rootDirs, state.rootDirsError = state.determineRootDirsModules()
+ })
+ } else {
+ state.rootsOnce.Do(func() {
+ state.rootDirs, state.rootDirsError = state.determineRootDirsGOPATH()
+ })
+ }
+ return state.rootDirs, state.rootDirsError
+}
+
+func (state *golistState) determineRootDirsModules() (map[string]string, error) {
+ out, err := state.invokeGo("list", "-m", "-json", "all")
+ if err != nil {
+ return nil, err
}
m := map[string]string{}
type jsonMod struct{ Path, Dir string }
for dec := json.NewDecoder(out); dec.More(); {
mod := new(jsonMod)
if err := dec.Decode(mod); err != nil {
- return m // Give up and return an empty map. Package won't be found for overlay.
+ return nil, err
}
if mod.Dir != "" && mod.Path != "" {
// This is a valid module; add it to the map.
- m[mod.Dir] = mod.Path
+ absDir, err := filepath.Abs(mod.Dir)
+ if err != nil {
+ return nil, err
+ }
+ m[absDir] = mod.Path
}
}
- return m
+ return m, nil
}
-func determineRootDirsGOPATH(cfg *Config) map[string]string {
+func (state *golistState) determineRootDirsGOPATH() (map[string]string, error) {
m := map[string]string{}
- out, err := invokeGo(cfg, "env", "GOPATH")
- if err != nil {
- // Could not determine root dir mapping. Everything is best-effort, so just return an empty map.
- // When we try to find the import path for a directory, there will be no root-dir match and
- // we'll give up.
- return m
- }
- for _, p := range filepath.SplitList(string(bytes.TrimSpace(out.Bytes()))) {
- m[filepath.Join(p, "src")] = ""
+ for _, dir := range filepath.SplitList(state.mustGetEnv()["GOPATH"]) {
+ absDir, err := filepath.Abs(dir)
+ if err != nil {
+ return nil, err
+ }
+ m[filepath.Join(absDir, "src")] = ""
}
- return m
+ return m, nil
}
func extractImports(filename string, contents []byte) ([]string, error) {
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
index 050cca43a..586c714f6 100644
--- a/vendor/golang.org/x/tools/go/packages/packages.go
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -23,6 +23,7 @@ import (
"sync"
"golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/internal/packagesinternal"
)
// A LoadMode controls the amount of detail to return when loading.
@@ -34,6 +35,9 @@ import (
// Load may return more information than requested.
type LoadMode int
+// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
+// NeedExportFile to make it consistent with the Package field it's adding.
+
const (
// NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota
@@ -51,7 +55,7 @@ const (
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
NeedDeps
- // NeedExportsFile adds ExportsFile.
+ // NeedExportsFile adds ExportFile.
NeedExportsFile
// NeedTypes adds Types, Fset, and IllTyped.
@@ -160,7 +164,7 @@ type Config struct {
Tests bool
// Overlay provides a mapping of absolute file paths to file contents.
- // If the file with the given path already exists, the parser will use the
+ // If the file with the given path already exists, the parser will use the
// alternative file contents provided by the map.
//
// Overlays provide incomplete support for when a given file doesn't
@@ -292,6 +296,15 @@ type Package struct {
// TypesSizes provides the effective size function for types in TypesInfo.
TypesSizes types.Sizes
+
+ // forTest is the package under test, if any.
+ forTest string
+}
+
+func init() {
+ packagesinternal.GetForTest = func(p interface{}) string {
+ return p.(*Package).forTest
+ }
}
// An Error describes a problem with a package's metadata, syntax, or types.
@@ -500,12 +513,23 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
if i, found := rootMap[pkg.ID]; found {
rootIndex = i
}
+
+ // Overlays can invalidate export data.
+ // TODO(matloob): make this check fine-grained based on dependencies on overlaid files
+ exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe"
+ // This package needs type information if the caller requested types and the package is
+ // either a root, or it's a non-root and the user requested dependencies ...
+ needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0))
+ // This package needs source if the call requested source (or types info, which implies source)
+ // and the package is either a root, or itas a non- root and the user requested dependencies...
+ needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
+ // ... or if we need types and the exportData is invalid. We fall back to (incompletely)
+ // typechecking packages from source if they fail to compile.
+ (ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
lpkg := &loaderPackage{
Package: pkg,
- needtypes: (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0,
- needsrc: (ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0 ||
- len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
- pkg.ExportFile == "" && pkg.PkgPath != "unsafe",
+ needtypes: needtypes,
+ needsrc: needsrc,
}
ld.pkgs[lpkg.ID] = lpkg
if rootIndex >= 0 {
@@ -713,7 +737,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
// which would then require that such created packages be explicitly
// inserted back into the Import graph as a final step after export data loading.
// The Diamond test exercises this case.
- if !lpkg.needtypes {
+ if !lpkg.needtypes && !lpkg.needsrc {
return
}
if !lpkg.needsrc {
diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go
index eb63ea22c..c7f754500 100644
--- a/vendor/golang.org/x/tools/go/types/typeutil/map.go
+++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go
@@ -4,7 +4,7 @@
// Package typeutil defines various utilities for types, such as Map,
// a mapping from types.Type to interface{} values.
-package typeutil
+package typeutil // import "golang.org/x/tools/go/types/typeutil"
import (
"bytes"
diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go
index f531024da..ac8f6b153 100644
--- a/vendor/golang.org/x/tools/internal/imports/fix.go
+++ b/vendor/golang.org/x/tools/internal/imports/fix.go
@@ -27,7 +27,6 @@ import (
"unicode/utf8"
"golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/gopathwalk"
)
@@ -82,7 +81,8 @@ type ImportFix struct {
// IdentName is the identifier that this fix will add or remove.
IdentName string
// FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
- FixType ImportFixType
+ FixType ImportFixType
+ Relevance int // see pkg
}
// An ImportInfo represents a single import statement.
@@ -585,62 +585,86 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
return fixes, nil
}
-// getCandidatePkgs returns the list of pkgs that are accessible from filename,
-// optionall filtered to only packages named pkgName.
-func getCandidatePkgs(pkgName, filename string, env *ProcessEnv) ([]*pkg, error) {
- // TODO(heschi): filter out current package. (Don't forget x_test can import x.)
+// Highest relevance, used for the standard library. Chosen arbitrarily to
+// match pre-existing gopls code.
+const MaxRelevance = 7
- var result []*pkg
+// getCandidatePkgs works with the passed callback to find all acceptable packages.
+// It deduplicates by import path, and uses a cached stdlib rather than reading
+// from disk.
+func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error {
+ notSelf := func(p *pkg) bool {
+ return p.packageName != filePkg || p.dir != filepath.Dir(filename)
+ }
// Start off with the standard library.
- for importPath := range stdlib {
- if pkgName != "" && path.Base(importPath) != pkgName {
- continue
- }
- result = append(result, &pkg{
+ for importPath, exports := range stdlib {
+ p := &pkg{
dir: filepath.Join(env.GOROOT, "src", importPath),
importPathShort: importPath,
packageName: path.Base(importPath),
- relevance: 0,
- })
- }
-
- // Exclude goroot results -- getting them is relatively expensive, not cached,
- // and generally redundant with the in-memory version.
- exclude := []gopathwalk.RootType{gopathwalk.RootGOROOT}
- // Only the go/packages resolver uses the first argument, and nobody uses that resolver.
- scannedPkgs, err := env.GetResolver().scan(nil, true, exclude)
- if err != nil {
- return nil, err
+ relevance: MaxRelevance,
+ }
+ if notSelf(p) && wrappedCallback.packageNameLoaded(p) {
+ wrappedCallback.exportsLoaded(p, exports)
+ }
}
+ var mu sync.Mutex
dupCheck := map[string]struct{}{}
- for _, pkg := range scannedPkgs {
- if pkgName != "" && pkg.packageName != pkgName {
- continue
- }
- if !canUse(filename, pkg.dir) {
- continue
- }
- if _, ok := dupCheck[pkg.importPathShort]; ok {
- continue
- }
- dupCheck[pkg.importPathShort] = struct{}{}
- result = append(result, pkg)
+
+ scanFilter := &scanCallback{
+ rootFound: func(root gopathwalk.Root) bool {
+ // Exclude goroot results -- getting them is relatively expensive, not cached,
+ // and generally redundant with the in-memory version.
+ return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root)
+ },
+ dirFound: wrappedCallback.dirFound,
+ packageNameLoaded: func(pkg *pkg) bool {
+ mu.Lock()
+ defer mu.Unlock()
+ if _, ok := dupCheck[pkg.importPathShort]; ok {
+ return false
+ }
+ dupCheck[pkg.importPathShort] = struct{}{}
+ return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ // If we're an x_test, load the package under test's test variant.
+ if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
+ var err error
+ _, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true)
+ if err != nil {
+ return
+ }
+ }
+ wrappedCallback.exportsLoaded(pkg, exports)
+ },
}
+ return env.GetResolver().scan(ctx, scanFilter)
+}
- // Sort first by relevance, then by package name, with import path as a tiebreaker.
- sort.Slice(result, func(i, j int) bool {
- pi, pj := result[i], result[j]
- if pi.relevance != pj.relevance {
- return pi.relevance < pj.relevance
- }
- if pi.packageName != pj.packageName {
- return pi.packageName < pj.packageName
- }
- return pi.importPathShort < pj.importPathShort
- })
+func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) map[string]int {
+ result := make(map[string]int)
+ for _, path := range paths {
+ result[path] = env.GetResolver().scoreImportPath(ctx, path)
+ }
+ return result
+}
- return result, nil
+func PrimeCache(ctx context.Context, env *ProcessEnv) error {
+ // Fully scan the disk for directories, but don't actually read any Go files.
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return false
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, "", "", env)
}
func candidateImportName(pkg *pkg) string {
@@ -651,23 +675,37 @@ func candidateImportName(pkg *pkg) string {
}
// getAllCandidates gets all of the candidates to be imported, regardless of if they are needed.
-func getAllCandidates(filename string, env *ProcessEnv) ([]ImportFix, error) {
- pkgs, err := getCandidatePkgs("", filename, env)
- if err != nil {
- return nil, err
- }
- result := make([]ImportFix, 0, len(pkgs))
- for _, pkg := range pkgs {
- result = append(result, ImportFix{
- StmtInfo: ImportInfo{
- ImportPath: pkg.importPathShort,
- Name: candidateImportName(pkg),
- },
- IdentName: pkg.packageName,
- FixType: AddImport,
- })
+func getAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ // Try the assumed package name first, then a simpler path match
+ // in case of packages named vN, which are not uncommon.
+ return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) ||
+ strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if !strings.HasPrefix(pkg.packageName, searchPrefix) {
+ return false
+ }
+ wrapped(ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ })
+ return false
+ },
}
- return result, nil
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
}
// A PackageExport is a package and its exports.
@@ -676,42 +714,34 @@ type PackageExport struct {
Exports []string
}
-func getPackageExports(completePackage, filename string, env *ProcessEnv) ([]PackageExport, error) {
- pkgs, err := getCandidatePkgs(completePackage, filename, env)
- if err != nil {
- return nil, err
- }
-
- results := make([]PackageExport, 0, len(pkgs))
- for _, pkg := range pkgs {
- fix := &ImportFix{
- StmtInfo: ImportInfo{
- ImportPath: pkg.importPathShort,
- Name: candidateImportName(pkg),
- },
- IdentName: pkg.packageName,
- FixType: AddImport,
- }
- var exports []string
- if e, ok := stdlib[pkg.importPathShort]; ok {
- exports = e
- } else {
- exports, err = loadExportsForPackage(context.Background(), env, completePackage, pkg)
- if err != nil {
- if env.Debug {
- env.Logf("while completing %q, error loading exports from %q: %v", completePackage, pkg.importPathShort, err)
- }
- continue
- }
- }
- sort.Strings(exports)
- results = append(results, PackageExport{
- Fix: fix,
- Exports: exports,
- })
+func getPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return pkg.packageName == searchPkg
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ sort.Strings(exports)
+ wrapped(PackageExport{
+ Fix: &ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ },
+ Exports: exports,
+ })
+ },
}
-
- return results, nil
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
}
// ProcessEnv contains environment variables and settings that affect the use of
@@ -725,15 +755,19 @@ type ProcessEnv struct {
GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS, GOSUMDB string
WorkingDir string
- // If true, use go/packages regardless of the environment.
- ForceGoPackages bool
-
// Logf is the default logger for the ProcessEnv.
Logf func(format string, args ...interface{})
resolver Resolver
}
+// CopyConfig copies the env's configuration into a new env.
+func (e *ProcessEnv) CopyConfig() *ProcessEnv {
+ copy := *e
+ copy.resolver = nil
+ return &copy
+}
+
func (e *ProcessEnv) env() []string {
env := os.Environ()
add := func(k, v string) {
@@ -757,39 +791,34 @@ func (e *ProcessEnv) GetResolver() Resolver {
if e.resolver != nil {
return e.resolver
}
- if e.ForceGoPackages {
- e.resolver = &goPackagesResolver{env: e}
- return e.resolver
- }
-
out, err := e.invokeGo("env", "GOMOD")
if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 {
- e.resolver = &gopathResolver{env: e}
+ e.resolver = newGopathResolver(e)
return e.resolver
}
- e.resolver = &ModuleResolver{env: e}
+ e.resolver = newModuleResolver(e)
return e.resolver
}
-func (e *ProcessEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config {
- return &packages.Config{
- Mode: mode,
- Dir: e.WorkingDir,
- Env: e.env(),
- }
-}
-
func (e *ProcessEnv) buildContext() *build.Context {
ctx := build.Default
ctx.GOROOT = e.GOROOT
ctx.GOPATH = e.GOPATH
- // As of Go 1.14, build.Context has a WorkingDir field
+ // As of Go 1.14, build.Context has a Dir field
// (see golang.org/issue/34860).
// Populate it only if present.
- if wd := reflect.ValueOf(&ctx).Elem().FieldByName("WorkingDir"); wd.IsValid() && wd.Kind() == reflect.String {
- wd.SetString(e.WorkingDir)
+ rc := reflect.ValueOf(&ctx).Elem()
+ dir := rc.FieldByName("Dir")
+ if !dir.IsValid() {
+ // Working drafts of Go 1.14 named the field "WorkingDir" instead.
+ // TODO(bcmills): Remove this case after the Go 1.14 beta has been released.
+ dir = rc.FieldByName("WorkingDir")
+ }
+ if dir.IsValid() && dir.Kind() == reflect.String {
+ dir.SetString(e.WorkingDir)
}
+
return &ctx
}
@@ -848,94 +877,65 @@ func addStdlibCandidates(pass *pass, refs references) {
type Resolver interface {
// loadPackageNames loads the package names in importPaths.
loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
- // scan finds (at least) the packages satisfying refs. If loadNames is true,
- // package names will be set on the results, and dirs whose package name
- // could not be determined will be excluded.
- scan(refs references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error)
+ // scan works with callback to search for packages. See scanCallback for details.
+ scan(ctx context.Context, callback *scanCallback) error
// loadExports returns the set of exported symbols in the package at dir.
// loadExports may be called concurrently.
- loadExports(ctx context.Context, pkg *pkg) (string, []string, error)
+ loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error)
+ // scoreImportPath returns the relevance for an import path.
+ scoreImportPath(ctx context.Context, path string) int
ClearForNewScan()
}
-// gopackagesResolver implements resolver for GOPATH and module workspaces using go/packages.
-type goPackagesResolver struct {
- env *ProcessEnv
-}
-
-func (r *goPackagesResolver) ClearForNewScan() {}
-
-func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
- if len(importPaths) == 0 {
- return nil, nil
- }
- cfg := r.env.newPackagesConfig(packages.LoadFiles)
- pkgs, err := packages.Load(cfg, importPaths...)
- if err != nil {
- return nil, err
- }
- names := map[string]string{}
- for _, pkg := range pkgs {
- names[VendorlessPath(pkg.PkgPath)] = pkg.Name
- }
- // We may not have found all the packages. Guess the rest.
- for _, path := range importPaths {
- if _, ok := names[path]; ok {
- continue
- }
- names[path] = ImportPathToAssumedName(path)
- }
- return names, nil
-
-}
-
-func (r *goPackagesResolver) scan(refs references, _ bool, _ []gopathwalk.RootType) ([]*pkg, error) {
- var loadQueries []string
- for pkgName := range refs {
- loadQueries = append(loadQueries, "iamashamedtousethedisabledqueryname="+pkgName)
- }
- sort.Strings(loadQueries)
- cfg := r.env.newPackagesConfig(packages.LoadFiles)
- goPackages, err := packages.Load(cfg, loadQueries...)
- if err != nil {
- return nil, err
- }
-
- var scan []*pkg
- for _, goPackage := range goPackages {
- scan = append(scan, &pkg{
- dir: filepath.Dir(goPackage.CompiledGoFiles[0]),
- importPathShort: VendorlessPath(goPackage.PkgPath),
- goPackage: goPackage,
- packageName: goPackage.Name,
- })
- }
- return scan, nil
+// A scanCallback controls a call to scan and receives its results.
+// In general, minor errors will be silently discarded; a user should not
+// expect to receive a full series of calls for everything.
+type scanCallback struct {
+ // rootFound is called before scanning a new root dir. If it returns true,
+ // the root will be scanned. Returning false will not necessarily prevent
+ // directories from that root making it to dirFound.
+ rootFound func(gopathwalk.Root) bool
+ // dirFound is called when a directory is found that is possibly a Go package.
+ // pkg will be populated with everything except packageName.
+ // If it returns true, the package's name will be loaded.
+ dirFound func(pkg *pkg) bool
+ // packageNameLoaded is called when a package is found and its name is loaded.
+ // If it returns true, the package's exports will be loaded.
+ packageNameLoaded func(pkg *pkg) bool
+ // exportsLoaded is called when a package's exports have been loaded.
+ exportsLoaded func(pkg *pkg, exports []string)
}
-func (r *goPackagesResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) {
- if pkg.goPackage == nil {
- return "", nil, fmt.Errorf("goPackage not set")
- }
- var exports []string
- fset := token.NewFileSet()
- for _, fname := range pkg.goPackage.CompiledGoFiles {
- f, err := parser.ParseFile(fset, fname, nil, 0)
- if err != nil {
- return "", nil, fmt.Errorf("parsing %s: %v", fname, err)
- }
- for name := range f.Scope.Objects {
- if ast.IsExported(name) {
- exports = append(exports, name)
+func addExternalCandidates(pass *pass, refs references, filename string) error {
+ var mu sync.Mutex
+ found := make(map[string][]pkgDistance)
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true // We want everything.
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, refs, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if _, want := refs[pkg.packageName]; !want {
+ return false
}
- }
+ if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
+ // The candidate is in the same directory and has the
+ // same package name. Don't try to import ourselves.
+ return false
+ }
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ mu.Lock()
+ defer mu.Unlock()
+ found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
+ return false // We'll do our own loading after we sort.
+ },
}
- return pkg.goPackage.Name, exports, nil
-}
-
-func addExternalCandidates(pass *pass, refs references, filename string) error {
- dirScan, err := pass.env.GetResolver().scan(refs, false, nil)
+ err := pass.env.GetResolver().scan(context.Background(), callback)
if err != nil {
return err
}
@@ -962,7 +962,7 @@ func addExternalCandidates(pass *pass, refs references, filename string) error {
go func(pkgName string, symbols map[string]bool) {
defer wg.Done()
- found, err := findImport(ctx, pass, dirScan, pkgName, symbols, filename)
+ found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename)
if err != nil {
firstErrOnce.Do(func() {
@@ -1033,24 +1033,36 @@ func ImportPathToAssumedName(importPath string) string {
// gopathResolver implements resolver for GOPATH workspaces.
type gopathResolver struct {
- env *ProcessEnv
- cache *dirInfoCache
+ env *ProcessEnv
+ walked bool
+ cache *dirInfoCache
+ scanSema chan struct{} // scanSema prevents concurrent scans.
}
-func (r *gopathResolver) init() {
- if r.cache == nil {
- r.cache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
- }
+func newGopathResolver(env *ProcessEnv) *gopathResolver {
+ r := &gopathResolver{
+ env: env,
+ cache: &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ },
+ scanSema: make(chan struct{}, 1),
}
+ r.scanSema <- struct{}{}
+ return r
}
func (r *gopathResolver) ClearForNewScan() {
- r.cache = nil
+ <-r.scanSema
+ r.cache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ r.walked = false
+ r.scanSema <- struct{}{}
}
func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
- r.init()
names := map[string]string{}
for _, path := range importPaths {
names[path] = importPathToName(r.env, path, srcDir)
@@ -1130,7 +1142,6 @@ func packageDirToName(dir string) (packageName string, err error) {
}
type pkg struct {
- goPackage *packages.Package
dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
importPathShort string // vendorless import path ("net/http", "a/b")
packageName string // package name loaded from source if requested
@@ -1178,8 +1189,7 @@ func distance(basepath, targetpath string) int {
return strings.Count(p, string(filepath.Separator)) + 1
}
-func (r *gopathResolver) scan(_ references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error) {
- r.init()
+func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error {
add := func(root gopathwalk.Root, dir string) {
// We assume cached directories have not changed. We can skip them and their
// children.
@@ -1196,56 +1206,84 @@ func (r *gopathResolver) scan(_ references, loadNames bool, exclude []gopathwalk
}
r.cache.Store(dir, info)
}
- roots := filterRoots(gopathwalk.SrcDirsRoots(r.env.buildContext()), exclude)
- gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false})
- var result []*pkg
- for _, dir := range r.cache.Keys() {
- info, ok := r.cache.Load(dir)
- if !ok {
- continue
- }
- if loadNames {
- var err error
- info, err = r.cache.CachePackageName(info)
- if err != nil {
- continue
- }
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
}
p := &pkg{
importPathShort: info.nonCanonicalImportPath,
- dir: dir,
- relevance: 1,
- packageName: info.packageName,
+ dir: info.dir,
+ relevance: MaxRelevance - 1,
}
if info.rootType == gopathwalk.RootGOROOT {
- p.relevance = 0
+ p.relevance = MaxRelevance
+ }
+
+ if !callback.dirFound(p) {
+ return
+ }
+ var err error
+ p.packageName, err = r.cache.CachePackageName(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.packageNameLoaded(p) {
+ return
+ }
+ if _, exports, err := r.loadExports(ctx, p, false); err == nil {
+ callback.exportsLoaded(p, exports)
}
- result = append(result, p)
}
- return result, nil
+ stop := r.cache.ScanAndListen(ctx, processDir)
+ defer stop()
+ // The callback is not necessarily safe to use in the goroutine below. Process roots eagerly.
+ roots := filterRoots(gopathwalk.SrcDirsRoots(r.env.buildContext()), callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false})
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
+ }
+ return nil
+}
+
+func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) int {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
+ }
+ return MaxRelevance - 1
}
-func filterRoots(roots []gopathwalk.Root, exclude []gopathwalk.RootType) []gopathwalk.Root {
+func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root {
var result []gopathwalk.Root
-outer:
for _, root := range roots {
- for _, i := range exclude {
- if i == root.Type {
- continue outer
- }
+ if !include(root) {
+ continue
}
result = append(result, root)
}
return result
}
-func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) {
- r.init()
- if info, ok := r.cache.Load(pkg.dir); ok {
+func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+ if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
return r.cache.CacheExports(ctx, r.env, info)
}
- return loadExportsFromFiles(ctx, r.env, pkg.dir)
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
}
// VendorlessPath returns the devendorized version of the import path ipath.
@@ -1261,7 +1299,7 @@ func VendorlessPath(ipath string) string {
return ipath
}
-func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (string, []string, error) {
+func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) {
var exports []string
// Look for non-test, buildable .go files which could provide exports.
@@ -1272,7 +1310,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
var files []os.FileInfo
for _, fi := range all {
name := fi.Name()
- if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
+ if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) {
continue
}
match, err := env.buildContext().MatchFile(dir, fi.Name())
@@ -1305,6 +1343,10 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
// handled by MatchFile above.
continue
}
+ if includeTest && strings.HasSuffix(f.Name.Name, "_test") {
+ // x_test package. We want internal test files only.
+ continue
+ }
pkgName = f.Name.Name
for name := range f.Scope.Objects {
if ast.IsExported(name) {
@@ -1323,29 +1365,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
// findImport searches for a package with the given symbols.
// If no package is found, findImport returns ("", false, nil)
-func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
- pkgDir, err := filepath.Abs(filename)
- if err != nil {
- return nil, err
- }
- pkgDir = filepath.Dir(pkgDir)
-
- // Find candidate packages, looking only at their directory names first.
- var candidates []pkgDistance
- for _, pkg := range dirScan {
- if pkg.dir == pkgDir && pass.f.Name.Name == pkgName {
- // The candidate is in the same directory and has the
- // same package name. Don't try to import ourselves.
- continue
- }
- if pkgIsCandidate(filename, pkgName, pkg) {
- candidates = append(candidates, pkgDistance{
- pkg: pkg,
- distance: distance(pkgDir, pkg.dir),
- })
- }
- }
-
+func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
// Sort the candidates by their import package length,
// assuming that shorter package names are better than long
// ones. Note that this sorts by the de-vendored name, so
@@ -1358,7 +1378,6 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
}
// Collect exports for packages with matching names.
-
rescv := make([]chan *pkg, len(candidates))
for i := range candidates {
rescv[i] = make(chan *pkg, 1)
@@ -1393,7 +1412,9 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
}
- exports, err := loadExportsForPackage(ctx, pass.env, pkgName, c.pkg)
+ // If we're an x_test, load the package under test's test variant.
+ includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir
+ _, exports, err := pass.env.GetResolver().loadExports(ctx, c.pkg, includeTest)
if err != nil {
if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
@@ -1430,17 +1451,6 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
return nil, nil
}
-func loadExportsForPackage(ctx context.Context, env *ProcessEnv, expectPkg string, pkg *pkg) ([]string, error) {
- pkgName, exports, err := env.GetResolver().loadExports(ctx, pkg)
- if err != nil {
- return nil, err
- }
- if expectPkg != pkgName {
- return nil, fmt.Errorf("dir %v is package %v, wanted %v", pkg.dir, pkgName, expectPkg)
- }
- return exports, err
-}
-
// pkgIsCandidate reports whether pkg is a candidate for satisfying the
// finding which package pkgIdent in the file named by filename is trying
// to refer to.
@@ -1453,7 +1463,7 @@ func loadExportsForPackage(ctx context.Context, env *ProcessEnv, expectPkg strin
// filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if
// searching for "client.New")
-func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
+func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
// Check "internal" and "vendor" visibility:
if !canUse(filename, pkg.dir) {
return false
@@ -1471,17 +1481,18 @@ func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
// "bar", which is strongly discouraged
// anyway. There's no reason goimports needs
// to be slow just to accommodate that.
- lastTwo := lastTwoComponents(pkg.importPathShort)
- if strings.Contains(lastTwo, pkgIdent) {
- return true
- }
- if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
- lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ for pkgIdent := range refs {
+ lastTwo := lastTwoComponents(pkg.importPathShort)
if strings.Contains(lastTwo, pkgIdent) {
return true
}
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ }
}
-
return false
}
diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go
index b5c975495..2e7a317e5 100644
--- a/vendor/golang.org/x/tools/internal/imports/imports.go
+++ b/vendor/golang.org/x/tools/internal/imports/imports.go
@@ -11,6 +11,7 @@ package imports
import (
"bufio"
"bytes"
+ "context"
"fmt"
"go/ast"
"go/build"
@@ -115,23 +116,23 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
return formatFile(fileSet, file, src, nil, opt)
}
-// GetAllCandidates gets all of the standard library candidate packages to import in
-// sorted order on import path.
-func GetAllCandidates(filename string, opt *Options) (pkgs []ImportFix, err error) {
- _, opt, err = initialize(filename, nil, opt)
+// GetAllCandidates gets all of the packages starting with prefix that can be
+// imported by filename, sorted by import path.
+func GetAllCandidates(ctx context.Context, callback func(ImportFix), searchPrefix, filename, filePkg string, opt *Options) error {
+ _, opt, err := initialize(filename, []byte{}, opt)
if err != nil {
- return nil, err
+ return err
}
- return getAllCandidates(filename, opt.Env)
+ return getAllCandidates(ctx, callback, searchPrefix, filename, filePkg, opt.Env)
}
// GetPackageExports returns all known packages with name pkg and their exports.
-func GetPackageExports(pkg, filename string, opt *Options) (exports []PackageExport, err error) {
- _, opt, err = initialize(filename, nil, opt)
+func GetPackageExports(ctx context.Context, callback func(PackageExport), searchPkg, filename, filePkg string, opt *Options) error {
+ _, opt, err := initialize(filename, []byte{}, opt)
if err != nil {
- return nil, err
+ return err
}
- return getPackageExports(pkg, filename, opt.Env)
+ return getPackageExports(ctx, callback, searchPkg, filename, filePkg, opt.Env)
}
// initialize sets the values for opt and src.
diff --git a/vendor/golang.org/x/tools/internal/imports/mkindex.go b/vendor/golang.org/x/tools/internal/imports/mkindex.go
deleted file mode 100644
index ef8c0d287..000000000
--- a/vendor/golang.org/x/tools/internal/imports/mkindex.go
+++ /dev/null
@@ -1,173 +0,0 @@
-// +build ignore
-
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Command mkindex creates the file "pkgindex.go" containing an index of the Go
-// standard library. The file is intended to be built as part of the imports
-// package, so that the package may be used in environments where a GOROOT is
-// not available (such as App Engine).
-package imports
-
-import (
- "bytes"
- "fmt"
- "go/ast"
- "go/build"
- "go/format"
- "go/parser"
- "go/token"
- "io/ioutil"
- "log"
- "os"
- "path"
- "path/filepath"
- "strings"
-)
-
-var (
- pkgIndex = make(map[string][]pkg)
- exports = make(map[string]map[string]bool)
-)
-
-func main() {
- // Don't use GOPATH.
- ctx := build.Default
- ctx.GOPATH = ""
-
- // Populate pkgIndex global from GOROOT.
- for _, path := range ctx.SrcDirs() {
- f, err := os.Open(path)
- if err != nil {
- log.Print(err)
- continue
- }
- children, err := f.Readdir(-1)
- f.Close()
- if err != nil {
- log.Print(err)
- continue
- }
- for _, child := range children {
- if child.IsDir() {
- loadPkg(path, child.Name())
- }
- }
- }
- // Populate exports global.
- for _, ps := range pkgIndex {
- for _, p := range ps {
- e := loadExports(p.dir)
- if e != nil {
- exports[p.dir] = e
- }
- }
- }
-
- // Construct source file.
- var buf bytes.Buffer
- fmt.Fprint(&buf, pkgIndexHead)
- fmt.Fprintf(&buf, "var pkgIndexMaster = %#v\n", pkgIndex)
- fmt.Fprintf(&buf, "var exportsMaster = %#v\n", exports)
- src := buf.Bytes()
-
- // Replace main.pkg type name with pkg.
- src = bytes.Replace(src, []byte("main.pkg"), []byte("pkg"), -1)
- // Replace actual GOROOT with "/go".
- src = bytes.Replace(src, []byte(ctx.GOROOT), []byte("/go"), -1)
- // Add some line wrapping.
- src = bytes.Replace(src, []byte("}, "), []byte("},\n"), -1)
- src = bytes.Replace(src, []byte("true, "), []byte("true,\n"), -1)
-
- var err error
- src, err = format.Source(src)
- if err != nil {
- log.Fatal(err)
- }
-
- // Write out source file.
- err = ioutil.WriteFile("pkgindex.go", src, 0644)
- if err != nil {
- log.Fatal(err)
- }
-}
-
-const pkgIndexHead = `package imports
-
-func init() {
- pkgIndexOnce.Do(func() {
- pkgIndex.m = pkgIndexMaster
- })
- loadExports = func(dir string) map[string]bool {
- return exportsMaster[dir]
- }
-}
-`
-
-type pkg struct {
- importpath string // full pkg import path, e.g. "net/http"
- dir string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt"
-}
-
-var fset = token.NewFileSet()
-
-func loadPkg(root, importpath string) {
- shortName := path.Base(importpath)
- if shortName == "testdata" {
- return
- }
-
- dir := filepath.Join(root, importpath)
- pkgIndex[shortName] = append(pkgIndex[shortName], pkg{
- importpath: importpath,
- dir: dir,
- })
-
- pkgDir, err := os.Open(dir)
- if err != nil {
- return
- }
- children, err := pkgDir.Readdir(-1)
- pkgDir.Close()
- if err != nil {
- return
- }
- for _, child := range children {
- name := child.Name()
- if name == "" {
- continue
- }
- if c := name[0]; c == '.' || ('0' <= c && c <= '9') {
- continue
- }
- if child.IsDir() {
- loadPkg(root, filepath.Join(importpath, name))
- }
- }
-}
-
-func loadExports(dir string) map[string]bool {
- exports := make(map[string]bool)
- buildPkg, err := build.ImportDir(dir, 0)
- if err != nil {
- if strings.Contains(err.Error(), "no buildable Go source files in") {
- return nil
- }
- log.Printf("could not import %q: %v", dir, err)
- return nil
- }
- for _, file := range buildPkg.GoFiles {
- f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
- if err != nil {
- log.Printf("could not parse %q: %v", file, err)
- continue
- }
- for name := range f.Scope.Objects {
- if ast.IsExported(name) {
- exports[name] = true
- }
- }
- }
- return exports
-}
diff --git a/vendor/golang.org/x/tools/internal/imports/mkstdlib.go b/vendor/golang.org/x/tools/internal/imports/mkstdlib.go
deleted file mode 100644
index 39b86ccd9..000000000
--- a/vendor/golang.org/x/tools/internal/imports/mkstdlib.go
+++ /dev/null
@@ -1,128 +0,0 @@
-// +build ignore
-
-// mkstdlib generates the zstdlib.go file, containing the Go standard
-// library API symbols. It's baked into the binary to avoid scanning
-// GOPATH in the common case.
-package main
-
-import (
- "bufio"
- "bytes"
- "fmt"
- "go/format"
- "io"
- "io/ioutil"
- "log"
- "os"
- "os/exec"
- "path/filepath"
- "regexp"
- "runtime"
- "sort"
-)
-
-func mustOpen(name string) io.Reader {
- f, err := os.Open(name)
- if err != nil {
- log.Fatal(err)
- }
- return f
-}
-
-func api(base string) string {
- return filepath.Join(runtime.GOROOT(), "api", base)
-}
-
-var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`)
-
-var unsafeSyms = map[string]bool{"Alignof": true, "ArbitraryType": true, "Offsetof": true, "Pointer": true, "Sizeof": true}
-
-func main() {
- var buf bytes.Buffer
- outf := func(format string, args ...interface{}) {
- fmt.Fprintf(&buf, format, args...)
- }
- outf("// Code generated by mkstdlib.go. DO NOT EDIT.\n\n")
- outf("package imports\n")
- outf("var stdlib = map[string][]string{\n")
- f := io.MultiReader(
- mustOpen(api("go1.txt")),
- mustOpen(api("go1.1.txt")),
- mustOpen(api("go1.2.txt")),
- mustOpen(api("go1.3.txt")),
- mustOpen(api("go1.4.txt")),
- mustOpen(api("go1.5.txt")),
- mustOpen(api("go1.6.txt")),
- mustOpen(api("go1.7.txt")),
- mustOpen(api("go1.8.txt")),
- mustOpen(api("go1.9.txt")),
- mustOpen(api("go1.10.txt")),
- mustOpen(api("go1.11.txt")),
- mustOpen(api("go1.12.txt")),
- mustOpen(api("go1.13.txt")),
-
- // The API of the syscall/js package needs to be computed explicitly,
- // because it's not included in the GOROOT/api/go1.*.txt files at this time.
- syscallJSAPI(),
- )
- sc := bufio.NewScanner(f)
-
- pkgs := map[string]map[string]bool{
- "unsafe": unsafeSyms,
- }
- paths := []string{"unsafe"}
-
- for sc.Scan() {
- l := sc.Text()
- if m := sym.FindStringSubmatch(l); m != nil {
- path, sym := m[1], m[2]
-
- if _, ok := pkgs[path]; !ok {
- pkgs[path] = map[string]bool{}
- paths = append(paths, path)
- }
- pkgs[path][sym] = true
- }
- }
- if err := sc.Err(); err != nil {
- log.Fatal(err)
- }
- sort.Strings(paths)
- for _, path := range paths {
- outf("\t%q: []string{\n", path)
- pkg := pkgs[path]
- var syms []string
- for sym := range pkg {
- syms = append(syms, sym)
- }
- sort.Strings(syms)
- for _, sym := range syms {
- outf("\t\t%q,\n", sym)
- }
- outf("},\n")
- }
- outf("}\n")
- fmtbuf, err := format.Source(buf.Bytes())
- if err != nil {
- log.Fatal(err)
- }
- err = ioutil.WriteFile("zstdlib.go", fmtbuf, 0666)
- if err != nil {
- log.Fatal(err)
- }
-}
-
-// syscallJSAPI returns the API of the syscall/js package.
-// It's computed from the contents of $(go env GOROOT)/src/syscall/js.
-func syscallJSAPI() io.Reader {
- var exeSuffix string
- if runtime.GOOS == "windows" {
- exeSuffix = ".exe"
- }
- cmd := exec.Command("go"+exeSuffix, "run", "cmd/api", "-contexts", "js-wasm", "syscall/js")
- out, err := cmd.Output()
- if err != nil {
- log.Fatalln(err)
- }
- return bytes.NewReader(out)
-}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go
index 0f9b87eb7..3ae859ed2 100644
--- a/vendor/golang.org/x/tools/internal/imports/mod.go
+++ b/vendor/golang.org/x/tools/internal/imports/mod.go
@@ -13,7 +13,6 @@ import (
"sort"
"strconv"
"strings"
- "sync"
"golang.org/x/tools/internal/gopathwalk"
"golang.org/x/tools/internal/module"
@@ -26,11 +25,14 @@ type ModuleResolver struct {
env *ProcessEnv
moduleCacheDir string
dummyVendorMod *ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory.
+ roots []gopathwalk.Root
+ scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots.
+ scannedRoots map[gopathwalk.Root]bool
- Initialized bool
- Main *ModuleJSON
- ModsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
- ModsByDir []*ModuleJSON // ...or Dir.
+ initialized bool
+ main *ModuleJSON
+ modsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
+ modsByDir []*ModuleJSON // ...or Dir.
// moduleCacheCache stores information about the module cache.
moduleCacheCache *dirInfoCache
@@ -41,13 +43,23 @@ type ModuleJSON struct {
Path string // module path
Replace *ModuleJSON // replaced by this module
Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
Dir string // directory holding files for this module, if any
GoMod string // path to go.mod file for this module, if any
GoVersion string // go version used in module
}
+func newModuleResolver(e *ProcessEnv) *ModuleResolver {
+ r := &ModuleResolver{
+ env: e,
+ scanSema: make(chan struct{}, 1),
+ }
+ r.scanSema <- struct{}{}
+ return r
+}
+
func (r *ModuleResolver) init() error {
- if r.Initialized {
+ if r.initialized {
return nil
}
mainMod, vendorEnabled, err := vendorEnabled(r.env)
@@ -58,13 +70,13 @@ func (r *ModuleResolver) init() error {
if mainMod != nil && vendorEnabled {
// Vendor mode is on, so all the non-Main modules are irrelevant,
// and we need to search /vendor for everything.
- r.Main = mainMod
+ r.main = mainMod
r.dummyVendorMod = &ModuleJSON{
Path: "",
Dir: filepath.Join(mainMod.Dir, "vendor"),
}
- r.ModsByModPath = []*ModuleJSON{mainMod, r.dummyVendorMod}
- r.ModsByDir = []*ModuleJSON{mainMod, r.dummyVendorMod}
+ r.modsByModPath = []*ModuleJSON{mainMod, r.dummyVendorMod}
+ r.modsByDir = []*ModuleJSON{mainMod, r.dummyVendorMod}
} else {
// Vendor mode is off, so run go list -m ... to find everything.
r.initAllMods()
@@ -72,30 +84,64 @@ func (r *ModuleResolver) init() error {
r.moduleCacheDir = filepath.Join(filepath.SplitList(r.env.GOPATH)[0], "/pkg/mod")
- sort.Slice(r.ModsByModPath, func(i, j int) bool {
+ sort.Slice(r.modsByModPath, func(i, j int) bool {
count := func(x int) int {
- return strings.Count(r.ModsByModPath[x].Path, "/")
+ return strings.Count(r.modsByModPath[x].Path, "/")
}
return count(j) < count(i) // descending order
})
- sort.Slice(r.ModsByDir, func(i, j int) bool {
+ sort.Slice(r.modsByDir, func(i, j int) bool {
count := func(x int) int {
- return strings.Count(r.ModsByDir[x].Dir, "/")
+ return strings.Count(r.modsByDir[x].Dir, "/")
}
return count(j) < count(i) // descending order
})
+ r.roots = []gopathwalk.Root{
+ {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
+ }
+ if r.main != nil {
+ r.roots = append(r.roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
+ }
+ if vendorEnabled {
+ r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
+ } else {
+ addDep := func(mod *ModuleJSON) {
+ if mod.Replace == nil {
+ // This is redundant with the cache, but we'll skip it cheaply enough.
+ r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootModuleCache})
+ } else {
+ r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
+ }
+ }
+ // Walk dependent modules before scanning the full mod cache, direct deps first.
+ for _, mod := range r.modsByModPath {
+ if !mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ for _, mod := range r.modsByModPath {
+ if mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ r.roots = append(r.roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
+ }
+
+ r.scannedRoots = map[gopathwalk.Root]bool{}
if r.moduleCacheCache == nil {
r.moduleCacheCache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
}
}
if r.otherCache == nil {
r.otherCache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
}
}
- r.Initialized = true
+ r.initialized = true
return nil
}
@@ -116,27 +162,35 @@ func (r *ModuleResolver) initAllMods() error {
// Can't do anything with a module that's not downloaded.
continue
}
- r.ModsByModPath = append(r.ModsByModPath, mod)
- r.ModsByDir = append(r.ModsByDir, mod)
+ r.modsByModPath = append(r.modsByModPath, mod)
+ r.modsByDir = append(r.modsByDir, mod)
if mod.Main {
- r.Main = mod
+ r.main = mod
}
}
return nil
}
func (r *ModuleResolver) ClearForNewScan() {
+ <-r.scanSema
+ r.scannedRoots = map[gopathwalk.Root]bool{}
r.otherCache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
}
+ r.scanSema <- struct{}{}
}
func (r *ModuleResolver) ClearForNewMod() {
- env := r.env
+ <-r.scanSema
*r = ModuleResolver{
- env: env,
+ env: r.env,
+ moduleCacheCache: r.moduleCacheCache,
+ otherCache: r.otherCache,
+ scanSema: r.scanSema,
}
r.init()
+ r.scanSema <- struct{}{}
}
// findPackage returns the module and directory that contains the package at
@@ -144,7 +198,7 @@ func (r *ModuleResolver) ClearForNewMod() {
func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
// This can't find packages in the stdlib, but that's harmless for all
// the existing code paths.
- for _, m := range r.ModsByModPath {
+ for _, m := range r.modsByModPath {
if !strings.HasPrefix(importPath, m.Path) {
continue
}
@@ -211,7 +265,7 @@ func (r *ModuleResolver) cacheKeys() []string {
}
// cachePackageName caches the package name for a dir already in the cache.
-func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (directoryPackageInfo, error) {
+func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) {
if info.rootType == gopathwalk.RootModuleCache {
return r.moduleCacheCache.CachePackageName(info)
}
@@ -238,7 +292,7 @@ func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
// - in /vendor/ in -mod=vendor mode.
// - nested module? Dunno.
// Rumor has it that replace targets cannot contain other replace targets.
- for _, m := range r.ModsByDir {
+ for _, m := range r.modsByDir {
if !strings.HasPrefix(dir, m.Dir) {
continue
}
@@ -333,41 +387,49 @@ func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil
}
-func (r *ModuleResolver) scan(_ references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error) {
+func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
if err := r.init(); err != nil {
- return nil, err
+ return err
}
- // Walk GOROOT, GOPATH/pkg/mod, and the main module.
- roots := []gopathwalk.Root{
- {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
- }
- if r.Main != nil {
- roots = append(roots, gopathwalk.Root{r.Main.Dir, gopathwalk.RootCurrentModule})
- }
- if r.dummyVendorMod != nil {
- roots = append(roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
- } else {
- roots = append(roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
- // Walk replace targets, just in case they're not in any of the above.
- for _, mod := range r.ModsByModPath {
- if mod.Replace != nil {
- roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
- }
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
+ }
+ pkg, err := r.canonicalize(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.dirFound(pkg) {
+ return
+ }
+ pkg.packageName, err = r.cachePackageName(info)
+ if err != nil {
+ return
}
- }
- roots = filterRoots(roots, exclude)
+ if !callback.packageNameLoaded(pkg) {
+ return
+ }
+ _, exports, err := r.loadExports(ctx, pkg, false)
+ if err != nil {
+ return
+ }
+ callback.exportsLoaded(pkg, exports)
+ }
- var result []*pkg
- var mu sync.Mutex
+ // Start processing everything in the cache, and listen for the new stuff
+ // we discover in the walk below.
+ stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir)
+ defer stop1()
+ stop2 := r.otherCache.ScanAndListen(ctx, processDir)
+ defer stop2()
- // We assume cached directories have not changed. We can skip them and their
- // children.
+ // We assume cached directories are fully cached, including all their
+ // children, and have not changed. We can skip them.
skip := func(root gopathwalk.Root, dir string) bool {
- mu.Lock()
- defer mu.Unlock()
-
info, ok := r.cacheLoad(dir)
if !ok {
return false
@@ -379,44 +441,64 @@ func (r *ModuleResolver) scan(_ references, loadNames bool, exclude []gopathwalk
return packageScanned
}
- // Add anything new to the cache. We'll process everything in it below.
+ // Add anything new to the cache, and process it if we're still listening.
add := func(root gopathwalk.Root, dir string) {
- mu.Lock()
- defer mu.Unlock()
-
r.cacheStore(r.scanDirForPackage(root, dir))
}
- gopathwalk.WalkSkip(roots, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
-
- // Everything we already had, and everything new, is now in the cache.
- for _, dir := range r.cacheKeys() {
- info, ok := r.cacheLoad(dir)
- if !ok {
- continue
- }
-
- // Skip this directory if we were not able to get the package information successfully.
- if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
- continue
- }
+ // r.roots and the callback are not necessarily safe to use in the
+ // goroutine below. Process them eagerly.
+ roots := filterRoots(r.roots, callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ // We have the lock on r.scannedRoots, and no other scans can run.
+ for _, root := range roots {
+ if ctx.Err() != nil {
+ return
+ }
- // If we want package names, make sure the cache has them.
- if loadNames {
- var err error
- if info, err = r.cachePackageName(info); err != nil {
+ if r.scannedRoots[root] {
continue
}
+ gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
+ r.scannedRoots[root] = true
}
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
+ }
+ return nil
+}
- res, err := r.canonicalize(info)
- if err != nil {
- continue
- }
- result = append(result, res)
+func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) int {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
}
+ mod, _ := r.findPackage(path)
+ return modRelevance(mod)
+}
- return result, nil
+func modRelevance(mod *ModuleJSON) int {
+ switch {
+ case mod == nil: // out of scope
+ return MaxRelevance - 4
+ case mod.Indirect:
+ return MaxRelevance - 3
+ case !mod.Main:
+ return MaxRelevance - 2
+ default:
+ return MaxRelevance - 1 // main module ties with stdlib
+ }
}
// canonicalize gets the result of canonicalizing the packages using the results
@@ -428,15 +510,14 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
importPathShort: info.nonCanonicalImportPath,
dir: info.dir,
packageName: path.Base(info.nonCanonicalImportPath),
- relevance: 0,
+ relevance: MaxRelevance,
}, nil
}
importPath := info.nonCanonicalImportPath
- relevance := 2
+ mod := r.findModuleByDir(info.dir)
// Check if the directory is underneath a module that's in scope.
- if mod := r.findModuleByDir(info.dir); mod != nil {
- relevance = 1
+ if mod != nil {
// It is. If dir is the target of a replace directive,
// our guessed import path is wrong. Use the real one.
if mod.Dir == info.dir {
@@ -445,15 +526,16 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
dirInMod := info.dir[len(mod.Dir)+len("/"):]
importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
}
- } else if info.needsReplace {
+ } else if !strings.HasPrefix(importPath, info.moduleName) {
+ // The module's name doesn't match the package's import path. It
+ // probably needs a replace directive we don't have.
return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir)
}
res := &pkg{
importPathShort: importPath,
dir: info.dir,
- packageName: info.packageName, // may not be populated if the caller didn't ask for it
- relevance: relevance,
+ relevance: modRelevance(mod),
}
// We may have discovered a package that has a different version
// in scope already. Canonicalize to that one if possible.
@@ -463,14 +545,14 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
return res, nil
}
-func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) {
+func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
if err := r.init(); err != nil {
return "", nil, err
}
- if info, ok := r.cacheLoad(pkg.dir); ok {
+ if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
return r.cacheExports(ctx, r.env, info)
}
- return loadExportsFromFiles(ctx, r.env, pkg.dir)
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
}
func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
@@ -488,7 +570,7 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
}
switch root.Type {
case gopathwalk.RootCurrentModule:
- importPath = path.Join(r.Main.Path, filepath.ToSlash(subdir))
+ importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir)
if len(matches) == 0 {
@@ -516,7 +598,6 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
dir: dir,
rootType: root.Type,
nonCanonicalImportPath: importPath,
- needsReplace: false,
moduleDir: modDir,
moduleName: modName,
}
@@ -524,14 +605,6 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
// stdlib packages are always in scope, despite the confusing go.mod
return result
}
- // Check that this package is not obviously impossible to import.
- if !strings.HasPrefix(importPath, modName) {
- // The module's declared path does not match
- // its expected path. It probably needs a
- // replace directive we don't have.
- result.needsReplace = true
- }
-
return result
}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
index f6b070a3f..6df7d48f9 100644
--- a/vendor/golang.org/x/tools/internal/imports/mod_cache.go
+++ b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
@@ -49,10 +49,6 @@ type directoryPackageInfo struct {
// nonCanonicalImportPath is the package's expected import path. It may
// not actually be importable at that path.
nonCanonicalImportPath string
- // needsReplace is true if the nonCanonicalImportPath does not match the
- // module's declared path, making it impossible to import without a
- // replace directive.
- needsReplace bool
// Module-related information.
moduleDir string // The directory that is the module root of this dir.
@@ -97,15 +93,85 @@ func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (
type dirInfoCache struct {
mu sync.Mutex
// dirs stores information about packages in directories, keyed by absolute path.
- dirs map[string]*directoryPackageInfo
+ dirs map[string]*directoryPackageInfo
+ listeners map[*int]cacheListener
+}
+
+type cacheListener func(directoryPackageInfo)
+
+// ScanAndListen calls listener on all the items in the cache, and on anything
+// newly added. The returned stop function waits for all in-flight callbacks to
+// finish and blocks new ones.
+func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() {
+ ctx, cancel := context.WithCancel(ctx)
+
+ // Flushing out all the callbacks is tricky without knowing how many there
+ // are going to be. Setting an arbitrary limit makes it much easier.
+ const maxInFlight = 10
+ sema := make(chan struct{}, maxInFlight)
+ for i := 0; i < maxInFlight; i++ {
+ sema <- struct{}{}
+ }
+
+ cookie := new(int) // A unique ID we can use for the listener.
+
+ // We can't hold mu while calling the listener.
+ d.mu.Lock()
+ var keys []string
+ for key := range d.dirs {
+ keys = append(keys, key)
+ }
+ d.listeners[cookie] = func(info directoryPackageInfo) {
+ select {
+ case <-ctx.Done():
+ return
+ case <-sema:
+ }
+ listener(info)
+ sema <- struct{}{}
+ }
+ d.mu.Unlock()
+
+ // Process the pre-existing keys.
+ for _, k := range keys {
+ select {
+ case <-ctx.Done():
+ cancel()
+ return func() {}
+ default:
+ }
+ if v, ok := d.Load(k); ok {
+ listener(v)
+ }
+ }
+
+ return func() {
+ cancel()
+ d.mu.Lock()
+ delete(d.listeners, cookie)
+ d.mu.Unlock()
+ for i := 0; i < maxInFlight; i++ {
+ <-sema
+ }
+ }
}
// Store stores the package info for dir.
func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) {
d.mu.Lock()
- defer d.mu.Unlock()
- stored := info // defensive copy
- d.dirs[dir] = &stored
+ _, old := d.dirs[dir]
+ d.dirs[dir] = &info
+ var listeners []cacheListener
+ for _, l := range d.listeners {
+ listeners = append(listeners, l)
+ }
+ d.mu.Unlock()
+
+ if !old {
+ for _, l := range listeners {
+ l(info)
+ }
+ }
}
// Load returns a copy of the directoryPackageInfo for absolute directory dir.
@@ -129,17 +195,17 @@ func (d *dirInfoCache) Keys() (keys []string) {
return keys
}
-func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (directoryPackageInfo, error) {
+func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) {
if loaded, err := info.reachedStatus(nameLoaded); loaded {
- return info, err
+ return info.packageName, err
}
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
- return info, fmt.Errorf("cannot read package name, scan error: %v", err)
+ return "", fmt.Errorf("cannot read package name, scan error: %v", err)
}
info.packageName, info.err = packageDirToName(info.dir)
info.status = nameLoaded
d.Store(info.dir, info)
- return info, info.err
+ return info.packageName, info.err
}
func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
@@ -149,8 +215,8 @@ func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info d
if reached, err := info.reachedStatus(nameLoaded); reached && err != nil {
return "", nil, err
}
- info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir)
- if info.err == context.Canceled {
+ info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false)
+ if info.err == context.Canceled || info.err == context.DeadlineExceeded {
return info.packageName, info.exports, info.err
}
// The cache structure wants things to proceed linearly. We can skip a
diff --git a/vendor/golang.org/x/tools/internal/packagesinternal/packages.go b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
new file mode 100644
index 000000000..0c0dbb6a9
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
@@ -0,0 +1,4 @@
+// Package packagesinternal exposes internal-only fields from go/packages.
+package packagesinternal
+
+var GetForTest = func(p interface{}) string { return "" }
diff --git a/vendor/google.golang.org/api/compute/v0.beta/compute-gen.go b/vendor/google.golang.org/api/compute/v0.beta/compute-gen.go
index efed0d0ac..86e930778 100644
--- a/vendor/google.golang.org/api/compute/v0.beta/compute-gen.go
+++ b/vendor/google.golang.org/api/compute/v0.beta/compute-gen.go
@@ -39,7 +39,7 @@
// computeService, err := compute.NewService(ctx, option.WithTokenSource(config.TokenSource(ctx, token)))
//
// See https://godoc.org/google.golang.org/api/option/ for details on options.
-package compute
+package compute // import "google.golang.org/api/compute/v0.beta"
import (
"bytes"
diff --git a/vendor/google.golang.org/api/googleapi/googleapi.go b/vendor/google.golang.org/api/googleapi/googleapi.go
index 850a094a4..471ae259c 100644
--- a/vendor/google.golang.org/api/googleapi/googleapi.go
+++ b/vendor/google.golang.org/api/googleapi/googleapi.go
@@ -4,7 +4,7 @@
// Package googleapi contains the common code shared by all Google API
// libraries.
-package googleapi
+package googleapi // import "google.golang.org/api/googleapi"
import (
"bytes"
diff --git a/vendor/google.golang.org/api/storage/v1/storage-gen.go b/vendor/google.golang.org/api/storage/v1/storage-gen.go
index 9c036e829..d19121d5d 100644
--- a/vendor/google.golang.org/api/storage/v1/storage-gen.go
+++ b/vendor/google.golang.org/api/storage/v1/storage-gen.go
@@ -41,7 +41,7 @@
// storageService, err := storage.NewService(ctx, option.WithTokenSource(config.TokenSource(ctx, token)))
//
// See https://godoc.org/google.golang.org/api/option/ for details on options.
-package storage
+package storage // import "google.golang.org/api/storage/v1"
import (
"bytes"
diff --git a/vendor/google.golang.org/appengine/appengine.go b/vendor/google.golang.org/appengine/appengine.go
index 7c702b0c1..8c9697674 100644
--- a/vendor/google.golang.org/appengine/appengine.go
+++ b/vendor/google.golang.org/appengine/appengine.go
@@ -6,7 +6,7 @@
//
// For more information on how to write Go apps for Google App Engine, see:
// https://cloud.google.com/appengine/docs/go/
-package appengine
+package appengine // import "google.golang.org/appengine"
import (
"net/http"
diff --git a/vendor/google.golang.org/appengine/datastore/doc.go b/vendor/google.golang.org/appengine/datastore/doc.go
index c1bfd8ce8..85616cf27 100644
--- a/vendor/google.golang.org/appengine/datastore/doc.go
+++ b/vendor/google.golang.org/appengine/datastore/doc.go
@@ -358,4 +358,4 @@ Example code:
}
}
*/
-package datastore
+package datastore // import "google.golang.org/appengine/datastore"
diff --git a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
index 497b4d9a9..497b4d9a9 100755..100644
--- a/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
+++ b/vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto
diff --git a/vendor/google.golang.org/appengine/internal/regen.sh b/vendor/google.golang.org/appengine/internal/regen.sh
index 2fdb546a6..2fdb546a6 100755..100644
--- a/vendor/google.golang.org/appengine/internal/regen.sh
+++ b/vendor/google.golang.org/appengine/internal/regen.sh
diff --git a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go
new file mode 100644
index 000000000..5f727750a
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.pb.go
@@ -0,0 +1,527 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
+
+package urlfetch
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type URLFetchServiceError_ErrorCode int32
+
+const (
+ URLFetchServiceError_OK URLFetchServiceError_ErrorCode = 0
+ URLFetchServiceError_INVALID_URL URLFetchServiceError_ErrorCode = 1
+ URLFetchServiceError_FETCH_ERROR URLFetchServiceError_ErrorCode = 2
+ URLFetchServiceError_UNSPECIFIED_ERROR URLFetchServiceError_ErrorCode = 3
+ URLFetchServiceError_RESPONSE_TOO_LARGE URLFetchServiceError_ErrorCode = 4
+ URLFetchServiceError_DEADLINE_EXCEEDED URLFetchServiceError_ErrorCode = 5
+ URLFetchServiceError_SSL_CERTIFICATE_ERROR URLFetchServiceError_ErrorCode = 6
+ URLFetchServiceError_DNS_ERROR URLFetchServiceError_ErrorCode = 7
+ URLFetchServiceError_CLOSED URLFetchServiceError_ErrorCode = 8
+ URLFetchServiceError_INTERNAL_TRANSIENT_ERROR URLFetchServiceError_ErrorCode = 9
+ URLFetchServiceError_TOO_MANY_REDIRECTS URLFetchServiceError_ErrorCode = 10
+ URLFetchServiceError_MALFORMED_REPLY URLFetchServiceError_ErrorCode = 11
+ URLFetchServiceError_CONNECTION_ERROR URLFetchServiceError_ErrorCode = 12
+)
+
+var URLFetchServiceError_ErrorCode_name = map[int32]string{
+ 0: "OK",
+ 1: "INVALID_URL",
+ 2: "FETCH_ERROR",
+ 3: "UNSPECIFIED_ERROR",
+ 4: "RESPONSE_TOO_LARGE",
+ 5: "DEADLINE_EXCEEDED",
+ 6: "SSL_CERTIFICATE_ERROR",
+ 7: "DNS_ERROR",
+ 8: "CLOSED",
+ 9: "INTERNAL_TRANSIENT_ERROR",
+ 10: "TOO_MANY_REDIRECTS",
+ 11: "MALFORMED_REPLY",
+ 12: "CONNECTION_ERROR",
+}
+var URLFetchServiceError_ErrorCode_value = map[string]int32{
+ "OK": 0,
+ "INVALID_URL": 1,
+ "FETCH_ERROR": 2,
+ "UNSPECIFIED_ERROR": 3,
+ "RESPONSE_TOO_LARGE": 4,
+ "DEADLINE_EXCEEDED": 5,
+ "SSL_CERTIFICATE_ERROR": 6,
+ "DNS_ERROR": 7,
+ "CLOSED": 8,
+ "INTERNAL_TRANSIENT_ERROR": 9,
+ "TOO_MANY_REDIRECTS": 10,
+ "MALFORMED_REPLY": 11,
+ "CONNECTION_ERROR": 12,
+}
+
+func (x URLFetchServiceError_ErrorCode) Enum() *URLFetchServiceError_ErrorCode {
+ p := new(URLFetchServiceError_ErrorCode)
+ *p = x
+ return p
+}
+func (x URLFetchServiceError_ErrorCode) String() string {
+ return proto.EnumName(URLFetchServiceError_ErrorCode_name, int32(x))
+}
+func (x *URLFetchServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(URLFetchServiceError_ErrorCode_value, data, "URLFetchServiceError_ErrorCode")
+ if err != nil {
+ return err
+ }
+ *x = URLFetchServiceError_ErrorCode(value)
+ return nil
+}
+func (URLFetchServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0, 0}
+}
+
+type URLFetchRequest_RequestMethod int32
+
+const (
+ URLFetchRequest_GET URLFetchRequest_RequestMethod = 1
+ URLFetchRequest_POST URLFetchRequest_RequestMethod = 2
+ URLFetchRequest_HEAD URLFetchRequest_RequestMethod = 3
+ URLFetchRequest_PUT URLFetchRequest_RequestMethod = 4
+ URLFetchRequest_DELETE URLFetchRequest_RequestMethod = 5
+ URLFetchRequest_PATCH URLFetchRequest_RequestMethod = 6
+)
+
+var URLFetchRequest_RequestMethod_name = map[int32]string{
+ 1: "GET",
+ 2: "POST",
+ 3: "HEAD",
+ 4: "PUT",
+ 5: "DELETE",
+ 6: "PATCH",
+}
+var URLFetchRequest_RequestMethod_value = map[string]int32{
+ "GET": 1,
+ "POST": 2,
+ "HEAD": 3,
+ "PUT": 4,
+ "DELETE": 5,
+ "PATCH": 6,
+}
+
+func (x URLFetchRequest_RequestMethod) Enum() *URLFetchRequest_RequestMethod {
+ p := new(URLFetchRequest_RequestMethod)
+ *p = x
+ return p
+}
+func (x URLFetchRequest_RequestMethod) String() string {
+ return proto.EnumName(URLFetchRequest_RequestMethod_name, int32(x))
+}
+func (x *URLFetchRequest_RequestMethod) UnmarshalJSON(data []byte) error {
+ value, err := proto.UnmarshalJSONEnum(URLFetchRequest_RequestMethod_value, data, "URLFetchRequest_RequestMethod")
+ if err != nil {
+ return err
+ }
+ *x = URLFetchRequest_RequestMethod(value)
+ return nil
+}
+func (URLFetchRequest_RequestMethod) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
+}
+
+type URLFetchServiceError struct {
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchServiceError) Reset() { *m = URLFetchServiceError{} }
+func (m *URLFetchServiceError) String() string { return proto.CompactTextString(m) }
+func (*URLFetchServiceError) ProtoMessage() {}
+func (*URLFetchServiceError) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0}
+}
+func (m *URLFetchServiceError) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchServiceError.Unmarshal(m, b)
+}
+func (m *URLFetchServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchServiceError.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchServiceError) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchServiceError.Merge(dst, src)
+}
+func (m *URLFetchServiceError) XXX_Size() int {
+ return xxx_messageInfo_URLFetchServiceError.Size(m)
+}
+func (m *URLFetchServiceError) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchServiceError proto.InternalMessageInfo
+
+type URLFetchRequest struct {
+ Method *URLFetchRequest_RequestMethod `protobuf:"varint,1,req,name=Method,enum=appengine.URLFetchRequest_RequestMethod" json:"Method,omitempty"`
+ Url *string `protobuf:"bytes,2,req,name=Url" json:"Url,omitempty"`
+ Header []*URLFetchRequest_Header `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
+ Payload []byte `protobuf:"bytes,6,opt,name=Payload" json:"Payload,omitempty"`
+ FollowRedirects *bool `protobuf:"varint,7,opt,name=FollowRedirects,def=1" json:"FollowRedirects,omitempty"`
+ Deadline *float64 `protobuf:"fixed64,8,opt,name=Deadline" json:"Deadline,omitempty"`
+ MustValidateServerCertificate *bool `protobuf:"varint,9,opt,name=MustValidateServerCertificate,def=1" json:"MustValidateServerCertificate,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchRequest) Reset() { *m = URLFetchRequest{} }
+func (m *URLFetchRequest) String() string { return proto.CompactTextString(m) }
+func (*URLFetchRequest) ProtoMessage() {}
+func (*URLFetchRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1}
+}
+func (m *URLFetchRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchRequest.Unmarshal(m, b)
+}
+func (m *URLFetchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchRequest.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchRequest.Merge(dst, src)
+}
+func (m *URLFetchRequest) XXX_Size() int {
+ return xxx_messageInfo_URLFetchRequest.Size(m)
+}
+func (m *URLFetchRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchRequest proto.InternalMessageInfo
+
+const Default_URLFetchRequest_FollowRedirects bool = true
+const Default_URLFetchRequest_MustValidateServerCertificate bool = true
+
+func (m *URLFetchRequest) GetMethod() URLFetchRequest_RequestMethod {
+ if m != nil && m.Method != nil {
+ return *m.Method
+ }
+ return URLFetchRequest_GET
+}
+
+func (m *URLFetchRequest) GetUrl() string {
+ if m != nil && m.Url != nil {
+ return *m.Url
+ }
+ return ""
+}
+
+func (m *URLFetchRequest) GetHeader() []*URLFetchRequest_Header {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *URLFetchRequest) GetPayload() []byte {
+ if m != nil {
+ return m.Payload
+ }
+ return nil
+}
+
+func (m *URLFetchRequest) GetFollowRedirects() bool {
+ if m != nil && m.FollowRedirects != nil {
+ return *m.FollowRedirects
+ }
+ return Default_URLFetchRequest_FollowRedirects
+}
+
+func (m *URLFetchRequest) GetDeadline() float64 {
+ if m != nil && m.Deadline != nil {
+ return *m.Deadline
+ }
+ return 0
+}
+
+func (m *URLFetchRequest) GetMustValidateServerCertificate() bool {
+ if m != nil && m.MustValidateServerCertificate != nil {
+ return *m.MustValidateServerCertificate
+ }
+ return Default_URLFetchRequest_MustValidateServerCertificate
+}
+
+type URLFetchRequest_Header struct {
+ Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
+ Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchRequest_Header) Reset() { *m = URLFetchRequest_Header{} }
+func (m *URLFetchRequest_Header) String() string { return proto.CompactTextString(m) }
+func (*URLFetchRequest_Header) ProtoMessage() {}
+func (*URLFetchRequest_Header) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
+}
+func (m *URLFetchRequest_Header) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchRequest_Header.Unmarshal(m, b)
+}
+func (m *URLFetchRequest_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchRequest_Header.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchRequest_Header) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchRequest_Header.Merge(dst, src)
+}
+func (m *URLFetchRequest_Header) XXX_Size() int {
+ return xxx_messageInfo_URLFetchRequest_Header.Size(m)
+}
+func (m *URLFetchRequest_Header) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchRequest_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchRequest_Header proto.InternalMessageInfo
+
+func (m *URLFetchRequest_Header) GetKey() string {
+ if m != nil && m.Key != nil {
+ return *m.Key
+ }
+ return ""
+}
+
+func (m *URLFetchRequest_Header) GetValue() string {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return ""
+}
+
+type URLFetchResponse struct {
+ Content []byte `protobuf:"bytes,1,opt,name=Content" json:"Content,omitempty"`
+ StatusCode *int32 `protobuf:"varint,2,req,name=StatusCode" json:"StatusCode,omitempty"`
+ Header []*URLFetchResponse_Header `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
+ ContentWasTruncated *bool `protobuf:"varint,6,opt,name=ContentWasTruncated,def=0" json:"ContentWasTruncated,omitempty"`
+ ExternalBytesSent *int64 `protobuf:"varint,7,opt,name=ExternalBytesSent" json:"ExternalBytesSent,omitempty"`
+ ExternalBytesReceived *int64 `protobuf:"varint,8,opt,name=ExternalBytesReceived" json:"ExternalBytesReceived,omitempty"`
+ FinalUrl *string `protobuf:"bytes,9,opt,name=FinalUrl" json:"FinalUrl,omitempty"`
+ ApiCpuMilliseconds *int64 `protobuf:"varint,10,opt,name=ApiCpuMilliseconds,def=0" json:"ApiCpuMilliseconds,omitempty"`
+ ApiBytesSent *int64 `protobuf:"varint,11,opt,name=ApiBytesSent,def=0" json:"ApiBytesSent,omitempty"`
+ ApiBytesReceived *int64 `protobuf:"varint,12,opt,name=ApiBytesReceived,def=0" json:"ApiBytesReceived,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchResponse) Reset() { *m = URLFetchResponse{} }
+func (m *URLFetchResponse) String() string { return proto.CompactTextString(m) }
+func (*URLFetchResponse) ProtoMessage() {}
+func (*URLFetchResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2}
+}
+func (m *URLFetchResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchResponse.Unmarshal(m, b)
+}
+func (m *URLFetchResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchResponse.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchResponse.Merge(dst, src)
+}
+func (m *URLFetchResponse) XXX_Size() int {
+ return xxx_messageInfo_URLFetchResponse.Size(m)
+}
+func (m *URLFetchResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchResponse proto.InternalMessageInfo
+
+const Default_URLFetchResponse_ContentWasTruncated bool = false
+const Default_URLFetchResponse_ApiCpuMilliseconds int64 = 0
+const Default_URLFetchResponse_ApiBytesSent int64 = 0
+const Default_URLFetchResponse_ApiBytesReceived int64 = 0
+
+func (m *URLFetchResponse) GetContent() []byte {
+ if m != nil {
+ return m.Content
+ }
+ return nil
+}
+
+func (m *URLFetchResponse) GetStatusCode() int32 {
+ if m != nil && m.StatusCode != nil {
+ return *m.StatusCode
+ }
+ return 0
+}
+
+func (m *URLFetchResponse) GetHeader() []*URLFetchResponse_Header {
+ if m != nil {
+ return m.Header
+ }
+ return nil
+}
+
+func (m *URLFetchResponse) GetContentWasTruncated() bool {
+ if m != nil && m.ContentWasTruncated != nil {
+ return *m.ContentWasTruncated
+ }
+ return Default_URLFetchResponse_ContentWasTruncated
+}
+
+func (m *URLFetchResponse) GetExternalBytesSent() int64 {
+ if m != nil && m.ExternalBytesSent != nil {
+ return *m.ExternalBytesSent
+ }
+ return 0
+}
+
+func (m *URLFetchResponse) GetExternalBytesReceived() int64 {
+ if m != nil && m.ExternalBytesReceived != nil {
+ return *m.ExternalBytesReceived
+ }
+ return 0
+}
+
+func (m *URLFetchResponse) GetFinalUrl() string {
+ if m != nil && m.FinalUrl != nil {
+ return *m.FinalUrl
+ }
+ return ""
+}
+
+func (m *URLFetchResponse) GetApiCpuMilliseconds() int64 {
+ if m != nil && m.ApiCpuMilliseconds != nil {
+ return *m.ApiCpuMilliseconds
+ }
+ return Default_URLFetchResponse_ApiCpuMilliseconds
+}
+
+func (m *URLFetchResponse) GetApiBytesSent() int64 {
+ if m != nil && m.ApiBytesSent != nil {
+ return *m.ApiBytesSent
+ }
+ return Default_URLFetchResponse_ApiBytesSent
+}
+
+func (m *URLFetchResponse) GetApiBytesReceived() int64 {
+ if m != nil && m.ApiBytesReceived != nil {
+ return *m.ApiBytesReceived
+ }
+ return Default_URLFetchResponse_ApiBytesReceived
+}
+
+type URLFetchResponse_Header struct {
+ Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
+ Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *URLFetchResponse_Header) Reset() { *m = URLFetchResponse_Header{} }
+func (m *URLFetchResponse_Header) String() string { return proto.CompactTextString(m) }
+func (*URLFetchResponse_Header) ProtoMessage() {}
+func (*URLFetchResponse_Header) Descriptor() ([]byte, []int) {
+ return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2, 0}
+}
+func (m *URLFetchResponse_Header) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_URLFetchResponse_Header.Unmarshal(m, b)
+}
+func (m *URLFetchResponse_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_URLFetchResponse_Header.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchResponse_Header) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_URLFetchResponse_Header.Merge(dst, src)
+}
+func (m *URLFetchResponse_Header) XXX_Size() int {
+ return xxx_messageInfo_URLFetchResponse_Header.Size(m)
+}
+func (m *URLFetchResponse_Header) XXX_DiscardUnknown() {
+ xxx_messageInfo_URLFetchResponse_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchResponse_Header proto.InternalMessageInfo
+
+func (m *URLFetchResponse_Header) GetKey() string {
+ if m != nil && m.Key != nil {
+ return *m.Key
+ }
+ return ""
+}
+
+func (m *URLFetchResponse_Header) GetValue() string {
+ if m != nil && m.Value != nil {
+ return *m.Value
+ }
+ return ""
+}
+
+func init() {
+ proto.RegisterType((*URLFetchServiceError)(nil), "appengine.URLFetchServiceError")
+ proto.RegisterType((*URLFetchRequest)(nil), "appengine.URLFetchRequest")
+ proto.RegisterType((*URLFetchRequest_Header)(nil), "appengine.URLFetchRequest.Header")
+ proto.RegisterType((*URLFetchResponse)(nil), "appengine.URLFetchResponse")
+ proto.RegisterType((*URLFetchResponse_Header)(nil), "appengine.URLFetchResponse.Header")
+}
+
+func init() {
+ proto.RegisterFile("google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto", fileDescriptor_urlfetch_service_b245a7065f33bced)
+}
+
+var fileDescriptor_urlfetch_service_b245a7065f33bced = []byte{
+ // 770 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x6e, 0xe3, 0x54,
+ 0x10, 0xc6, 0x76, 0x7e, 0xa7, 0x5d, 0x7a, 0x76, 0xb6, 0x45, 0x66, 0xb5, 0xa0, 0x10, 0x09, 0x29,
+ 0x17, 0x90, 0x2e, 0x2b, 0x24, 0x44, 0xaf, 0x70, 0xed, 0x93, 0xad, 0xa9, 0x63, 0x47, 0xc7, 0x4e,
+ 0x61, 0xb9, 0xb1, 0xac, 0x78, 0x9a, 0x5a, 0xb2, 0xec, 0x60, 0x9f, 0x2c, 0xf4, 0x35, 0x78, 0x0d,
+ 0xde, 0x87, 0xa7, 0xe1, 0x02, 0x9d, 0xc4, 0xc9, 0x6e, 0xbb, 0xd1, 0x4a, 0x5c, 0x65, 0xe6, 0x9b,
+ 0xef, 0xcc, 0x99, 0x7c, 0xdf, 0xf8, 0x80, 0xb3, 0x2c, 0xcb, 0x65, 0x4e, 0xe3, 0x65, 0x99, 0x27,
+ 0xc5, 0x72, 0x5c, 0x56, 0xcb, 0xf3, 0x64, 0xb5, 0xa2, 0x62, 0x99, 0x15, 0x74, 0x9e, 0x15, 0x92,
+ 0xaa, 0x22, 0xc9, 0xcf, 0xd7, 0x55, 0x7e, 0x4b, 0x72, 0x71, 0xb7, 0x0f, 0xe2, 0x9a, 0xaa, 0xb7,
+ 0xd9, 0x82, 0xc6, 0xab, 0xaa, 0x94, 0x25, 0xf6, 0xf7, 0x67, 0x86, 0x7f, 0xeb, 0x70, 0x3a, 0x17,
+ 0xde, 0x44, 0xb1, 0xc2, 0x2d, 0x89, 0x57, 0x55, 0x59, 0x0d, 0xff, 0xd2, 0xa1, 0xbf, 0x89, 0xec,
+ 0x32, 0x25, 0xec, 0x80, 0x1e, 0x5c, 0xb3, 0x4f, 0xf0, 0x04, 0x8e, 0x5c, 0xff, 0xc6, 0xf2, 0x5c,
+ 0x27, 0x9e, 0x0b, 0x8f, 0x69, 0x0a, 0x98, 0xf0, 0xc8, 0xbe, 0x8a, 0xb9, 0x10, 0x81, 0x60, 0x3a,
+ 0x9e, 0xc1, 0xd3, 0xb9, 0x1f, 0xce, 0xb8, 0xed, 0x4e, 0x5c, 0xee, 0x34, 0xb0, 0x81, 0x9f, 0x01,
+ 0x0a, 0x1e, 0xce, 0x02, 0x3f, 0xe4, 0x71, 0x14, 0x04, 0xb1, 0x67, 0x89, 0xd7, 0x9c, 0xb5, 0x14,
+ 0xdd, 0xe1, 0x96, 0xe3, 0xb9, 0x3e, 0x8f, 0xf9, 0xaf, 0x36, 0xe7, 0x0e, 0x77, 0x58, 0x1b, 0x3f,
+ 0x87, 0xb3, 0x30, 0xf4, 0x62, 0x9b, 0x8b, 0xc8, 0x9d, 0xb8, 0xb6, 0x15, 0xf1, 0xa6, 0x53, 0x07,
+ 0x9f, 0x40, 0xdf, 0xf1, 0xc3, 0x26, 0xed, 0x22, 0x40, 0xc7, 0xf6, 0x82, 0x90, 0x3b, 0xac, 0x87,
+ 0x2f, 0xc0, 0x74, 0xfd, 0x88, 0x0b, 0xdf, 0xf2, 0xe2, 0x48, 0x58, 0x7e, 0xe8, 0x72, 0x3f, 0x6a,
+ 0x98, 0x7d, 0x35, 0x82, 0xba, 0x79, 0x6a, 0xf9, 0x6f, 0x62, 0xc1, 0x1d, 0x57, 0x70, 0x3b, 0x0a,
+ 0x19, 0xe0, 0x33, 0x38, 0x99, 0x5a, 0xde, 0x24, 0x10, 0x53, 0xee, 0xc4, 0x82, 0xcf, 0xbc, 0x37,
+ 0xec, 0x08, 0x4f, 0x81, 0xd9, 0x81, 0xef, 0x73, 0x3b, 0x72, 0x03, 0xbf, 0x69, 0x71, 0x3c, 0xfc,
+ 0xc7, 0x80, 0x93, 0x9d, 0x5a, 0x82, 0x7e, 0x5f, 0x53, 0x2d, 0xf1, 0x27, 0xe8, 0x4c, 0x49, 0xde,
+ 0x95, 0xa9, 0xa9, 0x0d, 0xf4, 0xd1, 0xa7, 0xaf, 0x46, 0xe3, 0xbd, 0xba, 0xe3, 0x47, 0xdc, 0x71,
+ 0xf3, 0xbb, 0xe5, 0x8b, 0xe6, 0x1c, 0x32, 0x30, 0xe6, 0x55, 0x6e, 0xea, 0x03, 0x7d, 0xd4, 0x17,
+ 0x2a, 0xc4, 0x1f, 0xa1, 0x73, 0x47, 0x49, 0x4a, 0x95, 0x69, 0x0c, 0x8c, 0x11, 0xbc, 0xfa, 0xea,
+ 0x23, 0x3d, 0xaf, 0x36, 0x44, 0xd1, 0x1c, 0xc0, 0x17, 0xd0, 0x9d, 0x25, 0xf7, 0x79, 0x99, 0xa4,
+ 0x66, 0x67, 0xa0, 0x8d, 0x8e, 0x2f, 0xf5, 0x9e, 0x26, 0x76, 0x10, 0x8e, 0xe1, 0x64, 0x52, 0xe6,
+ 0x79, 0xf9, 0x87, 0xa0, 0x34, 0xab, 0x68, 0x21, 0x6b, 0xb3, 0x3b, 0xd0, 0x46, 0xbd, 0x8b, 0x96,
+ 0xac, 0xd6, 0x24, 0x1e, 0x17, 0xf1, 0x39, 0xf4, 0x1c, 0x4a, 0xd2, 0x3c, 0x2b, 0xc8, 0xec, 0x0d,
+ 0xb4, 0x91, 0x26, 0xf6, 0x39, 0xfe, 0x0c, 0x5f, 0x4c, 0xd7, 0xb5, 0xbc, 0x49, 0xf2, 0x2c, 0x4d,
+ 0x24, 0xa9, 0xed, 0xa1, 0xca, 0xa6, 0x4a, 0x66, 0xb7, 0xd9, 0x22, 0x91, 0x64, 0xf6, 0xdf, 0xeb,
+ 0xfc, 0x71, 0xea, 0xf3, 0x97, 0xd0, 0xd9, 0xfe, 0x0f, 0x25, 0xc6, 0x35, 0xdd, 0x9b, 0xad, 0xad,
+ 0x18, 0xd7, 0x74, 0x8f, 0xa7, 0xd0, 0xbe, 0x49, 0xf2, 0x35, 0x99, 0xed, 0x0d, 0xb6, 0x4d, 0x86,
+ 0x1e, 0x3c, 0x79, 0xa0, 0x26, 0x76, 0xc1, 0x78, 0xcd, 0x23, 0xa6, 0x61, 0x0f, 0x5a, 0xb3, 0x20,
+ 0x8c, 0x98, 0xae, 0xa2, 0x2b, 0x6e, 0x39, 0xcc, 0x50, 0xc5, 0xd9, 0x3c, 0x62, 0x2d, 0xb5, 0x2e,
+ 0x0e, 0xf7, 0x78, 0xc4, 0x59, 0x1b, 0xfb, 0xd0, 0x9e, 0x59, 0x91, 0x7d, 0xc5, 0x3a, 0xc3, 0x7f,
+ 0x0d, 0x60, 0xef, 0x84, 0xad, 0x57, 0x65, 0x51, 0x13, 0x9a, 0xd0, 0xb5, 0xcb, 0x42, 0x52, 0x21,
+ 0x4d, 0x4d, 0x49, 0x29, 0x76, 0x29, 0x7e, 0x09, 0x10, 0xca, 0x44, 0xae, 0x6b, 0xf5, 0x71, 0x6c,
+ 0x8c, 0x6b, 0x8b, 0xf7, 0x10, 0xbc, 0x78, 0xe4, 0xdf, 0xf0, 0xa0, 0x7f, 0xdb, 0x6b, 0x1e, 0x1b,
+ 0xf8, 0x03, 0x3c, 0x6b, 0xae, 0xf9, 0x25, 0xa9, 0xa3, 0x6a, 0x5d, 0x28, 0x81, 0xb6, 0x66, 0xf6,
+ 0x2e, 0xda, 0xb7, 0x49, 0x5e, 0x93, 0x38, 0xc4, 0xc0, 0x6f, 0xe0, 0x29, 0xff, 0x73, 0xfb, 0x02,
+ 0x5c, 0xde, 0x4b, 0xaa, 0x43, 0x35, 0xb8, 0x72, 0xd7, 0x10, 0x1f, 0x16, 0xf0, 0x7b, 0x38, 0x7b,
+ 0x00, 0x0a, 0x5a, 0x50, 0xf6, 0x96, 0xd2, 0x8d, 0xcd, 0x86, 0x38, 0x5c, 0x54, 0xfb, 0x30, 0xc9,
+ 0x8a, 0x24, 0x57, 0xfb, 0xaa, 0xec, 0xed, 0x8b, 0x7d, 0x8e, 0xdf, 0x01, 0x5a, 0xab, 0xcc, 0x5e,
+ 0xad, 0xa7, 0x59, 0x9e, 0x67, 0x35, 0x2d, 0xca, 0x22, 0xad, 0x4d, 0x50, 0xed, 0x2e, 0xb4, 0x97,
+ 0xe2, 0x40, 0x11, 0xbf, 0x86, 0x63, 0x6b, 0x95, 0xbd, 0x9b, 0xf6, 0x68, 0x47, 0x7e, 0x00, 0xe3,
+ 0xb7, 0xc0, 0x76, 0xf9, 0x7e, 0xcc, 0xe3, 0x1d, 0xf5, 0x83, 0xd2, 0xff, 0x5f, 0xa6, 0x4b, 0xf8,
+ 0xad, 0xb7, 0x7b, 0x2a, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1d, 0x9f, 0x6d, 0x24, 0x63, 0x05,
+ 0x00, 0x00,
+}
diff --git a/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
new file mode 100644
index 000000000..f695edf6a
--- /dev/null
+++ b/vendor/google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
@@ -0,0 +1,64 @@
+syntax = "proto2";
+option go_package = "urlfetch";
+
+package appengine;
+
+message URLFetchServiceError {
+ enum ErrorCode {
+ OK = 0;
+ INVALID_URL = 1;
+ FETCH_ERROR = 2;
+ UNSPECIFIED_ERROR = 3;
+ RESPONSE_TOO_LARGE = 4;
+ DEADLINE_EXCEEDED = 5;
+ SSL_CERTIFICATE_ERROR = 6;
+ DNS_ERROR = 7;
+ CLOSED = 8;
+ INTERNAL_TRANSIENT_ERROR = 9;
+ TOO_MANY_REDIRECTS = 10;
+ MALFORMED_REPLY = 11;
+ CONNECTION_ERROR = 12;
+ }
+}
+
+message URLFetchRequest {
+ enum RequestMethod {
+ GET = 1;
+ POST = 2;
+ HEAD = 3;
+ PUT = 4;
+ DELETE = 5;
+ PATCH = 6;
+ }
+ required RequestMethod Method = 1;
+ required string Url = 2;
+ repeated group Header = 3 {
+ required string Key = 4;
+ required string Value = 5;
+ }
+ optional bytes Payload = 6 [ctype=CORD];
+
+ optional bool FollowRedirects = 7 [default=true];
+
+ optional double Deadline = 8;
+
+ optional bool MustValidateServerCertificate = 9 [default=true];
+}
+
+message URLFetchResponse {
+ optional bytes Content = 1;
+ required int32 StatusCode = 2;
+ repeated group Header = 3 {
+ required string Key = 4;
+ required string Value = 5;
+ }
+ optional bool ContentWasTruncated = 6 [default=false];
+ optional int64 ExternalBytesSent = 7;
+ optional int64 ExternalBytesReceived = 8;
+
+ optional string FinalUrl = 9;
+
+ optional int64 ApiCpuMilliseconds = 10 [default=0];
+ optional int64 ApiBytesSent = 11 [default=0];
+ optional int64 ApiBytesReceived = 12 [default=0];
+}
diff --git a/vendor/google.golang.org/appengine/log/log.go b/vendor/google.golang.org/appengine/log/log.go
index fdf7b4d73..731ad8c36 100644
--- a/vendor/google.golang.org/appengine/log/log.go
+++ b/vendor/google.golang.org/appengine/log/log.go
@@ -26,7 +26,7 @@ Example:
log.Infof(c, "Saw record %v", record)
}
*/
-package log
+package log // import "google.golang.org/appengine/log"
import (
"errors"
diff --git a/vendor/google.golang.org/appengine/mail/mail.go b/vendor/google.golang.org/appengine/mail/mail.go
index 66d036b8f..1ce1e8706 100644
--- a/vendor/google.golang.org/appengine/mail/mail.go
+++ b/vendor/google.golang.org/appengine/mail/mail.go
@@ -17,7 +17,7 @@ Example:
log.Errorf(c, "Alas, my user, the email failed to sendeth: %v", err)
}
*/
-package mail
+package mail // import "google.golang.org/appengine/mail"
import (
"net/mail"
diff --git a/vendor/google.golang.org/appengine/travis_install.sh b/vendor/google.golang.org/appengine/travis_install.sh
index 785b62f46..785b62f46 100755..100644
--- a/vendor/google.golang.org/appengine/travis_install.sh
+++ b/vendor/google.golang.org/appengine/travis_install.sh
diff --git a/vendor/google.golang.org/appengine/travis_test.sh b/vendor/google.golang.org/appengine/travis_test.sh
index d4390f045..d4390f045 100755..100644
--- a/vendor/google.golang.org/appengine/travis_test.sh
+++ b/vendor/google.golang.org/appengine/travis_test.sh
diff --git a/vendor/google.golang.org/appengine/urlfetch/urlfetch.go b/vendor/google.golang.org/appengine/urlfetch/urlfetch.go
new file mode 100644
index 000000000..6ffe1e6d9
--- /dev/null
+++ b/vendor/google.golang.org/appengine/urlfetch/urlfetch.go
@@ -0,0 +1,210 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package urlfetch provides an http.RoundTripper implementation
+// for fetching URLs via App Engine's urlfetch service.
+package urlfetch // import "google.golang.org/appengine/urlfetch"
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/golang/protobuf/proto"
+ "golang.org/x/net/context"
+
+ "google.golang.org/appengine/internal"
+ pb "google.golang.org/appengine/internal/urlfetch"
+)
+
+// Transport is an implementation of http.RoundTripper for
+// App Engine. Users should generally create an http.Client using
+// this transport and use the Client rather than using this transport
+// directly.
+type Transport struct {
+ Context context.Context
+
+ // Controls whether the application checks the validity of SSL certificates
+ // over HTTPS connections. A value of false (the default) instructs the
+ // application to send a request to the server only if the certificate is
+ // valid and signed by a trusted certificate authority (CA), and also
+ // includes a hostname that matches the certificate. A value of true
+ // instructs the application to perform no certificate validation.
+ AllowInvalidServerCertificate bool
+}
+
+// Verify statically that *Transport implements http.RoundTripper.
+var _ http.RoundTripper = (*Transport)(nil)
+
+// Client returns an *http.Client using a default urlfetch Transport. This
+// client will have the default deadline of 5 seconds, and will check the
+// validity of SSL certificates.
+//
+// Any deadline of the provided context will be used for requests through this client;
+// if the client does not have a deadline then a 5 second default is used.
+func Client(ctx context.Context) *http.Client {
+ return &http.Client{
+ Transport: &Transport{
+ Context: ctx,
+ },
+ }
+}
+
+type bodyReader struct {
+ content []byte
+ truncated bool
+ closed bool
+}
+
+// ErrTruncatedBody is the error returned after the final Read() from a
+// response's Body if the body has been truncated by App Engine's proxy.
+var ErrTruncatedBody = errors.New("urlfetch: truncated body")
+
+func statusCodeToText(code int) string {
+ if t := http.StatusText(code); t != "" {
+ return t
+ }
+ return strconv.Itoa(code)
+}
+
+func (br *bodyReader) Read(p []byte) (n int, err error) {
+ if br.closed {
+ if br.truncated {
+ return 0, ErrTruncatedBody
+ }
+ return 0, io.EOF
+ }
+ n = copy(p, br.content)
+ if n > 0 {
+ br.content = br.content[n:]
+ return
+ }
+ if br.truncated {
+ br.closed = true
+ return 0, ErrTruncatedBody
+ }
+ return 0, io.EOF
+}
+
+func (br *bodyReader) Close() error {
+ br.closed = true
+ br.content = nil
+ return nil
+}
+
+// A map of the URL Fetch-accepted methods that take a request body.
+var methodAcceptsRequestBody = map[string]bool{
+ "POST": true,
+ "PUT": true,
+ "PATCH": true,
+}
+
+// urlString returns a valid string given a URL. This function is necessary because
+// the String method of URL doesn't correctly handle URLs with non-empty Opaque values.
+// See http://code.google.com/p/go/issues/detail?id=4860.
+func urlString(u *url.URL) string {
+ if u.Opaque == "" || strings.HasPrefix(u.Opaque, "//") {
+ return u.String()
+ }
+ aux := *u
+ aux.Opaque = "//" + aux.Host + aux.Opaque
+ return aux.String()
+}
+
+// RoundTrip issues a single HTTP request and returns its response. Per the
+// http.RoundTripper interface, RoundTrip only returns an error if there
+// was an unsupported request or the URL Fetch proxy fails.
+// Note that HTTP response codes such as 5xx, 403, 404, etc are not
+// errors as far as the transport is concerned and will be returned
+// with err set to nil.
+func (t *Transport) RoundTrip(req *http.Request) (res *http.Response, err error) {
+ methNum, ok := pb.URLFetchRequest_RequestMethod_value[req.Method]
+ if !ok {
+ return nil, fmt.Errorf("urlfetch: unsupported HTTP method %q", req.Method)
+ }
+
+ method := pb.URLFetchRequest_RequestMethod(methNum)
+
+ freq := &pb.URLFetchRequest{
+ Method: &method,
+ Url: proto.String(urlString(req.URL)),
+ FollowRedirects: proto.Bool(false), // http.Client's responsibility
+ MustValidateServerCertificate: proto.Bool(!t.AllowInvalidServerCertificate),
+ }
+ if deadline, ok := t.Context.Deadline(); ok {
+ freq.Deadline = proto.Float64(deadline.Sub(time.Now()).Seconds())
+ }
+
+ for k, vals := range req.Header {
+ for _, val := range vals {
+ freq.Header = append(freq.Header, &pb.URLFetchRequest_Header{
+ Key: proto.String(k),
+ Value: proto.String(val),
+ })
+ }
+ }
+ if methodAcceptsRequestBody[req.Method] && req.Body != nil {
+ // Avoid a []byte copy if req.Body has a Bytes method.
+ switch b := req.Body.(type) {
+ case interface {
+ Bytes() []byte
+ }:
+ freq.Payload = b.Bytes()
+ default:
+ freq.Payload, err = ioutil.ReadAll(req.Body)
+ if err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ fres := &pb.URLFetchResponse{}
+ if err := internal.Call(t.Context, "urlfetch", "Fetch", freq, fres); err != nil {
+ return nil, err
+ }
+
+ res = &http.Response{}
+ res.StatusCode = int(*fres.StatusCode)
+ res.Status = fmt.Sprintf("%d %s", res.StatusCode, statusCodeToText(res.StatusCode))
+ res.Header = make(http.Header)
+ res.Request = req
+
+ // Faked:
+ res.ProtoMajor = 1
+ res.ProtoMinor = 1
+ res.Proto = "HTTP/1.1"
+ res.Close = true
+
+ for _, h := range fres.Header {
+ hkey := http.CanonicalHeaderKey(*h.Key)
+ hval := *h.Value
+ if hkey == "Content-Length" {
+ // Will get filled in below for all but HEAD requests.
+ if req.Method == "HEAD" {
+ res.ContentLength, _ = strconv.ParseInt(hval, 10, 64)
+ }
+ continue
+ }
+ res.Header.Add(hkey, hval)
+ }
+
+ if req.Method != "HEAD" {
+ res.ContentLength = int64(len(fres.Content))
+ }
+
+ truncated := fres.GetContentWasTruncated()
+ res.Body = &bodyReader{content: fres.Content, truncated: truncated}
+ return
+}
+
+func init() {
+ internal.RegisterErrorCodeMap("urlfetch", pb.URLFetchServiceError_ErrorCode_name)
+ internal.RegisterTimeoutErrorCode("urlfetch", int32(pb.URLFetchServiceError_DEADLINE_EXCEEDED))
+}
diff --git a/vendor/google.golang.org/appengine/user/user.go b/vendor/google.golang.org/appengine/user/user.go
index 622b61020..eb76f59b7 100644
--- a/vendor/google.golang.org/appengine/user/user.go
+++ b/vendor/google.golang.org/appengine/user/user.go
@@ -3,7 +3,7 @@
// license that can be found in the LICENSE file.
// Package user provides a client for App Engine's user authentication service.
-package user
+package user // import "google.golang.org/appengine/user"
import (
"strings"
diff --git a/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go
index a65fbe7e7..f393bb661 100644
--- a/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go
+++ b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go
@@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: grpc/binarylog/grpc_binarylog_v1/binarylog.proto
-package grpc_binarylog_v1
+package grpc_binarylog_v1 // import "google.golang.org/grpc/binarylog/grpc_binarylog_v1"
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
diff --git a/vendor/google.golang.org/grpc/codegen.sh b/vendor/google.golang.org/grpc/codegen.sh
index 4cdc6ba7c..4cdc6ba7c 100755..100644
--- a/vendor/google.golang.org/grpc/codegen.sh
+++ b/vendor/google.golang.org/grpc/codegen.sh
diff --git a/vendor/google.golang.org/grpc/codes/codes.go b/vendor/google.golang.org/grpc/codes/codes.go
index 93458ce79..02738839d 100644
--- a/vendor/google.golang.org/grpc/codes/codes.go
+++ b/vendor/google.golang.org/grpc/codes/codes.go
@@ -18,7 +18,7 @@
// Package codes defines the canonical error codes used by gRPC. It is
// consistent across various languages.
-package codes
+package codes // import "google.golang.org/grpc/codes"
import (
"fmt"
diff --git a/vendor/google.golang.org/grpc/credentials/credentials.go b/vendor/google.golang.org/grpc/credentials/credentials.go
index 4bee74ed9..845ce5d21 100644
--- a/vendor/google.golang.org/grpc/credentials/credentials.go
+++ b/vendor/google.golang.org/grpc/credentials/credentials.go
@@ -20,7 +20,7 @@
// which encapsulate all the state needed by a client to authenticate with a
// server and make various assertions, e.g., about the client's identity, role,
// or whether it is authorized to make a particular call.
-package credentials
+package credentials // import "google.golang.org/grpc/credentials"
import (
"context"
diff --git a/vendor/google.golang.org/grpc/doc.go b/vendor/google.golang.org/grpc/doc.go
index e153b2c39..187adbb11 100644
--- a/vendor/google.golang.org/grpc/doc.go
+++ b/vendor/google.golang.org/grpc/doc.go
@@ -21,4 +21,4 @@ Package grpc implements an RPC system called gRPC.
See grpc.io for more information about gRPC.
*/
-package grpc
+package grpc // import "google.golang.org/grpc"
diff --git a/vendor/google.golang.org/grpc/grpclog/grpclog.go b/vendor/google.golang.org/grpc/grpclog/grpclog.go
index 77e22a034..874ea6d98 100644
--- a/vendor/google.golang.org/grpc/grpclog/grpclog.go
+++ b/vendor/google.golang.org/grpc/grpclog/grpclog.go
@@ -24,7 +24,7 @@
// In the default logger,
// severity level can be set by environment variable GRPC_GO_LOG_SEVERITY_LEVEL,
// verbosity level can be set by GRPC_GO_LOG_VERBOSITY_LEVEL.
-package grpclog
+package grpclog // import "google.golang.org/grpc/grpclog"
import "os"
diff --git a/vendor/google.golang.org/grpc/install_gae.sh b/vendor/google.golang.org/grpc/install_gae.sh
index 7c7bcada5..7c7bcada5 100755..100644
--- a/vendor/google.golang.org/grpc/install_gae.sh
+++ b/vendor/google.golang.org/grpc/install_gae.sh
diff --git a/vendor/google.golang.org/grpc/internal/binarylog/regenerate.sh b/vendor/google.golang.org/grpc/internal/binarylog/regenerate.sh
index 113d40cbe..113d40cbe 100755..100644
--- a/vendor/google.golang.org/grpc/internal/binarylog/regenerate.sh
+++ b/vendor/google.golang.org/grpc/internal/binarylog/regenerate.sh
diff --git a/vendor/google.golang.org/grpc/metadata/metadata.go b/vendor/google.golang.org/grpc/metadata/metadata.go
index b4c0b375e..cf6d1b947 100644
--- a/vendor/google.golang.org/grpc/metadata/metadata.go
+++ b/vendor/google.golang.org/grpc/metadata/metadata.go
@@ -19,7 +19,7 @@
// Package metadata define the structure of the metadata supported by gRPC library.
// Please refer to https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md
// for more information about custom-metadata.
-package metadata
+package metadata // import "google.golang.org/grpc/metadata"
import (
"context"
diff --git a/vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go b/vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go
deleted file mode 100644
index 14aa6f20a..000000000
--- a/vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- *
- * Copyright 2018 gRPC authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-// Package dns implements a dns resolver to be installed as the default resolver
-// in grpc.
-//
-// Deprecated: this package is imported by grpc and should not need to be
-// imported directly by users.
-package dns
-
-import (
- "google.golang.org/grpc/internal/resolver/dns"
- "google.golang.org/grpc/resolver"
-)
-
-// NewBuilder creates a dnsBuilder which is used to factory DNS resolvers.
-//
-// Deprecated: import grpc and use resolver.Get("dns") instead.
-func NewBuilder() resolver.Builder {
- return dns.NewBuilder()
-}
diff --git a/vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go b/vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go
deleted file mode 100644
index c8a0c3daa..000000000
--- a/vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- *
- * Copyright 2017 gRPC authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-// Package passthrough implements a pass-through resolver. It sends the target
-// name without scheme back to gRPC as resolved address.
-//
-// Deprecated: this package is imported by grpc and should not need to be
-// imported directly by users.
-package passthrough
-
-import _ "google.golang.org/grpc/internal/resolver/passthrough" // import for side effects after package was moved
diff --git a/vendor/google.golang.org/grpc/stats/stats.go b/vendor/google.golang.org/grpc/stats/stats.go
index 171547570..9e22c393f 100644
--- a/vendor/google.golang.org/grpc/stats/stats.go
+++ b/vendor/google.golang.org/grpc/stats/stats.go
@@ -21,7 +21,7 @@
// Package stats is for collecting and reporting various network and RPC stats.
// This package is for monitoring purpose only. All fields are read-only.
// All APIs are experimental.
-package stats
+package stats // import "google.golang.org/grpc/stats"
import (
"context"
diff --git a/vendor/google.golang.org/grpc/vet.sh b/vendor/google.golang.org/grpc/vet.sh
index 0e7370727..0e7370727 100755..100644
--- a/vendor/google.golang.org/grpc/vet.sh
+++ b/vendor/google.golang.org/grpc/vet.sh
diff --git a/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY b/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY
index 623d85e85..7c241b71a 100644
--- a/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY
+++ b/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY
@@ -75,7 +75,7 @@ resulting binaries. These projects are:
limitations under the License.
-* github.com/kisielk/gotool - https://github.com/kisielk/gotool
+* github.com/kisielk/gotool – https://github.com/kisielk/gotool
Copyright (c) 2013 Kamil Kisiel <kamil@kamilkisiel.net>
@@ -224,61 +224,3 @@ resulting binaries. These projects are:
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-* gogrep - https://github.com/mvdan/gogrep
-
- Copyright (c) 2017, Daniel Martí. All rights reserved.
-
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
-
- * Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
- copyright notice, this list of conditions and the following disclaimer
- in the documentation and/or other materials provided with the
- distribution.
- * Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-* gosmith - https://github.com/dvyukov/gosmith
-
- Copyright (c) 2014 Dmitry Vyukov. All rights reserved.
-
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
-
- * Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
- copyright notice, this list of conditions and the following disclaimer
- in the documentation and/or other materials provided with the
- distribution.
- * The name of Dmitry Vyukov may be used to endorse or promote
- products derived from this software without specific prior written permission.
-
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go b/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go
index 4621f32ed..4f504dc39 100644
--- a/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go
+++ b/vendor/honnef.co/go/tools/cmd/staticcheck/staticcheck.go
@@ -1,5 +1,5 @@
// staticcheck analyses Go code and makes it better.
-package main
+package main // import "honnef.co/go/tools/cmd/staticcheck"
import (
"log"
diff --git a/vendor/honnef.co/go/tools/code/code.go b/vendor/honnef.co/go/tools/code/code.go
deleted file mode 100644
index 6f4df8b9a..000000000
--- a/vendor/honnef.co/go/tools/code/code.go
+++ /dev/null
@@ -1,481 +0,0 @@
-// Package code answers structural and type questions about Go code.
-package code
-
-import (
- "flag"
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
- "strings"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/go/ast/inspector"
- "honnef.co/go/tools/facts"
- "honnef.co/go/tools/go/types/typeutil"
- "honnef.co/go/tools/ir"
- "honnef.co/go/tools/lint"
-)
-
-type Positioner interface {
- Pos() token.Pos
-}
-
-func CallName(call *ir.CallCommon) string {
- if call.IsInvoke() {
- return ""
- }
- switch v := call.Value.(type) {
- case *ir.Function:
- fn, ok := v.Object().(*types.Func)
- if !ok {
- return ""
- }
- return lint.FuncName(fn)
- case *ir.Builtin:
- return v.Name()
- }
- return ""
-}
-
-func IsCallTo(call *ir.CallCommon, name string) bool { return CallName(call) == name }
-
-func IsCallToAny(call *ir.CallCommon, names ...string) bool {
- q := CallName(call)
- for _, name := range names {
- if q == name {
- return true
- }
- }
- return false
-}
-
-func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name }
-
-func FilterDebug(instr []ir.Instruction) []ir.Instruction {
- var out []ir.Instruction
- for _, ins := range instr {
- if _, ok := ins.(*ir.DebugRef); !ok {
- out = append(out, ins)
- }
- }
- return out
-}
-
-func IsExample(fn *ir.Function) bool {
- if !strings.HasPrefix(fn.Name(), "Example") {
- return false
- }
- f := fn.Prog.Fset.File(fn.Pos())
- if f == nil {
- return false
- }
- return strings.HasSuffix(f.Name(), "_test.go")
-}
-
-func IsPointerLike(T types.Type) bool {
- switch T := T.Underlying().(type) {
- case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer:
- return true
- case *types.Basic:
- return T.Kind() == types.UnsafePointer
- }
- return false
-}
-
-func IsIdent(expr ast.Expr, ident string) bool {
- id, ok := expr.(*ast.Ident)
- return ok && id.Name == ident
-}
-
-// isBlank returns whether id is the blank identifier "_".
-// If id == nil, the answer is false.
-func IsBlank(id ast.Expr) bool {
- ident, _ := id.(*ast.Ident)
- return ident != nil && ident.Name == "_"
-}
-
-func IsIntLiteral(expr ast.Expr, literal string) bool {
- lit, ok := expr.(*ast.BasicLit)
- return ok && lit.Kind == token.INT && lit.Value == literal
-}
-
-// Deprecated: use IsIntLiteral instead
-func IsZero(expr ast.Expr) bool {
- return IsIntLiteral(expr, "0")
-}
-
-func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool {
- return IsType(pass.TypesInfo.TypeOf(expr), name)
-}
-
-func IsInTest(pass *analysis.Pass, node Positioner) bool {
- // FIXME(dh): this doesn't work for global variables with
- // initializers
- f := pass.Fset.File(node.Pos())
- return f != nil && strings.HasSuffix(f.Name(), "_test.go")
-}
-
-// IsMain reports whether the package being processed is a package
-// main.
-func IsMain(pass *analysis.Pass) bool {
- return pass.Pkg.Name() == "main"
-}
-
-// IsMainLike reports whether the package being processed is a
-// main-like package. A main-like package is a package that is
-// package main, or that is intended to be used by a tool framework
-// such as cobra to implement a command.
-//
-// Note that this function errs on the side of false positives; it may
-// return true for packages that aren't main-like. IsMainLike is
-// intended for analyses that wish to suppress diagnostics for
-// main-like packages to avoid false positives.
-func IsMainLike(pass *analysis.Pass) bool {
- if pass.Pkg.Name() == "main" {
- return true
- }
- for _, imp := range pass.Pkg.Imports() {
- if imp.Path() == "github.com/spf13/cobra" {
- return true
- }
- }
- return false
-}
-
-func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string {
- info := pass.TypesInfo
- sel := info.Selections[expr]
- if sel == nil {
- if x, ok := expr.X.(*ast.Ident); ok {
- pkg, ok := info.ObjectOf(x).(*types.PkgName)
- if !ok {
- // This shouldn't happen
- return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
- }
- return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
- }
- panic(fmt.Sprintf("unsupported selector: %v", expr))
- }
- return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
-}
-
-func IsNil(pass *analysis.Pass, expr ast.Expr) bool {
- return pass.TypesInfo.Types[expr].IsNil()
-}
-
-func BoolConst(pass *analysis.Pass, expr ast.Expr) bool {
- val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
- return constant.BoolVal(val)
-}
-
-func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool {
- // We explicitly don't support typed bools because more often than
- // not, custom bool types are used as binary enums and the
- // explicit comparison is desired.
-
- ident, ok := expr.(*ast.Ident)
- if !ok {
- return false
- }
- obj := pass.TypesInfo.ObjectOf(ident)
- c, ok := obj.(*types.Const)
- if !ok {
- return false
- }
- basic, ok := c.Type().(*types.Basic)
- if !ok {
- return false
- }
- if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool {
- return false
- }
- return true
-}
-
-func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) {
- tv := pass.TypesInfo.Types[expr]
- if tv.Value == nil {
- return 0, false
- }
- if tv.Value.Kind() != constant.Int {
- return 0, false
- }
- return constant.Int64Val(tv.Value)
-}
-
-func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) {
- val := pass.TypesInfo.Types[expr].Value
- if val == nil {
- return "", false
- }
- if val.Kind() != constant.String {
- return "", false
- }
- return constant.StringVal(val), true
-}
-
-// Dereference returns a pointer's element type; otherwise it returns
-// T.
-func Dereference(T types.Type) types.Type {
- if p, ok := T.Underlying().(*types.Pointer); ok {
- return p.Elem()
- }
- return T
-}
-
-// DereferenceR returns a pointer's element type; otherwise it returns
-// T. If the element type is itself a pointer, DereferenceR will be
-// applied recursively.
-func DereferenceR(T types.Type) types.Type {
- if p, ok := T.Underlying().(*types.Pointer); ok {
- return DereferenceR(p.Elem())
- }
- return T
-}
-
-func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string {
- switch fun := astutil.Unparen(call.Fun).(type) {
- case *ast.SelectorExpr:
- fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func)
- if !ok {
- return ""
- }
- return lint.FuncName(fn)
- case *ast.Ident:
- obj := pass.TypesInfo.ObjectOf(fun)
- switch obj := obj.(type) {
- case *types.Func:
- return lint.FuncName(obj)
- case *types.Builtin:
- return obj.Name()
- default:
- return ""
- }
- default:
- return ""
- }
-}
-
-func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool {
- call, ok := node.(*ast.CallExpr)
- if !ok {
- return false
- }
- return CallNameAST(pass, call) == name
-}
-
-func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool {
- call, ok := node.(*ast.CallExpr)
- if !ok {
- return false
- }
- q := CallNameAST(pass, call)
- for _, name := range names {
- if q == name {
- return true
- }
- }
- return false
-}
-
-func Preamble(f *ast.File) string {
- cutoff := f.Package
- if f.Doc != nil {
- cutoff = f.Doc.Pos()
- }
- var out []string
- for _, cmt := range f.Comments {
- if cmt.Pos() >= cutoff {
- break
- }
- out = append(out, cmt.Text())
- }
- return strings.Join(out, "\n")
-}
-
-func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec {
- if len(specs) == 0 {
- return nil
- }
- groups := make([][]ast.Spec, 1)
- groups[0] = append(groups[0], specs[0])
-
- for _, spec := range specs[1:] {
- g := groups[len(groups)-1]
- if fset.PositionFor(spec.Pos(), false).Line-1 !=
- fset.PositionFor(g[len(g)-1].End(), false).Line {
-
- groups = append(groups, nil)
- }
-
- groups[len(groups)-1] = append(groups[len(groups)-1], spec)
- }
-
- return groups
-}
-
-func IsObject(obj types.Object, name string) bool {
- var path string
- if pkg := obj.Pkg(); pkg != nil {
- path = pkg.Path() + "."
- }
- return path+obj.Name() == name
-}
-
-type Field struct {
- Var *types.Var
- Tag string
- Path []int
-}
-
-// FlattenFields recursively flattens T and embedded structs,
-// returning a list of fields. If multiple fields with the same name
-// exist, all will be returned.
-func FlattenFields(T *types.Struct) []Field {
- return flattenFields(T, nil, nil)
-}
-
-func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field {
- if seen == nil {
- seen = map[types.Type]bool{}
- }
- if seen[T] {
- return nil
- }
- seen[T] = true
- var out []Field
- for i := 0; i < T.NumFields(); i++ {
- field := T.Field(i)
- tag := T.Tag(i)
- np := append(path[:len(path):len(path)], i)
- if field.Anonymous() {
- if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok {
- out = append(out, flattenFields(s, np, seen)...)
- }
- } else {
- out = append(out, Field{field, tag, np})
- }
- }
- return out
-}
-
-func File(pass *analysis.Pass, node Positioner) *ast.File {
- m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File)
- return m[pass.Fset.File(node.Pos())]
-}
-
-// IsGenerated reports whether pos is in a generated file, It ignores
-// //line directives.
-func IsGenerated(pass *analysis.Pass, pos token.Pos) bool {
- _, ok := Generator(pass, pos)
- return ok
-}
-
-// Generator returns the generator that generated the file containing
-// pos. It ignores //line directives.
-func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) {
- file := pass.Fset.PositionFor(pos, false).Filename
- m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
- g, ok := m[file]
- return g, ok
-}
-
-// MayHaveSideEffects reports whether expr may have side effects. If
-// the purity argument is nil, this function implements a purely
-// syntactic check, meaning that any function call may have side
-// effects, regardless of the called function's body. Otherwise,
-// purity will be consulted to determine the purity of function calls.
-func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity facts.PurityResult) bool {
- switch expr := expr.(type) {
- case *ast.BadExpr:
- return true
- case *ast.Ellipsis:
- return MayHaveSideEffects(pass, expr.Elt, purity)
- case *ast.FuncLit:
- // the literal itself cannot have side ffects, only calling it
- // might, which is handled by CallExpr.
- return false
- case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
- // types cannot have side effects
- return false
- case *ast.BasicLit:
- return false
- case *ast.BinaryExpr:
- return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Y, purity)
- case *ast.CallExpr:
- if purity == nil {
- return true
- }
- switch obj := typeutil.Callee(pass.TypesInfo, expr).(type) {
- case *types.Func:
- if _, ok := purity[obj]; !ok {
- return true
- }
- case *types.Builtin:
- switch obj.Name() {
- case "len", "cap":
- default:
- return true
- }
- default:
- return true
- }
- for _, arg := range expr.Args {
- if MayHaveSideEffects(pass, arg, purity) {
- return true
- }
- }
- return false
- case *ast.CompositeLit:
- if MayHaveSideEffects(pass, expr.Type, purity) {
- return true
- }
- for _, elt := range expr.Elts {
- if MayHaveSideEffects(pass, elt, purity) {
- return true
- }
- }
- return false
- case *ast.Ident:
- return false
- case *ast.IndexExpr:
- return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Index, purity)
- case *ast.KeyValueExpr:
- return MayHaveSideEffects(pass, expr.Key, purity) || MayHaveSideEffects(pass, expr.Value, purity)
- case *ast.SelectorExpr:
- return MayHaveSideEffects(pass, expr.X, purity)
- case *ast.SliceExpr:
- return MayHaveSideEffects(pass, expr.X, purity) ||
- MayHaveSideEffects(pass, expr.Low, purity) ||
- MayHaveSideEffects(pass, expr.High, purity) ||
- MayHaveSideEffects(pass, expr.Max, purity)
- case *ast.StarExpr:
- return MayHaveSideEffects(pass, expr.X, purity)
- case *ast.TypeAssertExpr:
- return MayHaveSideEffects(pass, expr.X, purity)
- case *ast.UnaryExpr:
- if MayHaveSideEffects(pass, expr.X, purity) {
- return true
- }
- return expr.Op == token.ARROW
- case *ast.ParenExpr:
- return MayHaveSideEffects(pass, expr.X, purity)
- case nil:
- return false
- default:
- panic(fmt.Sprintf("internal error: unhandled type %T", expr))
- }
-}
-
-func IsGoVersion(pass *analysis.Pass, minor int) bool {
- version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int)
- return version >= minor
-}
-
-func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) {
- pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn)
-}
diff --git a/vendor/honnef.co/go/tools/config/config.go b/vendor/honnef.co/go/tools/config/config.go
index 55115371b..c22093a6d 100644
--- a/vendor/honnef.co/go/tools/config/config.go
+++ b/vendor/honnef.co/go/tools/config/config.go
@@ -3,8 +3,6 @@ package config
import (
"bytes"
"fmt"
- "go/ast"
- "go/token"
"os"
"path/filepath"
"reflect"
@@ -14,57 +12,38 @@ import (
"golang.org/x/tools/go/analysis"
)
-// Dir looks at a list of absolute file names, which should make up a
-// single package, and returns the path of the directory that may
-// contain a staticcheck.conf file. It returns the empty string if no
-// such directory could be determined, for example because all files
-// were located in Go's build cache.
-func Dir(files []string) string {
- if len(files) == 0 {
- return ""
- }
- cache, err := os.UserCacheDir()
- if err != nil {
- cache = ""
- }
- var path string
- for _, p := range files {
- // FIXME(dh): using strings.HasPrefix isn't technically
- // correct, but it should be good enough for now.
- if cache != "" && strings.HasPrefix(p, cache) {
- // File in the build cache of the standard Go build system
- continue
- }
- path = p
- break
- }
-
- if path == "" {
- // The package only consists of generated files.
- return ""
- }
-
- dir := filepath.Dir(path)
- return dir
-}
-
-func dirAST(files []*ast.File, fset *token.FileSet) string {
- names := make([]string, len(files))
- for i, f := range files {
- names[i] = fset.PositionFor(f.Pos(), true).Filename
- }
- return Dir(names)
-}
-
var Analyzer = &analysis.Analyzer{
Name: "config",
Doc: "loads configuration for the current package tree",
Run: func(pass *analysis.Pass) (interface{}, error) {
- dir := dirAST(pass.Files, pass.Fset)
- if dir == "" {
+ if len(pass.Files) == 0 {
+ cfg := DefaultConfig
+ return &cfg, nil
+ }
+ cache, err := os.UserCacheDir()
+ if err != nil {
+ cache = ""
+ }
+ var path string
+ for _, f := range pass.Files {
+ p := pass.Fset.PositionFor(f.Pos(), true).Filename
+ // FIXME(dh): using strings.HasPrefix isn't technically
+ // correct, but it should be good enough for now.
+ if cache != "" && strings.HasPrefix(p, cache) {
+ // File in the build cache of the standard Go build system
+ continue
+ }
+ path = p
+ break
+ }
+
+ if path == "" {
+ // The package only consists of generated files.
cfg := DefaultConfig
return &cfg, nil
}
+
+ dir := filepath.Dir(path)
cfg, err := Load(dir)
if err != nil {
return nil, fmt.Errorf("error loading staticcheck.conf: %s", err)
@@ -157,7 +136,7 @@ func (c Config) String() string {
}
var DefaultConfig = Config{
- Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022"},
+ Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016"},
Initialisms: []string{
"ACL", "API", "ASCII", "CPU", "CSS", "DNS",
"EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID",
@@ -165,20 +144,20 @@ var DefaultConfig = Config{
"SMTP", "SQL", "SSH", "TCP", "TLS", "TTL",
"UDP", "UI", "GID", "UID", "UUID", "URI",
"URL", "UTF8", "VM", "XML", "XMPP", "XSRF",
- "XSS", "SIP", "RTP", "AMQP", "DB", "TS",
+ "XSS", "SIP", "RTP",
},
DotImportWhitelist: []string{},
HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"},
}
-const ConfigName = "staticcheck.conf"
+const configName = "staticcheck.conf"
func parseConfigs(dir string) ([]Config, error) {
var out []Config
// TODO(dh): consider stopping at the GOPATH/module boundary
for dir != "" {
- f, err := os.Open(filepath.Join(dir, ConfigName))
+ f, err := os.Open(filepath.Join(dir, configName))
if os.IsNotExist(err) {
ndir := filepath.Dir(dir)
if ndir == dir {
diff --git a/vendor/honnef.co/go/tools/edit/edit.go b/vendor/honnef.co/go/tools/edit/edit.go
deleted file mode 100644
index f4cfba234..000000000
--- a/vendor/honnef.co/go/tools/edit/edit.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package edit
-
-import (
- "bytes"
- "go/ast"
- "go/format"
- "go/token"
-
- "golang.org/x/tools/go/analysis"
- "honnef.co/go/tools/pattern"
-)
-
-type Ranger interface {
- Pos() token.Pos
- End() token.Pos
-}
-
-type Range [2]token.Pos
-
-func (r Range) Pos() token.Pos { return r[0] }
-func (r Range) End() token.Pos { return r[1] }
-
-func ReplaceWithString(fset *token.FileSet, old Ranger, new string) analysis.TextEdit {
- return analysis.TextEdit{
- Pos: old.Pos(),
- End: old.End(),
- NewText: []byte(new),
- }
-}
-
-func ReplaceWithNode(fset *token.FileSet, old Ranger, new ast.Node) analysis.TextEdit {
- buf := &bytes.Buffer{}
- if err := format.Node(buf, fset, new); err != nil {
- panic("internal error: " + err.Error())
- }
- return analysis.TextEdit{
- Pos: old.Pos(),
- End: old.End(),
- NewText: buf.Bytes(),
- }
-}
-
-func ReplaceWithPattern(pass *analysis.Pass, after pattern.Pattern, state pattern.State, node Ranger) analysis.TextEdit {
- r := pattern.NodeToAST(after.Root, state)
- buf := &bytes.Buffer{}
- format.Node(buf, pass.Fset, r)
- return analysis.TextEdit{
- Pos: node.Pos(),
- End: node.End(),
- NewText: buf.Bytes(),
- }
-}
-
-func Delete(old Ranger) analysis.TextEdit {
- return analysis.TextEdit{
- Pos: old.Pos(),
- End: old.End(),
- NewText: nil,
- }
-}
-
-func Fix(msg string, edits ...analysis.TextEdit) analysis.SuggestedFix {
- return analysis.SuggestedFix{
- Message: msg,
- TextEdits: edits,
- }
-}
diff --git a/vendor/honnef.co/go/tools/facts/generated.go b/vendor/honnef.co/go/tools/facts/generated.go
index 3e7aef110..1ed9563a3 100644
--- a/vendor/honnef.co/go/tools/facts/generated.go
+++ b/vendor/honnef.co/go/tools/facts/generated.go
@@ -55,10 +55,6 @@ func isGenerated(path string) (Generator, bool) {
if strings.HasPrefix(text, `by "stringer `) {
return Stringer, true
}
- if strings.HasPrefix(text, `by goyacc `) {
- return Goyacc, true
- }
-
return Unknown, true
}
if bytes.Equal(s, oldCgo) {
diff --git a/vendor/honnef.co/go/tools/facts/purity.go b/vendor/honnef.co/go/tools/facts/purity.go
index 099ee23e3..861ca4110 100644
--- a/vendor/honnef.co/go/tools/facts/purity.go
+++ b/vendor/honnef.co/go/tools/facts/purity.go
@@ -1,13 +1,14 @@
package facts
import (
+ "go/token"
"go/types"
"reflect"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/functions"
- "honnef.co/go/tools/internal/passes/buildir"
- "honnef.co/go/tools/ir"
+ "honnef.co/go/tools/internal/passes/buildssa"
+ "honnef.co/go/tools/ssa"
)
type IsPure struct{}
@@ -21,7 +22,7 @@ var Purity = &analysis.Analyzer{
Name: "fact_purity",
Doc: "Mark pure functions",
Run: purity,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
FactTypes: []analysis.Fact{(*IsPure)(nil)},
ResultType: reflect.TypeOf(PurityResult{}),
}
@@ -55,68 +56,65 @@ var pureStdlib = map[string]struct{}{
}
func purity(pass *analysis.Pass) (interface{}, error) {
- seen := map[*ir.Function]struct{}{}
- irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
- var check func(fn *ir.Function) (ret bool)
- check = func(fn *ir.Function) (ret bool) {
- if fn.Object() == nil {
+ seen := map[*ssa.Function]struct{}{}
+ ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
+ var check func(ssafn *ssa.Function) (ret bool)
+ check = func(ssafn *ssa.Function) (ret bool) {
+ if ssafn.Object() == nil {
// TODO(dh): support closures
return false
}
- if pass.ImportObjectFact(fn.Object(), new(IsPure)) {
+ if pass.ImportObjectFact(ssafn.Object(), new(IsPure)) {
return true
}
- if fn.Pkg != irpkg {
+ if ssafn.Pkg != ssapkg {
// Function is in another package but wasn't marked as
// pure, ergo it isn't pure
return false
}
// Break recursion
- if _, ok := seen[fn]; ok {
+ if _, ok := seen[ssafn]; ok {
return false
}
- seen[fn] = struct{}{}
+ seen[ssafn] = struct{}{}
defer func() {
if ret {
- pass.ExportObjectFact(fn.Object(), &IsPure{})
+ pass.ExportObjectFact(ssafn.Object(), &IsPure{})
}
}()
- if functions.IsStub(fn) {
+ if functions.IsStub(ssafn) {
return false
}
- if _, ok := pureStdlib[fn.Object().(*types.Func).FullName()]; ok {
+ if _, ok := pureStdlib[ssafn.Object().(*types.Func).FullName()]; ok {
return true
}
- if fn.Signature.Results().Len() == 0 {
+ if ssafn.Signature.Results().Len() == 0 {
// A function with no return values is empty or is doing some
// work we cannot see (for example because of build tags);
// don't consider it pure.
return false
}
- for _, param := range fn.Params {
- // TODO(dh): this may not be strictly correct. pure code
- // can, to an extent, operate on non-basic types.
+ for _, param := range ssafn.Params {
if _, ok := param.Type().Underlying().(*types.Basic); !ok {
return false
}
}
- // Don't consider external functions pure.
- if fn.Blocks == nil {
+ if ssafn.Blocks == nil {
return false
}
- checkCall := func(common *ir.CallCommon) bool {
+ checkCall := func(common *ssa.CallCommon) bool {
if common.IsInvoke() {
return false
}
- builtin, ok := common.Value.(*ir.Builtin)
+ builtin, ok := common.Value.(*ssa.Builtin)
if !ok {
- if common.StaticCallee() != fn {
+ if common.StaticCallee() != ssafn {
if common.StaticCallee() == nil {
return false
}
@@ -126,47 +124,47 @@ func purity(pass *analysis.Pass) (interface{}, error) {
}
} else {
switch builtin.Name() {
- case "len", "cap":
+ case "len", "cap", "make", "new":
default:
return false
}
}
return true
}
- for _, b := range fn.Blocks {
+ for _, b := range ssafn.Blocks {
for _, ins := range b.Instrs {
switch ins := ins.(type) {
- case *ir.Call:
+ case *ssa.Call:
if !checkCall(ins.Common()) {
return false
}
- case *ir.Defer:
+ case *ssa.Defer:
if !checkCall(&ins.Call) {
return false
}
- case *ir.Select:
+ case *ssa.Select:
return false
- case *ir.Send:
+ case *ssa.Send:
return false
- case *ir.Go:
+ case *ssa.Go:
return false
- case *ir.Panic:
+ case *ssa.Panic:
return false
- case *ir.Store:
+ case *ssa.Store:
return false
- case *ir.FieldAddr:
- return false
- case *ir.Alloc:
- return false
- case *ir.Load:
+ case *ssa.FieldAddr:
return false
+ case *ssa.UnOp:
+ if ins.Op == token.MUL || ins.Op == token.AND {
+ return false
+ }
}
}
}
return true
}
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- check(fn)
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ check(ssafn)
}
out := PurityResult{}
diff --git a/vendor/honnef.co/go/tools/functions/loops.go b/vendor/honnef.co/go/tools/functions/loops.go
index a8af70100..15877a2f9 100644
--- a/vendor/honnef.co/go/tools/functions/loops.go
+++ b/vendor/honnef.co/go/tools/functions/loops.go
@@ -1,10 +1,10 @@
package functions
-import "honnef.co/go/tools/ir"
+import "honnef.co/go/tools/ssa"
-type Loop struct{ *ir.BlockSet }
+type Loop struct{ ssa.BlockSet }
-func FindLoops(fn *ir.Function) []Loop {
+func FindLoops(fn *ssa.Function) []Loop {
if fn.Blocks == nil {
return nil
}
@@ -18,12 +18,12 @@ func FindLoops(fn *ir.Function) []Loop {
// n is a back-edge to h
// h is the loop header
if n == h {
- set := Loop{ir.NewBlockSet(len(fn.Blocks))}
+ set := Loop{}
set.Add(n)
sets = append(sets, set)
continue
}
- set := Loop{ir.NewBlockSet(len(fn.Blocks))}
+ set := Loop{}
set.Add(h)
set.Add(n)
for _, b := range allPredsBut(n, h, nil) {
@@ -35,7 +35,7 @@ func FindLoops(fn *ir.Function) []Loop {
return sets
}
-func allPredsBut(b, but *ir.BasicBlock, list []*ir.BasicBlock) []*ir.BasicBlock {
+func allPredsBut(b, but *ssa.BasicBlock, list []*ssa.BasicBlock) []*ssa.BasicBlock {
outer:
for _, pred := range b.Preds {
if pred == but {
diff --git a/vendor/honnef.co/go/tools/functions/pure.go b/vendor/honnef.co/go/tools/functions/pure.go
new file mode 100644
index 000000000..8bc558771
--- /dev/null
+++ b/vendor/honnef.co/go/tools/functions/pure.go
@@ -0,0 +1,46 @@
+package functions
+
+import (
+ "honnef.co/go/tools/ssa"
+)
+
+func filterDebug(instr []ssa.Instruction) []ssa.Instruction {
+ var out []ssa.Instruction
+ for _, ins := range instr {
+ if _, ok := ins.(*ssa.DebugRef); !ok {
+ out = append(out, ins)
+ }
+ }
+ return out
+}
+
+// IsStub reports whether a function is a stub. A function is
+// considered a stub if it has no instructions or exactly one
+// instruction, which must be either returning only constant values or
+// a panic.
+func IsStub(fn *ssa.Function) bool {
+ if len(fn.Blocks) == 0 {
+ return true
+ }
+ if len(fn.Blocks) > 1 {
+ return false
+ }
+ instrs := filterDebug(fn.Blocks[0].Instrs)
+ if len(instrs) != 1 {
+ return false
+ }
+
+ switch instrs[0].(type) {
+ case *ssa.Return:
+ // Since this is the only instruction, the return value must
+ // be a constant. We consider all constants as stubs, not just
+ // the zero value. This does not, unfortunately, cover zero
+ // initialised structs, as these cause additional
+ // instructions.
+ return true
+ case *ssa.Panic:
+ return true
+ default:
+ return false
+ }
+}
diff --git a/vendor/honnef.co/go/tools/functions/stub.go b/vendor/honnef.co/go/tools/functions/stub.go
deleted file mode 100644
index 4d5de10b8..000000000
--- a/vendor/honnef.co/go/tools/functions/stub.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package functions
-
-import (
- "honnef.co/go/tools/ir"
-)
-
-// IsStub reports whether a function is a stub. A function is
-// considered a stub if it has no instructions or if all it does is
-// return a constant value.
-func IsStub(fn *ir.Function) bool {
- for _, b := range fn.Blocks {
- for _, instr := range b.Instrs {
- switch instr.(type) {
- case *ir.Const:
- // const naturally has no side-effects
- case *ir.Panic:
- // panic is a stub if it only uses constants
- case *ir.Return:
- // return is a stub if it only uses constants
- case *ir.DebugRef:
- case *ir.Jump:
- // if there are no disallowed instructions, then we're
- // only jumping to the exit block (or possibly
- // somewhere else that's stubby?)
- default:
- // all other instructions are assumed to do actual work
- return false
- }
- }
- }
- return true
-}
diff --git a/vendor/honnef.co/go/tools/functions/terminates.go b/vendor/honnef.co/go/tools/functions/terminates.go
index c4984673f..3e9c3a23f 100644
--- a/vendor/honnef.co/go/tools/functions/terminates.go
+++ b/vendor/honnef.co/go/tools/functions/terminates.go
@@ -1,15 +1,11 @@
package functions
-import (
- "go/types"
-
- "honnef.co/go/tools/ir"
-)
+import "honnef.co/go/tools/ssa"
// Terminates reports whether fn is supposed to return, that is if it
// has at least one theoretic path that returns from the function.
// Explicit panics do not count as terminating.
-func Terminates(fn *ir.Function) bool {
+func Terminates(fn *ssa.Function) bool {
if fn.Blocks == nil {
// assuming that a function terminates is the conservative
// choice
@@ -17,53 +13,11 @@ func Terminates(fn *ir.Function) bool {
}
for _, block := range fn.Blocks {
- if _, ok := block.Control().(*ir.Return); ok {
- if len(block.Preds) == 0 {
- return true
- }
- for _, pred := range block.Preds {
- switch ctrl := pred.Control().(type) {
- case *ir.Panic:
- // explicit panics do not count as terminating
- case *ir.If:
- // Check if we got here by receiving from a closed
- // time.Tick channel – this cannot happen at
- // runtime and thus doesn't constitute termination
- iff := ctrl
- if !ok {
- return true
- }
- ex, ok := iff.Cond.(*ir.Extract)
- if !ok {
- return true
- }
- if ex.Index != 1 {
- return true
- }
- recv, ok := ex.Tuple.(*ir.Recv)
- if !ok {
- return true
- }
- call, ok := recv.Chan.(*ir.Call)
- if !ok {
- return true
- }
- fn, ok := call.Common().Value.(*ir.Function)
- if !ok {
- return true
- }
- fn2, ok := fn.Object().(*types.Func)
- if !ok {
- return true
- }
- if fn2.FullName() != "time.Tick" {
- return true
- }
- default:
- // we've reached the exit block
- return true
- }
- }
+ if len(block.Instrs) == 0 {
+ continue
+ }
+ if _, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return); ok {
+ return true
}
}
return false
diff --git a/vendor/honnef.co/go/tools/internal/cache/cache.go b/vendor/honnef.co/go/tools/internal/cache/cache.go
index 6b41811cf..2b33ca106 100644
--- a/vendor/honnef.co/go/tools/internal/cache/cache.go
+++ b/vendor/honnef.co/go/tools/internal/cache/cache.go
@@ -177,7 +177,7 @@ func (c *Cache) get(id ActionID) (Entry, error) {
i++
}
tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
- if err != nil || tm < 0 {
+ if err != nil || size < 0 {
return missing()
}
@@ -265,7 +265,7 @@ func (c *Cache) Trim() {
// We maintain in dir/trim.txt the time of the last completed cache trim.
// If the cache has been trimmed recently enough, do nothing.
// This is the common case.
- data, _ := renameio.ReadFile(filepath.Join(c.dir, "trim.txt"))
+ data, _ := ioutil.ReadFile(filepath.Join(c.dir, "trim.txt"))
t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64)
if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval {
return
@@ -282,7 +282,7 @@ func (c *Cache) Trim() {
// Ignore errors from here: if we don't write the complete timestamp, the
// cache will appear older than it is, and we'll trim it again next time.
- renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666)
+ renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())))
}
// trimSubdir trims a single cache subdirectory.
@@ -326,8 +326,7 @@ func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify
// in verify mode we are double-checking that the cache entries
// are entirely reproducible. As just noted, this may be unrealistic
// in some cases but the check is also useful for shaking out real bugs.
- entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
-
+ entry := []byte(fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()))
if verify && allowVerify {
old, err := c.get(id)
if err == nil && (old.OutputID != out || old.Size != size) {
@@ -337,28 +336,7 @@ func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify
}
}
file := c.fileName(id, "a")
-
- // Copy file to cache directory.
- mode := os.O_WRONLY | os.O_CREATE
- f, err := os.OpenFile(file, mode, 0666)
- if err != nil {
- return err
- }
- _, err = f.WriteString(entry)
- if err == nil {
- // Truncate the file only *after* writing it.
- // (This should be a no-op, but truncate just in case of previous corruption.)
- //
- // This differs from ioutil.WriteFile, which truncates to 0 *before* writing
- // via os.O_TRUNC. Truncating only after writing ensures that a second write
- // of the same content to the same file is idempotent, and does not — even
- // temporarily! — undo the effect of the first write.
- err = f.Truncate(int64(len(entry)))
- }
- if closeErr := f.Close(); err == nil {
- err = closeErr
- }
- if err != nil {
+ if err := ioutil.WriteFile(file, entry, 0666); err != nil {
// TODO(bcmills): This Remove potentially races with another go command writing to file.
// Can we eliminate it?
os.Remove(file)
diff --git a/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go b/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go
deleted file mode 100644
index 394697702..000000000
--- a/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go
+++ /dev/null
@@ -1,113 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package buildir defines an Analyzer that constructs the IR
-// of an error-free package and returns the set of all
-// functions within it. It does not report any diagnostics itself but
-// may be used as an input to other analyzers.
-//
-// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE.
-package buildir
-
-import (
- "go/ast"
- "go/types"
- "reflect"
-
- "golang.org/x/tools/go/analysis"
- "honnef.co/go/tools/ir"
-)
-
-type willExit struct{}
-type willUnwind struct{}
-
-func (*willExit) AFact() {}
-func (*willUnwind) AFact() {}
-
-var Analyzer = &analysis.Analyzer{
- Name: "buildir",
- Doc: "build IR for later passes",
- Run: run,
- ResultType: reflect.TypeOf(new(IR)),
- FactTypes: []analysis.Fact{new(willExit), new(willUnwind)},
-}
-
-// IR provides intermediate representation for all the
-// non-blank source functions in the current package.
-type IR struct {
- Pkg *ir.Package
- SrcFuncs []*ir.Function
-}
-
-func run(pass *analysis.Pass) (interface{}, error) {
- // Plundered from ssautil.BuildPackage.
-
- // We must create a new Program for each Package because the
- // analysis API provides no place to hang a Program shared by
- // all Packages. Consequently, IR Packages and Functions do not
- // have a canonical representation across an analysis session of
- // multiple packages. This is unlikely to be a problem in
- // practice because the analysis API essentially forces all
- // packages to be analysed independently, so any given call to
- // Analysis.Run on a package will see only IR objects belonging
- // to a single Program.
-
- mode := ir.GlobalDebug
-
- prog := ir.NewProgram(pass.Fset, mode)
-
- // Create IR packages for all imports.
- // Order is not significant.
- created := make(map[*types.Package]bool)
- var createAll func(pkgs []*types.Package)
- createAll = func(pkgs []*types.Package) {
- for _, p := range pkgs {
- if !created[p] {
- created[p] = true
- irpkg := prog.CreatePackage(p, nil, nil, true)
- for _, fn := range irpkg.Functions {
- if ast.IsExported(fn.Name()) {
- var exit willExit
- var unwind willUnwind
- if pass.ImportObjectFact(fn.Object(), &exit) {
- fn.WillExit = true
- }
- if pass.ImportObjectFact(fn.Object(), &unwind) {
- fn.WillUnwind = true
- }
- }
- }
- createAll(p.Imports())
- }
- }
- }
- createAll(pass.Pkg.Imports())
-
- // Create and build the primary package.
- irpkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false)
- irpkg.Build()
-
- // Compute list of source functions, including literals,
- // in source order.
- var addAnons func(f *ir.Function)
- funcs := make([]*ir.Function, len(irpkg.Functions))
- copy(funcs, irpkg.Functions)
- addAnons = func(f *ir.Function) {
- for _, anon := range f.AnonFuncs {
- funcs = append(funcs, anon)
- addAnons(anon)
- }
- }
- for _, fn := range irpkg.Functions {
- addAnons(fn)
- if fn.WillExit {
- pass.ExportObjectFact(fn.Object(), new(willExit))
- }
- if fn.WillUnwind {
- pass.ExportObjectFact(fn.Object(), new(willUnwind))
- }
- }
-
- return &IR{Pkg: irpkg, SrcFuncs: funcs}, nil
-}
diff --git a/vendor/honnef.co/go/tools/internal/passes/buildssa/buildssa.go b/vendor/honnef.co/go/tools/internal/passes/buildssa/buildssa.go
new file mode 100644
index 000000000..fde918d12
--- /dev/null
+++ b/vendor/honnef.co/go/tools/internal/passes/buildssa/buildssa.go
@@ -0,0 +1,116 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package buildssa defines an Analyzer that constructs the SSA
+// representation of an error-free package and returns the set of all
+// functions within it. It does not report any diagnostics itself but
+// may be used as an input to other analyzers.
+//
+// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE.
+package buildssa
+
+import (
+ "go/ast"
+ "go/types"
+ "reflect"
+
+ "golang.org/x/tools/go/analysis"
+ "honnef.co/go/tools/ssa"
+)
+
+var Analyzer = &analysis.Analyzer{
+ Name: "buildssa",
+ Doc: "build SSA-form IR for later passes",
+ Run: run,
+ ResultType: reflect.TypeOf(new(SSA)),
+}
+
+// SSA provides SSA-form intermediate representation for all the
+// non-blank source functions in the current package.
+type SSA struct {
+ Pkg *ssa.Package
+ SrcFuncs []*ssa.Function
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+ // Plundered from ssautil.BuildPackage.
+
+ // We must create a new Program for each Package because the
+ // analysis API provides no place to hang a Program shared by
+ // all Packages. Consequently, SSA Packages and Functions do not
+ // have a canonical representation across an analysis session of
+ // multiple packages. This is unlikely to be a problem in
+ // practice because the analysis API essentially forces all
+ // packages to be analysed independently, so any given call to
+ // Analysis.Run on a package will see only SSA objects belonging
+ // to a single Program.
+
+ mode := ssa.GlobalDebug
+
+ prog := ssa.NewProgram(pass.Fset, mode)
+
+ // Create SSA packages for all imports.
+ // Order is not significant.
+ created := make(map[*types.Package]bool)
+ var createAll func(pkgs []*types.Package)
+ createAll = func(pkgs []*types.Package) {
+ for _, p := range pkgs {
+ if !created[p] {
+ created[p] = true
+ prog.CreatePackage(p, nil, nil, true)
+ createAll(p.Imports())
+ }
+ }
+ }
+ createAll(pass.Pkg.Imports())
+
+ // Create and build the primary package.
+ ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false)
+ ssapkg.Build()
+
+ // Compute list of source functions, including literals,
+ // in source order.
+ var funcs []*ssa.Function
+ var addAnons func(f *ssa.Function)
+ addAnons = func(f *ssa.Function) {
+ funcs = append(funcs, f)
+ for _, anon := range f.AnonFuncs {
+ addAnons(anon)
+ }
+ }
+ addAnons(ssapkg.Members["init"].(*ssa.Function))
+ for _, f := range pass.Files {
+ for _, decl := range f.Decls {
+ if fdecl, ok := decl.(*ast.FuncDecl); ok {
+
+ // SSA will not build a Function
+ // for a FuncDecl named blank.
+ // That's arguably too strict but
+ // relaxing it would break uniqueness of
+ // names of package members.
+ if fdecl.Name.Name == "_" {
+ continue
+ }
+
+ // (init functions have distinct Func
+ // objects named "init" and distinct
+ // ssa.Functions named "init#1", ...)
+
+ fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func)
+ if fn == nil {
+ panic(fn)
+ }
+
+ f := ssapkg.Prog.FuncValue(fn)
+ if f == nil {
+ panic(fn)
+ }
+
+ addAnons(f)
+ }
+ }
+ }
+
+ return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil
+}
diff --git a/vendor/honnef.co/go/tools/internal/renameio/renameio.go b/vendor/honnef.co/go/tools/internal/renameio/renameio.go
index a279d1a1e..3f3f1708f 100644
--- a/vendor/honnef.co/go/tools/internal/renameio/renameio.go
+++ b/vendor/honnef.co/go/tools/internal/renameio/renameio.go
@@ -8,15 +8,15 @@ package renameio
import (
"bytes"
"io"
- "math/rand"
+ "io/ioutil"
"os"
"path/filepath"
- "strconv"
-
- "honnef.co/go/tools/internal/robustio"
+ "runtime"
+ "strings"
+ "time"
)
-const patternSuffix = ".tmp"
+const patternSuffix = "*.tmp"
// Pattern returns a glob pattern that matches the unrenamed temporary files
// created when writing to filename.
@@ -29,14 +29,14 @@ func Pattern(filename string) string {
// final name.
//
// That ensures that the final location, if it exists, is always a complete file.
-func WriteFile(filename string, data []byte, perm os.FileMode) (err error) {
- return WriteToFile(filename, bytes.NewReader(data), perm)
+func WriteFile(filename string, data []byte) (err error) {
+ return WriteToFile(filename, bytes.NewReader(data))
}
// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader
// instead of a slice.
-func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) {
- f, err := tempFile(filepath.Dir(filename), filepath.Base(filename), perm)
+func WriteToFile(filename string, data io.Reader) (err error) {
+ f, err := ioutil.TempFile(filepath.Dir(filename), filepath.Base(filename)+patternSuffix)
if err != nil {
return err
}
@@ -63,31 +63,21 @@ func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error)
return err
}
- return robustio.Rename(f.Name(), filename)
-}
+ var start time.Time
+ for {
+ err := os.Rename(f.Name(), filename)
+ if err == nil || runtime.GOOS != "windows" || !strings.HasSuffix(err.Error(), "Access is denied.") {
+ return err
+ }
-// tempFile creates a new temporary file with given permission bits.
-func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) {
- for i := 0; i < 10000; i++ {
- name := filepath.Join(dir, prefix+strconv.Itoa(rand.Intn(1000000000))+patternSuffix)
- f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm)
- if os.IsExist(err) {
- continue
+ // Windows seems to occasionally trigger spurious "Access is denied" errors
+ // here (see golang.org/issue/31247). We're not sure why. It's probably
+ // worth a little extra latency to avoid propagating the spurious errors.
+ if start.IsZero() {
+ start = time.Now()
+ } else if time.Since(start) >= 500*time.Millisecond {
+ return err
}
- break
+ time.Sleep(5 * time.Millisecond)
}
- return
-}
-
-// ReadFile is like ioutil.ReadFile, but on Windows retries spurious errors that
-// may occur if the file is concurrently replaced.
-//
-// Errors are classified heuristically and retries are bounded, so even this
-// function may occasionally return a spurious error on Windows.
-// If so, the error will likely wrap one of:
-// - syscall.ERROR_ACCESS_DENIED
-// - syscall.ERROR_FILE_NOT_FOUND
-// - internal/syscall/windows.ERROR_SHARING_VIOLATION
-func ReadFile(filename string) ([]byte, error) {
- return robustio.ReadFile(filename)
}
diff --git a/vendor/honnef.co/go/tools/internal/robustio/robustio.go b/vendor/honnef.co/go/tools/internal/robustio/robustio.go
deleted file mode 100644
index 76e47ad1f..000000000
--- a/vendor/honnef.co/go/tools/internal/robustio/robustio.go
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package robustio wraps I/O functions that are prone to failure on Windows,
-// transparently retrying errors up to an arbitrary timeout.
-//
-// Errors are classified heuristically and retries are bounded, so the functions
-// in this package do not completely eliminate spurious errors. However, they do
-// significantly reduce the rate of failure in practice.
-//
-// If so, the error will likely wrap one of:
-// The functions in this package do not completely eliminate spurious errors,
-// but substantially reduce their rate of occurrence in practice.
-package robustio
-
-// Rename is like os.Rename, but on Windows retries errors that may occur if the
-// file is concurrently read or overwritten.
-//
-// (See golang.org/issue/31247 and golang.org/issue/32188.)
-func Rename(oldpath, newpath string) error {
- return rename(oldpath, newpath)
-}
-
-// ReadFile is like ioutil.ReadFile, but on Windows retries errors that may
-// occur if the file is concurrently replaced.
-//
-// (See golang.org/issue/31247 and golang.org/issue/32188.)
-func ReadFile(filename string) ([]byte, error) {
- return readFile(filename)
-}
-
-// RemoveAll is like os.RemoveAll, but on Windows retries errors that may occur
-// if an executable file in the directory has recently been executed.
-//
-// (See golang.org/issue/19491.)
-func RemoveAll(path string) error {
- return removeAll(path)
-}
-
-// IsEphemeralError reports whether err is one of the errors that the functions
-// in this package attempt to mitigate.
-//
-// Errors considered ephemeral include:
-// - syscall.ERROR_ACCESS_DENIED
-// - syscall.ERROR_FILE_NOT_FOUND
-// - internal/syscall/windows.ERROR_SHARING_VIOLATION
-//
-// This set may be expanded in the future; programs must not rely on the
-// non-ephemerality of any given error.
-func IsEphemeralError(err error) bool {
- return isEphemeralError(err)
-}
diff --git a/vendor/honnef.co/go/tools/internal/robustio/robustio_other.go b/vendor/honnef.co/go/tools/internal/robustio/robustio_other.go
deleted file mode 100644
index 91ca56cb8..000000000
--- a/vendor/honnef.co/go/tools/internal/robustio/robustio_other.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//+build !windows
-
-package robustio
-
-import (
- "io/ioutil"
- "os"
-)
-
-func rename(oldpath, newpath string) error {
- return os.Rename(oldpath, newpath)
-}
-
-func readFile(filename string) ([]byte, error) {
- return ioutil.ReadFile(filename)
-}
-
-func removeAll(path string) error {
- return os.RemoveAll(path)
-}
-
-func isEphemeralError(err error) bool {
- return false
-}
diff --git a/vendor/honnef.co/go/tools/internal/robustio/robustio_windows.go b/vendor/honnef.co/go/tools/internal/robustio/robustio_windows.go
deleted file mode 100644
index c57e09528..000000000
--- a/vendor/honnef.co/go/tools/internal/robustio/robustio_windows.go
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package robustio
-
-import (
- "io/ioutil"
- "math/rand"
- "os"
- "syscall"
- "time"
-)
-
-const arbitraryTimeout = 500 * time.Millisecond
-
-const ERROR_SHARING_VIOLATION = 32
-
-// retry retries ephemeral errors from f up to an arbitrary timeout
-// to work around spurious filesystem errors on Windows
-func retry(f func() (err error, mayRetry bool)) error {
- var (
- bestErr error
- lowestErrno syscall.Errno
- start time.Time
- nextSleep time.Duration = 1 * time.Millisecond
- )
- for {
- err, mayRetry := f()
- if err == nil || !mayRetry {
- return err
- }
-
- if errno, ok := err.(syscall.Errno); ok && (lowestErrno == 0 || errno < lowestErrno) {
- bestErr = err
- lowestErrno = errno
- } else if bestErr == nil {
- bestErr = err
- }
-
- if start.IsZero() {
- start = time.Now()
- } else if d := time.Since(start) + nextSleep; d >= arbitraryTimeout {
- break
- }
- time.Sleep(nextSleep)
- nextSleep += time.Duration(rand.Int63n(int64(nextSleep)))
- }
-
- return bestErr
-}
-
-// rename is like os.Rename, but retries ephemeral errors.
-//
-// It wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with
-// MOVEFILE_REPLACE_EXISTING.
-//
-// Windows also provides a different system call, ReplaceFile,
-// that provides similar semantics, but perhaps preserves more metadata. (The
-// documentation on the differences between the two is very sparse.)
-//
-// Empirical error rates with MoveFileEx are lower under modest concurrency, so
-// for now we're sticking with what the os package already provides.
-func rename(oldpath, newpath string) (err error) {
- return retry(func() (err error, mayRetry bool) {
- err = os.Rename(oldpath, newpath)
- return err, isEphemeralError(err)
- })
-}
-
-// readFile is like ioutil.ReadFile, but retries ephemeral errors.
-func readFile(filename string) ([]byte, error) {
- var b []byte
- err := retry(func() (err error, mayRetry bool) {
- b, err = ioutil.ReadFile(filename)
-
- // Unlike in rename, we do not retry ERROR_FILE_NOT_FOUND here: it can occur
- // as a spurious error, but the file may also genuinely not exist, so the
- // increase in robustness is probably not worth the extra latency.
-
- return err, isEphemeralError(err) && err != syscall.ERROR_FILE_NOT_FOUND
- })
- return b, err
-}
-
-func removeAll(path string) error {
- return retry(func() (err error, mayRetry bool) {
- err = os.RemoveAll(path)
- return err, isEphemeralError(err)
- })
-}
-
-// isEphemeralError returns true if err may be resolved by waiting.
-func isEphemeralError(err error) bool {
- switch werr := err.(type) {
- case *os.PathError:
- err = werr.Err
- case *os.LinkError:
- err = werr.Err
- case *os.SyscallError:
- err = werr.Err
- }
- if errno, ok := err.(syscall.Errno); ok {
- switch errno {
- case syscall.ERROR_ACCESS_DENIED,
- syscall.ERROR_FILE_NOT_FOUND,
- ERROR_SHARING_VIOLATION:
- return true
- }
- }
- return false
-}
diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go
index e9abf0d89..affee6607 100644
--- a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go
+++ b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go
@@ -5,24 +5,23 @@ import (
"go/types"
"golang.org/x/tools/go/analysis"
- "honnef.co/go/tools/code"
- "honnef.co/go/tools/internal/passes/buildir"
- "honnef.co/go/tools/ir"
+ "honnef.co/go/tools/internal/passes/buildssa"
. "honnef.co/go/tools/lint/lintdsl"
+ "honnef.co/go/tools/ssa"
)
func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- cb := func(node ast.Node) bool {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ fn := func(node ast.Node) bool {
rng, ok := node.(*ast.RangeStmt)
- if !ok || !code.IsBlank(rng.Key) {
+ if !ok || !IsBlank(rng.Key) {
return true
}
- v, _ := fn.ValueForExpr(rng.X)
+ v, _ := ssafn.ValueForExpr(rng.X)
// Check that we're converting from string to []rune
- val, _ := v.(*ir.Convert)
+ val, _ := v.(*ssa.Convert)
if val == nil {
return true
}
@@ -48,13 +47,13 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
// Expect two refs: one for obtaining the length of the slice,
// one for accessing the elements
- if len(code.FilterDebug(*refs)) != 2 {
+ if len(FilterDebug(*refs)) != 2 {
// TODO(dh): right now, we check that only one place
// refers to our slice. This will miss cases such as
// ranging over the slice twice. Ideally, we'd ensure that
// the slice is only used for ranging over (without
// accessing the key), but that is harder to do because in
- // IR form, ranging over a slice looks like an ordinary
+ // SSA form, ranging over a slice looks like an ordinary
// loop with index increments and slice accesses. We'd
// have to look at the associated AST node to check that
// it's a range statement.
@@ -65,7 +64,7 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
return true
}
- Inspect(fn.Source(), cb)
+ Inspect(ssafn.Syntax(), fn)
}
return nil, nil
}
diff --git a/vendor/honnef.co/go/tools/ir/exits.go b/vendor/honnef.co/go/tools/ir/exits.go
deleted file mode 100644
index 10cda7bb6..000000000
--- a/vendor/honnef.co/go/tools/ir/exits.go
+++ /dev/null
@@ -1,271 +0,0 @@
-package ir
-
-import (
- "go/types"
-)
-
-func (b *builder) buildExits(fn *Function) {
- if obj := fn.Object(); obj != nil {
- switch obj.Pkg().Path() {
- case "runtime":
- switch obj.Name() {
- case "exit":
- fn.WillExit = true
- return
- case "throw":
- fn.WillExit = true
- return
- case "Goexit":
- fn.WillUnwind = true
- return
- }
- case "github.com/sirupsen/logrus":
- switch obj.(*types.Func).FullName() {
- case "(*github.com/sirupsen/logrus.Logger).Exit":
- // Technically, this method does not unconditionally exit
- // the process. It dynamically calls a function stored in
- // the logger. If the function is nil, it defaults to
- // os.Exit.
- //
- // The main intent of this method is to terminate the
- // process, and that's what the vast majority of people
- // will use it for. We'll happily accept some false
- // negatives to avoid a lot of false positives.
- fn.WillExit = true
- return
- case "(*github.com/sirupsen/logrus.Logger).Panic",
- "(*github.com/sirupsen/logrus.Logger).Panicf",
- "(*github.com/sirupsen/logrus.Logger).Panicln":
-
- // These methods will always panic, but that's not
- // statically known from the code alone, because they
- // take a detour through the generic Log methods.
- fn.WillUnwind = true
- return
- case "(*github.com/sirupsen/logrus.Entry).Panicf",
- "(*github.com/sirupsen/logrus.Entry).Panicln":
-
- // Entry.Panic has an explicit panic, but Panicf and
- // Panicln do not, relying fully on the generic Log
- // method.
- fn.WillUnwind = true
- return
- case "(*github.com/sirupsen/logrus.Logger).Log",
- "(*github.com/sirupsen/logrus.Logger).Logf",
- "(*github.com/sirupsen/logrus.Logger).Logln":
- // TODO(dh): we cannot handle these case. Whether they
- // exit or unwind depends on the level, which is set
- // via the first argument. We don't currently support
- // call-site-specific exit information.
- }
- }
- }
-
- buildDomTree(fn)
-
- isRecoverCall := func(instr Instruction) bool {
- if instr, ok := instr.(*Call); ok {
- if builtin, ok := instr.Call.Value.(*Builtin); ok {
- if builtin.Name() == "recover" {
- return true
- }
- }
- }
- return false
- }
-
- // All panics branch to the exit block, which means that if every
- // possible path through the function panics, then all
- // predecessors of the exit block must panic.
- willPanic := true
- for _, pred := range fn.Exit.Preds {
- if _, ok := pred.Control().(*Panic); !ok {
- willPanic = false
- }
- }
- if willPanic {
- recovers := false
- recoverLoop:
- for _, u := range fn.Blocks {
- for _, instr := range u.Instrs {
- if instr, ok := instr.(*Defer); ok {
- call := instr.Call.StaticCallee()
- if call == nil {
- // not a static call, so we can't be sure the
- // deferred call isn't calling recover
- recovers = true
- break recoverLoop
- }
- if len(call.Blocks) == 0 {
- // external function, we don't know what's
- // happening inside it
- //
- // TODO(dh): this includes functions from
- // imported packages, due to how go/analysis
- // works. We could introduce another fact,
- // like we've done for exiting and unwinding,
- // but it doesn't seem worth it. Virtually all
- // uses of recover will be in closures.
- recovers = true
- break recoverLoop
- }
- for _, y := range call.Blocks {
- for _, instr2 := range y.Instrs {
- if isRecoverCall(instr2) {
- recovers = true
- break recoverLoop
- }
- }
- }
- }
- }
- }
- if !recovers {
- fn.WillUnwind = true
- return
- }
- }
-
- // TODO(dh): don't check that any specific call dominates the exit
- // block. instead, check that all calls combined cover every
- // possible path through the function.
- exits := NewBlockSet(len(fn.Blocks))
- unwinds := NewBlockSet(len(fn.Blocks))
- for _, u := range fn.Blocks {
- for _, instr := range u.Instrs {
- if instr, ok := instr.(CallInstruction); ok {
- switch instr.(type) {
- case *Defer, *Call:
- default:
- continue
- }
- if instr.Common().IsInvoke() {
- // give up
- return
- }
- var call *Function
- switch instr.Common().Value.(type) {
- case *Function, *MakeClosure:
- call = instr.Common().StaticCallee()
- case *Builtin:
- // the only builtins that affect control flow are
- // panic and recover, and we've already handled
- // those
- continue
- default:
- // dynamic dispatch
- return
- }
- // buildFunction is idempotent. if we're part of a
- // (mutually) recursive call chain, then buildFunction
- // will immediately return, and fn.WillExit will be false.
- if call.Package() == fn.Package() {
- b.buildFunction(call)
- }
- dom := u.Dominates(fn.Exit)
- if call.WillExit {
- if dom {
- fn.WillExit = true
- return
- }
- exits.Add(u)
- } else if call.WillUnwind {
- if dom {
- fn.WillUnwind = true
- return
- }
- unwinds.Add(u)
- }
- }
- }
- }
-
- // depth-first search trying to find a path to the exit block that
- // doesn't cross any of the blacklisted blocks
- seen := NewBlockSet(len(fn.Blocks))
- var findPath func(root *BasicBlock, bl *BlockSet) bool
- findPath = func(root *BasicBlock, bl *BlockSet) bool {
- if root == fn.Exit {
- return true
- }
- if seen.Has(root) {
- return false
- }
- if bl.Has(root) {
- return false
- }
- seen.Add(root)
- for _, succ := range root.Succs {
- if findPath(succ, bl) {
- return true
- }
- }
- return false
- }
-
- if exits.Num() > 0 {
- if !findPath(fn.Blocks[0], exits) {
- fn.WillExit = true
- return
- }
- }
- if unwinds.Num() > 0 {
- seen.Clear()
- if !findPath(fn.Blocks[0], unwinds) {
- fn.WillUnwind = true
- return
- }
- }
-}
-
-func (b *builder) addUnreachables(fn *Function) {
- for _, bb := range fn.Blocks {
- for i, instr := range bb.Instrs {
- if instr, ok := instr.(*Call); ok {
- var call *Function
- switch v := instr.Common().Value.(type) {
- case *Function:
- call = v
- case *MakeClosure:
- call = v.Fn.(*Function)
- }
- if call == nil {
- continue
- }
- if call.Package() == fn.Package() {
- // make sure we have information on all functions in this package
- b.buildFunction(call)
- }
- if call.WillExit {
- // This call will cause the process to terminate.
- // Remove remaining instructions in the block and
- // replace any control flow with Unreachable.
- for _, succ := range bb.Succs {
- succ.removePred(bb)
- }
- bb.Succs = bb.Succs[:0]
-
- bb.Instrs = bb.Instrs[:i+1]
- bb.emit(new(Unreachable), instr.Source())
- addEdge(bb, fn.Exit)
- break
- } else if call.WillUnwind {
- // This call will cause the goroutine to terminate
- // and defers to run (i.e. a panic or
- // runtime.Goexit). Remove remaining instructions
- // in the block and replace any control flow with
- // an unconditional jump to the exit block.
- for _, succ := range bb.Succs {
- succ.removePred(bb)
- }
- bb.Succs = bb.Succs[:0]
-
- bb.Instrs = bb.Instrs[:i+1]
- bb.emit(new(Jump), instr.Source())
- addEdge(bb, fn.Exit)
- break
- }
- }
- }
- }
-}
diff --git a/vendor/honnef.co/go/tools/ir/html.go b/vendor/honnef.co/go/tools/ir/html.go
deleted file mode 100644
index 8570aa66d..000000000
--- a/vendor/honnef.co/go/tools/ir/html.go
+++ /dev/null
@@ -1,1130 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Copyright 2019 Dominik Honnef. All rights reserved.
-
-package ir
-
-import (
- "bytes"
- "fmt"
- "go/types"
- "html"
- "io"
- "log"
- "os"
- "os/exec"
- "path/filepath"
- "reflect"
- "sort"
- "strings"
-)
-
-func live(f *Function) []bool {
- max := 0
- var ops []*Value
-
- for _, b := range f.Blocks {
- for _, instr := range b.Instrs {
- if int(instr.ID()) > max {
- max = int(instr.ID())
- }
- }
- }
-
- out := make([]bool, max+1)
- var q []Node
- for _, b := range f.Blocks {
- for _, instr := range b.Instrs {
- switch instr.(type) {
- case *BlankStore, *Call, *ConstantSwitch, *Defer, *Go, *If, *Jump, *MapUpdate, *Next, *Panic, *Recv, *Return, *RunDefers, *Send, *Store, *Unreachable:
- out[instr.ID()] = true
- q = append(q, instr)
- }
- }
- }
-
- for len(q) > 0 {
- v := q[len(q)-1]
- q = q[:len(q)-1]
- for _, op := range v.Operands(ops) {
- if *op == nil {
- continue
- }
- if !out[(*op).ID()] {
- out[(*op).ID()] = true
- q = append(q, *op)
- }
- }
- }
-
- return out
-}
-
-type funcPrinter interface {
- startBlock(b *BasicBlock, reachable bool)
- endBlock(b *BasicBlock)
- value(v Node, live bool)
- startDepCycle()
- endDepCycle()
- named(n string, vals []Value)
-}
-
-func namedValues(f *Function) map[types.Object][]Value {
- names := map[types.Object][]Value{}
- for _, b := range f.Blocks {
- for _, instr := range b.Instrs {
- if instr, ok := instr.(*DebugRef); ok {
- if obj := instr.object; obj != nil {
- names[obj] = append(names[obj], instr.X)
- }
- }
- }
- }
- // XXX deduplicate values
- return names
-}
-
-func fprintFunc(p funcPrinter, f *Function) {
- // XXX does our IR form preserve unreachable blocks?
- // reachable, live := findlive(f)
-
- l := live(f)
- for _, b := range f.Blocks {
- // XXX
- // p.startBlock(b, reachable[b.Index])
- p.startBlock(b, true)
-
- end := len(b.Instrs) - 1
- if end < 0 {
- end = 0
- }
- for _, v := range b.Instrs[:end] {
- if _, ok := v.(*DebugRef); !ok {
- p.value(v, l[v.ID()])
- }
- }
- p.endBlock(b)
- }
-
- names := namedValues(f)
- keys := make([]types.Object, 0, len(names))
- for key := range names {
- keys = append(keys, key)
- }
- sort.Slice(keys, func(i, j int) bool {
- return keys[i].Pos() < keys[j].Pos()
- })
- for _, key := range keys {
- p.named(key.Name(), names[key])
- }
-}
-
-func opName(v Node) string {
- switch v := v.(type) {
- case *Call:
- if v.Common().IsInvoke() {
- return "Invoke"
- }
- return "Call"
- case *Alloc:
- if v.Heap {
- return "HeapAlloc"
- }
- return "StackAlloc"
- case *Select:
- if v.Blocking {
- return "SelectBlocking"
- }
- return "SelectNonBlocking"
- default:
- return reflect.ValueOf(v).Type().Elem().Name()
- }
-}
-
-type HTMLWriter struct {
- w io.WriteCloser
- path string
- dot *dotWriter
-}
-
-func NewHTMLWriter(path string, funcname, cfgMask string) *HTMLWriter {
- out, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
- if err != nil {
- log.Fatalf("%v", err)
- }
- pwd, err := os.Getwd()
- if err != nil {
- log.Fatalf("%v", err)
- }
- html := HTMLWriter{w: out, path: filepath.Join(pwd, path)}
- html.dot = newDotWriter()
- html.start(funcname)
- return &html
-}
-
-func (w *HTMLWriter) start(name string) {
- if w == nil {
- return
- }
- w.WriteString("<html>")
- w.WriteString(`<head>
-<meta http-equiv="Content-Type" content="text/html;charset=UTF-8">
-<style>
-
-body {
- font-size: 14px;
- font-family: Arial, sans-serif;
-}
-
-h1 {
- font-size: 18px;
- display: inline-block;
- margin: 0 1em .5em 0;
-}
-
-#helplink {
- display: inline-block;
-}
-
-#help {
- display: none;
-}
-
-.stats {
- font-size: 60%;
-}
-
-table {
- border: 1px solid black;
- table-layout: fixed;
- width: 300px;
-}
-
-th, td {
- border: 1px solid black;
- overflow: hidden;
- width: 400px;
- vertical-align: top;
- padding: 5px;
-}
-
-td > h2 {
- cursor: pointer;
- font-size: 120%;
-}
-
-td.collapsed {
- font-size: 12px;
- width: 12px;
- border: 0px;
- padding: 0;
- cursor: pointer;
- background: #fafafa;
-}
-
-td.collapsed div {
- -moz-transform: rotate(-90.0deg); /* FF3.5+ */
- -o-transform: rotate(-90.0deg); /* Opera 10.5 */
- -webkit-transform: rotate(-90.0deg); /* Saf3.1+, Chrome */
- filter: progid:DXImageTransform.Microsoft.BasicImage(rotation=0.083); /* IE6,IE7 */
- -ms-filter: "progid:DXImageTransform.Microsoft.BasicImage(rotation=0.083)"; /* IE8 */
- margin-top: 10.3em;
- margin-left: -10em;
- margin-right: -10em;
- text-align: right;
-}
-
-code, pre, .lines, .ast {
- font-family: Menlo, monospace;
- font-size: 12px;
-}
-
-pre {
- -moz-tab-size: 4;
- -o-tab-size: 4;
- tab-size: 4;
-}
-
-.allow-x-scroll {
- overflow-x: scroll;
-}
-
-.lines {
- float: left;
- overflow: hidden;
- text-align: right;
-}
-
-.lines div {
- padding-right: 10px;
- color: gray;
-}
-
-div.line-number {
- font-size: 12px;
-}
-
-.ast {
- white-space: nowrap;
-}
-
-td.ssa-prog {
- width: 600px;
- word-wrap: break-word;
-}
-
-li {
- list-style-type: none;
-}
-
-li.ssa-long-value {
- text-indent: -2em; /* indent wrapped lines */
-}
-
-li.ssa-value-list {
- display: inline;
-}
-
-li.ssa-start-block {
- padding: 0;
- margin: 0;
-}
-
-li.ssa-end-block {
- padding: 0;
- margin: 0;
-}
-
-ul.ssa-print-func {
- padding-left: 0;
-}
-
-li.ssa-start-block button {
- padding: 0 1em;
- margin: 0;
- border: none;
- display: inline;
- font-size: 14px;
- float: right;
-}
-
-button:hover {
- background-color: #eee;
- cursor: pointer;
-}
-
-dl.ssa-gen {
- padding-left: 0;
-}
-
-dt.ssa-prog-src {
- padding: 0;
- margin: 0;
- float: left;
- width: 4em;
-}
-
-dd.ssa-prog {
- padding: 0;
- margin-right: 0;
- margin-left: 4em;
-}
-
-.dead-value {
- color: gray;
-}
-
-.dead-block {
- opacity: 0.5;
-}
-
-.depcycle {
- font-style: italic;
-}
-
-.line-number {
- font-size: 11px;
-}
-
-.no-line-number {
- font-size: 11px;
- color: gray;
-}
-
-.zoom {
- position: absolute;
- float: left;
- white-space: nowrap;
- background-color: #eee;
-}
-
-.zoom a:link, .zoom a:visited {
- text-decoration: none;
- color: blue;
- font-size: 16px;
- padding: 4px 2px;
-}
-
-svg {
- cursor: default;
- outline: 1px solid #eee;
-}
-
-.highlight-aquamarine { background-color: aquamarine; }
-.highlight-coral { background-color: coral; }
-.highlight-lightpink { background-color: lightpink; }
-.highlight-lightsteelblue { background-color: lightsteelblue; }
-.highlight-palegreen { background-color: palegreen; }
-.highlight-skyblue { background-color: skyblue; }
-.highlight-lightgray { background-color: lightgray; }
-.highlight-yellow { background-color: yellow; }
-.highlight-lime { background-color: lime; }
-.highlight-khaki { background-color: khaki; }
-.highlight-aqua { background-color: aqua; }
-.highlight-salmon { background-color: salmon; }
-
-.outline-blue { outline: blue solid 2px; }
-.outline-red { outline: red solid 2px; }
-.outline-blueviolet { outline: blueviolet solid 2px; }
-.outline-darkolivegreen { outline: darkolivegreen solid 2px; }
-.outline-fuchsia { outline: fuchsia solid 2px; }
-.outline-sienna { outline: sienna solid 2px; }
-.outline-gold { outline: gold solid 2px; }
-.outline-orangered { outline: orangered solid 2px; }
-.outline-teal { outline: teal solid 2px; }
-.outline-maroon { outline: maroon solid 2px; }
-.outline-black { outline: black solid 2px; }
-
-ellipse.outline-blue { stroke-width: 2px; stroke: blue; }
-ellipse.outline-red { stroke-width: 2px; stroke: red; }
-ellipse.outline-blueviolet { stroke-width: 2px; stroke: blueviolet; }
-ellipse.outline-darkolivegreen { stroke-width: 2px; stroke: darkolivegreen; }
-ellipse.outline-fuchsia { stroke-width: 2px; stroke: fuchsia; }
-ellipse.outline-sienna { stroke-width: 2px; stroke: sienna; }
-ellipse.outline-gold { stroke-width: 2px; stroke: gold; }
-ellipse.outline-orangered { stroke-width: 2px; stroke: orangered; }
-ellipse.outline-teal { stroke-width: 2px; stroke: teal; }
-ellipse.outline-maroon { stroke-width: 2px; stroke: maroon; }
-ellipse.outline-black { stroke-width: 2px; stroke: black; }
-
-</style>
-
-<script type="text/javascript">
-// ordered list of all available highlight colors
-var highlights = [
- "highlight-aquamarine",
- "highlight-coral",
- "highlight-lightpink",
- "highlight-lightsteelblue",
- "highlight-palegreen",
- "highlight-skyblue",
- "highlight-lightgray",
- "highlight-yellow",
- "highlight-lime",
- "highlight-khaki",
- "highlight-aqua",
- "highlight-salmon"
-];
-
-// state: which value is highlighted this color?
-var highlighted = {};
-for (var i = 0; i < highlights.length; i++) {
- highlighted[highlights[i]] = "";
-}
-
-// ordered list of all available outline colors
-var outlines = [
- "outline-blue",
- "outline-red",
- "outline-blueviolet",
- "outline-darkolivegreen",
- "outline-fuchsia",
- "outline-sienna",
- "outline-gold",
- "outline-orangered",
- "outline-teal",
- "outline-maroon",
- "outline-black"
-];
-
-// state: which value is outlined this color?
-var outlined = {};
-for (var i = 0; i < outlines.length; i++) {
- outlined[outlines[i]] = "";
-}
-
-window.onload = function() {
- var ssaElemClicked = function(elem, event, selections, selected) {
- event.stopPropagation();
-
- // TODO: pushState with updated state and read it on page load,
- // so that state can survive across reloads
-
- // find all values with the same name
- var c = elem.classList.item(0);
- var x = document.getElementsByClassName(c);
-
- // if selected, remove selections from all of them
- // otherwise, attempt to add
-
- var remove = "";
- for (var i = 0; i < selections.length; i++) {
- var color = selections[i];
- if (selected[color] == c) {
- remove = color;
- break;
- }
- }
-
- if (remove != "") {
- for (var i = 0; i < x.length; i++) {
- x[i].classList.remove(remove);
- }
- selected[remove] = "";
- return;
- }
-
- // we're adding a selection
- // find first available color
- var avail = "";
- for (var i = 0; i < selections.length; i++) {
- var color = selections[i];
- if (selected[color] == "") {
- avail = color;
- break;
- }
- }
- if (avail == "") {
- alert("out of selection colors; go add more");
- return;
- }
-
- // set that as the selection
- for (var i = 0; i < x.length; i++) {
- x[i].classList.add(avail);
- }
- selected[avail] = c;
- };
-
- var ssaValueClicked = function(event) {
- ssaElemClicked(this, event, highlights, highlighted);
- };
-
- var ssaBlockClicked = function(event) {
- ssaElemClicked(this, event, outlines, outlined);
- };
-
- var ssavalues = document.getElementsByClassName("ssa-value");
- for (var i = 0; i < ssavalues.length; i++) {
- ssavalues[i].addEventListener('click', ssaValueClicked);
- }
-
- var ssalongvalues = document.getElementsByClassName("ssa-long-value");
- for (var i = 0; i < ssalongvalues.length; i++) {
- // don't attach listeners to li nodes, just the spans they contain
- if (ssalongvalues[i].nodeName == "SPAN") {
- ssalongvalues[i].addEventListener('click', ssaValueClicked);
- }
- }
-
- var ssablocks = document.getElementsByClassName("ssa-block");
- for (var i = 0; i < ssablocks.length; i++) {
- ssablocks[i].addEventListener('click', ssaBlockClicked);
- }
-
- var lines = document.getElementsByClassName("line-number");
- for (var i = 0; i < lines.length; i++) {
- lines[i].addEventListener('click', ssaValueClicked);
- }
-
- // Contains phase names which are expanded by default. Other columns are collapsed.
- var expandedDefault = [
- "start",
- "deadcode",
- "opt",
- "lower",
- "late deadcode",
- "regalloc",
- "genssa",
- ];
-
- function toggler(phase) {
- return function() {
- toggle_cell(phase+'-col');
- toggle_cell(phase+'-exp');
- };
- }
-
- function toggle_cell(id) {
- var e = document.getElementById(id);
- if (e.style.display == 'table-cell') {
- e.style.display = 'none';
- } else {
- e.style.display = 'table-cell';
- }
- }
-
- // Go through all columns and collapse needed phases.
- var td = document.getElementsByTagName("td");
- for (var i = 0; i < td.length; i++) {
- var id = td[i].id;
- var phase = id.substr(0, id.length-4);
- var show = expandedDefault.indexOf(phase) !== -1
- if (id.endsWith("-exp")) {
- var h2 = td[i].getElementsByTagName("h2");
- if (h2 && h2[0]) {
- h2[0].addEventListener('click', toggler(phase));
- }
- } else {
- td[i].addEventListener('click', toggler(phase));
- }
- if (id.endsWith("-col") && show || id.endsWith("-exp") && !show) {
- td[i].style.display = 'none';
- continue;
- }
- td[i].style.display = 'table-cell';
- }
-
- // find all svg block nodes, add their block classes
- var nodes = document.querySelectorAll('*[id^="graph_node_"]');
- for (var i = 0; i < nodes.length; i++) {
- var node = nodes[i];
- var name = node.id.toString();
- var block = name.substring(name.lastIndexOf("_")+1);
- node.classList.remove("node");
- node.classList.add(block);
- node.addEventListener('click', ssaBlockClicked);
- var ellipse = node.getElementsByTagName('ellipse')[0];
- ellipse.classList.add(block);
- ellipse.addEventListener('click', ssaBlockClicked);
- }
-
- // make big graphs smaller
- var targetScale = 0.5;
- var nodes = document.querySelectorAll('*[id^="svg_graph_"]');
- // TODO: Implement smarter auto-zoom using the viewBox attribute
- // and in case of big graphs set the width and height of the svg graph to
- // maximum allowed.
- for (var i = 0; i < nodes.length; i++) {
- var node = nodes[i];
- var name = node.id.toString();
- var phase = name.substring(name.lastIndexOf("_")+1);
- var gNode = document.getElementById("g_graph_"+phase);
- var scale = gNode.transform.baseVal.getItem(0).matrix.a;
- if (scale > targetScale) {
- node.width.baseVal.value *= targetScale / scale;
- node.height.baseVal.value *= targetScale / scale;
- }
- }
-};
-
-function toggle_visibility(id) {
- var e = document.getElementById(id);
- if (e.style.display == 'block') {
- e.style.display = 'none';
- } else {
- e.style.display = 'block';
- }
-}
-
-function hideBlock(el) {
- var es = el.parentNode.parentNode.getElementsByClassName("ssa-value-list");
- if (es.length===0)
- return;
- var e = es[0];
- if (e.style.display === 'block' || e.style.display === '') {
- e.style.display = 'none';
- el.innerHTML = '+';
- } else {
- e.style.display = 'block';
- el.innerHTML = '-';
- }
-}
-
-// TODO: scale the graph with the viewBox attribute.
-function graphReduce(id) {
- var node = document.getElementById(id);
- if (node) {
- node.width.baseVal.value *= 0.9;
- node.height.baseVal.value *= 0.9;
- }
- return false;
-}
-
-function graphEnlarge(id) {
- var node = document.getElementById(id);
- if (node) {
- node.width.baseVal.value *= 1.1;
- node.height.baseVal.value *= 1.1;
- }
- return false;
-}
-
-function makeDraggable(event) {
- var svg = event.target;
- if (window.PointerEvent) {
- svg.addEventListener('pointerdown', startDrag);
- svg.addEventListener('pointermove', drag);
- svg.addEventListener('pointerup', endDrag);
- svg.addEventListener('pointerleave', endDrag);
- } else {
- svg.addEventListener('mousedown', startDrag);
- svg.addEventListener('mousemove', drag);
- svg.addEventListener('mouseup', endDrag);
- svg.addEventListener('mouseleave', endDrag);
- }
-
- var point = svg.createSVGPoint();
- var isPointerDown = false;
- var pointerOrigin;
- var viewBox = svg.viewBox.baseVal;
-
- function getPointFromEvent (event) {
- point.x = event.clientX;
- point.y = event.clientY;
-
- // We get the current transformation matrix of the SVG and we inverse it
- var invertedSVGMatrix = svg.getScreenCTM().inverse();
- return point.matrixTransform(invertedSVGMatrix);
- }
-
- function startDrag(event) {
- isPointerDown = true;
- pointerOrigin = getPointFromEvent(event);
- }
-
- function drag(event) {
- if (!isPointerDown) {
- return;
- }
- event.preventDefault();
-
- var pointerPosition = getPointFromEvent(event);
- viewBox.x -= (pointerPosition.x - pointerOrigin.x);
- viewBox.y -= (pointerPosition.y - pointerOrigin.y);
- }
-
- function endDrag(event) {
- isPointerDown = false;
- }
-}</script>
-
-</head>`)
- w.WriteString("<body>")
- w.WriteString("<h1>")
- w.WriteString(html.EscapeString(name))
- w.WriteString("</h1>")
- w.WriteString(`
-<a href="#" onclick="toggle_visibility('help');return false;" id="helplink">help</a>
-<div id="help">
-
-<p>
-Click on a value or block to toggle highlighting of that value/block
-and its uses. (Values and blocks are highlighted by ID, and IDs of
-dead items may be reused, so not all highlights necessarily correspond
-to the clicked item.)
-</p>
-
-<p>
-Faded out values and blocks are dead code that has not been eliminated.
-</p>
-
-<p>
-Values printed in italics have a dependency cycle.
-</p>
-
-<p>
-<b>CFG</b>: Dashed edge is for unlikely branches. Blue color is for backward edges.
-Edge with a dot means that this edge follows the order in which blocks were laidout.
-</p>
-
-</div>
-`)
- w.WriteString("<table>")
- w.WriteString("<tr>")
-}
-
-func (w *HTMLWriter) Close() {
- if w == nil {
- return
- }
- io.WriteString(w.w, "</tr>")
- io.WriteString(w.w, "</table>")
- io.WriteString(w.w, "</body>")
- io.WriteString(w.w, "</html>")
- w.w.Close()
- fmt.Printf("dumped IR to %v\n", w.path)
-}
-
-// WriteFunc writes f in a column headed by title.
-// phase is used for collapsing columns and should be unique across the table.
-func (w *HTMLWriter) WriteFunc(phase, title string, f *Function) {
- if w == nil {
- return
- }
- w.WriteColumn(phase, title, "", funcHTML(f, phase, w.dot))
-}
-
-// WriteColumn writes raw HTML in a column headed by title.
-// It is intended for pre- and post-compilation log output.
-func (w *HTMLWriter) WriteColumn(phase, title, class, html string) {
- if w == nil {
- return
- }
- id := strings.Replace(phase, " ", "-", -1)
- // collapsed column
- w.Printf("<td id=\"%v-col\" class=\"collapsed\"><div>%v</div></td>", id, phase)
-
- if class == "" {
- w.Printf("<td id=\"%v-exp\">", id)
- } else {
- w.Printf("<td id=\"%v-exp\" class=\"%v\">", id, class)
- }
- w.WriteString("<h2>" + title + "</h2>")
- w.WriteString(html)
- w.WriteString("</td>")
-}
-
-func (w *HTMLWriter) Printf(msg string, v ...interface{}) {
- if _, err := fmt.Fprintf(w.w, msg, v...); err != nil {
- log.Fatalf("%v", err)
- }
-}
-
-func (w *HTMLWriter) WriteString(s string) {
- if _, err := io.WriteString(w.w, s); err != nil {
- log.Fatalf("%v", err)
- }
-}
-
-func valueHTML(v Node) string {
- if v == nil {
- return "&lt;nil&gt;"
- }
- // TODO: Using the value ID as the class ignores the fact
- // that value IDs get recycled and that some values
- // are transmuted into other values.
- class := fmt.Sprintf("t%d", v.ID())
- var label string
- switch v := v.(type) {
- case *Function:
- label = v.RelString(nil)
- case *Builtin:
- label = v.Name()
- default:
- label = class
- }
- return fmt.Sprintf("<span class=\"%s ssa-value\">%s</span>", class, label)
-}
-
-func valueLongHTML(v Node) string {
- // TODO: Any intra-value formatting?
- // I'm wary of adding too much visual noise,
- // but a little bit might be valuable.
- // We already have visual noise in the form of punctuation
- // maybe we could replace some of that with formatting.
- s := fmt.Sprintf("<span class=\"t%d ssa-long-value\">", v.ID())
-
- linenumber := "<span class=\"no-line-number\">(?)</span>"
- if v.Pos().IsValid() {
- line := v.Parent().Prog.Fset.Position(v.Pos()).Line
- linenumber = fmt.Sprintf("<span class=\"l%v line-number\">(%d)</span>", line, line)
- }
-
- s += fmt.Sprintf("%s %s = %s", valueHTML(v), linenumber, opName(v))
-
- if v, ok := v.(Value); ok {
- s += " &lt;" + html.EscapeString(v.Type().String()) + "&gt;"
- }
-
- switch v := v.(type) {
- case *Parameter:
- s += fmt.Sprintf(" {%s}", html.EscapeString(v.name))
- case *BinOp:
- s += fmt.Sprintf(" {%s}", html.EscapeString(v.Op.String()))
- case *UnOp:
- s += fmt.Sprintf(" {%s}", html.EscapeString(v.Op.String()))
- case *Extract:
- name := v.Tuple.Type().(*types.Tuple).At(v.Index).Name()
- s += fmt.Sprintf(" [%d] (%s)", v.Index, name)
- case *Field:
- st := v.X.Type().Underlying().(*types.Struct)
- // Be robust against a bad index.
- name := "?"
- if 0 <= v.Field && v.Field < st.NumFields() {
- name = st.Field(v.Field).Name()
- }
- s += fmt.Sprintf(" [%d] (%s)", v.Field, name)
- case *FieldAddr:
- st := deref(v.X.Type()).Underlying().(*types.Struct)
- // Be robust against a bad index.
- name := "?"
- if 0 <= v.Field && v.Field < st.NumFields() {
- name = st.Field(v.Field).Name()
- }
-
- s += fmt.Sprintf(" [%d] (%s)", v.Field, name)
- case *Recv:
- s += fmt.Sprintf(" {%t}", v.CommaOk)
- case *Call:
- if v.Common().IsInvoke() {
- s += fmt.Sprintf(" {%s}", html.EscapeString(v.Common().Method.FullName()))
- }
- case *Const:
- if v.Value == nil {
- s += " {&lt;nil&gt;}"
- } else {
- s += fmt.Sprintf(" {%s}", html.EscapeString(v.Value.String()))
- }
- case *Sigma:
- s += fmt.Sprintf(" [#%s]", v.From)
- }
- for _, a := range v.Operands(nil) {
- s += fmt.Sprintf(" %s", valueHTML(*a))
- }
- switch v := v.(type) {
- case *Alloc:
- s += fmt.Sprintf(" (%s)", v.Comment)
- case *Sigma:
- s += fmt.Sprintf(" (%s)", v.Comment)
- }
-
- // OPT(dh): we're calling namedValues many times on the same function.
- allNames := namedValues(v.Parent())
- var names []string
- for name, values := range allNames {
- for _, value := range values {
- if v == value {
- names = append(names, name.Name())
- break
- }
- }
- }
- if len(names) != 0 {
- s += " (" + strings.Join(names, ", ") + ")"
- }
-
- s += "</span>"
- return s
-}
-
-func blockHTML(b *BasicBlock) string {
- // TODO: Using the value ID as the class ignores the fact
- // that value IDs get recycled and that some values
- // are transmuted into other values.
- s := html.EscapeString(b.String())
- return fmt.Sprintf("<span class=\"%s ssa-block\">%s</span>", s, s)
-}
-
-func blockLongHTML(b *BasicBlock) string {
- var kind string
- var term Instruction
- if len(b.Instrs) > 0 {
- term = b.Control()
- kind = opName(term)
- }
- // TODO: improve this for HTML?
- s := fmt.Sprintf("<span class=\"b%d ssa-block\">%s</span>", b.Index, kind)
-
- if term != nil {
- ops := term.Operands(nil)
- if len(ops) > 0 {
- var ss []string
- for _, op := range ops {
- ss = append(ss, valueHTML(*op))
- }
- s += " " + strings.Join(ss, ", ")
- }
- }
- if len(b.Succs) > 0 {
- s += " &#8594;" // right arrow
- for _, c := range b.Succs {
- s += " " + blockHTML(c)
- }
- }
- return s
-}
-
-func funcHTML(f *Function, phase string, dot *dotWriter) string {
- buf := new(bytes.Buffer)
- if dot != nil {
- dot.writeFuncSVG(buf, phase, f)
- }
- fmt.Fprint(buf, "<code>")
- p := htmlFuncPrinter{w: buf}
- fprintFunc(p, f)
-
- // fprintFunc(&buf, f) // TODO: HTML, not text, <br /> for line breaks, etc.
- fmt.Fprint(buf, "</code>")
- return buf.String()
-}
-
-type htmlFuncPrinter struct {
- w io.Writer
-}
-
-func (p htmlFuncPrinter) startBlock(b *BasicBlock, reachable bool) {
- var dead string
- if !reachable {
- dead = "dead-block"
- }
- fmt.Fprintf(p.w, "<ul class=\"%s ssa-print-func %s\">", b, dead)
- fmt.Fprintf(p.w, "<li class=\"ssa-start-block\">%s:", blockHTML(b))
- if len(b.Preds) > 0 {
- io.WriteString(p.w, " &#8592;") // left arrow
- for _, pred := range b.Preds {
- fmt.Fprintf(p.w, " %s", blockHTML(pred))
- }
- }
- if len(b.Instrs) > 0 {
- io.WriteString(p.w, `<button onclick="hideBlock(this)">-</button>`)
- }
- io.WriteString(p.w, "</li>")
- if len(b.Instrs) > 0 { // start list of values
- io.WriteString(p.w, "<li class=\"ssa-value-list\">")
- io.WriteString(p.w, "<ul>")
- }
-}
-
-func (p htmlFuncPrinter) endBlock(b *BasicBlock) {
- if len(b.Instrs) > 0 { // end list of values
- io.WriteString(p.w, "</ul>")
- io.WriteString(p.w, "</li>")
- }
- io.WriteString(p.w, "<li class=\"ssa-end-block\">")
- fmt.Fprint(p.w, blockLongHTML(b))
- io.WriteString(p.w, "</li>")
- io.WriteString(p.w, "</ul>")
-}
-
-func (p htmlFuncPrinter) value(v Node, live bool) {
- var dead string
- if !live {
- dead = "dead-value"
- }
- fmt.Fprintf(p.w, "<li class=\"ssa-long-value %s\">", dead)
- fmt.Fprint(p.w, valueLongHTML(v))
- io.WriteString(p.w, "</li>")
-}
-
-func (p htmlFuncPrinter) startDepCycle() {
- fmt.Fprintln(p.w, "<span class=\"depcycle\">")
-}
-
-func (p htmlFuncPrinter) endDepCycle() {
- fmt.Fprintln(p.w, "</span>")
-}
-
-func (p htmlFuncPrinter) named(n string, vals []Value) {
- fmt.Fprintf(p.w, "<li>name %s: ", n)
- for _, val := range vals {
- fmt.Fprintf(p.w, "%s ", valueHTML(val))
- }
- fmt.Fprintf(p.w, "</li>")
-}
-
-type dotWriter struct {
- path string
- broken bool
-}
-
-// newDotWriter returns non-nil value when mask is valid.
-// dotWriter will generate SVGs only for the phases specified in the mask.
-// mask can contain following patterns and combinations of them:
-// * - all of them;
-// x-y - x through y, inclusive;
-// x,y - x and y, but not the passes between.
-func newDotWriter() *dotWriter {
- path, err := exec.LookPath("dot")
- if err != nil {
- fmt.Println(err)
- return nil
- }
- return &dotWriter{path: path}
-}
-
-func (d *dotWriter) writeFuncSVG(w io.Writer, phase string, f *Function) {
- if d.broken {
- return
- }
- cmd := exec.Command(d.path, "-Tsvg")
- pipe, err := cmd.StdinPipe()
- if err != nil {
- d.broken = true
- fmt.Println(err)
- return
- }
- buf := new(bytes.Buffer)
- cmd.Stdout = buf
- bufErr := new(bytes.Buffer)
- cmd.Stderr = bufErr
- err = cmd.Start()
- if err != nil {
- d.broken = true
- fmt.Println(err)
- return
- }
- fmt.Fprint(pipe, `digraph "" { margin=0; size="4,40"; ranksep=.2; `)
- id := strings.Replace(phase, " ", "-", -1)
- fmt.Fprintf(pipe, `id="g_graph_%s";`, id)
- fmt.Fprintf(pipe, `node [style=filled,fillcolor=white,fontsize=16,fontname="Menlo,Times,serif",margin="0.01,0.03"];`)
- fmt.Fprintf(pipe, `edge [fontsize=16,fontname="Menlo,Times,serif"];`)
- for _, b := range f.Blocks {
- layout := ""
- fmt.Fprintf(pipe, `%v [label="%v%s\n%v",id="graph_node_%v_%v"];`, b, b, layout, b.Control().String(), id, b)
- }
- indexOf := make([]int, len(f.Blocks))
- for i, b := range f.Blocks {
- indexOf[b.Index] = i
- }
-
- // XXX
- /*
- ponums := make([]int32, len(f.Blocks))
- _ = postorderWithNumbering(f, ponums)
- isBackEdge := func(from, to int) bool {
- return ponums[from] <= ponums[to]
- }
- */
- isBackEdge := func(from, to int) bool { return false }
-
- for _, b := range f.Blocks {
- for i, s := range b.Succs {
- style := "solid"
- color := "black"
- arrow := "vee"
- if isBackEdge(b.Index, s.Index) {
- color = "blue"
- }
- fmt.Fprintf(pipe, `%v -> %v [label=" %d ",style="%s",color="%s",arrowhead="%s"];`, b, s, i, style, color, arrow)
- }
- }
- fmt.Fprint(pipe, "}")
- pipe.Close()
- err = cmd.Wait()
- if err != nil {
- d.broken = true
- fmt.Printf("dot: %v\n%v\n", err, bufErr.String())
- return
- }
-
- svgID := "svg_graph_" + id
- fmt.Fprintf(w, `<div class="zoom"><button onclick="return graphReduce('%s');">-</button> <button onclick="return graphEnlarge('%s');">+</button></div>`, svgID, svgID)
- // For now, an awful hack: edit the html as it passes through
- // our fingers, finding '<svg ' and injecting needed attributes after it.
- err = d.copyUntil(w, buf, `<svg `)
- if err != nil {
- fmt.Printf("injecting attributes: %v\n", err)
- return
- }
- fmt.Fprintf(w, ` id="%s" onload="makeDraggable(evt)" width="100%%" `, svgID)
- io.Copy(w, buf)
-}
-
-func (d *dotWriter) copyUntil(w io.Writer, buf *bytes.Buffer, sep string) error {
- i := bytes.Index(buf.Bytes(), []byte(sep))
- if i == -1 {
- return fmt.Errorf("couldn't find dot sep %q", sep)
- }
- _, err := io.CopyN(w, buf, int64(i+len(sep)))
- return err
-}
diff --git a/vendor/honnef.co/go/tools/ir/irutil/load.go b/vendor/honnef.co/go/tools/ir/irutil/load.go
deleted file mode 100644
index a62df49ea..000000000
--- a/vendor/honnef.co/go/tools/ir/irutil/load.go
+++ /dev/null
@@ -1,183 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package irutil
-
-// This file defines utility functions for constructing programs in IR form.
-
-import (
- "go/ast"
- "go/token"
- "go/types"
-
- "golang.org/x/tools/go/loader"
- "golang.org/x/tools/go/packages"
- "honnef.co/go/tools/ir"
-)
-
-type Options struct {
- // Which function, if any, to print in HTML form
- PrintFunc string
-}
-
-// Packages creates an IR program for a set of packages.
-//
-// The packages must have been loaded from source syntax using the
-// golang.org/x/tools/go/packages.Load function in LoadSyntax or
-// LoadAllSyntax mode.
-//
-// Packages creates an IR package for each well-typed package in the
-// initial list, plus all their dependencies. The resulting list of
-// packages corresponds to the list of initial packages, and may contain
-// a nil if IR code could not be constructed for the corresponding initial
-// package due to type errors.
-//
-// Code for bodies of functions is not built until Build is called on
-// the resulting Program. IR code is constructed only for the initial
-// packages with well-typed syntax trees.
-//
-// The mode parameter controls diagnostics and checking during IR construction.
-//
-func Packages(initial []*packages.Package, mode ir.BuilderMode, opts *Options) (*ir.Program, []*ir.Package) {
- return doPackages(initial, mode, false, opts)
-}
-
-// AllPackages creates an IR program for a set of packages plus all
-// their dependencies.
-//
-// The packages must have been loaded from source syntax using the
-// golang.org/x/tools/go/packages.Load function in LoadAllSyntax mode.
-//
-// AllPackages creates an IR package for each well-typed package in the
-// initial list, plus all their dependencies. The resulting list of
-// packages corresponds to the list of initial packages, and may contain
-// a nil if IR code could not be constructed for the corresponding
-// initial package due to type errors.
-//
-// Code for bodies of functions is not built until Build is called on
-// the resulting Program. IR code is constructed for all packages with
-// well-typed syntax trees.
-//
-// The mode parameter controls diagnostics and checking during IR construction.
-//
-func AllPackages(initial []*packages.Package, mode ir.BuilderMode, opts *Options) (*ir.Program, []*ir.Package) {
- return doPackages(initial, mode, true, opts)
-}
-
-func doPackages(initial []*packages.Package, mode ir.BuilderMode, deps bool, opts *Options) (*ir.Program, []*ir.Package) {
-
- var fset *token.FileSet
- if len(initial) > 0 {
- fset = initial[0].Fset
- }
-
- prog := ir.NewProgram(fset, mode)
- if opts != nil {
- prog.PrintFunc = opts.PrintFunc
- }
-
- isInitial := make(map[*packages.Package]bool, len(initial))
- for _, p := range initial {
- isInitial[p] = true
- }
-
- irmap := make(map[*packages.Package]*ir.Package)
- packages.Visit(initial, nil, func(p *packages.Package) {
- if p.Types != nil && !p.IllTyped {
- var files []*ast.File
- if deps || isInitial[p] {
- files = p.Syntax
- }
- irmap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true)
- }
- })
-
- var irpkgs []*ir.Package
- for _, p := range initial {
- irpkgs = append(irpkgs, irmap[p]) // may be nil
- }
- return prog, irpkgs
-}
-
-// CreateProgram returns a new program in IR form, given a program
-// loaded from source. An IR package is created for each transitively
-// error-free package of lprog.
-//
-// Code for bodies of functions is not built until Build is called
-// on the result.
-//
-// The mode parameter controls diagnostics and checking during IR construction.
-//
-// Deprecated: use golang.org/x/tools/go/packages and the Packages
-// function instead; see ir.ExampleLoadPackages.
-//
-func CreateProgram(lprog *loader.Program, mode ir.BuilderMode) *ir.Program {
- prog := ir.NewProgram(lprog.Fset, mode)
-
- for _, info := range lprog.AllPackages {
- if info.TransitivelyErrorFree {
- prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
- }
- }
-
- return prog
-}
-
-// BuildPackage builds an IR program with IR for a single package.
-//
-// It populates pkg by type-checking the specified file ASTs. All
-// dependencies are loaded using the importer specified by tc, which
-// typically loads compiler export data; IR code cannot be built for
-// those packages. BuildPackage then constructs an ir.Program with all
-// dependency packages created, and builds and returns the IR package
-// corresponding to pkg.
-//
-// The caller must have set pkg.Path() to the import path.
-//
-// The operation fails if there were any type-checking or import errors.
-//
-// See ../ir/example_test.go for an example.
-//
-func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ir.BuilderMode) (*ir.Package, *types.Info, error) {
- if fset == nil {
- panic("no token.FileSet")
- }
- if pkg.Path() == "" {
- panic("package has no import path")
- }
-
- info := &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
- }
- if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
- return nil, nil, err
- }
-
- prog := ir.NewProgram(fset, mode)
-
- // Create IR packages for all imports.
- // Order is not significant.
- created := make(map[*types.Package]bool)
- var createAll func(pkgs []*types.Package)
- createAll = func(pkgs []*types.Package) {
- for _, p := range pkgs {
- if !created[p] {
- created[p] = true
- prog.CreatePackage(p, nil, nil, true)
- createAll(p.Imports())
- }
- }
- }
- createAll(pkg.Imports())
-
- // Create and build the primary package.
- irpkg := prog.CreatePackage(pkg, files, info, false)
- irpkg.Build()
- return irpkg, info, nil
-}
diff --git a/vendor/honnef.co/go/tools/ir/irutil/switch.go b/vendor/honnef.co/go/tools/ir/irutil/switch.go
deleted file mode 100644
index f44cbca9e..000000000
--- a/vendor/honnef.co/go/tools/ir/irutil/switch.go
+++ /dev/null
@@ -1,264 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package irutil
-
-// This file implements discovery of switch and type-switch constructs
-// from low-level control flow.
-//
-// Many techniques exist for compiling a high-level switch with
-// constant cases to efficient machine code. The optimal choice will
-// depend on the data type, the specific case values, the code in the
-// body of each case, and the hardware.
-// Some examples:
-// - a lookup table (for a switch that maps constants to constants)
-// - a computed goto
-// - a binary tree
-// - a perfect hash
-// - a two-level switch (to partition constant strings by their first byte).
-
-import (
- "bytes"
- "fmt"
- "go/token"
- "go/types"
-
- "honnef.co/go/tools/ir"
-)
-
-// A ConstCase represents a single constant comparison.
-// It is part of a Switch.
-type ConstCase struct {
- Block *ir.BasicBlock // block performing the comparison
- Body *ir.BasicBlock // body of the case
- Value *ir.Const // case comparand
-}
-
-// A TypeCase represents a single type assertion.
-// It is part of a Switch.
-type TypeCase struct {
- Block *ir.BasicBlock // block performing the type assert
- Body *ir.BasicBlock // body of the case
- Type types.Type // case type
- Binding ir.Value // value bound by this case
-}
-
-// A Switch is a logical high-level control flow operation
-// (a multiway branch) discovered by analysis of a CFG containing
-// only if/else chains. It is not part of the ir.Instruction set.
-//
-// One of ConstCases and TypeCases has length >= 2;
-// the other is nil.
-//
-// In a value switch, the list of cases may contain duplicate constants.
-// A type switch may contain duplicate types, or types assignable
-// to an interface type also in the list.
-// TODO(adonovan): eliminate such duplicates.
-//
-type Switch struct {
- Start *ir.BasicBlock // block containing start of if/else chain
- X ir.Value // the switch operand
- ConstCases []ConstCase // ordered list of constant comparisons
- TypeCases []TypeCase // ordered list of type assertions
- Default *ir.BasicBlock // successor if all comparisons fail
-}
-
-func (sw *Switch) String() string {
- // We represent each block by the String() of its
- // first Instruction, e.g. "print(42:int)".
- var buf bytes.Buffer
- if sw.ConstCases != nil {
- fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name())
- for _, c := range sw.ConstCases {
- fmt.Fprintf(&buf, "case %s: %s\n", c.Value.Name(), c.Body.Instrs[0])
- }
- } else {
- fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name())
- for _, c := range sw.TypeCases {
- fmt.Fprintf(&buf, "case %s %s: %s\n",
- c.Binding.Name(), c.Type, c.Body.Instrs[0])
- }
- }
- if sw.Default != nil {
- fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0])
- }
- fmt.Fprintf(&buf, "}")
- return buf.String()
-}
-
-// Switches examines the control-flow graph of fn and returns the
-// set of inferred value and type switches. A value switch tests an
-// ir.Value for equality against two or more compile-time constant
-// values. Switches involving link-time constants (addresses) are
-// ignored. A type switch type-asserts an ir.Value against two or
-// more types.
-//
-// The switches are returned in dominance order.
-//
-// The resulting switches do not necessarily correspond to uses of the
-// 'switch' keyword in the source: for example, a single source-level
-// switch statement with non-constant cases may result in zero, one or
-// many Switches, one per plural sequence of constant cases.
-// Switches may even be inferred from if/else- or goto-based control flow.
-// (In general, the control flow constructs of the source program
-// cannot be faithfully reproduced from the IR.)
-//
-func Switches(fn *ir.Function) []Switch {
- // Traverse the CFG in dominance order, so we don't
- // enter an if/else-chain in the middle.
- var switches []Switch
- seen := make(map[*ir.BasicBlock]bool) // TODO(adonovan): opt: use ir.blockSet
- for _, b := range fn.DomPreorder() {
- if x, k := isComparisonBlock(b); x != nil {
- // Block b starts a switch.
- sw := Switch{Start: b, X: x}
- valueSwitch(&sw, k, seen)
- if len(sw.ConstCases) > 1 {
- switches = append(switches, sw)
- }
- }
-
- if y, x, T := isTypeAssertBlock(b); y != nil {
- // Block b starts a type switch.
- sw := Switch{Start: b, X: x}
- typeSwitch(&sw, y, T, seen)
- if len(sw.TypeCases) > 1 {
- switches = append(switches, sw)
- }
- }
- }
- return switches
-}
-
-func isSameX(x1 ir.Value, x2 ir.Value) bool {
- if x1 == x2 {
- return true
- }
- if x2, ok := x2.(*ir.Sigma); ok {
- return isSameX(x1, x2.X)
- }
- return false
-}
-
-func valueSwitch(sw *Switch, k *ir.Const, seen map[*ir.BasicBlock]bool) {
- b := sw.Start
- x := sw.X
- for isSameX(sw.X, x) {
- if seen[b] {
- break
- }
- seen[b] = true
-
- sw.ConstCases = append(sw.ConstCases, ConstCase{
- Block: b,
- Body: b.Succs[0],
- Value: k,
- })
- b = b.Succs[1]
- n := 0
- for _, instr := range b.Instrs {
- switch instr.(type) {
- case *ir.If, *ir.BinOp:
- n++
- case *ir.Sigma, *ir.Phi, *ir.DebugRef:
- default:
- n += 1000
- }
- }
- if n != 2 {
- // Block b contains not just 'if x == k' and σ/ϕ nodes,
- // so it may have side effects that
- // make it unsafe to elide.
- break
- }
- if len(b.Preds) != 1 {
- // Block b has multiple predecessors,
- // so it cannot be treated as a case.
- break
- }
- x, k = isComparisonBlock(b)
- }
- sw.Default = b
-}
-
-func typeSwitch(sw *Switch, y ir.Value, T types.Type, seen map[*ir.BasicBlock]bool) {
- b := sw.Start
- x := sw.X
- for isSameX(sw.X, x) {
- if seen[b] {
- break
- }
- seen[b] = true
-
- sw.TypeCases = append(sw.TypeCases, TypeCase{
- Block: b,
- Body: b.Succs[0],
- Type: T,
- Binding: y,
- })
- b = b.Succs[1]
- n := 0
- for _, instr := range b.Instrs {
- switch instr.(type) {
- case *ir.TypeAssert, *ir.Extract, *ir.If:
- n++
- case *ir.Sigma, *ir.Phi:
- default:
- n += 1000
- }
- }
- if n != 4 {
- // Block b contains not just
- // {TypeAssert; Extract #0; Extract #1; If}
- // so it may have side effects that
- // make it unsafe to elide.
- break
- }
- if len(b.Preds) != 1 {
- // Block b has multiple predecessors,
- // so it cannot be treated as a case.
- break
- }
- y, x, T = isTypeAssertBlock(b)
- }
- sw.Default = b
-}
-
-// isComparisonBlock returns the operands (v, k) if a block ends with
-// a comparison v==k, where k is a compile-time constant.
-//
-func isComparisonBlock(b *ir.BasicBlock) (v ir.Value, k *ir.Const) {
- if n := len(b.Instrs); n >= 2 {
- if i, ok := b.Instrs[n-1].(*ir.If); ok {
- if binop, ok := i.Cond.(*ir.BinOp); ok && binop.Block() == b && binop.Op == token.EQL {
- if k, ok := binop.Y.(*ir.Const); ok {
- return binop.X, k
- }
- if k, ok := binop.X.(*ir.Const); ok {
- return binop.Y, k
- }
- }
- }
- }
- return
-}
-
-// isTypeAssertBlock returns the operands (y, x, T) if a block ends with
-// a type assertion "if y, ok := x.(T); ok {".
-//
-func isTypeAssertBlock(b *ir.BasicBlock) (y, x ir.Value, T types.Type) {
- if n := len(b.Instrs); n >= 4 {
- if i, ok := b.Instrs[n-1].(*ir.If); ok {
- if ext1, ok := i.Cond.(*ir.Extract); ok && ext1.Block() == b && ext1.Index == 1 {
- if ta, ok := ext1.Tuple.(*ir.TypeAssert); ok && ta.Block() == b {
- // hack: relies upon instruction ordering.
- if ext0, ok := b.Instrs[n-3].(*ir.Extract); ok {
- return ext0, ta.X, ta.AssertedType
- }
- }
- }
- }
- }
- return
-}
diff --git a/vendor/honnef.co/go/tools/ir/irutil/visit.go b/vendor/honnef.co/go/tools/ir/irutil/visit.go
deleted file mode 100644
index 576a092d1..000000000
--- a/vendor/honnef.co/go/tools/ir/irutil/visit.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package irutil
-
-import "honnef.co/go/tools/ir"
-
-// This file defines utilities for visiting the IR of
-// a Program.
-//
-// TODO(adonovan): test coverage.
-
-// AllFunctions finds and returns the set of functions potentially
-// needed by program prog, as determined by a simple linker-style
-// reachability algorithm starting from the members and method-sets of
-// each package. The result may include anonymous functions and
-// synthetic wrappers.
-//
-// Precondition: all packages are built.
-//
-func AllFunctions(prog *ir.Program) map[*ir.Function]bool {
- visit := visitor{
- prog: prog,
- seen: make(map[*ir.Function]bool),
- }
- visit.program()
- return visit.seen
-}
-
-type visitor struct {
- prog *ir.Program
- seen map[*ir.Function]bool
-}
-
-func (visit *visitor) program() {
- for _, pkg := range visit.prog.AllPackages() {
- for _, mem := range pkg.Members {
- if fn, ok := mem.(*ir.Function); ok {
- visit.function(fn)
- }
- }
- }
- for _, T := range visit.prog.RuntimeTypes() {
- mset := visit.prog.MethodSets.MethodSet(T)
- for i, n := 0, mset.Len(); i < n; i++ {
- visit.function(visit.prog.MethodValue(mset.At(i)))
- }
- }
-}
-
-func (visit *visitor) function(fn *ir.Function) {
- if !visit.seen[fn] {
- visit.seen[fn] = true
- var buf [10]*ir.Value // avoid alloc in common case
- for _, b := range fn.Blocks {
- for _, instr := range b.Instrs {
- for _, op := range instr.Operands(buf[:0]) {
- if fn, ok := (*op).(*ir.Function); ok {
- visit.function(fn)
- }
- }
- }
- }
- }
-}
-
-// MainPackages returns the subset of the specified packages
-// named "main" that define a main function.
-// The result may include synthetic "testmain" packages.
-func MainPackages(pkgs []*ir.Package) []*ir.Package {
- var mains []*ir.Package
- for _, pkg := range pkgs {
- if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil {
- mains = append(mains, pkg)
- }
- }
- return mains
-}
diff --git a/vendor/honnef.co/go/tools/ir/write.go b/vendor/honnef.co/go/tools/ir/write.go
deleted file mode 100644
index b936bc985..000000000
--- a/vendor/honnef.co/go/tools/ir/write.go
+++ /dev/null
@@ -1,5 +0,0 @@
-package ir
-
-func NewJump(parent *BasicBlock) *Jump {
- return &Jump{anInstruction{block: parent}, ""}
-}
diff --git a/vendor/honnef.co/go/tools/lint/lint.go b/vendor/honnef.co/go/tools/lint/lint.go
index d9ae03738..de5a8f128 100644
--- a/vendor/honnef.co/go/tools/lint/lint.go
+++ b/vendor/honnef.co/go/tools/lint/lint.go
@@ -1,9 +1,8 @@
// Package lint provides the foundation for tools like staticcheck
-package lint
+package lint // import "honnef.co/go/tools/lint"
import (
"bytes"
- "encoding/gob"
"fmt"
"go/scanner"
"go/token"
@@ -18,7 +17,6 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
"honnef.co/go/tools/config"
- "honnef.co/go/tools/internal/cache"
)
type Documentation struct {
@@ -121,21 +119,6 @@ type Problem struct {
Message string
Check string
Severity Severity
- Related []Related
-}
-
-type Related struct {
- Pos token.Position
- End token.Position
- Message string
-}
-
-func (p Problem) Equal(o Problem) bool {
- return p.Pos == o.Pos &&
- p.End == o.End &&
- p.Message == o.Message &&
- p.Check == o.Check &&
- p.Severity == o.Severity
}
func (p *Problem) String() string {
@@ -149,7 +132,6 @@ type Linter struct {
GoVersion int
Config config.Config
Stats Stats
- RepeatAnalyzers uint
}
type CumulativeChecker interface {
@@ -202,7 +184,6 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
return nil, err
}
r.goVersion = l.GoVersion
- r.repeatAnalyzers = l.RepeatAnalyzers
pkgs, err := r.Run(cfg, patterns, allowedAnalyzers, hasCumulative)
if err != nil {
@@ -283,12 +264,10 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
}
atomic.StoreUint32(&r.stats.State, StateCumulative)
+ var problems []Problem
for _, cum := range l.CumulativeCheckers {
for _, res := range cum.Result() {
pkg := tpkgToPkg[res.Pkg()]
- if pkg == nil {
- panic(fmt.Sprintf("analyzer %s flagged object %s in package %s, a package that we aren't tracking", cum.Analyzer(), res, res.Pkg()))
- }
allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks)
if allowedChecks[cum.Analyzer().Name] {
pos := DisplayPosition(pkg.Fset, res.Pos())
@@ -299,33 +278,12 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
continue
}
p := cum.ProblemObject(pkg.Fset, res)
- pkg.problems = append(pkg.problems, p)
+ problems = append(problems, p)
}
}
}
for _, pkg := range pkgs {
- if !pkg.fromSource {
- // Don't cache packages that we loaded from the cache
- continue
- }
- cpkg := cachedPackage{
- Problems: pkg.problems,
- Ignores: pkg.ignores,
- Config: pkg.cfg,
- }
- buf := &bytes.Buffer{}
- if err := gob.NewEncoder(buf).Encode(cpkg); err != nil {
- return nil, err
- }
- id := cache.Subkey(pkg.actionID, "data "+r.problemsCacheKey)
- if err := r.cache.PutBytes(id, buf.Bytes()); err != nil {
- return nil, err
- }
- }
-
- var problems []Problem
- for _, pkg := range pkgs {
for _, ig := range pkg.ignores {
for i := range pkg.problems {
p := &pkg.problems[i]
@@ -333,6 +291,12 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
p.Severity = Ignored
}
}
+ for i := range problems {
+ p := &problems[i]
+ if ig.Match(*p) {
+ p.Severity = Ignored
+ }
+ }
}
if pkg.cfg == nil {
@@ -408,7 +372,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
for i, p := range problems[1:] {
// We may encounter duplicate problems because one file
// can be part of many packages.
- if !problems[i].Equal(p) {
+ if problems[i] != p {
out = append(out, p)
}
}
@@ -458,6 +422,10 @@ func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bo
return allowedChecks
}
+type Positioner interface {
+ Pos() token.Pos
+}
+
func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position {
if p == token.NoPos {
return token.Position{}
diff --git a/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go b/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go
index 4408aff25..3b939e95f 100644
--- a/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go
+++ b/vendor/honnef.co/go/tools/lint/lintdsl/lintdsl.go
@@ -4,14 +4,283 @@ package lintdsl
import (
"bytes"
+ "flag"
"fmt"
"go/ast"
- "go/format"
+ "go/constant"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "strings"
"golang.org/x/tools/go/analysis"
- "honnef.co/go/tools/pattern"
+ "honnef.co/go/tools/facts"
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/ssa"
)
+type packager interface {
+ Package() *ssa.Package
+}
+
+func CallName(call *ssa.CallCommon) string {
+ if call.IsInvoke() {
+ return ""
+ }
+ switch v := call.Value.(type) {
+ case *ssa.Function:
+ fn, ok := v.Object().(*types.Func)
+ if !ok {
+ return ""
+ }
+ return lint.FuncName(fn)
+ case *ssa.Builtin:
+ return v.Name()
+ }
+ return ""
+}
+
+func IsCallTo(call *ssa.CallCommon, name string) bool { return CallName(call) == name }
+func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name }
+
+func FilterDebug(instr []ssa.Instruction) []ssa.Instruction {
+ var out []ssa.Instruction
+ for _, ins := range instr {
+ if _, ok := ins.(*ssa.DebugRef); !ok {
+ out = append(out, ins)
+ }
+ }
+ return out
+}
+
+func IsExample(fn *ssa.Function) bool {
+ if !strings.HasPrefix(fn.Name(), "Example") {
+ return false
+ }
+ f := fn.Prog.Fset.File(fn.Pos())
+ if f == nil {
+ return false
+ }
+ return strings.HasSuffix(f.Name(), "_test.go")
+}
+
+func IsPointerLike(T types.Type) bool {
+ switch T := T.Underlying().(type) {
+ case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer:
+ return true
+ case *types.Basic:
+ return T.Kind() == types.UnsafePointer
+ }
+ return false
+}
+
+func IsIdent(expr ast.Expr, ident string) bool {
+ id, ok := expr.(*ast.Ident)
+ return ok && id.Name == ident
+}
+
+// isBlank returns whether id is the blank identifier "_".
+// If id == nil, the answer is false.
+func IsBlank(id ast.Expr) bool {
+ ident, _ := id.(*ast.Ident)
+ return ident != nil && ident.Name == "_"
+}
+
+func IsIntLiteral(expr ast.Expr, literal string) bool {
+ lit, ok := expr.(*ast.BasicLit)
+ return ok && lit.Kind == token.INT && lit.Value == literal
+}
+
+// Deprecated: use IsIntLiteral instead
+func IsZero(expr ast.Expr) bool {
+ return IsIntLiteral(expr, "0")
+}
+
+func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool {
+ return IsType(pass.TypesInfo.TypeOf(expr), name)
+}
+
+func IsInTest(pass *analysis.Pass, node lint.Positioner) bool {
+ // FIXME(dh): this doesn't work for global variables with
+ // initializers
+ f := pass.Fset.File(node.Pos())
+ return f != nil && strings.HasSuffix(f.Name(), "_test.go")
+}
+
+func IsInMain(pass *analysis.Pass, node lint.Positioner) bool {
+ if node, ok := node.(packager); ok {
+ return node.Package().Pkg.Name() == "main"
+ }
+ return pass.Pkg.Name() == "main"
+}
+
+func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string {
+ info := pass.TypesInfo
+ sel := info.Selections[expr]
+ if sel == nil {
+ if x, ok := expr.X.(*ast.Ident); ok {
+ pkg, ok := info.ObjectOf(x).(*types.PkgName)
+ if !ok {
+ // This shouldn't happen
+ return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
+ }
+ return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
+ }
+ panic(fmt.Sprintf("unsupported selector: %v", expr))
+ }
+ return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
+}
+
+func IsNil(pass *analysis.Pass, expr ast.Expr) bool {
+ return pass.TypesInfo.Types[expr].IsNil()
+}
+
+func BoolConst(pass *analysis.Pass, expr ast.Expr) bool {
+ val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
+ return constant.BoolVal(val)
+}
+
+func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool {
+ // We explicitly don't support typed bools because more often than
+ // not, custom bool types are used as binary enums and the
+ // explicit comparison is desired.
+
+ ident, ok := expr.(*ast.Ident)
+ if !ok {
+ return false
+ }
+ obj := pass.TypesInfo.ObjectOf(ident)
+ c, ok := obj.(*types.Const)
+ if !ok {
+ return false
+ }
+ basic, ok := c.Type().(*types.Basic)
+ if !ok {
+ return false
+ }
+ if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool {
+ return false
+ }
+ return true
+}
+
+func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) {
+ tv := pass.TypesInfo.Types[expr]
+ if tv.Value == nil {
+ return 0, false
+ }
+ if tv.Value.Kind() != constant.Int {
+ return 0, false
+ }
+ return constant.Int64Val(tv.Value)
+}
+
+func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) {
+ val := pass.TypesInfo.Types[expr].Value
+ if val == nil {
+ return "", false
+ }
+ if val.Kind() != constant.String {
+ return "", false
+ }
+ return constant.StringVal(val), true
+}
+
+// Dereference returns a pointer's element type; otherwise it returns
+// T.
+func Dereference(T types.Type) types.Type {
+ if p, ok := T.Underlying().(*types.Pointer); ok {
+ return p.Elem()
+ }
+ return T
+}
+
+// DereferenceR returns a pointer's element type; otherwise it returns
+// T. If the element type is itself a pointer, DereferenceR will be
+// applied recursively.
+func DereferenceR(T types.Type) types.Type {
+ if p, ok := T.Underlying().(*types.Pointer); ok {
+ return DereferenceR(p.Elem())
+ }
+ return T
+}
+
+func IsGoVersion(pass *analysis.Pass, minor int) bool {
+ version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int)
+ return version >= minor
+}
+
+func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string {
+ switch fun := call.Fun.(type) {
+ case *ast.SelectorExpr:
+ fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func)
+ if !ok {
+ return ""
+ }
+ return lint.FuncName(fn)
+ case *ast.Ident:
+ obj := pass.TypesInfo.ObjectOf(fun)
+ switch obj := obj.(type) {
+ case *types.Func:
+ return lint.FuncName(obj)
+ case *types.Builtin:
+ return obj.Name()
+ default:
+ return ""
+ }
+ default:
+ return ""
+ }
+}
+
+func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool {
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return false
+ }
+ return CallNameAST(pass, call) == name
+}
+
+func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool {
+ for _, name := range names {
+ if IsCallToAST(pass, node, name) {
+ return true
+ }
+ }
+ return false
+}
+
+func Render(pass *analysis.Pass, x interface{}) string {
+ var buf bytes.Buffer
+ if err := printer.Fprint(&buf, pass.Fset, x); err != nil {
+ panic(err)
+ }
+ return buf.String()
+}
+
+func RenderArgs(pass *analysis.Pass, args []ast.Expr) string {
+ var ss []string
+ for _, arg := range args {
+ ss = append(ss, Render(pass, arg))
+ }
+ return strings.Join(ss, ", ")
+}
+
+func Preamble(f *ast.File) string {
+ cutoff := f.Package
+ if f.Doc != nil {
+ cutoff = f.Doc.Pos()
+ }
+ var out []string
+ for _, cmt := range f.Comments {
+ if cmt.Pos() >= cutoff {
+ break
+ }
+ out = append(out, cmt.Text())
+ }
+ return strings.Join(out, "\n")
+}
+
func Inspect(node ast.Node, fn func(node ast.Node) bool) {
if node == nil {
return
@@ -19,40 +288,113 @@ func Inspect(node ast.Node, fn func(node ast.Node) bool) {
ast.Inspect(node, fn)
}
-func Match(pass *analysis.Pass, q pattern.Pattern, node ast.Node) (*pattern.Matcher, bool) {
- // Note that we ignore q.Relevant – callers of Match usually use
- // AST inspectors that already filter on nodes we're interested
- // in.
- m := &pattern.Matcher{TypesInfo: pass.TypesInfo}
- ok := m.Match(q.Root, node)
- return m, ok
+func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec {
+ if len(specs) == 0 {
+ return nil
+ }
+ groups := make([][]ast.Spec, 1)
+ groups[0] = append(groups[0], specs[0])
+
+ for _, spec := range specs[1:] {
+ g := groups[len(groups)-1]
+ if fset.PositionFor(spec.Pos(), false).Line-1 !=
+ fset.PositionFor(g[len(g)-1].End(), false).Line {
+
+ groups = append(groups, nil)
+ }
+
+ groups[len(groups)-1] = append(groups[len(groups)-1], spec)
+ }
+
+ return groups
+}
+
+func IsObject(obj types.Object, name string) bool {
+ var path string
+ if pkg := obj.Pkg(); pkg != nil {
+ path = pkg.Path() + "."
+ }
+ return path+obj.Name() == name
}
-func MatchAndEdit(pass *analysis.Pass, before, after pattern.Pattern, node ast.Node) (*pattern.Matcher, []analysis.TextEdit, bool) {
- m, ok := Match(pass, before, node)
- if !ok {
- return m, nil, false
+type Field struct {
+ Var *types.Var
+ Tag string
+ Path []int
+}
+
+// FlattenFields recursively flattens T and embedded structs,
+// returning a list of fields. If multiple fields with the same name
+// exist, all will be returned.
+func FlattenFields(T *types.Struct) []Field {
+ return flattenFields(T, nil, nil)
+}
+
+func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field {
+ if seen == nil {
+ seen = map[types.Type]bool{}
+ }
+ if seen[T] {
+ return nil
}
- r := pattern.NodeToAST(after.Root, m.State)
- buf := &bytes.Buffer{}
- format.Node(buf, pass.Fset, r)
- edit := []analysis.TextEdit{{
- Pos: node.Pos(),
- End: node.End(),
- NewText: buf.Bytes(),
- }}
- return m, edit, true
+ seen[T] = true
+ var out []Field
+ for i := 0; i < T.NumFields(); i++ {
+ field := T.Field(i)
+ tag := T.Tag(i)
+ np := append(path[:len(path):len(path)], i)
+ if field.Anonymous() {
+ if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok {
+ out = append(out, flattenFields(s, np, seen)...)
+ }
+ } else {
+ out = append(out, Field{field, tag, np})
+ }
+ }
+ return out
+}
+
+func File(pass *analysis.Pass, node lint.Positioner) *ast.File {
+ pass.Fset.PositionFor(node.Pos(), true)
+ m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File)
+ return m[pass.Fset.File(node.Pos())]
+}
+
+// IsGenerated reports whether pos is in a generated file. It ignores
+// //line directives.
+func IsGenerated(pass *analysis.Pass, pos token.Pos) bool {
+ _, ok := Generator(pass, pos)
+ return ok
}
-func Selector(x, sel string) *ast.SelectorExpr {
- return &ast.SelectorExpr{
- X: &ast.Ident{Name: x},
- Sel: &ast.Ident{Name: sel},
+// Generator returns the generator that generated the file containing
+// pos. It ignores //line directives.
+func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) {
+ file := pass.Fset.PositionFor(pos, false).Filename
+ m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+ g, ok := m[file]
+ return g, ok
+}
+
+func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{}) {
+ file := lint.DisplayPosition(pass.Fset, pos).Filename
+ m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+ if _, ok := m[file]; ok {
+ return
}
+ pass.Reportf(pos, f, args...)
}
-// ExhaustiveTypeSwitch panics when called. It can be used to ensure
-// that type switches are exhaustive.
-func ExhaustiveTypeSwitch(v interface{}) {
- panic(fmt.Sprintf("internal error: unhandled case %T", v))
+func ReportNodef(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
+ msg := fmt.Sprintf(format, args...)
+ pass.Report(analysis.Diagnostic{Pos: node.Pos(), End: node.End(), Message: msg})
+}
+
+func ReportNodefFG(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
+ file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename
+ m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+ if _, ok := m[file]; ok {
+ return
+ }
+ ReportNodef(pass, node, format, args...)
}
diff --git a/vendor/honnef.co/go/tools/lint/lintutil/format/format.go b/vendor/honnef.co/go/tools/lint/lintutil/format/format.go
index b28f8885b..9385431f8 100644
--- a/vendor/honnef.co/go/tools/lint/lintutil/format/format.go
+++ b/vendor/honnef.co/go/tools/lint/lintutil/format/format.go
@@ -39,7 +39,7 @@ func relativePositionString(pos token.Position) string {
}
type Statter interface {
- Stats(total, errors, warnings, ignored int)
+ Stats(total, errors, warnings int)
}
type Formatter interface {
@@ -51,10 +51,7 @@ type Text struct {
}
func (o Text) Format(p lint.Problem) {
- fmt.Fprintf(o.W, "%s: %s\n", relativePositionString(p.Pos), p.String())
- for _, r := range p.Related {
- fmt.Fprintf(o.W, "\t%s: %s\n", relativePositionString(r.Pos), r.Message)
- }
+ fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Pos), p.String())
}
type JSON struct {
@@ -79,18 +76,12 @@ func (o JSON) Format(p lint.Problem) {
Line int `json:"line"`
Column int `json:"column"`
}
- type related struct {
+ jp := struct {
+ Code string `json:"code"`
+ Severity string `json:"severity,omitempty"`
Location location `json:"location"`
End location `json:"end"`
Message string `json:"message"`
- }
- jp := struct {
- Code string `json:"code"`
- Severity string `json:"severity,omitempty"`
- Location location `json:"location"`
- End location `json:"end"`
- Message string `json:"message"`
- Related []related `json:"related,omitempty"`
}{
Code: p.Check,
Severity: severity(p.Severity),
@@ -106,21 +97,6 @@ func (o JSON) Format(p lint.Problem) {
},
Message: p.Message,
}
- for _, r := range p.Related {
- jp.Related = append(jp.Related, related{
- Location: location{
- File: r.Pos.Filename,
- Line: r.Pos.Line,
- Column: r.Pos.Column,
- },
- End: location{
- File: r.End.Filename,
- Line: r.End.Line,
- Column: r.End.Column,
- },
- Message: r.Message,
- })
- }
_ = json.NewEncoder(o.W).Encode(jp)
}
@@ -147,16 +123,13 @@ func (o *Stylish) Format(p lint.Problem) {
o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0)
}
fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Check, p.Message)
- for _, r := range p.Related {
- fmt.Fprintf(o.tw, " (%d, %d)\t\t %s\n", r.Pos.Line, r.Pos.Column, r.Message)
- }
}
-func (o *Stylish) Stats(total, errors, warnings, ignored int) {
+func (o *Stylish) Stats(total, errors, warnings int) {
if o.tw != nil {
o.tw.Flush()
fmt.Fprintln(o.W)
}
- fmt.Fprintf(o.W, " ✖ %d problems (%d errors, %d warnings, %d ignored)\n",
- total, errors, warnings, ignored)
+ fmt.Fprintf(o.W, " ✖ %d problems (%d errors, %d warnings)\n",
+ total, errors, warnings)
}
diff --git a/vendor/honnef.co/go/tools/lint/lintutil/util.go b/vendor/honnef.co/go/tools/lint/lintutil/util.go
index c7591d2de..fe0279f92 100644
--- a/vendor/honnef.co/go/tools/lint/lintutil/util.go
+++ b/vendor/honnef.co/go/tools/lint/lintutil/util.go
@@ -5,7 +5,7 @@
// https://developers.google.com/open-source/licenses/bsd.
// Package lintutil provides helpers for writing linter command lines.
-package lintutil
+package lintutil // import "honnef.co/go/tools/lint/lintutil"
import (
"crypto/sha256"
@@ -23,9 +23,7 @@ import (
"runtime/pprof"
"strconv"
"strings"
- "sync"
"sync/atomic"
- "time"
"honnef.co/go/tools/config"
"honnef.co/go/tools/internal/cache"
@@ -116,8 +114,6 @@ func FlagSet(name string) *flag.FlagSet {
flags.String("debug.memprofile", "", "Write memory profile to `file`")
flags.Bool("debug.version", false, "Print detailed version information about this program")
flags.Bool("debug.no-compile-errors", false, "Don't print compile errors")
- flags.String("debug.measure-analyzers", "", "Write analysis measurements to `file`. `file` will be opened for appending if it already exists.")
- flags.Uint("debug.repeat-analyzers", 0, "Run analyzers `num` times")
checks := list{"inherit"}
fail := list{"all"}
@@ -157,24 +153,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string)
debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool)
debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool)
- debugRepeat := fs.Lookup("debug.repeat-analyzers").Value.(flag.Getter).Get().(uint)
-
- var measureAnalyzers func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration)
- if path := fs.Lookup("debug.measure-analyzers").Value.(flag.Getter).Get().(string); path != "" {
- f, err := os.OpenFile(path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600)
- if err != nil {
- log.Fatal(err)
- }
-
- mu := &sync.Mutex{}
- measureAnalyzers = func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration) {
- mu.Lock()
- defer mu.Unlock()
- if _, err := fmt.Fprintf(f, "%s\t%s\t%d\n", analysis.Name, pkg.ID, d.Nanoseconds()); err != nil {
- log.Println("error writing analysis measurements:", err)
- }
- }
- }
cfg := config.Config{}
cfg.Checks = *fs.Lookup("checks").Value.(*list)
@@ -240,12 +218,10 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
}
ps, err := Lint(cs, cums, fs.Args(), &Options{
- Tags: tags,
- LintTests: tests,
- GoVersion: goVersion,
- Config: cfg,
- PrintAnalyzerMeasurement: measureAnalyzers,
- RepeatAnalyzers: debugRepeat,
+ Tags: tags,
+ LintTests: tests,
+ GoVersion: goVersion,
+ Config: cfg,
})
if err != nil {
fmt.Fprintln(os.Stderr, err)
@@ -269,7 +245,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
total int
errors int
warnings int
- ignored int
)
fail := *fs.Lookup("fail").Value.(*list)
@@ -287,7 +262,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
continue
}
if p.Severity == lint.Ignored && !showIgnored {
- ignored++
continue
}
if shouldExit[p.Check] {
@@ -299,7 +273,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
f.Format(p)
}
if f, ok := f.(format.Statter); ok {
- f.Stats(total, errors, warnings, ignored)
+ f.Stats(total, errors, warnings)
}
if errors > 0 {
exit(1)
@@ -310,11 +284,9 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
type Options struct {
Config config.Config
- Tags string
- LintTests bool
- GoVersion int
- PrintAnalyzerMeasurement func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration)
- RepeatAnalyzers uint
+ Tags string
+ LintTests bool
+ GoVersion int
}
func computeSalt() ([]byte, error) {
@@ -353,9 +325,7 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string
CumulativeCheckers: cums,
GoVersion: opt.GoVersion,
Config: opt.Config,
- RepeatAnalyzers: opt.RepeatAnalyzers,
}
- l.Stats.PrintAnalyzerMeasurement = opt.PrintAnalyzerMeasurement
cfg := &packages.Config{}
if opt.LintTests {
cfg.Tests = true
@@ -398,8 +368,7 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string
}()
}
- ps, err := l.Lint(cfg, paths)
- return ps, err
+ return l.Lint(cfg, paths)
}
var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`)
@@ -421,24 +390,3 @@ func parsePos(pos string) token.Position {
Column: col,
}
}
-
-func InitializeAnalyzers(docs map[string]*lint.Documentation, analyzers map[string]*analysis.Analyzer) map[string]*analysis.Analyzer {
- out := make(map[string]*analysis.Analyzer, len(analyzers))
- for k, v := range analyzers {
- vc := *v
- out[k] = &vc
-
- vc.Name = k
- doc, ok := docs[k]
- if !ok {
- panic(fmt.Sprintf("missing documentation for check %s", k))
- }
- vc.Doc = doc.String()
- if vc.Flags.Usage == nil {
- fs := flag.NewFlagSet("", flag.PanicOnError)
- fs.Var(NewVersionFlag(), "go", "Target Go version")
- vc.Flags = *fs
- }
- }
- return out
-}
diff --git a/vendor/honnef.co/go/tools/lint/runner.go b/vendor/honnef.co/go/tools/lint/runner.go
index 3235dce82..3b22a63fa 100644
--- a/vendor/honnef.co/go/tools/lint/runner.go
+++ b/vendor/honnef.co/go/tools/lint/runner.go
@@ -1,30 +1,6 @@
package lint
/*
-Package loading
-
-Conceptually, package loading in the runner can be imagined as a
-graph-shaped work list. We iteratively pop off leaf nodes (packages
-that have no unloaded dependencies) and load data from export data,
-our cache, or source.
-
-Specifically, non-initial packages are loaded from export data and the
-fact cache if possible, otherwise from source. Initial packages are
-loaded from export data, the fact cache and the (problems, ignores,
-config) cache if possible, otherwise from source.
-
-The appeal of this approach is that it is both simple to implement and
-easily parallelizable. Each leaf node can be processed independently,
-and new leaf nodes appear as their dependencies are being processed.
-
-The downside of this approach, however, is that we're doing more work
-than necessary. Imagine an initial package A, which has the following
-dependency chain: A->B->C->D – in the current implementation, we will
-load all 4 packages. However, if package A can be loaded fully from
-cached information, then none of its dependencies are necessary, and
-we could avoid loading them.
-
-
Parallelism
Runner implements parallel processing of packages by spawning one
@@ -43,34 +19,6 @@ all execute in parallel, while not wasting resources for long linear
chains or trying to process more subgraphs in parallel than the system
can handle.
-
-Caching
-
-We make use of several caches. These caches are Go's export data, our
-facts cache, and our (problems, ignores, config) cache.
-
-Initial packages will either be loaded from a combination of all three
-caches, or from source. Non-initial packages will either be loaded
-from a combination of export data and facts cache, or from source.
-
-The facts cache is separate from the (problems, ignores, config) cache
-because when we process non-initial packages, we generate facts, but
-we discard problems and ignores.
-
-The facts cache is keyed by (package, analyzer), whereas the
-(problems, ignores, config) cache is keyed by (package, list of
-analyzes). The difference between the two exists because there are
-only a handful of analyses that produce facts, but hundreds of
-analyses that don't. Creating one cache entry per fact-generating
-analysis is feasible, creating one cache entry per normal analysis has
-significant performance and storage overheads.
-
-The downside of keying by the list of analyzes is, naturally, that a
-change in list of analyzes changes the cache key. `staticcheck -checks
-A` and `staticcheck -checks A,B` will therefore need their own cache
-entries and not reuse each other's work. This problem does not affect
-the facts cache.
-
*/
import (
@@ -89,7 +37,6 @@ import (
"strings"
"sync"
"sync/atomic"
- "time"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
@@ -100,11 +47,6 @@ import (
"honnef.co/go/tools/loader"
)
-func init() {
- gob.Register(&FileIgnore{})
- gob.Register(&LineIgnore{})
-}
-
// If enabled, abuse of the go/analysis API will lead to panics
const sanityCheck = true
@@ -116,43 +58,21 @@ const sanityCheck = true
// This may change unused's behavior, however, as it may observe fewer
// interfaces from transitive dependencies.
-// OPT(dh): every single package will have the same value for
-// canClearTypes. We could move the Package.decUse method to runner to
-// eliminate this field. This is probably not worth it, though. There
-// are only thousands of packages, so the field only takes up
-// kilobytes of memory.
-
-// OPT(dh): do we really need the Package.gen field? it's based
-// trivially on pkg.results and merely caches the result of a type
-// assertion. How often do we actually use the field?
-
type Package struct {
- // dependents is initially set to 1 plus the number of packages
- // that directly import this package. It is atomically decreased
- // by 1 every time a dependent has been processed or when the
- // package itself has been processed. Once the value reaches zero,
- // the package is no longer needed.
dependents uint64
*packages.Package
- Imports []*Package
- initial bool
- // fromSource is set to true for packages that have been loaded
- // from source. This is the case for initial packages, packages
- // with missing export data, and packages with no cached facts.
+ Imports []*Package
+ initial bool
fromSource bool
- // hash stores the package hash, as computed by packageHash
- hash string
- actionID cache.ActionID
- done chan struct{}
+ hash string
+ done chan struct{}
resultsMu sync.Mutex
- // results maps analyzer IDs to analyzer results. it is
- // implemented as a deduplicating concurrent cache.
+ // results maps analyzer IDs to analyzer results
results []*result
- cfg *config.Config
- // gen maps file names to the code generator that created them
+ cfg *config.Config
gen map[string]facts.Generator
problems []Problem
ignores []Ignore
@@ -162,22 +82,12 @@ type Package struct {
facts []map[types.Object][]analysis.Fact
pkgFacts [][]analysis.Fact
- // canClearTypes is set to true if we can discard type
- // information after the package and its dependents have been
- // processed. This is the case when no cumulative checkers are
- // being run.
canClearTypes bool
}
-type cachedPackage struct {
- Problems []Problem
- Ignores []Ignore
- Config *config.Config
-}
-
func (pkg *Package) decUse() {
- ret := atomic.AddUint64(&pkg.dependents, ^uint64(0))
- if ret == 0 {
+ atomic.AddUint64(&pkg.dependents, ^uint64(0))
+ if atomic.LoadUint64(&pkg.dependents) == 0 {
// nobody depends on this package anymore
if pkg.canClearTypes {
pkg.Types = nil
@@ -198,16 +108,16 @@ type result struct {
}
type Runner struct {
- cache *cache.Cache
- goVersion int
- stats *Stats
- repeatAnalyzers uint
+ ld loader.Loader
+ cache *cache.Cache
- analyzerIDs analyzerIDs
- problemsCacheKey string
+ analyzerIDs analyzerIDs
// limits parallelism of loading packages
loadSem chan struct{}
+
+ goVersion int
+ stats *Stats
}
type analyzerIDs struct {
@@ -315,13 +225,6 @@ func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) {
Message: d.Message,
Check: pass.Analyzer.Name,
}
- for _, r := range d.Related {
- p.Related = append(p.Related, Related{
- Pos: DisplayPosition(pass.Fset, r.Pos),
- End: DisplayPosition(pass.Fset, r.End),
- Message: r.Message,
- })
- }
ac.problems = append(ac.problems, p)
}
@@ -375,21 +278,6 @@ func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) {
}
}
-func (r *Runner) loadCachedPackage(pkg *Package, analyzers []*analysis.Analyzer) (cachedPackage, bool) {
- // OPT(dh): we can cache this computation, it'll be the same for all packages
- id := cache.Subkey(pkg.actionID, "data "+r.problemsCacheKey)
-
- b, _, err := r.cache.GetBytes(id)
- if err != nil {
- return cachedPackage{}, false
- }
- var cpkg cachedPackage
- if err := gob.NewDecoder(bytes.NewReader(b)).Decode(&cpkg); err != nil {
- return cachedPackage{}, false
- }
- return cpkg, true
-}
-
func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bool) {
if len(a.FactTypes) == 0 {
return nil, true
@@ -397,7 +285,10 @@ func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bo
var facts []Fact
// Look in the cache for facts
- aID := passActionID(pkg, a)
+ aID, err := passActionID(pkg, a)
+ if err != nil {
+ return nil, false
+ }
aID = cache.Subkey(aID, "facts")
b, _, err := r.cache.GetBytes(aID)
if err != nil {
@@ -487,15 +378,9 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter
}
// Then with this analyzer
- var ret interface{}
- for i := uint(0); i < r.repeatAnalyzers+1; i++ {
- var err error
- t := time.Now()
- ret, err = ac.analyzer.Run(pass)
- r.stats.MeasureAnalyzer(ac.analyzer, ac.pkg, time.Since(t))
- if err != nil {
- return nil, err
- }
+ ret, err := ac.analyzer.Run(pass)
+ if err != nil {
+ return nil, err
}
if len(ac.analyzer.FactTypes) > 0 {
@@ -519,7 +404,16 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter
}
}
- if err := r.cacheData(facts, ac.pkg, ac.analyzer, "facts"); err != nil {
+ buf := &bytes.Buffer{}
+ if err := gob.NewEncoder(buf).Encode(facts); err != nil {
+ return nil, err
+ }
+ aID, err := passActionID(ac.pkg, ac.analyzer)
+ if err != nil {
+ return nil, err
+ }
+ aID = cache.Subkey(aID, "facts")
+ if err := r.cache.PutBytes(aID, buf.Bytes()); err != nil {
return nil, err
}
}
@@ -527,19 +421,6 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter
return ret, nil
}
-func (r *Runner) cacheData(v interface{}, pkg *Package, a *analysis.Analyzer, subkey string) error {
- buf := &bytes.Buffer{}
- if err := gob.NewEncoder(buf).Encode(v); err != nil {
- return err
- }
- aID := passActionID(pkg, a)
- aID = cache.Subkey(aID, subkey)
- if err := r.cache.PutBytes(aID, buf.Bytes()); err != nil {
- return err
- }
- return nil
-}
-
func NewRunner(stats *Stats) (*Runner, error) {
cache, err := cache.Default()
if err != nil {
@@ -557,17 +438,9 @@ func NewRunner(stats *Stats) (*Runner, error) {
// diagnostics as well as extracted ignore directives.
//
// Note that diagnostics have not been filtered at this point yet, to
-// accommodate cumulative analyzes that require additional steps to
+// accommodate cumulative analyzes that require additional steps to
// produce diagnostics.
func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer, hasCumulative bool) ([]*Package, error) {
- checkerNames := make([]string, len(analyzers))
- for i, a := range analyzers {
- checkerNames[i] = a.Name
- }
- sort.Strings(checkerNames)
- r.problemsCacheKey = strings.Join(checkerNames, " ")
-
- var allAnalyzers []*analysis.Analyzer
r.analyzerIDs = analyzerIDs{m: map[*analysis.Analyzer]int{}}
id := 0
seen := map[*analysis.Analyzer]struct{}{}
@@ -577,7 +450,6 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
return
}
seen[a] = struct{}{}
- allAnalyzers = append(allAnalyzers, a)
r.analyzerIDs.m[a] = id
id++
for _, f := range a.FactTypes {
@@ -596,11 +468,6 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
for _, a := range injectedAnalyses {
dfs(a)
}
- // Run all analyzers on all packages (subject to further
- // restrictions enforced later). This guarantees that if analyzer
- // A1 depends on A2, and A2 has facts, that A2 will run on the
- // dependencies of user-provided packages, even though A1 won't.
- analyzers = allAnalyzers
var dcfg packages.Config
if cfg != nil {
@@ -608,10 +475,11 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
}
atomic.StoreUint32(&r.stats.State, StateGraph)
- initialPkgs, err := loader.Graph(dcfg, patterns...)
+ initialPkgs, err := r.ld.Graph(dcfg, patterns...)
if err != nil {
return nil, err
}
+
defer r.cache.Trim()
var allPkgs []*Package
@@ -639,8 +507,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
m[l].Imports = append(m[l].Imports, m[v])
}
- m[l].hash, err = r.packageHash(m[l])
- m[l].actionID = packageActionID(m[l])
+ m[l].hash, err = packageHash(m[l])
if err != nil {
m[l].errs = append(m[l].errs, err)
}
@@ -697,36 +564,27 @@ func parsePos(pos string) (token.Position, int, error) {
}, len(parts[0]), nil
}
-// loadPkg loads a Go package. It may be loaded from a combination of
-// caches, or from source.
+// loadPkg loads a Go package. If the package is in the set of initial
+// packages, it will be loaded from source, otherwise it will be
+// loaded from export data. In the case that the package was loaded
+// from export data, cached facts will also be loaded.
+//
+// Currently, only cached facts for this package will be loaded, not
+// for any of its dependencies.
func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error {
if pkg.Types != nil {
panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package))
}
+ // Load type information
if pkg.initial {
- // Try to load cached package
- cpkg, ok := r.loadCachedPackage(pkg, analyzers)
- if ok {
- pkg.problems = cpkg.Problems
- pkg.ignores = cpkg.Ignores
- pkg.cfg = cpkg.Config
- } else {
- pkg.fromSource = true
- return loader.LoadFromSource(pkg.Package)
- }
+ // Load package from source
+ pkg.fromSource = true
+ return r.ld.LoadFromSource(pkg.Package)
}
- // At this point we're either working with a non-initial package,
- // or we managed to load cached problems for the package. We still
- // need export data and facts.
-
- // OPT(dh): we don't need type information for this package if no
- // other package depends on it. this may be the case for initial
- // packages.
-
// Load package from export data
- if err := loader.LoadFromExport(pkg.Package); err != nil {
+ if err := r.ld.LoadFromExport(pkg.Package); err != nil {
// We asked Go to give us up to date export data, yet
// we can't load it. There must be something wrong.
//
@@ -739,7 +597,7 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error {
// FIXME(dh): we no longer reload from export data, so
// theoretically we should be able to continue
pkg.fromSource = true
- if err := loader.LoadFromSource(pkg.Package); err != nil {
+ if err := r.ld.LoadFromSource(pkg.Package); err != nil {
return err
}
// Make sure this package can't be imported successfully
@@ -800,14 +658,13 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error {
dfs(a)
}
- if !failed {
- return nil
+ if failed {
+ pkg.fromSource = true
+ // XXX we added facts to the maps, we need to get rid of those
+ return r.ld.LoadFromSource(pkg.Package)
}
- // We failed to load some cached facts
- pkg.fromSource = true
- // XXX we added facts to the maps, we need to get rid of those
- return loader.LoadFromSource(pkg.Package)
+ return nil
}
type analysisError struct {
@@ -838,7 +695,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
}()
// Ensure all packages have the generated map and config. This is
- // required by internals of the runner. Analyses that themselves
+ // required by internals of the runner. Analyses that themselves
// make use of either have an explicit dependency so that other
// runners work correctly, too.
analyzers = append(analyzers[0:len(analyzers):len(analyzers)], injectedAnalyses...)
@@ -909,7 +766,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
defer wg.Done()
// Only initial packages and packages with missing
// facts will have been loaded from source.
- if pkg.initial || len(a.FactTypes) > 0 {
+ if pkg.initial || r.hasFacts(a) {
if _, err := r.runAnalysis(ac); err != nil {
errs[i] = analysisError{a, pkg, err}
return
@@ -943,8 +800,6 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
// We can't process ignores at this point because `unused` needs
// to see more than one package to make its decision.
- //
- // OPT(dh): can't we guard this block of code by pkg.initial?
ignores, problems := parseDirectives(pkg.Package)
pkg.ignores = append(pkg.ignores, ignores...)
pkg.problems = append(pkg.problems, problems...)
@@ -969,6 +824,32 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
// from processPkg.
}
+// hasFacts reports whether an analysis exports any facts. An analysis
+// that has a transitive dependency that exports facts is considered
+// to be exporting facts.
+func (r *Runner) hasFacts(a *analysis.Analyzer) bool {
+ ret := false
+ seen := make([]bool, len(r.analyzerIDs.m))
+ var dfs func(*analysis.Analyzer)
+ dfs = func(a *analysis.Analyzer) {
+ if seen[r.analyzerIDs.get(a)] {
+ return
+ }
+ seen[r.analyzerIDs.get(a)] = true
+ if len(a.FactTypes) > 0 {
+ ret = true
+ }
+ for _, req := range a.Requires {
+ if ret {
+ break
+ }
+ dfs(req)
+ }
+ }
+ dfs(a)
+ return ret
+}
+
func parseDirective(s string) (cmd string, args []string) {
if !strings.HasPrefix(s, "//lint:") {
return "", nil
@@ -1051,10 +932,9 @@ func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) {
// packageHash computes a package's hash. The hash is based on all Go
// files that make up the package, as well as the hashes of imported
// packages.
-func (r *Runner) packageHash(pkg *Package) (string, error) {
+func packageHash(pkg *Package) (string, error) {
key := cache.NewHash("package hash")
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
- fmt.Fprintf(key, "go %d\n", r.goVersion)
for _, f := range pkg.CompiledGoFiles {
h, err := cache.FileHash(f)
if err != nil {
@@ -1063,28 +943,6 @@ func (r *Runner) packageHash(pkg *Package) (string, error) {
fmt.Fprintf(key, "file %s %x\n", f, h)
}
- // Actually load the configuration to calculate its hash. This
- // will take into consideration inheritance of configuration
- // files, as well as the default configuration.
- //
- // OPT(dh): doing this means we'll load the config twice: once for
- // computing the hash, and once when analyzing the package from
- // source.
- cdir := config.Dir(pkg.GoFiles)
- if cdir == "" {
- fmt.Fprintf(key, "file %s %x\n", config.ConfigName, [cache.HashSize]byte{})
- } else {
- cfg, err := config.Load(cdir)
- if err != nil {
- return "", err
- }
- h := cache.NewHash(config.ConfigName)
- if _, err := h.Write([]byte(cfg.String())); err != nil {
- return "", err
- }
- fmt.Fprintf(key, "file %s %x\n", config.ConfigName, h.Sum())
- }
-
imps := make([]*Package, len(pkg.Imports))
copy(imps, pkg.Imports)
sort.Slice(imps, func(i, j int) bool {
@@ -1101,14 +959,12 @@ func (r *Runner) packageHash(pkg *Package) (string, error) {
return hex.EncodeToString(h[:]), nil
}
-func packageActionID(pkg *Package) cache.ActionID {
- key := cache.NewHash("package ID")
+// passActionID computes an ActionID for an analysis pass.
+func passActionID(pkg *Package, analyzer *analysis.Analyzer) (cache.ActionID, error) {
+ key := cache.NewHash("action ID")
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
fmt.Fprintf(key, "pkghash %s\n", pkg.hash)
- return key.Sum()
-}
+ fmt.Fprintf(key, "analyzer %s\n", analyzer.Name)
-// passActionID computes an ActionID for an analysis pass.
-func passActionID(pkg *Package, analyzer *analysis.Analyzer) cache.ActionID {
- return cache.Subkey(pkg.actionID, fmt.Sprintf("analyzer %s", analyzer.Name))
+ return key.Sum(), nil
}
diff --git a/vendor/honnef.co/go/tools/lint/stats.go b/vendor/honnef.co/go/tools/lint/stats.go
index 85eb97844..2f6508559 100644
--- a/vendor/honnef.co/go/tools/lint/stats.go
+++ b/vendor/honnef.co/go/tools/lint/stats.go
@@ -1,11 +1,5 @@
package lint
-import (
- "time"
-
- "golang.org/x/tools/go/analysis"
-)
-
const (
StateInitializing = 0
StateGraph = 1
@@ -23,16 +17,4 @@ type Stats struct {
Problems uint32
ActiveWorkers uint32
TotalWorkers uint32
- PrintAnalyzerMeasurement func(*analysis.Analyzer, *Package, time.Duration)
-}
-
-type AnalysisMeasurementKey struct {
- Analysis string
- Pkg string
-}
-
-func (s *Stats) MeasureAnalyzer(analysis *analysis.Analyzer, pkg *Package, d time.Duration) {
- if s.PrintAnalyzerMeasurement != nil {
- s.PrintAnalyzerMeasurement(analysis, pkg, d)
- }
}
diff --git a/vendor/honnef.co/go/tools/loader/loader.go b/vendor/honnef.co/go/tools/loader/loader.go
index a14f274d2..9c6885d48 100644
--- a/vendor/honnef.co/go/tools/loader/loader.go
+++ b/vendor/honnef.co/go/tools/loader/loader.go
@@ -1,7 +1,6 @@
package loader
import (
- "errors"
"fmt"
"go/ast"
"go/parser"
@@ -10,17 +9,22 @@ import (
"go/types"
"log"
"os"
+ "sync"
"golang.org/x/tools/go/gcexportdata"
"golang.org/x/tools/go/packages"
)
+type Loader struct {
+ exportMu sync.RWMutex
+}
+
// Graph resolves patterns and returns packages with all the
// information required to later load type information, and optionally
// syntax trees.
//
// The provided config can set any setting with the exception of Mode.
-func Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) {
+func (ld *Loader) Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) {
cfg.Mode = packages.NeedName | packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedTypesSizes
pkgs, err := packages.Load(&cfg, patterns...)
if err != nil {
@@ -30,29 +34,15 @@ func Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error)
packages.Visit(pkgs, nil, func(pkg *packages.Package) {
pkg.Fset = fset
})
-
- n := 0
- for _, pkg := range pkgs {
- if len(pkg.CompiledGoFiles) == 0 && len(pkg.Errors) == 0 && pkg.PkgPath != "unsafe" {
- // If a package consists only of test files, then
- // go/packages incorrectly(?) returns an empty package for
- // the non-test variant. Get rid of those packages. See
- // #646.
- //
- // Do not, however, skip packages that have errors. Those,
- // too, may have no files, but we want to print the
- // errors.
- continue
- }
- pkgs[n] = pkg
- n++
- }
- return pkgs[:n], nil
+ return pkgs, nil
}
// LoadFromExport loads a package from export data. All of its
// dependencies must have been loaded already.
-func LoadFromExport(pkg *packages.Package) error {
+func (ld *Loader) LoadFromExport(pkg *packages.Package) error {
+ ld.exportMu.Lock()
+ defer ld.exportMu.Unlock()
+
pkg.IllTyped = true
for path, pkg := range pkg.Imports {
if pkg.Types == nil {
@@ -97,7 +87,10 @@ func LoadFromExport(pkg *packages.Package) error {
// LoadFromSource loads a package from source. All of its dependencies
// must have been loaded already.
-func LoadFromSource(pkg *packages.Package) error {
+func (ld *Loader) LoadFromSource(pkg *packages.Package) error {
+ ld.exportMu.RLock()
+ defer ld.exportMu.RUnlock()
+
pkg.IllTyped = true
pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name)
@@ -128,12 +121,6 @@ func LoadFromSource(pkg *packages.Package) error {
if path == "unsafe" {
return types.Unsafe, nil
}
- if path == "C" {
- // go/packages doesn't tell us that cgo preprocessing
- // failed. When we subsequently try to parse the package,
- // we'll encounter the raw C import.
- return nil, errors.New("cgo preprocessing failed")
- }
imp := pkg.Imports[path]
if imp == nil {
return nil, nil
diff --git a/vendor/honnef.co/go/tools/pattern/convert.go b/vendor/honnef.co/go/tools/pattern/convert.go
deleted file mode 100644
index dfcd1560d..000000000
--- a/vendor/honnef.co/go/tools/pattern/convert.go
+++ /dev/null
@@ -1,242 +0,0 @@
-package pattern
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "reflect"
-)
-
-var astTypes = map[string]reflect.Type{
- "Ellipsis": reflect.TypeOf(ast.Ellipsis{}),
- "RangeStmt": reflect.TypeOf(ast.RangeStmt{}),
- "AssignStmt": reflect.TypeOf(ast.AssignStmt{}),
- "IndexExpr": reflect.TypeOf(ast.IndexExpr{}),
- "Ident": reflect.TypeOf(ast.Ident{}),
- "ValueSpec": reflect.TypeOf(ast.ValueSpec{}),
- "GenDecl": reflect.TypeOf(ast.GenDecl{}),
- "BinaryExpr": reflect.TypeOf(ast.BinaryExpr{}),
- "ForStmt": reflect.TypeOf(ast.ForStmt{}),
- "ArrayType": reflect.TypeOf(ast.ArrayType{}),
- "DeferStmt": reflect.TypeOf(ast.DeferStmt{}),
- "MapType": reflect.TypeOf(ast.MapType{}),
- "ReturnStmt": reflect.TypeOf(ast.ReturnStmt{}),
- "SliceExpr": reflect.TypeOf(ast.SliceExpr{}),
- "StarExpr": reflect.TypeOf(ast.StarExpr{}),
- "UnaryExpr": reflect.TypeOf(ast.UnaryExpr{}),
- "SendStmt": reflect.TypeOf(ast.SendStmt{}),
- "SelectStmt": reflect.TypeOf(ast.SelectStmt{}),
- "ImportSpec": reflect.TypeOf(ast.ImportSpec{}),
- "IfStmt": reflect.TypeOf(ast.IfStmt{}),
- "GoStmt": reflect.TypeOf(ast.GoStmt{}),
- "Field": reflect.TypeOf(ast.Field{}),
- "SelectorExpr": reflect.TypeOf(ast.SelectorExpr{}),
- "StructType": reflect.TypeOf(ast.StructType{}),
- "KeyValueExpr": reflect.TypeOf(ast.KeyValueExpr{}),
- "FuncType": reflect.TypeOf(ast.FuncType{}),
- "FuncLit": reflect.TypeOf(ast.FuncLit{}),
- "FuncDecl": reflect.TypeOf(ast.FuncDecl{}),
- "ChanType": reflect.TypeOf(ast.ChanType{}),
- "CallExpr": reflect.TypeOf(ast.CallExpr{}),
- "CaseClause": reflect.TypeOf(ast.CaseClause{}),
- "CommClause": reflect.TypeOf(ast.CommClause{}),
- "CompositeLit": reflect.TypeOf(ast.CompositeLit{}),
- "EmptyStmt": reflect.TypeOf(ast.EmptyStmt{}),
- "SwitchStmt": reflect.TypeOf(ast.SwitchStmt{}),
- "TypeSwitchStmt": reflect.TypeOf(ast.TypeSwitchStmt{}),
- "TypeAssertExpr": reflect.TypeOf(ast.TypeAssertExpr{}),
- "TypeSpec": reflect.TypeOf(ast.TypeSpec{}),
- "InterfaceType": reflect.TypeOf(ast.InterfaceType{}),
- "BranchStmt": reflect.TypeOf(ast.BranchStmt{}),
- "IncDecStmt": reflect.TypeOf(ast.IncDecStmt{}),
- "BasicLit": reflect.TypeOf(ast.BasicLit{}),
-}
-
-func ASTToNode(node interface{}) Node {
- switch node := node.(type) {
- case *ast.File:
- panic("cannot convert *ast.File to Node")
- case nil:
- return Nil{}
- case string:
- return String(node)
- case token.Token:
- return Token(node)
- case *ast.ExprStmt:
- return ASTToNode(node.X)
- case *ast.BlockStmt:
- if node == nil {
- return Nil{}
- }
- return ASTToNode(node.List)
- case *ast.FieldList:
- if node == nil {
- return Nil{}
- }
- return ASTToNode(node.List)
- case *ast.BasicLit:
- if node == nil {
- return Nil{}
- }
- case *ast.ParenExpr:
- return ASTToNode(node.X)
- }
-
- if node, ok := node.(ast.Node); ok {
- name := reflect.TypeOf(node).Elem().Name()
- T, ok := structNodes[name]
- if !ok {
- panic(fmt.Sprintf("internal error: unhandled type %T", node))
- }
-
- if reflect.ValueOf(node).IsNil() {
- return Nil{}
- }
- v := reflect.ValueOf(node).Elem()
- objs := make([]Node, T.NumField())
- for i := 0; i < T.NumField(); i++ {
- f := v.FieldByName(T.Field(i).Name)
- objs[i] = ASTToNode(f.Interface())
- }
-
- n, err := populateNode(name, objs, false)
- if err != nil {
- panic(fmt.Sprintf("internal error: %s", err))
- }
- return n
- }
-
- s := reflect.ValueOf(node)
- if s.Kind() == reflect.Slice {
- if s.Len() == 0 {
- return List{}
- }
- if s.Len() == 1 {
- return ASTToNode(s.Index(0).Interface())
- }
-
- tail := List{}
- for i := s.Len() - 1; i >= 0; i-- {
- head := ASTToNode(s.Index(i).Interface())
- l := List{
- Head: head,
- Tail: tail,
- }
- tail = l
- }
- return tail
- }
-
- panic(fmt.Sprintf("internal error: unhandled type %T", node))
-}
-
-func NodeToAST(node Node, state State) interface{} {
- switch node := node.(type) {
- case Binding:
- v, ok := state[node.Name]
- if !ok {
- // really we want to return an error here
- panic("XXX")
- }
- switch v := v.(type) {
- case types.Object:
- return &ast.Ident{Name: v.Name()}
- default:
- return v
- }
- case Builtin, Any, Object, Function, Not, Or:
- panic("XXX")
- case List:
- if (node == List{}) {
- return []ast.Node{}
- }
- x := []ast.Node{NodeToAST(node.Head, state).(ast.Node)}
- x = append(x, NodeToAST(node.Tail, state).([]ast.Node)...)
- return x
- case Token:
- return token.Token(node)
- case String:
- return string(node)
- case Nil:
- return nil
- }
-
- name := reflect.TypeOf(node).Name()
- T, ok := astTypes[name]
- if !ok {
- panic(fmt.Sprintf("internal error: unhandled type %T", node))
- }
- v := reflect.ValueOf(node)
- out := reflect.New(T)
- for i := 0; i < T.NumField(); i++ {
- fNode := v.FieldByName(T.Field(i).Name)
- if (fNode == reflect.Value{}) {
- continue
- }
- fAST := out.Elem().FieldByName(T.Field(i).Name)
- switch fAST.Type().Kind() {
- case reflect.Slice:
- c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state))
- if c.Kind() != reflect.Slice {
- // it's a single node in the pattern, we have to wrap
- // it in a slice
- slice := reflect.MakeSlice(fAST.Type(), 1, 1)
- slice.Index(0).Set(c)
- c = slice
- }
- switch fAST.Interface().(type) {
- case []ast.Node:
- switch cc := c.Interface().(type) {
- case []ast.Node:
- fAST.Set(c)
- case []ast.Expr:
- var slice []ast.Node
- for _, el := range cc {
- slice = append(slice, el)
- }
- fAST.Set(reflect.ValueOf(slice))
- default:
- panic("XXX")
- }
- case []ast.Expr:
- switch cc := c.Interface().(type) {
- case []ast.Node:
- var slice []ast.Expr
- for _, el := range cc {
- slice = append(slice, el.(ast.Expr))
- }
- fAST.Set(reflect.ValueOf(slice))
- case []ast.Expr:
- fAST.Set(c)
- default:
- panic("XXX")
- }
- default:
- panic("XXX")
- }
- case reflect.Int:
- c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state))
- switch c.Kind() {
- case reflect.String:
- tok, ok := tokensByString[c.Interface().(string)]
- if !ok {
- // really we want to return an error here
- panic("XXX")
- }
- fAST.SetInt(int64(tok))
- case reflect.Int:
- fAST.Set(c)
- default:
- panic(fmt.Sprintf("internal error: unexpected kind %s", c.Kind()))
- }
- default:
- r := NodeToAST(fNode.Interface().(Node), state)
- if r != nil {
- fAST.Set(reflect.ValueOf(r))
- }
- }
- }
-
- return out.Interface().(ast.Node)
-}
diff --git a/vendor/honnef.co/go/tools/pattern/doc.go b/vendor/honnef.co/go/tools/pattern/doc.go
deleted file mode 100644
index 05d86c251..000000000
--- a/vendor/honnef.co/go/tools/pattern/doc.go
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
-Package pattern implements a simple language for pattern matching Go ASTs.
-
-Design decisions and trade-offs
-
-The language is designed specifically for the task of filtering ASTs
-to simplify the implementation of analyses in staticcheck.
-It is also intended to be trivial to parse and execute.
-
-To that end, we make certain decisions that make the language more
-suited to its task, while making certain queries infeasible.
-
-Furthermore, it is fully expected that the majority of analyses will still require ordinary Go code
-to further process the filtered AST, to make use of type information and to enforce complex invariants.
-It is not our goal to design a scripting language for writing entire checks in.
-
-The language
-
-At its core, patterns are a representation of Go ASTs, allowing for the use of placeholders to enable pattern matching.
-Their syntax is inspired by LISP and Haskell, but unlike LISP, the core unit of patterns isn't the list, but the node.
-There is a fixed set of nodes, identified by name, and with the exception of the Or node, all nodes have a fixed number of arguments.
-In addition to nodes, there are atoms, which represent basic units such as strings or the nil value.
-
-Pattern matching is implemented via bindings, represented by the Binding node.
-A Binding can match nodes and associate them with names, to later recall the nodes.
-This allows for expressing "this node must be equal to that node" constraints.
-
-To simplify writing and reading patterns, a small amount of additional syntax exists on top of nodes and atoms.
-This additional syntax doesn't add any new features of its own, it simply provides shortcuts to creating nodes and atoms.
-
-To show an example of a pattern, first consider this snippet of Go code:
-
- if x := fn(); x != nil {
- for _, v := range x {
- println(v, x)
- }
- }
-
-The corresponding AST expressed as an idiomatic pattern would look as follows:
-
- (IfStmt
- (AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") []))
- (BinaryExpr (Ident "x") "!=" (Ident "nil"))
- (RangeStmt
- (Ident "_") (Ident "v") ":=" (Ident "x")
- (CallExpr (Ident "println") [(Ident "v") (Ident "x")]))
- nil)
-
-Two things are worth noting about this representation.
-First, the [el1 el2 ...] syntax is a short-hand for creating lists.
-It is a short-hand for el1:el2:[], which itself is a short-hand for (List el1 (List el2 (List nil nil)).
-Second, note the absence of a lot of lists in places that normally accept lists.
-For example, assignment assigns a number of right-hands to a number of left-hands, yet our AssignStmt is lacking any form of list.
-This is due to the fact that a single node can match a list of exactly one element.
-Thus, the two following forms have identical matching behavior:
-
- (AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") []))
- (AssignStmt [(Ident "x")] ":=" [(CallExpr (Ident "fn") [])])
-
-This section serves as an overview of the language's syntax.
-More in-depth explanations of the matching behavior as well as an exhaustive list of node types follows in the coming sections.
-
-Pattern matching
-
-TODO write about pattern matching
-
-- inspired by haskell syntax, but much, much simpler and naive
-
-Node types
-
-The language contains two kinds of nodes: those that map to nodes in the AST, and those that implement additional logic.
-
-Nodes that map directly to AST nodes are named identically to the types in the go/ast package.
-What follows is an exhaustive list of these nodes:
-
- (ArrayType len elt)
- (AssignStmt lhs tok rhs)
- (BasicLit kind value)
- (BinaryExpr x op y)
- (BranchStmt tok label)
- (CallExpr fun args)
- (CaseClause list body)
- (ChanType dir value)
- (CommClause comm body)
- (CompositeLit type elts)
- (DeferStmt call)
- (Ellipsis elt)
- (EmptyStmt)
- (Field names type tag)
- (ForStmt init cond post body)
- (FuncDecl recv name type body)
- (FuncLit type body)
- (FuncType params results)
- (GenDecl specs)
- (GoStmt call)
- (Ident name)
- (IfStmt init cond body else)
- (ImportSpec name path)
- (IncDecStmt x tok)
- (IndexExpr x index)
- (InterfaceType methods)
- (KeyValueExpr key value)
- (MapType key value)
- (RangeStmt key value tok x body)
- (ReturnStmt results)
- (SelectStmt body)
- (SelectorExpr x sel)
- (SendStmt chan value)
- (SliceExpr x low high max)
- (StarExpr x)
- (StructType fields)
- (SwitchStmt init tag body)
- (TypeAssertExpr)
- (TypeSpec name type)
- (TypeSwitchStmt init assign body)
- (UnaryExpr op x)
- (ValueSpec names type values)
-
-Additionally, there are the String, Token and nil atoms.
-Strings are double-quoted string literals, as in (Ident "someName").
-Tokens are also represented as double-quoted string literals, but are converted to token.Token values in contexts that require tokens,
-such as in (BinaryExpr x "<" y), where "<" is transparently converted to token.LSS during matching.
-The keyword 'nil' denotes the nil value, which represents the absence of any value.
-
-We also defines the (List head tail) node, which is used to represent sequences of elements as a singly linked list.
-The head is a single element, and the tail is the remainder of the list.
-For example,
-
- (List "foo" (List "bar" (List "baz" (List nil nil))))
-
-represents a list of three elements, "foo", "bar" and "baz". There is dedicated syntax for writing lists, which looks as follows:
-
- ["foo" "bar" "baz"]
-
-This syntax is itself syntactic sugar for the following form:
-
- "foo":"bar":"baz":[]
-
-This form is of particular interest for pattern matching, as it allows matching on the head and tail. For example,
-
- "foo":"bar":_
-
-would match any list with at least two elements, where the first two elements are "foo" and "bar". This is equivalent to writing
-
- (List "foo" (List "bar" _))
-
-Note that it is not possible to match from the end of the list.
-That is, there is no way to express a query such as "a list of any length where the last element is foo".
-
-Note that unlike in LISP, nil and empty lists are distinct from one another.
-In patterns, with respect to lists, nil is akin to Go's untyped nil.
-It will match a nil ast.Node, but it will not match a nil []ast.Expr. Nil will, however, match pointers to named types such as *ast.Ident.
-Similarly, lists are akin to Go's
-slices. An empty list will match both a nil and an empty []ast.Expr, but it will not match a nil ast.Node.
-
-Due to the difference between nil and empty lists, an empty list is represented as (List nil nil), i.e. a list with no head or tail.
-Similarly, a list of one element is represented as (List el (List nil nil)). Unlike in LISP, it cannot be represented by (List el nil).
-
-Finally, there are nodes that implement special logic or matching behavior.
-
-(Any) matches any value. The underscore (_) maps to this node, making the following two forms equivalent:
-
- (Ident _)
- (Ident (Any))
-
-(Builtin name) matches a built-in identifier or function by name.
-This is a type-aware variant of (Ident name).
-Instead of only comparing the name, it resolves the object behind the name and makes sure it's a pre-declared identifier.
-
-For example, in the following piece of code
-
- func fn() {
- println(true)
- true := false
- println(true)
- }
-
-the pattern
-
- (Builtin "true")
-
-will match exactly once, on the first use of 'true' in the function.
-Subsequent occurrences of 'true' no longer refer to the pre-declared identifier.
-
-(Object name) matches an identifier by name, but yields the
-types.Object it refers to.
-
-(Function name) matches ast.Idents and ast.SelectorExprs that refer to a function with a given fully qualified name.
-For example, "net/url.PathEscape" matches the PathEscape function in the net/url package,
-and "(net/url.EscapeError).Error" refers to the Error method on the net/url.EscapeError type,
-either on an instance of the type, or on the type itself.
-
-For example, the following patterns match the following lines of code:
-
- (CallExpr (Function "fmt.Println") _) // pattern 1
- (CallExpr (Function "(net/url.EscapeError).Error") _) // pattern 2
-
- fmt.Println("hello, world") // matches pattern 1
- var x url.EscapeError
- x.Error() // matches pattern 2
- (url.EscapeError).Error(x) // also matches pattern 2
-
-(Binding name node) creates or uses a binding.
-Bindings work like variable assignments, allowing referring to already matched nodes.
-As an example, bindings are necessary to match self-assignment of the form "x = x",
-since we need to express that the right-hand side is identical to the left-hand side.
-
-If a binding's node is not nil, the matcher will attempt to match a node according to the pattern.
-If a binding's node is nil, the binding will either recall an existing value, or match the Any node.
-It is an error to provide a non-nil node to a binding that has already been bound.
-
-Referring back to the earlier example, the following pattern will match self-assignment of idents:
-
- (AssignStmt (Binding "lhs" (Ident _)) "=" (Binding "lhs" nil))
-
-Because bindings are a crucial component of pattern matching, there is special syntax for creating and recalling bindings.
-Lower-case names refer to bindings. If standing on its own, the name "foo" will be equivalent to (Binding "foo" nil).
-If a name is followed by an at-sign (@) then it will create a binding for the node that follows.
-Together, this allows us to rewrite the earlier example as follows:
-
- (AssignStmt lhs@(Ident _) "=" lhs)
-
-(Or nodes...) is a variadic node that tries matching each node until one succeeds. For example, the following pattern matches all idents of name "foo" or "bar":
-
- (Ident (Or "foo" "bar"))
-
-We could also have written
-
- (Or (Ident "foo") (Ident "bar"))
-
-and achieved the same result. We can also mix different kinds of nodes:
-
- (Or (Ident "foo") (CallExpr (Ident "bar") _))
-
-When using bindings inside of nodes used inside Or, all or none of the bindings will be bound.
-That is, partially matched nodes that ultimately failed to match will not produce any bindings observable outside of the matching attempt.
-We can thus write
-
- (Or (Ident name) (CallExpr name))
-
-and 'name' will either be a String if the first option matched, or an Ident or SelectorExpr if the second option matched.
-
-(Not node)
-
-The Not node negates a match. For example, (Not (Ident _)) will match all nodes that aren't identifiers.
-
-ChanDir(0)
-
-Automatic unnesting of AST nodes
-
-The Go AST has several types of nodes that wrap other nodes.
-To simplify matching, we automatically unwrap some of these nodes.
-
-These nodes are ExprStmt (for using expressions in a statement context),
-ParenExpr (for parenthesized expressions),
-DeclStmt (for declarations in a statement context),
-and LabeledStmt (for labeled statements).
-
-Thus, the query
-
- (FuncLit _ [(CallExpr _ _)]
-
-will match a function literal containing a single function call,
-even though in the actual Go AST, the CallExpr is nested inside an ExprStmt,
-as function bodies are made up of sequences of statements.
-
-On the flip-side, there is no way to specifically match these wrapper nodes.
-For example, there is no way of searching for unnecessary parentheses, like in the following piece of Go code:
-
- ((x)) += 2
-
-*/
-package pattern
diff --git a/vendor/honnef.co/go/tools/pattern/fuzz.go b/vendor/honnef.co/go/tools/pattern/fuzz.go
deleted file mode 100644
index 52e7df974..000000000
--- a/vendor/honnef.co/go/tools/pattern/fuzz.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// +build gofuzz
-
-package pattern
-
-import (
- "go/ast"
- goparser "go/parser"
- "go/token"
- "os"
- "path/filepath"
- "strings"
-)
-
-var files []*ast.File
-
-func init() {
- fset := token.NewFileSet()
- filepath.Walk("/usr/lib/go/src", func(path string, info os.FileInfo, err error) error {
- if err != nil {
- // XXX error handling
- panic(err)
- }
- if !strings.HasSuffix(path, ".go") {
- return nil
- }
- f, err := goparser.ParseFile(fset, path, nil, 0)
- if err != nil {
- return nil
- }
- files = append(files, f)
- return nil
- })
-}
-
-func Fuzz(data []byte) int {
- p := &Parser{}
- pat, err := p.Parse(string(data))
- if err != nil {
- if strings.Contains(err.Error(), "internal error") {
- panic(err)
- }
- return 0
- }
- _ = pat.Root.String()
-
- for _, f := range files {
- Match(pat.Root, f)
- }
- return 1
-}
diff --git a/vendor/honnef.co/go/tools/pattern/lexer.go b/vendor/honnef.co/go/tools/pattern/lexer.go
deleted file mode 100644
index fb72e392b..000000000
--- a/vendor/honnef.co/go/tools/pattern/lexer.go
+++ /dev/null
@@ -1,221 +0,0 @@
-package pattern
-
-import (
- "fmt"
- "go/token"
- "unicode"
- "unicode/utf8"
-)
-
-type lexer struct {
- f *token.File
-
- input string
- start int
- pos int
- width int
- items chan item
-}
-
-type itemType int
-
-const eof = -1
-
-const (
- itemError itemType = iota
- itemLeftParen
- itemRightParen
- itemLeftBracket
- itemRightBracket
- itemTypeName
- itemVariable
- itemAt
- itemColon
- itemBlank
- itemString
- itemEOF
-)
-
-func (typ itemType) String() string {
- switch typ {
- case itemError:
- return "ERROR"
- case itemLeftParen:
- return "("
- case itemRightParen:
- return ")"
- case itemLeftBracket:
- return "["
- case itemRightBracket:
- return "]"
- case itemTypeName:
- return "TYPE"
- case itemVariable:
- return "VAR"
- case itemAt:
- return "@"
- case itemColon:
- return ":"
- case itemBlank:
- return "_"
- case itemString:
- return "STRING"
- case itemEOF:
- return "EOF"
- default:
- return fmt.Sprintf("itemType(%d)", typ)
- }
-}
-
-type item struct {
- typ itemType
- val string
- pos int
-}
-
-type stateFn func(*lexer) stateFn
-
-func (l *lexer) run() {
- for state := lexStart; state != nil; {
- state = state(l)
- }
- close(l.items)
-}
-
-func (l *lexer) emitValue(t itemType, value string) {
- l.items <- item{t, value, l.start}
- l.start = l.pos
-}
-
-func (l *lexer) emit(t itemType) {
- l.items <- item{t, l.input[l.start:l.pos], l.start}
- l.start = l.pos
-}
-
-func lexStart(l *lexer) stateFn {
- switch r := l.next(); {
- case r == eof:
- l.emit(itemEOF)
- return nil
- case unicode.IsSpace(r):
- l.ignore()
- case r == '(':
- l.emit(itemLeftParen)
- case r == ')':
- l.emit(itemRightParen)
- case r == '[':
- l.emit(itemLeftBracket)
- case r == ']':
- l.emit(itemRightBracket)
- case r == '@':
- l.emit(itemAt)
- case r == ':':
- l.emit(itemColon)
- case r == '_':
- l.emit(itemBlank)
- case r == '"':
- l.backup()
- return lexString
- case unicode.IsUpper(r):
- l.backup()
- return lexType
- case unicode.IsLower(r):
- l.backup()
- return lexVariable
- default:
- return l.errorf("unexpected character %c", r)
- }
- return lexStart
-}
-
-func (l *lexer) next() (r rune) {
- if l.pos >= len(l.input) {
- l.width = 0
- return eof
- }
- r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
-
- if r == '\n' {
- l.f.AddLine(l.pos)
- }
-
- l.pos += l.width
-
- return r
-}
-
-func (l *lexer) ignore() {
- l.start = l.pos
-}
-
-func (l *lexer) backup() {
- l.pos -= l.width
-}
-
-func (l *lexer) errorf(format string, args ...interface{}) stateFn {
- // TODO(dh): emit position information in errors
- l.items <- item{
- itemError,
- fmt.Sprintf(format, args...),
- l.start,
- }
- return nil
-}
-
-func isAlphaNumeric(r rune) bool {
- return r >= '0' && r <= '9' ||
- r >= 'a' && r <= 'z' ||
- r >= 'A' && r <= 'Z'
-}
-
-func lexString(l *lexer) stateFn {
- l.next() // skip quote
- escape := false
-
- var runes []rune
- for {
- switch r := l.next(); r {
- case eof:
- return l.errorf("unterminated string")
- case '"':
- if !escape {
- l.emitValue(itemString, string(runes))
- return lexStart
- } else {
- runes = append(runes, '"')
- escape = false
- }
- case '\\':
- if escape {
- runes = append(runes, '\\')
- escape = false
- } else {
- escape = true
- }
- default:
- runes = append(runes, r)
- }
- }
-}
-
-func lexType(l *lexer) stateFn {
- l.next()
- for {
- if !isAlphaNumeric(l.next()) {
- l.backup()
- l.emit(itemTypeName)
- return lexStart
- }
- }
-}
-
-func lexVariable(l *lexer) stateFn {
- l.next()
- for {
- if !isAlphaNumeric(l.next()) {
- l.backup()
- l.emit(itemVariable)
- return lexStart
- }
- }
-}
diff --git a/vendor/honnef.co/go/tools/pattern/match.go b/vendor/honnef.co/go/tools/pattern/match.go
deleted file mode 100644
index ff039baa7..000000000
--- a/vendor/honnef.co/go/tools/pattern/match.go
+++ /dev/null
@@ -1,513 +0,0 @@
-package pattern
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "reflect"
-
- "honnef.co/go/tools/lint"
-)
-
-var tokensByString = map[string]Token{
- "INT": Token(token.INT),
- "FLOAT": Token(token.FLOAT),
- "IMAG": Token(token.IMAG),
- "CHAR": Token(token.CHAR),
- "STRING": Token(token.STRING),
- "+": Token(token.ADD),
- "-": Token(token.SUB),
- "*": Token(token.MUL),
- "/": Token(token.QUO),
- "%": Token(token.REM),
- "&": Token(token.AND),
- "|": Token(token.OR),
- "^": Token(token.XOR),
- "<<": Token(token.SHL),
- ">>": Token(token.SHR),
- "&^": Token(token.AND_NOT),
- "+=": Token(token.ADD_ASSIGN),
- "-=": Token(token.SUB_ASSIGN),
- "*=": Token(token.MUL_ASSIGN),
- "/=": Token(token.QUO_ASSIGN),
- "%=": Token(token.REM_ASSIGN),
- "&=": Token(token.AND_ASSIGN),
- "|=": Token(token.OR_ASSIGN),
- "^=": Token(token.XOR_ASSIGN),
- "<<=": Token(token.SHL_ASSIGN),
- ">>=": Token(token.SHR_ASSIGN),
- "&^=": Token(token.AND_NOT_ASSIGN),
- "&&": Token(token.LAND),
- "||": Token(token.LOR),
- "<-": Token(token.ARROW),
- "++": Token(token.INC),
- "--": Token(token.DEC),
- "==": Token(token.EQL),
- "<": Token(token.LSS),
- ">": Token(token.GTR),
- "=": Token(token.ASSIGN),
- "!": Token(token.NOT),
- "!=": Token(token.NEQ),
- "<=": Token(token.LEQ),
- ">=": Token(token.GEQ),
- ":=": Token(token.DEFINE),
- "...": Token(token.ELLIPSIS),
- "IMPORT": Token(token.IMPORT),
- "VAR": Token(token.VAR),
- "TYPE": Token(token.TYPE),
- "CONST": Token(token.CONST),
-}
-
-func maybeToken(node Node) (Node, bool) {
- if node, ok := node.(String); ok {
- if tok, ok := tokensByString[string(node)]; ok {
- return tok, true
- }
- return node, false
- }
- return node, false
-}
-
-func isNil(v interface{}) bool {
- if v == nil {
- return true
- }
- if _, ok := v.(Nil); ok {
- return true
- }
- return false
-}
-
-type matcher interface {
- Match(*Matcher, interface{}) (interface{}, bool)
-}
-
-type State = map[string]interface{}
-
-type Matcher struct {
- TypesInfo *types.Info
- State State
-}
-
-func (m *Matcher) fork() *Matcher {
- state := make(State, len(m.State))
- for k, v := range m.State {
- state[k] = v
- }
- return &Matcher{
- TypesInfo: m.TypesInfo,
- State: state,
- }
-}
-
-func (m *Matcher) merge(mc *Matcher) {
- m.State = mc.State
-}
-
-func (m *Matcher) Match(a Node, b ast.Node) bool {
- m.State = State{}
- _, ok := match(m, a, b)
- return ok
-}
-
-func Match(a Node, b ast.Node) (*Matcher, bool) {
- m := &Matcher{}
- ret := m.Match(a, b)
- return m, ret
-}
-
-// Match two items, which may be (Node, AST) or (AST, AST)
-func match(m *Matcher, l, r interface{}) (interface{}, bool) {
- if _, ok := r.(Node); ok {
- panic("Node mustn't be on right side of match")
- }
-
- switch l := l.(type) {
- case *ast.ParenExpr:
- return match(m, l.X, r)
- case *ast.ExprStmt:
- return match(m, l.X, r)
- case *ast.DeclStmt:
- return match(m, l.Decl, r)
- case *ast.LabeledStmt:
- return match(m, l.Stmt, r)
- case *ast.BlockStmt:
- return match(m, l.List, r)
- case *ast.FieldList:
- return match(m, l.List, r)
- }
-
- switch r := r.(type) {
- case *ast.ParenExpr:
- return match(m, l, r.X)
- case *ast.ExprStmt:
- return match(m, l, r.X)
- case *ast.DeclStmt:
- return match(m, l, r.Decl)
- case *ast.LabeledStmt:
- return match(m, l, r.Stmt)
- case *ast.BlockStmt:
- if r == nil {
- return match(m, l, nil)
- }
- return match(m, l, r.List)
- case *ast.FieldList:
- if r == nil {
- return match(m, l, nil)
- }
- return match(m, l, r.List)
- case *ast.BasicLit:
- if r == nil {
- return match(m, l, nil)
- }
- }
-
- if l, ok := l.(matcher); ok {
- return l.Match(m, r)
- }
-
- if l, ok := l.(Node); ok {
- // Matching of pattern with concrete value
- return matchNodeAST(m, l, r)
- }
-
- if l == nil || r == nil {
- return nil, l == r
- }
-
- {
- ln, ok1 := l.(ast.Node)
- rn, ok2 := r.(ast.Node)
- if ok1 && ok2 {
- return matchAST(m, ln, rn)
- }
- }
-
- {
- obj, ok := l.(types.Object)
- if ok {
- switch r := r.(type) {
- case *ast.Ident:
- return obj, obj == m.TypesInfo.ObjectOf(r)
- case *ast.SelectorExpr:
- return obj, obj == m.TypesInfo.ObjectOf(r.Sel)
- default:
- return obj, false
- }
- }
- }
-
- {
- ln, ok1 := l.([]ast.Expr)
- rn, ok2 := r.([]ast.Expr)
- if ok1 || ok2 {
- if ok1 && !ok2 {
- rn = []ast.Expr{r.(ast.Expr)}
- } else if !ok1 && ok2 {
- ln = []ast.Expr{l.(ast.Expr)}
- }
-
- if len(ln) != len(rn) {
- return nil, false
- }
- for i, ll := range ln {
- if _, ok := match(m, ll, rn[i]); !ok {
- return nil, false
- }
- }
- return r, true
- }
- }
-
- {
- ln, ok1 := l.([]ast.Stmt)
- rn, ok2 := r.([]ast.Stmt)
- if ok1 || ok2 {
- if ok1 && !ok2 {
- rn = []ast.Stmt{r.(ast.Stmt)}
- } else if !ok1 && ok2 {
- ln = []ast.Stmt{l.(ast.Stmt)}
- }
-
- if len(ln) != len(rn) {
- return nil, false
- }
- for i, ll := range ln {
- if _, ok := match(m, ll, rn[i]); !ok {
- return nil, false
- }
- }
- return r, true
- }
- }
-
- panic(fmt.Sprintf("unsupported comparison: %T and %T", l, r))
-}
-
-// Match a Node with an AST node
-func matchNodeAST(m *Matcher, a Node, b interface{}) (interface{}, bool) {
- switch b := b.(type) {
- case []ast.Stmt:
- // 'a' is not a List or we'd be using its Match
- // implementation.
-
- if len(b) != 1 {
- return nil, false
- }
- return match(m, a, b[0])
- case []ast.Expr:
- // 'a' is not a List or we'd be using its Match
- // implementation.
-
- if len(b) != 1 {
- return nil, false
- }
- return match(m, a, b[0])
- case ast.Node:
- ra := reflect.ValueOf(a)
- rb := reflect.ValueOf(b).Elem()
-
- if ra.Type().Name() != rb.Type().Name() {
- return nil, false
- }
-
- for i := 0; i < ra.NumField(); i++ {
- af := ra.Field(i)
- fieldName := ra.Type().Field(i).Name
- bf := rb.FieldByName(fieldName)
- if (bf == reflect.Value{}) {
- panic(fmt.Sprintf("internal error: could not find field %s in type %t when comparing with %T", fieldName, b, a))
- }
- ai := af.Interface()
- bi := bf.Interface()
- if ai == nil {
- return b, bi == nil
- }
- if _, ok := match(m, ai.(Node), bi); !ok {
- return b, false
- }
- }
- return b, true
- case nil:
- return nil, a == Nil{}
- default:
- panic(fmt.Sprintf("unhandled type %T", b))
- }
-}
-
-// Match two AST nodes
-func matchAST(m *Matcher, a, b ast.Node) (interface{}, bool) {
- ra := reflect.ValueOf(a)
- rb := reflect.ValueOf(b)
-
- if ra.Type() != rb.Type() {
- return nil, false
- }
- if ra.IsNil() || rb.IsNil() {
- return rb, ra.IsNil() == rb.IsNil()
- }
-
- ra = ra.Elem()
- rb = rb.Elem()
- for i := 0; i < ra.NumField(); i++ {
- af := ra.Field(i)
- bf := rb.Field(i)
- if af.Type() == rtTokPos || af.Type() == rtObject || af.Type() == rtCommentGroup {
- continue
- }
-
- switch af.Kind() {
- case reflect.Slice:
- if af.Len() != bf.Len() {
- return nil, false
- }
- for j := 0; j < af.Len(); j++ {
- if _, ok := match(m, af.Index(j).Interface().(ast.Node), bf.Index(j).Interface().(ast.Node)); !ok {
- return nil, false
- }
- }
- case reflect.String:
- if af.String() != bf.String() {
- return nil, false
- }
- case reflect.Int:
- if af.Int() != bf.Int() {
- return nil, false
- }
- case reflect.Bool:
- if af.Bool() != bf.Bool() {
- return nil, false
- }
- case reflect.Ptr, reflect.Interface:
- if _, ok := match(m, af.Interface(), bf.Interface()); !ok {
- return nil, false
- }
- default:
- panic(fmt.Sprintf("internal error: unhandled kind %s (%T)", af.Kind(), af.Interface()))
- }
- }
- return b, true
-}
-
-func (b Binding) Match(m *Matcher, node interface{}) (interface{}, bool) {
- if isNil(b.Node) {
- v, ok := m.State[b.Name]
- if ok {
- // Recall value
- return match(m, v, node)
- }
- // Matching anything
- b.Node = Any{}
- }
-
- // Store value
- if _, ok := m.State[b.Name]; ok {
- panic(fmt.Sprintf("binding already created: %s", b.Name))
- }
- new, ret := match(m, b.Node, node)
- if ret {
- m.State[b.Name] = new
- }
- return new, ret
-}
-
-func (Any) Match(m *Matcher, node interface{}) (interface{}, bool) {
- return node, true
-}
-
-func (l List) Match(m *Matcher, node interface{}) (interface{}, bool) {
- v := reflect.ValueOf(node)
- if v.Kind() == reflect.Slice {
- if isNil(l.Head) {
- return node, v.Len() == 0
- }
- if v.Len() == 0 {
- return nil, false
- }
- // OPT(dh): don't check the entire tail if head didn't match
- _, ok1 := match(m, l.Head, v.Index(0).Interface())
- _, ok2 := match(m, l.Tail, v.Slice(1, v.Len()).Interface())
- return node, ok1 && ok2
- }
- // Our empty list does not equal an untyped Go nil. This way, we can
- // tell apart an if with no else and an if with an empty else.
- return nil, false
-}
-
-func (s String) Match(m *Matcher, node interface{}) (interface{}, bool) {
- switch o := node.(type) {
- case token.Token:
- if tok, ok := maybeToken(s); ok {
- return match(m, tok, node)
- }
- return nil, false
- case string:
- return o, string(s) == o
- default:
- return nil, false
- }
-}
-
-func (tok Token) Match(m *Matcher, node interface{}) (interface{}, bool) {
- o, ok := node.(token.Token)
- if !ok {
- return nil, false
- }
- return o, token.Token(tok) == o
-}
-
-func (Nil) Match(m *Matcher, node interface{}) (interface{}, bool) {
- return nil, isNil(node)
-}
-
-func (builtin Builtin) Match(m *Matcher, node interface{}) (interface{}, bool) {
- ident, ok := node.(*ast.Ident)
- if !ok {
- return nil, false
- }
- obj := m.TypesInfo.ObjectOf(ident)
- if obj != types.Universe.Lookup(ident.Name) {
- return nil, false
- }
- return match(m, builtin.Name, ident.Name)
-}
-
-func (obj Object) Match(m *Matcher, node interface{}) (interface{}, bool) {
- ident, ok := node.(*ast.Ident)
- if !ok {
- return nil, false
- }
-
- id := m.TypesInfo.ObjectOf(ident)
- _, ok = match(m, obj.Name, ident.Name)
- return id, ok
-}
-
-func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) {
- var name string
- var obj types.Object
- switch node := node.(type) {
- case *ast.Ident:
- obj = m.TypesInfo.ObjectOf(node)
- switch obj := obj.(type) {
- case *types.Func:
- name = lint.FuncName(obj)
- case *types.Builtin:
- name = obj.Name()
- default:
- return nil, false
- }
- case *ast.SelectorExpr:
- var ok bool
- obj, ok = m.TypesInfo.ObjectOf(node.Sel).(*types.Func)
- if !ok {
- return nil, false
- }
- name = lint.FuncName(obj.(*types.Func))
- default:
- return nil, false
- }
- _, ok := match(m, fn.Name, name)
- return obj, ok
-}
-
-func (or Or) Match(m *Matcher, node interface{}) (interface{}, bool) {
- for _, opt := range or.Nodes {
- mc := m.fork()
- if ret, ok := match(mc, opt, node); ok {
- m.merge(mc)
- return ret, true
- }
- }
- return nil, false
-}
-
-func (not Not) Match(m *Matcher, node interface{}) (interface{}, bool) {
- _, ok := match(m, not.Node, node)
- if ok {
- return nil, false
- }
- return node, true
-}
-
-var (
- // Types of fields in go/ast structs that we want to skip
- rtTokPos = reflect.TypeOf(token.Pos(0))
- rtObject = reflect.TypeOf((*ast.Object)(nil))
- rtCommentGroup = reflect.TypeOf((*ast.CommentGroup)(nil))
-)
-
-var (
- _ matcher = Binding{}
- _ matcher = Any{}
- _ matcher = List{}
- _ matcher = String("")
- _ matcher = Token(0)
- _ matcher = Nil{}
- _ matcher = Builtin{}
- _ matcher = Object{}
- _ matcher = Function{}
- _ matcher = Or{}
- _ matcher = Not{}
-)
diff --git a/vendor/honnef.co/go/tools/pattern/parser.go b/vendor/honnef.co/go/tools/pattern/parser.go
deleted file mode 100644
index 009238b86..000000000
--- a/vendor/honnef.co/go/tools/pattern/parser.go
+++ /dev/null
@@ -1,455 +0,0 @@
-package pattern
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "reflect"
-)
-
-type Pattern struct {
- Root Node
- // Relevant contains instances of ast.Node that could potentially
- // initiate a successful match of the pattern.
- Relevant []reflect.Type
-}
-
-func MustParse(s string) Pattern {
- p := &Parser{AllowTypeInfo: true}
- pat, err := p.Parse(s)
- if err != nil {
- panic(err)
- }
- return pat
-}
-
-func roots(node Node) []reflect.Type {
- switch node := node.(type) {
- case Or:
- var out []reflect.Type
- for _, el := range node.Nodes {
- out = append(out, roots(el)...)
- }
- return out
- case Not:
- return roots(node.Node)
- case Binding:
- return roots(node.Node)
- case Nil, nil:
- // this branch is reached via bindings
- return allTypes
- default:
- Ts, ok := nodeToASTTypes[reflect.TypeOf(node)]
- if !ok {
- panic(fmt.Sprintf("internal error: unhandled type %T", node))
- }
- return Ts
- }
-}
-
-var allTypes = []reflect.Type{
- reflect.TypeOf((*ast.RangeStmt)(nil)),
- reflect.TypeOf((*ast.AssignStmt)(nil)),
- reflect.TypeOf((*ast.IndexExpr)(nil)),
- reflect.TypeOf((*ast.Ident)(nil)),
- reflect.TypeOf((*ast.ValueSpec)(nil)),
- reflect.TypeOf((*ast.GenDecl)(nil)),
- reflect.TypeOf((*ast.BinaryExpr)(nil)),
- reflect.TypeOf((*ast.ForStmt)(nil)),
- reflect.TypeOf((*ast.ArrayType)(nil)),
- reflect.TypeOf((*ast.DeferStmt)(nil)),
- reflect.TypeOf((*ast.MapType)(nil)),
- reflect.TypeOf((*ast.ReturnStmt)(nil)),
- reflect.TypeOf((*ast.SliceExpr)(nil)),
- reflect.TypeOf((*ast.StarExpr)(nil)),
- reflect.TypeOf((*ast.UnaryExpr)(nil)),
- reflect.TypeOf((*ast.SendStmt)(nil)),
- reflect.TypeOf((*ast.SelectStmt)(nil)),
- reflect.TypeOf((*ast.ImportSpec)(nil)),
- reflect.TypeOf((*ast.IfStmt)(nil)),
- reflect.TypeOf((*ast.GoStmt)(nil)),
- reflect.TypeOf((*ast.Field)(nil)),
- reflect.TypeOf((*ast.SelectorExpr)(nil)),
- reflect.TypeOf((*ast.StructType)(nil)),
- reflect.TypeOf((*ast.KeyValueExpr)(nil)),
- reflect.TypeOf((*ast.FuncType)(nil)),
- reflect.TypeOf((*ast.FuncLit)(nil)),
- reflect.TypeOf((*ast.FuncDecl)(nil)),
- reflect.TypeOf((*ast.ChanType)(nil)),
- reflect.TypeOf((*ast.CallExpr)(nil)),
- reflect.TypeOf((*ast.CaseClause)(nil)),
- reflect.TypeOf((*ast.CommClause)(nil)),
- reflect.TypeOf((*ast.CompositeLit)(nil)),
- reflect.TypeOf((*ast.EmptyStmt)(nil)),
- reflect.TypeOf((*ast.SwitchStmt)(nil)),
- reflect.TypeOf((*ast.TypeSwitchStmt)(nil)),
- reflect.TypeOf((*ast.TypeAssertExpr)(nil)),
- reflect.TypeOf((*ast.TypeSpec)(nil)),
- reflect.TypeOf((*ast.InterfaceType)(nil)),
- reflect.TypeOf((*ast.BranchStmt)(nil)),
- reflect.TypeOf((*ast.IncDecStmt)(nil)),
- reflect.TypeOf((*ast.BasicLit)(nil)),
-}
-
-var nodeToASTTypes = map[reflect.Type][]reflect.Type{
- reflect.TypeOf(String("")): nil,
- reflect.TypeOf(Token(0)): nil,
- reflect.TypeOf(List{}): {reflect.TypeOf((*ast.BlockStmt)(nil)), reflect.TypeOf((*ast.FieldList)(nil))},
- reflect.TypeOf(Builtin{}): {reflect.TypeOf((*ast.Ident)(nil))},
- reflect.TypeOf(Object{}): {reflect.TypeOf((*ast.Ident)(nil))},
- reflect.TypeOf(Function{}): {reflect.TypeOf((*ast.Ident)(nil)), reflect.TypeOf((*ast.SelectorExpr)(nil))},
- reflect.TypeOf(Any{}): allTypes,
- reflect.TypeOf(RangeStmt{}): {reflect.TypeOf((*ast.RangeStmt)(nil))},
- reflect.TypeOf(AssignStmt{}): {reflect.TypeOf((*ast.AssignStmt)(nil))},
- reflect.TypeOf(IndexExpr{}): {reflect.TypeOf((*ast.IndexExpr)(nil))},
- reflect.TypeOf(Ident{}): {reflect.TypeOf((*ast.Ident)(nil))},
- reflect.TypeOf(ValueSpec{}): {reflect.TypeOf((*ast.ValueSpec)(nil))},
- reflect.TypeOf(GenDecl{}): {reflect.TypeOf((*ast.GenDecl)(nil))},
- reflect.TypeOf(BinaryExpr{}): {reflect.TypeOf((*ast.BinaryExpr)(nil))},
- reflect.TypeOf(ForStmt{}): {reflect.TypeOf((*ast.ForStmt)(nil))},
- reflect.TypeOf(ArrayType{}): {reflect.TypeOf((*ast.ArrayType)(nil))},
- reflect.TypeOf(DeferStmt{}): {reflect.TypeOf((*ast.DeferStmt)(nil))},
- reflect.TypeOf(MapType{}): {reflect.TypeOf((*ast.MapType)(nil))},
- reflect.TypeOf(ReturnStmt{}): {reflect.TypeOf((*ast.ReturnStmt)(nil))},
- reflect.TypeOf(SliceExpr{}): {reflect.TypeOf((*ast.SliceExpr)(nil))},
- reflect.TypeOf(StarExpr{}): {reflect.TypeOf((*ast.StarExpr)(nil))},
- reflect.TypeOf(UnaryExpr{}): {reflect.TypeOf((*ast.UnaryExpr)(nil))},
- reflect.TypeOf(SendStmt{}): {reflect.TypeOf((*ast.SendStmt)(nil))},
- reflect.TypeOf(SelectStmt{}): {reflect.TypeOf((*ast.SelectStmt)(nil))},
- reflect.TypeOf(ImportSpec{}): {reflect.TypeOf((*ast.ImportSpec)(nil))},
- reflect.TypeOf(IfStmt{}): {reflect.TypeOf((*ast.IfStmt)(nil))},
- reflect.TypeOf(GoStmt{}): {reflect.TypeOf((*ast.GoStmt)(nil))},
- reflect.TypeOf(Field{}): {reflect.TypeOf((*ast.Field)(nil))},
- reflect.TypeOf(SelectorExpr{}): {reflect.TypeOf((*ast.SelectorExpr)(nil))},
- reflect.TypeOf(StructType{}): {reflect.TypeOf((*ast.StructType)(nil))},
- reflect.TypeOf(KeyValueExpr{}): {reflect.TypeOf((*ast.KeyValueExpr)(nil))},
- reflect.TypeOf(FuncType{}): {reflect.TypeOf((*ast.FuncType)(nil))},
- reflect.TypeOf(FuncLit{}): {reflect.TypeOf((*ast.FuncLit)(nil))},
- reflect.TypeOf(FuncDecl{}): {reflect.TypeOf((*ast.FuncDecl)(nil))},
- reflect.TypeOf(ChanType{}): {reflect.TypeOf((*ast.ChanType)(nil))},
- reflect.TypeOf(CallExpr{}): {reflect.TypeOf((*ast.CallExpr)(nil))},
- reflect.TypeOf(CaseClause{}): {reflect.TypeOf((*ast.CaseClause)(nil))},
- reflect.TypeOf(CommClause{}): {reflect.TypeOf((*ast.CommClause)(nil))},
- reflect.TypeOf(CompositeLit{}): {reflect.TypeOf((*ast.CompositeLit)(nil))},
- reflect.TypeOf(EmptyStmt{}): {reflect.TypeOf((*ast.EmptyStmt)(nil))},
- reflect.TypeOf(SwitchStmt{}): {reflect.TypeOf((*ast.SwitchStmt)(nil))},
- reflect.TypeOf(TypeSwitchStmt{}): {reflect.TypeOf((*ast.TypeSwitchStmt)(nil))},
- reflect.TypeOf(TypeAssertExpr{}): {reflect.TypeOf((*ast.TypeAssertExpr)(nil))},
- reflect.TypeOf(TypeSpec{}): {reflect.TypeOf((*ast.TypeSpec)(nil))},
- reflect.TypeOf(InterfaceType{}): {reflect.TypeOf((*ast.InterfaceType)(nil))},
- reflect.TypeOf(BranchStmt{}): {reflect.TypeOf((*ast.BranchStmt)(nil))},
- reflect.TypeOf(IncDecStmt{}): {reflect.TypeOf((*ast.IncDecStmt)(nil))},
- reflect.TypeOf(BasicLit{}): {reflect.TypeOf((*ast.BasicLit)(nil))},
-}
-
-var requiresTypeInfo = map[string]bool{
- "Function": true,
- "Builtin": true,
- "Object": true,
-}
-
-type Parser struct {
- // Allow nodes that rely on type information
- AllowTypeInfo bool
-
- lex *lexer
- cur item
- last *item
- items chan item
-}
-
-func (p *Parser) Parse(s string) (Pattern, error) {
- p.cur = item{}
- p.last = nil
- p.items = nil
-
- fset := token.NewFileSet()
- p.lex = &lexer{
- f: fset.AddFile("<input>", -1, len(s)),
- input: s,
- items: make(chan item),
- }
- go p.lex.run()
- p.items = p.lex.items
- root, err := p.node()
- if err != nil {
- // drain lexer if parsing failed
- for range p.lex.items {
- }
- return Pattern{}, err
- }
- if item := <-p.lex.items; item.typ != itemEOF {
- return Pattern{}, fmt.Errorf("unexpected token %s after end of pattern", item.typ)
- }
- return Pattern{
- Root: root,
- Relevant: roots(root),
- }, nil
-}
-
-func (p *Parser) next() item {
- if p.last != nil {
- n := *p.last
- p.last = nil
- return n
- }
- var ok bool
- p.cur, ok = <-p.items
- if !ok {
- p.cur = item{typ: eof}
- }
- return p.cur
-}
-
-func (p *Parser) rewind() {
- p.last = &p.cur
-}
-
-func (p *Parser) peek() item {
- n := p.next()
- p.rewind()
- return n
-}
-
-func (p *Parser) accept(typ itemType) (item, bool) {
- n := p.next()
- if n.typ == typ {
- return n, true
- }
- p.rewind()
- return item{}, false
-}
-
-func (p *Parser) unexpectedToken(valid string) error {
- if p.cur.typ == itemError {
- return fmt.Errorf("error lexing input: %s", p.cur.val)
- }
- var got string
- switch p.cur.typ {
- case itemTypeName, itemVariable, itemString:
- got = p.cur.val
- default:
- got = "'" + p.cur.typ.String() + "'"
- }
-
- pos := p.lex.f.Position(token.Pos(p.cur.pos))
- return fmt.Errorf("%s: expected %s, found %s", pos, valid, got)
-}
-
-func (p *Parser) node() (Node, error) {
- if _, ok := p.accept(itemLeftParen); !ok {
- return nil, p.unexpectedToken("'('")
- }
- typ, ok := p.accept(itemTypeName)
- if !ok {
- return nil, p.unexpectedToken("Node type")
- }
-
- var objs []Node
- for {
- if _, ok := p.accept(itemRightParen); ok {
- break
- } else {
- p.rewind()
- obj, err := p.object()
- if err != nil {
- return nil, err
- }
- objs = append(objs, obj)
- }
- }
-
- return p.populateNode(typ.val, objs)
-}
-
-func populateNode(typ string, objs []Node, allowTypeInfo bool) (Node, error) {
- T, ok := structNodes[typ]
- if !ok {
- return nil, fmt.Errorf("unknown node %s", typ)
- }
-
- if !allowTypeInfo && requiresTypeInfo[typ] {
- return nil, fmt.Errorf("Node %s requires type information", typ)
- }
-
- pv := reflect.New(T)
- v := pv.Elem()
-
- if v.NumField() == 1 {
- f := v.Field(0)
- if f.Type().Kind() == reflect.Slice {
- // Variadic node
- f.Set(reflect.AppendSlice(f, reflect.ValueOf(objs)))
- return v.Interface().(Node), nil
- }
- }
- if len(objs) != v.NumField() {
- return nil, fmt.Errorf("tried to initialize node %s with %d values, expected %d", typ, len(objs), v.NumField())
- }
- for i := 0; i < v.NumField(); i++ {
- f := v.Field(i)
- if f.Kind() == reflect.String {
- if obj, ok := objs[i].(String); ok {
- f.Set(reflect.ValueOf(string(obj)))
- } else {
- return nil, fmt.Errorf("first argument of (Binding name node) must be string, but got %s", objs[i])
- }
- } else {
- f.Set(reflect.ValueOf(objs[i]))
- }
- }
- return v.Interface().(Node), nil
-}
-
-func (p *Parser) populateNode(typ string, objs []Node) (Node, error) {
- return populateNode(typ, objs, p.AllowTypeInfo)
-}
-
-var structNodes = map[string]reflect.Type{
- "Any": reflect.TypeOf(Any{}),
- "Ellipsis": reflect.TypeOf(Ellipsis{}),
- "List": reflect.TypeOf(List{}),
- "Binding": reflect.TypeOf(Binding{}),
- "RangeStmt": reflect.TypeOf(RangeStmt{}),
- "AssignStmt": reflect.TypeOf(AssignStmt{}),
- "IndexExpr": reflect.TypeOf(IndexExpr{}),
- "Ident": reflect.TypeOf(Ident{}),
- "Builtin": reflect.TypeOf(Builtin{}),
- "ValueSpec": reflect.TypeOf(ValueSpec{}),
- "GenDecl": reflect.TypeOf(GenDecl{}),
- "BinaryExpr": reflect.TypeOf(BinaryExpr{}),
- "ForStmt": reflect.TypeOf(ForStmt{}),
- "ArrayType": reflect.TypeOf(ArrayType{}),
- "DeferStmt": reflect.TypeOf(DeferStmt{}),
- "MapType": reflect.TypeOf(MapType{}),
- "ReturnStmt": reflect.TypeOf(ReturnStmt{}),
- "SliceExpr": reflect.TypeOf(SliceExpr{}),
- "StarExpr": reflect.TypeOf(StarExpr{}),
- "UnaryExpr": reflect.TypeOf(UnaryExpr{}),
- "SendStmt": reflect.TypeOf(SendStmt{}),
- "SelectStmt": reflect.TypeOf(SelectStmt{}),
- "ImportSpec": reflect.TypeOf(ImportSpec{}),
- "IfStmt": reflect.TypeOf(IfStmt{}),
- "GoStmt": reflect.TypeOf(GoStmt{}),
- "Field": reflect.TypeOf(Field{}),
- "SelectorExpr": reflect.TypeOf(SelectorExpr{}),
- "StructType": reflect.TypeOf(StructType{}),
- "KeyValueExpr": reflect.TypeOf(KeyValueExpr{}),
- "FuncType": reflect.TypeOf(FuncType{}),
- "FuncLit": reflect.TypeOf(FuncLit{}),
- "FuncDecl": reflect.TypeOf(FuncDecl{}),
- "ChanType": reflect.TypeOf(ChanType{}),
- "CallExpr": reflect.TypeOf(CallExpr{}),
- "CaseClause": reflect.TypeOf(CaseClause{}),
- "CommClause": reflect.TypeOf(CommClause{}),
- "CompositeLit": reflect.TypeOf(CompositeLit{}),
- "EmptyStmt": reflect.TypeOf(EmptyStmt{}),
- "SwitchStmt": reflect.TypeOf(SwitchStmt{}),
- "TypeSwitchStmt": reflect.TypeOf(TypeSwitchStmt{}),
- "TypeAssertExpr": reflect.TypeOf(TypeAssertExpr{}),
- "TypeSpec": reflect.TypeOf(TypeSpec{}),
- "InterfaceType": reflect.TypeOf(InterfaceType{}),
- "BranchStmt": reflect.TypeOf(BranchStmt{}),
- "IncDecStmt": reflect.TypeOf(IncDecStmt{}),
- "BasicLit": reflect.TypeOf(BasicLit{}),
- "Object": reflect.TypeOf(Object{}),
- "Function": reflect.TypeOf(Function{}),
- "Or": reflect.TypeOf(Or{}),
- "Not": reflect.TypeOf(Not{}),
-}
-
-func (p *Parser) object() (Node, error) {
- n := p.next()
- switch n.typ {
- case itemLeftParen:
- p.rewind()
- node, err := p.node()
- if err != nil {
- return node, err
- }
- if p.peek().typ == itemColon {
- p.next()
- tail, err := p.object()
- if err != nil {
- return node, err
- }
- return List{Head: node, Tail: tail}, nil
- }
- return node, nil
- case itemLeftBracket:
- p.rewind()
- return p.array()
- case itemVariable:
- v := n
- if v.val == "nil" {
- return Nil{}, nil
- }
- var b Binding
- if _, ok := p.accept(itemAt); ok {
- o, err := p.node()
- if err != nil {
- return nil, err
- }
- b = Binding{
- Name: v.val,
- Node: o,
- }
- } else {
- p.rewind()
- b = Binding{Name: v.val}
- }
- if p.peek().typ == itemColon {
- p.next()
- tail, err := p.object()
- if err != nil {
- return b, err
- }
- return List{Head: b, Tail: tail}, nil
- }
- return b, nil
- case itemBlank:
- return Any{}, nil
- case itemString:
- return String(n.val), nil
- default:
- return nil, p.unexpectedToken("object")
- }
-}
-
-func (p *Parser) array() (Node, error) {
- if _, ok := p.accept(itemLeftBracket); !ok {
- return nil, p.unexpectedToken("'['")
- }
-
- var objs []Node
- for {
- if _, ok := p.accept(itemRightBracket); ok {
- break
- } else {
- p.rewind()
- obj, err := p.object()
- if err != nil {
- return nil, err
- }
- objs = append(objs, obj)
- }
- }
-
- tail := List{}
- for i := len(objs) - 1; i >= 0; i-- {
- l := List{
- Head: objs[i],
- Tail: tail,
- }
- tail = l
- }
- return tail, nil
-}
-
-/*
-Node ::= itemLeftParen itemTypeName Object* itemRightParen
-Object ::= Node | Array | Binding | itemVariable | itemBlank | itemString
-Array := itemLeftBracket Object* itemRightBracket
-Array := Object itemColon Object
-Binding ::= itemVariable itemAt Node
-*/
diff --git a/vendor/honnef.co/go/tools/pattern/pattern.go b/vendor/honnef.co/go/tools/pattern/pattern.go
deleted file mode 100644
index d74605602..000000000
--- a/vendor/honnef.co/go/tools/pattern/pattern.go
+++ /dev/null
@@ -1,497 +0,0 @@
-package pattern
-
-import (
- "fmt"
- "go/token"
- "reflect"
- "strings"
-)
-
-var (
- _ Node = Ellipsis{}
- _ Node = Binding{}
- _ Node = RangeStmt{}
- _ Node = AssignStmt{}
- _ Node = IndexExpr{}
- _ Node = Ident{}
- _ Node = Builtin{}
- _ Node = String("")
- _ Node = Any{}
- _ Node = ValueSpec{}
- _ Node = List{}
- _ Node = GenDecl{}
- _ Node = BinaryExpr{}
- _ Node = ForStmt{}
- _ Node = ArrayType{}
- _ Node = DeferStmt{}
- _ Node = MapType{}
- _ Node = ReturnStmt{}
- _ Node = SliceExpr{}
- _ Node = StarExpr{}
- _ Node = UnaryExpr{}
- _ Node = SendStmt{}
- _ Node = SelectStmt{}
- _ Node = ImportSpec{}
- _ Node = IfStmt{}
- _ Node = GoStmt{}
- _ Node = Field{}
- _ Node = SelectorExpr{}
- _ Node = StructType{}
- _ Node = KeyValueExpr{}
- _ Node = FuncType{}
- _ Node = FuncLit{}
- _ Node = FuncDecl{}
- _ Node = Token(0)
- _ Node = ChanType{}
- _ Node = CallExpr{}
- _ Node = CaseClause{}
- _ Node = CommClause{}
- _ Node = CompositeLit{}
- _ Node = EmptyStmt{}
- _ Node = SwitchStmt{}
- _ Node = TypeSwitchStmt{}
- _ Node = TypeAssertExpr{}
- _ Node = TypeSpec{}
- _ Node = InterfaceType{}
- _ Node = BranchStmt{}
- _ Node = IncDecStmt{}
- _ Node = BasicLit{}
- _ Node = Nil{}
- _ Node = Object{}
- _ Node = Function{}
- _ Node = Not{}
- _ Node = Or{}
-)
-
-type Function struct {
- Name Node
-}
-
-type Token token.Token
-
-type Nil struct {
-}
-
-type Ellipsis struct {
- Elt Node
-}
-
-type IncDecStmt struct {
- X Node
- Tok Node
-}
-
-type BranchStmt struct {
- Tok Node
- Label Node
-}
-
-type InterfaceType struct {
- Methods Node
-}
-
-type TypeSpec struct {
- Name Node
- Type Node
-}
-
-type TypeAssertExpr struct {
- X Node
- Type Node
-}
-
-type TypeSwitchStmt struct {
- Init Node
- Assign Node
- Body Node
-}
-
-type SwitchStmt struct {
- Init Node
- Tag Node
- Body Node
-}
-
-type EmptyStmt struct {
-}
-
-type CompositeLit struct {
- Type Node
- Elts Node
-}
-
-type CommClause struct {
- Comm Node
- Body Node
-}
-
-type CaseClause struct {
- List Node
- Body Node
-}
-
-type CallExpr struct {
- Fun Node
- Args Node
- // XXX handle ellipsis
-}
-
-// TODO(dh): add a ChanDir node, and a way of instantiating it.
-
-type ChanType struct {
- Dir Node
- Value Node
-}
-
-type FuncDecl struct {
- Recv Node
- Name Node
- Type Node
- Body Node
-}
-
-type FuncLit struct {
- Type Node
- Body Node
-}
-
-type FuncType struct {
- Params Node
- Results Node
-}
-
-type KeyValueExpr struct {
- Key Node
- Value Node
-}
-
-type StructType struct {
- Fields Node
-}
-
-type SelectorExpr struct {
- X Node
- Sel Node
-}
-
-type Field struct {
- Names Node
- Type Node
- Tag Node
-}
-
-type GoStmt struct {
- Call Node
-}
-
-type IfStmt struct {
- Init Node
- Cond Node
- Body Node
- Else Node
-}
-
-type ImportSpec struct {
- Name Node
- Path Node
-}
-
-type SelectStmt struct {
- Body Node
-}
-
-type ArrayType struct {
- Len Node
- Elt Node
-}
-
-type DeferStmt struct {
- Call Node
-}
-
-type MapType struct {
- Key Node
- Value Node
-}
-
-type ReturnStmt struct {
- Results Node
-}
-
-type SliceExpr struct {
- X Node
- Low Node
- High Node
- Max Node
-}
-
-type StarExpr struct {
- X Node
-}
-
-type UnaryExpr struct {
- Op Node
- X Node
-}
-
-type SendStmt struct {
- Chan Node
- Value Node
-}
-
-type Binding struct {
- Name string
- Node Node
-}
-
-type RangeStmt struct {
- Key Node
- Value Node
- Tok Node
- X Node
- Body Node
-}
-
-type AssignStmt struct {
- Lhs Node
- Tok Node
- Rhs Node
-}
-
-type IndexExpr struct {
- X Node
- Index Node
-}
-
-type Node interface {
- String() string
- isNode()
-}
-
-type Ident struct {
- Name Node
-}
-
-type Object struct {
- Name Node
-}
-
-type Builtin struct {
- Name Node
-}
-
-type String string
-
-type Any struct{}
-
-type ValueSpec struct {
- Names Node
- Type Node
- Values Node
-}
-
-type List struct {
- Head Node
- Tail Node
-}
-
-type GenDecl struct {
- Tok Node
- Specs Node
-}
-
-type BasicLit struct {
- Kind Node
- Value Node
-}
-
-type BinaryExpr struct {
- X Node
- Op Node
- Y Node
-}
-
-type ForStmt struct {
- Init Node
- Cond Node
- Post Node
- Body Node
-}
-
-type Or struct {
- Nodes []Node
-}
-
-type Not struct {
- Node Node
-}
-
-func stringify(n Node) string {
- v := reflect.ValueOf(n)
- var parts []string
- parts = append(parts, v.Type().Name())
- for i := 0; i < v.NumField(); i++ {
- //lint:ignore S1025 false positive in staticcheck 2019.2.3
- parts = append(parts, fmt.Sprintf("%s", v.Field(i)))
- }
- return "(" + strings.Join(parts, " ") + ")"
-}
-
-func (stmt AssignStmt) String() string { return stringify(stmt) }
-func (expr IndexExpr) String() string { return stringify(expr) }
-func (id Ident) String() string { return stringify(id) }
-func (spec ValueSpec) String() string { return stringify(spec) }
-func (decl GenDecl) String() string { return stringify(decl) }
-func (lit BasicLit) String() string { return stringify(lit) }
-func (expr BinaryExpr) String() string { return stringify(expr) }
-func (stmt ForStmt) String() string { return stringify(stmt) }
-func (stmt RangeStmt) String() string { return stringify(stmt) }
-func (typ ArrayType) String() string { return stringify(typ) }
-func (stmt DeferStmt) String() string { return stringify(stmt) }
-func (typ MapType) String() string { return stringify(typ) }
-func (stmt ReturnStmt) String() string { return stringify(stmt) }
-func (expr SliceExpr) String() string { return stringify(expr) }
-func (expr StarExpr) String() string { return stringify(expr) }
-func (expr UnaryExpr) String() string { return stringify(expr) }
-func (stmt SendStmt) String() string { return stringify(stmt) }
-func (spec ImportSpec) String() string { return stringify(spec) }
-func (stmt SelectStmt) String() string { return stringify(stmt) }
-func (stmt IfStmt) String() string { return stringify(stmt) }
-func (stmt IncDecStmt) String() string { return stringify(stmt) }
-func (stmt GoStmt) String() string { return stringify(stmt) }
-func (field Field) String() string { return stringify(field) }
-func (expr SelectorExpr) String() string { return stringify(expr) }
-func (typ StructType) String() string { return stringify(typ) }
-func (expr KeyValueExpr) String() string { return stringify(expr) }
-func (typ FuncType) String() string { return stringify(typ) }
-func (lit FuncLit) String() string { return stringify(lit) }
-func (decl FuncDecl) String() string { return stringify(decl) }
-func (stmt BranchStmt) String() string { return stringify(stmt) }
-func (expr CallExpr) String() string { return stringify(expr) }
-func (clause CaseClause) String() string { return stringify(clause) }
-func (typ ChanType) String() string { return stringify(typ) }
-func (clause CommClause) String() string { return stringify(clause) }
-func (lit CompositeLit) String() string { return stringify(lit) }
-func (stmt EmptyStmt) String() string { return stringify(stmt) }
-func (typ InterfaceType) String() string { return stringify(typ) }
-func (stmt SwitchStmt) String() string { return stringify(stmt) }
-func (expr TypeAssertExpr) String() string { return stringify(expr) }
-func (spec TypeSpec) String() string { return stringify(spec) }
-func (stmt TypeSwitchStmt) String() string { return stringify(stmt) }
-func (nil Nil) String() string { return "nil" }
-func (builtin Builtin) String() string { return stringify(builtin) }
-func (obj Object) String() string { return stringify(obj) }
-func (fn Function) String() string { return stringify(fn) }
-func (el Ellipsis) String() string { return stringify(el) }
-func (not Not) String() string { return stringify(not) }
-
-func (or Or) String() string {
- s := "(Or"
- for _, node := range or.Nodes {
- s += " "
- s += node.String()
- }
- s += ")"
- return s
-}
-
-func isProperList(l List) bool {
- if l.Head == nil && l.Tail == nil {
- return true
- }
- switch tail := l.Tail.(type) {
- case nil:
- return false
- case List:
- return isProperList(tail)
- default:
- return false
- }
-}
-
-func (l List) String() string {
- if l.Head == nil && l.Tail == nil {
- return "[]"
- }
-
- if isProperList(l) {
- // pretty-print the list
- var objs []string
- for l.Head != nil {
- objs = append(objs, l.Head.String())
- l = l.Tail.(List)
- }
- return fmt.Sprintf("[%s]", strings.Join(objs, " "))
- }
-
- return fmt.Sprintf("%s:%s", l.Head, l.Tail)
-}
-
-func (bind Binding) String() string {
- if bind.Node == nil {
- return bind.Name
- }
- return fmt.Sprintf("%s@%s", bind.Name, bind.Node)
-}
-
-func (s String) String() string { return fmt.Sprintf("%q", string(s)) }
-
-func (tok Token) String() string {
- return fmt.Sprintf("%q", strings.ToUpper(token.Token(tok).String()))
-}
-
-func (Any) String() string { return "_" }
-
-func (AssignStmt) isNode() {}
-func (IndexExpr) isNode() {}
-func (Ident) isNode() {}
-func (ValueSpec) isNode() {}
-func (GenDecl) isNode() {}
-func (BasicLit) isNode() {}
-func (BinaryExpr) isNode() {}
-func (ForStmt) isNode() {}
-func (RangeStmt) isNode() {}
-func (ArrayType) isNode() {}
-func (DeferStmt) isNode() {}
-func (MapType) isNode() {}
-func (ReturnStmt) isNode() {}
-func (SliceExpr) isNode() {}
-func (StarExpr) isNode() {}
-func (UnaryExpr) isNode() {}
-func (SendStmt) isNode() {}
-func (ImportSpec) isNode() {}
-func (SelectStmt) isNode() {}
-func (IfStmt) isNode() {}
-func (IncDecStmt) isNode() {}
-func (GoStmt) isNode() {}
-func (Field) isNode() {}
-func (SelectorExpr) isNode() {}
-func (StructType) isNode() {}
-func (KeyValueExpr) isNode() {}
-func (FuncType) isNode() {}
-func (FuncLit) isNode() {}
-func (FuncDecl) isNode() {}
-func (BranchStmt) isNode() {}
-func (CallExpr) isNode() {}
-func (CaseClause) isNode() {}
-func (ChanType) isNode() {}
-func (CommClause) isNode() {}
-func (CompositeLit) isNode() {}
-func (EmptyStmt) isNode() {}
-func (InterfaceType) isNode() {}
-func (SwitchStmt) isNode() {}
-func (TypeAssertExpr) isNode() {}
-func (TypeSpec) isNode() {}
-func (TypeSwitchStmt) isNode() {}
-func (Nil) isNode() {}
-func (Builtin) isNode() {}
-func (Object) isNode() {}
-func (Function) isNode() {}
-func (Ellipsis) isNode() {}
-func (Or) isNode() {}
-func (List) isNode() {}
-func (String) isNode() {}
-func (Token) isNode() {}
-func (Any) isNode() {}
-func (Binding) isNode() {}
-func (Not) isNode() {}
diff --git a/vendor/honnef.co/go/tools/report/report.go b/vendor/honnef.co/go/tools/report/report.go
deleted file mode 100644
index 9b8b6ee74..000000000
--- a/vendor/honnef.co/go/tools/report/report.go
+++ /dev/null
@@ -1,184 +0,0 @@
-package report
-
-import (
- "bytes"
- "go/ast"
- "go/printer"
- "go/token"
- "strings"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/ast/astutil"
- "honnef.co/go/tools/facts"
- "honnef.co/go/tools/lint"
-)
-
-type Options struct {
- ShortRange bool
- FilterGenerated bool
- Fixes []analysis.SuggestedFix
- Related []analysis.RelatedInformation
-}
-
-type Option func(*Options)
-
-func ShortRange() Option {
- return func(opts *Options) {
- opts.ShortRange = true
- }
-}
-
-func FilterGenerated() Option {
- return func(opts *Options) {
- opts.FilterGenerated = true
- }
-}
-
-func Fixes(fixes ...analysis.SuggestedFix) Option {
- return func(opts *Options) {
- opts.Fixes = append(opts.Fixes, fixes...)
- }
-}
-
-func Related(node Positioner, message string) Option {
- return func(opts *Options) {
- pos, end := getRange(node, opts.ShortRange)
- r := analysis.RelatedInformation{
- Pos: pos,
- End: end,
- Message: message,
- }
- opts.Related = append(opts.Related, r)
- }
-}
-
-type Positioner interface {
- Pos() token.Pos
-}
-
-type fullPositioner interface {
- Pos() token.Pos
- End() token.Pos
-}
-
-type sourcer interface {
- Source() ast.Node
-}
-
-// shortRange returns the position and end of the main component of an
-// AST node. For nodes that have no body, the short range is identical
-// to the node's Pos and End. For nodes that do have a body, the short
-// range excludes the body.
-func shortRange(node ast.Node) (pos, end token.Pos) {
- switch node := node.(type) {
- case *ast.File:
- return node.Pos(), node.Name.End()
- case *ast.CaseClause:
- return node.Pos(), node.Colon + 1
- case *ast.CommClause:
- return node.Pos(), node.Colon + 1
- case *ast.DeferStmt:
- return node.Pos(), node.Defer + token.Pos(len("defer"))
- case *ast.ExprStmt:
- return shortRange(node.X)
- case *ast.ForStmt:
- if node.Post != nil {
- return node.For, node.Post.End()
- } else if node.Cond != nil {
- return node.For, node.Cond.End()
- } else if node.Init != nil {
- // +1 to catch the semicolon, for gofmt'ed code
- return node.Pos(), node.Init.End() + 1
- } else {
- return node.Pos(), node.For + token.Pos(len("for"))
- }
- case *ast.FuncDecl:
- return node.Pos(), node.Type.End()
- case *ast.FuncLit:
- return node.Pos(), node.Type.End()
- case *ast.GoStmt:
- if _, ok := astutil.Unparen(node.Call.Fun).(*ast.FuncLit); ok {
- return node.Pos(), node.Go + token.Pos(len("go"))
- } else {
- return node.Pos(), node.End()
- }
- case *ast.IfStmt:
- return node.Pos(), node.Cond.End()
- case *ast.RangeStmt:
- return node.Pos(), node.X.End()
- case *ast.SelectStmt:
- return node.Pos(), node.Pos() + token.Pos(len("select"))
- case *ast.SwitchStmt:
- if node.Tag != nil {
- return node.Pos(), node.Tag.End()
- } else if node.Init != nil {
- // +1 to catch the semicolon, for gofmt'ed code
- return node.Pos(), node.Init.End() + 1
- } else {
- return node.Pos(), node.Pos() + token.Pos(len("switch"))
- }
- case *ast.TypeSwitchStmt:
- return node.Pos(), node.Assign.End()
- default:
- return node.Pos(), node.End()
- }
-}
-
-func getRange(node Positioner, short bool) (pos, end token.Pos) {
- switch node := node.(type) {
- case sourcer:
- s := node.Source()
- if short {
- return shortRange(s)
- }
- return s.Pos(), s.End()
- case fullPositioner:
- if short {
- return shortRange(node)
- }
- return node.Pos(), node.End()
- default:
- return node.Pos(), token.NoPos
- }
-}
-
-func Report(pass *analysis.Pass, node Positioner, message string, opts ...Option) {
- cfg := &Options{}
- for _, opt := range opts {
- opt(cfg)
- }
-
- file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename
- if cfg.FilterGenerated {
- m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
- if _, ok := m[file]; ok {
- return
- }
- }
-
- pos, end := getRange(node, cfg.ShortRange)
- d := analysis.Diagnostic{
- Pos: pos,
- End: end,
- Message: message,
- SuggestedFixes: cfg.Fixes,
- Related: cfg.Related,
- }
- pass.Report(d)
-}
-
-func Render(pass *analysis.Pass, x interface{}) string {
- var buf bytes.Buffer
- if err := printer.Fprint(&buf, pass.Fset, x); err != nil {
- panic(err)
- }
- return buf.String()
-}
-
-func RenderArgs(pass *analysis.Pass, args []ast.Expr) string {
- var ss []string
- for _, arg := range args {
- ss = append(ss, Render(pass, arg))
- }
- return strings.Join(ss, ", ")
-}
diff --git a/vendor/honnef.co/go/tools/simple/CONTRIBUTING.md b/vendor/honnef.co/go/tools/simple/CONTRIBUTING.md
new file mode 100644
index 000000000..c54c6c50a
--- /dev/null
+++ b/vendor/honnef.co/go/tools/simple/CONTRIBUTING.md
@@ -0,0 +1,15 @@
+# Contributing to gosimple
+
+## Before filing an issue:
+
+### Are you having trouble building gosimple?
+
+Check you have the latest version of its dependencies. Run
+```
+go get -u honnef.co/go/tools/simple
+```
+If you still have problems, consider searching for existing issues before filing a new issue.
+
+## Before sending a pull request:
+
+Have you understood the purpose of gosimple? Make sure to carefully read `README`.
diff --git a/vendor/honnef.co/go/tools/simple/analysis.go b/vendor/honnef.co/go/tools/simple/analysis.go
index 9f554c310..abb1648fa 100644
--- a/vendor/honnef.co/go/tools/simple/analysis.go
+++ b/vendor/honnef.co/go/tools/simple/analysis.go
@@ -1,148 +1,223 @@
package simple
import (
+ "flag"
+
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"honnef.co/go/tools/facts"
- "honnef.co/go/tools/internal/passes/buildir"
+ "honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/lint/lintutil"
)
-var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{
+func newFlagSet() flag.FlagSet {
+ fs := flag.NewFlagSet("", flag.PanicOnError)
+ fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
+ return *fs
+}
+
+var Analyzers = map[string]*analysis.Analyzer{
"S1000": {
- Run: CheckSingleCaseSelect,
+ Name: "S1000",
+ Run: LintSingleCaseSelect,
+ Doc: Docs["S1000"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1001": {
- Run: CheckLoopCopy,
+ Name: "S1001",
+ Run: LintLoopCopy,
+ Doc: Docs["S1001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1002": {
- Run: CheckIfBoolCmp,
+ Name: "S1002",
+ Run: LintIfBoolCmp,
+ Doc: Docs["S1002"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1003": {
- Run: CheckStringsContains,
+ Name: "S1003",
+ Run: LintStringsContains,
+ Doc: Docs["S1003"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1004": {
- Run: CheckBytesCompare,
+ Name: "S1004",
+ Run: LintBytesCompare,
+ Doc: Docs["S1004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1005": {
- Run: CheckUnnecessaryBlank,
+ Name: "S1005",
+ Run: LintUnnecessaryBlank,
+ Doc: Docs["S1005"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1006": {
- Run: CheckForTrue,
+ Name: "S1006",
+ Run: LintForTrue,
+ Doc: Docs["S1006"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1007": {
- Run: CheckRegexpRaw,
+ Name: "S1007",
+ Run: LintRegexpRaw,
+ Doc: Docs["S1007"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1008": {
- Run: CheckIfReturn,
+ Name: "S1008",
+ Run: LintIfReturn,
+ Doc: Docs["S1008"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1009": {
- Run: CheckRedundantNilCheckWithLen,
+ Name: "S1009",
+ Run: LintRedundantNilCheckWithLen,
+ Doc: Docs["S1009"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1010": {
- Run: CheckSlicing,
+ Name: "S1010",
+ Run: LintSlicing,
+ Doc: Docs["S1010"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1011": {
- Run: CheckLoopAppend,
+ Name: "S1011",
+ Run: LintLoopAppend,
+ Doc: Docs["S1011"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1012": {
- Run: CheckTimeSince,
+ Name: "S1012",
+ Run: LintTimeSince,
+ Doc: Docs["S1012"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1016": {
- Run: CheckSimplerStructConversion,
+ Name: "S1016",
+ Run: LintSimplerStructConversion,
+ Doc: Docs["S1016"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1017": {
- Run: CheckTrim,
+ Name: "S1017",
+ Run: LintTrim,
+ Doc: Docs["S1017"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1018": {
- Run: CheckLoopSlide,
+ Name: "S1018",
+ Run: LintLoopSlide,
+ Doc: Docs["S1018"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1019": {
- Run: CheckMakeLenCap,
+ Name: "S1019",
+ Run: LintMakeLenCap,
+ Doc: Docs["S1019"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1020": {
- Run: CheckAssertNotNil,
+ Name: "S1020",
+ Run: LintAssertNotNil,
+ Doc: Docs["S1020"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1021": {
- Run: CheckDeclareAssign,
+ Name: "S1021",
+ Run: LintDeclareAssign,
+ Doc: Docs["S1021"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1023": {
- Run: CheckRedundantBreak,
+ Name: "S1023",
+ Run: LintRedundantBreak,
+ Doc: Docs["S1023"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1024": {
- Run: CheckTimeUntil,
+ Name: "S1024",
+ Run: LintTimeUntil,
+ Doc: Docs["S1024"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1025": {
- Run: CheckRedundantSprintf,
- Requires: []*analysis.Analyzer{buildir.Analyzer, inspect.Analyzer, facts.Generated},
+ Name: "S1025",
+ Run: LintRedundantSprintf,
+ Doc: Docs["S1025"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1028": {
- Run: CheckErrorsNewSprintf,
+ Name: "S1028",
+ Run: LintErrorsNewSprintf,
+ Doc: Docs["S1028"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1029": {
- Run: CheckRangeStringRunes,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Name: "S1029",
+ Run: LintRangeStringRunes,
+ Doc: Docs["S1029"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"S1030": {
- Run: CheckBytesBufferConversions,
+ Name: "S1030",
+ Run: LintBytesBufferConversions,
+ Doc: Docs["S1030"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1031": {
- Run: CheckNilCheckAroundRange,
+ Name: "S1031",
+ Run: LintNilCheckAroundRange,
+ Doc: Docs["S1031"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1032": {
- Run: CheckSortHelpers,
+ Name: "S1032",
+ Run: LintSortHelpers,
+ Doc: Docs["S1032"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1033": {
- Run: CheckGuardedDelete,
+ Name: "S1033",
+ Run: LintGuardedDelete,
+ Doc: Docs["S1033"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"S1034": {
- Run: CheckSimplifyTypeSwitch,
- Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
- },
- "S1035": {
- Run: CheckRedundantCanonicalHeaderKey,
- Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
- },
- "S1036": {
- Run: CheckUnnecessaryGuard,
- Requires: []*analysis.Analyzer{inspect.Analyzer},
- },
- "S1037": {
- Run: CheckElaborateSleep,
- Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
- },
- "S1038": {
- Run: CheckPrintSprintf,
- Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
- },
- "S1039": {
- Run: CheckSprintLiteral,
+ Name: "S1034",
+ Run: LintSimplifyTypeSwitch,
+ Doc: Docs["S1034"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
-})
+}
diff --git a/vendor/honnef.co/go/tools/simple/doc.go b/vendor/honnef.co/go/tools/simple/doc.go
index db70bab59..eb0072de5 100644
--- a/vendor/honnef.co/go/tools/simple/doc.go
+++ b/vendor/honnef.co/go/tools/simple/doc.go
@@ -3,7 +3,7 @@ package simple
import "honnef.co/go/tools/lint"
var Docs = map[string]*lint.Documentation{
- "S1000": {
+ "S1000": &lint.Documentation{
Title: `Use plain channel send or receive instead of single-case select`,
Text: `Select statements with a single case can be replaced with a simple
send or receive.
@@ -22,7 +22,7 @@ After:
Since: "2017.1",
},
- "S1001": {
+ "S1001": &lint.Documentation{
Title: `Replace for loop with call to copy`,
Text: `Use copy() for copying elements from one slice to another.
@@ -38,7 +38,7 @@ After:
Since: "2017.1",
},
- "S1002": {
+ "S1002": &lint.Documentation{
Title: `Omit comparison with boolean constant`,
Text: `Before:
@@ -50,7 +50,7 @@ After:
Since: "2017.1",
},
- "S1003": {
+ "S1003": &lint.Documentation{
Title: `Replace call to strings.Index with strings.Contains`,
Text: `Before:
@@ -62,7 +62,7 @@ After:
Since: "2017.1",
},
- "S1004": {
+ "S1004": &lint.Documentation{
Title: `Replace call to bytes.Compare with bytes.Equal`,
Text: `Before:
@@ -74,7 +74,7 @@ After:
Since: "2017.1",
},
- "S1005": {
+ "S1005": &lint.Documentation{
Title: `Drop unnecessary use of the blank identifier`,
Text: `In many cases, assigning to the blank identifier is unnecessary.
@@ -92,13 +92,13 @@ After:
Since: "2017.1",
},
- "S1006": {
+ "S1006": &lint.Documentation{
Title: `Use for { ... } for infinite loops`,
Text: `For infinite loops, using for { ... } is the most idiomatic choice.`,
Since: "2017.1",
},
- "S1007": {
+ "S1007": &lint.Documentation{
Title: `Simplify regular expression by using raw string literal`,
Text: `Raw string literals use ` + "`" + ` instead of " and do not support
any escape sequences. This means that the backslash (\) can be used
@@ -117,7 +117,7 @@ After:
Since: "2017.1",
},
- "S1008": {
+ "S1008": &lint.Documentation{
Title: `Simplify returning boolean expression`,
Text: `Before:
@@ -132,7 +132,7 @@ After:
Since: "2017.1",
},
- "S1009": {
+ "S1009": &lint.Documentation{
Title: `Omit redundant nil check on slices`,
Text: `The len function is defined for all slices, even nil ones, which have
a length of zero. It is not necessary to check if a slice is not nil
@@ -148,14 +148,14 @@ After:
Since: "2017.1",
},
- "S1010": {
+ "S1010": &lint.Documentation{
Title: `Omit default slice index`,
Text: `When slicing, the second index defaults to the length of the value,
making s[n:len(s)] and s[n:] equivalent.`,
Since: "2017.1",
},
- "S1011": {
+ "S1011": &lint.Documentation{
Title: `Use a single append to concatenate two slices`,
Text: `Before:
@@ -169,7 +169,7 @@ After:
Since: "2017.1",
},
- "S1012": {
+ "S1012": &lint.Documentation{
Title: `Replace time.Now().Sub(x) with time.Since(x)`,
Text: `The time.Since helper has the same effect as using time.Now().Sub(x)
but is easier to read.
@@ -184,7 +184,7 @@ After:
Since: "2017.1",
},
- "S1016": {
+ "S1016": &lint.Documentation{
Title: `Use a type conversion instead of manually copying struct fields`,
Text: `Two struct types with identical fields can be converted between each
other. In older versions of Go, the fields had to have identical
@@ -207,7 +207,7 @@ After:
Since: "2017.1",
},
- "S1017": {
+ "S1017": &lint.Documentation{
Title: `Replace manual trimming with strings.TrimPrefix`,
Text: `Instead of using strings.HasPrefix and manual slicing, use the
strings.TrimPrefix function. If the string doesn't start with the
@@ -227,7 +227,7 @@ After:
Since: "2017.1",
},
- "S1018": {
+ "S1018": &lint.Documentation{
Title: `Use copy for sliding elements`,
Text: `copy() permits using the same source and destination slice, even with
overlapping ranges. This makes it ideal for sliding elements in a
@@ -245,7 +245,7 @@ After:
Since: "2017.1",
},
- "S1019": {
+ "S1019": &lint.Documentation{
Title: `Simplify make call by omitting redundant arguments`,
Text: `The make function has default values for the length and capacity
arguments. For channels and maps, the length defaults to zero.
@@ -253,7 +253,7 @@ Additionally, for slices the capacity defaults to the length.`,
Since: "2017.1",
},
- "S1020": {
+ "S1020": &lint.Documentation{
Title: `Omit redundant nil check in type assertion`,
Text: `Before:
@@ -265,7 +265,7 @@ After:
Since: "2017.1",
},
- "S1021": {
+ "S1021": &lint.Documentation{
Title: `Merge variable declaration and assignment`,
Text: `Before:
@@ -278,7 +278,7 @@ After:
Since: "2017.1",
},
- "S1023": {
+ "S1023": &lint.Documentation{
Title: `Omit redundant control flow`,
Text: `Functions that have no return value do not need a return statement as
the final statement of the function.
@@ -289,7 +289,7 @@ statement in a case block.`,
Since: "2017.1",
},
- "S1024": {
+ "S1024": &lint.Documentation{
Title: `Replace x.Sub(time.Now()) with time.Until(x)`,
Text: `The time.Until helper has the same effect as using x.Sub(time.Now())
but is easier to read.
@@ -304,7 +304,7 @@ After:
Since: "2017.1",
},
- "S1025": {
+ "S1025": &lint.Documentation{
Title: `Don't use fmt.Sprintf("%s", x) unnecessarily`,
Text: `In many instances, there are easier and more efficient ways of getting
a value's string representation. Whenever a value's underlying type is
@@ -336,7 +336,7 @@ to
Since: "2017.1",
},
- "S1028": {
+ "S1028": &lint.Documentation{
Title: `Simplify error construction with fmt.Errorf`,
Text: `Before:
@@ -348,7 +348,7 @@ After:
Since: "2017.1",
},
- "S1029": {
+ "S1029": &lint.Documentation{
Title: `Range over the string directly`,
Text: `Ranging over a string will yield byte offsets and runes. If the offset
isn't used, this is functionally equivalent to converting the string
@@ -366,7 +366,7 @@ After:
Since: "2017.1",
},
- "S1030": {
+ "S1030": &lint.Documentation{
Title: `Use bytes.Buffer.String or bytes.Buffer.Bytes`,
Text: `bytes.Buffer has both a String and a Bytes method. It is never
necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply
@@ -374,7 +374,7 @@ use the other method.`,
Since: "2017.1",
},
- "S1031": {
+ "S1031": &lint.Documentation{
Title: `Omit redundant nil check around loop`,
Text: `You can use range on nil slices and maps, the loop will simply never
execute. This makes an additional nil check around the loop
@@ -396,7 +396,7 @@ After:
Since: "2017.1",
},
- "S1032": {
+ "S1032": &lint.Documentation{
Title: `Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)`,
Text: `The sort.Ints, sort.Float64s and sort.Strings functions are easier to
read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x))
@@ -412,72 +412,14 @@ After:
Since: "2019.1",
},
- "S1033": {
+ "S1033": &lint.Documentation{
Title: `Unnecessary guard around call to delete`,
Text: `Calling delete on a nil map is a no-op.`,
Since: "2019.2",
},
- "S1034": {
+ "S1034": &lint.Documentation{
Title: `Use result of type assertion to simplify cases`,
Since: "2019.2",
},
-
- "S1035": {
- Title: `Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header`,
- Text: `The methods on net/http.Header, namely Add, Del, Get and Set, already
-canonicalize the given header name.`,
- Since: "Unreleased",
- },
-
- "S1036": {
- Title: `Unnecessary guard around map access`,
-
- Text: `When accessing a map key that doesn't exist yet, one
-receives a zero value. Often, the zero value is a suitable value, for example when using append or doing integer math.
-
-The following
-
- if _, ok := m["foo"]; ok {
- m["foo"] = append(m["foo"], "bar")
- } else {
- m["foo"] = []string{"bar"}
- }
-
-can be simplified to
-
- m["foo"] = append(m["foo"], "bar")
-
-and
-
- if _, ok := m2["k"]; ok {
- m2["k"] += 4
- } else {
- m2["k"] = 4
- }
-
-can be simplified to
-
- m["k"] += 4
-`,
- Since: "Unreleased",
- },
-
- "S1037": {
- Title: `Elaborate way of sleeping`,
- Text: `Using a select statement with a single case receiving
-from the result of time.After is a very elaborate way of sleeping that
-can much simpler be expressed with a simple call to time.Sleep.`,
- Since: "Unreleased",
- },
-
- "S1038": {
- Title: "Unnecessarily complex way of printing formatted string",
- Since: "Unreleased",
- },
-
- "S1039": {
- Title: "Unnecessary use of fmt.Sprint",
- Since: "Unreleased",
- },
}
diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go
index 7ae811059..c78a7bb7a 100644
--- a/vendor/honnef.co/go/tools/simple/lint.go
+++ b/vendor/honnef.co/go/tools/simple/lint.go
@@ -1,5 +1,5 @@
// Package simple contains a linter for Go source code.
-package simple
+package simple // import "honnef.co/go/tools/simple"
import (
"fmt"
@@ -7,124 +7,154 @@ import (
"go/constant"
"go/token"
"go/types"
- "path/filepath"
"reflect"
"sort"
"strings"
"golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
. "honnef.co/go/tools/arg"
- "honnef.co/go/tools/code"
- "honnef.co/go/tools/edit"
- "honnef.co/go/tools/internal/passes/buildir"
+ "honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/internal/sharedcheck"
+ "honnef.co/go/tools/lint"
. "honnef.co/go/tools/lint/lintdsl"
- "honnef.co/go/tools/pattern"
- "honnef.co/go/tools/report"
)
-var (
- checkSingleCaseSelectQ1 = pattern.MustParse(`
- (ForStmt
- nil nil nil
- select@(SelectStmt
- (CommClause
- (Or
- (UnaryExpr "<-" _)
- (AssignStmt _ _ (UnaryExpr "<-" _)))
- _)))`)
- checkSingleCaseSelectQ2 = pattern.MustParse(`(SelectStmt (CommClause _ _))`)
-)
+func LintSingleCaseSelect(pass *analysis.Pass) (interface{}, error) {
+ isSingleSelect := func(node ast.Node) bool {
+ v, ok := node.(*ast.SelectStmt)
+ if !ok {
+ return false
+ }
+ return len(v.Body.List) == 1
+ }
-func CheckSingleCaseSelect(pass *analysis.Pass) (interface{}, error) {
seen := map[ast.Node]struct{}{}
fn := func(node ast.Node) {
- if m, ok := Match(pass, checkSingleCaseSelectQ1, node); ok {
- seen[m.State["select"].(ast.Node)] = struct{}{}
- report.Report(pass, node, "should use for range instead of for { select {} }", report.FilterGenerated())
- } else if _, ok := Match(pass, checkSingleCaseSelectQ2, node); ok {
- if _, ok := seen[node]; !ok {
- report.Report(pass, node, "should use a simple channel send/receive instead of select with a single case",
- report.ShortRange(),
- report.FilterGenerated())
+ switch v := node.(type) {
+ case *ast.ForStmt:
+ if len(v.Body.List) != 1 {
+ return
+ }
+ if !isSingleSelect(v.Body.List[0]) {
+ return
}
+ if _, ok := v.Body.List[0].(*ast.SelectStmt).Body.List[0].(*ast.CommClause).Comm.(*ast.SendStmt); ok {
+ // Don't suggest using range for channel sends
+ return
+ }
+ seen[v.Body.List[0]] = struct{}{}
+ ReportNodefFG(pass, node, "should use for range instead of for { select {} }")
+ case *ast.SelectStmt:
+ if _, ok := seen[v]; ok {
+ return
+ }
+ if !isSingleSelect(v) {
+ return
+ }
+ ReportNodefFG(pass, node, "should use a simple channel send/receive instead of select with a single case")
}
}
- code.Preorder(pass, fn, (*ast.ForStmt)(nil), (*ast.SelectStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn)
return nil, nil
}
-var (
- checkLoopCopyQ = pattern.MustParse(`
- (Or
- (RangeStmt
- key value ":=" src@(Ident _)
- [(AssignStmt
- (IndexExpr dst@(Ident _) key)
- "="
- value)])
- (RangeStmt
- key nil ":=" src@(Ident _)
- [(AssignStmt
- (IndexExpr dst@(Ident _) key)
- "="
- (IndexExpr src key))]))`)
- checkLoopCopyR = pattern.MustParse(`(CallExpr (Ident "copy") [dst src])`)
-)
-
-func CheckLoopCopy(pass *analysis.Pass) (interface{}, error) {
+func LintLoopCopy(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, edits, ok := MatchAndEdit(pass, checkLoopCopyQ, checkLoopCopyR, node)
+ loop := node.(*ast.RangeStmt)
+
+ if loop.Key == nil {
+ return
+ }
+ if len(loop.Body.List) != 1 {
+ return
+ }
+ stmt, ok := loop.Body.List[0].(*ast.AssignStmt)
if !ok {
return
}
- t1 := pass.TypesInfo.TypeOf(m.State["src"].(*ast.Ident))
- t2 := pass.TypesInfo.TypeOf(m.State["dst"].(*ast.Ident))
- if _, ok := t1.Underlying().(*types.Slice); !ok {
+ if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 {
return
}
- if !types.Identical(t1, t2) {
+ lhs, ok := stmt.Lhs[0].(*ast.IndexExpr)
+ if !ok {
return
}
- tv, err := types.Eval(pass.Fset, pass.Pkg, node.Pos(), "copy")
- if err == nil && tv.IsBuiltin() {
- report.Report(pass, node,
- "should use copy() instead of a loop",
- report.ShortRange(),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("replace loop with call to copy()", edits...)))
+ if _, ok := pass.TypesInfo.TypeOf(lhs.X).(*types.Slice); !ok {
+ return
+ }
+ lidx, ok := lhs.Index.(*ast.Ident)
+ if !ok {
+ return
+ }
+ key, ok := loop.Key.(*ast.Ident)
+ if !ok {
+ return
+ }
+ if pass.TypesInfo.TypeOf(lhs) == nil || pass.TypesInfo.TypeOf(stmt.Rhs[0]) == nil {
+ return
+ }
+ if pass.TypesInfo.ObjectOf(lidx) != pass.TypesInfo.ObjectOf(key) {
+ return
+ }
+ if !types.Identical(pass.TypesInfo.TypeOf(lhs), pass.TypesInfo.TypeOf(stmt.Rhs[0])) {
+ return
+ }
+ if _, ok := pass.TypesInfo.TypeOf(loop.X).(*types.Slice); !ok {
+ return
+ }
+
+ if rhs, ok := stmt.Rhs[0].(*ast.IndexExpr); ok {
+ rx, ok := rhs.X.(*ast.Ident)
+ _ = rx
+ if !ok {
+ return
+ }
+ ridx, ok := rhs.Index.(*ast.Ident)
+ if !ok {
+ return
+ }
+ if pass.TypesInfo.ObjectOf(ridx) != pass.TypesInfo.ObjectOf(key) {
+ return
+ }
+ } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok {
+ value, ok := loop.Value.(*ast.Ident)
+ if !ok {
+ return
+ }
+ if pass.TypesInfo.ObjectOf(rhs) != pass.TypesInfo.ObjectOf(value) {
+ return
+ }
} else {
- report.Report(pass, node, "should use copy() instead of a loop", report.FilterGenerated())
+ return
}
+ ReportNodefFG(pass, loop, "should use copy() instead of a loop")
}
- code.Preorder(pass, fn, (*ast.RangeStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn)
return nil, nil
}
-func CheckIfBoolCmp(pass *analysis.Pass) (interface{}, error) {
+func LintIfBoolCmp(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if code.IsInTest(pass, node) {
- return
- }
-
expr := node.(*ast.BinaryExpr)
if expr.Op != token.EQL && expr.Op != token.NEQ {
return
}
- x := code.IsBoolConst(pass, expr.X)
- y := code.IsBoolConst(pass, expr.Y)
+ x := IsBoolConst(pass, expr.X)
+ y := IsBoolConst(pass, expr.Y)
if !x && !y {
return
}
var other ast.Expr
var val bool
if x {
- val = code.BoolConst(pass, expr.X)
+ val = BoolConst(pass, expr.X)
other = expr.Y
} else {
- val = code.BoolConst(pass, expr.Y)
+ val = BoolConst(pass, expr.Y)
other = expr.X
}
basic, ok := pass.TypesInfo.TypeOf(other).Underlying().(*types.Basic)
@@ -135,56 +165,50 @@ func CheckIfBoolCmp(pass *analysis.Pass) (interface{}, error) {
if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) {
op = "!"
}
- r := op + report.Render(pass, other)
+ r := op + Render(pass, other)
l1 := len(r)
r = strings.TrimLeft(r, "!")
if (l1-len(r))%2 == 1 {
r = "!" + r
}
- report.Report(pass, expr, fmt.Sprintf("should omit comparison to bool constant, can be simplified to %s", r),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("simplify bool comparison", edit.ReplaceWithString(pass.Fset, expr, r))))
+ if IsInTest(pass, node) {
+ return
+ }
+ ReportNodefFG(pass, expr, "should omit comparison to bool constant, can be simplified to %s", r)
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkBytesBufferConversionsQ = pattern.MustParse(`(CallExpr _ [(CallExpr sel@(SelectorExpr recv _) [])])`)
- checkBytesBufferConversionsRs = pattern.MustParse(`(CallExpr (SelectorExpr recv (Ident "String")) [])`)
- checkBytesBufferConversionsRb = pattern.MustParse(`(CallExpr (SelectorExpr recv (Ident "Bytes")) [])`)
-)
-
-func CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) {
- if pass.Pkg.Path() == "bytes" || pass.Pkg.Path() == "bytes_test" {
- // The bytes package can use itself however it wants
- return nil, nil
- }
+func LintBytesBufferConversions(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, ok := Match(pass, checkBytesBufferConversionsQ, node)
+ call := node.(*ast.CallExpr)
+ if len(call.Args) != 1 {
+ return
+ }
+
+ argCall, ok := call.Args[0].(*ast.CallExpr)
+ if !ok {
+ return
+ }
+ sel, ok := argCall.Fun.(*ast.SelectorExpr)
if !ok {
return
}
- call := node.(*ast.CallExpr)
- sel := m.State["sel"].(*ast.SelectorExpr)
typ := pass.TypesInfo.TypeOf(call.Fun)
- if typ == types.Universe.Lookup("string").Type() && code.IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).Bytes") {
- report.Report(pass, call, fmt.Sprintf("should use %v.String() instead of %v", report.Render(pass, sel.X), report.Render(pass, call)),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass, checkBytesBufferConversionsRs, m.State, node))))
- } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && code.IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).String") {
- report.Report(pass, call, fmt.Sprintf("should use %v.Bytes() instead of %v", report.Render(pass, sel.X), report.Render(pass, call)),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass, checkBytesBufferConversionsRb, m.State, node))))
+ if typ == types.Universe.Lookup("string").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).Bytes") {
+ ReportNodefFG(pass, call, "should use %v.String() instead of %v", Render(pass, sel.X), Render(pass, call))
+ } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).String") {
+ ReportNodefFG(pass, call, "should use %v.Bytes() instead of %v", Render(pass, sel.X), Render(pass, call))
}
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-func CheckStringsContains(pass *analysis.Pass) (interface{}, error) {
+func LintStringsContains(pass *analysis.Pass) (interface{}, error) {
// map of value to token to bool value
allowed := map[int64]map[token.Token]bool{
-1: {token.GTR: true, token.NEQ: true, token.EQL: false},
@@ -198,7 +222,7 @@ func CheckStringsContains(pass *analysis.Pass) (interface{}, error) {
return
}
- value, ok := code.ExprToInt(pass, expr.Y)
+ value, ok := ExprToInt(pass, expr.Y)
if !ok {
return
}
@@ -228,112 +252,86 @@ func CheckStringsContains(pass *analysis.Pass) (interface{}, error) {
if pkgIdent.Name != "strings" && pkgIdent.Name != "bytes" {
return
}
-
- var r ast.Expr
+ newFunc := ""
switch funIdent.Name {
case "IndexRune":
- r = &ast.SelectorExpr{
- X: pkgIdent,
- Sel: &ast.Ident{Name: "ContainsRune"},
- }
+ newFunc = "ContainsRune"
case "IndexAny":
- r = &ast.SelectorExpr{
- X: pkgIdent,
- Sel: &ast.Ident{Name: "ContainsAny"},
- }
+ newFunc = "ContainsAny"
case "Index":
- r = &ast.SelectorExpr{
- X: pkgIdent,
- Sel: &ast.Ident{Name: "Contains"},
- }
+ newFunc = "Contains"
default:
return
}
- r = &ast.CallExpr{
- Fun: r,
- Args: call.Args,
- }
+ prefix := ""
if !b {
- r = &ast.UnaryExpr{
- Op: token.NOT,
- X: r,
- }
+ prefix = "!"
}
-
- report.Report(pass, node, fmt.Sprintf("should use %s instead", report.Render(pass, r)),
- report.FilterGenerated(),
- report.Fixes(edit.Fix(fmt.Sprintf("simplify use of %s", report.Render(pass, call.Fun)), edit.ReplaceWithNode(pass.Fset, node, r))))
+ ReportNodefFG(pass, node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(pass, call.Args))
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkBytesCompareQ = pattern.MustParse(`(BinaryExpr (CallExpr (Function "bytes.Compare") args) op@(Or "==" "!=") (BasicLit "INT" "0"))`)
- checkBytesCompareRn = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args)`)
- checkBytesCompareRe = pattern.MustParse(`(UnaryExpr "!" (CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args))`)
-)
-
-func CheckBytesCompare(pass *analysis.Pass) (interface{}, error) {
- if pass.Pkg.Path() == "bytes" || pass.Pkg.Path() == "bytes_test" {
- // the bytes package is free to use bytes.Compare as it sees fit
- return nil, nil
- }
+func LintBytesCompare(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, ok := Match(pass, checkBytesCompareQ, node)
+ expr := node.(*ast.BinaryExpr)
+ if expr.Op != token.NEQ && expr.Op != token.EQL {
+ return
+ }
+ call, ok := expr.X.(*ast.CallExpr)
if !ok {
return
}
-
- args := report.RenderArgs(pass, m.State["args"].([]ast.Expr))
+ if !IsCallToAST(pass, call, "bytes.Compare") {
+ return
+ }
+ value, ok := ExprToInt(pass, expr.Y)
+ if !ok || value != 0 {
+ return
+ }
+ args := RenderArgs(pass, call.Args)
prefix := ""
- if m.State["op"].(token.Token) == token.NEQ {
+ if expr.Op == token.NEQ {
prefix = "!"
}
-
- var fix analysis.SuggestedFix
- switch tok := m.State["op"].(token.Token); tok {
- case token.EQL:
- fix = edit.Fix("simplify use of bytes.Compare", edit.ReplaceWithPattern(pass, checkBytesCompareRe, m.State, node))
- case token.NEQ:
- fix = edit.Fix("simplify use of bytes.Compare", edit.ReplaceWithPattern(pass, checkBytesCompareRn, m.State, node))
- default:
- panic(fmt.Sprintf("unexpected token %v", tok))
- }
- report.Report(pass, node, fmt.Sprintf("should use %sbytes.Equal(%s) instead", prefix, args), report.FilterGenerated(), report.Fixes(fix))
+ ReportNodefFG(pass, node, "should use %sbytes.Equal(%s) instead", prefix, args)
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
-func CheckForTrue(pass *analysis.Pass) (interface{}, error) {
+func LintForTrue(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
loop := node.(*ast.ForStmt)
if loop.Init != nil || loop.Post != nil {
return
}
- if !code.IsBoolConst(pass, loop.Cond) || !code.BoolConst(pass, loop.Cond) {
+ if !IsBoolConst(pass, loop.Cond) || !BoolConst(pass, loop.Cond) {
return
}
- report.Report(pass, loop, "should use for {} instead of for true {}",
- report.ShortRange(),
- report.FilterGenerated())
+ ReportNodefFG(pass, loop, "should use for {} instead of for true {}")
}
- code.Preorder(pass, fn, (*ast.ForStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn)
return nil, nil
}
-func CheckRegexpRaw(pass *analysis.Pass) (interface{}, error) {
+func LintRegexpRaw(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
- if !code.IsCallToAnyAST(pass, call, "regexp.MustCompile", "regexp.Compile") {
+ if !IsCallToAST(pass, call, "regexp.MustCompile") &&
+ !IsCallToAST(pass, call, "regexp.Compile") {
return
}
sel, ok := call.Fun.(*ast.SelectorExpr)
if !ok {
return
}
+ if len(call.Args) != 1 {
+ // invalid function call
+ return
+ }
lit, ok := call.Args[Arg("regexp.Compile.expr")].(*ast.BasicLit)
if !ok {
// TODO(dominikh): support string concat, maybe support constants
@@ -371,18 +369,13 @@ func CheckRegexpRaw(pass *analysis.Pass) (interface{}, error) {
}
}
- report.Report(pass, call, fmt.Sprintf("should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name), report.FilterGenerated())
+ ReportNodefFG(pass, call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name)
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkIfReturnQIf = pattern.MustParse(`(IfStmt nil cond [(ReturnStmt [ret@(Ident _)])] nil)`)
- checkIfReturnQRet = pattern.MustParse(`(ReturnStmt [ret@(Ident _)])`)
-)
-
-func CheckIfReturn(pass *analysis.Pass) (interface{}, error) {
+func LintIfReturn(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
block := node.(*ast.BlockStmt)
l := len(block.List)
@@ -397,88 +390,61 @@ func CheckIfReturn(pass *analysis.Pass) (interface{}, error) {
return
}
}
- m1, ok := Match(pass, checkIfReturnQIf, n1)
+ // if statement with no init, no else, a single condition
+ // checking an identifier or function call and just a return
+ // statement in the body, that returns a boolean constant
+ ifs, ok := n1.(*ast.IfStmt)
if !ok {
return
}
- m2, ok := Match(pass, checkIfReturnQRet, n2)
- if !ok {
+ if ifs.Else != nil || ifs.Init != nil {
return
}
-
- if op, ok := m1.State["cond"].(*ast.BinaryExpr); ok {
+ if len(ifs.Body.List) != 1 {
+ return
+ }
+ if op, ok := ifs.Cond.(*ast.BinaryExpr); ok {
switch op.Op {
case token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ:
default:
return
}
}
+ ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt)
+ if !ok {
+ return
+ }
+ if len(ret1.Results) != 1 {
+ return
+ }
+ if !IsBoolConst(pass, ret1.Results[0]) {
+ return
+ }
- ret1 := m1.State["ret"].(*ast.Ident)
- if !code.IsBoolConst(pass, ret1) {
+ ret2, ok := n2.(*ast.ReturnStmt)
+ if !ok {
return
}
- ret2 := m2.State["ret"].(*ast.Ident)
- if !code.IsBoolConst(pass, ret2) {
+ if len(ret2.Results) != 1 {
+ return
+ }
+ if !IsBoolConst(pass, ret2.Results[0]) {
return
}
- if ret1.Name == ret2.Name {
+ if ret1.Results[0].(*ast.Ident).Name == ret2.Results[0].(*ast.Ident).Name {
// we want the function to return true and false, not the
// same value both times.
return
}
- cond := m1.State["cond"].(ast.Expr)
- origCond := cond
- if ret1.Name == "false" {
- cond = negate(cond)
- }
- report.Report(pass, n1,
- fmt.Sprintf("should use 'return %s' instead of 'if %s { return %s }; return %s'",
- report.Render(pass, cond),
- report.Render(pass, origCond), report.Render(pass, ret1), report.Render(pass, ret2)),
- report.FilterGenerated())
+ ReportNodefFG(pass, n1, "should use 'return <expr>' instead of 'if <expr> { return <bool> }; return <bool>'")
}
- code.Preorder(pass, fn, (*ast.BlockStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn)
return nil, nil
}
-func negate(expr ast.Expr) ast.Expr {
- switch expr := expr.(type) {
- case *ast.BinaryExpr:
- out := *expr
- switch expr.Op {
- case token.EQL:
- out.Op = token.NEQ
- case token.LSS:
- out.Op = token.GEQ
- case token.GTR:
- out.Op = token.LEQ
- case token.NEQ:
- out.Op = token.EQL
- case token.LEQ:
- out.Op = token.GTR
- case token.GEQ:
- out.Op = token.LEQ
- }
- return &out
- case *ast.Ident, *ast.CallExpr, *ast.IndexExpr:
- return &ast.UnaryExpr{
- Op: token.NOT,
- X: expr,
- }
- default:
- return &ast.UnaryExpr{
- Op: token.NOT,
- X: &ast.ParenExpr{
- X: expr,
- },
- }
- }
-}
-
-// CheckRedundantNilCheckWithLen checks for the following redundant nil-checks:
+// LintRedundantNilCheckWithLen checks for the following reduntant nil-checks:
//
// if x == nil || len(x) == 0 {}
// if x != nil && len(x) != 0 {}
@@ -486,11 +452,11 @@ func negate(expr ast.Expr) ast.Expr {
// if x != nil && len(x) > N {}
// if x != nil && len(x) >= N {} (where N != 0)
//
-func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) {
+func LintRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) {
isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) {
_, ok := expr.(*ast.BasicLit)
if ok {
- return true, code.IsIntLiteral(expr, "0")
+ return true, IsZero(expr)
}
id, ok := expr.(*ast.Ident)
if !ok {
@@ -526,7 +492,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) {
if !ok {
return
}
- if !code.IsNil(pass, x.Y) {
+ if !IsNil(pass, x.Y) {
return
}
@@ -554,7 +520,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) {
return
}
- if eqNil && !code.IsIntLiteral(y.Y, "0") { // must be len(x) == *0*
+ if eqNil && !IsZero(y.Y) { // must be len(x) == *0*
return
}
@@ -599,36 +565,51 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) {
default:
return
}
- report.Report(pass, expr, fmt.Sprintf("should omit nil check; len() for %s is defined as zero", nilType), report.FilterGenerated())
+ ReportNodefFG(pass, expr, "should omit nil check; len() for %s is defined as zero", nilType)
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
-var checkSlicingQ = pattern.MustParse(`(SliceExpr x@(Object _) low (CallExpr (Builtin "len") [x]) nil)`)
-
-func CheckSlicing(pass *analysis.Pass) (interface{}, error) {
+func LintSlicing(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if _, ok := Match(pass, checkSlicingQ, node); ok {
- expr := node.(*ast.SliceExpr)
- report.Report(pass, expr.High,
- "should omit second index in slice, s[a:len(s)] is identical to s[a:]",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("simplify slice expression", edit.Delete(expr.High))))
+ n := node.(*ast.SliceExpr)
+ if n.Max != nil {
+ return
+ }
+ s, ok := n.X.(*ast.Ident)
+ if !ok || s.Obj == nil {
+ return
+ }
+ call, ok := n.High.(*ast.CallExpr)
+ if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() {
+ return
+ }
+ fun, ok := call.Fun.(*ast.Ident)
+ if !ok || fun.Name != "len" {
+ return
}
+ if _, ok := pass.TypesInfo.ObjectOf(fun).(*types.Builtin); !ok {
+ return
+ }
+ arg, ok := call.Args[Arg("len.v")].(*ast.Ident)
+ if !ok || arg.Obj != s.Obj {
+ return
+ }
+ ReportNodefFG(pass, n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]")
}
- code.Preorder(pass, fn, (*ast.SliceExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn)
return nil, nil
}
-func refersTo(pass *analysis.Pass, expr ast.Expr, ident types.Object) bool {
+func refersTo(pass *analysis.Pass, expr ast.Expr, ident *ast.Ident) bool {
found := false
fn := func(node ast.Node) bool {
ident2, ok := node.(*ast.Ident)
if !ok {
return true
}
- if ident == pass.TypesInfo.ObjectOf(ident2) {
+ if pass.TypesInfo.ObjectOf(ident) == pass.TypesInfo.ObjectOf(ident2) {
found = true
return false
}
@@ -638,160 +619,183 @@ func refersTo(pass *analysis.Pass, expr ast.Expr, ident types.Object) bool {
return found
}
-var checkLoopAppendQ = pattern.MustParse(`
- (RangeStmt
- (Ident "_")
- val@(Object _)
- _
- x
- [(AssignStmt [lhs] "=" [(CallExpr (Builtin "append") [lhs val])])]) `)
-
-func CheckLoopAppend(pass *analysis.Pass) (interface{}, error) {
+func LintLoopAppend(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, ok := Match(pass, checkLoopAppendQ, node)
+ loop := node.(*ast.RangeStmt)
+ if !IsBlank(loop.Key) {
+ return
+ }
+ val, ok := loop.Value.(*ast.Ident)
if !ok {
return
}
-
- val := m.State["val"].(types.Object)
- if refersTo(pass, m.State["lhs"].(ast.Expr), val) {
+ if len(loop.Body.List) != 1 {
+ return
+ }
+ stmt, ok := loop.Body.List[0].(*ast.AssignStmt)
+ if !ok {
+ return
+ }
+ if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 {
+ return
+ }
+ if refersTo(pass, stmt.Lhs[0], val) {
+ return
+ }
+ call, ok := stmt.Rhs[0].(*ast.CallExpr)
+ if !ok {
+ return
+ }
+ if len(call.Args) != 2 || call.Ellipsis.IsValid() {
+ return
+ }
+ fun, ok := call.Fun.(*ast.Ident)
+ if !ok {
+ return
+ }
+ obj := pass.TypesInfo.ObjectOf(fun)
+ fn, ok := obj.(*types.Builtin)
+ if !ok || fn.Name() != "append" {
return
}
- src := pass.TypesInfo.TypeOf(m.State["x"].(ast.Expr))
- dst := pass.TypesInfo.TypeOf(m.State["lhs"].(ast.Expr))
+ src := pass.TypesInfo.TypeOf(loop.X)
+ dst := pass.TypesInfo.TypeOf(call.Args[Arg("append.slice")])
+ // TODO(dominikh) remove nil check once Go issue #15173 has
+ // been fixed
+ if src == nil {
+ return
+ }
if !types.Identical(src, dst) {
return
}
- r := &ast.AssignStmt{
- Lhs: []ast.Expr{m.State["lhs"].(ast.Expr)},
- Tok: token.ASSIGN,
- Rhs: []ast.Expr{
- &ast.CallExpr{
- Fun: &ast.Ident{Name: "append"},
- Args: []ast.Expr{
- m.State["lhs"].(ast.Expr),
- m.State["x"].(ast.Expr),
- },
- Ellipsis: 1,
- },
- },
+ if Render(pass, stmt.Lhs[0]) != Render(pass, call.Args[Arg("append.slice")]) {
+ return
}
- report.Report(pass, node, fmt.Sprintf("should replace loop with %s", report.Render(pass, r)),
- report.ShortRange(),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("replace loop with call to append", edit.ReplaceWithNode(pass.Fset, node, r))))
+ el, ok := call.Args[Arg("append.elems")].(*ast.Ident)
+ if !ok {
+ return
+ }
+ if pass.TypesInfo.ObjectOf(val) != pass.TypesInfo.ObjectOf(el) {
+ return
+ }
+ ReportNodefFG(pass, loop, "should replace loop with %s = append(%s, %s...)",
+ Render(pass, stmt.Lhs[0]), Render(pass, call.Args[Arg("append.slice")]), Render(pass, loop.X))
}
- code.Preorder(pass, fn, (*ast.RangeStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn)
return nil, nil
}
-var (
- checkTimeSinceQ = pattern.MustParse(`(CallExpr (SelectorExpr (CallExpr (Function "time.Now") []) (Function "(time.Time).Sub")) [arg])`)
- checkTimeSinceR = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "time") (Ident "Since")) [arg])`)
-)
-
-func CheckTimeSince(pass *analysis.Pass) (interface{}, error) {
+func LintTimeSince(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if _, edits, ok := MatchAndEdit(pass, checkTimeSinceQ, checkTimeSinceR, node); ok {
- report.Report(pass, node, "should use time.Since instead of time.Now().Sub",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("replace with call to time.Since", edits...)))
+ call := node.(*ast.CallExpr)
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return
+ }
+ if !IsCallToAST(pass, sel.X, "time.Now") {
+ return
}
+ if sel.Sel.Name != "Sub" {
+ return
+ }
+ ReportNodefFG(pass, call, "should use time.Since instead of time.Now().Sub")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkTimeUntilQ = pattern.MustParse(`(CallExpr (Function "(time.Time).Sub") [(CallExpr (Function "time.Now") [])])`)
- checkTimeUntilR = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "time") (Ident "Until")) [arg])`)
-)
-
-func CheckTimeUntil(pass *analysis.Pass) (interface{}, error) {
- if !code.IsGoVersion(pass, 8) {
+func LintTimeUntil(pass *analysis.Pass) (interface{}, error) {
+ if !IsGoVersion(pass, 8) {
return nil, nil
}
fn := func(node ast.Node) {
- if _, ok := Match(pass, checkTimeUntilQ, node); ok {
- if sel, ok := node.(*ast.CallExpr).Fun.(*ast.SelectorExpr); ok {
- r := pattern.NodeToAST(checkTimeUntilR.Root, map[string]interface{}{"arg": sel.X}).(ast.Node)
- report.Report(pass, node, "should use time.Until instead of t.Sub(time.Now())",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("replace with call to time.Until", edit.ReplaceWithNode(pass.Fset, node, r))))
- } else {
- report.Report(pass, node, "should use time.Until instead of t.Sub(time.Now())", report.FilterGenerated())
- }
+ call := node.(*ast.CallExpr)
+ if !IsCallToAST(pass, call, "(time.Time).Sub") {
+ return
+ }
+ if !IsCallToAST(pass, call.Args[Arg("(time.Time).Sub.u")], "time.Now") {
+ return
}
+ ReportNodefFG(pass, call, "should use time.Until instead of t.Sub(time.Now())")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkUnnecessaryBlankQ1 = pattern.MustParse(`
- (AssignStmt
- [_ (Ident "_")]
- _
- (Or
- (IndexExpr _ _)
- (UnaryExpr "<-" _))) `)
- checkUnnecessaryBlankQ2 = pattern.MustParse(`
- (AssignStmt
- (Ident "_") _ recv@(UnaryExpr "<-" _))`)
-)
-
-func CheckUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) {
+func LintUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) {
fn1 := func(node ast.Node) {
- if _, ok := Match(pass, checkUnnecessaryBlankQ1, node); ok {
- r := *node.(*ast.AssignStmt)
- r.Lhs = r.Lhs[0:1]
- report.Report(pass, node, "unnecessary assignment to the blank identifier",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("remove assignment to blank identifier", edit.ReplaceWithNode(pass.Fset, node, &r))))
- } else if m, ok := Match(pass, checkUnnecessaryBlankQ2, node); ok {
- report.Report(pass, node, "unnecessary assignment to the blank identifier",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("simplify channel receive operation", edit.ReplaceWithNode(pass.Fset, node, m.State["recv"].(ast.Node)))))
+ assign := node.(*ast.AssignStmt)
+ if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 {
+ return
+ }
+ if !IsBlank(assign.Lhs[1]) {
+ return
}
+ switch rhs := assign.Rhs[0].(type) {
+ case *ast.IndexExpr:
+ // The type-checker should make sure that it's a map, but
+ // let's be safe.
+ if _, ok := pass.TypesInfo.TypeOf(rhs.X).Underlying().(*types.Map); !ok {
+ return
+ }
+ case *ast.UnaryExpr:
+ if rhs.Op != token.ARROW {
+ return
+ }
+ default:
+ return
+ }
+ cp := *assign
+ cp.Lhs = cp.Lhs[0:1]
+ ReportNodefFG(pass, assign, "should write %s instead of %s", Render(pass, &cp), Render(pass, assign))
}
- fn3 := func(node ast.Node) {
- rs := node.(*ast.RangeStmt)
-
- // for _
- if rs.Value == nil && code.IsBlank(rs.Key) {
- report.Report(pass, rs.Key, "unnecessary assignment to the blank identifier",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("remove assignment to blank identifier", edit.Delete(edit.Range{rs.Key.Pos(), rs.TokPos + 1}))))
+ fn2 := func(node ast.Node) {
+ stmt := node.(*ast.AssignStmt)
+ if len(stmt.Lhs) != len(stmt.Rhs) {
+ return
}
-
- // for _, _
- if code.IsBlank(rs.Key) && code.IsBlank(rs.Value) {
- // FIXME we should mark both key and value
- report.Report(pass, rs.Key, "unnecessary assignment to the blank identifier",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("remove assignment to blank identifier", edit.Delete(edit.Range{rs.Key.Pos(), rs.TokPos + 1}))))
+ for i, lh := range stmt.Lhs {
+ rh := stmt.Rhs[i]
+ if !IsBlank(lh) {
+ continue
+ }
+ expr, ok := rh.(*ast.UnaryExpr)
+ if !ok {
+ continue
+ }
+ if expr.Op != token.ARROW {
+ continue
+ }
+ ReportNodefFG(pass, lh, "'_ = <-ch' can be simplified to '<-ch'")
}
+ }
+
+ fn3 := func(node ast.Node) {
+ rs := node.(*ast.RangeStmt)
// for x, _
- if !code.IsBlank(rs.Key) && code.IsBlank(rs.Value) {
- report.Report(pass, rs.Value, "unnecessary assignment to the blank identifier",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("remove assignment to blank identifier", edit.Delete(edit.Range{rs.Key.End(), rs.Value.End()}))))
+ if !IsBlank(rs.Key) && IsBlank(rs.Value) {
+ ReportNodefFG(pass, rs.Value, "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(pass, rs.Key), rs.Tok)
+ }
+ // for _, _ || for _
+ if IsBlank(rs.Key) && (IsBlank(rs.Value) || rs.Value == nil) {
+ ReportNodefFG(pass, rs.Key, "should omit values from range; this loop is equivalent to `for range ...`")
}
}
- code.Preorder(pass, fn1, (*ast.AssignStmt)(nil))
- if code.IsGoVersion(pass, 4) {
- code.Preorder(pass, fn3, (*ast.RangeStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1)
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2)
+ if IsGoVersion(pass, 4) {
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3)
}
return nil, nil
}
-func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) {
+func LintSimplerStructConversion(pass *analysis.Pass) (interface{}, error) {
var skip ast.Node
fn := func(node ast.Node) {
// Do not suggest type conversion between pointers
@@ -898,7 +902,7 @@ func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) {
if typ1 == typ2 {
return
}
- if code.IsGoVersion(pass, 8) {
+ if IsGoVersion(pass, 8) {
if !types.IdenticalIgnoreTags(s1, s2) {
return
}
@@ -907,21 +911,14 @@ func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) {
return
}
}
-
- r := &ast.CallExpr{
- Fun: lit.Type,
- Args: []ast.Expr{ident},
- }
- report.Report(pass, node,
- fmt.Sprintf("should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("use type conversion", edit.ReplaceWithNode(pass.Fset, node, r))))
+ ReportNodefFG(pass, node, "should convert %s (type %s) to %s instead of using struct literal",
+ ident.Name, typ2.Obj().Name(), typ1.Obj().Name())
}
- code.Preorder(pass, fn, (*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn)
return nil, nil
}
-func CheckTrim(pass *analysis.Pass) (interface{}, error) {
+func LintTrim(pass *analysis.Pass) (interface{}, error) {
sameNonDynamic := func(node1, node2 ast.Node) bool {
if reflect.TypeOf(node1) != reflect.TypeOf(node2) {
return false
@@ -931,9 +928,9 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) {
case *ast.Ident:
return node1.Obj == node2.(*ast.Ident).Obj
case *ast.SelectorExpr:
- return report.Render(pass, node1) == report.Render(pass, node2)
+ return Render(pass, node1) == Render(pass, node2)
case *ast.IndexExpr:
- return report.Render(pass, node1) == report.Render(pass, node2)
+ return Render(pass, node1) == Render(pass, node2)
}
return false
}
@@ -970,25 +967,23 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) {
if !ok {
return
}
-
- condCallName := code.CallNameAST(pass, condCall)
- switch condCallName {
- case "strings.HasPrefix":
+ switch {
+ case IsCallToAST(pass, condCall, "strings.HasPrefix"):
pkg = "strings"
fun = "HasPrefix"
- case "strings.HasSuffix":
+ case IsCallToAST(pass, condCall, "strings.HasSuffix"):
pkg = "strings"
fun = "HasSuffix"
- case "strings.Contains":
+ case IsCallToAST(pass, condCall, "strings.Contains"):
pkg = "strings"
fun = "Contains"
- case "bytes.HasPrefix":
+ case IsCallToAST(pass, condCall, "bytes.HasPrefix"):
pkg = "bytes"
fun = "HasPrefix"
- case "bytes.HasSuffix":
+ case IsCallToAST(pass, condCall, "bytes.HasSuffix"):
pkg = "bytes"
fun = "HasSuffix"
- case "bytes.Contains":
+ case IsCallToAST(pass, condCall, "bytes.Contains"):
pkg = "bytes"
fun = "Contains"
default:
@@ -1014,15 +1009,13 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) {
if len(rhs.Args) < 2 || !sameNonDynamic(condCall.Args[0], rhs.Args[0]) || !sameNonDynamic(condCall.Args[1], rhs.Args[1]) {
return
}
-
- rhsName := code.CallNameAST(pass, rhs)
- if condCallName == "strings.HasPrefix" && rhsName == "strings.TrimPrefix" ||
- condCallName == "strings.HasSuffix" && rhsName == "strings.TrimSuffix" ||
- condCallName == "strings.Contains" && rhsName == "strings.Replace" ||
- condCallName == "bytes.HasPrefix" && rhsName == "bytes.TrimPrefix" ||
- condCallName == "bytes.HasSuffix" && rhsName == "bytes.TrimSuffix" ||
- condCallName == "bytes.Contains" && rhsName == "bytes.Replace" {
- report.Report(pass, ifstmt, fmt.Sprintf("should replace this if statement with an unconditional %s", rhsName), report.FilterGenerated())
+ if IsCallToAST(pass, condCall, "strings.HasPrefix") && IsCallToAST(pass, rhs, "strings.TrimPrefix") ||
+ IsCallToAST(pass, condCall, "strings.HasSuffix") && IsCallToAST(pass, rhs, "strings.TrimSuffix") ||
+ IsCallToAST(pass, condCall, "strings.Contains") && IsCallToAST(pass, rhs, "strings.Replace") ||
+ IsCallToAST(pass, condCall, "bytes.HasPrefix") && IsCallToAST(pass, rhs, "bytes.TrimPrefix") ||
+ IsCallToAST(pass, condCall, "bytes.HasSuffix") && IsCallToAST(pass, rhs, "bytes.TrimSuffix") ||
+ IsCallToAST(pass, condCall, "bytes.Contains") && IsCallToAST(pass, rhs, "bytes.Replace") {
+ ReportNodefFG(pass, ifstmt, "should replace this if statement with an unconditional %s", CallNameAST(pass, rhs))
}
return
case *ast.SliceExpr:
@@ -1047,7 +1040,7 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) {
index = slice.Low
case "HasSuffix":
if slice.Low != nil {
- n, ok := code.ExprToInt(pass, slice.Low)
+ n, ok := ExprToInt(pass, slice.Low)
if !ok || n != 0 {
return
}
@@ -1076,8 +1069,8 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) {
if !ok {
return
}
- s1, ok1 := code.ExprToString(pass, lit)
- s2, ok2 := code.ExprToString(pass, condCall.Args[1])
+ s1, ok1 := ExprToString(pass, lit)
+ s2, ok2 := ExprToString(pass, condCall.Args[1])
if !ok1 || !ok2 || s1 != s2 {
return
}
@@ -1093,8 +1086,8 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) {
if pkg != "strings" {
return
}
- string, ok1 := code.ExprToString(pass, condCall.Args[1])
- int, ok2 := code.ExprToInt(pass, slice.Low)
+ string, ok1 := ExprToString(pass, condCall.Args[1])
+ int, ok2 := ExprToInt(pass, slice.Low)
if !ok1 || !ok2 || int != int64(len(string)) {
return
}
@@ -1120,142 +1113,257 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) {
case "HasSuffix":
replacement = "TrimSuffix"
}
- report.Report(pass, ifstmt, fmt.Sprintf("should replace this if statement with an unconditional %s.%s", pkg, replacement),
- report.ShortRange(),
- report.FilterGenerated())
+ ReportNodefFG(pass, ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement)
}
}
- code.Preorder(pass, fn, (*ast.IfStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn)
return nil, nil
}
-var (
- checkLoopSlideQ = pattern.MustParse(`
- (ForStmt
- (AssignStmt initvar@(Ident _) _ (BasicLit "INT" "0"))
- (BinaryExpr initvar "<" limit@(Ident _))
- (IncDecStmt initvar "++")
- [(AssignStmt
- (IndexExpr slice@(Ident _) initvar)
- "="
- (IndexExpr slice (BinaryExpr offset@(Ident _) "+" initvar)))])`)
- checkLoopSlideR = pattern.MustParse(`
- (CallExpr
- (Ident "copy")
- [(SliceExpr slice nil limit nil)
- (SliceExpr slice offset nil nil)])`)
-)
-
-func CheckLoopSlide(pass *analysis.Pass) (interface{}, error) {
+func LintLoopSlide(pass *analysis.Pass) (interface{}, error) {
// TODO(dh): detect bs[i+offset] in addition to bs[offset+i]
// TODO(dh): consider merging this function with LintLoopCopy
// TODO(dh): detect length that is an expression, not a variable name
// TODO(dh): support sliding to a different offset than the beginning of the slice
fn := func(node ast.Node) {
+ /*
+ for i := 0; i < n; i++ {
+ bs[i] = bs[offset+i]
+ }
+
+ ↓
+
+ copy(bs[:n], bs[offset:offset+n])
+ */
+
loop := node.(*ast.ForStmt)
- m, edits, ok := MatchAndEdit(pass, checkLoopSlideQ, checkLoopSlideR, loop)
+ if len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil {
+ return
+ }
+ assign, ok := loop.Init.(*ast.AssignStmt)
+ if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !IsZero(assign.Rhs[0]) {
+ return
+ }
+ initvar, ok := assign.Lhs[0].(*ast.Ident)
+ if !ok {
+ return
+ }
+ post, ok := loop.Post.(*ast.IncDecStmt)
+ if !ok || post.Tok != token.INC {
+ return
+ }
+ postvar, ok := post.X.(*ast.Ident)
+ if !ok || pass.TypesInfo.ObjectOf(postvar) != pass.TypesInfo.ObjectOf(initvar) {
+ return
+ }
+ bin, ok := loop.Cond.(*ast.BinaryExpr)
+ if !ok || bin.Op != token.LSS {
+ return
+ }
+ binx, ok := bin.X.(*ast.Ident)
+ if !ok || pass.TypesInfo.ObjectOf(binx) != pass.TypesInfo.ObjectOf(initvar) {
+ return
+ }
+ biny, ok := bin.Y.(*ast.Ident)
+ if !ok {
+ return
+ }
+
+ assign, ok = loop.Body.List[0].(*ast.AssignStmt)
+ if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || assign.Tok != token.ASSIGN {
+ return
+ }
+ lhs, ok := assign.Lhs[0].(*ast.IndexExpr)
+ if !ok {
+ return
+ }
+ rhs, ok := assign.Rhs[0].(*ast.IndexExpr)
if !ok {
return
}
- if _, ok := pass.TypesInfo.TypeOf(m.State["slice"].(*ast.Ident)).Underlying().(*types.Slice); !ok {
+
+ bs1, ok := lhs.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ bs2, ok := rhs.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ obj1 := pass.TypesInfo.ObjectOf(bs1)
+ obj2 := pass.TypesInfo.ObjectOf(bs2)
+ if obj1 != obj2 {
+ return
+ }
+ if _, ok := obj1.Type().Underlying().(*types.Slice); !ok {
return
}
- report.Report(pass, loop, "should use copy() instead of loop for sliding slice elements",
- report.ShortRange(),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("use copy() instead of loop", edits...)))
+ index1, ok := lhs.Index.(*ast.Ident)
+ if !ok || pass.TypesInfo.ObjectOf(index1) != pass.TypesInfo.ObjectOf(initvar) {
+ return
+ }
+ index2, ok := rhs.Index.(*ast.BinaryExpr)
+ if !ok || index2.Op != token.ADD {
+ return
+ }
+ add1, ok := index2.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ add2, ok := index2.Y.(*ast.Ident)
+ if !ok || pass.TypesInfo.ObjectOf(add2) != pass.TypesInfo.ObjectOf(initvar) {
+ return
+ }
+
+ ReportNodefFG(pass, loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(pass, bs1), Render(pass, biny), Render(pass, bs1), Render(pass, add1))
}
- code.Preorder(pass, fn, (*ast.ForStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn)
return nil, nil
}
-var (
- checkMakeLenCapQ1 = pattern.MustParse(`(CallExpr (Builtin "make") [typ size@(BasicLit "INT" "0")])`)
- checkMakeLenCapQ2 = pattern.MustParse(`(CallExpr (Builtin "make") [typ size size])`)
-)
-
-func CheckMakeLenCap(pass *analysis.Pass) (interface{}, error) {
+func LintMakeLenCap(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if pass.Pkg.Path() == "runtime_test" && filepath.Base(pass.Fset.Position(node.Pos()).Filename) == "map_test.go" {
- // special case of runtime tests testing map creation
+ call := node.(*ast.CallExpr)
+ if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" {
+ // FIXME check whether make is indeed the built-in function
return
}
- if m, ok := Match(pass, checkMakeLenCapQ1, node); ok {
- T := m.State["typ"].(ast.Expr)
- size := m.State["size"].(ast.Node)
- if _, ok := pass.TypesInfo.TypeOf(T).Underlying().(*types.Slice); ok {
- return
+ switch len(call.Args) {
+ case 2:
+ // make(T, len)
+ if _, ok := pass.TypesInfo.TypeOf(call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok {
+ break
+ }
+ if IsZero(call.Args[Arg("make.size[0]")]) {
+ ReportNodefFG(pass, call.Args[Arg("make.size[0]")], "should use make(%s) instead", Render(pass, call.Args[Arg("make.t")]))
+ }
+ case 3:
+ // make(T, len, cap)
+ if Render(pass, call.Args[Arg("make.size[0]")]) == Render(pass, call.Args[Arg("make.size[1]")]) {
+ ReportNodefFG(pass, call.Args[Arg("make.size[0]")],
+ "should use make(%s, %s) instead",
+ Render(pass, call.Args[Arg("make.t")]), Render(pass, call.Args[Arg("make.size[0]")]))
}
- report.Report(pass, size, fmt.Sprintf("should use make(%s) instead", report.Render(pass, T)), report.FilterGenerated())
- } else if m, ok := Match(pass, checkMakeLenCapQ2, node); ok {
- // TODO(dh): don't consider sizes identical if they're
- // dynamic. for example: make(T, <-ch, <-ch).
- T := m.State["typ"].(ast.Expr)
- size := m.State["size"].(ast.Node)
- report.Report(pass, size,
- fmt.Sprintf("should use make(%s, %s) instead", report.Render(pass, T), report.Render(pass, size)),
- report.FilterGenerated())
}
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkAssertNotNilFn1Q = pattern.MustParse(`
- (IfStmt
- (AssignStmt [(Ident "_") ok@(Object _)] _ [(TypeAssertExpr assert@(Object _) _)])
- (Or
- (BinaryExpr ok "&&" (BinaryExpr assert "!=" (Builtin "nil")))
- (BinaryExpr (BinaryExpr assert "!=" (Builtin "nil")) "&&" ok))
- _
- _)`)
- checkAssertNotNilFn2Q = pattern.MustParse(`
- (IfStmt
- nil
- (BinaryExpr lhs@(Object _) "!=" (Builtin "nil"))
- [
- ifstmt@(IfStmt
- (AssignStmt [(Ident "_") ok@(Object _)] _ [(TypeAssertExpr lhs _)])
- ok
- _
- _)
- ]
- nil)`)
-)
-
-func CheckAssertNotNil(pass *analysis.Pass) (interface{}, error) {
+func LintAssertNotNil(pass *analysis.Pass) (interface{}, error) {
+ isNilCheck := func(ident *ast.Ident, expr ast.Expr) bool {
+ xbinop, ok := expr.(*ast.BinaryExpr)
+ if !ok || xbinop.Op != token.NEQ {
+ return false
+ }
+ xident, ok := xbinop.X.(*ast.Ident)
+ if !ok || xident.Obj != ident.Obj {
+ return false
+ }
+ if !IsNil(pass, xbinop.Y) {
+ return false
+ }
+ return true
+ }
+ isOKCheck := func(ident *ast.Ident, expr ast.Expr) bool {
+ yident, ok := expr.(*ast.Ident)
+ if !ok || yident.Obj != ident.Obj {
+ return false
+ }
+ return true
+ }
fn1 := func(node ast.Node) {
- m, ok := Match(pass, checkAssertNotNilFn1Q, node)
+ ifstmt := node.(*ast.IfStmt)
+ assign, ok := ifstmt.Init.(*ast.AssignStmt)
+ if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) {
+ return
+ }
+ assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr)
if !ok {
return
}
- assert := m.State["assert"].(types.Object)
- assign := m.State["ok"].(types.Object)
- report.Report(pass, node, fmt.Sprintf("when %s is true, %s can't be nil", assign.Name(), assert.Name()),
- report.ShortRange(),
- report.FilterGenerated())
+ binop, ok := ifstmt.Cond.(*ast.BinaryExpr)
+ if !ok || binop.Op != token.LAND {
+ return
+ }
+ assertIdent, ok := assert.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ assignIdent, ok := assign.Lhs[1].(*ast.Ident)
+ if !ok {
+ return
+ }
+ if !(isNilCheck(assertIdent, binop.X) && isOKCheck(assignIdent, binop.Y)) &&
+ !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) {
+ return
+ }
+ ReportNodefFG(pass, ifstmt, "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent))
}
fn2 := func(node ast.Node) {
- m, ok := Match(pass, checkAssertNotNilFn2Q, node)
+ // Check that outer ifstmt is an 'if x != nil {}'
+ ifstmt := node.(*ast.IfStmt)
+ if ifstmt.Init != nil {
+ return
+ }
+ if ifstmt.Else != nil {
+ return
+ }
+ if len(ifstmt.Body.List) != 1 {
+ return
+ }
+ binop, ok := ifstmt.Cond.(*ast.BinaryExpr)
+ if !ok {
+ return
+ }
+ if binop.Op != token.NEQ {
+ return
+ }
+ lhs, ok := binop.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ if !IsNil(pass, binop.Y) {
+ return
+ }
+
+ // Check that inner ifstmt is an `if _, ok := x.(T); ok {}`
+ ifstmt, ok = ifstmt.Body.List[0].(*ast.IfStmt)
if !ok {
return
}
- ifstmt := m.State["ifstmt"].(*ast.IfStmt)
- lhs := m.State["lhs"].(types.Object)
- assignIdent := m.State["ok"].(types.Object)
- report.Report(pass, ifstmt, fmt.Sprintf("when %s is true, %s can't be nil", assignIdent.Name(), lhs.Name()),
- report.ShortRange(),
- report.FilterGenerated())
+ assign, ok := ifstmt.Init.(*ast.AssignStmt)
+ if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) {
+ return
+ }
+ assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr)
+ if !ok {
+ return
+ }
+ assertIdent, ok := assert.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ if lhs.Obj != assertIdent.Obj {
+ return
+ }
+ assignIdent, ok := assign.Lhs[1].(*ast.Ident)
+ if !ok {
+ return
+ }
+ if !isOKCheck(assignIdent, ifstmt.Cond) {
+ return
+ }
+ ReportNodefFG(pass, ifstmt, "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent))
}
- code.Preorder(pass, fn1, (*ast.IfStmt)(nil))
- code.Preorder(pass, fn2, (*ast.IfStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1)
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2)
return nil, nil
}
-func CheckDeclareAssign(pass *analysis.Pass) (interface{}, error) {
+func LintDeclareAssign(pass *analysis.Pass) (interface{}, error) {
hasMultipleAssignments := func(root ast.Node, ident *ast.Ident) bool {
num := 0
ast.Inspect(root, func(node ast.Node) bool {
@@ -1313,33 +1421,21 @@ func CheckDeclareAssign(pass *analysis.Pass) (interface{}, error) {
continue
}
- if refersTo(pass, assign.Rhs[0], pass.TypesInfo.ObjectOf(ident)) {
+ if refersTo(pass, assign.Rhs[0], ident) {
continue
}
if hasMultipleAssignments(block, ident) {
continue
}
- r := &ast.GenDecl{
- Specs: []ast.Spec{
- &ast.ValueSpec{
- Names: vspec.Names,
- Values: []ast.Expr{assign.Rhs[0]},
- Type: vspec.Type,
- },
- },
- Tok: gdecl.Tok,
- }
- report.Report(pass, decl, "should merge variable declaration with assignment on next line",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("merge declaration with assignment", edit.ReplaceWithNode(pass.Fset, edit.Range{decl.Pos(), assign.End()}, r))))
+ ReportNodefFG(pass, decl, "should merge variable declaration with assignment on next line")
}
}
- code.Preorder(pass, fn, (*ast.BlockStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn)
return nil, nil
}
-func CheckRedundantBreak(pass *analysis.Pass) (interface{}, error) {
+func LintRedundantBreak(pass *analysis.Pass) (interface{}, error) {
fn1 := func(node ast.Node) {
clause := node.(*ast.CaseClause)
if len(clause.Body) < 2 {
@@ -1349,7 +1445,7 @@ func CheckRedundantBreak(pass *analysis.Pass) (interface{}, error) {
if !ok || branch.Tok != token.BREAK || branch.Label != nil {
return
}
- report.Report(pass, branch, "redundant break statement", report.FilterGenerated())
+ ReportNodefFG(pass, branch, "redundant break statement")
}
fn2 := func(node ast.Node) {
var ret *ast.FieldList
@@ -1362,7 +1458,7 @@ func CheckRedundantBreak(pass *analysis.Pass) (interface{}, error) {
ret = x.Type.Results
body = x.Body
default:
- ExhaustiveTypeSwitch(node)
+ panic(fmt.Sprintf("unreachable: %T", node))
}
// if the func has results, a return can't be redundant.
// similarly, if there are no statements, there can be
@@ -1376,10 +1472,10 @@ func CheckRedundantBreak(pass *analysis.Pass) (interface{}, error) {
}
// we don't need to check rst.Results as we already
// checked x.Type.Results to be nil.
- report.Report(pass, rst, "redundant return statement", report.FilterGenerated())
+ ReportNodefFG(pass, rst, "redundant return statement")
}
- code.Preorder(pass, fn1, (*ast.CaseClause)(nil))
- code.Preorder(pass, fn2, (*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1)
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2)
return nil, nil
}
@@ -1401,105 +1497,97 @@ func isStringer(T types.Type, msCache *typeutil.MethodSetCache) bool {
if sig.Results().Len() != 1 {
return false
}
- if !code.IsType(sig.Results().At(0).Type(), "string") {
+ if !IsType(sig.Results().At(0).Type(), "string") {
return false
}
return true
}
-var checkRedundantSprintfQ = pattern.MustParse(`(CallExpr (Function "fmt.Sprintf") [format arg])`)
-
-func CheckRedundantSprintf(pass *analysis.Pass) (interface{}, error) {
+func LintRedundantSprintf(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, ok := Match(pass, checkRedundantSprintfQ, node)
- if !ok {
+ call := node.(*ast.CallExpr)
+ if !IsCallToAST(pass, call, "fmt.Sprintf") {
return
}
-
- format := m.State["format"].(ast.Expr)
- arg := m.State["arg"].(ast.Expr)
- if s, ok := code.ExprToString(pass, format); !ok || s != "%s" {
+ if len(call.Args) != 2 {
+ return
+ }
+ if s, ok := ExprToString(pass, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" {
return
}
+ arg := call.Args[Arg("fmt.Sprintf.a[0]")]
typ := pass.TypesInfo.TypeOf(arg)
- irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
- if types.TypeString(typ, nil) != "reflect.Value" && isStringer(typ, &irpkg.Prog.MethodSets) {
- replacement := &ast.CallExpr{
- Fun: &ast.SelectorExpr{
- X: arg,
- Sel: &ast.Ident{Name: "String"},
- },
- }
- report.Report(pass, node, "should use String() instead of fmt.Sprintf",
- report.Fixes(edit.Fix("replace with call to String method", edit.ReplaceWithNode(pass.Fset, node, replacement))))
+ ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
+ if isStringer(typ, &ssapkg.Prog.MethodSets) {
+ ReportNodef(pass, call, "should use String() instead of fmt.Sprintf")
return
}
if typ.Underlying() == types.Universe.Lookup("string").Type() {
if typ == types.Universe.Lookup("string").Type() {
- report.Report(pass, node, "the argument is already a string, there's no need to use fmt.Sprintf",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("remove unnecessary call to fmt.Sprintf", edit.ReplaceWithNode(pass.Fset, node, arg))))
+ ReportNodefFG(pass, call, "the argument is already a string, there's no need to use fmt.Sprintf")
} else {
- replacement := &ast.CallExpr{
- Fun: &ast.Ident{Name: "string"},
- Args: []ast.Expr{arg},
- }
- report.Report(pass, node, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("replace with conversion to string", edit.ReplaceWithNode(pass.Fset, node, replacement))))
+ ReportNodefFG(pass, call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf")
}
}
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkErrorsNewSprintfQ = pattern.MustParse(`(CallExpr (Function "errors.New") [(CallExpr (Function "fmt.Sprintf") args)])`)
- checkErrorsNewSprintfR = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "fmt") (Ident "Errorf")) args)`)
-)
-
-func CheckErrorsNewSprintf(pass *analysis.Pass) (interface{}, error) {
+func LintErrorsNewSprintf(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if _, edits, ok := MatchAndEdit(pass, checkErrorsNewSprintfQ, checkErrorsNewSprintfR, node); ok {
- // TODO(dh): the suggested fix may leave an unused import behind
- report.Report(pass, node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("use fmt.Errorf", edits...)))
+ if !IsCallToAST(pass, node, "errors.New") {
+ return
+ }
+ call := node.(*ast.CallExpr)
+ if !IsCallToAST(pass, call.Args[Arg("errors.New.text")], "fmt.Sprintf") {
+ return
}
+ ReportNodefFG(pass, node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
+func LintRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
return sharedcheck.CheckRangeStringRunes(pass)
}
-var checkNilCheckAroundRangeQ = pattern.MustParse(`
- (IfStmt
- nil
- (BinaryExpr x@(Object _) "!=" (Builtin "nil"))
- [(RangeStmt _ _ _ x _)]
- nil)`)
-
-func CheckNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) {
+func LintNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, ok := Match(pass, checkNilCheckAroundRangeQ, node)
+ ifstmt := node.(*ast.IfStmt)
+ cond, ok := ifstmt.Cond.(*ast.BinaryExpr)
if !ok {
return
}
- switch m.State["x"].(types.Object).Type().Underlying().(type) {
- case *types.Slice, *types.Map:
- report.Report(pass, node, "unnecessary nil check around range",
- report.ShortRange(),
- report.FilterGenerated())
+ if cond.Op != token.NEQ || !IsNil(pass, cond.Y) || len(ifstmt.Body.List) != 1 {
+ return
+ }
+
+ loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt)
+ if !ok {
+ return
+ }
+ ifXIdent, ok := cond.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ rangeXIdent, ok := loop.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ if ifXIdent.Obj != rangeXIdent.Obj {
+ return
+ }
+ switch pass.TypesInfo.TypeOf(rangeXIdent).(type) {
+ case *types.Slice, *types.Map:
+ ReportNodefFG(pass, node, "unnecessary nil check around range")
}
}
- code.Preorder(pass, fn, (*ast.IfStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn)
return nil, nil
}
@@ -1514,7 +1602,7 @@ func isPermissibleSort(pass *analysis.Pass, node ast.Node) bool {
if !ok {
return true
}
- name := code.SelectorName(pass, sel)
+ name := SelectorName(pass, sel)
switch name {
case "sort.IntSlice", "sort.Float64Slice", "sort.StringSlice":
default:
@@ -1524,7 +1612,7 @@ func isPermissibleSort(pass *analysis.Pass, node ast.Node) bool {
return false
}
-func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) {
+func LintSortHelpers(pass *analysis.Pass) (interface{}, error) {
type Error struct {
node ast.Node
msg string
@@ -1538,7 +1626,7 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) {
case *ast.FuncDecl:
body = node.Body
default:
- ExhaustiveTypeSwitch(node)
+ panic(fmt.Sprintf("unreachable: %T", node))
}
if body == nil {
return
@@ -1550,7 +1638,7 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) {
if permissible {
return false
}
- if !code.IsCallToAST(pass, node, "sort.Sort") {
+ if !IsCallToAST(pass, node, "sort.Sort") {
return true
}
if isPermissibleSort(pass, node) {
@@ -1560,7 +1648,7 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) {
call := node.(*ast.CallExpr)
typeconv := call.Args[Arg("sort.Sort.data")].(*ast.CallExpr)
sel := typeconv.Fun.(*ast.SelectorExpr)
- name := code.SelectorName(pass, sel)
+ name := SelectorName(pass, sel)
switch name {
case "sort.IntSlice":
@@ -1579,7 +1667,7 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) {
}
allErrors = append(allErrors, errors...)
}
- code.Preorder(pass, fn, (*ast.FuncLit)(nil), (*ast.FuncDecl)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn)
sort.Slice(allErrors, func(i, j int) bool {
return allErrors[i].node.Pos() < allErrors[j].node.Pos()
})
@@ -1589,64 +1677,100 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) {
continue
}
prev = err.node.Pos()
- report.Report(pass, err.node, err.msg, report.FilterGenerated())
+ ReportNodefFG(pass, err.node, "%s", err.msg)
}
return nil, nil
}
-var checkGuardedDeleteQ = pattern.MustParse(`
- (IfStmt
- (AssignStmt
- [(Ident "_") ok@(Ident _)]
- ":="
- (IndexExpr m key))
- ok
- [call@(CallExpr (Builtin "delete") [m key])]
- nil)`)
-
-func CheckGuardedDelete(pass *analysis.Pass) (interface{}, error) {
+func LintGuardedDelete(pass *analysis.Pass) (interface{}, error) {
+ isCommaOkMapIndex := func(stmt ast.Stmt) (b *ast.Ident, m ast.Expr, key ast.Expr, ok bool) {
+ // Has to be of the form `_, <b:*ast.Ident> = <m:*types.Map>[<key>]
+
+ assign, ok := stmt.(*ast.AssignStmt)
+ if !ok {
+ return nil, nil, nil, false
+ }
+ if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 {
+ return nil, nil, nil, false
+ }
+ if !IsBlank(assign.Lhs[0]) {
+ return nil, nil, nil, false
+ }
+ ident, ok := assign.Lhs[1].(*ast.Ident)
+ if !ok {
+ return nil, nil, nil, false
+ }
+ index, ok := assign.Rhs[0].(*ast.IndexExpr)
+ if !ok {
+ return nil, nil, nil, false
+ }
+ if _, ok := pass.TypesInfo.TypeOf(index.X).(*types.Map); !ok {
+ return nil, nil, nil, false
+ }
+ key = index.Index
+ return ident, index.X, key, true
+ }
fn := func(node ast.Node) {
- if m, ok := Match(pass, checkGuardedDeleteQ, node); ok {
- report.Report(pass, node, "unnecessary guard around call to delete",
- report.ShortRange(),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("remove guard", edit.ReplaceWithNode(pass.Fset, node, m.State["call"].(ast.Node)))))
+ stmt := node.(*ast.IfStmt)
+ if len(stmt.Body.List) != 1 {
+ return
+ }
+ if stmt.Else != nil {
+ return
+ }
+ expr, ok := stmt.Body.List[0].(*ast.ExprStmt)
+ if !ok {
+ return
+ }
+ call, ok := expr.X.(*ast.CallExpr)
+ if !ok {
+ return
+ }
+ if !IsCallToAST(pass, call, "delete") {
+ return
+ }
+ b, m, key, ok := isCommaOkMapIndex(stmt.Init)
+ if !ok {
+ return
}
+ if cond, ok := stmt.Cond.(*ast.Ident); !ok || pass.TypesInfo.ObjectOf(cond) != pass.TypesInfo.ObjectOf(b) {
+ return
+ }
+ if Render(pass, call.Args[0]) != Render(pass, m) || Render(pass, call.Args[1]) != Render(pass, key) {
+ return
+ }
+ ReportNodefFG(pass, stmt, "unnecessary guard around call to delete")
}
-
- code.Preorder(pass, fn, (*ast.IfStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn)
return nil, nil
}
-var (
- checkSimplifyTypeSwitchQ = pattern.MustParse(`
- (TypeSwitchStmt
- nil
- expr@(TypeAssertExpr ident@(Ident _) _)
- body)`)
- checkSimplifyTypeSwitchR = pattern.MustParse(`(AssignStmt ident ":=" expr)`)
-)
-
-func CheckSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) {
+func LintSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, ok := Match(pass, checkSimplifyTypeSwitchQ, node)
+ stmt := node.(*ast.TypeSwitchStmt)
+ if stmt.Init != nil {
+ // bailing out for now, can't anticipate how type switches with initializers are being used
+ return
+ }
+ expr, ok := stmt.Assign.(*ast.ExprStmt)
+ if !ok {
+ // the user is in fact assigning the result
+ return
+ }
+ assert := expr.X.(*ast.TypeAssertExpr)
+ ident, ok := assert.X.(*ast.Ident)
if !ok {
return
}
- stmt := node.(*ast.TypeSwitchStmt)
- expr := m.State["expr"].(ast.Node)
- ident := m.State["ident"].(*ast.Ident)
-
x := pass.TypesInfo.ObjectOf(ident)
- var allOffenders []*ast.TypeAssertExpr
- canSuggestFix := true
+ var allOffenders []ast.Node
for _, clause := range stmt.Body.List {
clause := clause.(*ast.CaseClause)
if len(clause.List) != 1 {
continue
}
hasUnrelatedAssertion := false
- var offenders []*ast.TypeAssertExpr
+ var offenders []ast.Node
ast.Inspect(clause, func(node ast.Node) bool {
assert2, ok := node.(*ast.TypeAssertExpr)
if !ok {
@@ -1677,192 +1801,16 @@ func CheckSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) {
// type.
allOffenders = append(allOffenders, offenders...)
}
- canSuggestFix = canSuggestFix && !hasUnrelatedAssertion
}
if len(allOffenders) != 0 {
- var opts []report.Option
+ at := ""
for _, offender := range allOffenders {
- opts = append(opts, report.Related(offender, "could eliminate this type assertion"))
- }
- opts = append(opts, report.FilterGenerated())
-
- msg := fmt.Sprintf("assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate type assertions in switch cases",
- report.Render(pass, ident), report.Render(pass, ident))
- if canSuggestFix {
- var edits []analysis.TextEdit
- edits = append(edits, edit.ReplaceWithPattern(pass, checkSimplifyTypeSwitchR, m.State, expr))
- for _, offender := range allOffenders {
- edits = append(edits, edit.ReplaceWithNode(pass.Fset, offender, offender.X))
- }
- opts = append(opts, report.Fixes(edit.Fix("simplify type switch", edits...)))
- report.Report(pass, expr, msg, opts...)
- } else {
- report.Report(pass, expr, msg, opts...)
- }
- }
- }
- code.Preorder(pass, fn, (*ast.TypeSwitchStmt)(nil))
- return nil, nil
-}
-
-func CheckRedundantCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) {
- fn := func(node ast.Node) {
- call := node.(*ast.CallExpr)
- callName := code.CallNameAST(pass, call)
- switch callName {
- case "(net/http.Header).Add", "(net/http.Header).Del", "(net/http.Header).Get", "(net/http.Header).Set":
- default:
- return
- }
-
- if !code.IsCallToAST(pass, call.Args[0], "net/http.CanonicalHeaderKey") {
- return
- }
-
- report.Report(pass, call,
- fmt.Sprintf("calling net/http.CanonicalHeaderKey on the 'key' argument of %s is redundant", callName),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("remove call to CanonicalHeaderKey", edit.ReplaceWithNode(pass.Fset, call.Args[0], call.Args[0].(*ast.CallExpr).Args[0]))))
- }
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
- return nil, nil
-}
-
-var checkUnnecessaryGuardQ = pattern.MustParse(`
- (Or
- (IfStmt
- (AssignStmt [(Ident "_") ok@(Ident _)] ":=" indexexpr@(IndexExpr _ _))
- ok
- set@(AssignStmt indexexpr "=" (CallExpr (Builtin "append") indexexpr:values))
- (AssignStmt indexexpr "=" (CompositeLit _ values)))
- (IfStmt
- (AssignStmt [(Ident "_") ok] ":=" indexexpr@(IndexExpr _ _))
- ok
- set@(AssignStmt indexexpr "+=" value)
- (AssignStmt indexexpr "=" value))
- (IfStmt
- (AssignStmt [(Ident "_") ok] ":=" indexexpr@(IndexExpr _ _))
- ok
- set@(IncDecStmt indexexpr "++")
- (AssignStmt indexexpr "=" (BasicLit "INT" "1"))))`)
-
-func CheckUnnecessaryGuard(pass *analysis.Pass) (interface{}, error) {
- fn := func(node ast.Node) {
- if m, ok := Match(pass, checkUnnecessaryGuardQ, node); ok {
- if code.MayHaveSideEffects(pass, m.State["indexexpr"].(ast.Expr), nil) {
- return
- }
- report.Report(pass, node, "unnecessary guard around map access",
- report.ShortRange(),
- report.Fixes(edit.Fix("simplify map access", edit.ReplaceWithNode(pass.Fset, node, m.State["set"].(ast.Node)))))
- }
- }
- code.Preorder(pass, fn, (*ast.IfStmt)(nil))
- return nil, nil
-}
-
-var (
- checkElaborateSleepQ = pattern.MustParse(`(SelectStmt (CommClause (UnaryExpr "<-" (CallExpr (Function "time.After") [arg])) body))`)
- checkElaborateSleepR = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "time") (Ident "Sleep")) [arg])`)
-)
-
-func CheckElaborateSleep(pass *analysis.Pass) (interface{}, error) {
- fn := func(node ast.Node) {
- if m, ok := Match(pass, checkElaborateSleepQ, node); ok {
- if body, ok := m.State["body"].([]ast.Stmt); ok && len(body) == 0 {
- report.Report(pass, node, "should use time.Sleep instead of elaborate way of sleeping",
- report.ShortRange(),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("Use time.Sleep", edit.ReplaceWithPattern(pass, checkElaborateSleepR, m.State, node))))
- } else {
- // TODO(dh): we could make a suggested fix if the body
- // doesn't declare or shadow any identifiers
- report.Report(pass, node, "should use time.Sleep instead of elaborate way of sleeping",
- report.ShortRange(),
- report.FilterGenerated())
- }
- }
- }
- code.Preorder(pass, fn, (*ast.SelectStmt)(nil))
- return nil, nil
-}
-
-var checkPrintSprintQ = pattern.MustParse(`
- (Or
- (CallExpr
- fn@(Or
- (Function "fmt.Print")
- (Function "fmt.Sprint")
- (Function "fmt.Println")
- (Function "fmt.Sprintln"))
- [(CallExpr (Function "fmt.Sprintf") f:_)])
- (CallExpr
- fn@(Or
- (Function "fmt.Fprint")
- (Function "fmt.Fprintln"))
- [_ (CallExpr (Function "fmt.Sprintf") f:_)]))`)
-
-func CheckPrintSprintf(pass *analysis.Pass) (interface{}, error) {
- fn := func(node ast.Node) {
- m, ok := Match(pass, checkPrintSprintQ, node)
- if !ok {
- return
- }
-
- name := m.State["fn"].(*types.Func).Name()
- var msg string
- switch name {
- case "Print", "Fprint", "Sprint":
- newname := name + "f"
- msg = fmt.Sprintf("should use fmt.%s instead of fmt.%s(fmt.Sprintf(...))", newname, name)
- case "Println", "Fprintln", "Sprintln":
- if _, ok := m.State["f"].(*ast.BasicLit); !ok {
- // This may be an instance of
- // fmt.Println(fmt.Sprintf(arg, ...)) where arg is an
- // externally provided format string and the caller
- // cannot guarantee that the format string ends with a
- // newline.
- return
- }
- newname := name[:len(name)-2] + "f"
- msg = fmt.Sprintf("should use fmt.%s instead of fmt.%s(fmt.Sprintf(...)) (but don't forget the newline)", newname, name)
- }
- report.Report(pass, node, msg,
- report.FilterGenerated())
- }
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
- return nil, nil
-}
-
-var checkSprintLiteralQ = pattern.MustParse(`
- (CallExpr
- fn@(Or
- (Function "fmt.Sprint")
- (Function "fmt.Sprintf"))
- [lit@(BasicLit "STRING" _)])`)
-
-func CheckSprintLiteral(pass *analysis.Pass) (interface{}, error) {
- // We only flag calls with string literals, not expressions of
- // type string, because some people use fmt.Sprint(s) as a pattern
- // for copying strings, which may be useful when extracing a small
- // substring from a large string.
- fn := func(node ast.Node) {
- m, ok := Match(pass, checkSprintLiteralQ, node)
- if !ok {
- return
- }
- callee := m.State["fn"].(*types.Func)
- lit := m.State["lit"].(*ast.BasicLit)
- if callee.Name() == "Sprintf" {
- if strings.ContainsRune(lit.Value, '%') {
- // This might be a format string
- return
+ pos := lint.DisplayPosition(pass.Fset, offender.Pos())
+ at += "\n\t" + pos.String()
}
+ ReportNodefFG(pass, expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(pass, ident), Render(pass, ident), at)
}
- report.Report(pass, node, fmt.Sprintf("unnecessary use of fmt.%s", callee.Name()),
- report.FilterGenerated(),
- report.Fixes(edit.Fix("Replace with string literal", edit.ReplaceWithNode(pass.Fset, node, lit))))
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn)
return nil, nil
}
diff --git a/vendor/honnef.co/go/tools/ir/LICENSE b/vendor/honnef.co/go/tools/ssa/LICENSE
index aee48041e..aee48041e 100644
--- a/vendor/honnef.co/go/tools/ir/LICENSE
+++ b/vendor/honnef.co/go/tools/ssa/LICENSE
diff --git a/vendor/honnef.co/go/tools/ir/blockopt.go b/vendor/honnef.co/go/tools/ssa/blockopt.go
index d7a0e3567..22c9a4c0d 100644
--- a/vendor/honnef.co/go/tools/ir/blockopt.go
+++ b/vendor/honnef.co/go/tools/ssa/blockopt.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// Simple block optimizations to simplify the control flow graph.
@@ -21,34 +21,35 @@ const debugBlockOpt = false
// markReachable sets Index=-1 for all blocks reachable from b.
func markReachable(b *BasicBlock) {
- b.gaps = -1
+ b.Index = -1
for _, succ := range b.Succs {
- if succ.gaps == 0 {
+ if succ.Index == 0 {
markReachable(succ)
}
}
}
+func DeleteUnreachableBlocks(f *Function) {
+ deleteUnreachableBlocks(f)
+}
+
// deleteUnreachableBlocks marks all reachable blocks of f and
// eliminates (nils) all others, including possibly cyclic subgraphs.
//
func deleteUnreachableBlocks(f *Function) {
const white, black = 0, -1
- // We borrow b.gaps temporarily as the mark bit.
+ // We borrow b.Index temporarily as the mark bit.
for _, b := range f.Blocks {
- b.gaps = white
+ b.Index = white
}
markReachable(f.Blocks[0])
- // In SSI form, we need the exit to be reachable for correct
- // post-dominance information. In original form, however, we
- // cannot unconditionally mark it reachable because we won't
- // be adding fake edges, and this breaks the calculation of
- // dominance information.
- markReachable(f.Exit)
+ if f.Recover != nil {
+ markReachable(f.Recover)
+ }
for i, b := range f.Blocks {
- if b.gaps == white {
+ if b.Index == white {
for _, c := range b.Succs {
- if c.gaps == black {
+ if c.Index == black {
c.removePred(b) // delete white->black edge
}
}
@@ -72,13 +73,6 @@ func jumpThreading(f *Function, b *BasicBlock) bool {
if b.Instrs == nil {
return false
}
- for _, pred := range b.Preds {
- switch pred.Control().(type) {
- case *ConstantSwitch:
- // don't optimize away the head blocks of switch statements
- return false
- }
- }
if _, ok := b.Instrs[0].(*Jump); !ok {
return false // not just a jump
}
@@ -123,17 +117,10 @@ func fuseBlocks(f *Function, a *BasicBlock) bool {
if len(a.Succs) != 1 {
return false
}
- if a.Succs[0] == f.Exit {
- return false
- }
b := a.Succs[0]
if len(b.Preds) != 1 {
return false
}
- if _, ok := a.Instrs[len(a.Instrs)-1].(*Panic); ok {
- // panics aren't simple jumps, they have side effects.
- return false
- }
// Degenerate &&/|| ops may result in a straight-line CFG
// containing φ-nodes. (Ideally we'd replace such them with
@@ -164,16 +151,15 @@ func fuseBlocks(f *Function, a *BasicBlock) bool {
return true
}
+func OptimizeBlocks(f *Function) {
+ optimizeBlocks(f)
+}
+
// optimizeBlocks() performs some simple block optimizations on a
// completed function: dead block elimination, block fusion, jump
// threading.
//
func optimizeBlocks(f *Function) {
- if debugBlockOpt {
- f.WriteTo(os.Stderr)
- mustSanityCheck(f, nil)
- }
-
deleteUnreachableBlocks(f)
// Loop until no further progress.
diff --git a/vendor/honnef.co/go/tools/ir/builder.go b/vendor/honnef.co/go/tools/ssa/builder.go
index 5b740247e..317ac0611 100644
--- a/vendor/honnef.co/go/tools/ir/builder.go
+++ b/vendor/honnef.co/go/tools/ssa/builder.go
@@ -2,22 +2,27 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
-// This file implements the BUILD phase of IR construction.
+// This file implements the BUILD phase of SSA construction.
//
-// IR construction has two phases, CREATE and BUILD. In the CREATE phase
+// SSA construction has two phases, CREATE and BUILD. In the CREATE phase
// (create.go), all packages are constructed and type-checked and
// definitions of all package members are created, method-sets are
// computed, and wrapper methods are synthesized.
-// ir.Packages are created in arbitrary order.
+// ssa.Packages are created in arbitrary order.
//
// In the BUILD phase (builder.go), the builder traverses the AST of
-// each Go source function and generates IR instructions for the
+// each Go source function and generates SSA instructions for the
// function body. Initializer expressions for package-level variables
// are emitted to the package's init() function in the order specified
// by go/types.Info.InitOrder, then code for each function in the
// package is generated in lexical order.
+// The BUILD phases for distinct packages are independent and are
+// executed in parallel.
+//
+// TODO(adonovan): indeed, building functions is now embarrassingly parallel.
+// Audit for concurrency then benchmark using more goroutines.
//
// The builder's and Program's indices (maps) are populated and
// mutated during the CREATE phase, but during the BUILD phase they
@@ -31,6 +36,7 @@ import (
"go/token"
"go/types"
"os"
+ "sync"
)
type opaqueType struct {
@@ -53,25 +59,27 @@ var (
tUntypedNil = types.Typ[types.UntypedNil]
tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
tEface = types.NewInterfaceType(nil, nil).Complete()
+
+ // SSA Value constants.
+ vZero = intConst(0)
+ vOne = intConst(1)
+ vTrue = NewConst(constant.MakeBool(true), tBool)
)
// builder holds state associated with the package currently being built.
-// Its methods contain all the logic for AST-to-IR conversion.
-type builder struct {
- printFunc string
-
- blocksets [5]BlockSet
-}
+// Its methods contain all the logic for AST-to-SSA conversion.
+type builder struct{}
// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
//
-func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If {
+func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
switch e := e.(type) {
case *ast.ParenExpr:
- return b.cond(fn, e.X, t, f)
+ b.cond(fn, e.X, t, f)
+ return
case *ast.BinaryExpr:
switch e.Op {
@@ -79,18 +87,21 @@ func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If {
ltrue := fn.newBasicBlock("cond.true")
b.cond(fn, e.X, ltrue, f)
fn.currentBlock = ltrue
- return b.cond(fn, e.Y, t, f)
+ b.cond(fn, e.Y, t, f)
+ return
case token.LOR:
lfalse := fn.newBasicBlock("cond.false")
b.cond(fn, e.X, t, lfalse)
fn.currentBlock = lfalse
- return b.cond(fn, e.Y, t, f)
+ b.cond(fn, e.Y, t, f)
+ return
}
case *ast.UnaryExpr:
if e.Op == token.NOT {
- return b.cond(fn, e.X, f, t)
+ b.cond(fn, e.X, f, t)
+ return
}
}
@@ -100,7 +111,7 @@ func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If {
// The value of a constant condition may be platform-specific,
// and may cause blocks that are reachable in some configuration
// to be hidden from subsequent analyses such as bug-finding tools.
- return emitIf(fn, b.expr(fn, e), t, f, e)
+ emitIf(fn, b.expr(fn, e), t, f)
}
// logicalBinop emits code to fn to evaluate e, a &&- or
@@ -120,11 +131,11 @@ func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
switch e.Op {
case token.LAND:
b.cond(fn, e.X, rhs, done)
- short = emitConst(fn, NewConst(constant.MakeBool(false), t))
+ short = NewConst(constant.MakeBool(false), t)
case token.LOR:
b.cond(fn, e.X, done, rhs)
- short = emitConst(fn, NewConst(constant.MakeBool(true), t))
+ short = NewConst(constant.MakeBool(true), t)
}
// Is rhs unreachable?
@@ -150,21 +161,23 @@ func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
// The edge from e.Y to done carries the value of e.Y.
fn.currentBlock = rhs
edges = append(edges, b.expr(fn, e.Y))
- emitJump(fn, done, e)
+ emitJump(fn, done)
fn.currentBlock = done
phi := &Phi{Edges: edges, Comment: e.Op.String()}
+ phi.pos = e.OpPos
phi.typ = t
- return done.emit(phi, e)
+ return done.emit(phi)
}
-// exprN lowers a multi-result expression e to IR form, emitting code
+// exprN lowers a multi-result expression e to SSA form, emitting code
// to fn and returning a single Value whose type is a *types.Tuple.
// The caller must access the components via Extract.
//
// Multi-result expressions include CallExprs in a multi-value
// assignment or return statement, and "value,ok" uses of
-// TypeAssertExpr, IndexExpr (when X is a map), and Recv.
+// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
+// is token.ARROW).
//
func (b *builder) exprN(fn *Function, e ast.Expr) Value {
typ := fn.Pkg.typeOf(e).(*types.Tuple)
@@ -179,28 +192,36 @@ func (b *builder) exprN(fn *Function, e ast.Expr) Value {
var c Call
b.setCall(fn, e, &c.Call)
c.typ = typ
- return fn.emit(&c, e)
+ return fn.emit(&c)
case *ast.IndexExpr:
mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
- lookup := &MapLookup{
+ lookup := &Lookup{
X: b.expr(fn, e.X),
- Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e),
+ Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
CommaOk: true,
}
lookup.setType(typ)
- return fn.emit(lookup, e)
+ lookup.setPos(e.Lbrack)
+ return fn.emit(lookup)
case *ast.TypeAssertExpr:
- return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e)
+ return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen)
case *ast.UnaryExpr: // must be receive <-
- return emitRecv(fn, b.expr(fn, e.X), true, typ, e)
+ unop := &UnOp{
+ Op: token.ARROW,
+ X: b.expr(fn, e.X),
+ CommaOk: true,
+ }
+ unop.setType(typ)
+ unop.setPos(e.OpPos)
+ return fn.emit(unop)
}
panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
}
-// builtin emits to fn IR instructions to implement a call to the
+// builtin emits to fn SSA instructions to implement a call to the
// built-in function obj with the specified arguments
// and return type. It returns the value defined by the result.
//
@@ -208,7 +229,7 @@ func (b *builder) exprN(fn *Function, e ast.Expr) Value {
// the caller should treat this like an ordinary library function
// call.
//
-func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value {
+func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
switch obj.Name() {
case "make":
switch typ.Underlying().(type) {
@@ -222,21 +243,23 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
// treat make([]T, n, m) as new([m]T)[:n]
cap := m.Int64()
at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap)
- alloc := emitNew(fn, at, source)
+ alloc := emitNew(fn, at, pos)
alloc.Comment = "makeslice"
v := &Slice{
X: alloc,
High: n,
}
+ v.setPos(pos)
v.setType(typ)
- return fn.emit(v, source)
+ return fn.emit(v)
}
v := &MakeSlice{
Len: n,
Cap: m,
}
+ v.setPos(pos)
v.setType(typ)
- return fn.emit(v, source)
+ return fn.emit(v)
case *types.Map:
var res Value
@@ -244,21 +267,23 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
res = b.expr(fn, args[1])
}
v := &MakeMap{Reserve: res}
+ v.setPos(pos)
v.setType(typ)
- return fn.emit(v, source)
+ return fn.emit(v)
case *types.Chan:
- var sz Value = emitConst(fn, intConst(0))
+ var sz Value = vZero
if len(args) == 2 {
sz = b.expr(fn, args[1])
}
v := &MakeChan{Size: sz}
+ v.setPos(pos)
v.setType(typ)
- return fn.emit(v, source)
+ return fn.emit(v)
}
case "new":
- alloc := emitNew(fn, deref(typ), source)
+ alloc := emitNew(fn, deref(typ), pos)
alloc.Comment = "new"
return alloc
@@ -271,22 +296,22 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
t := deref(fn.Pkg.typeOf(args[0])).Underlying()
if at, ok := t.(*types.Array); ok {
b.expr(fn, args[0]) // for effects only
- return emitConst(fn, intConst(at.Len()))
+ return intConst(at.Len())
}
// Otherwise treat as normal.
case "panic":
fn.emit(&Panic{
- X: emitConv(fn, b.expr(fn, args[0]), tEface, source),
- }, source)
- addEdge(fn.currentBlock, fn.Exit)
+ X: emitConv(fn, b.expr(fn, args[0]), tEface),
+ pos: pos,
+ })
fn.currentBlock = fn.newBasicBlock("unreachable")
- return emitConst(fn, NewConst(constant.MakeBool(true), tBool)) // any non-nil Value will do
+ return vTrue // any non-nil Value will do
}
return nil // treat all others as a regular function call
}
-// addr lowers a single-result addressable expression e to IR form,
+// addr lowers a single-result addressable expression e to SSA form,
// emitting code to fn and returning the location (an lvalue) defined
// by the expression.
//
@@ -320,21 +345,21 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
if v == nil {
v = fn.lookup(obj, escaping)
}
- return &address{addr: v, expr: e}
+ return &address{addr: v, pos: e.Pos(), expr: e}
case *ast.CompositeLit:
t := deref(fn.Pkg.typeOf(e))
var v *Alloc
if escaping {
- v = emitNew(fn, t, e)
+ v = emitNew(fn, t, e.Lbrace)
} else {
- v = fn.addLocal(t, e)
+ v = fn.addLocal(t, e.Lbrace)
}
v.Comment = "complit"
var sb storebuf
b.compLit(fn, v, e, true, &sb)
sb.emit(fn)
- return &address{addr: v, expr: e}
+ return &address{addr: v, pos: e.Lbrace, expr: e}
case *ast.ParenExpr:
return b.addr(fn, e.X, escaping)
@@ -349,10 +374,11 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
panic(sel)
}
wantAddr := true
- v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
+ v := b.receiver(fn, e.X, wantAddr, escaping, sel)
last := len(sel.Index()) - 1
return &address{
addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
+ pos: e.Sel.Pos(),
expr: e.Sel,
}
@@ -371,42 +397,43 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
et = types.NewPointer(t.Elem())
case *types.Map:
return &element{
- m: b.expr(fn, e.X),
- k: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index),
- t: t.Elem(),
+ m: b.expr(fn, e.X),
+ k: emitConv(fn, b.expr(fn, e.Index), t.Key()),
+ t: t.Elem(),
+ pos: e.Lbrack,
}
default:
panic("unexpected container type in IndexExpr: " + t.String())
}
v := &IndexAddr{
X: x,
- Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index),
+ Index: emitConv(fn, b.expr(fn, e.Index), tInt),
}
+ v.setPos(e.Lbrack)
v.setType(et)
- return &address{addr: fn.emit(v, e), expr: e}
+ return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e}
case *ast.StarExpr:
- return &address{addr: b.expr(fn, e.X), expr: e}
+ return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
}
panic(fmt.Sprintf("unexpected address expression: %T", e))
}
type store struct {
- lhs lvalue
- rhs Value
- source ast.Node
+ lhs lvalue
+ rhs Value
}
type storebuf struct{ stores []store }
-func (sb *storebuf) store(lhs lvalue, rhs Value, source ast.Node) {
- sb.stores = append(sb.stores, store{lhs, rhs, source})
+func (sb *storebuf) store(lhs lvalue, rhs Value) {
+ sb.stores = append(sb.stores, store{lhs, rhs})
}
func (sb *storebuf) emit(fn *Function) {
for _, s := range sb.stores {
- s.lhs.store(fn, s.rhs, s.source)
+ s.lhs.store(fn, s.rhs)
}
}
@@ -424,7 +451,7 @@ func (sb *storebuf) emit(fn *Function) {
// in-place update of existing variables when the RHS is a composite
// literal that may reference parts of the LHS.
//
-func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf, source ast.Node) {
+func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) {
// Can we initialize it in place?
if e, ok := unparen(e).(*ast.CompositeLit); ok {
// A CompositeLit never evaluates to a pointer,
@@ -435,9 +462,9 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *
ptr := b.addr(fn, e, true).address(fn)
// copy address
if sb != nil {
- sb.store(loc, ptr, source)
+ sb.store(loc, ptr)
} else {
- loc.store(fn, ptr, source)
+ loc.store(fn, ptr)
}
return
}
@@ -474,13 +501,13 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *
// simple case: just copy
rhs := b.expr(fn, e)
if sb != nil {
- sb.store(loc, rhs, source)
+ sb.store(loc, rhs)
} else {
- loc.store(fn, rhs, source)
+ loc.store(fn, rhs)
}
}
-// expr lowers a single-result expression e to IR form, emitting code
+// expr lowers a single-result expression e to SSA form, emitting code
// to fn and returning the Value defined by the expression.
//
func (b *builder) expr(fn *Function, e ast.Expr) Value {
@@ -490,7 +517,7 @@ func (b *builder) expr(fn *Function, e ast.Expr) Value {
// Is expression a constant?
if tv.Value != nil {
- return emitConst(fn, NewConst(tv.Value, tv.Type))
+ return NewConst(tv.Value, tv.Type)
}
var v Value
@@ -498,7 +525,7 @@ func (b *builder) expr(fn *Function, e ast.Expr) Value {
// Prefer pointer arithmetic ({Index,Field}Addr) followed
// by Load over subelement extraction (e.g. Index, Field),
// to avoid large copies.
- v = b.addr(fn, e, false).load(fn, e)
+ v = b.addr(fn, e, false).load(fn)
} else {
v = b.expr0(fn, e, tv)
}
@@ -517,13 +544,13 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
fn2 := &Function{
name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
+ pos: e.Type.Func,
parent: fn,
Pkg: fn.Pkg,
Prog: fn.Prog,
+ syntax: e,
}
- fn2.source = e
fn.AnonFuncs = append(fn.AnonFuncs, fn2)
- fn2.initHTML(b.printFunc)
b.buildFunction(fn2)
if fn2.FreeVars == nil {
return fn2
@@ -534,22 +561,32 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
v.Bindings = append(v.Bindings, fv.outer)
fv.outer = nil
}
- return fn.emit(v, e)
+ return fn.emit(v)
case *ast.TypeAssertExpr: // single-result form only
- return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e)
+ return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen)
case *ast.CallExpr:
if fn.Pkg.info.Types[e.Fun].IsType() {
// Explicit type conversion, e.g. string(x) or big.Int(x)
x := b.expr(fn, e.Args[0])
- y := emitConv(fn, x, tv.Type, e)
+ y := emitConv(fn, x, tv.Type)
+ if y != x {
+ switch y := y.(type) {
+ case *Convert:
+ y.pos = e.Lparen
+ case *ChangeType:
+ y.pos = e.Lparen
+ case *MakeInterface:
+ y.pos = e.Lparen
+ }
+ }
return y
}
// Call to "intrinsic" built-ins, e.g. new, make, panic.
if id, ok := unparen(e.Fun).(*ast.Ident); ok {
if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
- if v := b.builtin(fn, obj, e.Args, tv.Type, e); v != nil {
+ if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil {
return v
}
}
@@ -558,7 +595,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
var v Call
b.setCall(fn, e, &v.Call)
v.setType(tv.Type)
- return fn.emit(&v, e)
+ return fn.emit(&v)
case *ast.UnaryExpr:
switch e.Op {
@@ -569,20 +606,19 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
// For simplicity, we'll just (suboptimally) rely
// on the side effects of a load.
// TODO(adonovan): emit dedicated nilcheck.
- addr.load(fn, e)
+ addr.load(fn)
}
return addr.address(fn)
case token.ADD:
return b.expr(fn, e.X)
- case token.NOT, token.SUB, token.XOR: // ! <- - ^
+ case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^
v := &UnOp{
Op: e.Op,
X: b.expr(fn, e.X),
}
+ v.setPos(e.OpPos)
v.setType(tv.Type)
- return fn.emit(v, e)
- case token.ARROW:
- return emitRecv(fn, b.expr(fn, e.X), false, tv.Type, e)
+ return fn.emit(v)
default:
panic(e.Op)
}
@@ -594,12 +630,12 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case token.SHL, token.SHR:
fallthrough
case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
- return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e)
+ return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos)
case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
- cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e)
+ cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
// The type of x==y may be UntypedBool.
- return emitConv(fn, cmp, types.Default(tv.Type), e)
+ return emitConv(fn, cmp, DefaultType(tv.Type))
default:
panic("illegal op in BinaryExpr: " + e.Op.String())
}
@@ -631,8 +667,9 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
High: high,
Max: max,
}
+ v.setPos(e.Lbrack)
v.setType(tv.Type)
- return fn.emit(v, e)
+ return fn.emit(v)
case *ast.Ident:
obj := fn.Pkg.info.Uses[e]
@@ -641,17 +678,17 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case *types.Builtin:
return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
case *types.Nil:
- return emitConst(fn, nilConst(tv.Type))
+ return nilConst(tv.Type)
}
// Package-level func or var?
if v := fn.Prog.packageLevelValue(obj); v != nil {
if _, ok := obj.(*types.Var); ok {
- return emitLoad(fn, v, e) // var (address)
+ return emitLoad(fn, v) // var (address)
}
return v // (func)
}
// Local var.
- return emitLoad(fn, fn.lookup(obj, false), e) // var (address)
+ return emitLoad(fn, fn.lookup(obj, false)) // var (address)
case *ast.SelectorExpr:
sel, ok := fn.Pkg.info.Selections[e]
@@ -663,7 +700,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case types.MethodExpr:
// (*T).f or T.f, the method f from the method-set of type T.
// The result is a "thunk".
- return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type, e)
+ return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type)
case types.MethodVal:
// e.f where e is an expression and f is a method.
@@ -672,26 +709,26 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
rt := recvType(obj)
wantAddr := isPointer(rt)
escaping := true
- v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
+ v := b.receiver(fn, e.X, wantAddr, escaping, sel)
if isInterface(rt) {
// If v has interface type I,
// we must emit a check that v is non-nil.
// We use: typeassert v.(I).
- emitTypeAssert(fn, v, rt, e)
+ emitTypeAssert(fn, v, rt, token.NoPos)
}
c := &MakeClosure{
Fn: makeBound(fn.Prog, obj),
Bindings: []Value{v},
}
- c.source = e.Sel
+ c.setPos(e.Sel.Pos())
c.setType(tv.Type)
- return fn.emit(c, e)
+ return fn.emit(c)
case types.FieldVal:
indices := sel.Index()
last := len(indices) - 1
v := b.expr(fn, e.X)
- v = emitImplicitSelections(fn, v, indices[:last], e)
+ v = emitImplicitSelections(fn, v, indices[:last])
v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
return v
}
@@ -704,33 +741,36 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
// Non-addressable array (in a register).
v := &Index{
X: b.expr(fn, e.X),
- Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index),
+ Index: emitConv(fn, b.expr(fn, e.Index), tInt),
}
+ v.setPos(e.Lbrack)
v.setType(t.Elem())
- return fn.emit(v, e)
+ return fn.emit(v)
case *types.Map:
// Maps are not addressable.
mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
- v := &MapLookup{
+ v := &Lookup{
X: b.expr(fn, e.X),
- Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e.Index),
+ Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
}
+ v.setPos(e.Lbrack)
v.setType(mapt.Elem())
- return fn.emit(v, e)
+ return fn.emit(v)
case *types.Basic: // => string
// Strings are not addressable.
- v := &StringLookup{
+ v := &Lookup{
X: b.expr(fn, e.X),
Index: b.expr(fn, e.Index),
}
+ v.setPos(e.Lbrack)
v.setType(tByte)
- return fn.emit(v, e)
+ return fn.emit(v)
case *types.Slice, *types.Pointer: // *array
// Addressable slice/array; use IndexAddr and Load.
- return b.addr(fn, e, false).load(fn, e)
+ return b.addr(fn, e, false).load(fn)
default:
panic("unexpected container type in IndexExpr: " + t.String())
@@ -738,7 +778,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case *ast.CompositeLit, *ast.StarExpr:
// Addressable types (lvalues)
- return b.addr(fn, e, false).load(fn, e)
+ return b.addr(fn, e, false).load(fn)
}
panic(fmt.Sprintf("unexpected expr: %T", e))
@@ -762,7 +802,7 @@ func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
//
// escaping is defined as per builder.addr().
//
-func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection, source ast.Node) Value {
+func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value {
var v Value
if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
v = b.addr(fn, e, escaping).address(fn)
@@ -771,9 +811,9 @@ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, se
}
last := len(sel.Index()) - 1
- v = emitImplicitSelections(fn, v, sel.Index()[:last], source)
+ v = emitImplicitSelections(fn, v, sel.Index()[:last])
if !wantAddr && isPointer(v.Type()) {
- v = emitLoad(fn, v, e)
+ v = emitLoad(fn, v)
}
return v
}
@@ -783,6 +823,8 @@ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, se
// occurring in e.
//
func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
+ c.pos = e.Lparen
+
// Is this a method call?
if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
sel, ok := fn.Pkg.info.Selections[selector]
@@ -791,7 +833,7 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
recv := recvType(obj)
wantAddr := isPointer(recv)
escaping := true
- v := b.receiver(fn, selector.X, wantAddr, escaping, sel, selector)
+ v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
if isInterface(recv) {
// Invoke-mode call.
c.Value = v
@@ -847,7 +889,7 @@ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallEx
// f(x, y, z...): pass slice z straight through.
if e.Ellipsis != 0 {
for i, arg := range e.Args {
- v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type(), arg)
+ v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type())
args = append(args, v)
}
return args
@@ -864,7 +906,7 @@ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallEx
v := b.expr(fn, arg)
if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
for i, n := 0, ttuple.Len(); i < n; i++ {
- args = append(args, emitExtract(fn, v, i, arg))
+ args = append(args, emitExtract(fn, v, i))
}
} else {
args = append(args, v)
@@ -877,7 +919,7 @@ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallEx
np--
}
for i := 0; i < np; i++ {
- args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type(), args[offset+i].Source())
+ args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type())
}
// Actual->formal assignability conversions for variadic parameter,
@@ -887,25 +929,25 @@ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallEx
st := sig.Params().At(np).Type().(*types.Slice)
vt := st.Elem()
if len(varargs) == 0 {
- args = append(args, emitConst(fn, nilConst(st)))
+ args = append(args, nilConst(st))
} else {
// Replace a suffix of args with a slice containing it.
at := types.NewArray(vt, int64(len(varargs)))
- a := emitNew(fn, at, e)
- a.source = e
+ a := emitNew(fn, at, token.NoPos)
+ a.setPos(e.Rparen)
a.Comment = "varargs"
for i, arg := range varargs {
iaddr := &IndexAddr{
X: a,
- Index: emitConst(fn, intConst(int64(i))),
+ Index: intConst(int64(i)),
}
iaddr.setType(types.NewPointer(vt))
- fn.emit(iaddr, e)
- emitStore(fn, iaddr, arg, arg.Source())
+ fn.emit(iaddr)
+ emitStore(fn, iaddr, arg, arg.Pos())
}
s := &Slice{X: a}
s.setType(st)
- args[offset+np] = fn.emit(s, args[offset+np].Source())
+ args[offset+np] = fn.emit(s)
args = args[:offset+np+1]
}
}
@@ -928,9 +970,9 @@ func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
}
// assignOp emits to fn code to perform loc <op>= val.
-func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, source ast.Node) {
- oldv := loc.load(fn, source)
- loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type(), source), loc.typ(), source), source)
+func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) {
+ oldv := loc.load(fn)
+ loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos))
}
// localValueSpec emits to fn code to define all of the vars in the
@@ -946,7 +988,7 @@ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
fn.addLocalForIdent(id)
}
lval := b.addr(fn, id, false) // non-escaping
- b.assign(fn, lval, spec.Values[i], true, nil, spec)
+ b.assign(fn, lval, spec.Values[i], true, nil)
}
case len(spec.Values) == 0:
@@ -968,7 +1010,7 @@ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
if !isBlankIdent(id) {
fn.addLocalForIdent(id)
lhs := b.addr(fn, id, false) // non-escaping
- lhs.store(fn, emitExtract(fn, tuple, i, id), id)
+ lhs.store(fn, emitExtract(fn, tuple, i))
}
}
}
@@ -979,7 +1021,7 @@ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
//
// Note the similarity with localValueSpec.
//
-func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, source ast.Node) {
+func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
// Side effects of all LHSs and RHSs must occur in left-to-right order.
lvals := make([]lvalue, len(lhss))
isZero := make([]bool, len(lhss))
@@ -988,7 +1030,7 @@ func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, so
if !isBlankIdent(lhs) {
if isDef {
if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
- fn.addNamedLocal(obj, lhs)
+ fn.addNamedLocal(obj)
isZero[i] = true
}
}
@@ -1005,7 +1047,7 @@ func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, so
// so we need a storebuf.
var sb storebuf
for i := range rhss {
- b.assign(fn, lvals[i], rhss[i], isZero[i], &sb, source)
+ b.assign(fn, lvals[i], rhss[i], isZero[i], &sb)
}
sb.emit(fn)
} else {
@@ -1013,7 +1055,7 @@ func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, so
tuple := b.exprN(fn, rhss[0])
emitDebugRef(fn, rhss[0], tuple, false)
for i, lval := range lvals {
- lval.store(fn, emitExtract(fn, tuple, i, source), source)
+ lval.store(fn, emitExtract(fn, tuple, i))
}
}
}
@@ -1060,17 +1102,20 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
case *types.Struct:
if !isZero && len(e.Elts) != t.NumFields() {
// memclear
- sb.store(&address{addr, nil}, zeroValue(fn, deref(addr.Type()), e), e)
+ sb.store(&address{addr, e.Lbrace, nil},
+ zeroValue(fn, deref(addr.Type())))
isZero = true
}
for i, e := range e.Elts {
fieldIndex := i
+ pos := e.Pos()
if kv, ok := e.(*ast.KeyValueExpr); ok {
fname := kv.Key.(*ast.Ident).Name
for i, n := 0, t.NumFields(); i < n; i++ {
sf := t.Field(i)
if sf.Name() == fname {
fieldIndex = i
+ pos = kv.Colon
e = kv.Value
break
}
@@ -1082,8 +1127,8 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
Field: fieldIndex,
}
faddr.setType(types.NewPointer(sf.Type()))
- fn.emit(faddr, e)
- b.assign(fn, &address{addr: faddr, expr: e}, e, isZero, sb, e)
+ fn.emit(faddr)
+ b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb)
}
case *types.Array, *types.Slice:
@@ -1092,7 +1137,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
switch t := t.(type) {
case *types.Slice:
at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
- alloc := emitNew(fn, at, e)
+ alloc := emitNew(fn, at, e.Lbrace)
alloc.Comment = "slicelit"
array = alloc
case *types.Array:
@@ -1101,46 +1146,51 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
if !isZero && int64(len(e.Elts)) != at.Len() {
// memclear
- sb.store(&address{array, nil}, zeroValue(fn, deref(array.Type()), e), e)
+ sb.store(&address{array, e.Lbrace, nil},
+ zeroValue(fn, deref(array.Type())))
}
}
var idx *Const
for _, e := range e.Elts {
+ pos := e.Pos()
if kv, ok := e.(*ast.KeyValueExpr); ok {
idx = b.expr(fn, kv.Key).(*Const)
+ pos = kv.Colon
e = kv.Value
} else {
var idxval int64
if idx != nil {
idxval = idx.Int64() + 1
}
- idx = emitConst(fn, intConst(idxval))
+ idx = intConst(idxval)
}
iaddr := &IndexAddr{
X: array,
Index: idx,
}
iaddr.setType(types.NewPointer(at.Elem()))
- fn.emit(iaddr, e)
+ fn.emit(iaddr)
if t != at { // slice
// backing array is unaliased => storebuf not needed.
- b.assign(fn, &address{addr: iaddr, expr: e}, e, true, nil, e)
+ b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil)
} else {
- b.assign(fn, &address{addr: iaddr, expr: e}, e, true, sb, e)
+ b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb)
}
}
if t != at { // slice
s := &Slice{X: array}
+ s.setPos(e.Lbrace)
s.setType(typ)
- sb.store(&address{addr: addr, expr: e}, fn.emit(s, e), e)
+ sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s))
}
case *types.Map:
- m := &MakeMap{Reserve: emitConst(fn, intConst(int64(len(e.Elts))))}
+ m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))}
+ m.setPos(e.Lbrace)
m.setType(typ)
- fn.emit(m, e)
+ fn.emit(m)
for _, e := range e.Elts {
e := e.(*ast.KeyValueExpr)
@@ -1161,9 +1211,10 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
}
loc := element{
- m: m,
- k: emitConv(fn, key, t.Key(), e),
- t: t.Elem(),
+ m: m,
+ k: emitConv(fn, key, t.Key()),
+ t: t.Elem(),
+ pos: e.Colon,
}
// We call assign() only because it takes care
@@ -1172,143 +1223,29 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
// map[int]*struct{}{0: {}} implies &struct{}{}.
// In-place update is of course impossible,
// and no storebuf is needed.
- b.assign(fn, &loc, e.Value, true, nil, e)
+ b.assign(fn, &loc, e.Value, true, nil)
}
- sb.store(&address{addr: addr, expr: e}, m, e)
+ sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m)
default:
panic("unexpected CompositeLit type: " + t.String())
}
}
-func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
- if s.Tag == nil {
- b.switchStmtDynamic(fn, s, label)
- return
- }
- dynamic := false
- for _, iclause := range s.Body.List {
- clause := iclause.(*ast.CaseClause)
- for _, cond := range clause.List {
- if fn.Pkg.info.Types[unparen(cond)].Value == nil {
- dynamic = true
- break
- }
- }
- }
-
- if dynamic {
- b.switchStmtDynamic(fn, s, label)
- return
- }
-
- if s.Init != nil {
- b.stmt(fn, s.Init)
- }
-
- entry := fn.currentBlock
- tag := b.expr(fn, s.Tag)
-
- heads := make([]*BasicBlock, 0, len(s.Body.List))
- bodies := make([]*BasicBlock, len(s.Body.List))
- conds := make([]Value, 0, len(s.Body.List))
-
- hasDefault := false
- done := fn.newBasicBlock(fmt.Sprintf("switch.done"))
- if label != nil {
- label._break = done
- }
- for i, stmt := range s.Body.List {
- body := fn.newBasicBlock(fmt.Sprintf("switch.body.%d", i))
- bodies[i] = body
- cas := stmt.(*ast.CaseClause)
- if cas.List == nil {
- // default branch
- hasDefault = true
- head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d", i))
- conds = append(conds, nil)
- heads = append(heads, head)
- fn.currentBlock = head
- emitJump(fn, body, cas)
- }
- for j, cond := range stmt.(*ast.CaseClause).List {
- fn.currentBlock = entry
- head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d.%d", i, j))
- conds = append(conds, b.expr(fn, cond))
- heads = append(heads, head)
- fn.currentBlock = head
- emitJump(fn, body, cond)
- }
- }
-
- for i, stmt := range s.Body.List {
- clause := stmt.(*ast.CaseClause)
- body := bodies[i]
- fn.currentBlock = body
- fallthru := done
- if i+1 < len(bodies) {
- fallthru = bodies[i+1]
- }
- fn.targets = &targets{
- tail: fn.targets,
- _break: done,
- _fallthrough: fallthru,
- }
- b.stmtList(fn, clause.Body)
- fn.targets = fn.targets.tail
- emitJump(fn, done, stmt)
- }
-
- if !hasDefault {
- head := fn.newBasicBlock(fmt.Sprintf("switch.head.implicit-default"))
- body := fn.newBasicBlock("switch.body.implicit-default")
- fn.currentBlock = head
- emitJump(fn, body, s)
- fn.currentBlock = body
- emitJump(fn, done, s)
- heads = append(heads, head)
- conds = append(conds, nil)
- }
-
- if len(heads) != len(conds) {
- panic(fmt.Sprintf("internal error: %d heads for %d conds", len(heads), len(conds)))
- }
- for _, head := range heads {
- addEdge(entry, head)
- }
- fn.currentBlock = entry
- entry.emit(&ConstantSwitch{
- Tag: tag,
- Conds: conds,
- }, s)
- fn.currentBlock = done
-}
-
// switchStmt emits to fn code for the switch statement s, optionally
// labelled by label.
//
-func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblock) {
+func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
// We treat SwitchStmt like a sequential if-else chain.
- // Multiway dispatch can be recovered later by irutil.Switches()
+ // Multiway dispatch can be recovered later by ssautil.Switches()
// to those cases that are free of side effects.
if s.Init != nil {
b.stmt(fn, s.Init)
}
- kTrue := emitConst(fn, NewConst(constant.MakeBool(true), tBool))
-
- var tagv Value = kTrue
- var tagSource ast.Node = s
+ var tag Value = vTrue
if s.Tag != nil {
- tagv = b.expr(fn, s.Tag)
- tagSource = s.Tag
+ tag = b.expr(fn, s.Tag)
}
- // lifting only considers loads and stores, but we want different
- // sigma nodes for the different comparisons. use a temporary and
- // load it in every branch.
- tag := fn.addLocal(tagv.Type(), tagSource)
- tag.Comment = "switch.tag"
- emitStore(fn, tag, tagv, tagSource)
-
done := fn.newBasicBlock("switch.done")
if label != nil {
label._break = done
@@ -1346,23 +1283,13 @@ func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblo
var nextCond *BasicBlock
for _, cond := range cc.List {
nextCond = fn.newBasicBlock("switch.next")
- if tagv == kTrue {
- // emit a proper if/else chain instead of a comparison
- // of a value against true.
- //
- // NOTE(dh): adonovan had a todo saying "don't forget
- // conversions though". As far as I can tell, there
- // aren't any conversions that we need to take care of
- // here. `case bool(a) && bool(b)` as well as `case
- // bool(a && b)` are being taken care of by b.cond,
- // and `case a` where a is not of type bool is
- // invalid.
- b.cond(fn, cond, body, nextCond)
- } else {
- cond := emitCompare(fn, token.EQL, emitLoad(fn, tag, cond), b.expr(fn, cond), cond)
- emitIf(fn, cond, body, nextCond, cond.Source())
- }
-
+ // TODO(adonovan): opt: when tag==vTrue, we'd
+ // get better code if we use b.cond(cond)
+ // instead of BinOp(EQL, tag, b.expr(cond))
+ // followed by If. Don't forget conversions
+ // though.
+ cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos())
+ emitIf(fn, cond, body, nextCond)
fn.currentBlock = nextCond
}
fn.currentBlock = body
@@ -1373,14 +1300,11 @@ func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblo
}
b.stmtList(fn, cc.Body)
fn.targets = fn.targets.tail
- emitJump(fn, done, s)
+ emitJump(fn, done)
fn.currentBlock = nextCond
}
if dfltBlock != nil {
- // The lack of a Source for the jump doesn't matter, block
- // fusing will get rid of the jump later.
-
- emitJump(fn, dfltBlock, s)
+ emitJump(fn, dfltBlock)
fn.currentBlock = dfltBlock
fn.targets = &targets{
tail: fn.targets,
@@ -1390,175 +1314,138 @@ func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblo
b.stmtList(fn, *dfltBody)
fn.targets = fn.targets.tail
}
- emitJump(fn, done, s)
+ emitJump(fn, done)
fn.currentBlock = done
}
+// typeSwitchStmt emits to fn code for the type switch statement s, optionally
+// labelled by label.
+//
func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
+ // We treat TypeSwitchStmt like a sequential if-else chain.
+ // Multiway dispatch can be recovered later by ssautil.Switches().
+
+ // Typeswitch lowering:
+ //
+ // var x X
+ // switch y := x.(type) {
+ // case T1, T2: S1 // >1 (y := x)
+ // case nil: SN // nil (y := x)
+ // default: SD // 0 types (y := x)
+ // case T3: S3 // 1 type (y := x.(T3))
+ // }
+ //
+ // ...s.Init...
+ // x := eval x
+ // .caseT1:
+ // t1, ok1 := typeswitch,ok x <T1>
+ // if ok1 then goto S1 else goto .caseT2
+ // .caseT2:
+ // t2, ok2 := typeswitch,ok x <T2>
+ // if ok2 then goto S1 else goto .caseNil
+ // .S1:
+ // y := x
+ // ...S1...
+ // goto done
+ // .caseNil:
+ // if t2, ok2 := typeswitch,ok x <T2>
+ // if x == nil then goto SN else goto .caseT3
+ // .SN:
+ // y := x
+ // ...SN...
+ // goto done
+ // .caseT3:
+ // t3, ok3 := typeswitch,ok x <T3>
+ // if ok3 then goto S3 else goto default
+ // .S3:
+ // y := t3
+ // ...S3...
+ // goto done
+ // .default:
+ // y := x
+ // ...SD...
+ // goto done
+ // .done:
+
if s.Init != nil {
b.stmt(fn, s.Init)
}
- var tag Value
- switch e := s.Assign.(type) {
+ var x Value
+ switch ass := s.Assign.(type) {
case *ast.ExprStmt: // x.(type)
- tag = b.expr(fn, unparen(e.X).(*ast.TypeAssertExpr).X)
+ x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X)
case *ast.AssignStmt: // y := x.(type)
- tag = b.expr(fn, unparen(e.Rhs[0]).(*ast.TypeAssertExpr).X)
- default:
- panic("unreachable")
+ x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
}
- tagPtr := fn.addLocal(tag.Type(), tag.Source())
- emitStore(fn, tagPtr, tag, tag.Source())
- // +1 in case there's no explicit default case
- heads := make([]*BasicBlock, 0, len(s.Body.List)+1)
-
- entry := fn.currentBlock
- done := fn.newBasicBlock("done")
+ done := fn.newBasicBlock("typeswitch.done")
if label != nil {
label._break = done
}
-
- // set up type switch and constant switch, populate their conditions
- tswtch := &TypeSwitch{
- Tag: emitLoad(fn, tagPtr, tag.Source()),
- Conds: make([]types.Type, 0, len(s.Body.List)+1),
- }
- cswtch := &ConstantSwitch{
- Conds: make([]Value, 0, len(s.Body.List)+1),
- }
-
- rets := make([]types.Type, 0, len(s.Body.List)+1)
- index := 0
var default_ *ast.CaseClause
for _, clause := range s.Body.List {
cc := clause.(*ast.CaseClause)
- if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
- fn.addNamedLocal(obj, cc)
- }
if cc.List == nil {
- // default case
default_ = cc
- } else {
- for _, expr := range cc.List {
- tswtch.Conds = append(tswtch.Conds, fn.Pkg.typeOf(expr))
- cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(index))))
- index++
- }
- if len(cc.List) == 1 {
- rets = append(rets, fn.Pkg.typeOf(cc.List[0]))
- } else {
- for range cc.List {
- rets = append(rets, tag.Type())
- }
- }
- }
- }
-
- // default branch
- rets = append(rets, tag.Type())
-
- var vars []*types.Var
- vars = append(vars, varIndex)
- for _, typ := range rets {
- vars = append(vars, anonVar(typ))
- }
- tswtch.setType(types.NewTuple(vars...))
- // default branch
- fn.currentBlock = entry
- fn.emit(tswtch, s)
- cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(-1))))
- // in theory we should add a local and stores/loads for tswtch, to
- // generate sigma nodes in the branches. however, there isn't any
- // useful information we could possibly attach to it.
- cswtch.Tag = emitExtract(fn, tswtch, 0, s)
- fn.emit(cswtch, s)
-
- // build heads and bodies
- index = 0
- for _, clause := range s.Body.List {
- cc := clause.(*ast.CaseClause)
- if cc.List == nil {
continue
}
-
body := fn.newBasicBlock("typeswitch.body")
- for _, expr := range cc.List {
- head := fn.newBasicBlock("typeswitch.head")
- heads = append(heads, head)
- fn.currentBlock = head
-
- if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
- // In a switch y := x.(type), each case clause
- // implicitly declares a distinct object y.
- // In a single-type case, y has that type.
- // In multi-type cases, 'case nil' and default,
- // y has the same type as the interface operand.
-
- l := fn.objects[obj]
- if rets[index] == tUntypedNil {
- emitStore(fn, l, emitConst(fn, nilConst(tswtch.Tag.Type())), s.Assign)
- } else {
- x := emitExtract(fn, tswtch, index+1, s.Assign)
- emitStore(fn, l, x, nil)
- }
+ var next *BasicBlock
+ var casetype types.Type
+ var ti Value // ti, ok := typeassert,ok x <Ti>
+ for _, cond := range cc.List {
+ next = fn.newBasicBlock("typeswitch.next")
+ casetype = fn.Pkg.typeOf(cond)
+ var condv Value
+ if casetype == tUntypedNil {
+ condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos)
+ ti = x
+ } else {
+ yok := emitTypeTest(fn, x, casetype, cc.Case)
+ ti = emitExtract(fn, yok, 0)
+ condv = emitExtract(fn, yok, 1)
}
-
- emitJump(fn, body, expr)
- index++
+ emitIf(fn, condv, body, next)
+ fn.currentBlock = next
}
- fn.currentBlock = body
- fn.targets = &targets{
- tail: fn.targets,
- _break: done,
+ if len(cc.List) != 1 {
+ ti = x
}
- b.stmtList(fn, cc.Body)
- fn.targets = fn.targets.tail
- emitJump(fn, done, clause)
+ fn.currentBlock = body
+ b.typeCaseBody(fn, cc, ti, done)
+ fn.currentBlock = next
}
-
- if default_ == nil {
- // implicit default
- heads = append(heads, done)
+ if default_ != nil {
+ b.typeCaseBody(fn, default_, x, done)
} else {
- body := fn.newBasicBlock("typeswitch.default")
- heads = append(heads, body)
- fn.currentBlock = body
- fn.targets = &targets{
- tail: fn.targets,
- _break: done,
- }
- if obj := fn.Pkg.info.Implicits[default_]; obj != nil {
- l := fn.objects[obj]
- x := emitExtract(fn, tswtch, index+1, s.Assign)
- emitStore(fn, l, x, s)
- }
- b.stmtList(fn, default_.Body)
- fn.targets = fn.targets.tail
- emitJump(fn, done, s)
+ emitJump(fn, done)
}
+ fn.currentBlock = done
+}
- fn.currentBlock = entry
- for _, head := range heads {
- addEdge(entry, head)
+func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
+ if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
+ // In a switch y := x.(type), each case clause
+ // implicitly declares a distinct object y.
+ // In a single-type case, y has that type.
+ // In multi-type cases, 'case nil' and default,
+ // y has the same type as the interface operand.
+ emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos())
}
- fn.currentBlock = done
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, cc.Body)
+ fn.targets = fn.targets.tail
+ emitJump(fn, done)
}
// selectStmt emits to fn code for the select statement s, optionally
// labelled by label.
//
-func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (noreturn bool) {
- if len(s.Body.List) == 0 {
- instr := &Select{Blocking: true}
- instr.setType(types.NewTuple(varIndex, varOk))
- fn.emit(instr, s)
- fn.emit(new(Unreachable), s)
- addEdge(fn.currentBlock, fn.Exit)
- return true
- }
-
+func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
// A blocking select of a single case degenerates to a
// simple send or receive.
// TODO(adonovan): opt: is this optimization worth its weight?
@@ -1576,9 +1463,9 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no
}
b.stmtList(fn, clause.Body)
fn.targets = fn.targets.tail
- emitJump(fn, done, clause)
+ emitJump(fn, done)
fn.currentBlock = done
- return false
+ return
}
}
@@ -1600,7 +1487,7 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no
Dir: types.SendOnly,
Chan: ch,
Send: emitConv(fn, b.expr(fn, comm.Value),
- ch.Type().Underlying().(*types.Chan).Elem(), comm),
+ ch.Type().Underlying().(*types.Chan).Elem()),
Pos: comm.Arrow,
}
if debugInfo {
@@ -1633,12 +1520,22 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no
}
// We dispatch on the (fair) result of Select using a
- // switch on the returned index.
+ // sequential if-else chain, in effect:
+ //
+ // idx, recvOk, r0...r_n-1 := select(...)
+ // if idx == 0 { // receive on channel 0 (first receive => r0)
+ // x, ok := r0, recvOk
+ // ...state0...
+ // } else if v == 1 { // send on channel 1
+ // ...state1...
+ // } else {
+ // ...default...
+ // }
sel := &Select{
States: states,
Blocking: blocking,
}
- sel.source = s
+ sel.setPos(s.Select)
var vars []*types.Var
vars = append(vars, varIndex, varOk)
for _, st := range states {
@@ -1648,45 +1545,28 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no
}
}
sel.setType(types.NewTuple(vars...))
- fn.emit(sel, s)
- idx := emitExtract(fn, sel, 0, s)
+
+ fn.emit(sel)
+ idx := emitExtract(fn, sel, 0)
done := fn.newBasicBlock("select.done")
if label != nil {
label._break = done
}
- entry := fn.currentBlock
- swtch := &ConstantSwitch{
- Tag: idx,
- // one condition per case
- Conds: make([]Value, 0, len(s.Body.List)+1),
- }
- // note that we don't need heads; a select case can only have a single condition
- var bodies []*BasicBlock
-
+ var defaultBody *[]ast.Stmt
state := 0
r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
for _, cc := range s.Body.List {
clause := cc.(*ast.CommClause)
if clause.Comm == nil {
- body := fn.newBasicBlock("select.default")
- fn.currentBlock = body
- bodies = append(bodies, body)
- fn.targets = &targets{
- tail: fn.targets,
- _break: done,
- }
- b.stmtList(fn, clause.Body)
- emitJump(fn, done, s)
- fn.targets = fn.targets.tail
- swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(-1)))
+ defaultBody = &clause.Body
continue
}
- swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(int64(state))))
body := fn.newBasicBlock("select.body")
+ next := fn.newBasicBlock("select.next")
+ emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next)
fn.currentBlock = body
- bodies = append(bodies, body)
fn.targets = &targets{
tail: fn.targets,
_break: done,
@@ -1694,7 +1574,7 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no
switch comm := clause.Comm.(type) {
case *ast.ExprStmt: // <-ch
if debugInfo {
- v := emitExtract(fn, sel, r, comm)
+ v := emitExtract(fn, sel, r)
emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
}
r++
@@ -1704,33 +1584,44 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no
fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
}
x := b.addr(fn, comm.Lhs[0], false) // non-escaping
- v := emitExtract(fn, sel, r, comm)
+ v := emitExtract(fn, sel, r)
if debugInfo {
emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
}
- x.store(fn, v, comm)
+ x.store(fn, v)
if len(comm.Lhs) == 2 { // x, ok := ...
if comm.Tok == token.DEFINE {
fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
}
ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
- ok.store(fn, emitExtract(fn, sel, 1, comm), comm)
+ ok.store(fn, emitExtract(fn, sel, 1))
}
r++
}
b.stmtList(fn, clause.Body)
fn.targets = fn.targets.tail
- emitJump(fn, done, s)
+ emitJump(fn, done)
+ fn.currentBlock = next
state++
}
- fn.currentBlock = entry
- fn.emit(swtch, s)
- for _, body := range bodies {
- addEdge(entry, body)
+ if defaultBody != nil {
+ fn.targets = &targets{
+ tail: fn.targets,
+ _break: done,
+ }
+ b.stmtList(fn, *defaultBody)
+ fn.targets = fn.targets.tail
+ } else {
+ // A blocking select must match some case.
+ // (This should really be a runtime.errorString, not a string.)
+ fn.emit(&Panic{
+ X: emitConv(fn, stringConst("blocking select matched no case"), tEface),
+ })
+ fn.currentBlock = fn.newBasicBlock("unreachable")
}
+ emitJump(fn, done)
fn.currentBlock = done
- return false
}
// forStmt emits to fn code for the for statement s, optionally
@@ -1765,7 +1656,7 @@ func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
label._break = done
label._continue = cont
}
- emitJump(fn, loop, s)
+ emitJump(fn, loop)
fn.currentBlock = loop
if loop != body {
b.cond(fn, s.Cond, body, done)
@@ -1778,12 +1669,12 @@ func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
}
b.stmt(fn, s.Body)
fn.targets = fn.targets.tail
- emitJump(fn, cont, s)
+ emitJump(fn, cont)
if s.Post != nil {
fn.currentBlock = cont
b.stmt(fn, s.Post)
- emitJump(fn, loop, s) // back-edge
+ emitJump(fn, loop) // back-edge
}
fn.currentBlock = done
}
@@ -1793,7 +1684,7 @@ func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
// The v result is defined only if tv is non-nil.
// forPos is the position of the "for" token.
//
-func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
+func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
//
// length = len(x)
// index = -1
@@ -1816,37 +1707,37 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.
// elimination if x is pure, static unrolling, etc.
// Ranging over a nil *array may have >0 iterations.
// We still generate code for x, in case it has effects.
- length = emitConst(fn, intConst(arr.Len()))
+ length = intConst(arr.Len())
} else {
// length = len(x).
var c Call
c.Call.Value = makeLen(x.Type())
c.Call.Args = []Value{x}
c.setType(tInt)
- length = fn.emit(&c, source)
+ length = fn.emit(&c)
}
- index := fn.addLocal(tInt, source)
- emitStore(fn, index, emitConst(fn, intConst(-1)), source)
+ index := fn.addLocal(tInt, token.NoPos)
+ emitStore(fn, index, intConst(-1), pos)
loop = fn.newBasicBlock("rangeindex.loop")
- emitJump(fn, loop, source)
+ emitJump(fn, loop)
fn.currentBlock = loop
incr := &BinOp{
Op: token.ADD,
- X: emitLoad(fn, index, source),
- Y: emitConst(fn, intConst(1)),
+ X: emitLoad(fn, index),
+ Y: vOne,
}
incr.setType(tInt)
- emitStore(fn, index, fn.emit(incr, source), source)
+ emitStore(fn, index, fn.emit(incr), pos)
body := fn.newBasicBlock("rangeindex.body")
done = fn.newBasicBlock("rangeindex.done")
- emitIf(fn, emitCompare(fn, token.LSS, incr, length, source), body, done, source)
+ emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done)
fn.currentBlock = body
- k = emitLoad(fn, index, source)
+ k = emitLoad(fn, index)
if tv != nil {
switch t := x.Type().Underlying().(type) {
case *types.Array:
@@ -1855,7 +1746,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.
Index: k,
}
instr.setType(t.Elem())
- v = fn.emit(instr, source)
+ v = fn.emit(instr)
case *types.Pointer: // *array
instr := &IndexAddr{
@@ -1863,7 +1754,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.
Index: k,
}
instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
- v = emitLoad(fn, fn.emit(instr, source), source)
+ v = emitLoad(fn, fn.emit(instr))
case *types.Slice:
instr := &IndexAddr{
@@ -1871,7 +1762,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.
Index: k,
}
instr.setType(types.NewPointer(t.Elem()))
- v = emitLoad(fn, fn.emit(instr, source), source)
+ v = emitLoad(fn, fn.emit(instr))
default:
panic("rangeIndexed x:" + t.String())
@@ -1885,7 +1776,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.
// tk and tv are the types of the key/value results k and v, or nil
// if the respective component is not wanted.
//
-func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
+func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
//
// it = range x
// loop: (target of continue)
@@ -1908,11 +1799,12 @@ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast
}
rng := &Range{X: x}
+ rng.setPos(pos)
rng.setType(tRangeIter)
- it := fn.emit(rng, source)
+ it := fn.emit(rng)
loop = fn.newBasicBlock("rangeiter.loop")
- emitJump(fn, loop, source)
+ emitJump(fn, loop)
fn.currentBlock = loop
_, isString := x.Type().Underlying().(*types.Basic)
@@ -1926,18 +1818,18 @@ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast
newVar("k", tk),
newVar("v", tv),
))
- fn.emit(okv, source)
+ fn.emit(okv)
body := fn.newBasicBlock("rangeiter.body")
done = fn.newBasicBlock("rangeiter.done")
- emitIf(fn, emitExtract(fn, okv, 0, source), body, done, source)
+ emitIf(fn, emitExtract(fn, okv, 0), body, done)
fn.currentBlock = body
if tk != tInvalid {
- k = emitExtract(fn, okv, 1, source)
+ k = emitExtract(fn, okv, 1)
}
if tv != tInvalid {
- v = emitExtract(fn, okv, 2, source)
+ v = emitExtract(fn, okv, 2)
}
return
}
@@ -1948,7 +1840,7 @@ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast
// not wanted
// pos is the position of the '=' or ':=' token.
//
-func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Node) (k Value, loop, done *BasicBlock) {
+func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
//
// loop: (target of continue)
// ko = <-x (key, ok)
@@ -1961,15 +1853,25 @@ func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Nod
// done: (target of break)
loop = fn.newBasicBlock("rangechan.loop")
- emitJump(fn, loop, source)
+ emitJump(fn, loop)
fn.currentBlock = loop
- retv := emitRecv(fn, x, true, types.NewTuple(newVar("k", x.Type().Underlying().(*types.Chan).Elem()), varOk), source)
+ recv := &UnOp{
+ Op: token.ARROW,
+ X: x,
+ CommaOk: true,
+ }
+ recv.setPos(pos)
+ recv.setType(types.NewTuple(
+ newVar("k", x.Type().Underlying().(*types.Chan).Elem()),
+ varOk,
+ ))
+ ko := fn.emit(recv)
body := fn.newBasicBlock("rangechan.body")
done = fn.newBasicBlock("rangechan.done")
- emitIf(fn, emitExtract(fn, retv, 1, source), body, done, source)
+ emitIf(fn, emitExtract(fn, ko, 1), body, done)
fn.currentBlock = body
if tk != nil {
- k = emitExtract(fn, retv, 0, source)
+ k = emitExtract(fn, ko, 0)
}
return
}
@@ -1977,7 +1879,7 @@ func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Nod
// rangeStmt emits to fn code for the range statement s, optionally
// labelled by label.
//
-func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, source ast.Node) {
+func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
var tk, tv types.Type
if s.Key != nil && !isBlankIdent(s.Key) {
tk = fn.Pkg.typeOf(s.Key)
@@ -2007,13 +1909,13 @@ func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, sourc
var loop, done *BasicBlock
switch rt := x.Type().Underlying().(type) {
case *types.Slice, *types.Array, *types.Pointer: // *array
- k, v, loop, done = b.rangeIndexed(fn, x, tv, source)
+ k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For)
case *types.Chan:
- k, loop, done = b.rangeChan(fn, x, tk, source)
+ k, loop, done = b.rangeChan(fn, x, tk, s.For)
case *types.Map, *types.Basic: // string
- k, v, loop, done = b.rangeIter(fn, x, tk, tv, source)
+ k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)
default:
panic("Cannot range over: " + rt.String())
@@ -2028,10 +1930,10 @@ func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, sourc
vl = b.addr(fn, s.Value, false) // non-escaping
}
if tk != nil {
- kl.store(fn, k, s)
+ kl.store(fn, k)
}
if tv != nil {
- vl.store(fn, v, s)
+ vl.store(fn, v)
}
if label != nil {
@@ -2046,11 +1948,11 @@ func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, sourc
}
b.stmt(fn, s.Body)
fn.targets = fn.targets.tail
- emitJump(fn, loop, source) // back-edge
+ emitJump(fn, loop) // back-edge
fn.currentBlock = done
}
-// stmt lowers statement s to IR form, emitting code to fn.
+// stmt lowers statement s to SSA form, emitting code to fn.
func (b *builder) stmt(fn *Function, _s ast.Stmt) {
// The label of the current statement. If non-nil, its _goto
// target is always set; its _break and _continue are set only
@@ -2074,7 +1976,7 @@ start:
case *ast.LabeledStmt:
label = fn.labelledBlock(s.Label)
- emitJump(fn, label._goto, s)
+ emitJump(fn, label._goto)
fn.currentBlock = label._goto
_s = s.Stmt
goto start // effectively: tailcall stmt(fn, s.Stmt, label)
@@ -2083,12 +1985,12 @@ start:
b.expr(fn, s.X)
case *ast.SendStmt:
- instr := &Send{
+ fn.emit(&Send{
Chan: b.expr(fn, s.Chan),
X: emitConv(fn, b.expr(fn, s.Value),
- fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem(), s),
- }
- fn.emit(instr, s)
+ fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
+ pos: s.Arrow,
+ })
case *ast.IncDecStmt:
op := token.ADD
@@ -2096,37 +1998,37 @@ start:
op = token.SUB
}
loc := b.addr(fn, s.X, false)
- b.assignOp(fn, loc, emitConst(fn, NewConst(constant.MakeInt64(1), loc.typ())), op, s)
+ b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos())
case *ast.AssignStmt:
switch s.Tok {
case token.ASSIGN, token.DEFINE:
- b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE, _s)
+ b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)
default: // +=, etc.
op := s.Tok + token.ADD - token.ADD_ASSIGN
- b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s)
+ b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos())
}
case *ast.GoStmt:
// The "intrinsics" new/make/len/cap are forbidden here.
// panic is treated like an ordinary function call.
- v := Go{}
+ v := Go{pos: s.Go}
b.setCall(fn, s.Call, &v.Call)
- fn.emit(&v, s)
+ fn.emit(&v)
case *ast.DeferStmt:
// The "intrinsics" new/make/len/cap are forbidden here.
// panic is treated like an ordinary function call.
- v := Defer{}
+ v := Defer{pos: s.Defer}
b.setCall(fn, s.Call, &v.Call)
- fn.hasDefer = true
- fn.emit(&v, s)
+ fn.emit(&v)
- case *ast.ReturnStmt:
- // TODO(dh): we could emit tigher position information by
- // using the ith returned expression
+ // A deferred call can cause recovery from panic,
+ // and control resumes at the Recover block.
+ createRecoverBlock(fn)
+ case *ast.ReturnStmt:
var results []Value
if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
// Return of one expression in a multi-valued function.
@@ -2134,23 +2036,34 @@ start:
ttuple := tuple.Type().(*types.Tuple)
for i, n := 0, ttuple.Len(); i < n; i++ {
results = append(results,
- emitConv(fn, emitExtract(fn, tuple, i, s),
- fn.Signature.Results().At(i).Type(), s))
+ emitConv(fn, emitExtract(fn, tuple, i),
+ fn.Signature.Results().At(i).Type()))
}
} else {
// 1:1 return, or no-arg return in non-void function.
for i, r := range s.Results {
- v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type(), s)
+ v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type())
results = append(results, v)
}
}
-
- ret := fn.results()
- for i, r := range results {
- emitStore(fn, ret[i], r, s)
+ if fn.namedResults != nil {
+ // Function has named result parameters (NRPs).
+ // Perform parallel assignment of return operands to NRPs.
+ for i, r := range results {
+ emitStore(fn, fn.namedResults[i], r, s.Return)
+ }
}
-
- emitJump(fn, fn.Exit, s)
+ // Run function calls deferred in this
+ // function when explicitly returning from it.
+ fn.emit(new(RunDefers))
+ if fn.namedResults != nil {
+ // Reload NRPs to form the result tuple.
+ results = results[:0]
+ for _, r := range fn.namedResults {
+ results = append(results, emitLoad(fn, r))
+ }
+ }
+ fn.emit(&Return{Results: results, pos: s.Return})
fn.currentBlock = fn.newBasicBlock("unreachable")
case *ast.BranchStmt:
@@ -2182,8 +2095,7 @@ start:
case token.GOTO:
block = fn.labelledBlock(s.Label)._goto
}
- j := emitJump(fn, block, s)
- j.Comment = s.Tok.String()
+ emitJump(fn, block)
fn.currentBlock = fn.newBasicBlock("unreachable")
case *ast.BlockStmt:
@@ -2199,16 +2111,15 @@ start:
if s.Else != nil {
els = fn.newBasicBlock("if.else")
}
- instr := b.cond(fn, s.Cond, then, els)
- instr.source = s
+ b.cond(fn, s.Cond, then, els)
fn.currentBlock = then
b.stmt(fn, s.Body)
- emitJump(fn, done, s)
+ emitJump(fn, done)
if s.Else != nil {
fn.currentBlock = els
b.stmt(fn, s.Else)
- emitJump(fn, done, s)
+ emitJump(fn, done)
}
fn.currentBlock = done
@@ -2220,23 +2131,20 @@ start:
b.typeSwitchStmt(fn, s, label)
case *ast.SelectStmt:
- if b.selectStmt(fn, s, label) {
- // the select has no cases, it blocks forever
- fn.currentBlock = fn.newBasicBlock("unreachable")
- }
+ b.selectStmt(fn, s, label)
case *ast.ForStmt:
b.forStmt(fn, s, label)
case *ast.RangeStmt:
- b.rangeStmt(fn, s, label, s)
+ b.rangeStmt(fn, s, label)
default:
panic(fmt.Sprintf("unexpected statement kind: %T", s))
}
}
-// buildFunction builds IR code for the body of function fn. Idempotent.
+// buildFunction builds SSA code for the body of function fn. Idempotent.
func (b *builder) buildFunction(fn *Function) {
if fn.Blocks != nil {
return // building already started
@@ -2245,7 +2153,7 @@ func (b *builder) buildFunction(fn *Function) {
var recvField *ast.FieldList
var body *ast.BlockStmt
var functype *ast.FuncType
- switch n := fn.source.(type) {
+ switch n := fn.syntax.(type) {
case nil:
return // not a Go source function. (Synthetic, or from object file.)
case *ast.FuncDecl:
@@ -2259,16 +2167,6 @@ func (b *builder) buildFunction(fn *Function) {
panic(n)
}
- if fn.Package().Pkg.Path() == "syscall" && fn.Name() == "Exit" {
- // syscall.Exit is a stub and the way os.Exit terminates the
- // process. Note that there are other functions in the runtime
- // that also terminate or unwind that we cannot analyze.
- // However, they aren't stubs, so buildExits ends up getting
- // called on them, so that's where we handle those special
- // cases.
- fn.WillExit = true
- }
-
if body == nil {
// External function.
if fn.Params == nil {
@@ -2280,26 +2178,22 @@ func (b *builder) buildFunction(fn *Function) {
// We set Function.Params even though there is no body
// code to reference them. This simplifies clients.
if recv := fn.Signature.Recv(); recv != nil {
- // XXX synthesize an ast.Node
- fn.addParamObj(recv, nil)
+ fn.addParamObj(recv)
}
params := fn.Signature.Params()
for i, n := 0, params.Len(); i < n; i++ {
- // XXX synthesize an ast.Node
- fn.addParamObj(params.At(i), nil)
+ fn.addParamObj(params.At(i))
}
}
return
}
if fn.Prog.mode&LogSource != 0 {
- defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.Pos()))()
+ defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
}
- fn.blocksets = b.blocksets
fn.startBody()
fn.createSyntacticParams(recvField, functype)
- fn.exitBlock()
b.stmt(fn, body)
- if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb.Preds != nil) {
+ if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
// Control fell off the end of the function's body block.
//
// Block optimizations eliminate the current block, if
@@ -2307,19 +2201,13 @@ func (b *builder) buildFunction(fn *Function) {
// if this no-arg return is ill-typed for
// fn.Signature.Results, this block must be
// unreachable. The sanity checker checks this.
- // fn.emit(new(RunDefers))
- // fn.emit(new(Return))
- emitJump(fn, fn.Exit, nil)
- }
- optimizeBlocks(fn)
- buildFakeExits(fn)
- b.buildExits(fn)
- b.addUnreachables(fn)
+ fn.emit(new(RunDefers))
+ fn.emit(new(Return))
+ }
fn.finishBody()
- b.blocksets = fn.blocksets
}
-// buildFuncDecl builds IR code for the function or method declared
+// buildFuncDecl builds SSA code for the function or method declared
// by decl in package pkg.
//
func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
@@ -2332,13 +2220,13 @@ func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
var v Call
v.Call.Value = fn
v.setType(types.NewTuple())
- pkg.init.emit(&v, decl)
+ pkg.init.emit(&v)
}
- fn.source = decl
b.buildFunction(fn)
}
// Build calls Package.Build for each package in prog.
+// Building occurs in parallel unless the BuildSerially mode flag was set.
//
// Build is intended for whole-program analysis; a typical compiler
// need only build a single package.
@@ -2346,12 +2234,22 @@ func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
// Build is idempotent and thread-safe.
//
func (prog *Program) Build() {
+ var wg sync.WaitGroup
for _, p := range prog.packages {
- p.Build()
+ if prog.mode&BuildSerially != 0 {
+ p.Build()
+ } else {
+ wg.Add(1)
+ go func(p *Package) {
+ p.Build()
+ wg.Done()
+ }(p)
+ }
}
+ wg.Wait()
}
-// Build builds IR code for all functions and vars in package p.
+// Build builds SSA code for all functions and vars in package p.
//
// Precondition: CreatePackage must have been called for all of p's
// direct imports (and hence its direct imports must have been
@@ -2379,33 +2277,33 @@ func (p *Package) build() {
}
init := p.init
init.startBody()
- init.exitBlock()
var done *BasicBlock
- // Make init() skip if package is already initialized.
- initguard := p.Var("init$guard")
- doinit := init.newBasicBlock("init.start")
- done = init.Exit
- emitIf(init, emitLoad(init, initguard, nil), done, doinit, nil)
- init.currentBlock = doinit
- emitStore(init, initguard, emitConst(init, NewConst(constant.MakeBool(true), tBool)), nil)
+ if p.Prog.mode&BareInits == 0 {
+ // Make init() skip if package is already initialized.
+ initguard := p.Var("init$guard")
+ doinit := init.newBasicBlock("init.start")
+ done = init.newBasicBlock("init.done")
+ emitIf(init, emitLoad(init, initguard), done, doinit)
+ init.currentBlock = doinit
+ emitStore(init, initguard, vTrue, token.NoPos)
- // Call the init() function of each package we import.
- for _, pkg := range p.Pkg.Imports() {
- prereq := p.Prog.packages[pkg]
- if prereq == nil {
- panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
+ // Call the init() function of each package we import.
+ for _, pkg := range p.Pkg.Imports() {
+ prereq := p.Prog.packages[pkg]
+ if prereq == nil {
+ panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
+ }
+ var v Call
+ v.Call.Value = prereq.init
+ v.Call.pos = init.pos
+ v.setType(types.NewTuple())
+ init.emit(&v)
}
- var v Call
- v.Call.Value = prereq.init
- v.setType(types.NewTuple())
- init.emit(&v, nil)
}
- b := builder{
- printFunc: p.printFunc,
- }
+ var b builder
// Initialize package-level vars in correct order.
for _, varinit := range p.info.InitOrder {
@@ -2417,12 +2315,11 @@ func (p *Package) build() {
// 1:1 initialization: var x, y = a(), b()
var lval lvalue
if v := varinit.Lhs[0]; v.Name() != "_" {
- lval = &address{addr: p.values[v].(*Global)}
+ lval = &address{addr: p.values[v].(*Global), pos: v.Pos()}
} else {
lval = blank{}
}
- // TODO(dh): do emit position information
- b.assign(init, lval, varinit.Rhs, true, nil, nil)
+ b.assign(init, lval, varinit.Rhs, true, nil)
} else {
// n:1 initialization: var x, y := f()
tuple := b.exprN(init, varinit.Rhs)
@@ -2430,7 +2327,7 @@ func (p *Package) build() {
if v.Name() == "_" {
continue
}
- emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i, nil), nil)
+ emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos())
}
}
}
@@ -2447,7 +2344,11 @@ func (p *Package) build() {
}
// Finish up init().
- emitJump(init, done, nil)
+ if p.Prog.mode&BareInits == 0 {
+ emitJump(init, done)
+ init.currentBlock = done
+ }
+ init.emit(new(Return))
init.finishBody()
p.info = nil // We no longer need ASTs or go/types deductions.
diff --git a/vendor/honnef.co/go/tools/ir/const.go b/vendor/honnef.co/go/tools/ssa/const.go
index 7cdf006e8..f95d9e114 100644
--- a/vendor/honnef.co/go/tools/ir/const.go
+++ b/vendor/honnef.co/go/tools/ssa/const.go
@@ -2,13 +2,14 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines the Const SSA value type.
import (
"fmt"
"go/constant"
+ "go/token"
"go/types"
"strconv"
)
@@ -17,12 +18,7 @@ import (
// val must be valid according to the specification of Const.Value.
//
func NewConst(val constant.Value, typ types.Type) *Const {
- return &Const{
- register: register{
- typ: typ,
- },
- Value: val,
- }
+ return &Const{typ, val}
}
// intConst returns an 'int' constant that evaluates to i.
@@ -75,25 +71,43 @@ func zeroConst(t types.Type) *Const {
}
func (c *Const) RelString(from *types.Package) string {
- var p string
+ var s string
if c.Value == nil {
- p = "nil"
+ s = "nil"
} else if c.Value.Kind() == constant.String {
- v := constant.StringVal(c.Value)
+ s = constant.StringVal(c.Value)
const max = 20
// TODO(adonovan): don't cut a rune in half.
- if len(v) > max {
- v = v[:max-3] + "..." // abbreviate
+ if len(s) > max {
+ s = s[:max-3] + "..." // abbreviate
}
- p = strconv.Quote(v)
+ s = strconv.Quote(s)
} else {
- p = c.Value.String()
+ s = c.Value.String()
}
- return fmt.Sprintf("Const <%s> {%s}", relType(c.Type(), from), p)
+ return s + ":" + relType(c.Type(), from)
+}
+
+func (c *Const) Name() string {
+ return c.RelString(nil)
}
func (c *Const) String() string {
- return c.RelString(c.Parent().pkg())
+ return c.Name()
+}
+
+func (c *Const) Type() types.Type {
+ return c.typ
+}
+
+func (c *Const) Referrers() *[]Instruction {
+ return nil
+}
+
+func (c *Const) Parent() *Function { return nil }
+
+func (c *Const) Pos() token.Pos {
+ return token.NoPos
}
// IsNil returns true if this constant represents a typed or untyped nil value.
@@ -101,6 +115,8 @@ func (c *Const) IsNil() bool {
return c.Value == nil
}
+// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp.
+
// Int64 returns the numeric value of this constant truncated to fit
// a signed 64-bit integer.
//
diff --git a/vendor/honnef.co/go/tools/ir/create.go b/vendor/honnef.co/go/tools/ssa/create.go
index 34d0314e7..85163a0c5 100644
--- a/vendor/honnef.co/go/tools/ir/create.go
+++ b/vendor/honnef.co/go/tools/ssa/create.go
@@ -2,9 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
-// This file implements the CREATE phase of IR construction.
+// This file implements the CREATE phase of SSA construction.
// See builder.go for explanation.
import (
@@ -18,9 +18,9 @@ import (
"golang.org/x/tools/go/types/typeutil"
)
-// NewProgram returns a new IR Program.
+// NewProgram returns a new SSA Program.
//
-// mode controls diagnostics and checking during IR construction.
+// mode controls diagnostics and checking during SSA construction.
//
func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
prog := &Program{
@@ -75,6 +75,7 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
name: name,
object: obj,
typ: types.NewPointer(obj.Type()), // address
+ pos: obj.Pos(),
}
pkg.values[obj] = g
pkg.Members[name] = g
@@ -89,17 +90,16 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
name: name,
object: obj,
Signature: sig,
+ syntax: syntax,
+ pos: obj.Pos(),
Pkg: pkg,
Prog: pkg.Prog,
}
- fn.source = syntax
- fn.initHTML(pkg.printFunc)
if syntax == nil {
fn.Synthetic = "loaded from gc object file"
}
pkg.values[obj] = fn
- pkg.Functions = append(pkg.Functions, fn)
if sig.Recv() == nil {
pkg.Members[name] = fn // package-level function
}
@@ -152,25 +152,24 @@ func membersFromDecl(pkg *Package, decl ast.Decl) {
}
}
-// CreatePackage constructs and returns an IR Package from the
+// CreatePackage constructs and returns an SSA Package from the
// specified type-checked, error-free file ASTs, and populates its
// Members mapping.
//
// importable determines whether this package should be returned by a
// subsequent call to ImportedPackage(pkg.Path()).
//
-// The real work of building IR form for each function is not done
+// The real work of building SSA form for each function is not done
// until a subsequent call to Package.Build().
//
func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
p := &Package{
- Prog: prog,
- Members: make(map[string]Member),
- values: make(map[types.Object]Value),
- Pkg: pkg,
- info: info, // transient (CREATE and BUILD phases)
- files: files, // transient (CREATE and BUILD phases)
- printFunc: prog.PrintFunc,
+ Prog: prog,
+ Members: make(map[string]Member),
+ values: make(map[types.Object]Value),
+ Pkg: pkg,
+ info: info, // transient (CREATE and BUILD phases)
+ files: files, // transient (CREATE and BUILD phases)
}
// Add init() function.
@@ -181,9 +180,7 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *
Pkg: p,
Prog: prog,
}
- p.init.initHTML(prog.PrintFunc)
p.Members[p.init.name] = p.init
- p.Functions = append(p.Functions, p.init)
// CREATE phase.
// Allocate all package members: vars, funcs, consts and types.
@@ -212,13 +209,15 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *
}
}
- // Add initializer guard variable.
- initguard := &Global{
- Pkg: p,
- name: "init$guard",
- typ: types.NewPointer(tBool),
+ if prog.mode&BareInits == 0 {
+ // Add initializer guard variable.
+ initguard := &Global{
+ Pkg: p,
+ name: "init$guard",
+ typ: types.NewPointer(tBool),
+ }
+ p.Members[initguard.Name()] = initguard
}
- p.Members[initguard.Name()] = initguard
if prog.mode&GlobalDebug != 0 {
p.SetDebugMode(true)
@@ -261,10 +260,10 @@ func (prog *Program) AllPackages() []*Package {
//
// TODO(adonovan): rethink this function and the "importable" concept;
// most packages are importable. This function assumes that all
-// types.Package.Path values are unique within the ir.Program, which is
+// types.Package.Path values are unique within the ssa.Program, which is
// false---yet this function remains very convenient.
// Clients should use (*Program).Package instead where possible.
-// IR doesn't really need a string-keyed map of packages.
+// SSA doesn't really need a string-keyed map of packages.
//
func (prog *Program) ImportedPackage(path string) *Package {
return prog.imported[path]
diff --git a/vendor/honnef.co/go/tools/ir/doc.go b/vendor/honnef.co/go/tools/ssa/doc.go
index 87c54c55e..0f71fda00 100644
--- a/vendor/honnef.co/go/tools/ir/doc.go
+++ b/vendor/honnef.co/go/tools/ssa/doc.go
@@ -2,34 +2,36 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// Package ir defines a representation of the elements of Go programs
+// Package ssa defines a representation of the elements of Go programs
// (packages, types, functions, variables and constants) using a
-// static single-information (SSI) form intermediate representation
+// static single-assignment (SSA) form intermediate representation
// (IR) for the bodies of functions.
//
// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
//
-// For an introduction to SSA form, upon which SSI builds, see
+// For an introduction to SSA form, see
// http://en.wikipedia.org/wiki/Static_single_assignment_form.
// This page provides a broader reading list:
// http://www.dcs.gla.ac.uk/~jsinger/ssa.html.
//
-// For an introduction to SSI form, see The static single information
-// form by C. Scott Ananian.
-//
-// The level of abstraction of the IR form is intentionally close to
+// The level of abstraction of the SSA form is intentionally close to
// the source language to facilitate construction of source analysis
// tools. It is not intended for machine code generation.
//
-// The simplest way to create the IR of a package is
+// All looping, branching and switching constructs are replaced with
+// unstructured control flow. Higher-level control flow constructs
+// such as multi-way branch can be reconstructed as needed; see
+// ssautil.Switches() for an example.
+//
+// The simplest way to create the SSA representation of a package is
// to load typed syntax trees using golang.org/x/tools/go/packages, then
-// invoke the irutil.Packages helper function. See ExampleLoadPackages
+// invoke the ssautil.Packages helper function. See ExampleLoadPackages
// and ExampleWholeProgram for examples.
-// The resulting ir.Program contains all the packages and their
-// members, but IR code is not created for function bodies until a
+// The resulting ssa.Program contains all the packages and their
+// members, but SSA code is not created for function bodies until a
// subsequent call to (*Package).Build or (*Program).Build.
//
-// The builder initially builds a naive IR form in which all local
+// The builder initially builds a naive SSA form in which all local
// variables are addresses of stack locations with explicit loads and
// stores. Registerisation of eligible locals and φ-node insertion
// using dominance and dataflow are then performed as a second pass
@@ -42,7 +44,7 @@
// - Member: a named member of a Go package.
// - Value: an expression that yields a value.
// - Instruction: a statement that consumes values and performs computation.
-// - Node: a Value or Instruction (emphasizing its membership in the IR value graph)
+// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
//
// A computation that yields a result implements both the Value and
// Instruction interfaces. The following table shows for each
@@ -51,53 +53,47 @@
// Value? Instruction? Member?
// *Alloc ✔ ✔
// *BinOp ✔ ✔
-// *BlankStore ✔
// *Builtin ✔
// *Call ✔ ✔
// *ChangeInterface ✔ ✔
// *ChangeType ✔ ✔
-// *Const ✔ ✔
+// *Const ✔
// *Convert ✔ ✔
// *DebugRef ✔
-// *Defer ✔ ✔
+// *Defer ✔
// *Extract ✔ ✔
// *Field ✔ ✔
// *FieldAddr ✔ ✔
// *FreeVar ✔
// *Function ✔ ✔ (func)
// *Global ✔ ✔ (var)
-// *Go ✔ ✔
+// *Go ✔
// *If ✔
// *Index ✔ ✔
// *IndexAddr ✔ ✔
// *Jump ✔
-// *Load ✔ ✔
+// *Lookup ✔ ✔
// *MakeChan ✔ ✔
// *MakeClosure ✔ ✔
// *MakeInterface ✔ ✔
// *MakeMap ✔ ✔
// *MakeSlice ✔ ✔
-// *MapLookup ✔ ✔
-// *MapUpdate ✔ ✔
+// *MapUpdate ✔
// *NamedConst ✔ (const)
// *Next ✔ ✔
// *Panic ✔
-// *Parameter ✔ ✔
+// *Parameter ✔
// *Phi ✔ ✔
// *Range ✔ ✔
-// *Recv ✔ ✔
// *Return ✔
// *RunDefers ✔
// *Select ✔ ✔
-// *Send ✔ ✔
-// *Sigma ✔ ✔
+// *Send ✔
// *Slice ✔ ✔
-// *Store ✔ ✔
-// *StringLookup ✔ ✔
+// *Store ✔
// *Type ✔ (type)
// *TypeAssert ✔ ✔
// *UnOp ✔ ✔
-// *Unreachable ✔
//
// Other key types in this package include: Program, Package, Function
// and BasicBlock.
@@ -106,7 +102,7 @@
// resolved internally, i.e. it does not rely on the names of Values,
// Packages, Functions, Types or BasicBlocks for the correct
// interpretation of the program. Only the identities of objects and
-// the topology of the IR and type graphs are semantically
+// the topology of the SSA and type graphs are semantically
// significant. (There is one exception: Ids, used to identify field
// and method names, contain strings.) Avoidance of name-based
// operations simplifies the implementation of subsequent passes and
@@ -115,7 +111,7 @@
// either accurate or unambiguous. The public API exposes a number of
// name-based maps for client convenience.
//
-// The ir/irutil package provides various utilities that depend only
+// The ssa/ssautil package provides various utilities that depend only
// on the public API of this package.
//
// TODO(adonovan): Consider the exceptional control-flow implications
@@ -124,6 +120,6 @@
// TODO(adonovan): write a how-to document for all the various cases
// of trying to determine corresponding elements across the four
// domains of source locations, ast.Nodes, types.Objects,
-// ir.Values/Instructions.
+// ssa.Values/Instructions.
//
-package ir
+package ssa // import "honnef.co/go/tools/ssa"
diff --git a/vendor/honnef.co/go/tools/ir/dom.go b/vendor/honnef.co/go/tools/ssa/dom.go
index 08c147df9..a036be87c 100644
--- a/vendor/honnef.co/go/tools/ir/dom.go
+++ b/vendor/honnef.co/go/tools/ssa/dom.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines algorithms related to dominance.
@@ -20,7 +20,6 @@ package ir
import (
"bytes"
"fmt"
- "io"
"math/big"
"os"
"sort"
@@ -28,7 +27,8 @@ import (
// Idom returns the block that immediately dominates b:
// its parent in the dominator tree, if any.
-// The entry node (b.Index==0) does not have a parent.
+// Neither the entry node (b.Index==0) nor recover node
+// (b==b.Parent().Recover()) have a parent.
//
func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom }
@@ -66,211 +66,144 @@ type domInfo struct {
pre, post int32 // pre- and post-order numbering within domtree
}
+// ltState holds the working state for Lengauer-Tarjan algorithm
+// (during which domInfo.pre is repurposed for CFG DFS preorder number).
+type ltState struct {
+ // Each slice is indexed by b.Index.
+ sdom []*BasicBlock // b's semidominator
+ parent []*BasicBlock // b's parent in DFS traversal of CFG
+ ancestor []*BasicBlock // b's ancestor with least sdom
+}
+
+// dfs implements the depth-first search part of the LT algorithm.
+func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 {
+ preorder[i] = v
+ v.dom.pre = i // For now: DFS preorder of spanning tree of CFG
+ i++
+ lt.sdom[v.Index] = v
+ lt.link(nil, v)
+ for _, w := range v.Succs {
+ if lt.sdom[w.Index] == nil {
+ lt.parent[w.Index] = v
+ i = lt.dfs(w, i, preorder)
+ }
+ }
+ return i
+}
+
+// eval implements the EVAL part of the LT algorithm.
+func (lt *ltState) eval(v *BasicBlock) *BasicBlock {
+ // TODO(adonovan): opt: do path compression per simple LT.
+ u := v
+ for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] {
+ if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre {
+ u = v
+ }
+ }
+ return u
+}
+
+// link implements the LINK part of the LT algorithm.
+func (lt *ltState) link(v, w *BasicBlock) {
+ lt.ancestor[w.Index] = v
+}
+
// buildDomTree computes the dominator tree of f using the LT algorithm.
// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
//
-func buildDomTree(fn *Function) {
+func buildDomTree(f *Function) {
// The step numbers refer to the original LT paper; the
// reordering is due to Georgiadis.
// Clear any previous domInfo.
- for _, b := range fn.Blocks {
+ for _, b := range f.Blocks {
b.dom = domInfo{}
}
- idoms := make([]*BasicBlock, len(fn.Blocks))
-
- order := make([]*BasicBlock, 0, len(fn.Blocks))
- seen := fn.blockset(0)
- var dfs func(b *BasicBlock)
- dfs = func(b *BasicBlock) {
- if !seen.Add(b) {
- return
- }
- for _, succ := range b.Succs {
- dfs(succ)
- }
- if fn.fakeExits.Has(b) {
- dfs(fn.Exit)
- }
- order = append(order, b)
- b.post = len(order) - 1
+ n := len(f.Blocks)
+ // Allocate space for 5 contiguous [n]*BasicBlock arrays:
+ // sdom, parent, ancestor, preorder, buckets.
+ space := make([]*BasicBlock, 5*n)
+ lt := ltState{
+ sdom: space[0:n],
+ parent: space[n : 2*n],
+ ancestor: space[2*n : 3*n],
}
- dfs(fn.Blocks[0])
- for i := 0; i < len(order)/2; i++ {
- o := len(order) - i - 1
- order[i], order[o] = order[o], order[i]
+ // Step 1. Number vertices by depth-first preorder.
+ preorder := space[3*n : 4*n]
+ root := f.Blocks[0]
+ prenum := lt.dfs(root, 0, preorder)
+ recover := f.Recover
+ if recover != nil {
+ lt.dfs(recover, prenum, preorder)
}
- idoms[fn.Blocks[0].Index] = fn.Blocks[0]
- changed := true
- for changed {
- changed = false
- // iterate over all nodes in reverse postorder, except for the
- // entry node
- for _, b := range order[1:] {
- var newIdom *BasicBlock
- do := func(p *BasicBlock) {
- if idoms[p.Index] == nil {
- return
- }
- if newIdom == nil {
- newIdom = p
- } else {
- finger1 := p
- finger2 := newIdom
- for finger1 != finger2 {
- for finger1.post < finger2.post {
- finger1 = idoms[finger1.Index]
- }
- for finger2.post < finger1.post {
- finger2 = idoms[finger2.Index]
- }
- }
- newIdom = finger1
- }
- }
- for _, p := range b.Preds {
- do(p)
- }
- if b == fn.Exit {
- for _, p := range fn.Blocks {
- if fn.fakeExits.Has(p) {
- do(p)
- }
- }
- }
+ buckets := space[4*n : 5*n]
+ copy(buckets, preorder)
- if idoms[b.Index] != newIdom {
- idoms[b.Index] = newIdom
- changed = true
+ // In reverse preorder...
+ for i := int32(n) - 1; i > 0; i-- {
+ w := preorder[i]
+
+ // Step 3. Implicitly define the immediate dominator of each node.
+ for v := buckets[i]; v != w; v = buckets[v.dom.pre] {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < i {
+ v.dom.idom = u
+ } else {
+ v.dom.idom = w
}
}
- }
- for i, b := range idoms {
- fn.Blocks[i].dom.idom = b
- if b == nil {
- // malformed CFG
- continue
- }
- if i == b.Index {
- continue
+ // Step 2. Compute the semidominators of all nodes.
+ lt.sdom[w.Index] = lt.parent[w.Index]
+ for _, v := range w.Preds {
+ u := lt.eval(v)
+ if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre {
+ lt.sdom[w.Index] = lt.sdom[u.Index]
+ }
}
- b.dom.children = append(b.dom.children, fn.Blocks[i])
- }
-
- numberDomTree(fn.Blocks[0], 0, 0)
- // printDomTreeDot(os.Stderr, fn) // debugging
- // printDomTreeText(os.Stderr, root, 0) // debugging
-
- if fn.Prog.mode&SanityCheckFunctions != 0 {
- sanityCheckDomTree(fn)
- }
-}
+ lt.link(lt.parent[w.Index], w)
-// buildPostDomTree is like buildDomTree, but builds the post-dominator tree instead.
-func buildPostDomTree(fn *Function) {
- // The step numbers refer to the original LT paper; the
- // reordering is due to Georgiadis.
-
- // Clear any previous domInfo.
- for _, b := range fn.Blocks {
- b.pdom = domInfo{}
- }
-
- idoms := make([]*BasicBlock, len(fn.Blocks))
-
- order := make([]*BasicBlock, 0, len(fn.Blocks))
- seen := fn.blockset(0)
- var dfs func(b *BasicBlock)
- dfs = func(b *BasicBlock) {
- if !seen.Add(b) {
- return
- }
- for _, pred := range b.Preds {
- dfs(pred)
- }
- if b == fn.Exit {
- for _, p := range fn.Blocks {
- if fn.fakeExits.Has(p) {
- dfs(p)
- }
- }
+ if lt.parent[w.Index] == lt.sdom[w.Index] {
+ w.dom.idom = lt.parent[w.Index]
+ } else {
+ buckets[i] = buckets[lt.sdom[w.Index].dom.pre]
+ buckets[lt.sdom[w.Index].dom.pre] = w
}
- order = append(order, b)
- b.post = len(order) - 1
}
- dfs(fn.Exit)
- for i := 0; i < len(order)/2; i++ {
- o := len(order) - i - 1
- order[i], order[o] = order[o], order[i]
+ // The final 'Step 3' is now outside the loop.
+ for v := buckets[0]; v != root; v = buckets[v.dom.pre] {
+ v.dom.idom = root
}
- idoms[fn.Exit.Index] = fn.Exit
- changed := true
- for changed {
- changed = false
- // iterate over all nodes in reverse postorder, except for the
- // exit node
- for _, b := range order[1:] {
- var newIdom *BasicBlock
- do := func(p *BasicBlock) {
- if idoms[p.Index] == nil {
- return
- }
- if newIdom == nil {
- newIdom = p
- } else {
- finger1 := p
- finger2 := newIdom
- for finger1 != finger2 {
- for finger1.post < finger2.post {
- finger1 = idoms[finger1.Index]
- }
- for finger2.post < finger1.post {
- finger2 = idoms[finger2.Index]
- }
- }
- newIdom = finger1
- }
- }
- for _, p := range b.Succs {
- do(p)
- }
- if fn.fakeExits.Has(b) {
- do(fn.Exit)
- }
-
- if idoms[b.Index] != newIdom {
- idoms[b.Index] = newIdom
- changed = true
+ // Step 4. Explicitly define the immediate dominator of each
+ // node, in preorder.
+ for _, w := range preorder[1:] {
+ if w == root || w == recover {
+ w.dom.idom = nil
+ } else {
+ if w.dom.idom != lt.sdom[w.Index] {
+ w.dom.idom = w.dom.idom.dom.idom
}
+ // Calculate Children relation as inverse of Idom.
+ w.dom.idom.dom.children = append(w.dom.idom.dom.children, w)
}
}
- for i, b := range idoms {
- fn.Blocks[i].pdom.idom = b
- if b == nil {
- // malformed CFG
- continue
- }
- if i == b.Index {
- continue
- }
- b.pdom.children = append(b.pdom.children, fn.Blocks[i])
+ pre, post := numberDomTree(root, 0, 0)
+ if recover != nil {
+ numberDomTree(recover, pre, post)
}
- numberPostDomTree(fn.Exit, 0, 0)
-
- // printPostDomTreeDot(os.Stderr, fn) // debugging
- // printPostDomTreeText(os.Stderr, fn.Exit, 0) // debugging
+ // printDomTreeDot(os.Stderr, f) // debugging
+ // printDomTreeText(os.Stderr, root, 0) // debugging
- if fn.Prog.mode&SanityCheckFunctions != 0 { // XXX
- sanityCheckDomTree(fn) // XXX
+ if f.Prog.mode&SanityCheckFunctions != 0 {
+ sanityCheckDomTree(f)
}
}
@@ -289,21 +222,6 @@ func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
return pre, post
}
-// numberPostDomTree sets the pre- and post-order numbers of a depth-first
-// traversal of the post-dominator tree rooted at v. These are used to
-// answer post-dominance queries in constant time.
-//
-func numberPostDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
- v.pdom.pre = pre
- pre++
- for _, child := range v.pdom.children {
- pre, post = numberPostDomTree(child, pre, post)
- }
- v.pdom.post = post
- post++
- return pre, post
-}
-
// Testing utilities ----------------------------------------
// sanityCheckDomTree checks the correctness of the dominator tree
@@ -325,8 +243,8 @@ func sanityCheckDomTree(f *Function) {
all.Set(one).Lsh(&all, uint(n)).Sub(&all, one)
// Initialization.
- for i := range f.Blocks {
- if i == 0 {
+ for i, b := range f.Blocks {
+ if i == 0 || b == f.Recover {
// A root is dominated only by itself.
D[i].SetBit(&D[0], 0, 1)
} else {
@@ -340,7 +258,7 @@ func sanityCheckDomTree(f *Function) {
for changed := true; changed; {
changed = false
for i, b := range f.Blocks {
- if i == 0 {
+ if i == 0 || b == f.Recover {
continue
}
// Compute intersection across predecessors.
@@ -349,13 +267,6 @@ func sanityCheckDomTree(f *Function) {
for _, pred := range b.Preds {
x.And(&x, &D[pred.Index])
}
- if b == f.Exit {
- for _, p := range f.Blocks {
- if f.fakeExits.Has(p) {
- x.And(&x, &D[p.Index])
- }
- }
- }
x.SetBit(&x, i, 1) // a block always dominates itself.
if D[i].Cmp(&x) != 0 {
D[i].Set(&x)
@@ -365,10 +276,14 @@ func sanityCheckDomTree(f *Function) {
}
// Check the entire relation. O(n^2).
+ // The Recover block (if any) must be treated specially so we skip it.
ok := true
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
b, c := f.Blocks[i], f.Blocks[j]
+ if c == f.Recover {
+ continue
+ }
actual := b.Dominates(c)
expected := D[j].Bit(i) == 1
if actual != expected {
@@ -406,7 +321,7 @@ func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
// (.dot) format.
//lint:ignore U1000 used during debugging
-func printDomTreeDot(buf io.Writer, f *Function) {
+func printDomTreeDot(buf *bytes.Buffer, f *Function) {
fmt.Fprintln(buf, "//", f)
fmt.Fprintln(buf, "digraph domtree {")
for i, b := range f.Blocks {
@@ -426,36 +341,3 @@ func printDomTreeDot(buf io.Writer, f *Function) {
}
fmt.Fprintln(buf, "}")
}
-
-// printDomTree prints the dominator tree as text, using indentation.
-//lint:ignore U1000 used during debugging
-func printPostDomTreeText(buf io.Writer, v *BasicBlock, indent int) {
- fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
- for _, child := range v.pdom.children {
- printPostDomTreeText(buf, child, indent+1)
- }
-}
-
-// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
-// (.dot) format.
-//lint:ignore U1000 used during debugging
-func printPostDomTreeDot(buf io.Writer, f *Function) {
- fmt.Fprintln(buf, "//", f)
- fmt.Fprintln(buf, "digraph pdomtree {")
- for _, b := range f.Blocks {
- v := b.pdom
- fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post)
- // TODO(adonovan): improve appearance of edges
- // belonging to both dominator tree and CFG.
-
- // Dominator tree edge.
- if b != f.Exit {
- fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.pdom.pre, v.pre)
- }
- // CFG edges.
- for _, pred := range b.Preds {
- fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.pdom.pre, v.pre)
- }
- }
- fmt.Fprintln(buf, "}")
-}
diff --git a/vendor/honnef.co/go/tools/ir/emit.go b/vendor/honnef.co/go/tools/ssa/emit.go
index 5fa137af9..6bf9ec32d 100644
--- a/vendor/honnef.co/go/tools/ir/emit.go
+++ b/vendor/honnef.co/go/tools/ssa/emit.go
@@ -2,14 +2,13 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
-// Helpers for emitting IR instructions.
+// Helpers for emitting SSA instructions.
import (
"fmt"
"go/ast"
- "go/constant"
"go/token"
"go/types"
)
@@ -17,32 +16,24 @@ import (
// emitNew emits to f a new (heap Alloc) instruction allocating an
// object of type typ. pos is the optional source location.
//
-func emitNew(f *Function, typ types.Type, source ast.Node) *Alloc {
+func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc {
v := &Alloc{Heap: true}
v.setType(types.NewPointer(typ))
- f.emit(v, source)
+ v.setPos(pos)
+ f.emit(v)
return v
}
// emitLoad emits to f an instruction to load the address addr into a
// new temporary, and returns the value so defined.
//
-func emitLoad(f *Function, addr Value, source ast.Node) *Load {
- v := &Load{X: addr}
+func emitLoad(f *Function, addr Value) *UnOp {
+ v := &UnOp{Op: token.MUL, X: addr}
v.setType(deref(addr.Type()))
- f.emit(v, source)
+ f.emit(v)
return v
}
-func emitRecv(f *Function, ch Value, commaOk bool, typ types.Type, source ast.Node) Value {
- recv := &Recv{
- Chan: ch,
- CommaOk: commaOk,
- }
- recv.setType(typ)
- return f.emit(recv, source)
-}
-
// emitDebugRef emits to f a DebugRef pseudo-instruction associating
// expression e with value v.
//
@@ -70,7 +61,7 @@ func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
Expr: e,
IsAddr: isAddr,
object: obj,
- }, nil)
+ })
}
// emitArith emits to f code to compute the binary operation op(x, y)
@@ -78,19 +69,19 @@ func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
// (Use emitCompare() for comparisons and Builder.logicalBinop() for
// non-eager operations.)
//
-func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast.Node) Value {
+func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value {
switch op {
case token.SHL, token.SHR:
- x = emitConv(f, x, t, source)
+ x = emitConv(f, x, t)
// y may be signed or an 'untyped' constant.
// TODO(adonovan): whence signed values?
if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 {
- y = emitConv(f, y, types.Typ[types.Uint64], source)
+ y = emitConv(f, y, types.Typ[types.Uint64])
}
case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
- x = emitConv(f, x, t, source)
- y = emitConv(f, y, t, source)
+ x = emitConv(f, x, t)
+ y = emitConv(f, y, t)
default:
panic("illegal op in emitArith: " + op.String())
@@ -101,14 +92,15 @@ func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast
X: x,
Y: y,
}
+ v.setPos(pos)
v.setType(t)
- return f.emit(v, source)
+ return f.emit(v)
}
// emitCompare emits to f code compute the boolean result of
// comparison comparison 'x op y'.
//
-func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value {
+func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
xt := x.Type().Underlying()
yt := y.Type().Underlying()
@@ -119,7 +111,7 @@ func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value
// if e==true { ... }
// even in the case when e's type is an interface.
// TODO(adonovan): opt: generalise to x==true, false!=y, etc.
- if x, ok := x.(*Const); ok && op == token.EQL && x.Value != nil && x.Value.Kind() == constant.Bool && constant.BoolVal(x.Value) {
+ if x == vTrue && op == token.EQL {
if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 {
return y
}
@@ -128,13 +120,13 @@ func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value
if types.Identical(xt, yt) {
// no conversion necessary
} else if _, ok := xt.(*types.Interface); ok {
- y = emitConv(f, y, x.Type(), source)
+ y = emitConv(f, y, x.Type())
} else if _, ok := yt.(*types.Interface); ok {
- x = emitConv(f, x, y.Type(), source)
+ x = emitConv(f, x, y.Type())
} else if _, ok := x.(*Const); ok {
- x = emitConv(f, x, y.Type(), source)
+ x = emitConv(f, x, y.Type())
} else if _, ok := y.(*Const); ok {
- y = emitConv(f, y, x.Type(), source)
+ y = emitConv(f, y, x.Type())
//lint:ignore SA9003 no-op
} else {
// other cases, e.g. channels. No-op.
@@ -145,8 +137,9 @@ func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value
X: x,
Y: y,
}
+ v.setPos(pos)
v.setType(tBool)
- return f.emit(v, source)
+ return f.emit(v)
}
// isValuePreserving returns true if a conversion from ut_src to
@@ -178,7 +171,7 @@ func isValuePreserving(ut_src, ut_dst types.Type) bool {
// by language assignability rules in assignments, parameter passing,
// etc. Conversions cannot fail dynamically.
//
-func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value {
+func emitConv(f *Function, val Value, typ types.Type) Value {
t_src := val.Type()
// Identical types? Conversion is a no-op.
@@ -193,7 +186,7 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value {
if isValuePreserving(ut_src, ut_dst) {
c := &ChangeType{X: val}
c.setType(typ)
- return f.emit(c, source)
+ return f.emit(c)
}
// Conversion to, or construction of a value of, an interface type?
@@ -202,23 +195,23 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value {
if _, ok := ut_src.(*types.Interface); ok {
c := &ChangeInterface{X: val}
c.setType(typ)
- return f.emit(c, source)
+ return f.emit(c)
}
// Untyped nil constant? Return interface-typed nil constant.
if ut_src == tUntypedNil {
- return emitConst(f, nilConst(typ))
+ return nilConst(typ)
}
// Convert (non-nil) "untyped" literals to their default type.
if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 {
- val = emitConv(f, val, types.Default(ut_src), source)
+ val = emitConv(f, val, DefaultType(ut_src))
}
f.Pkg.Prog.needMethodsOf(val.Type())
mi := &MakeInterface{X: val}
mi.setType(typ)
- return f.emit(mi, source)
+ return f.emit(mi)
}
// Conversion of a compile-time constant value?
@@ -229,7 +222,7 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value {
// constant of the destination type and
// (initially) the same abstract value.
// We don't truncate the value yet.
- return emitConst(f, NewConst(c.Value, typ))
+ return NewConst(c.Value, typ)
}
// We're converting from constant to non-constant type,
@@ -244,7 +237,7 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value {
if ok1 || ok2 {
c := &Convert{X: val}
c.setType(typ)
- return f.emit(c, source)
+ return f.emit(c)
}
panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ))
@@ -253,75 +246,72 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value {
// emitStore emits to f an instruction to store value val at location
// addr, applying implicit conversions as required by assignability rules.
//
-func emitStore(f *Function, addr, val Value, source ast.Node) *Store {
+func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
s := &Store{
Addr: addr,
- Val: emitConv(f, val, deref(addr.Type()), source),
+ Val: emitConv(f, val, deref(addr.Type())),
+ pos: pos,
}
- // make sure we call getMem after the call to emitConv, which may
- // itself update the memory state
- f.emit(s, source)
+ f.emit(s)
return s
}
// emitJump emits to f a jump to target, and updates the control-flow graph.
// Postcondition: f.currentBlock is nil.
//
-func emitJump(f *Function, target *BasicBlock, source ast.Node) *Jump {
+func emitJump(f *Function, target *BasicBlock) {
b := f.currentBlock
- j := new(Jump)
- b.emit(j, source)
+ b.emit(new(Jump))
addEdge(b, target)
f.currentBlock = nil
- return j
}
// emitIf emits to f a conditional jump to tblock or fblock based on
// cond, and updates the control-flow graph.
// Postcondition: f.currentBlock is nil.
//
-func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock, source ast.Node) *If {
+func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
b := f.currentBlock
- stmt := &If{Cond: cond}
- b.emit(stmt, source)
+ b.emit(&If{Cond: cond})
addEdge(b, tblock)
addEdge(b, fblock)
f.currentBlock = nil
- return stmt
}
// emitExtract emits to f an instruction to extract the index'th
// component of tuple. It returns the extracted value.
//
-func emitExtract(f *Function, tuple Value, index int, source ast.Node) Value {
+func emitExtract(f *Function, tuple Value, index int) Value {
e := &Extract{Tuple: tuple, Index: index}
e.setType(tuple.Type().(*types.Tuple).At(index).Type())
- return f.emit(e, source)
+ return f.emit(e)
}
// emitTypeAssert emits to f a type assertion value := x.(t) and
// returns the value. x.Type() must be an interface.
//
-func emitTypeAssert(f *Function, x Value, t types.Type, source ast.Node) Value {
+func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value {
a := &TypeAssert{X: x, AssertedType: t}
+ a.setPos(pos)
a.setType(t)
- return f.emit(a, source)
+ return f.emit(a)
}
// emitTypeTest emits to f a type test value,ok := x.(t) and returns
// a (value, ok) tuple. x.Type() must be an interface.
//
-func emitTypeTest(f *Function, x Value, t types.Type, source ast.Node) Value {
+func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
a := &TypeAssert{
X: x,
AssertedType: t,
CommaOk: true,
}
+ a.setPos(pos)
a.setType(types.NewTuple(
newVar("value", t),
varOk,
))
- return f.emit(a, source)
+ return f.emit(a)
}
// emitTailCall emits to f a function call in tail position. The
@@ -330,7 +320,7 @@ func emitTypeTest(f *Function, x Value, t types.Type, source ast.Node) Value {
// Precondition: f does/will not use deferred procedure calls.
// Postcondition: f.currentBlock is nil.
//
-func emitTailCall(f *Function, call *Call, source ast.Node) {
+func emitTailCall(f *Function, call *Call) {
tresults := f.Signature.Results()
nr := tresults.Len()
if nr == 1 {
@@ -338,7 +328,7 @@ func emitTailCall(f *Function, call *Call, source ast.Node) {
} else {
call.typ = tresults
}
- tuple := f.emit(call, source)
+ tuple := f.emit(call)
var ret Return
switch nr {
case 0:
@@ -347,7 +337,7 @@ func emitTailCall(f *Function, call *Call, source ast.Node) {
ret.Results = []Value{tuple}
default:
for i := 0; i < nr; i++ {
- v := emitExtract(f, tuple, i, source)
+ v := emitExtract(f, tuple, i)
// TODO(adonovan): in principle, this is required:
// v = emitConv(f, o.Type, f.Signature.Results[i].Type)
// but in practice emitTailCall is only used when
@@ -355,11 +345,7 @@ func emitTailCall(f *Function, call *Call, source ast.Node) {
ret.Results = append(ret.Results, v)
}
}
-
- f.Exit = f.newBasicBlock("exit")
- emitJump(f, f.Exit, source)
- f.currentBlock = f.Exit
- f.emit(&ret, source)
+ f.emit(&ret)
f.currentBlock = nil
}
@@ -371,7 +357,7 @@ func emitTailCall(f *Function, call *Call, source ast.Node) {
// a field; if it is the value of a struct, the result will be the
// value of a field.
//
-func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node) Value {
+func emitImplicitSelections(f *Function, v Value, indices []int) Value {
for _, index := range indices {
fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
@@ -381,10 +367,10 @@ func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node
Field: index,
}
instr.setType(types.NewPointer(fld.Type()))
- v = f.emit(instr, source)
+ v = f.emit(instr)
// Load the field's value iff indirectly embedded.
if isPointer(fld.Type()) {
- v = emitLoad(f, v, source)
+ v = emitLoad(f, v)
}
} else {
instr := &Field{
@@ -392,7 +378,7 @@ func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node
Field: index,
}
instr.setType(fld.Type())
- v = f.emit(instr, source)
+ v = f.emit(instr)
}
}
return v
@@ -412,21 +398,21 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.
X: v,
Field: index,
}
- instr.setSource(id)
+ instr.setPos(id.Pos())
instr.setType(types.NewPointer(fld.Type()))
- v = f.emit(instr, id)
+ v = f.emit(instr)
// Load the field's value iff we don't want its address.
if !wantAddr {
- v = emitLoad(f, v, id)
+ v = emitLoad(f, v)
}
} else {
instr := &Field{
X: v,
Field: index,
}
- instr.setSource(id)
+ instr.setPos(id.Pos())
instr.setType(fld.Type())
- v = f.emit(instr, id)
+ v = f.emit(instr)
}
emitDebugRef(f, id, v, wantAddr)
return v
@@ -435,16 +421,49 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.
// zeroValue emits to f code to produce a zero value of type t,
// and returns it.
//
-func zeroValue(f *Function, t types.Type, source ast.Node) Value {
+func zeroValue(f *Function, t types.Type) Value {
switch t.Underlying().(type) {
case *types.Struct, *types.Array:
- return emitLoad(f, f.addLocal(t, source), source)
+ return emitLoad(f, f.addLocal(t, token.NoPos))
default:
- return emitConst(f, zeroConst(t))
+ return zeroConst(t)
}
}
-func emitConst(f *Function, c *Const) *Const {
- f.consts = append(f.consts, c)
- return c
+// createRecoverBlock emits to f a block of code to return after a
+// recovered panic, and sets f.Recover to it.
+//
+// If f's result parameters are named, the code loads and returns
+// their current values, otherwise it returns the zero values of their
+// type.
+//
+// Idempotent.
+//
+func createRecoverBlock(f *Function) {
+ if f.Recover != nil {
+ return // already created
+ }
+ saved := f.currentBlock
+
+ f.Recover = f.newBasicBlock("recover")
+ f.currentBlock = f.Recover
+
+ var results []Value
+ if f.namedResults != nil {
+ // Reload NRPs to form value tuple.
+ for _, r := range f.namedResults {
+ results = append(results, emitLoad(f, r))
+ }
+ } else {
+ R := f.Signature.Results()
+ for i, n := 0, R.Len(); i < n; i++ {
+ T := R.At(i).Type()
+
+ // Return zero value of each result type.
+ results = append(results, zeroValue(f, T))
+ }
+ }
+ f.emit(&Return{Results: results})
+
+ f.currentBlock = saved
}
diff --git a/vendor/honnef.co/go/tools/ir/func.go b/vendor/honnef.co/go/tools/ssa/func.go
index 978849420..222eea641 100644
--- a/vendor/honnef.co/go/tools/ir/func.go
+++ b/vendor/honnef.co/go/tools/ssa/func.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file implements the Function and BasicBlock types.
@@ -10,8 +10,6 @@ import (
"bytes"
"fmt"
"go/ast"
- "go/constant"
- "go/format"
"go/token"
"go/types"
"io"
@@ -25,29 +23,6 @@ func addEdge(from, to *BasicBlock) {
to.Preds = append(to.Preds, from)
}
-// Control returns the last instruction in the block.
-func (b *BasicBlock) Control() Instruction {
- if len(b.Instrs) == 0 {
- return nil
- }
- return b.Instrs[len(b.Instrs)-1]
-}
-
-// SIgmaFor returns the sigma node for v coming from pred.
-func (b *BasicBlock) SigmaFor(v Value, pred *BasicBlock) *Sigma {
- for _, instr := range b.Instrs {
- sigma, ok := instr.(*Sigma)
- if !ok {
- // no more sigmas
- return nil
- }
- if sigma.From == pred && sigma.X == v {
- return sigma
- }
- }
- return nil
-}
-
// Parent returns the function that contains block b.
func (b *BasicBlock) Parent() *Function { return b.parent }
@@ -61,8 +36,7 @@ func (b *BasicBlock) String() string {
// emit appends an instruction to the current basic block.
// If the instruction defines a Value, it is returned.
//
-func (b *BasicBlock) emit(i Instruction, source ast.Node) Value {
- i.setSource(source)
+func (b *BasicBlock) emit(i Instruction) Value {
i.setBlock(b)
b.Instrs = append(b.Instrs, i)
v, _ := i.(Value)
@@ -80,16 +54,6 @@ func (b *BasicBlock) predIndex(c *BasicBlock) int {
panic(fmt.Sprintf("no edge %s -> %s", c, b))
}
-// succIndex returns the i such that b.Succs[i] == c or -1 if there is none.
-func (b *BasicBlock) succIndex(c *BasicBlock) int {
- for i, succ := range b.Succs {
- if succ == c {
- return i
- }
- }
- return -1
-}
-
// hasPhi returns true if b.Instrs contains φ-nodes.
func (b *BasicBlock) hasPhi() bool {
_, ok := b.Instrs[0].(*Phi)
@@ -132,6 +96,10 @@ func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
}
}
+func (b *BasicBlock) RemovePred(p *BasicBlock) {
+ b.removePred(p)
+}
+
// removePred removes all occurrences of p in b's
// predecessor list and φ-nodes.
// Ordinarily there should be at most one.
@@ -205,33 +173,23 @@ func (f *Function) labelledBlock(label *ast.Ident) *lblock {
// addParam adds a (non-escaping) parameter to f.Params of the
// specified name, type and source position.
//
-func (f *Function) addParam(name string, typ types.Type, source ast.Node) *Parameter {
- var b *BasicBlock
- if len(f.Blocks) > 0 {
- b = f.Blocks[0]
- }
+func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter {
v := &Parameter{
- name: name,
+ name: name,
+ typ: typ,
+ pos: pos,
+ parent: f,
}
- v.setBlock(b)
- v.setType(typ)
- v.setSource(source)
f.Params = append(f.Params, v)
- if b != nil {
- // There may be no blocks if this function has no body. We
- // still create params, but aren't interested in the
- // instruction.
- f.Blocks[0].Instrs = append(f.Blocks[0].Instrs, v)
- }
return v
}
-func (f *Function) addParamObj(obj types.Object, source ast.Node) *Parameter {
+func (f *Function) addParamObj(obj types.Object) *Parameter {
name := obj.Name()
if name == "" {
name = fmt.Sprintf("arg%d", len(f.Params))
}
- param := f.addParam(name, obj.Type(), source)
+ param := f.addParam(name, obj.Type(), obj.Pos())
param.object = obj
return param
}
@@ -240,61 +198,25 @@ func (f *Function) addParamObj(obj types.Object, source ast.Node) *Parameter {
// stack; the function body will load/store the spilled location.
// Subsequent lifting will eliminate spills where possible.
//
-func (f *Function) addSpilledParam(obj types.Object, source ast.Node) {
- param := f.addParamObj(obj, source)
+func (f *Function) addSpilledParam(obj types.Object) {
+ param := f.addParamObj(obj)
spill := &Alloc{Comment: obj.Name()}
spill.setType(types.NewPointer(obj.Type()))
- spill.source = source
+ spill.setPos(obj.Pos())
f.objects[obj] = spill
f.Locals = append(f.Locals, spill)
- f.emit(spill, source)
- emitStore(f, spill, param, source)
- // f.emit(&Store{Addr: spill, Val: param})
+ f.emit(spill)
+ f.emit(&Store{Addr: spill, Val: param})
}
-// startBody initializes the function prior to generating IR code for its body.
+// startBody initializes the function prior to generating SSA code for its body.
// Precondition: f.Type() already set.
//
func (f *Function) startBody() {
- entry := f.newBasicBlock("entry")
- f.currentBlock = entry
+ f.currentBlock = f.newBasicBlock("entry")
f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init
}
-func (f *Function) blockset(i int) *BlockSet {
- bs := &f.blocksets[i]
- if len(bs.values) != len(f.Blocks) {
- if cap(bs.values) >= len(f.Blocks) {
- bs.values = bs.values[:len(f.Blocks)]
- bs.Clear()
- } else {
- bs.values = make([]bool, len(f.Blocks))
- }
- } else {
- bs.Clear()
- }
- return bs
-}
-
-func (f *Function) exitBlock() {
- old := f.currentBlock
-
- f.Exit = f.newBasicBlock("exit")
- f.currentBlock = f.Exit
-
- ret := f.results()
- results := make([]Value, len(ret))
- // Run function calls deferred in this
- // function when explicitly returning from it.
- f.emit(new(RunDefers), nil)
- for i, r := range ret {
- results[i] = emitLoad(f, r, nil)
- }
-
- f.emit(&Return{Results: results}, nil)
- f.currentBlock = old
-}
-
// createSyntacticParams populates f.Params and generates code (spills
// and named result locals) for all the parameters declared in the
// syntax. In addition it populates the f.objects mapping.
@@ -309,11 +231,11 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func
if recv != nil {
for _, field := range recv.List {
for _, n := range field.Names {
- f.addSpilledParam(f.Pkg.info.Defs[n], n)
+ f.addSpilledParam(f.Pkg.info.Defs[n])
}
// Anonymous receiver? No need to spill.
if field.Names == nil {
- f.addParamObj(f.Signature.Recv(), field)
+ f.addParamObj(f.Signature.Recv())
}
}
}
@@ -323,11 +245,11 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func
n := len(f.Params) // 1 if has recv, 0 otherwise
for _, field := range functype.Params.List {
for _, n := range field.Names {
- f.addSpilledParam(f.Pkg.info.Defs[n], n)
+ f.addSpilledParam(f.Pkg.info.Defs[n])
}
// Anonymous parameter? No need to spill.
if field.Names == nil {
- f.addParamObj(f.Signature.Params().At(len(f.Params)-n), field)
+ f.addParamObj(f.Signature.Params().At(len(f.Params) - n))
}
}
}
@@ -340,28 +262,24 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func
f.namedResults = append(f.namedResults, f.addLocalForIdent(n))
}
}
-
- if len(f.namedResults) == 0 {
- sig := f.Signature.Results()
- for i := 0; i < sig.Len(); i++ {
- // XXX position information
- v := f.addLocal(sig.At(i).Type(), nil)
- v.Comment = fmt.Sprintf("ret.%d", i)
- f.implicitResults = append(f.implicitResults, v)
- }
- }
}
}
-func numberNodes(f *Function) {
- var base ID
+// numberRegisters assigns numbers to all SSA registers
+// (value-defining Instructions) in f, to aid debugging.
+// (Non-Instruction Values are named at construction.)
+//
+func numberRegisters(f *Function) {
+ v := 0
for _, b := range f.Blocks {
for _, instr := range b.Instrs {
- if instr == nil {
- continue
+ switch instr.(type) {
+ case Value:
+ instr.(interface {
+ setNum(int)
+ }).setNum(v)
+ v++
}
- base++
- instr.setID(base)
}
}
}
@@ -385,197 +303,111 @@ func buildReferrers(f *Function) {
}
}
-func (f *Function) emitConsts() {
- if len(f.Blocks) == 0 {
- f.consts = nil
- return
- }
+// finishBody() finalizes the function after SSA code generation of its body.
+func (f *Function) finishBody() {
+ f.objects = nil
+ f.currentBlock = nil
+ f.lblocks = nil
- // TODO(dh): our deduplication only works on booleans and
- // integers. other constants are represented as pointers to
- // things.
- if len(f.consts) == 0 {
- return
- } else if len(f.consts) <= 32 {
- f.emitConstsFew()
- } else {
- f.emitConstsMany()
+ // Don't pin the AST in memory (except in debug mode).
+ if n := f.syntax; n != nil && !f.debugInfo() {
+ f.syntax = extentNode{n.Pos(), n.End()}
}
-}
-func (f *Function) emitConstsFew() {
- dedup := make([]*Const, 0, 32)
- for _, c := range f.consts {
- if len(*c.Referrers()) == 0 {
- continue
- }
- found := false
- for _, d := range dedup {
- if c.typ == d.typ && c.Value == d.Value {
- replaceAll(c, d)
- found = true
- break
- }
- }
- if !found {
- dedup = append(dedup, c)
+ // Remove from f.Locals any Allocs that escape to the heap.
+ j := 0
+ for _, l := range f.Locals {
+ if !l.Heap {
+ f.Locals[j] = l
+ j++
}
}
-
- instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(dedup))
- for i, c := range dedup {
- instrs[i] = c
- c.setBlock(f.Blocks[0])
- }
- copy(instrs[len(dedup):], f.Blocks[0].Instrs)
- f.Blocks[0].Instrs = instrs
- f.consts = nil
-}
-
-func (f *Function) emitConstsMany() {
- type constKey struct {
- typ types.Type
- value constant.Value
+ // Nil out f.Locals[j:] to aid GC.
+ for i := j; i < len(f.Locals); i++ {
+ f.Locals[i] = nil
}
+ f.Locals = f.Locals[:j]
- m := make(map[constKey]Value, len(f.consts))
- areNil := 0
- for i, c := range f.consts {
- if len(*c.Referrers()) == 0 {
- f.consts[i] = nil
- areNil++
+ // comma-ok receiving from a time.Tick channel will never return
+ // ok == false, so any branching on the value of ok can be
+ // replaced with an unconditional jump. This will primarily match
+ // `for range time.Tick(x)` loops, but it can also match
+ // user-written code.
+ for _, block := range f.Blocks {
+ if len(block.Instrs) < 3 {
continue
}
-
- k := constKey{
- typ: c.typ,
- value: c.Value,
+ if len(block.Succs) != 2 {
+ continue
}
- if dup, ok := m[k]; !ok {
- m[k] = c
- } else {
- f.consts[i] = nil
- areNil++
- replaceAll(c, dup)
+ var instrs []*Instruction
+ for i, ins := range block.Instrs {
+ if _, ok := ins.(*DebugRef); ok {
+ continue
+ }
+ instrs = append(instrs, &block.Instrs[i])
}
- }
- instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(f.consts)-areNil)
- i := 0
- for _, c := range f.consts {
- if c != nil {
- instrs[i] = c
- c.setBlock(f.Blocks[0])
- i++
- }
- }
- copy(instrs[i:], f.Blocks[0].Instrs)
- f.Blocks[0].Instrs = instrs
- f.consts = nil
-}
-
-// buildFakeExits ensures that every block in the function is
-// reachable in reverse from the Exit block. This is required to build
-// a full post-dominator tree, and to ensure the exit block's
-// inclusion in the dominator tree.
-func buildFakeExits(fn *Function) {
- // Find back-edges via forward DFS
- fn.fakeExits = BlockSet{values: make([]bool, len(fn.Blocks))}
- seen := fn.blockset(0)
- backEdges := fn.blockset(1)
-
- var dfs func(b *BasicBlock)
- dfs = func(b *BasicBlock) {
- if !seen.Add(b) {
- backEdges.Add(b)
- return
- }
- for _, pred := range b.Succs {
- dfs(pred)
- }
- }
- dfs(fn.Blocks[0])
-buildLoop:
- for {
- seen := fn.blockset(2)
- var dfs func(b *BasicBlock)
- dfs = func(b *BasicBlock) {
- if !seen.Add(b) {
- return
- }
- for _, pred := range b.Preds {
- dfs(pred)
+ for i, ins := range instrs {
+ unop, ok := (*ins).(*UnOp)
+ if !ok || unop.Op != token.ARROW {
+ continue
}
- if b == fn.Exit {
- for _, b := range fn.Blocks {
- if fn.fakeExits.Has(b) {
- dfs(b)
- }
- }
+ call, ok := unop.X.(*Call)
+ if !ok {
+ continue
}
- }
- dfs(fn.Exit)
-
- for _, b := range fn.Blocks {
- if !seen.Has(b) && backEdges.Has(b) {
- // Block b is not reachable from the exit block. Add a
- // fake jump from b to exit, then try again. Note that we
- // only add one fake edge at a time, as it may make
- // multiple blocks reachable.
- //
- // We only consider those blocks that have back edges.
- // Any unreachable block that doesn't have a back edge
- // must flow into a loop, which by definition has a
- // back edge. Thus, by looking for loops, we should
- // need fewer fake edges overall.
- fn.fakeExits.Add(b)
- continue buildLoop
+ if call.Common().IsInvoke() {
+ continue
}
- }
- break
- }
-}
+ // OPT(dh): surely there is a more efficient way of doing
+ // this, than using FullName. We should already have
+ // resolved time.Tick somewhere?
+ v, ok := call.Common().Value.(*Function)
+ if !ok {
+ continue
+ }
+ t, ok := v.Object().(*types.Func)
+ if !ok {
+ continue
+ }
+ if t.FullName() != "time.Tick" {
+ continue
+ }
+ ex, ok := (*instrs[i+1]).(*Extract)
+ if !ok || ex.Tuple != unop || ex.Index != 1 {
+ continue
+ }
-// finishBody() finalizes the function after IR code generation of its body.
-func (f *Function) finishBody() {
- f.objects = nil
- f.currentBlock = nil
- f.lblocks = nil
+ ifstmt, ok := (*instrs[i+2]).(*If)
+ if !ok || ifstmt.Cond != ex {
+ continue
+ }
- // Remove from f.Locals any Allocs that escape to the heap.
- j := 0
- for _, l := range f.Locals {
- if !l.Heap {
- f.Locals[j] = l
- j++
+ *instrs[i+2] = NewJump(block)
+ succ := block.Succs[1]
+ block.Succs = block.Succs[0:1]
+ succ.RemovePred(block)
}
}
- // Nil out f.Locals[j:] to aid GC.
- for i := j; i < len(f.Locals); i++ {
- f.Locals[i] = nil
- }
- f.Locals = f.Locals[:j]
optimizeBlocks(f)
+
buildReferrers(f)
+
buildDomTree(f)
- buildPostDomTree(f)
if f.Prog.mode&NaiveForm == 0 {
+ // For debugging pre-state of lifting pass:
+ // numberRegisters(f)
+ // f.WriteTo(os.Stderr)
lift(f)
}
- // emit constants after lifting, because lifting may produce new constants.
- f.emitConsts()
-
f.namedResults = nil // (used by lifting)
- f.implicitResults = nil
-
- numberNodes(f)
- defer f.wr.Close()
- f.wr.WriteFunc("start", "start", f)
+ numberRegisters(f)
if f.Prog.mode&PrintFunctions != 0 {
printMu.Lock()
@@ -588,29 +420,6 @@ func (f *Function) finishBody() {
}
}
-func isUselessPhi(phi *Phi) (Value, bool) {
- var v0 Value
- for _, e := range phi.Edges {
- if e == phi {
- continue
- }
- if v0 == nil {
- v0 = e
- }
- if v0 != e {
- if v0, ok := v0.(*Const); ok {
- if e, ok := e.(*Const); ok {
- if v0.typ == e.typ && v0.Value == e.Value {
- continue
- }
- }
- }
- return nil, false
- }
- }
- return v0, true
-}
-
func (f *Function) RemoveNilBlocks() {
f.removeNilBlocks()
}
@@ -653,25 +462,26 @@ func (f *Function) debugInfo() bool {
// returns it. Its name and type are taken from obj. Subsequent
// calls to f.lookup(obj) will return the same local.
//
-func (f *Function) addNamedLocal(obj types.Object, source ast.Node) *Alloc {
- l := f.addLocal(obj.Type(), source)
+func (f *Function) addNamedLocal(obj types.Object) *Alloc {
+ l := f.addLocal(obj.Type(), obj.Pos())
l.Comment = obj.Name()
f.objects[obj] = l
return l
}
func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc {
- return f.addNamedLocal(f.Pkg.info.Defs[id], id)
+ return f.addNamedLocal(f.Pkg.info.Defs[id])
}
// addLocal creates an anonymous local variable of type typ, adds it
// to function f and returns it. pos is the optional source location.
//
-func (f *Function) addLocal(typ types.Type, source ast.Node) *Alloc {
+func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc {
v := &Alloc{}
v.setType(types.NewPointer(typ))
+ v.setPos(pos)
f.Locals = append(f.Locals, v)
- f.emit(v, source)
+ f.emit(v)
return v
}
@@ -691,12 +501,13 @@ func (f *Function) lookup(obj types.Object, escaping bool) Value {
// Definition must be in an enclosing function;
// plumb it through intervening closures.
if f.parent == nil {
- panic("no ir.Value for " + obj.String())
+ panic("no ssa.Value for " + obj.String())
}
outer := f.parent.lookup(obj, true) // escaping
v := &FreeVar{
name: obj.Name(),
typ: outer.Type(),
+ pos: outer.Pos(),
outer: outer,
parent: f,
}
@@ -706,8 +517,8 @@ func (f *Function) lookup(obj types.Object, escaping bool) Value {
}
// emit emits the specified instruction to function f.
-func (f *Function) emit(instr Instruction, source ast.Node) Value {
- return f.currentBlock.emit(instr, source)
+func (f *Function) emit(instr Instruction) Value {
+ return f.currentBlock.emit(instr)
}
// RelString returns the full name of this function, qualified by
@@ -826,6 +637,10 @@ func WriteFunction(buf *bytes.Buffer, f *Function) {
fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name())
}
+ if f.Recover != nil {
+ fmt.Fprintf(buf, "# Recover: %s\n", f.Recover)
+ }
+
from := f.pkg()
if f.FreeVars != nil {
@@ -848,38 +663,45 @@ func WriteFunction(buf *bytes.Buffer, f *Function) {
buf.WriteString("\t(external)\n")
}
+ // NB. column calculations are confused by non-ASCII
+ // characters and assume 8-space tabs.
+ const punchcard = 80 // for old time's sake.
+ const tabwidth = 8
for _, b := range f.Blocks {
if b == nil {
// Corrupt CFG.
fmt.Fprintf(buf, ".nil:\n")
continue
}
- fmt.Fprintf(buf, "b%d:", b.Index)
- if len(b.Preds) > 0 {
- fmt.Fprint(buf, " ←")
- for _, pred := range b.Preds {
- fmt.Fprintf(buf, " b%d", pred.Index)
- }
- }
- if b.Comment != "" {
- fmt.Fprintf(buf, " # %s", b.Comment)
- }
- buf.WriteByte('\n')
+ n, _ := fmt.Fprintf(buf, "%d:", b.Index)
+ bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs))
+ fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg)
if false { // CFG debugging
fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs)
}
-
- buf2 := &bytes.Buffer{}
for _, instr := range b.Instrs {
buf.WriteString("\t")
switch v := instr.(type) {
case Value:
+ l := punchcard - tabwidth
// Left-align the instruction.
if name := v.Name(); name != "" {
- fmt.Fprintf(buf, "%s = ", name)
+ n, _ := fmt.Fprintf(buf, "%s = ", name)
+ l -= n
+ }
+ n, _ := buf.WriteString(instr.String())
+ l -= n
+ // Right-align the type if there's space.
+ if t := v.Type(); t != nil {
+ buf.WriteByte(' ')
+ ts := relType(t, from)
+ l -= len(ts) + len(" ") // (spaces before and after type)
+ if l > 0 {
+ fmt.Fprintf(buf, "%*s", l, "")
+ }
+ buf.WriteString(ts)
}
- buf.WriteString(instr.String())
case nil:
// Be robust against bad transforms.
buf.WriteString("<deleted>")
@@ -887,30 +709,9 @@ func WriteFunction(buf *bytes.Buffer, f *Function) {
buf.WriteString(instr.String())
}
buf.WriteString("\n")
-
- if f.Prog.mode&PrintSource != 0 {
- if s := instr.Source(); s != nil {
- buf2.Reset()
- format.Node(buf2, f.Prog.Fset, s)
- for {
- line, err := buf2.ReadString('\n')
- if len(line) == 0 {
- break
- }
- buf.WriteString("\t\t> ")
- buf.WriteString(line)
- if line[len(line)-1] != '\n' {
- buf.WriteString("\n")
- }
- if err != nil {
- break
- }
- }
- }
- }
}
- buf.WriteString("\n")
}
+ fmt.Fprintf(buf, "\n")
}
// newBasicBlock adds to f a new basic block and returns it. It does
@@ -935,7 +736,7 @@ func (f *Function) newBasicBlock(comment string) *BasicBlock {
// the function object, e.g. Pkg, Params, Blocks.
//
// It is practically impossible for clients to construct well-formed
-// IR functions/packages/programs directly, so we assume this is the
+// SSA functions/packages/programs directly, so we assume this is the
// job of the Builder alone. NewFunction exists to provide clients a
// little flexibility. For example, analysis tools may wish to
// construct fake Functions for the root of the callgraph, a fake
@@ -947,17 +748,18 @@ func (prog *Program) NewFunction(name string, sig *types.Signature, provenance s
return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance}
}
-//lint:ignore U1000 we may make use of this for functions loaded from export data
type extentNode [2]token.Pos
func (n extentNode) Pos() token.Pos { return n[0] }
func (n extentNode) End() token.Pos { return n[1] }
-func (f *Function) initHTML(name string) {
- if name == "" {
- return
- }
- if rel := f.RelString(nil); rel == name {
- f.wr = NewHTMLWriter("ir.html", rel, "")
- }
-}
+// Syntax returns an ast.Node whose Pos/End methods provide the
+// lexical extent of the function if it was defined by Go source code
+// (f.Synthetic==""), or nil otherwise.
+//
+// If f was built with debug information (see Package.SetDebugRef),
+// the result is the *ast.FuncDecl or *ast.FuncLit that declared the
+// function. Otherwise, it is an opaque Node providing only position
+// information; this avoids pinning the AST in memory.
+//
+func (f *Function) Syntax() ast.Node { return f.syntax }
diff --git a/vendor/honnef.co/go/tools/ir/identical.go b/vendor/honnef.co/go/tools/ssa/identical.go
index 4dbe44717..53cbee107 100644
--- a/vendor/honnef.co/go/tools/ir/identical.go
+++ b/vendor/honnef.co/go/tools/ssa/identical.go
@@ -1,6 +1,6 @@
// +build go1.8
-package ir
+package ssa
import "go/types"
diff --git a/vendor/honnef.co/go/tools/ir/identical_17.go b/vendor/honnef.co/go/tools/ssa/identical_17.go
index 3968fa782..da89d3339 100644
--- a/vendor/honnef.co/go/tools/ir/identical_17.go
+++ b/vendor/honnef.co/go/tools/ssa/identical_17.go
@@ -1,6 +1,6 @@
// +build !go1.8
-package ir
+package ssa
import "go/types"
diff --git a/vendor/honnef.co/go/tools/ir/lift.go b/vendor/honnef.co/go/tools/ssa/lift.go
index 1851ce032..531358fa3 100644
--- a/vendor/honnef.co/go/tools/ir/lift.go
+++ b/vendor/honnef.co/go/tools/ssa/lift.go
@@ -2,12 +2,12 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines the lifting pass which tries to "lift" Alloc
// cells (new/local variables) into SSA registers, replacing loads
// with the dominating stored value, eliminating loads and stores, and
-// inserting φ- and σ-nodes as needed.
+// inserting φ-nodes as needed.
// Cited papers and resources:
//
@@ -21,10 +21,6 @@ package ir
// Daniel Berlin, llvmdev mailing list, 2012.
// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html
// (Be sure to expand the whole thread.)
-//
-// C. Scott Ananian. 1997. The static single information form.
-//
-// Jeremy Singer. 2006. Static program analysis based on virtual register renaming.
// TODO(adonovan): opt: there are many optimizations worth evaluating, and
// the conventional wisdom for SSA construction is that a simple
@@ -44,7 +40,9 @@ package ir
import (
"fmt"
+ "go/token"
"go/types"
+ "math/big"
"os"
)
@@ -67,25 +65,31 @@ const debugLifting = false
type domFrontier [][]*BasicBlock
func (df domFrontier) add(u, v *BasicBlock) {
- df[u.Index] = append(df[u.Index], v)
+ p := &df[u.Index]
+ *p = append(*p, v)
}
-// build builds the dominance frontier df for the dominator tree of
-// fn, using the algorithm found in A Simple, Fast Dominance
-// Algorithm, Figure 5.
+// build builds the dominance frontier df for the dominator (sub)tree
+// rooted at u, using the Cytron et al. algorithm.
//
// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA
// by pruning the entire IDF computation, rather than merely pruning
// the DF -> IDF step.
-func (df domFrontier) build(fn *Function) {
- for _, b := range fn.Blocks {
- if len(b.Preds) >= 2 {
- for _, p := range b.Preds {
- runner := p
- for runner != b.dom.idom {
- df.add(runner, b)
- runner = runner.dom.idom
- }
+func (df domFrontier) build(u *BasicBlock) {
+ // Encounter each node u in postorder of dom tree.
+ for _, child := range u.dom.children {
+ df.build(child)
+ }
+ for _, vb := range u.Succs {
+ if v := vb.dom; v.idom != u {
+ df.add(u, vb)
+ }
+ }
+ for _, w := range u.dom.children {
+ for _, vb := range df[w.Index] {
+ // TODO(adonovan): opt: use word-parallel bitwise union.
+ if v := vb.dom; v.idom != u {
+ df.add(u, vb)
}
}
}
@@ -93,34 +97,11 @@ func (df domFrontier) build(fn *Function) {
func buildDomFrontier(fn *Function) domFrontier {
df := make(domFrontier, len(fn.Blocks))
- df.build(fn)
- return df
-}
-
-type postDomFrontier [][]*BasicBlock
-
-func (rdf postDomFrontier) add(u, v *BasicBlock) {
- rdf[u.Index] = append(rdf[u.Index], v)
-}
-
-func (rdf postDomFrontier) build(fn *Function) {
- for _, b := range fn.Blocks {
- if len(b.Succs) >= 2 {
- for _, s := range b.Succs {
- runner := s
- for runner != b.pdom.idom {
- rdf.add(runner, b)
- runner = runner.pdom.idom
- }
- }
- }
+ df.build(fn.Blocks[0])
+ if fn.Recover != nil {
+ df.build(fn.Recover)
}
-}
-
-func buildPostDomFrontier(fn *Function) postDomFrontier {
- rdf := make(postDomFrontier, len(fn.Blocks))
- rdf.build(fn)
- return rdf
+ return df
}
func removeInstr(refs []Instruction, instr Instruction) []Instruction {
@@ -139,8 +120,8 @@ func removeInstr(refs []Instruction, instr Instruction) []Instruction {
}
// lift replaces local and new Allocs accessed only with
-// load/store by IR registers, inserting φ- and σ-nodes where necessary.
-// The result is a program in pruned SSI form.
+// load/store by SSA registers, inserting φ-nodes where necessary.
+// The result is a program in classical pruned SSA form.
//
// Preconditions:
// - fn has no dead blocks (blockopt has run).
@@ -167,7 +148,6 @@ func lift(fn *Function) {
//
// But we will start with the simplest correct code.
df := buildDomFrontier(fn)
- rdf := buildPostDomFrontier(fn)
if debugLifting {
title := false
@@ -182,8 +162,7 @@ func lift(fn *Function) {
}
}
- newPhis := make(newPhiMap, len(fn.Blocks))
- newSigmas := make(newSigmaMap, len(fn.Blocks))
+ newPhis := make(newPhiMap)
// During this pass we will replace some BasicBlock.Instrs
// (allocs, loads and stores) with nil, keeping a count in
@@ -196,6 +175,11 @@ func lift(fn *Function) {
// instructions.
usesDefer := false
+ // A counter used to generate ~unique ids for Phi nodes, as an
+ // aid to debugging. We use large numbers to make them highly
+ // visible. All nodes are renumbered later.
+ fresh := 1000
+
// Determine which allocs we can lift and number them densely.
// The renaming phase uses this numbering for compact maps.
numAllocs := 0
@@ -206,7 +190,7 @@ func lift(fn *Function) {
switch instr := instr.(type) {
case *Alloc:
index := -1
- if liftAlloc(df, rdf, instr, newPhis, newSigmas) {
+ if liftAlloc(df, instr, newPhis, &fresh) {
index = numAllocs
numAllocs++
}
@@ -227,60 +211,32 @@ func lift(fn *Function) {
renaming := make([]Value, numAllocs)
// Renaming.
- rename(fn.Blocks[0], renaming, newPhis, newSigmas)
-
- simplifyPhis(newPhis)
+ rename(fn.Blocks[0], renaming, newPhis)
- // Eliminate dead φ- and σ-nodes.
- markLiveNodes(fn.Blocks, newPhis, newSigmas)
+ // Eliminate dead φ-nodes.
+ removeDeadPhis(fn.Blocks, newPhis)
- // Prepend remaining live φ-nodes to each block and possibly kill rundefers.
+ // Prepend remaining live φ-nodes to each block.
for _, b := range fn.Blocks {
- nps := newPhis[b.Index]
- head := make([]Instruction, 0, len(nps))
- for _, pred := range b.Preds {
- nss := newSigmas[pred.Index]
- idx := pred.succIndex(b)
- for _, newSigma := range nss {
- if sigma := newSigma.sigmas[idx]; sigma.live {
- head = append(head, sigma)
-
- // we didn't populate referrers before, as most
- // sigma nodes will be killed
- if refs := sigma.X.Referrers(); refs != nil {
- *refs = append(*refs, sigma)
- }
- } else {
- sigma.block = nil
- }
- }
- }
- for _, np := range nps {
- if np.phi.live {
- head = append(head, np.phi)
- } else {
- for _, edge := range np.phi.Edges {
- if refs := edge.Referrers(); refs != nil {
- *refs = removeInstr(*refs, np.phi)
- }
- }
- np.phi.block = nil
- }
- }
+ nps := newPhis[b]
+ j := len(nps)
rundefersToKill := b.rundefers
if usesDefer {
rundefersToKill = 0
}
- j := len(head)
if j+b.gaps+rundefersToKill == 0 {
continue // fast path: no new phis or gaps
}
// Compact nps + non-nil Instrs into a new slice.
+ // TODO(adonovan): opt: compact in situ (rightwards)
+ // if Instrs has sufficient space or slack.
dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill)
- copy(dst, head)
+ for i, np := range nps {
+ dst[i] = np.phi
+ }
for _, instr := range b.Instrs {
if instr == nil {
continue
@@ -311,185 +267,106 @@ func lift(fn *Function) {
fn.Locals = fn.Locals[:j]
}
-func hasDirectReferrer(instr Instruction) bool {
- for _, instr := range *instr.Referrers() {
- switch instr.(type) {
- case *Phi, *Sigma:
- // ignore
- default:
- return true
- }
- }
- return false
-}
-
-func markLiveNodes(blocks []*BasicBlock, newPhis newPhiMap, newSigmas newSigmaMap) {
- // Phi and sigma nodes are considered live if a non-phi, non-sigma
- // node uses them. Once we find a node that is live, we mark all
- // of its operands as used, too.
+// removeDeadPhis removes φ-nodes not transitively needed by a
+// non-Phi, non-DebugRef instruction.
+func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) {
+ // First pass: find the set of "live" φ-nodes: those reachable
+ // from some non-Phi instruction.
+ //
+ // We compute reachability in reverse, starting from each φ,
+ // rather than forwards, starting from each live non-Phi
+ // instruction, because this way visits much less of the
+ // Value graph.
+ livePhis := make(map[*Phi]bool)
for _, npList := range newPhis {
for _, np := range npList {
phi := np.phi
- if !phi.live && hasDirectReferrer(phi) {
- markLivePhi(phi)
- }
- }
- }
- for _, npList := range newSigmas {
- for _, np := range npList {
- for _, sigma := range np.sigmas {
- if !sigma.live && hasDirectReferrer(sigma) {
- markLiveSigma(sigma)
- }
+ if !livePhis[phi] && phiHasDirectReferrer(phi) {
+ markLivePhi(livePhis, phi)
}
}
}
+
// Existing φ-nodes due to && and || operators
// are all considered live (see Go issue 19622).
for _, b := range blocks {
for _, phi := range b.phis() {
- markLivePhi(phi.(*Phi))
+ markLivePhi(livePhis, phi.(*Phi))
}
}
-}
-func markLivePhi(phi *Phi) {
- phi.live = true
- for _, rand := range phi.Edges {
- switch rand := rand.(type) {
- case *Phi:
- if !rand.live {
- markLivePhi(rand)
- }
- case *Sigma:
- if !rand.live {
- markLiveSigma(rand)
+ // Second pass: eliminate unused phis from newPhis.
+ for block, npList := range newPhis {
+ j := 0
+ for _, np := range npList {
+ if livePhis[np.phi] {
+ npList[j] = np
+ j++
+ } else {
+ // discard it, first removing it from referrers
+ for _, val := range np.phi.Edges {
+ if refs := val.Referrers(); refs != nil {
+ *refs = removeInstr(*refs, np.phi)
+ }
+ }
+ np.phi.block = nil
}
}
+ newPhis[block] = npList[:j]
}
}
-func markLiveSigma(sigma *Sigma) {
- sigma.live = true
- switch rand := sigma.X.(type) {
- case *Phi:
- if !rand.live {
- markLivePhi(rand)
- }
- case *Sigma:
- if !rand.live {
- markLiveSigma(rand)
- }
- }
-}
-
-// simplifyPhis replaces trivial phis with non-phi alternatives. Phi
-// nodes where all edges are identical, or consist of only the phi
-// itself and one other value, may be replaced with the value.
-func simplifyPhis(newPhis newPhiMap) {
- // find all phis that are trivial and can be replaced with a
- // non-phi value. run until we reach a fixpoint, because replacing
- // a phi may make other phis trivial.
- for changed := true; changed; {
- changed = false
- for _, npList := range newPhis {
- for _, np := range npList {
- if np.phi.live {
- // we're reusing 'live' to mean 'dead' in the context of simplifyPhis
- continue
- }
- if r, ok := isUselessPhi(np.phi); ok {
- // useless phi, replace its uses with the
- // replacement value. the dead phi pass will clean
- // up the phi afterwards.
- replaceAll(np.phi, r)
- np.phi.live = true
- changed = true
- }
+// markLivePhi marks phi, and all φ-nodes transitively reachable via
+// its Operands, live.
+func markLivePhi(livePhis map[*Phi]bool, phi *Phi) {
+ livePhis[phi] = true
+ for _, rand := range phi.Operands(nil) {
+ if q, ok := (*rand).(*Phi); ok {
+ if !livePhis[q] {
+ markLivePhi(livePhis, q)
}
}
}
-
- for _, npList := range newPhis {
- for _, np := range npList {
- np.phi.live = false
- }
- }
}
-type BlockSet struct {
- idx int
- values []bool
- count int
-}
-
-func NewBlockSet(size int) *BlockSet {
- return &BlockSet{values: make([]bool, size)}
-}
-
-func (s *BlockSet) Set(s2 *BlockSet) {
- copy(s.values, s2.values)
- s.count = 0
- for _, v := range s.values {
- if v {
- s.count++
+// phiHasDirectReferrer reports whether phi is directly referred to by
+// a non-Phi instruction. Such instructions are the
+// roots of the liveness traversal.
+func phiHasDirectReferrer(phi *Phi) bool {
+ for _, instr := range *phi.Referrers() {
+ if _, ok := instr.(*Phi); !ok {
+ return true
}
}
+ return false
}
-func (s *BlockSet) Num() int {
- return s.count
-}
-
-func (s *BlockSet) Has(b *BasicBlock) bool {
- if b.Index >= len(s.values) {
- return false
- }
- return s.values[b.Index]
-}
+type BlockSet struct{ big.Int } // (inherit methods from Int)
// add adds b to the set and returns true if the set changed.
func (s *BlockSet) Add(b *BasicBlock) bool {
- if s.values[b.Index] {
+ i := b.Index
+ if s.Bit(i) != 0 {
return false
}
- s.count++
- s.values[b.Index] = true
- s.idx = b.Index
-
+ s.SetBit(&s.Int, i, 1)
return true
}
-func (s *BlockSet) Clear() {
- for j := range s.values {
- s.values[j] = false
- }
- s.count = 0
+func (s *BlockSet) Has(b *BasicBlock) bool {
+ return s.Bit(b.Index) == 1
}
// take removes an arbitrary element from a set s and
// returns its index, or returns -1 if empty.
func (s *BlockSet) Take() int {
- // [i, end]
- for i := s.idx; i < len(s.values); i++ {
- if s.values[i] {
- s.values[i] = false
- s.idx = i
- s.count--
+ l := s.BitLen()
+ for i := 0; i < l; i++ {
+ if s.Bit(i) == 1 {
+ s.SetBit(&s.Int, i, 0)
return i
}
}
-
- // [start, i)
- for i := 0; i < s.idx; i++ {
- if s.values[i] {
- s.values[i] = false
- s.idx = i
- s.count--
- return i
- }
- }
-
return -1
}
@@ -500,28 +377,17 @@ type newPhi struct {
alloc *Alloc
}
-type newSigma struct {
- alloc *Alloc
- sigmas []*Sigma
-}
-
// newPhiMap records for each basic block, the set of newPhis that
// must be prepended to the block.
-type newPhiMap [][]newPhi
-type newSigmaMap [][]newSigma
+type newPhiMap map[*BasicBlock][]newPhi
// liftAlloc determines whether alloc can be lifted into registers,
// and if so, it populates newPhis with all the φ-nodes it may require
// and returns true.
-func liftAlloc(df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis newPhiMap, newSigmas newSigmaMap) bool {
- fn := alloc.Parent()
-
- defblocks := fn.blockset(0)
- useblocks := fn.blockset(1)
- Aphi := fn.blockset(2)
- Asigma := fn.blockset(3)
- W := fn.blockset(4)
-
+//
+// fresh is a source of fresh ids for phi nodes.
+//
+func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool {
// Don't lift aggregates into registers, because we don't have
// a way to express their zero-constants.
switch deref(alloc.Type()).Underlying().(type) {
@@ -531,7 +397,7 @@ func liftAlloc(df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis newPhi
// Don't lift named return values in functions that defer
// calls that may recover from panic.
- if fn.hasDefer {
+ if fn := alloc.Parent(); fn.Recover != nil {
for _, nr := range fn.namedResults {
if nr == alloc {
return false
@@ -541,6 +407,7 @@ func liftAlloc(df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis newPhi
// Compute defblocks, the set of blocks containing a
// definition of the alloc cell.
+ var defblocks BlockSet
for _, instr := range *alloc.Referrers() {
// Bail out if we discover the alloc is not liftable;
// the only operations permitted to use the alloc are
@@ -554,14 +421,13 @@ func liftAlloc(df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis newPhi
panic("Alloc.Referrers is inconsistent")
}
defblocks.Add(instr.Block())
- case *Load:
+ case *UnOp:
+ if instr.Op != token.MUL {
+ return false // not a load
+ }
if instr.X != alloc {
panic("Alloc.Referrers is inconsistent")
}
- useblocks.Add(instr.Block())
- for _, ref := range *instr.Referrers() {
- useblocks.Add(ref.Block())
- }
case *DebugRef:
// ok
default:
@@ -575,85 +441,49 @@ func liftAlloc(df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis newPhi
fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name())
}
+ fn := alloc.Parent()
+
// Φ-insertion.
//
// What follows is the body of the main loop of the insert-φ
// function described by Cytron et al, but instead of using
// counter tricks, we just reset the 'hasAlready' and 'work'
// sets each iteration. These are bitmaps so it's pretty cheap.
+ //
+ // TODO(adonovan): opt: recycle slice storage for W,
+ // hasAlready, defBlocks across liftAlloc calls.
+ var hasAlready BlockSet
// Initialize W and work to defblocks.
+ var work BlockSet = defblocks // blocks seen
+ var W BlockSet // blocks to do
+ W.Set(&defblocks.Int)
+
+ // Traverse iterated dominance frontier, inserting φ-nodes.
+ for i := W.Take(); i != -1; i = W.Take() {
+ u := fn.Blocks[i]
+ for _, v := range df[u.Index] {
+ if hasAlready.Add(v) {
+ // Create φ-node.
+ // It will be prepended to v.Instrs later, if needed.
+ phi := &Phi{
+ Edges: make([]Value, len(v.Preds)),
+ Comment: alloc.Comment,
+ }
+ // This is merely a debugging aid:
+ phi.setNum(*fresh)
+ *fresh++
- for change := true; change; {
- change = false
- {
- // Traverse iterated dominance frontier, inserting φ-nodes.
- W.Set(defblocks)
-
- for i := W.Take(); i != -1; i = W.Take() {
- n := fn.Blocks[i]
- for _, y := range df[n.Index] {
- if Aphi.Add(y) {
- // Create φ-node.
- // It will be prepended to v.Instrs later, if needed.
- phi := &Phi{
- Edges: make([]Value, len(y.Preds)),
- Comment: alloc.Comment,
- }
-
- phi.source = alloc.source
- phi.setType(deref(alloc.Type()))
- phi.block = y
- if debugLifting {
- fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, y)
- }
- newPhis[y.Index] = append(newPhis[y.Index], newPhi{phi, alloc})
-
- for _, p := range y.Preds {
- useblocks.Add(p)
- }
- change = true
- if defblocks.Add(y) {
- W.Add(y)
- }
- }
+ phi.pos = alloc.Pos()
+ phi.setType(deref(alloc.Type()))
+ phi.block = v
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v)
}
- }
- }
+ newPhis[v] = append(newPhis[v], newPhi{phi, alloc})
- {
- W.Set(useblocks)
- for i := W.Take(); i != -1; i = W.Take() {
- n := fn.Blocks[i]
- for _, y := range rdf[n.Index] {
- // OPT(dh): if we had liveness information, we
- // could avoid adding sigma nodes for already dead
- // variables. but would calculating liveness
- // information be cheaper than pruning dead sigmas
- // later?
- if Asigma.Add(y) {
- sigmas := make([]*Sigma, 0, len(y.Succs))
- for _, succ := range y.Succs {
- sigma := &Sigma{
- From: y,
- X: alloc,
- Comment: alloc.Comment,
- }
- sigma.source = alloc.source
- sigma.setType(deref(alloc.Type()))
- sigma.block = succ
- sigmas = append(sigmas, sigma)
- }
-
- newSigmas[y.Index] = append(newSigmas[y.Index], newSigma{alloc, sigmas})
- for _, s := range y.Succs {
- defblocks.Add(s)
- }
- change = true
- if useblocks.Add(y) {
- W.Add(y)
- }
- }
+ if work.Add(v) {
+ W.Add(v)
}
}
}
@@ -689,27 +519,28 @@ func replaceAll(x, y Value) {
// renamed returns the value to which alloc is being renamed,
// constructing it lazily if it's the implicit zero initialization.
//
-func renamed(fn *Function, renaming []Value, alloc *Alloc) Value {
+func renamed(renaming []Value, alloc *Alloc) Value {
v := renaming[alloc.index]
if v == nil {
- v = emitConst(fn, zeroConst(deref(alloc.Type())))
+ v = zeroConst(deref(alloc.Type()))
renaming[alloc.index] = v
}
return v
}
-// rename implements the Cytron et al-based SSI renaming algorithm, a
+// rename implements the (Cytron et al) SSA renaming algorithm, a
// preorder traversal of the dominator tree replacing all loads of
// Alloc cells with the value stored to that cell by the dominating
-// store instruction.
+// store instruction. For lifting, we need only consider loads,
+// stores and φ-nodes.
//
// renaming is a map from *Alloc (keyed by index number) to its
// dominating stored value; newPhis[x] is the set of new φ-nodes to be
// prepended to block x.
//
-func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSigmaMap) {
+func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) {
// Each φ-node becomes the new name for its associated Alloc.
- for _, np := range newPhis[u.Index] {
+ for _, np := range newPhis[u] {
phi := np.phi
alloc := np.alloc
renaming[alloc.index] = phi
@@ -738,9 +569,7 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig
fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n",
instr, instr.Val.Name())
}
- if refs := instr.Addr.Referrers(); refs != nil {
- *refs = removeInstr(*refs, instr)
- }
+ // Remove the store from the referrer list of the stored value.
if refs := instr.Val.Referrers(); refs != nil {
*refs = removeInstr(*refs, instr)
}
@@ -749,37 +578,28 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig
u.gaps++
}
- case *Load:
- if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell
- // In theory, we wouldn't be able to replace loads
- // directly, because a loaded value could be used in
- // different branches, in which case it should be
- // replaced with different sigma nodes. But we can't
- // simply defer replacement, either, because then
- // later stores might incorrectly affect this load.
- //
- // To avoid doing renaming on _all_ values (instead of
- // just loads and stores like we're doing), we make
- // sure during code generation that each load is only
- // used in one block. For example, in constant switch
- // statements, where the tag is only evaluated once,
- // we store it in a temporary and load it for each
- // comparison, so that we have individual loads to
- // replace.
- newval := renamed(u.Parent(), renaming, alloc)
- if debugLifting {
- fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n",
- instr.Name(), instr, newval)
+ case *UnOp:
+ if instr.Op == token.MUL {
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell
+ newval := renamed(renaming, alloc)
+ if debugLifting {
+ fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n",
+ instr.Name(), instr, newval.Name())
+ }
+ // Replace all references to
+ // the loaded value by the
+ // dominating stored value.
+ replaceAll(instr, newval)
+ // Delete the Load.
+ u.Instrs[i] = nil
+ u.gaps++
}
- replaceAll(instr, newval)
- u.Instrs[i] = nil
- u.gaps++
}
case *DebugRef:
- if x, ok := instr.X.(*Alloc); ok && x.index >= 0 {
+ if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell
if instr.IsAddr {
- instr.X = renamed(u.Parent(), renaming, x)
+ instr.X = renamed(renaming, alloc)
instr.IsAddr = false
// Add DebugRef to instr.X's referrers.
@@ -799,16 +619,9 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig
}
}
- // update all outgoing sigma nodes with the dominating store
- for _, sigmas := range newSigmas[u.Index] {
- for _, sigma := range sigmas.sigmas {
- sigma.X = renamed(u.Parent(), renaming, sigmas.alloc)
- }
- }
-
// For each φ-node in a CFG successor, rename the edge.
- for succi, v := range u.Succs {
- phis := newPhis[v.Index]
+ for _, v := range u.Succs {
+ phis := newPhis[v]
if len(phis) == 0 {
continue
}
@@ -816,17 +629,7 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig
for _, np := range phis {
phi := np.phi
alloc := np.alloc
- // if there's a sigma node, use it, else use the dominating value
- var newval Value
- for _, sigmas := range newSigmas[u.Index] {
- if sigmas.alloc == alloc {
- newval = sigmas.sigmas[succi]
- break
- }
- }
- if newval == nil {
- newval = renamed(u.Parent(), renaming, alloc)
- }
+ newval := renamed(renaming, alloc)
if debugLifting {
fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n",
phi.Name(), u, v, i, alloc.Name(), newval.Name())
@@ -840,18 +643,15 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig
// Continue depth-first recursion over domtree, pushing a
// fresh copy of the renaming map for each subtree.
- r := make([]Value, len(renaming))
- for _, v := range u.dom.children {
- // XXX add debugging
- copy(r, renaming)
-
- // on entry to a block, the incoming sigma nodes become the new values for their alloc
- if idx := u.succIndex(v); idx != -1 {
- for _, sigma := range newSigmas[u.Index] {
- r[sigma.alloc.index] = sigma.sigmas[idx]
- }
- }
- rename(v, r, newPhis, newSigmas)
+ for i, v := range u.dom.children {
+ r := renaming
+ if i < len(u.dom.children)-1 {
+ // On all but the final iteration, we must make
+ // a copy to avoid destructive update.
+ r = make([]Value, len(renaming))
+ copy(r, renaming)
+ }
+ rename(v, r, newPhis)
}
}
diff --git a/vendor/honnef.co/go/tools/ir/lvalue.go b/vendor/honnef.co/go/tools/ssa/lvalue.go
index f676a1f7a..eb5d71e18 100644
--- a/vendor/honnef.co/go/tools/ir/lvalue.go
+++ b/vendor/honnef.co/go/tools/ssa/lvalue.go
@@ -2,13 +2,14 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// lvalues are the union of addressable expressions and map-index
// expressions.
import (
"go/ast"
+ "go/token"
"go/types"
)
@@ -17,24 +18,27 @@ import (
// pointer to permit updates to elements of maps.
//
type lvalue interface {
- store(fn *Function, v Value, source ast.Node) // stores v into the location
- load(fn *Function, source ast.Node) Value // loads the contents of the location
- address(fn *Function) Value // address of the location
- typ() types.Type // returns the type of the location
+ store(fn *Function, v Value) // stores v into the location
+ load(fn *Function) Value // loads the contents of the location
+ address(fn *Function) Value // address of the location
+ typ() types.Type // returns the type of the location
}
// An address is an lvalue represented by a true pointer.
type address struct {
addr Value
- expr ast.Expr // source syntax of the value (not address) [debug mode]
+ pos token.Pos // source position
+ expr ast.Expr // source syntax of the value (not address) [debug mode]
}
-func (a *address) load(fn *Function, source ast.Node) Value {
- return emitLoad(fn, a.addr, source)
+func (a *address) load(fn *Function) Value {
+ load := emitLoad(fn, a.addr)
+ load.pos = a.pos
+ return load
}
-func (a *address) store(fn *Function, v Value, source ast.Node) {
- store := emitStore(fn, a.addr, v, source)
+func (a *address) store(fn *Function, v Value) {
+ store := emitStore(fn, a.addr, v, a.pos)
if a.expr != nil {
// store.Val is v, converted for assignability.
emitDebugRef(fn, a.expr, store.Val, false)
@@ -53,35 +57,38 @@ func (a *address) typ() types.Type {
}
// An element is an lvalue represented by m[k], the location of an
-// element of a map. These locations are not addressable
+// element of a map or string. These locations are not addressable
// since pointers cannot be formed from them, but they do support
-// load() and store().
+// load(), and in the case of maps, store().
//
type element struct {
- m, k Value // map
- t types.Type // map element type
+ m, k Value // map or string
+ t types.Type // map element type or string byte type
+ pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v)
}
-func (e *element) load(fn *Function, source ast.Node) Value {
- l := &MapLookup{
+func (e *element) load(fn *Function) Value {
+ l := &Lookup{
X: e.m,
Index: e.k,
}
+ l.setPos(e.pos)
l.setType(e.t)
- return fn.emit(l, source)
+ return fn.emit(l)
}
-func (e *element) store(fn *Function, v Value, source ast.Node) {
+func (e *element) store(fn *Function, v Value) {
up := &MapUpdate{
Map: e.m,
Key: e.k,
- Value: emitConv(fn, v, e.t, source),
+ Value: emitConv(fn, v, e.t),
}
- fn.emit(up, source)
+ up.pos = e.pos
+ fn.emit(up)
}
func (e *element) address(fn *Function) Value {
- panic("map elements are not addressable")
+ panic("map/string elements are not addressable")
}
func (e *element) typ() types.Type {
@@ -93,15 +100,15 @@ func (e *element) typ() types.Type {
//
type blank struct{}
-func (bl blank) load(fn *Function, source ast.Node) Value {
+func (bl blank) load(fn *Function) Value {
panic("blank.load is illegal")
}
-func (bl blank) store(fn *Function, v Value, source ast.Node) {
+func (bl blank) store(fn *Function, v Value) {
s := &BlankStore{
Val: v,
}
- fn.emit(s, source)
+ fn.emit(s)
}
func (bl blank) address(fn *Function) Value {
diff --git a/vendor/honnef.co/go/tools/ir/methods.go b/vendor/honnef.co/go/tools/ssa/methods.go
index 517f448b8..9cf383916 100644
--- a/vendor/honnef.co/go/tools/ir/methods.go
+++ b/vendor/honnef.co/go/tools/ssa/methods.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines utilities for population of method sets.
diff --git a/vendor/honnef.co/go/tools/ir/mode.go b/vendor/honnef.co/go/tools/ssa/mode.go
index da548fdbb..d2a269893 100644
--- a/vendor/honnef.co/go/tools/ir/mode.go
+++ b/vendor/honnef.co/go/tools/ssa/mode.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines the BuilderMode type and its command-line flag.
@@ -15,30 +15,32 @@ import (
//
// *BuilderMode satisfies the flag.Value interface. Example:
//
-// var mode = ir.BuilderMode(0)
-// func init() { flag.Var(&mode, "build", ir.BuilderModeDoc) }
+// var mode = ssa.BuilderMode(0)
+// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }
//
type BuilderMode uint
const (
PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout
- PrintFunctions // Print function IR code to stdout
- PrintSource // Print source code when printing function IR
- LogSource // Log source locations as IR builder progresses
+ PrintFunctions // Print function SSA code to stdout
+ LogSource // Log source locations as SSA builder progresses
SanityCheckFunctions // Perform sanity checking of function bodies
- NaiveForm // Build naïve IR form: don't replace local loads/stores with registers
+ NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers
+ BuildSerially // Build packages serially, not in parallel.
GlobalDebug // Enable debug info for all packages
+ BareInits // Build init functions without guards or calls to dependent inits
)
-const BuilderModeDoc = `Options controlling the IR builder.
+const BuilderModeDoc = `Options controlling the SSA builder.
The value is a sequence of zero or more of these letters:
-C perform sanity [C]hecking of the IR form.
+C perform sanity [C]hecking of the SSA form.
D include [D]ebug info for every function.
P print [P]ackage inventory.
-F print [F]unction IR code.
-A print [A]ST nodes responsible for IR instructions
-S log [S]ource locations as IR builder progresses.
-N build [N]aive IR form: don't replace local loads/stores with registers.
+F print [F]unction SSA code.
+S log [S]ource locations as SSA builder progresses.
+L build distinct packages seria[L]ly instead of in parallel.
+N build [N]aive SSA form: don't replace local loads/stores with registers.
+I build bare [I]nit functions: no init guards or calls to dependent inits.
`
func (m BuilderMode) String() string {
@@ -52,9 +54,6 @@ func (m BuilderMode) String() string {
if m&PrintFunctions != 0 {
buf.WriteByte('F')
}
- if m&PrintSource != 0 {
- buf.WriteByte('A')
- }
if m&LogSource != 0 {
buf.WriteByte('S')
}
@@ -64,6 +63,9 @@ func (m BuilderMode) String() string {
if m&NaiveForm != 0 {
buf.WriteByte('N')
}
+ if m&BuildSerially != 0 {
+ buf.WriteByte('L')
+ }
return buf.String()
}
@@ -78,14 +80,14 @@ func (m *BuilderMode) Set(s string) error {
mode |= PrintPackages
case 'F':
mode |= PrintFunctions
- case 'A':
- mode |= PrintSource
case 'S':
- mode |= LogSource
+ mode |= LogSource | BuildSerially
case 'C':
mode |= SanityCheckFunctions
case 'N':
mode |= NaiveForm
+ case 'L':
+ mode |= BuildSerially
default:
return fmt.Errorf("unknown BuilderMode option: %q", c)
}
diff --git a/vendor/honnef.co/go/tools/ir/print.go b/vendor/honnef.co/go/tools/ssa/print.go
index 776eb5ece..6fd277277 100644
--- a/vendor/honnef.co/go/tools/ir/print.go
+++ b/vendor/honnef.co/go/tools/ssa/print.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file implements the String() methods for all Value and
// Instruction types.
@@ -25,9 +25,6 @@ import (
// references are package-qualified.
//
func relName(v Value, i Instruction) string {
- if v == nil {
- return "<nil>"
- }
var from *types.Package
if i != nil {
from = i.Parent().pkg()
@@ -35,6 +32,8 @@ func relName(v Value, i Instruction) string {
switch v := v.(type) {
case Member: // *Function or *Global
return v.RelString(from)
+ case *Const:
+ return v.RelString(from)
}
return v.Name()
}
@@ -59,43 +58,36 @@ func relString(m Member, from *types.Package) string {
func (v *Parameter) String() string {
from := v.Parent().pkg()
- return fmt.Sprintf("Parameter <%s> {%s}", relType(v.Type(), from), v.name)
+ return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from))
}
func (v *FreeVar) String() string {
from := v.Parent().pkg()
- return fmt.Sprintf("FreeVar <%s> %s", relType(v.Type(), from), v.Name())
+ return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from))
}
func (v *Builtin) String() string {
- return fmt.Sprintf("Builtin %s", v.Name())
+ return fmt.Sprintf("builtin %s", v.Name())
}
// Instruction.String()
func (v *Alloc) String() string {
- from := v.Parent().pkg()
- storage := "Stack"
+ op := "local"
if v.Heap {
- storage = "Heap"
+ op = "new"
}
- return fmt.Sprintf("%sAlloc <%s> (%s)", storage, relType(v.Type(), from), v.Comment)
-}
-
-func (v *Sigma) String() string {
from := v.Parent().pkg()
- s := fmt.Sprintf("Sigma <%s> [b%d] %s", relType(v.Type(), from), v.From.Index, v.X.Name())
- if v.Comment != "" {
- s += fmt.Sprintf(" (%s)", v.Comment)
- }
- return s
+ return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment)
}
func (v *Phi) String() string {
var b bytes.Buffer
- fmt.Fprintf(&b, "Phi <%s>", v.Type())
+ b.WriteString("phi [")
for i, edge := range v.Edges {
- b.WriteString(" ")
+ if i > 0 {
+ b.WriteString(", ")
+ }
// Be robust against malformed CFG.
if v.block == nil {
b.WriteString("??")
@@ -105,38 +97,40 @@ func (v *Phi) String() string {
if i < len(v.block.Preds) {
block = v.block.Preds[i].Index
}
- fmt.Fprintf(&b, "%d:", block)
+ fmt.Fprintf(&b, "%d: ", block)
edgeVal := "<nil>" // be robust
if edge != nil {
edgeVal = relName(edge, v)
}
b.WriteString(edgeVal)
}
+ b.WriteString("]")
if v.Comment != "" {
- fmt.Fprintf(&b, " (%s)", v.Comment)
+ b.WriteString(" #")
+ b.WriteString(v.Comment)
}
return b.String()
}
func printCall(v *CallCommon, prefix string, instr Instruction) string {
var b bytes.Buffer
+ b.WriteString(prefix)
if !v.IsInvoke() {
- if value, ok := instr.(Value); ok {
- fmt.Fprintf(&b, "%s <%s> %s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr))
- } else {
- fmt.Fprintf(&b, "%s %s", prefix, relName(v.Value, instr))
- }
+ b.WriteString(relName(v.Value, instr))
} else {
- if value, ok := instr.(Value); ok {
- fmt.Fprintf(&b, "%sInvoke <%s> %s.%s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr), v.Method.Name())
- } else {
- fmt.Fprintf(&b, "%sInvoke %s.%s", prefix, relName(v.Value, instr), v.Method.Name())
- }
+ fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name())
}
- for _, arg := range v.Args {
- b.WriteString(" ")
+ b.WriteString("(")
+ for i, arg := range v.Args {
+ if i > 0 {
+ b.WriteString(", ")
+ }
b.WriteString(relName(arg, instr))
}
+ if v.Signature().Variadic() {
+ b.WriteString("...")
+ }
+ b.WriteString(")")
return b.String()
}
@@ -145,59 +139,73 @@ func (c *CallCommon) String() string {
}
func (v *Call) String() string {
- return printCall(&v.Call, "Call", v)
+ return printCall(&v.Call, "", v)
}
func (v *BinOp) String() string {
- return fmt.Sprintf("BinOp <%s> {%s} %s %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v), relName(v.Y, v))
+ return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v))
}
func (v *UnOp) String() string {
- return fmt.Sprintf("UnOp <%s> {%s} %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v))
-}
-
-func (v *Load) String() string {
- return fmt.Sprintf("Load <%s> %s", relType(v.Type(), v.Parent().pkg()), relName(v.X, v))
+ return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk))
}
func printConv(prefix string, v, x Value) string {
from := v.Parent().pkg()
- return fmt.Sprintf("%s <%s> %s",
+ return fmt.Sprintf("%s %s <- %s (%s)",
prefix,
relType(v.Type(), from),
+ relType(x.Type(), from),
relName(x, v.(Instruction)))
}
-func (v *ChangeType) String() string { return printConv("ChangeType", v, v.X) }
-func (v *Convert) String() string { return printConv("Convert", v, v.X) }
-func (v *ChangeInterface) String() string { return printConv("ChangeInterface", v, v.X) }
-func (v *MakeInterface) String() string { return printConv("MakeInterface", v, v.X) }
+func (v *ChangeType) String() string { return printConv("changetype", v, v.X) }
+func (v *Convert) String() string { return printConv("convert", v, v.X) }
+func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) }
+func (v *MakeInterface) String() string { return printConv("make", v, v.X) }
func (v *MakeClosure) String() string {
- from := v.Parent().pkg()
var b bytes.Buffer
- fmt.Fprintf(&b, "MakeClosure <%s> %s", relType(v.Type(), from), relName(v.Fn, v))
+ fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v))
if v.Bindings != nil {
- for _, c := range v.Bindings {
- b.WriteString(" ")
+ b.WriteString(" [")
+ for i, c := range v.Bindings {
+ if i > 0 {
+ b.WriteString(", ")
+ }
b.WriteString(relName(c, v))
}
+ b.WriteString("]")
}
return b.String()
}
func (v *MakeSlice) String() string {
from := v.Parent().pkg()
- return fmt.Sprintf("MakeSlice <%s> %s %s",
+ return fmt.Sprintf("make %s %s %s",
relType(v.Type(), from),
relName(v.Len, v),
relName(v.Cap, v))
}
func (v *Slice) String() string {
- from := v.Parent().pkg()
- return fmt.Sprintf("Slice <%s> %s %s %s %s",
- relType(v.Type(), from), relName(v.X, v), relName(v.Low, v), relName(v.High, v), relName(v.Max, v))
+ var b bytes.Buffer
+ b.WriteString("slice ")
+ b.WriteString(relName(v.X, v))
+ b.WriteString("[")
+ if v.Low != nil {
+ b.WriteString(relName(v.Low, v))
+ }
+ b.WriteString(":")
+ if v.High != nil {
+ b.WriteString(relName(v.High, v))
+ }
+ if v.Max != nil {
+ b.WriteString(":")
+ b.WriteString(relName(v.Max, v))
+ }
+ b.WriteString("]")
+ return b.String()
}
func (v *MakeMap) String() string {
@@ -206,23 +214,22 @@ func (v *MakeMap) String() string {
res = relName(v.Reserve, v)
}
from := v.Parent().pkg()
- return fmt.Sprintf("MakeMap <%s> %s", relType(v.Type(), from), res)
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), res)
}
func (v *MakeChan) String() string {
from := v.Parent().pkg()
- return fmt.Sprintf("MakeChan <%s> %s", relType(v.Type(), from), relName(v.Size, v))
+ return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v))
}
func (v *FieldAddr) String() string {
- from := v.Parent().pkg()
st := deref(v.X.Type()).Underlying().(*types.Struct)
// Be robust against a bad index.
name := "?"
if 0 <= v.Field && v.Field < st.NumFields() {
name = st.Field(v.Field).Name()
}
- return fmt.Sprintf("FieldAddr <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v))
+ return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field)
}
func (v *Field) String() string {
@@ -232,49 +239,36 @@ func (v *Field) String() string {
if 0 <= v.Field && v.Field < st.NumFields() {
name = st.Field(v.Field).Name()
}
- from := v.Parent().pkg()
- return fmt.Sprintf("Field <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v))
+ return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field)
}
func (v *IndexAddr) String() string {
- from := v.Parent().pkg()
- return fmt.Sprintf("IndexAddr <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
+ return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v))
}
func (v *Index) String() string {
- from := v.Parent().pkg()
- return fmt.Sprintf("Index <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
+ return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v))
}
-func (v *MapLookup) String() string {
- from := v.Parent().pkg()
- return fmt.Sprintf("MapLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
-}
-
-func (v *StringLookup) String() string {
- from := v.Parent().pkg()
- return fmt.Sprintf("StringLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
+func (v *Lookup) String() string {
+ return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk))
}
func (v *Range) String() string {
- from := v.Parent().pkg()
- return fmt.Sprintf("Range <%s> %s", relType(v.Type(), from), relName(v.X, v))
+ return "range " + relName(v.X, v)
}
func (v *Next) String() string {
- from := v.Parent().pkg()
- return fmt.Sprintf("Next <%s> %s", relType(v.Type(), from), relName(v.Iter, v))
+ return "next " + relName(v.Iter, v)
}
func (v *TypeAssert) String() string {
from := v.Parent().pkg()
- return fmt.Sprintf("TypeAssert <%s> %s", relType(v.Type(), from), relName(v.X, v))
+ return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from))
}
func (v *Extract) String() string {
- from := v.Parent().pkg()
- name := v.Tuple.Type().(*types.Tuple).At(v.Index).Name()
- return fmt.Sprintf("Extract <%s> [%d] (%s) %s", relType(v.Type(), from), v.Index, name, relName(v.Tuple, v))
+ return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index)
}
func (s *Jump) String() string {
@@ -283,20 +277,7 @@ func (s *Jump) String() string {
if s.block != nil && len(s.block.Succs) == 1 {
block = s.block.Succs[0].Index
}
- str := fmt.Sprintf("Jump → b%d", block)
- if s.Comment != "" {
- str = fmt.Sprintf("%s # %s", str, s.Comment)
- }
- return str
-}
-
-func (s *Unreachable) String() string {
- // Be robust against malformed CFG.
- block := -1
- if s.block != nil && len(s.block.Succs) == 1 {
- block = s.block.Succs[0].Index
- }
- return fmt.Sprintf("Unreachable → b%d", block)
+ return fmt.Sprintf("jump %d", block)
}
func (s *If) String() string {
@@ -306,70 +287,41 @@ func (s *If) String() string {
tblock = s.block.Succs[0].Index
fblock = s.block.Succs[1].Index
}
- return fmt.Sprintf("If %s → b%d b%d", relName(s.Cond, s), tblock, fblock)
-}
-
-func (s *ConstantSwitch) String() string {
- var b bytes.Buffer
- fmt.Fprintf(&b, "ConstantSwitch %s", relName(s.Tag, s))
- for _, cond := range s.Conds {
- fmt.Fprintf(&b, " %s", relName(cond, s))
- }
- fmt.Fprint(&b, " →")
- for _, succ := range s.block.Succs {
- fmt.Fprintf(&b, " b%d", succ.Index)
- }
- return b.String()
-}
-
-func (s *TypeSwitch) String() string {
- from := s.Parent().pkg()
- var b bytes.Buffer
- fmt.Fprintf(&b, "TypeSwitch <%s> %s", relType(s.typ, from), relName(s.Tag, s))
- for _, cond := range s.Conds {
- fmt.Fprintf(&b, " %q", relType(cond, s.block.parent.pkg()))
- }
- return b.String()
+ return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock)
}
func (s *Go) String() string {
- return printCall(&s.Call, "Go", s)
+ return printCall(&s.Call, "go ", s)
}
func (s *Panic) String() string {
- // Be robust against malformed CFG.
- block := -1
- if s.block != nil && len(s.block.Succs) == 1 {
- block = s.block.Succs[0].Index
- }
- return fmt.Sprintf("Panic %s → b%d", relName(s.X, s), block)
+ return "panic " + relName(s.X, s)
}
func (s *Return) String() string {
var b bytes.Buffer
- b.WriteString("Return")
- for _, r := range s.Results {
- b.WriteString(" ")
+ b.WriteString("return")
+ for i, r := range s.Results {
+ if i == 0 {
+ b.WriteString(" ")
+ } else {
+ b.WriteString(", ")
+ }
b.WriteString(relName(r, s))
}
return b.String()
}
func (*RunDefers) String() string {
- return "RunDefers"
+ return "rundefers"
}
func (s *Send) String() string {
- return fmt.Sprintf("Send %s %s", relName(s.Chan, s), relName(s.X, s))
-}
-
-func (recv *Recv) String() string {
- from := recv.Parent().pkg()
- return fmt.Sprintf("Recv <%s> %s", relType(recv.Type(), from), relName(recv.Chan, recv))
+ return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s))
}
func (s *Defer) String() string {
- return printCall(&s.Call, "Defer", s)
+ return printCall(&s.Call, "defer ", s)
}
func (s *Select) String() string {
@@ -389,23 +341,21 @@ func (s *Select) String() string {
}
non := ""
if !s.Blocking {
- non = "Non"
+ non = "non"
}
- from := s.Parent().pkg()
- return fmt.Sprintf("Select%sBlocking <%s> [%s]", non, relType(s.Type(), from), b.String())
+ return fmt.Sprintf("select %sblocking [%s]", non, b.String())
}
func (s *Store) String() string {
- return fmt.Sprintf("Store {%s} %s %s",
- s.Val.Type(), relName(s.Addr, s), relName(s.Val, s))
+ return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s))
}
func (s *BlankStore) String() string {
- return fmt.Sprintf("BlankStore %s", relName(s.Val, s))
+ return fmt.Sprintf("_ = %s", relName(s.Val, s))
}
func (s *MapUpdate) String() string {
- return fmt.Sprintf("MapUpdate %s %s %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
+ return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
}
func (s *DebugRef) String() string {
@@ -476,3 +426,10 @@ func WritePackage(buf *bytes.Buffer, p *Package) {
fmt.Fprintf(buf, "\n")
}
+
+func commaOk(x bool) string {
+ if x {
+ return ",ok"
+ }
+ return ""
+}
diff --git a/vendor/honnef.co/go/tools/ir/sanity.go b/vendor/honnef.co/go/tools/ssa/sanity.go
index 5c487186d..1d29b66b0 100644
--- a/vendor/honnef.co/go/tools/ir/sanity.go
+++ b/vendor/honnef.co/go/tools/ssa/sanity.go
@@ -2,9 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
-// An optional pass for sanity-checking invariants of the IR representation.
+// An optional pass for sanity-checking invariants of the SSA representation.
// Currently it checks CFG invariants but little at the instruction level.
import (
@@ -23,7 +23,7 @@ type sanity struct {
insane bool
}
-// sanityCheck performs integrity checking of the IR representation
+// sanityCheck performs integrity checking of the SSA representation
// of the function fn and returns true if it was valid. Diagnostics
// are written to reporter if non-nil, os.Stderr otherwise. Some
// diagnostics are only warnings and do not imply a negative result.
@@ -89,15 +89,8 @@ func findDuplicate(blocks []*BasicBlock) *BasicBlock {
func (s *sanity) checkInstr(idx int, instr Instruction) {
switch instr := instr.(type) {
- case *If, *Jump, *Return, *Panic, *Unreachable, *ConstantSwitch:
+ case *If, *Jump, *Return, *Panic:
s.errorf("control flow instruction not at end of block")
- case *Sigma:
- if idx > 0 {
- prev := s.block.Instrs[idx-1]
- if _, ok := prev.(*Sigma); !ok {
- s.errorf("Sigma instruction follows a non-Sigma: %T", prev)
- }
- }
case *Phi:
if idx == 0 {
// It suffices to apply this check to just the first phi node.
@@ -106,10 +99,8 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
}
} else {
prev := s.block.Instrs[idx-1]
- switch prev.(type) {
- case *Phi, *Sigma:
- default:
- s.errorf("Phi instruction follows a non-Phi, non-Sigma: %T", prev)
+ if _, ok := prev.(*Phi); !ok {
+ s.errorf("Phi instruction follows a non-Phi: %T", prev)
}
}
if ne, np := len(instr.Edges), len(s.block.Preds); ne != np {
@@ -155,8 +146,7 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
case *Go:
case *Index:
case *IndexAddr:
- case *MapLookup:
- case *StringLookup:
+ case *Lookup:
case *MakeChan:
case *MakeClosure:
numFree := len(instr.Fn.(*Function).FreeVars)
@@ -185,11 +175,8 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
case *UnOp:
case *DebugRef:
case *BlankStore:
- case *Load:
- case *Parameter:
- case *Const:
- case *Recv:
- case *TypeSwitch:
+ case *Sigma:
+ // TODO(adonovan): implement checks.
default:
panic(fmt.Sprintf("Unknown instruction type: %T", instr))
}
@@ -209,9 +196,7 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
} else if t == tRangeIter {
// not a proper type; ignore.
} else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
- if _, ok := v.(*Const); !ok {
- s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t)
- }
+ s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t)
}
s.checkReferrerList(v)
}
@@ -254,19 +239,11 @@ func (s *sanity) checkFinalInstr(instr Instruction) {
}
case *Panic:
- if nsuccs := len(s.block.Succs); nsuccs != 1 {
- s.errorf("Panic-terminated block has %d successors; expected one", nsuccs)
- return
- }
-
- case *Unreachable:
- if nsuccs := len(s.block.Succs); nsuccs != 1 {
- s.errorf("Unreachable-terminated block has %d successors; expected one", nsuccs)
+ if nsuccs := len(s.block.Succs); nsuccs != 0 {
+ s.errorf("Panic-terminated block has %d successors; expected none", nsuccs)
return
}
- case *ConstantSwitch:
-
default:
s.errorf("non-control flow instruction at end of block")
}
@@ -283,8 +260,9 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) {
}
// Check all blocks are reachable.
- // (The entry block is always implicitly reachable, the exit block may be unreachable.)
- if index > 1 && len(b.Preds) == 0 {
+ // (The entry block is always implicitly reachable,
+ // as is the Recover block, if any.)
+ if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 {
s.warnf("unreachable block")
if b.Instrs == nil {
// Since this block is about to be pruned,
@@ -417,11 +395,7 @@ func (s *sanity) checkReferrerList(v Value) {
}
for i, ref := range *refs {
if _, ok := s.instrs[ref]; !ok {
- if val, ok := ref.(Value); ok {
- s.errorf("%s.Referrers()[%d] = %s = %s is not an instruction belonging to this function", v.Name(), i, val.Name(), val)
- } else {
- s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref)
- }
+ s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref)
}
}
}
@@ -452,7 +426,7 @@ func (s *sanity) checkFunction(fn *Function) bool {
s.errorf("nil Pkg")
}
}
- if src, syn := fn.Synthetic == "", fn.source != nil; src != syn {
+ if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn {
s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn)
}
for i, l := range fn.Locals {
@@ -507,6 +481,9 @@ func (s *sanity) checkFunction(fn *Function) bool {
}
s.checkBlock(b, i)
}
+ if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover {
+ s.errorf("Recover block is not in Blocks slice")
+ }
s.block = nil
for i, anon := range fn.AnonFuncs {
@@ -545,11 +522,14 @@ func sanityCheckPackage(pkg *Package) {
if obj.Name() != name {
if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") {
// Ok. The name of a declared init function varies between
- // its types.Func ("init") and its ir.Function ("init#%d").
+ // its types.Func ("init") and its ssa.Function ("init#%d").
} else {
panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s",
pkg.Pkg.Path(), mem, obj.Name(), name))
}
}
+ if obj.Pos() != mem.Pos() {
+ panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos()))
+ }
}
}
diff --git a/vendor/honnef.co/go/tools/ir/source.go b/vendor/honnef.co/go/tools/ssa/source.go
index 93d1ccbd2..8d9cca170 100644
--- a/vendor/honnef.co/go/tools/ir/source.go
+++ b/vendor/honnef.co/go/tools/ssa/source.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines utilities for working with source positions
// or source-level named entities ("objects").
@@ -25,7 +25,7 @@ import (
// Returns nil if not found; reasons might include:
// - the node is not enclosed by any function.
// - the node is within an anonymous function (FuncLit) and
-// its IR function has not been created yet
+// its SSA function has not been created yet
// (pkg.Build() has not yet been called).
//
func EnclosingFunction(pkg *Package, path []ast.Node) *Function {
@@ -46,7 +46,7 @@ outer:
continue outer
}
}
- // IR function not found:
+ // SSA function not found:
// - package not yet built, or maybe
// - builder skipped FuncLit in dead block
// (in principle; but currently the Builder
@@ -62,9 +62,9 @@ outer:
// package-level variable.
//
// Unlike EnclosingFunction, the behaviour of this function does not
-// depend on whether IR code for pkg has been built, so it can be
+// depend on whether SSA code for pkg has been built, so it can be
// used to quickly reject check inputs that will cause
-// EnclosingFunction to fail, prior to IR building.
+// EnclosingFunction to fail, prior to SSA building.
//
func HasEnclosingFunction(pkg *Package, path []ast.Node) bool {
return findEnclosingPackageLevelFunction(pkg, path) != nil
@@ -83,14 +83,23 @@ func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function
}
case *ast.FuncDecl:
- // Declared function/method.
- fn := findNamedFunc(pkg, decl.Pos())
- if fn == nil && decl.Recv == nil && decl.Name.Name == "init" {
- // Hack: return non-nil when IR is not yet
+ if decl.Recv == nil && decl.Name.Name == "init" {
+ // Explicit init() function.
+ for _, b := range pkg.init.Blocks {
+ for _, instr := range b.Instrs {
+ if instr, ok := instr.(*Call); ok {
+ if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos {
+ return callee
+ }
+ }
+ }
+ }
+ // Hack: return non-nil when SSA is not yet
// built so that HasEnclosingFunction works.
return pkg.init
}
- return fn
+ // Declared function/method.
+ return findNamedFunc(pkg, decl.Name.NamePos)
}
}
return nil // not in any function
@@ -100,15 +109,29 @@ func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function
// position pos.
//
func findNamedFunc(pkg *Package, pos token.Pos) *Function {
- for _, fn := range pkg.Functions {
- if fn.Pos() == pos {
- return fn
+ // Look at all package members and method sets of named types.
+ // Not very efficient.
+ for _, mem := range pkg.Members {
+ switch mem := mem.(type) {
+ case *Function:
+ if mem.Pos() == pos {
+ return mem
+ }
+ case *Type:
+ mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type()))
+ for i, n := 0, mset.Len(); i < n; i++ {
+ // Don't call Program.Method: avoid creating wrappers.
+ obj := mset.At(i).Obj().(*types.Func)
+ if obj.Pos() == pos {
+ return pkg.values[obj].(*Function)
+ }
+ }
}
}
return nil
}
-// ValueForExpr returns the IR Value that corresponds to non-constant
+// ValueForExpr returns the SSA Value that corresponds to non-constant
// expression e.
//
// It returns nil if no value was found, e.g.
@@ -126,10 +149,10 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function {
// The types of e (or &e, if isAddr) and the result are equal
// (modulo "untyped" bools resulting from comparisons).
//
-// (Tip: to find the ir.Value given a source position, use
+// (Tip: to find the ssa.Value given a source position, use
// astutil.PathEnclosingInterval to locate the ast.Node, then
// EnclosingFunction to locate the Function, then ValueForExpr to find
-// the ir.Value.)
+// the ssa.Value.)
//
func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
if f.debugInfo() { // (opt)
@@ -149,9 +172,9 @@ func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
// --- Lookup functions for source-level named entities (types.Objects) ---
-// Package returns the IR Package corresponding to the specified
+// Package returns the SSA Package corresponding to the specified
// type-checker package object.
-// It returns nil if no such IR package has been created.
+// It returns nil if no such SSA package has been created.
//
func (prog *Program) Package(obj *types.Package) *Package {
return prog.packages[obj]
@@ -180,7 +203,7 @@ func (prog *Program) FuncValue(obj *types.Func) *Function {
return fn
}
-// ConstValue returns the IR Value denoted by the source-level named
+// ConstValue returns the SSA Value denoted by the source-level named
// constant obj.
//
func (prog *Program) ConstValue(obj *types.Const) *Const {
@@ -198,12 +221,12 @@ func (prog *Program) ConstValue(obj *types.Const) *Const {
return NewConst(obj.Val(), obj.Type())
}
-// VarValue returns the IR Value that corresponds to a specific
+// VarValue returns the SSA Value that corresponds to a specific
// identifier denoting the source-level named variable obj.
//
// VarValue returns nil if a local variable was not found, perhaps
// because its package was not built, the debug information was not
-// requested during IR construction, or the value was optimized away.
+// requested during SSA construction, or the value was optimized away.
//
// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval),
// and that ident must resolve to obj.
@@ -229,14 +252,14 @@ func (prog *Program) ConstValue(obj *types.Const) *Const {
//
// It is not specified whether the value or the address is returned in
// any particular case, as it may depend upon optimizations performed
-// during IR code generation, such as registerization, constant
+// during SSA code generation, such as registerization, constant
// folding, avoidance of materialization of subexpressions, etc.
//
func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) {
// All references to a var are local to some function, possibly init.
fn := EnclosingFunction(pkg, ref)
if fn == nil {
- return // e.g. def of struct field; IR not built?
+ return // e.g. def of struct field; SSA not built?
}
id := ref[0].(*ast.Ident)
diff --git a/vendor/honnef.co/go/tools/ir/ssa.go b/vendor/honnef.co/go/tools/ssa/ssa.go
index d3cb25222..aeddd65e5 100644
--- a/vendor/honnef.co/go/tools/ir/ssa.go
+++ b/vendor/honnef.co/go/tools/ssa/ssa.go
@@ -2,10 +2,10 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This package defines a high-level intermediate representation for
-// Go programs using static single-information (SSI) form.
+// Go programs using static single-assignment (SSA) form.
import (
"fmt"
@@ -18,15 +18,12 @@ import (
"golang.org/x/tools/go/types/typeutil"
)
-type ID int32
-
-// A Program is a partial or complete Go program converted to IR form.
+// A Program is a partial or complete Go program converted to SSA form.
type Program struct {
Fset *token.FileSet // position information for the files of this Program
- PrintFunc string // create ir.html for function specified in PrintFunc
imported map[string]*Package // all importable Packages, keyed by import path
packages map[*types.Package]*Package // all loaded Packages, keyed by object
- mode BuilderMode // set of mode bits for IR construction
+ mode BuilderMode // set of mode bits for SSA construction
MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets
methodsMu sync.Mutex // guards the following maps:
@@ -47,14 +44,12 @@ type Program struct {
// and unspecified other things too.
//
type Package struct {
- Prog *Program // the owning program
- Pkg *types.Package // the corresponding go/types.Package
- Members map[string]Member // all package members keyed by name (incl. init and init#%d)
- Functions []*Function // all functions, including anonymous ones
- values map[types.Object]Value // package members (incl. types and methods), keyed by object
- init *Function // Func("init"); the package's init function
- debug bool // include full debug info in this package
- printFunc string // which function to print in HTML form
+ Prog *Program // the owning program
+ Pkg *types.Package // the corresponding go/types.Package
+ Members map[string]Member // all package members keyed by name (incl. init and init#%d)
+ values map[types.Object]Value // package members (incl. types and methods), keyed by object
+ init *Function // Func("init"); the package's init function
+ debug bool // include full debug info in this package
// The following fields are set transiently, then cleared
// after building.
@@ -73,6 +68,7 @@ type Member interface {
String() string // package-qualified name of the package member
RelString(*types.Package) string // like String, but relative refs are unqualified
Object() types.Object // typechecker's object for this member, if any
+ Pos() token.Pos // position of member's declaration, if known
Type() types.Type // type of the package member
Token() token.Token // token.{VAR,FUNC,CONST,TYPE}
Package() *Package // the containing package
@@ -99,10 +95,8 @@ type NamedConst struct {
pkg *Package
}
-// A Value is an IR value that can be referenced by an instruction.
+// A Value is an SSA value that can be referenced by an instruction.
type Value interface {
- setID(ID)
-
// Name returns the name of this value, and determines how
// this Value appears when used as an operand of an
// Instruction.
@@ -113,21 +107,10 @@ type Value interface {
// and type. For all other Values this is the name of the
// virtual register defined by the instruction.
//
- // The name of an IR Value is not semantically significant,
+ // The name of an SSA Value is not semantically significant,
// and may not even be unique within a function.
Name() string
- // ID returns the ID of this value. IDs are unique within a single
- // function and are densely numbered, but may contain gaps.
- // Values and other Instructions share the same ID space.
- // Globally, values are identified by their addresses. However,
- // IDs exist to facilitate efficient storage of mappings between
- // values and data when analysing functions.
- //
- // NB: IDs are allocated late in the IR construction process and
- // are not available to early stages of said process.
- ID() ID
-
// If this value is an Instruction, String returns its
// disassembled form; otherwise it returns unspecified
// human-readable information about the Value, such as its
@@ -140,7 +123,7 @@ type Value interface {
Type() types.Type
// Parent returns the function to which this Value belongs.
- // It returns nil for named Functions, Builtin and Global.
+ // It returns nil for named Functions, Builtin, Const and Global.
Parent() *Function
// Referrers returns the list of instructions that have this
@@ -153,27 +136,29 @@ type Value interface {
// Referrers is currently only defined if Parent()!=nil,
// i.e. for the function-local values FreeVar, Parameter,
// Functions (iff anonymous) and all value-defining instructions.
- // It returns nil for named Functions, Builtin and Global.
+ // It returns nil for named Functions, Builtin, Const and Global.
//
// Instruction.Operands contains the inverse of this relation.
Referrers() *[]Instruction
- Operands(rands []*Value) []*Value // nil for non-Instructions
-
- // Source returns the AST node responsible for creating this
- // value. A single AST node may be responsible for more than one
- // value, and not all values have an associated AST node.
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this value,
+ // or token.NoPos if it was not explicit in the source.
//
- // Do not use this method to find a Value given an ast.Expr; use
- // ValueForExpr instead.
- Source() ast.Node
-
- // Pos returns Source().Pos() if Source is not nil, else it
- // returns token.NoPos.
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Lparen
+ // for an *ast.CallExpr. This permits a compact but
+ // approximate mapping from Values to source positions for use
+ // in diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Value
+ // corresponds to an ast.Expr; use Function.ValueForExpr
+ // instead. NB: it requires that the function was built with
+ // debug information.)
Pos() token.Pos
}
-// An Instruction is an IR instruction that computes a new Value or
+// An Instruction is an SSA instruction that computes a new Value or
// has some effect.
//
// An Instruction that defines a value (e.g. BinOp) also implements
@@ -181,36 +166,23 @@ type Value interface {
// does not.
//
type Instruction interface {
- setSource(ast.Node)
- setID(ID)
-
// String returns the disassembled form of this value.
//
// Examples of Instructions that are Values:
- // "BinOp <int> {+} t1 t2" (BinOp)
- // "Call <int> len t1" (Call)
+ // "x + y" (BinOp)
+ // "len([])" (Call)
// Note that the name of the Value is not printed.
//
// Examples of Instructions that are not Values:
- // "Return t1" (Return)
- // "Store {int} t2 t1" (Store)
+ // "return x" (Return)
+ // "*y = x" (Store)
//
- // (The separation of Value.Name() from Value.String() is useful
+ // (The separation Value.Name() from Value.String() is useful
// for some analyses which distinguish the operation from the
// value it defines, e.g., 'y = local int' is both an allocation
// of memory 'local int' and a definition of a pointer y.)
String() string
- // ID returns the ID of this instruction. IDs are unique within a single
- // function and are densely numbered, but may contain gaps.
- // Globally, instructions are identified by their addresses. However,
- // IDs exist to facilitate efficient storage of mappings between
- // instructions and data when analysing functions.
- //
- // NB: IDs are allocated late in the IR construction process and
- // are not available to early stages of said process.
- ID() ID
-
// Parent returns the function to which this instruction
// belongs.
Parent() *Function
@@ -240,37 +212,39 @@ type Instruction interface {
// Values.)
Operands(rands []*Value) []*Value
- Referrers() *[]Instruction // nil for non-Values
-
- // Source returns the AST node responsible for creating this
- // instruction. A single AST node may be responsible for more than
- // one instruction, and not all instructions have an associated
- // AST node.
- Source() ast.Node
-
- // Pos returns Source().Pos() if Source is not nil, else it
- // returns token.NoPos.
+ // Pos returns the location of the AST token most closely
+ // associated with the operation that gave rise to this
+ // instruction, or token.NoPos if it was not explicit in the
+ // source.
+ //
+ // For each ast.Node type, a particular token is designated as
+ // the closest location for the expression, e.g. the Go token
+ // for an *ast.GoStmt. This permits a compact but approximate
+ // mapping from Instructions to source positions for use in
+ // diagnostic messages, for example.
+ //
+ // (Do not use this position to determine which Instruction
+ // corresponds to an ast.Expr; see the notes for Value.Pos.
+ // This position may be used to determine which non-Value
+ // Instruction corresponds to some ast.Stmts, but not all: If
+ // and Jump instructions have no Pos(), for example.)
Pos() token.Pos
}
-// A Node is a node in the IR value graph. Every concrete type that
+// A Node is a node in the SSA value graph. Every concrete type that
// implements Node is also either a Value, an Instruction, or both.
//
// Node contains the methods common to Value and Instruction, plus the
// Operands and Referrers methods generalized to return nil for
// non-Instructions and non-Values, respectively.
//
-// Node is provided to simplify IR graph algorithms. Clients should
+// Node is provided to simplify SSA graph algorithms. Clients should
// use the more specific and informative Value or Instruction
// interfaces where appropriate.
//
type Node interface {
- setID(ID)
-
// Common methods:
- ID() ID
String() string
- Source() ast.Node
Pos() token.Pos
Parent() *Function
@@ -293,6 +267,11 @@ type Node interface {
// the disassembly.
// To iterate over the blocks in dominance order, use DomPreorder().
//
+// Recover is an optional second entry point to which control resumes
+// after a recovered panic. The Recover block may contain only a return
+// statement, preceded by a load of the function's named return
+// parameters, if any.
+//
// A nested function (Parent()!=nil) that refers to one or more
// lexically enclosing local variables ("free variables") has FreeVars.
// Such functions cannot be called directly but require a
@@ -315,59 +294,44 @@ type Node interface {
// Type() returns the function's Signature.
//
type Function struct {
- node
-
name string
object types.Object // a declared *types.Func or one of its wrappers
method *types.Selection // info about provenance of synthetic methods
Signature *types.Signature
-
- Synthetic string // provenance of synthetic function; "" for true source functions
- parent *Function // enclosing function if anon; nil if global
- Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error)
- Prog *Program // enclosing program
- Params []*Parameter // function parameters; for methods, includes receiver
- FreeVars []*FreeVar // free variables whose values must be supplied by closure
- Locals []*Alloc // local variables of this function
- Blocks []*BasicBlock // basic blocks of the function; nil => external
- Exit *BasicBlock // The function's exit block
- AnonFuncs []*Function // anonymous functions directly beneath this one
- referrers []Instruction // referring instructions (iff Parent() != nil)
- hasDefer bool
- WillExit bool // Calling this function will always terminate the process
- WillUnwind bool // Calling this function will always unwind (it will call runtime.Goexit or panic)
+ pos token.Pos
+
+ Synthetic string // provenance of synthetic function; "" for true source functions
+ syntax ast.Node // *ast.Func{Decl,Lit}; replaced with simple ast.Node after build, unless debug mode
+ parent *Function // enclosing function if anon; nil if global
+ Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error)
+ Prog *Program // enclosing program
+ Params []*Parameter // function parameters; for methods, includes receiver
+ FreeVars []*FreeVar // free variables whose values must be supplied by closure
+ Locals []*Alloc // local variables of this function
+ Blocks []*BasicBlock // basic blocks of the function; nil => external
+ Recover *BasicBlock // optional; control transfers here after recovered panic
+ AnonFuncs []*Function // anonymous functions directly beneath this one
+ referrers []Instruction // referring instructions (iff Parent() != nil)
// The following fields are set transiently during building,
// then cleared.
- currentBlock *BasicBlock // where to emit code
- objects map[types.Object]Value // addresses of local variables
- namedResults []*Alloc // tuple of named results
- implicitResults []*Alloc // tuple of results
- targets *targets // linked stack of branch targets
- lblocks map[*ast.Object]*lblock // labelled blocks
- consts []*Const
- wr *HTMLWriter
- fakeExits BlockSet
- blocksets [5]BlockSet
-}
-
-func (fn *Function) results() []*Alloc {
- if len(fn.namedResults) > 0 {
- return fn.namedResults
- }
- return fn.implicitResults
+ currentBlock *BasicBlock // where to emit code
+ objects map[types.Object]Value // addresses of local variables
+ namedResults []*Alloc // tuple of named results
+ targets *targets // linked stack of branch targets
+ lblocks map[*ast.Object]*lblock // labelled blocks
}
-// BasicBlock represents an IR basic block.
+// BasicBlock represents an SSA basic block.
//
// The final element of Instrs is always an explicit transfer of
-// control (If, Jump, Return, Panic, or Unreachable).
+// control (If, Jump, Return, or Panic).
//
// A block may contain no Instructions only if it is unreachable,
// i.e., Preds is nil. Empty blocks are typically pruned.
//
// BasicBlocks and their Preds/Succs relation form a (possibly cyclic)
-// graph independent of the IR Value graph: the control-flow graph or
+// graph independent of the SSA Value graph: the control-flow graph or
// CFG. It is illegal for multiple edges to exist between the same
// pair of blocks.
//
@@ -386,10 +350,8 @@ type BasicBlock struct {
Preds, Succs []*BasicBlock // predecessors and successors
succs2 [2]*BasicBlock // initial space for Succs
dom domInfo // dominator tree info
- pdom domInfo // post-dominator tree info
- post int
- gaps int // number of nil Instrs (transient)
- rundefers int // number of rundefers (transient)
+ gaps int // number of nil Instrs (transient)
+ rundefers int // number of rundefers (transient)
}
// Pure values ----------------------------------------
@@ -411,10 +373,9 @@ type BasicBlock struct {
// belongs to an enclosing function.
//
type FreeVar struct {
- node
-
name string
typ types.Type
+ pos token.Pos
parent *Function
referrers []Instruction
@@ -425,10 +386,12 @@ type FreeVar struct {
// A Parameter represents an input parameter of a function.
//
type Parameter struct {
- register
-
- name string
- object types.Object // a *types.Var; nil for non-source locals
+ name string
+ object types.Object // a *types.Var; nil for non-source locals
+ typ types.Type
+ pos token.Pos
+ parent *Function
+ referrers []Instruction
}
// A Const represents the value of a constant expression.
@@ -448,13 +411,12 @@ type Parameter struct {
// Pos() returns token.NoPos.
//
// Example printed form:
-// Const <int> {42}
-// Const <untyped string> {"test"}
-// Const <MyComplex> {(3 + 4i)}
+// 42:int
+// "hello":untyped string
+// 3+4i:MyComplex
//
type Const struct {
- register
-
+ typ types.Type
Value constant.Value
}
@@ -465,11 +427,10 @@ type Const struct {
// identifier.
//
type Global struct {
- node
-
name string
object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard
typ types.Type
+ pos token.Pos
Pkg *Package
}
@@ -480,12 +441,12 @@ type Global struct {
// Builtins can only appear in CallCommon.Func.
//
// Name() indicates the function: one of the built-in functions from the
-// Go spec (excluding "make" and "new") or one of these ir-defined
+// Go spec (excluding "make" and "new") or one of these ssa-defined
// intrinsics:
//
// // wrapnilchk returns ptr if non-nil, panics otherwise.
// // (For use in indirection wrappers.)
-// func ir:wrapnilchk(ptr *T, recvType, methodName string) *T
+// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T
//
// Object() returns a *types.Builtin for built-ins defined by the spec,
// nil for others.
@@ -494,8 +455,6 @@ type Global struct {
// signature of the built-in for this call.
//
type Builtin struct {
- node
-
name string
sig *types.Signature
}
@@ -511,12 +470,12 @@ type Builtin struct {
//
// If Heap is false, Alloc allocates space in the function's
// activation record (frame); we refer to an Alloc(Heap=false) as a
-// "stack" alloc. Each stack Alloc returns the same address each time
+// "local" alloc. Each local Alloc returns the same address each time
// it is executed within the same activation; the space is
// re-initialized to zero.
//
// If Heap is true, Alloc allocates space in the heap; we
-// refer to an Alloc(Heap=true) as a "heap" alloc. Each heap Alloc
+// refer to an Alloc(Heap=true) as a "new" alloc. Each new Alloc
// returns a different address each time it is executed.
//
// When Alloc is applied to a channel, map or slice type, it returns
@@ -529,8 +488,8 @@ type Builtin struct {
// allocates a varargs slice.
//
// Example printed form:
-// t1 = StackAlloc <*int>
-// t2 = HeapAlloc <*int> (new)
+// t0 = local int
+// t1 = new int
//
type Alloc struct {
register
@@ -542,32 +501,31 @@ type Alloc struct {
var _ Instruction = (*Sigma)(nil)
var _ Value = (*Sigma)(nil)
-// The Sigma instruction represents an SSI σ-node, which splits values
-// at branches in the control flow.
-//
-// Conceptually, σ-nodes exist at the end of blocks that branch and
-// constitute parallel assignments to one value per destination block.
-// However, such a representation would be awkward to work with, so
-// instead we place σ-nodes at the beginning of branch targets. The
-// From field denotes to which incoming edge the node applies.
-//
-// Within a block, all σ-nodes must appear before all non-σ nodes.
-//
-// Example printed form:
-// t2 = Sigma <int> [#0] t1 (x)
-//
type Sigma struct {
register
- From *BasicBlock
- X Value
- Comment string
+ X Value
+ Branch bool
+}
- live bool // used during lifting
+func (p *Sigma) Value() Value {
+ v := p.X
+ for {
+ sigma, ok := v.(*Sigma)
+ if !ok {
+ break
+ }
+ v = sigma
+ }
+ return v
+}
+
+func (p *Sigma) String() string {
+ return fmt.Sprintf("σ [%s.%t]", relName(p.X, p), p.Branch)
}
// The Phi instruction represents an SSA φ-node, which combines values
// that differ across incoming control-flow edges and yields a new
-// value. Within a block, all φ-nodes must appear before all non-φ, non-σ
+// value. Within a block, all φ-nodes must appear before all non-φ
// nodes.
//
// Pos() returns the position of the && or || for short-circuit
@@ -575,14 +533,12 @@ type Sigma struct {
// during SSA renaming.
//
// Example printed form:
-// t3 = Phi <int> 2:t1 4:t2 (x)
+// t2 = phi [0: t0, 1: t1]
//
type Phi struct {
register
Comment string // a hint as to its purpose
Edges []Value // Edges[i] is value for Block().Preds[i]
-
- live bool // used during lifting
}
// The Call instruction represents a function or method call.
@@ -596,9 +552,9 @@ type Phi struct {
// Pos() returns the ast.CallExpr.Lparen, if explicit in the source.
//
// Example printed form:
-// t3 = Call <()> println t1 t2
-// t4 = Call <()> foo$1
-// t6 = Invoke <string> t5.String
+// t2 = println(t0, t1)
+// t4 = t3()
+// t7 = invoke t5.Println(...t6)
//
type Call struct {
register
@@ -610,7 +566,7 @@ type Call struct {
// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source.
//
// Example printed form:
-// t3 = BinOp <int> {+} t2 t1
+// t1 = t0 + 1:int
//
type BinOp struct {
register
@@ -623,32 +579,32 @@ type BinOp struct {
}
// The UnOp instruction yields the result of Op X.
+// ARROW is channel receive.
+// MUL is pointer indirection (load).
// XOR is bitwise complement.
// SUB is negation.
// NOT is logical negation.
//
+// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above
+// and a boolean indicating the success of the receive. The
+// components of the tuple are accessed using Extract.
//
-// Example printed form:
-// t2 = UnOp <int> {^} t1
-//
-type UnOp struct {
- register
- Op token.Token // One of: NOT SUB XOR ! - ^
- X Value
-}
-
-// The Load instruction loads a value from a memory address.
-//
-// For implicit memory loads, Pos() returns the position of the
+// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source.
+// For receive operations (ARROW) implicit in ranging over a channel,
+// Pos() returns the ast.RangeStmt.For.
+// For implicit memory loads (STAR), Pos() returns the position of the
// most closely associated source-level construct; the details are not
// specified.
//
// Example printed form:
-// t2 = Load <int> t1
+// t0 = *x
+// t2 = <-t1,ok
//
-type Load struct {
+type UnOp struct {
register
- X Value
+ Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^
+ X Value
+ CommaOk bool
}
// The ChangeType instruction applies to X a value-preserving type
@@ -667,7 +623,7 @@ type Load struct {
// from an explicit conversion in the source.
//
// Example printed form:
-// t2 = ChangeType <*T> t1
+// t1 = changetype *int <- IntPtr (t0)
//
type ChangeType struct {
register
@@ -690,13 +646,13 @@ type ChangeType struct {
// This operation cannot fail dynamically.
//
// Conversions of untyped string/number/bool constants to a specific
-// representation are eliminated during IR construction.
+// representation are eliminated during SSA construction.
//
// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
// from an explicit conversion in the source.
//
// Example printed form:
-// t2 = Convert <[]byte> t1
+// t1 = convert []byte <- string (t0)
//
type Convert struct {
register
@@ -713,7 +669,7 @@ type Convert struct {
// otherwise.
//
// Example printed form:
-// t2 = ChangeInterface <I1> t1
+// t1 = change interface interface{} <- I (t0)
//
type ChangeInterface struct {
register
@@ -733,7 +689,8 @@ type ChangeInterface struct {
// from an explicit conversion in the source.
//
// Example printed form:
-// t2 = MakeInterface <interface{}> t1
+// t1 = make interface{} <- int (42:int)
+// t2 = make Stringer <- t0
//
type MakeInterface struct {
register
@@ -749,8 +706,8 @@ type MakeInterface struct {
// closure or the ast.SelectorExpr.Sel for a bound method closure.
//
// Example printed form:
-// t1 = MakeClosure <func()> foo$1 t1 t2
-// t5 = MakeClosure <func(int)> (T).foo$bound t4
+// t0 = make closure anon@1.2 [x y z]
+// t1 = make closure bound$(main.I).add [i]
//
type MakeClosure struct {
register
@@ -767,8 +724,8 @@ type MakeClosure struct {
// the ast.CompositeLit.Lbrack if created by a literal.
//
// Example printed form:
-// t1 = MakeMap <map[string]int>
-// t2 = MakeMap <StringIntMap> t1
+// t1 = make map[string]int t0
+// t1 = make StringIntMap t0
//
type MakeMap struct {
register
@@ -784,8 +741,8 @@ type MakeMap struct {
// created it.
//
// Example printed form:
-// t3 = MakeChan <chan int> t1
-// t4 = MakeChan <chan IntChan> t2
+// t0 = make chan int 0
+// t0 = make IntChan 0
//
type MakeChan struct {
register
@@ -806,8 +763,8 @@ type MakeChan struct {
// created it.
//
// Example printed form:
-// t3 = MakeSlice <[]string> t1 t2
-// t4 = MakeSlice <StringSlice> t1 t2
+// t1 = make []string 1:int t0
+// t1 = make StringSlice 1:int t0
//
type MakeSlice struct {
register
@@ -829,7 +786,7 @@ type MakeSlice struct {
// NoPos if not explicit in the source (e.g. a variadic argument slice).
//
// Example printed form:
-// t4 = Slice <[]int> t3 t2 t1 <nil>
+// t1 = slice t0[1:]
//
type Slice struct {
register
@@ -851,7 +808,7 @@ type Slice struct {
// field, if explicit in the source.
//
// Example printed form:
-// t2 = FieldAddr <*int> [0] (X) t1
+// t1 = &t0.name [#1]
//
type FieldAddr struct {
register
@@ -869,7 +826,7 @@ type FieldAddr struct {
// field, if explicit in the source.
//
// Example printed form:
-// t2 = FieldAddr <int> [0] (X) t1
+// t1 = t0.name [#1]
//
type Field struct {
register
@@ -880,7 +837,7 @@ type Field struct {
// The IndexAddr instruction yields the address of the element at
// index Index of collection X. Index is an integer expression.
//
-// The elements of maps and strings are not addressable; use StringLookup, MapLookup or
+// The elements of maps and strings are not addressable; use Lookup or
// MapUpdate instead.
//
// Dynamically, this instruction panics if X evaluates to a nil *array
@@ -892,7 +849,7 @@ type Field struct {
// explicit in the source.
//
// Example printed form:
-// t3 = IndexAddr <*int> t2 t1
+// t2 = &t0[t1]
//
type IndexAddr struct {
register
@@ -906,7 +863,7 @@ type IndexAddr struct {
// explicit in the source.
//
// Example printed form:
-// t3 = Index <int> t2 t1
+// t2 = t0[t1]
//
type Index struct {
register
@@ -914,7 +871,9 @@ type Index struct {
Index Value // integer index
}
-// The MapLookup instruction yields element Index of collection X, a map.
+// The Lookup instruction yields element Index of collection X, a map
+// or string. Index is an integer expression if X is a string or the
+// appropriate key type if X is a map.
//
// If CommaOk, the result is a 2-tuple of the value above and a
// boolean indicating the result of a map membership test for the key.
@@ -923,30 +882,16 @@ type Index struct {
// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
//
// Example printed form:
-// t4 = MapLookup <string> t3 t1
-// t6 = MapLookup <(string, bool)> t3 t2
+// t2 = t0[t1]
+// t5 = t3[t4],ok
//
-type MapLookup struct {
+type Lookup struct {
register
- X Value // map
- Index Value // key-typed index
+ X Value // string or map
+ Index Value // numeric or key-typed index
CommaOk bool // return a value,ok pair
}
-// The StringLookup instruction yields element Index of collection X, a string.
-// Index is an integer expression.
-//
-// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
-//
-// Example printed form:
-// t3 = StringLookup <uint8> t2 t1
-//
-type StringLookup struct {
- register
- X Value // string
- Index Value // numeric index
-}
-
// SelectState is a helper for Select.
// It represents one goal state and its corresponding communication.
//
@@ -961,10 +906,10 @@ type SelectState struct {
// The Select instruction tests whether (or blocks until) one
// of the specified sent or received states is entered.
//
-// Let n be the number of States for which Dir==RECV and Tᵢ (0 ≤ i < n)
+// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
// be the element type of each such state's Chan.
// Select returns an n+2-tuple
-// (index int, recvOk bool, r₀ T₀, ... rₙ-1 Tₙ-1)
+// (index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
// The tuple's components, described below, must be accessed via the
// Extract instruction.
//
@@ -977,9 +922,9 @@ type SelectState struct {
// If !Blocking, select doesn't block if no states hold; instead it
// returns immediately with index equal to -1.
//
-// If the chosen channel was used for a receive, the rᵢ component is
+// If the chosen channel was used for a receive, the r_i component is
// set to the received value, where i is the index of that state among
-// all n receive states; otherwise rᵢ has the zero value of type Tᵢ.
+// all n receive states; otherwise r_i has the zero value of type T_i.
// Note that the receive index i is not the same as the state
// index index.
//
@@ -990,8 +935,8 @@ type SelectState struct {
// Pos() returns the ast.SelectStmt.Select.
//
// Example printed form:
-// t6 = SelectNonBlocking <(index int, ok bool, int)> [<-t4, t5<-t1]
-// t11 = SelectBlocking <(index int, ok bool)> []
+// t3 = select nonblocking [<-t0, t1<-t2]
+// t4 = select blocking []
//
type Select struct {
register
@@ -1009,7 +954,7 @@ type Select struct {
// Pos() returns the ast.RangeStmt.For.
//
// Example printed form:
-// t2 = Range <iter> t1
+// t0 = range "hello":string
//
type Range struct {
register
@@ -1032,8 +977,7 @@ type Range struct {
// The types of k and/or v may be types.Invalid.
//
// Example printed form:
-// t5 = Next <(ok bool, k int, v rune)> t2
-// t5 = Next <(ok bool, k invalid type, v invalid type)> t2
+// t1 = next t0
//
type Next struct {
register
@@ -1073,8 +1017,8 @@ type Next struct {
// type-switch statement.
//
// Example printed form:
-// t2 = TypeAssert <int> t1
-// t4 = TypeAssert <(value fmt.Stringer, ok bool)> t1
+// t1 = typeassert t0.(int)
+// t3 = typeassert,ok t2.(T)
//
type TypeAssert struct {
register
@@ -1086,11 +1030,11 @@ type TypeAssert struct {
// The Extract instruction yields component Index of Tuple.
//
// This is used to access the results of instructions with multiple
-// return values, such as Call, TypeAssert, Next, Recv,
-// MapLookup and others.
+// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and
+// IndexExpr(Map).
//
// Example printed form:
-// t7 = Extract <bool> [1] (ok) t4
+// t1 = extract t0 #1
//
type Extract struct {
register
@@ -1108,28 +1052,10 @@ type Extract struct {
// Pos() returns NoPos.
//
// Example printed form:
-// Jump → b1
+// jump done
//
type Jump struct {
anInstruction
- Comment string
-}
-
-// The Unreachable pseudo-instruction signals that execution cannot
-// continue after the preceding function call because it terminates
-// the process.
-//
-// The instruction acts as a control instruction, jumping to the exit
-// block. However, this jump will never execute.
-//
-// An Unreachable instruction must be the last instruction of its
-// containing BasicBlock.
-//
-// Example printed form:
-// Unreachable → b1
-//
-type Unreachable struct {
- anInstruction
}
// The If instruction transfers control to one of the two successors
@@ -1139,30 +1065,16 @@ type Unreachable struct {
// An If instruction must be the last instruction of its containing
// BasicBlock.
//
-// Pos() returns the *ast.IfStmt, if explicit in the source.
+// Pos() returns NoPos.
//
// Example printed form:
-// If t2 → b1 b2
+// if t0 goto done else body
//
type If struct {
anInstruction
Cond Value
}
-type ConstantSwitch struct {
- anInstruction
- Tag Value
- // Constant branch conditions. A nil Value denotes the (implicit
- // or explicit) default branch.
- Conds []Value
-}
-
-type TypeSwitch struct {
- register
- Tag Value
- Conds []types.Type
-}
-
// The Return instruction returns values and control back to the calling
// function.
//
@@ -1173,7 +1085,7 @@ type TypeSwitch struct {
// components which the caller must access using Extract instructions.
//
// There is no instruction to return a ready-made tuple like those
-// returned by a "value,ok"-mode TypeAssert, MapLookup or Recv or
+// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or
// a tail-call to a function with multiple result parameters.
//
// Return must be the last instruction of its containing BasicBlock.
@@ -1182,12 +1094,13 @@ type TypeSwitch struct {
// Pos() returns the ast.ReturnStmt.Return, if explicit in the source.
//
// Example printed form:
-// Return
-// Return t1 t2
+// return
+// return nil:I, 2:int
//
type Return struct {
anInstruction
Results []Value
+ pos token.Pos
}
// The RunDefers instruction pops and invokes the entire stack of
@@ -1200,7 +1113,7 @@ type Return struct {
// Pos() returns NoPos.
//
// Example printed form:
-// RunDefers
+// rundefers
//
type RunDefers struct {
anInstruction
@@ -1209,7 +1122,7 @@ type RunDefers struct {
// The Panic instruction initiates a panic with value X.
//
// A Panic instruction must be the last instruction of its containing
-// BasicBlock, which must have one successor, the exit block.
+// BasicBlock, which must have no successors.
//
// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction;
// they are treated as calls to a built-in function.
@@ -1218,11 +1131,12 @@ type RunDefers struct {
// in the source.
//
// Example printed form:
-// Panic t1
+// panic t0
//
type Panic struct {
anInstruction
- X Value // an interface{}
+ X Value // an interface{}
+ pos token.Pos
}
// The Go instruction creates a new goroutine and calls the specified
@@ -1233,13 +1147,14 @@ type Panic struct {
// Pos() returns the ast.GoStmt.Go.
//
// Example printed form:
-// Go println t1
-// Go t3
-// GoInvoke t4.Bar t2
+// go println(t0, t1)
+// go t3()
+// go invoke t5.Println(...t6)
//
type Go struct {
anInstruction
Call CallCommon
+ pos token.Pos
}
// The Defer instruction pushes the specified call onto a stack of
@@ -1250,13 +1165,14 @@ type Go struct {
// Pos() returns the ast.DeferStmt.Defer.
//
// Example printed form:
-// Defer println t1
-// Defer t3
-// DeferInvoke t4.Bar t2
+// defer println(t0, t1)
+// defer t3()
+// defer invoke t5.Println(...t6)
//
type Defer struct {
anInstruction
Call CallCommon
+ pos token.Pos
}
// The Send instruction sends X on channel Chan.
@@ -1264,30 +1180,12 @@ type Defer struct {
// Pos() returns the ast.SendStmt.Arrow, if explicit in the source.
//
// Example printed form:
-// Send t2 t1
+// send t0 <- t1
//
type Send struct {
anInstruction
Chan, X Value
-}
-
-// The Recv instruction receives from channel Chan.
-//
-// If CommaOk, the result is a 2-tuple of the value above
-// and a boolean indicating the success of the receive. The
-// components of the tuple are accessed using Extract.
-//
-// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source.
-// For receive operations implicit in ranging over a channel,
-// Pos() returns the ast.RangeStmt.For.
-//
-// Example printed form:
-// t2 = Recv <int> t1
-// t3 = Recv <(int, bool)> t1
-type Recv struct {
- register
- Chan Value
- CommaOk bool
+ pos token.Pos
}
// The Store instruction stores Val at address Addr.
@@ -1299,12 +1197,13 @@ type Recv struct {
// implementation choices, the details are not specified.
//
// Example printed form:
-// Store {int} t2 t1
+// *x = y
//
type Store struct {
anInstruction
Addr Value
Val Value
+ pos token.Pos
}
// The BlankStore instruction is emitted for assignments to the blank
@@ -1315,7 +1214,7 @@ type Store struct {
// Pos() returns NoPos.
//
// Example printed form:
-// BlankStore t1
+// _ = t0
//
type BlankStore struct {
anInstruction
@@ -1329,17 +1228,18 @@ type BlankStore struct {
// if explicit in the source.
//
// Example printed form:
-// MapUpdate t3 t1 t2
+// t0[t1] = t2
//
type MapUpdate struct {
anInstruction
Map Value
Key Value
Value Value
+ pos token.Pos
}
// A DebugRef instruction maps a source-level expression Expr to the
-// IR value X that represents the value (!IsAddr) or address (IsAddr)
+// SSA value X that represents the value (!IsAddr) or address (IsAddr)
// of that expression.
//
// DebugRef is a pseudo-instruction: it has no dynamic effect.
@@ -1349,6 +1249,11 @@ type MapUpdate struct {
// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the
// position of the ("designated") Lparen token.
//
+// If Expr is an *ast.Ident denoting a var or func, Object() returns
+// the object; though this information can be obtained from the type
+// checker, including it here greatly facilitates debugging.
+// For non-Ident expressions, Object() returns nil.
+//
// DebugRefs are generated only for functions built with debugging
// enabled; see Package.SetDebugMode() and the GlobalDebug builder
// mode flag.
@@ -1376,42 +1281,30 @@ type DebugRef struct {
// Embeddable mix-ins and helpers for common parts of other structs. -----------
-// register is a mix-in embedded by all IR values that are also
+// register is a mix-in embedded by all SSA values that are also
// instructions, i.e. virtual registers, and provides a uniform
// implementation of most of the Value interface: Value.Name() is a
// numbered register (e.g. "t0"); the other methods are field accessors.
//
// Temporary names are automatically assigned to each register on
-// completion of building a function in IR form.
+// completion of building a function in SSA form.
+//
+// Clients must not assume that the 'id' value (and the Name() derived
+// from it) is unique within a function. As always in this API,
+// semantics are determined only by identity; names exist only to
+// facilitate debugging.
//
type register struct {
anInstruction
+ num int // "name" of virtual register, e.g. "t0". Not guaranteed unique.
typ types.Type // type of virtual register
+ pos token.Pos // position of source expression, or NoPos
referrers []Instruction
}
-type node struct {
- source ast.Node
- id ID
-}
-
-func (n *node) setID(id ID) { n.id = id }
-func (n node) ID() ID { return n.id }
-
-func (n *node) setSource(source ast.Node) { n.source = source }
-func (n *node) Source() ast.Node { return n.source }
-
-func (n *node) Pos() token.Pos {
- if n.source != nil {
- return n.source.Pos()
- }
- return token.NoPos
-}
-
// anInstruction is a mix-in embedded by all Instructions.
// It provides the implementations of the Block and setBlock methods.
type anInstruction struct {
- node
block *BasicBlock // the basic block of this instruction
}
@@ -1443,9 +1336,9 @@ type anInstruction struct {
// Args[0] contains the receiver parameter.
//
// Example printed form:
-// t3 = Call <()> println t1 t2
-// Go t3
-// Defer t3
+// t2 = println(t0, t1)
+// go t3()
+// defer t5(...t6)
//
// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon
// represents a dynamically dispatched call to an interface method.
@@ -1459,18 +1352,18 @@ type anInstruction struct {
// receiver but the first true argument.
//
// Example printed form:
-// t6 = Invoke <string> t5.String
-// GoInvoke t4.Bar t2
-// DeferInvoke t4.Bar t2
+// t1 = invoke t0.String()
+// go invoke t3.Run(t2)
+// defer invoke t4.Handle(...t5)
//
// For all calls to variadic functions (Signature().Variadic()),
// the last element of Args is a slice.
//
type CallCommon struct {
- Value Value // receiver (invoke mode) or func value (call mode)
- Method *types.Func // abstract method (invoke mode)
- Args []Value // actual parameters (in static method call, includes receiver)
- Results Value
+ Value Value // receiver (invoke mode) or func value (call mode)
+ Method *types.Func // abstract method (invoke mode)
+ Args []Value // actual parameters (in static method call, includes receiver)
+ pos token.Pos // position of CallExpr.Lparen, iff explicit in source
}
// IsInvoke returns true if this call has "invoke" (not "call") mode.
@@ -1478,6 +1371,8 @@ func (c *CallCommon) IsInvoke() bool {
return c.Method != nil
}
+func (c *CallCommon) Pos() token.Pos { return c.pos }
+
// Signature returns the signature of the called function.
//
// For an "invoke"-mode call, the signature of the interface method is
@@ -1532,7 +1427,7 @@ func (c *CallCommon) Description() string {
type CallInstruction interface {
Instruction
Common() *CallCommon // returns the common parts of the call
- Value() *Call
+ Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer)
}
func (s *Call) Common() *CallCommon { return &s.Call }
@@ -1553,11 +1448,13 @@ func (v *Builtin) Parent() *Function { return nil }
func (v *FreeVar) Type() types.Type { return v.typ }
func (v *FreeVar) Name() string { return v.name }
func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers }
+func (v *FreeVar) Pos() token.Pos { return v.pos }
func (v *FreeVar) Parent() *Function { return v.parent }
func (v *Global) Type() types.Type { return v.typ }
func (v *Global) Name() string { return v.name }
func (v *Global) Parent() *Function { return nil }
+func (v *Global) Pos() token.Pos { return v.pos }
func (v *Global) Referrers() *[]Instruction { return nil }
func (v *Global) Token() token.Token { return token.VAR }
func (v *Global) Object() types.Object { return v.object }
@@ -1567,6 +1464,7 @@ func (v *Global) RelString(from *types.Package) string { return relString(v, fro
func (v *Function) Name() string { return v.name }
func (v *Function) Type() types.Type { return v.Signature }
+func (v *Function) Pos() token.Pos { return v.pos }
func (v *Function) Token() token.Token { return token.FUNC }
func (v *Function) Object() types.Object { return v.object }
func (v *Function) String() string { return v.RelString(nil) }
@@ -1579,15 +1477,24 @@ func (v *Function) Referrers() *[]Instruction {
return nil
}
-func (v *Parameter) Object() types.Object { return v.object }
+func (v *Parameter) Type() types.Type { return v.typ }
+func (v *Parameter) Name() string { return v.name }
+func (v *Parameter) Object() types.Object { return v.object }
+func (v *Parameter) Referrers() *[]Instruction { return &v.referrers }
+func (v *Parameter) Pos() token.Pos { return v.pos }
+func (v *Parameter) Parent() *Function { return v.parent }
func (v *Alloc) Type() types.Type { return v.typ }
func (v *Alloc) Referrers() *[]Instruction { return &v.referrers }
+func (v *Alloc) Pos() token.Pos { return v.pos }
func (v *register) Type() types.Type { return v.typ }
func (v *register) setType(typ types.Type) { v.typ = typ }
-func (v *register) Name() string { return fmt.Sprintf("t%d", v.id) }
+func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) }
+func (v *register) setNum(num int) { v.num = num }
func (v *register) Referrers() *[]Instruction { return &v.referrers }
+func (v *register) Pos() token.Pos { return v.pos }
+func (v *register) setPos(pos token.Pos) { v.pos = pos }
func (v *anInstruction) Parent() *Function { return v.block.parent }
func (v *anInstruction) Block() *BasicBlock { return v.block }
@@ -1644,7 +1551,19 @@ func (p *Package) Type(name string) (t *Type) {
return
}
-func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() }
+func (v *Call) Pos() token.Pos { return v.Call.pos }
+func (s *Defer) Pos() token.Pos { return s.pos }
+func (s *Go) Pos() token.Pos { return s.pos }
+func (s *MapUpdate) Pos() token.Pos { return s.pos }
+func (s *Panic) Pos() token.Pos { return s.pos }
+func (s *Return) Pos() token.Pos { return s.pos }
+func (s *Send) Pos() token.Pos { return s.pos }
+func (s *Store) Pos() token.Pos { return s.pos }
+func (s *BlankStore) Pos() token.Pos { return token.NoPos }
+func (s *If) Pos() token.Pos { return token.NoPos }
+func (s *Jump) Pos() token.Pos { return token.NoPos }
+func (s *RunDefers) Pos() token.Pos { return token.NoPos }
+func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() }
// Operands.
@@ -1708,19 +1627,6 @@ func (s *If) Operands(rands []*Value) []*Value {
return append(rands, &s.Cond)
}
-func (s *ConstantSwitch) Operands(rands []*Value) []*Value {
- rands = append(rands, &s.Tag)
- for i := range s.Conds {
- rands = append(rands, &s.Conds[i])
- }
- return rands
-}
-
-func (s *TypeSwitch) Operands(rands []*Value) []*Value {
- rands = append(rands, &s.Tag)
- return rands
-}
-
func (v *Index) Operands(rands []*Value) []*Value {
return append(rands, &v.X, &v.Index)
}
@@ -1733,15 +1639,7 @@ func (*Jump) Operands(rands []*Value) []*Value {
return rands
}
-func (*Unreachable) Operands(rands []*Value) []*Value {
- return rands
-}
-
-func (v *MapLookup) Operands(rands []*Value) []*Value {
- return append(rands, &v.X, &v.Index)
-}
-
-func (v *StringLookup) Operands(rands []*Value) []*Value {
+func (v *Lookup) Operands(rands []*Value) []*Value {
return append(rands, &v.X, &v.Index)
}
@@ -1818,10 +1716,6 @@ func (s *Send) Operands(rands []*Value) []*Value {
return append(rands, &s.Chan, &s.X)
}
-func (recv *Recv) Operands(rands []*Value) []*Value {
- return append(rands, &recv.Chan)
-}
-
func (v *Slice) Operands(rands []*Value) []*Value {
return append(rands, &v.X, &v.Low, &v.High, &v.Max)
}
@@ -1842,10 +1736,6 @@ func (v *UnOp) Operands(rands []*Value) []*Value {
return append(rands, &v.X)
}
-func (v *Load) Operands(rands []*Value) []*Value {
- return append(rands, &v.X)
-}
-
// Non-Instruction Values:
func (v *Builtin) Operands(rands []*Value) []*Value { return rands }
func (v *FreeVar) Operands(rands []*Value) []*Value { return rands }
diff --git a/vendor/honnef.co/go/tools/ir/staticcheck.conf b/vendor/honnef.co/go/tools/ssa/staticcheck.conf
index d7b38bc35..d7b38bc35 100644
--- a/vendor/honnef.co/go/tools/ir/staticcheck.conf
+++ b/vendor/honnef.co/go/tools/ssa/staticcheck.conf
diff --git a/vendor/honnef.co/go/tools/ssa/testmain.go b/vendor/honnef.co/go/tools/ssa/testmain.go
new file mode 100644
index 000000000..8ec15ba50
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/testmain.go
@@ -0,0 +1,271 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// CreateTestMainPackage synthesizes a main package that runs all the
+// tests of the supplied packages.
+// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing.
+//
+// TODO(adonovan): throws this all away now that x/tools/go/packages
+// provides access to the actual synthetic test main files.
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/types"
+ "log"
+ "os"
+ "strings"
+ "text/template"
+)
+
+// FindTests returns the Test, Benchmark, and Example functions
+// (as defined by "go test") defined in the specified package,
+// and its TestMain function, if any.
+//
+// Deprecated: use x/tools/go/packages to access synthetic testmain packages.
+func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) {
+ prog := pkg.Prog
+
+ // The first two of these may be nil: if the program doesn't import "testing",
+ // it can't contain any tests, but it may yet contain Examples.
+ var testSig *types.Signature // func(*testing.T)
+ var benchmarkSig *types.Signature // func(*testing.B)
+ var exampleSig = types.NewSignature(nil, nil, nil, false) // func()
+
+ // Obtain the types from the parameters of testing.MainStart.
+ if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
+ mainStart := testingPkg.Func("MainStart")
+ params := mainStart.Signature.Params()
+ testSig = funcField(params.At(1).Type())
+ benchmarkSig = funcField(params.At(2).Type())
+
+ // Does the package define this function?
+ // func TestMain(*testing.M)
+ if f := pkg.Func("TestMain"); f != nil {
+ sig := f.Type().(*types.Signature)
+ starM := mainStart.Signature.Results().At(0).Type() // *testing.M
+ if sig.Results().Len() == 0 &&
+ sig.Params().Len() == 1 &&
+ types.Identical(sig.Params().At(0).Type(), starM) {
+ main = f
+ }
+ }
+ }
+
+ // TODO(adonovan): use a stable order, e.g. lexical.
+ for _, mem := range pkg.Members {
+ if f, ok := mem.(*Function); ok &&
+ ast.IsExported(f.Name()) &&
+ strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") {
+
+ switch {
+ case testSig != nil && isTestSig(f, "Test", testSig):
+ tests = append(tests, f)
+ case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig):
+ benchmarks = append(benchmarks, f)
+ case isTestSig(f, "Example", exampleSig):
+ examples = append(examples, f)
+ default:
+ continue
+ }
+ }
+ }
+ return
+}
+
+// Like isTest, but checks the signature too.
+func isTestSig(f *Function, prefix string, sig *types.Signature) bool {
+ return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig)
+}
+
+// Given the type of one of the three slice parameters of testing.Main,
+// returns the function type.
+func funcField(slice types.Type) *types.Signature {
+ return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature)
+}
+
+// isTest tells whether name looks like a test (or benchmark, according to prefix).
+// It is a Test (say) if there is a character after Test that is not a lower-case letter.
+// We don't want TesticularCancer.
+// Plundered from $GOROOT/src/cmd/go/test.go
+func isTest(name, prefix string) bool {
+ if !strings.HasPrefix(name, prefix) {
+ return false
+ }
+ if len(name) == len(prefix) { // "Test" is ok
+ return true
+ }
+ return ast.IsExported(name[len(prefix):])
+}
+
+// CreateTestMainPackage creates and returns a synthetic "testmain"
+// package for the specified package if it defines tests, benchmarks or
+// executable examples, or nil otherwise. The new package is named
+// "main" and provides a function named "main" that runs the tests,
+// similar to the one that would be created by the 'go test' tool.
+//
+// Subsequent calls to prog.AllPackages include the new package.
+// The package pkg must belong to the program prog.
+//
+// Deprecated: use x/tools/go/packages to access synthetic testmain packages.
+func (prog *Program) CreateTestMainPackage(pkg *Package) *Package {
+ if pkg.Prog != prog {
+ log.Fatal("Package does not belong to Program")
+ }
+
+ // Template data
+ var data struct {
+ Pkg *Package
+ Tests, Benchmarks, Examples []*Function
+ Main *Function
+ Go18 bool
+ }
+ data.Pkg = pkg
+
+ // Enumerate tests.
+ data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg)
+ if data.Main == nil &&
+ data.Tests == nil && data.Benchmarks == nil && data.Examples == nil {
+ return nil
+ }
+
+ // Synthesize source for testmain package.
+ path := pkg.Pkg.Path() + "$testmain"
+ tmpl := testmainTmpl
+ if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
+ // In Go 1.8, testing.MainStart's first argument is an interface, not a func.
+ data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type())
+ } else {
+ // The program does not import "testing", but FindTests
+ // returned non-nil, which must mean there were Examples
+ // but no Test, Benchmark, or TestMain functions.
+
+ // We'll simply call them from testmain.main; this will
+ // ensure they don't panic, but will not check any
+ // "Output:" comments.
+ // (We should not execute an Example that has no
+ // "Output:" comment, but it's impossible to tell here.)
+ tmpl = examplesOnlyTmpl
+ }
+ var buf bytes.Buffer
+ if err := tmpl.Execute(&buf, data); err != nil {
+ log.Fatalf("internal error expanding template for %s: %v", path, err)
+ }
+ if false { // debugging
+ fmt.Fprintln(os.Stderr, buf.String())
+ }
+
+ // Parse and type-check the testmain package.
+ f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0))
+ if err != nil {
+ log.Fatalf("internal error parsing %s: %v", path, err)
+ }
+ conf := types.Config{
+ DisableUnusedImportCheck: true,
+ Importer: importer{pkg},
+ }
+ files := []*ast.File{f}
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ }
+ testmainPkg, err := conf.Check(path, prog.Fset, files, info)
+ if err != nil {
+ log.Fatalf("internal error type-checking %s: %v", path, err)
+ }
+
+ // Create and build SSA code.
+ testmain := prog.CreatePackage(testmainPkg, files, info, false)
+ testmain.SetDebugMode(false)
+ testmain.Build()
+ testmain.Func("main").Synthetic = "test main function"
+ testmain.Func("init").Synthetic = "package initializer"
+ return testmain
+}
+
+// An implementation of types.Importer for an already loaded SSA program.
+type importer struct {
+ pkg *Package // package under test; may be non-importable
+}
+
+func (imp importer) Import(path string) (*types.Package, error) {
+ if p := imp.pkg.Prog.ImportedPackage(path); p != nil {
+ return p.Pkg, nil
+ }
+ if path == imp.pkg.Pkg.Path() {
+ return imp.pkg.Pkg, nil
+ }
+ return nil, fmt.Errorf("not found") // can't happen
+}
+
+var testmainTmpl = template.Must(template.New("testmain").Parse(`
+package main
+
+import "io"
+import "os"
+import "testing"
+import p {{printf "%q" .Pkg.Pkg.Path}}
+
+{{if .Go18}}
+type deps struct{}
+
+func (deps) ImportPath() string { return "" }
+func (deps) MatchString(pat, str string) (bool, error) { return true, nil }
+func (deps) StartCPUProfile(io.Writer) error { return nil }
+func (deps) StartTestLog(io.Writer) {}
+func (deps) StopCPUProfile() {}
+func (deps) StopTestLog() error { return nil }
+func (deps) WriteHeapProfile(io.Writer) error { return nil }
+func (deps) WriteProfileTo(string, io.Writer, int) error { return nil }
+
+var match deps
+{{else}}
+func match(_, _ string) (bool, error) { return true, nil }
+{{end}}
+
+func main() {
+ tests := []testing.InternalTest{
+{{range .Tests}}
+ { {{printf "%q" .Name}}, p.{{.Name}} },
+{{end}}
+ }
+ benchmarks := []testing.InternalBenchmark{
+{{range .Benchmarks}}
+ { {{printf "%q" .Name}}, p.{{.Name}} },
+{{end}}
+ }
+ examples := []testing.InternalExample{
+{{range .Examples}}
+ {Name: {{printf "%q" .Name}}, F: p.{{.Name}}},
+{{end}}
+ }
+ m := testing.MainStart(match, tests, benchmarks, examples)
+{{with .Main}}
+ p.{{.Name}}(m)
+{{else}}
+ os.Exit(m.Run())
+{{end}}
+}
+
+`))
+
+var examplesOnlyTmpl = template.Must(template.New("examples").Parse(`
+package main
+
+import p {{printf "%q" .Pkg.Pkg.Path}}
+
+func main() {
+{{range .Examples}}
+ p.{{.Name}}()
+{{end}}
+}
+`))
diff --git a/vendor/honnef.co/go/tools/ir/util.go b/vendor/honnef.co/go/tools/ssa/util.go
index df0f8bf97..ddb118460 100644
--- a/vendor/honnef.co/go/tools/ir/util.go
+++ b/vendor/honnef.co/go/tools/ssa/util.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines a number of miscellaneous utility functions.
@@ -52,6 +52,36 @@ func recvType(obj *types.Func) types.Type {
return obj.Type().(*types.Signature).Recv().Type()
}
+// DefaultType returns the default "typed" type for an "untyped" type;
+// it returns the incoming type for all other types. The default type
+// for untyped nil is untyped nil.
+//
+// Exported to ssa/interp.
+//
+// TODO(adonovan): use go/types.DefaultType after 1.8.
+//
+func DefaultType(typ types.Type) types.Type {
+ if t, ok := typ.(*types.Basic); ok {
+ k := t.Kind()
+ switch k {
+ case types.UntypedBool:
+ k = types.Bool
+ case types.UntypedInt:
+ k = types.Int
+ case types.UntypedRune:
+ k = types.Rune
+ case types.UntypedFloat:
+ k = types.Float64
+ case types.UntypedComplex:
+ k = types.Complex128
+ case types.UntypedString:
+ k = types.String
+ }
+ typ = types.Typ[k]
+ }
+ return typ
+}
+
// logStack prints the formatted "start" message to stderr and
// returns a closure that prints the corresponding "end" message.
// Call using 'defer logStack(...)()' to show builder stack on panic.
diff --git a/vendor/honnef.co/go/tools/ir/wrappers.go b/vendor/honnef.co/go/tools/ssa/wrappers.go
index c9d903d23..a4ae71d8c 100644
--- a/vendor/honnef.co/go/tools/ir/wrappers.go
+++ b/vendor/honnef.co/go/tools/ssa/wrappers.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package ir
+package ssa
// This file defines synthesis of Functions that delegate to declared
// methods; they come in three kinds:
@@ -71,35 +71,35 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
Signature: sig,
Synthetic: description,
Prog: prog,
+ pos: obj.Pos(),
}
- fn.initHTML(prog.PrintFunc)
fn.startBody()
- fn.addSpilledParam(recv, nil)
+ fn.addSpilledParam(recv)
createParams(fn, start)
indices := sel.Index()
var v Value = fn.Locals[0] // spilled receiver
if isPointer(sel.Recv()) {
- v = emitLoad(fn, v, nil)
+ v = emitLoad(fn, v)
// For simple indirection wrappers, perform an informative nil-check:
// "value method (T).f called using nil *T pointer"
if len(indices) == 1 && !isPointer(recvType(obj)) {
var c Call
c.Call.Value = &Builtin{
- name: "ir:wrapnilchk",
+ name: "ssa:wrapnilchk",
sig: types.NewSignature(nil,
types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)),
types.NewTuple(anonVar(sel.Recv())), false),
}
c.Call.Args = []Value{
v,
- emitConst(fn, stringConst(deref(sel.Recv()).String())),
- emitConst(fn, stringConst(sel.Obj().Name())),
+ stringConst(deref(sel.Recv()).String()),
+ stringConst(sel.Obj().Name()),
}
c.setType(v.Type())
- v = fn.emit(&c, nil)
+ v = fn.emit(&c)
}
}
@@ -111,7 +111,7 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
// Load) in preference to value extraction (Field possibly
// preceded by Load).
- v = emitImplicitSelections(fn, v, indices[:len(indices)-1], nil)
+ v = emitImplicitSelections(fn, v, indices[:len(indices)-1])
// Invariant: v is a pointer, either
// value of implicit *C field, or
@@ -120,18 +120,18 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
var c Call
if r := recvType(obj); !isInterface(r) { // concrete method
if !isPointer(r) {
- v = emitLoad(fn, v, nil)
+ v = emitLoad(fn, v)
}
c.Call.Value = prog.declaredFunc(obj)
c.Call.Args = append(c.Call.Args, v)
} else {
c.Call.Method = obj
- c.Call.Value = emitLoad(fn, v, nil)
+ c.Call.Value = emitLoad(fn, v)
}
for _, arg := range fn.Params[1:] {
c.Call.Args = append(c.Call.Args, arg)
}
- emitTailCall(fn, &c, nil)
+ emitTailCall(fn, &c)
fn.finishBody()
return fn
}
@@ -143,7 +143,7 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
func createParams(fn *Function, start int) {
tparams := fn.Signature.Params()
for i, n := start, tparams.Len(); i < n; i++ {
- fn.addParamObj(tparams.At(i), nil)
+ fn.addParamObj(tparams.At(i))
}
}
@@ -189,8 +189,8 @@ func makeBound(prog *Program, obj *types.Func) *Function {
Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver
Synthetic: description,
Prog: prog,
+ pos: obj.Pos(),
}
- fn.initHTML(prog.PrintFunc)
fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn}
fn.FreeVars = []*FreeVar{fv}
@@ -208,7 +208,7 @@ func makeBound(prog *Program, obj *types.Func) *Function {
for _, arg := range fn.Params {
c.Call.Args = append(c.Call.Args, arg)
}
- emitTailCall(fn, &c, nil)
+ emitTailCall(fn, &c)
fn.finishBody()
prog.bounds[obj] = fn
diff --git a/vendor/honnef.co/go/tools/ssa/write.go b/vendor/honnef.co/go/tools/ssa/write.go
new file mode 100644
index 000000000..89761a18a
--- /dev/null
+++ b/vendor/honnef.co/go/tools/ssa/write.go
@@ -0,0 +1,5 @@
+package ssa
+
+func NewJump(parent *BasicBlock) *Jump {
+ return &Jump{anInstruction{parent}}
+}
diff --git a/vendor/honnef.co/go/tools/ir/irutil/util.go b/vendor/honnef.co/go/tools/ssautil/ssautil.go
index 028584482..72c3c919d 100644
--- a/vendor/honnef.co/go/tools/ir/irutil/util.go
+++ b/vendor/honnef.co/go/tools/ssautil/ssautil.go
@@ -1,10 +1,10 @@
-package irutil
+package ssautil
import (
- "honnef.co/go/tools/ir"
+ "honnef.co/go/tools/ssa"
)
-func Reachable(from, to *ir.BasicBlock) bool {
+func Reachable(from, to *ssa.BasicBlock) bool {
if from == to {
return true
}
@@ -13,7 +13,7 @@ func Reachable(from, to *ir.BasicBlock) bool {
}
found := false
- Walk(from, func(b *ir.BasicBlock) bool {
+ Walk(from, func(b *ssa.BasicBlock) bool {
if b == to {
found = true
return false
@@ -23,9 +23,9 @@ func Reachable(from, to *ir.BasicBlock) bool {
return found
}
-func Walk(b *ir.BasicBlock, fn func(*ir.BasicBlock) bool) {
- seen := map[*ir.BasicBlock]bool{}
- wl := []*ir.BasicBlock{b}
+func Walk(b *ssa.BasicBlock, fn func(*ssa.BasicBlock) bool) {
+ seen := map[*ssa.BasicBlock]bool{}
+ wl := []*ssa.BasicBlock{b}
for len(wl) > 0 {
b := wl[len(wl)-1]
wl = wl[:len(wl)-1]
@@ -40,18 +40,18 @@ func Walk(b *ir.BasicBlock, fn func(*ir.BasicBlock) bool) {
}
}
-func Vararg(x *ir.Slice) ([]ir.Value, bool) {
- var out []ir.Value
- slice, ok := x.X.(*ir.Alloc)
+func Vararg(x *ssa.Slice) ([]ssa.Value, bool) {
+ var out []ssa.Value
+ slice, ok := x.X.(*ssa.Alloc)
if !ok || slice.Comment != "varargs" {
return nil, false
}
for _, ref := range *slice.Referrers() {
- idx, ok := ref.(*ir.IndexAddr)
+ idx, ok := ref.(*ssa.IndexAddr)
if !ok {
continue
}
- v := (*idx.Referrers())[0].(*ir.Store).Val
+ v := (*idx.Referrers())[0].(*ssa.Store).Val
out = append(out, v)
}
return out, true
diff --git a/vendor/honnef.co/go/tools/staticcheck/CONTRIBUTING.md b/vendor/honnef.co/go/tools/staticcheck/CONTRIBUTING.md
new file mode 100644
index 000000000..b12c7afc7
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/CONTRIBUTING.md
@@ -0,0 +1,15 @@
+# Contributing to staticcheck
+
+## Before filing an issue:
+
+### Are you having trouble building staticcheck?
+
+Check you have the latest version of its dependencies. Run
+```
+go get -u honnef.co/go/tools/staticcheck
+```
+If you still have problems, consider searching for existing issues before filing a new issue.
+
+## Before sending a pull request:
+
+Have you understood the purpose of staticcheck? Make sure to carefully read `README`.
diff --git a/vendor/honnef.co/go/tools/staticcheck/analysis.go b/vendor/honnef.co/go/tools/staticcheck/analysis.go
index 75df1e120..442aebe5a 100644
--- a/vendor/honnef.co/go/tools/staticcheck/analysis.go
+++ b/vendor/honnef.co/go/tools/staticcheck/analysis.go
@@ -1,267 +1,525 @@
package staticcheck
import (
+ "flag"
+
"honnef.co/go/tools/facts"
- "honnef.co/go/tools/internal/passes/buildir"
+ "honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/lint/lintutil"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
)
-func makeCallCheckerAnalyzer(rules map[string]CallCheck, extraReqs ...*analysis.Analyzer) *analysis.Analyzer {
- reqs := []*analysis.Analyzer{buildir.Analyzer, facts.TokenFile}
- reqs = append(reqs, extraReqs...)
- return &analysis.Analyzer{
- Run: callChecker(rules),
- Requires: reqs,
- }
+func newFlagSet() flag.FlagSet {
+ fs := flag.NewFlagSet("", flag.PanicOnError)
+ fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
+ return *fs
}
-var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{
- "SA1000": makeCallCheckerAnalyzer(checkRegexpRules),
+var Analyzers = map[string]*analysis.Analyzer{
+ "SA1000": {
+ Name: "SA1000",
+ Run: callChecker(checkRegexpRules),
+ Doc: Docs["SA1000"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
+ },
"SA1001": {
+ Name: "SA1001",
Run: CheckTemplate,
+ Doc: Docs["SA1001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1002": {
+ Name: "SA1002",
+ Run: callChecker(checkTimeParseRules),
+ Doc: Docs["SA1002"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1003": {
+ Name: "SA1003",
+ Run: callChecker(checkEncodingBinaryRules),
+ Doc: Docs["SA1003"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1002": makeCallCheckerAnalyzer(checkTimeParseRules),
- "SA1003": makeCallCheckerAnalyzer(checkEncodingBinaryRules),
"SA1004": {
+ Name: "SA1004",
Run: CheckTimeSleepConstant,
+ Doc: Docs["SA1004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA1005": {
+ Name: "SA1005",
Run: CheckExec,
+ Doc: Docs["SA1005"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA1006": {
+ Name: "SA1006",
Run: CheckUnsafePrintf,
+ Doc: Docs["SA1006"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1007": {
+ Name: "SA1007",
+ Run: callChecker(checkURLsRules),
+ Doc: Docs["SA1007"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1007": makeCallCheckerAnalyzer(checkURLsRules),
"SA1008": {
+ Name: "SA1008",
Run: CheckCanonicalHeaderKey,
+ Doc: Docs["SA1008"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1010": {
+ Name: "SA1010",
+ Run: callChecker(checkRegexpFindAllRules),
+ Doc: Docs["SA1010"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1011": {
+ Name: "SA1011",
+ Run: callChecker(checkUTF8CutsetRules),
+ Doc: Docs["SA1011"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1010": makeCallCheckerAnalyzer(checkRegexpFindAllRules),
- "SA1011": makeCallCheckerAnalyzer(checkUTF8CutsetRules),
"SA1012": {
+ Name: "SA1012",
Run: CheckNilContext,
+ Doc: Docs["SA1012"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA1013": {
+ Name: "SA1013",
Run: CheckSeeker,
+ Doc: Docs["SA1013"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1014": {
+ Name: "SA1014",
+ Run: callChecker(checkUnmarshalPointerRules),
+ Doc: Docs["SA1014"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1014": makeCallCheckerAnalyzer(checkUnmarshalPointerRules),
"SA1015": {
+ Name: "SA1015",
Run: CheckLeakyTimeTick,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA1015"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA1016": {
+ Name: "SA1016",
Run: CheckUntrappableSignal,
+ Doc: Docs["SA1016"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1017": {
+ Name: "SA1017",
+ Run: callChecker(checkUnbufferedSignalChanRules),
+ Doc: Docs["SA1017"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1018": {
+ Name: "SA1018",
+ Run: callChecker(checkStringsReplaceZeroRules),
+ Doc: Docs["SA1018"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1017": makeCallCheckerAnalyzer(checkUnbufferedSignalChanRules),
- "SA1018": makeCallCheckerAnalyzer(checkStringsReplaceZeroRules),
"SA1019": {
+ Name: "SA1019",
Run: CheckDeprecated,
+ Doc: Docs["SA1019"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated},
+ Flags: newFlagSet(),
+ },
+ "SA1020": {
+ Name: "SA1020",
+ Run: callChecker(checkListenAddressRules),
+ Doc: Docs["SA1020"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1021": {
+ Name: "SA1021",
+ Run: callChecker(checkBytesEqualIPRules),
+ Doc: Docs["SA1021"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1020": makeCallCheckerAnalyzer(checkListenAddressRules),
- "SA1021": makeCallCheckerAnalyzer(checkBytesEqualIPRules),
"SA1023": {
+ Name: "SA1023",
Run: CheckWriterBufferModified,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA1023"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1024": {
+ Name: "SA1024",
+ Run: callChecker(checkUniqueCutsetRules),
+ Doc: Docs["SA1024"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1024": makeCallCheckerAnalyzer(checkUniqueCutsetRules),
"SA1025": {
+ Name: "SA1025",
Run: CheckTimerResetReturnValue,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA1025"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1026": {
+ Name: "SA1026",
+ Run: callChecker(checkUnsupportedMarshal),
+ Doc: Docs["SA1026"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
+ },
+ "SA1027": {
+ Name: "SA1027",
+ Run: callChecker(checkAtomicAlignment),
+ Doc: Docs["SA1027"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA1026": makeCallCheckerAnalyzer(checkUnsupportedMarshal),
- "SA1027": makeCallCheckerAnalyzer(checkAtomicAlignment),
- "SA1028": makeCallCheckerAnalyzer(checkSortSliceRules),
- "SA1029": makeCallCheckerAnalyzer(checkWithValueKeyRules),
"SA2000": {
+ Name: "SA2000",
Run: CheckWaitgroupAdd,
+ Doc: Docs["SA2000"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA2001": {
+ Name: "SA2001",
Run: CheckEmptyCriticalSection,
+ Doc: Docs["SA2001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA2002": {
+ Name: "SA2002",
Run: CheckConcurrentTesting,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA2002"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA2003": {
+ Name: "SA2003",
Run: CheckDeferLock,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA2003"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA3000": {
+ Name: "SA3000",
Run: CheckTestMainExit,
+ Doc: Docs["SA3000"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA3001": {
+ Name: "SA3001",
Run: CheckBenchmarkN,
+ Doc: Docs["SA3001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA4000": {
+ Name: "SA4000",
Run: CheckLhsRhsIdentical,
+ Doc: Docs["SA4000"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.TokenFile, facts.Generated},
+ Flags: newFlagSet(),
},
"SA4001": {
+ Name: "SA4001",
Run: CheckIneffectiveCopy,
+ Doc: Docs["SA4001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA4002": {
+ Name: "SA4002",
+ Run: CheckDiffSizeComparison,
+ Doc: Docs["SA4002"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
"SA4003": {
+ Name: "SA4003",
Run: CheckExtremeComparison,
+ Doc: Docs["SA4003"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA4004": {
+ Name: "SA4004",
Run: CheckIneffectiveLoop,
+ Doc: Docs["SA4004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA4006": {
+ Name: "SA4006",
Run: CheckUnreadVariableValues,
- Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Generated},
+ Doc: Docs["SA4006"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"SA4008": {
+ Name: "SA4008",
Run: CheckLoopCondition,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA4008"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA4009": {
+ Name: "SA4009",
Run: CheckArgOverwritten,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA4009"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA4010": {
+ Name: "SA4010",
Run: CheckIneffectiveAppend,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA4010"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA4011": {
+ Name: "SA4011",
Run: CheckScopedBreak,
+ Doc: Docs["SA4011"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA4012": {
+ Name: "SA4012",
Run: CheckNaNComparison,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA4012"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA4013": {
+ Name: "SA4013",
Run: CheckDoubleNegation,
+ Doc: Docs["SA4013"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA4014": {
+ Name: "SA4014",
Run: CheckRepeatedIfElse,
+ Doc: Docs["SA4014"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA4015": {
+ Name: "SA4015",
+ Run: callChecker(checkMathIntRules),
+ Doc: Docs["SA4015"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA4015": makeCallCheckerAnalyzer(checkMathIntRules),
"SA4016": {
+ Name: "SA4016",
Run: CheckSillyBitwiseOps,
- Requires: []*analysis.Analyzer{inspect.Analyzer, facts.TokenFile},
+ Doc: Docs["SA4016"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile},
+ Flags: newFlagSet(),
},
"SA4017": {
+ Name: "SA4017",
Run: CheckPureFunctions,
- Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Purity},
+ Doc: Docs["SA4017"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Purity},
+ Flags: newFlagSet(),
},
"SA4018": {
+ Name: "SA4018",
Run: CheckSelfAssignment,
- Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile, facts.Purity},
+ Doc: Docs["SA4018"].String(),
+ Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
+ Flags: newFlagSet(),
},
"SA4019": {
+ Name: "SA4019",
Run: CheckDuplicateBuildConstraints,
+ Doc: Docs["SA4019"].String(),
Requires: []*analysis.Analyzer{facts.Generated},
+ Flags: newFlagSet(),
},
"SA4020": {
+ Name: "SA4020",
Run: CheckUnreachableTypeCases,
+ Doc: Docs["SA4020"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA4021": {
+ Name: "SA4021",
Run: CheckSingleArgAppend,
+ Doc: Docs["SA4021"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
+ Flags: newFlagSet(),
},
"SA5000": {
+ Name: "SA5000",
Run: CheckNilMaps,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA5000"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA5001": {
+ Name: "SA5001",
Run: CheckEarlyDefer,
+ Doc: Docs["SA5001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA5002": {
+ Name: "SA5002",
Run: CheckInfiniteEmptyLoop,
+ Doc: Docs["SA5002"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA5003": {
+ Name: "SA5003",
Run: CheckDeferInInfiniteLoop,
+ Doc: Docs["SA5003"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA5004": {
+ Name: "SA5004",
Run: CheckLoopEmptyDefault,
+ Doc: Docs["SA5004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA5005": {
+ Name: "SA5005",
Run: CheckCyclicFinalizer,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA5005"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA5007": {
+ Name: "SA5007",
Run: CheckInfiniteRecursion,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA5007"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA5008": {
+ Name: "SA5008",
Run: CheckStructTags,
+ Doc: Docs["SA5008"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
- "SA5009": makeCallCheckerAnalyzer(checkPrintfRules),
- "SA5010": {
- Run: CheckImpossibleTypeAssertion,
- Requires: []*analysis.Analyzer{buildir.Analyzer, facts.TokenFile},
- },
- "SA5011": {
- Run: CheckMaybeNil,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ "SA5009": {
+ Name: "SA5009",
+ Run: callChecker(checkPrintfRules),
+ Doc: Docs["SA5009"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA6000": makeCallCheckerAnalyzer(checkRegexpMatchLoopRules),
+ "SA6000": {
+ Name: "SA6000",
+ Run: callChecker(checkRegexpMatchLoopRules),
+ Doc: Docs["SA6000"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
+ },
"SA6001": {
+ Name: "SA6001",
Run: CheckMapBytesKey,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA6001"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
+ },
+ "SA6002": {
+ Name: "SA6002",
+ Run: callChecker(checkSyncPoolValueRules),
+ Doc: Docs["SA6002"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
+ Flags: newFlagSet(),
},
- "SA6002": makeCallCheckerAnalyzer(checkSyncPoolValueRules),
"SA6003": {
+ Name: "SA6003",
Run: CheckRangeStringRunes,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["SA6003"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"SA6005": {
+ Name: "SA6005",
Run: CheckToLowerToUpperComparison,
+ Doc: Docs["SA6005"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA9001": {
+ Name: "SA9001",
Run: CheckDubiousDeferInChannelRangeLoop,
+ Doc: Docs["SA9001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA9002": {
+ Name: "SA9002",
Run: CheckNonOctalFileMode,
+ Doc: Docs["SA9002"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
"SA9003": {
+ Name: "SA9003",
Run: CheckEmptyBranch,
- Requires: []*analysis.Analyzer{buildir.Analyzer, facts.TokenFile, facts.Generated},
+ Doc: Docs["SA9003"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile, facts.Generated},
+ Flags: newFlagSet(),
},
"SA9004": {
+ Name: "SA9004",
Run: CheckMissingEnumTypesInDeclaration,
+ Doc: Docs["SA9004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
// Filtering generated code because it may include empty structs generated from data models.
- "SA9005": makeCallCheckerAnalyzer(checkNoopMarshal, facts.Generated),
-
- "SA4022": {
- Run: CheckAddressIsNil,
- Requires: []*analysis.Analyzer{inspect.Analyzer},
+ "SA9005": {
+ Name: "SA9005",
+ Run: callChecker(checkNoopMarshal),
+ Doc: Docs["SA9005"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, facts.Generated, facts.TokenFile},
+ Flags: newFlagSet(),
},
-})
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/buildtag.go b/vendor/honnef.co/go/tools/staticcheck/buildtag.go
index 58e1e4ae1..888d3e9dc 100644
--- a/vendor/honnef.co/go/tools/staticcheck/buildtag.go
+++ b/vendor/honnef.co/go/tools/staticcheck/buildtag.go
@@ -4,12 +4,12 @@ import (
"go/ast"
"strings"
- "honnef.co/go/tools/code"
+ . "honnef.co/go/tools/lint/lintdsl"
)
func buildTags(f *ast.File) [][]string {
var out [][]string
- for _, line := range strings.Split(code.Preamble(f), "\n") {
+ for _, line := range strings.Split(Preamble(f), "\n") {
if !strings.HasPrefix(line, "+build ") {
continue
}
diff --git a/vendor/honnef.co/go/tools/staticcheck/doc.go b/vendor/honnef.co/go/tools/staticcheck/doc.go
index 319ea2707..4a87d4a24 100644
--- a/vendor/honnef.co/go/tools/staticcheck/doc.go
+++ b/vendor/honnef.co/go/tools/staticcheck/doc.go
@@ -3,22 +3,22 @@ package staticcheck
import "honnef.co/go/tools/lint"
var Docs = map[string]*lint.Documentation{
- "SA1000": {
+ "SA1000": &lint.Documentation{
Title: `Invalid regular expression`,
Since: "2017.1",
},
- "SA1001": {
+ "SA1001": &lint.Documentation{
Title: `Invalid template`,
Since: "2017.1",
},
- "SA1002": {
+ "SA1002": &lint.Documentation{
Title: `Invalid format in time.Parse`,
Since: "2017.1",
},
- "SA1003": {
+ "SA1003": &lint.Documentation{
Title: `Unsupported argument to functions in encoding/binary`,
Text: `The encoding/binary package can only serialize types with known sizes.
This precludes the use of the int and uint types, as their sizes
@@ -29,7 +29,7 @@ Before Go 1.8, bool wasn't supported, either.`,
Since: "2017.1",
},
- "SA1004": {
+ "SA1004": &lint.Documentation{
Title: `Suspiciously small untyped constant in time.Sleep`,
Text: `The time.Sleep function takes a time.Duration as its only argument.
Durations are expressed in nanoseconds. Thus, calling time.Sleep(1)
@@ -46,7 +46,7 @@ for some amount of nanoseconds.`,
Since: "2017.1",
},
- "SA1005": {
+ "SA1005": &lint.Documentation{
Title: `Invalid first argument to exec.Command`,
Text: `os/exec runs programs directly (using variants of the fork and exec
system calls on Unix systems). This shouldn't be confused with running
@@ -69,7 +69,7 @@ Windows, will have a /bin/sh program:
Since: "2017.1",
},
- "SA1006": {
+ "SA1006": &lint.Documentation{
Title: `Printf with dynamic first argument and no further arguments`,
Text: `Using fmt.Printf with a dynamic first argument can lead to unexpected
output. The first argument is a format string, where certain character
@@ -93,12 +93,12 @@ and pass the string as an argument.`,
Since: "2017.1",
},
- "SA1007": {
+ "SA1007": &lint.Documentation{
Title: `Invalid URL in net/url.Parse`,
Since: "2017.1",
},
- "SA1008": {
+ "SA1008": &lint.Documentation{
Title: `Non-canonical key in http.Header map`,
Text: `Keys in http.Header maps are canonical, meaning they follow a specific
combination of uppercase and lowercase letters. Methods such as
@@ -123,39 +123,39 @@ http.CanonicalHeaderKey.`,
Since: "2017.1",
},
- "SA1010": {
+ "SA1010": &lint.Documentation{
Title: `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results`,
Text: `If n >= 0, the function returns at most n matches/submatches. To
return all results, specify a negative number.`,
Since: "2017.1",
},
- "SA1011": {
+ "SA1011": &lint.Documentation{
Title: `Various methods in the strings package expect valid UTF-8, but invalid input is provided`,
Since: "2017.1",
},
- "SA1012": {
+ "SA1012": &lint.Documentation{
Title: `A nil context.Context is being passed to a function, consider using context.TODO instead`,
Since: "2017.1",
},
- "SA1013": {
+ "SA1013": &lint.Documentation{
Title: `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second`,
Since: "2017.1",
},
- "SA1014": {
+ "SA1014": &lint.Documentation{
Title: `Non-pointer value passed to Unmarshal or Decode`,
Since: "2017.1",
},
- "SA1015": {
+ "SA1015": &lint.Documentation{
Title: `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions`,
Since: "2017.1",
},
- "SA1016": {
+ "SA1016": &lint.Documentation{
Title: `Trapping a signal that cannot be trapped`,
Text: `Not all signals can be intercepted by a process. Speficially, on
UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are
@@ -164,7 +164,7 @@ kernel. It is therefore pointless to try and handle these signals.`,
Since: "2017.1",
},
- "SA1017": {
+ "SA1017": &lint.Documentation{
Title: `Channels used with os/signal.Notify should be buffered`,
Text: `The os/signal package uses non-blocking channel sends when delivering
signals. If the receiving end of the channel isn't ready and the
@@ -175,24 +175,24 @@ signal value, a buffer of size 1 is sufficient.`,
Since: "2017.1",
},
- "SA1018": {
+ "SA1018": &lint.Documentation{
Title: `strings.Replace called with n == 0, which does nothing`,
Text: `With n == 0, zero instances will be replaced. To replace all
instances, use a negative number, or use strings.ReplaceAll.`,
Since: "2017.1",
},
- "SA1019": {
+ "SA1019": &lint.Documentation{
Title: `Using a deprecated function, variable, constant or field`,
Since: "2017.1",
},
- "SA1020": {
+ "SA1020": &lint.Documentation{
Title: `Using an invalid host:port pair with a net.Listen-related function`,
Since: "2017.1",
},
- "SA1021": {
+ "SA1021": &lint.Documentation{
Title: `Using bytes.Equal to compare two net.IP`,
Text: `A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The
length of the slice for an IPv4 address, however, can be either 4 or
@@ -202,13 +202,13 @@ be used, as it takes both representations into account.`,
Since: "2017.1",
},
- "SA1023": {
+ "SA1023": &lint.Documentation{
Title: `Modifying the buffer in an io.Writer implementation`,
Text: `Write must not modify the slice data, even temporarily.`,
Since: "2017.1",
},
- "SA1024": {
+ "SA1024": &lint.Documentation{
Title: `A string cutset contains duplicate characters`,
Text: `The strings.TrimLeft and strings.TrimRight functions take cutsets, not
prefixes. A cutset is treated as a set of characters to remove from a
@@ -223,17 +223,17 @@ In order to remove one string from another, use strings.TrimPrefix instead.`,
Since: "2017.1",
},
- "SA1025": {
+ "SA1025": &lint.Documentation{
Title: `It is not possible to use (*time.Timer).Reset's return value correctly`,
Since: "2019.1",
},
- "SA1026": {
+ "SA1026": &lint.Documentation{
Title: `Cannot marshal channels or functions`,
Since: "2019.2",
},
- "SA1027": {
+ "SA1027": &lint.Documentation{
Title: `Atomic access to 64-bit variable must be 64-bit aligned`,
Text: `On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to
arrange for 64-bit alignment of 64-bit words accessed atomically. The
@@ -245,32 +245,12 @@ in a struct.`,
Since: "2019.2",
},
- "SA1028": {
- Title: `sort.Slice can only be used on slices`,
- Text: `The first argument of sort.Slice must be a slice.`,
- Since: "Unreleased",
- },
-
- "SA1029": {
- Title: `Inappropriate key in call to context.WithValue`,
- Text: `The provided key must be comparable and should not be
-of type string or any other built-in type to avoid collisions between
-packages using context. Users of WithValue should define their own
-types for keys.
-
-To avoid allocating when assigning to an interface{},
-context keys often have concrete type struct{}. Alternatively,
-exported context key variables' static type should be a pointer or
-interface.`,
- Since: "Unreleased",
- },
-
- "SA2000": {
+ "SA2000": &lint.Documentation{
Title: `sync.WaitGroup.Add called inside the goroutine, leading to a race condition`,
Since: "2017.1",
},
- "SA2001": {
+ "SA2001": &lint.Documentation{
Title: `Empty critical section, did you mean to defer the unlock?`,
Text: `Empty critical sections of the kind
@@ -291,17 +271,17 @@ rare false positive.`,
Since: "2017.1",
},
- "SA2002": {
+ "SA2002": &lint.Documentation{
Title: `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed`,
Since: "2017.1",
},
- "SA2003": {
+ "SA2003": &lint.Documentation{
Title: `Deferred Lock right after locking, likely meant to defer Unlock instead`,
Since: "2017.1",
},
- "SA3000": {
+ "SA3000": &lint.Documentation{
Title: `TestMain doesn't call os.Exit, hiding test failures`,
Text: `Test executables (and in turn 'go test') exit with a non-zero status
code if any tests failed. When specifying your own TestMain function,
@@ -312,7 +292,7 @@ os.Exit(m.Run()).`,
Since: "2017.1",
},
- "SA3001": {
+ "SA3001": &lint.Documentation{
Title: `Assigning to b.N in benchmarks distorts the results`,
Text: `The testing package dynamically sets b.N to improve the reliability of
benchmarks and uses it in computations to determine the duration of a
@@ -321,102 +301,102 @@ falsify results.`,
Since: "2017.1",
},
- "SA4000": {
+ "SA4000": &lint.Documentation{
Title: `Boolean expression has identical expressions on both sides`,
Since: "2017.1",
},
- "SA4001": {
+ "SA4001": &lint.Documentation{
Title: `&*x gets simplified to x, it does not copy x`,
Since: "2017.1",
},
- "SA4002": {
+ "SA4002": &lint.Documentation{
Title: `Comparing strings with known different sizes has predictable results`,
Since: "2017.1",
},
- "SA4003": {
+ "SA4003": &lint.Documentation{
Title: `Comparing unsigned values against negative values is pointless`,
Since: "2017.1",
},
- "SA4004": {
+ "SA4004": &lint.Documentation{
Title: `The loop exits unconditionally after one iteration`,
Since: "2017.1",
},
- "SA4005": {
+ "SA4005": &lint.Documentation{
Title: `Field assignment that will never be observed. Did you mean to use a pointer receiver?`,
Since: "2017.1",
},
- "SA4006": {
+ "SA4006": &lint.Documentation{
Title: `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?`,
Since: "2017.1",
},
- "SA4008": {
+ "SA4008": &lint.Documentation{
Title: `The variable in the loop condition never changes, are you incrementing the wrong variable?`,
Since: "2017.1",
},
- "SA4009": {
+ "SA4009": &lint.Documentation{
Title: `A function argument is overwritten before its first use`,
Since: "2017.1",
},
- "SA4010": {
+ "SA4010": &lint.Documentation{
Title: `The result of append will never be observed anywhere`,
Since: "2017.1",
},
- "SA4011": {
+ "SA4011": &lint.Documentation{
Title: `Break statement with no effect. Did you mean to break out of an outer loop?`,
Since: "2017.1",
},
- "SA4012": {
+ "SA4012": &lint.Documentation{
Title: `Comparing a value against NaN even though no value is equal to NaN`,
Since: "2017.1",
},
- "SA4013": {
+ "SA4013": &lint.Documentation{
Title: `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo.`,
Since: "2017.1",
},
- "SA4014": {
+ "SA4014": &lint.Documentation{
Title: `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either`,
Since: "2017.1",
},
- "SA4015": {
+ "SA4015": &lint.Documentation{
Title: `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful`,
Since: "2017.1",
},
- "SA4016": {
+ "SA4016": &lint.Documentation{
Title: `Certain bitwise operations, such as x ^ 0, do not do anything useful`,
Since: "2017.1",
},
- "SA4017": {
+ "SA4017": &lint.Documentation{
Title: `A pure function's return value is discarded, making the call pointless`,
Since: "2017.1",
},
- "SA4018": {
+ "SA4018": &lint.Documentation{
Title: `Self-assignment of variables`,
Since: "2017.1",
},
- "SA4019": {
+ "SA4019": &lint.Documentation{
Title: `Multiple, identical build constraints in the same file`,
Since: "2017.1",
},
- "SA4020": {
+ "SA4020": &lint.Documentation{
Title: `Unreachable case clause in a type switch`,
Text: `In a type switch like the following
@@ -487,33 +467,27 @@ and therefore doSomething()'s return value implements both.`,
Since: "2019.2",
},
- "SA4021": {
+ "SA4021": &lint.Documentation{
Title: `x = append(y) is equivalent to x = y`,
Since: "2019.2",
},
- "SA4022": {
- Title: `Comparing the address of a variable against nil`,
- Text: `Code such as 'if &x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer.`,
- Since: "Unreleased",
- },
-
- "SA5000": {
+ "SA5000": &lint.Documentation{
Title: `Assignment to nil map`,
Since: "2017.1",
},
- "SA5001": {
+ "SA5001": &lint.Documentation{
Title: `Defering Close before checking for a possible error`,
Since: "2017.1",
},
- "SA5002": {
+ "SA5002": &lint.Documentation{
Title: `The empty for loop (for {}) spins and can block the scheduler`,
Since: "2017.1",
},
- "SA5003": {
+ "SA5003": &lint.Documentation{
Title: `Defers in infinite loops will never execute`,
Text: `Defers are scoped to the surrounding function, not the surrounding
block. In a function that never returns, i.e. one containing an
@@ -521,12 +495,12 @@ infinite loop, defers will never execute.`,
Since: "2017.1",
},
- "SA5004": {
+ "SA5004": &lint.Documentation{
Title: `for { select { ... with an empty default branch spins`,
Since: "2017.1",
},
- "SA5005": {
+ "SA5005": &lint.Documentation{
Title: `The finalizer references the finalized object, preventing garbage collection`,
Text: `A finalizer is a function associated with an object that runs when the
garbage collector is ready to collect said object, that is when the
@@ -542,12 +516,12 @@ to zero before the object is being passed to the finalizer.`,
Since: "2017.1",
},
- "SA5006": {
+ "SA5006": &lint.Documentation{
Title: `Slice index out of bounds`,
Since: "2017.1",
},
- "SA5007": {
+ "SA5007": &lint.Documentation{
Title: `Infinite recursive call`,
Text: `A function that calls itself recursively needs to have an exit
condition. Otherwise it will recurse forever, until the system runs
@@ -561,74 +535,22 @@ should be used instead.`,
Since: "2017.1",
},
- "SA5008": {
+ "SA5008": &lint.Documentation{
Title: `Invalid struct tag`,
Since: "2019.2",
},
- "SA5009": {
+ "SA5009": &lint.Documentation{
Title: `Invalid Printf call`,
Since: "2019.2",
},
- "SA5010": {
- Title: `Impossible type assertion`,
-
- Text: `Some type assertions can be statically proven to be
-impossible. This is the case when the method sets of both
-arguments of the type assertion conflict with each other, for
-example by containing the same method with different
-signatures.
-
-The Go compiler already applies this check when asserting from an
-interface value to a concrete type. If the concrete type misses
-methods from the interface, or if function signatures don't match,
-then the type assertion can never succeed.
-
-This check applies the same logic when asserting from one interface to
-another. If both interface types contain the same method but with
-different signatures, then the type assertion can never succeed,
-either. `,
-
- Since: "Unreleased",
- },
-
- "SA5011": {
- Title: `Possible nil pointer dereference`,
-
- Text: `A pointer is being dereferenced unconditionally, while
-also being checked against nil in another place. This suggests that
-the pointer may be nil and dereferencing it may panic. This is
-commonly a result of improperly ordered code or missing return
-statements. Consider the following examples:
-
- func fn(x *int) {
- fmt.Println(*x)
-
- // This nil check is equally important for the previous dereference
- if x != nil {
- foo(*x)
- }
- }
-
- func TestFoo(t *testing.T) {
- x := compute()
- if x == nil {
- t.Errorf("nil pointer received")
- }
-
- // t.Errorf does not abort the test, so if x is nil, the next line will panic.
- foo(*x)
- }`,
- Since: "Unreleased",
- },
-
- "SA6000": {
+ "SA6000": &lint.Documentation{
Title: `Using regexp.Match or related in a loop, should use regexp.Compile`,
Since: "2017.1",
},
- "SA6001": {
+ "SA6001": &lint.Documentation{
Title: `Missing an optimization opportunity when indexing maps by byte slices`,
Text: `Map keys must be comparable, which precludes the use of byte slices.
@@ -658,7 +580,7 @@ f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.`,
Since: "2017.1",
},
- "SA6002": {
+ "SA6002": &lint.Documentation{
Title: `Storing non-pointer values in sync.Pool allocates memory`,
Text: `A sync.Pool is used to avoid unnecessary allocations and reduce the
amount of work the garbage collector has to do.
@@ -675,7 +597,7 @@ that discuss this problem.`,
Since: "2017.1",
},
- "SA6003": {
+ "SA6003": &lint.Documentation{
Title: `Converting a string to a slice of runes before ranging over it`,
Text: `You may want to loop over the runes in a string. Instead of converting
the string to a slice of runes and looping over that, you can loop
@@ -697,7 +619,7 @@ the slice of runes.`,
Since: "2017.1",
},
- "SA6005": {
+ "SA6005": &lint.Documentation{
Title: `Inefficient string comparison with strings.ToLower or strings.ToUpper`,
Text: `Converting two strings to the same case and comparing them like so
@@ -721,22 +643,22 @@ https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/`,
Since: "2019.2",
},
- "SA9001": {
+ "SA9001": &lint.Documentation{
Title: `Defers in range loops may not run when you expect them to`,
Since: "2017.1",
},
- "SA9002": {
+ "SA9002": &lint.Documentation{
Title: `Using a non-octal os.FileMode that looks like it was meant to be in octal.`,
Since: "2017.1",
},
- "SA9003": {
+ "SA9003": &lint.Documentation{
Title: `Empty body in an if or else branch`,
Since: "2017.1",
},
- "SA9004": {
+ "SA9004": &lint.Documentation{
Title: `Only the first constant has an explicit type`,
Text: `In a constant declaration such as the following:
@@ -828,7 +750,7 @@ as EnumSecond has no explicit type, and thus defaults to int.`,
Since: "2019.1",
},
- "SA9005": {
+ "SA9005": &lint.Documentation{
Title: `Trying to marshal a struct with no public fields nor custom marshaling`,
Text: `The encoding/json and encoding/xml packages only operate on exported
fields in structs, not unexported ones. It is usually an error to try
diff --git a/vendor/honnef.co/go/tools/staticcheck/knowledge.go b/vendor/honnef.co/go/tools/staticcheck/knowledge.go
new file mode 100644
index 000000000..4c12b866a
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/knowledge.go
@@ -0,0 +1,25 @@
+package staticcheck
+
+import (
+ "reflect"
+
+ "golang.org/x/tools/go/analysis"
+ "honnef.co/go/tools/internal/passes/buildssa"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/staticcheck/vrp"
+)
+
+var valueRangesAnalyzer = &analysis.Analyzer{
+ Name: "vrp",
+ Doc: "calculate value ranges of functions",
+ Run: func(pass *analysis.Pass) (interface{}, error) {
+ m := map[*ssa.Function]vrp.Ranges{}
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ vr := vrp.BuildGraph(ssafn).Solve()
+ m[ssafn] = vr
+ }
+ return m, nil
+ },
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ ResultType: reflect.TypeOf(map[*ssa.Function]vrp.Ranges{}),
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go
index 3f174b8bd..1558cbf94 100644
--- a/vendor/honnef.co/go/tools/staticcheck/lint.go
+++ b/vendor/honnef.co/go/tools/staticcheck/lint.go
@@ -1,5 +1,5 @@
// Package staticcheck contains a linter for Go source code.
-package staticcheck
+package staticcheck // import "honnef.co/go/tools/staticcheck"
import (
"fmt"
@@ -19,20 +19,17 @@ import (
"unicode"
. "honnef.co/go/tools/arg"
- "honnef.co/go/tools/code"
"honnef.co/go/tools/deprecated"
- "honnef.co/go/tools/edit"
"honnef.co/go/tools/facts"
"honnef.co/go/tools/functions"
- "honnef.co/go/tools/internal/passes/buildir"
+ "honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/internal/sharedcheck"
- "honnef.co/go/tools/ir"
- "honnef.co/go/tools/ir/irutil"
"honnef.co/go/tools/lint"
. "honnef.co/go/tools/lint/lintdsl"
- "honnef.co/go/tools/pattern"
"honnef.co/go/tools/printf"
- "honnef.co/go/tools/report"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/ssautil"
+ "honnef.co/go/tools/staticcheck/vrp"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
@@ -41,27 +38,6 @@ import (
"golang.org/x/tools/go/types/typeutil"
)
-func checkSortSlice(call *Call) {
- c := call.Instr.Common().StaticCallee()
- arg := call.Args[0]
-
- T := arg.Value.Value.Type().Underlying()
- switch T.(type) {
- case *types.Interface:
- // we don't know.
- // TODO(dh): if the value is a phi node we can look at its edges
- if k, ok := arg.Value.Value.(*ir.Const); ok && k.Value == nil {
- // literal nil, e.g. sort.Sort(nil, ...)
- arg.Invalid(fmt.Sprintf("cannot call %s on nil literal", c))
- }
- case *types.Slice:
- // this is fine
- default:
- // this is not fine
- arg.Invalid(fmt.Sprintf("%s must only be called on slices, was called on %s", c, T))
- }
-}
-
func validRegexp(call *Call) {
arg := call.Args[0]
err := ValidateRegexp(arg.Value)
@@ -100,7 +76,7 @@ func unmarshalPointer(name string, arg int) CallCheck {
func pointlessIntMath(call *Call) {
if ConvertedFromInt(call.Args[0].Value) {
- call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", code.CallName(call.Instr.Common())))
+ call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", CallName(call.Instr.Common())))
}
}
@@ -154,7 +130,7 @@ var (
"(*sync.Pool).Put": func(call *Call) {
arg := call.Args[Arg("(*sync.Pool).Put.x")]
typ := arg.Value.Value.Type()
- if !code.IsPointerLike(typ) {
+ if !IsPointerLike(typ) {
arg.Invalid("argument should be pointer-like to avoid allocations")
}
},
@@ -275,42 +251,31 @@ var (
// TODO(dh): detect printf wrappers
checkPrintfRules = map[string]CallCheck{
- "fmt.Errorf": func(call *Call) { checkPrintfCall(call, 0, 1) },
- "fmt.Printf": func(call *Call) { checkPrintfCall(call, 0, 1) },
- "fmt.Sprintf": func(call *Call) { checkPrintfCall(call, 0, 1) },
- "fmt.Fprintf": func(call *Call) { checkPrintfCall(call, 1, 2) },
- "golang.org/x/xerrors.Errorf": func(call *Call) { checkPrintfCall(call, 0, 1) },
- }
-
- checkSortSliceRules = map[string]CallCheck{
- "sort.Slice": checkSortSlice,
- "sort.SliceIsSorted": checkSortSlice,
- "sort.SliceStable": checkSortSlice,
- }
-
- checkWithValueKeyRules = map[string]CallCheck{
- "context.WithValue": checkWithValueKey,
+ "fmt.Errorf": func(call *Call) { checkPrintfCall(call, 0, 1) },
+ "fmt.Printf": func(call *Call) { checkPrintfCall(call, 0, 1) },
+ "fmt.Sprintf": func(call *Call) { checkPrintfCall(call, 0, 1) },
+ "fmt.Fprintf": func(call *Call) { checkPrintfCall(call, 1, 2) },
}
)
func checkPrintfCall(call *Call, fIdx, vIdx int) {
f := call.Args[fIdx]
- var args []ir.Value
+ var args []ssa.Value
switch v := call.Args[vIdx].Value.Value.(type) {
- case *ir.Slice:
+ case *ssa.Slice:
var ok bool
- args, ok = irutil.Vararg(v)
+ args, ok = ssautil.Vararg(v)
if !ok {
// We don't know what the actual arguments to the function are
return
}
- case *ir.Const:
+ case *ssa.Const:
// nil, i.e. no arguments
default:
// We don't know what the actual arguments to the function are
return
}
- checkPrintfCallImpl(f, f.Value.Value, args)
+ checkPrintfCallImpl(call, f.Value.Value, args)
}
type verbFlag int
@@ -321,9 +286,6 @@ const (
isFP
isString
isPointer
- // Verbs that accept "pseudo pointers" will sometimes dereference
- // non-nil pointers. For example, %x on a non-nil *struct will print the
- // individual fields, but on a nil pointer it will print the address.
isPseudoPointer
isSlice
isAny
@@ -341,7 +303,6 @@ var verbs = [...]verbFlag{
'g': isFP,
'G': isFP,
'o': isPseudoPointer | isInt,
- 'O': isPseudoPointer | isInt,
'p': isSlice | isPointer | noRecurse,
'q': isInt | isString,
's': isString,
@@ -349,11 +310,11 @@ var verbs = [...]verbFlag{
'T': isAny,
'U': isInt,
'v': isAny,
- 'X': isPseudoPointer | isInt | isFP | isString,
- 'x': isPseudoPointer | isInt | isFP | isString,
+ 'X': isPseudoPointer | isInt | isString,
+ 'x': isPseudoPointer | isInt | isString,
}
-func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
+func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) {
var msCache *typeutil.MethodSetCache
if f.Parent() != nil {
msCache = &f.Parent().Prog.MethodSets
@@ -368,7 +329,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
if verbs[verb]&isSlice != 0 {
return []types.Type{T}, false
}
- if verbs[verb]&isString != 0 && code.IsType(T.Elem().Underlying(), "byte") {
+ if verbs[verb]&isString != 0 && IsType(T.Elem().Underlying(), "byte") {
return []types.Type{T}, false
}
return []types.Type{T.Elem()}, true
@@ -410,7 +371,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
if sig.Results().Len() != 1 {
return false
}
- if !code.IsType(sig.Results().At(0).Type(), "string") {
+ if !IsType(sig.Results().At(0).Type(), "string") {
return false
}
return true
@@ -432,7 +393,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
if sig.Results().Len() != 1 {
return false
}
- if !code.IsType(sig.Results().At(0).Type(), "string") {
+ if !IsType(sig.Results().At(0).Type(), "string") {
return false
}
return true
@@ -496,10 +457,10 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
T = T.Underlying()
if flags&(isPointer|isPseudoPointer) == 0 && top {
- T = code.Dereference(T)
+ T = Dereference(T)
}
if flags&isPseudoPointer != 0 && top {
- t := code.Dereference(T)
+ t := Dereference(T)
if _, ok := t.Underlying().(*types.Struct); ok {
T = t
}
@@ -528,11 +489,11 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
return true
}
- if flags&isString != 0 && (code.IsType(T, "[]byte") || isStringer(T, ms) || isError(T, ms)) {
+ if flags&isString != 0 && (IsType(T, "[]byte") || isStringer(T, ms) || isError(T, ms)) {
return true
}
- if flags&isPointer != 0 && code.IsPointerLike(T) {
+ if flags&isPointer != 0 && IsPointerLike(T) {
return true
}
if flags&isPseudoPointer != 0 {
@@ -543,29 +504,10 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
}
if _, ok := U.Elem().Underlying().(*types.Struct); !ok {
- // TODO(dh): can this condition ever be false? For
- // *T, if T is a struct, we'll already have
- // dereferenced it, meaning the *types.Pointer
- // branch couldn't have been taken. For T that
- // aren't structs, this condition will always
- // evaluate to true.
return true
}
case *types.Chan, *types.Signature:
- // Channels and functions are always treated as
- // pointers and never recursed into.
return true
- case *types.Basic:
- if U.Kind() == types.UnsafePointer {
- return true
- }
- case *types.Interface:
- // we will already have bailed if the type is an
- // interface.
- panic("unreachable")
- default:
- // other pointer-like types, such as maps or slices,
- // will be printed element-wise.
}
}
@@ -592,13 +534,13 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
return true
}
- k, ok := f.(*ir.Const)
+ k, ok := f.(*ssa.Const)
if !ok {
return
}
actions, err := printf.Parse(constant.StringVal(k.Value))
if err != nil {
- carg.Invalid("couldn't parse format string")
+ call.Invalid("couldn't parse format string")
return
}
@@ -617,18 +559,18 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
ptr = star.Index + 1
}
if idx == 0 {
- carg.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw))
+ call.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw))
return false
}
if idx > len(args) {
- carg.Invalid(
+ call.Invalid(
fmt.Sprintf("Printf format %s reads arg #%d, but call has only %d args",
verb.Raw, idx, len(args)))
return false
}
- if arg, ok := args[idx-1].(*ir.MakeInterface); ok {
+ if arg, ok := args[idx-1].(*ssa.MakeInterface); ok {
if !isInfo(arg.X.Type(), types.IsInteger) {
- carg.Invalid(fmt.Sprintf("Printf format %s reads non-int arg #%d as argument of *", verb.Raw, idx))
+ call.Invalid(fmt.Sprintf("Printf format %s reads non-int arg #%d as argument of *", verb.Raw, idx))
}
}
}
@@ -654,19 +596,19 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
off = verb.Value
}
if off > len(args) {
- carg.Invalid(
+ call.Invalid(
fmt.Sprintf("Printf format %s reads arg #%d, but call has only %d args",
verb.Raw, off, len(args)))
return
} else if verb.Value == 0 && verb.Letter != '%' {
- carg.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw))
+ call.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw))
return
} else if off != 0 {
- arg, ok := args[off-1].(*ir.MakeInterface)
+ arg, ok := args[off-1].(*ssa.MakeInterface)
if ok {
if !checkType(verb.Letter, arg.X.Type(), true) {
- carg.Invalid(fmt.Sprintf("Printf format %s has arg #%d of wrong type %s",
- verb.Raw, ptr, args[ptr-1].(*ir.MakeInterface).X.Type()))
+ call.Invalid(fmt.Sprintf("Printf format %s has arg #%d of wrong type %s",
+ verb.Raw, ptr, args[ptr-1].(*ssa.MakeInterface).X.Type()))
return
}
}
@@ -684,7 +626,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) {
}
if !hasExplicit && ptr <= len(args) {
- carg.Invalid(fmt.Sprintf("Printf call needs %d args but has %d args", ptr-1, len(args)))
+ call.Invalid(fmt.Sprintf("Printf call needs %d args but has %d args", ptr-1, len(args)))
}
}
@@ -694,7 +636,7 @@ func checkAtomicAlignmentImpl(call *Call) {
// Not running on a 32-bit platform
return
}
- v, ok := call.Args[0].Value.Value.(*ir.FieldAddr)
+ v, ok := call.Args[0].Value.Value.(*ssa.FieldAddr)
if !ok {
// TODO(dh): also check indexing into arrays and slices
return
@@ -709,26 +651,26 @@ func checkAtomicAlignmentImpl(call *Call) {
if off%8 != 0 {
msg := fmt.Sprintf("address of non 64-bit aligned field %s passed to %s",
T.Field(v.Field).Name(),
- code.CallName(call.Instr.Common()))
+ CallName(call.Instr.Common()))
call.Invalid(msg)
}
}
func checkNoopMarshalImpl(argN int, meths ...string) CallCheck {
return func(call *Call) {
- if code.IsGenerated(call.Pass, call.Instr.Pos()) {
+ if IsGenerated(call.Pass, call.Instr.Pos()) {
return
}
arg := call.Args[argN]
T := arg.Value.Value.Type()
- Ts, ok := code.Dereference(T).Underlying().(*types.Struct)
+ Ts, ok := Dereference(T).Underlying().(*types.Struct)
if !ok {
return
}
if Ts.NumFields() == 0 {
return
}
- fields := code.FlattenFields(Ts)
+ fields := FlattenFields(Ts)
for _, field := range fields {
if field.Var.Exported() {
return
@@ -754,7 +696,7 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec
arg := call.Args[argN]
T := arg.Value.Value.Type()
- Ts, ok := code.Dereference(T).Underlying().(*types.Struct)
+ Ts, ok := Dereference(T).Underlying().(*types.Struct)
if !ok {
return
}
@@ -766,7 +708,7 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec
return
}
}
- fields := code.FlattenFields(Ts)
+ fields := FlattenFields(Ts)
for _, field := range fields {
if !(field.Var.Exported()) {
continue
@@ -793,14 +735,14 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec
func fieldPath(start types.Type, indices []int) string {
p := start.String()
for _, idx := range indices {
- field := code.Dereference(start).Underlying().(*types.Struct).Field(idx)
+ field := Dereference(start).Underlying().(*types.Struct).Field(idx)
start = field.Type()
p += "." + field.Name()
}
return p
}
-func isInLoop(b *ir.BasicBlock) bool {
+func isInLoop(b *ssa.BasicBlock) bool {
sets := functions.FindLoops(b.Parent())
for _, set := range sets {
if set.Has(b) {
@@ -813,70 +755,24 @@ func isInLoop(b *ir.BasicBlock) bool {
func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
- if !code.IsCallToAnyAST(pass, call,
+ if !IsCallToAnyAST(pass, call,
"os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") {
return
}
-
- hasSigterm := false
for _, arg := range call.Args {
if conv, ok := arg.(*ast.CallExpr); ok && isName(pass, conv.Fun, "os.Signal") {
arg = conv.Args[0]
}
- if isName(pass, arg, "syscall.SIGTERM") {
- hasSigterm = true
- break
- }
-
- }
- for i, arg := range call.Args {
- if conv, ok := arg.(*ast.CallExpr); ok && isName(pass, conv.Fun, "os.Signal") {
- arg = conv.Args[0]
- }
-
if isName(pass, arg, "os.Kill") || isName(pass, arg, "syscall.SIGKILL") {
- var fixes []analysis.SuggestedFix
- if !hasSigterm {
- nargs := make([]ast.Expr, len(call.Args))
- for j, a := range call.Args {
- if i == j {
- nargs[j] = Selector("syscall", "SIGTERM")
- } else {
- nargs[j] = a
- }
- }
- ncall := *call
- ncall.Args = nargs
- fixes = append(fixes, edit.Fix(fmt.Sprintf("use syscall.SIGTERM instead of %s", report.Render(pass, arg)), edit.ReplaceWithNode(pass.Fset, call, &ncall)))
- }
- nargs := make([]ast.Expr, 0, len(call.Args))
- for j, a := range call.Args {
- if i == j {
- continue
- }
- nargs = append(nargs, a)
- }
- ncall := *call
- ncall.Args = nargs
- fixes = append(fixes, edit.Fix(fmt.Sprintf("remove %s from list of arguments", report.Render(pass, arg)), edit.ReplaceWithNode(pass.Fset, call, &ncall)))
- report.Report(pass, arg, fmt.Sprintf("%s cannot be trapped (did you mean syscall.SIGTERM?)", report.Render(pass, arg)), report.Fixes(fixes...))
+ ReportNodef(pass, arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(pass, arg))
}
if isName(pass, arg, "syscall.SIGSTOP") {
- nargs := make([]ast.Expr, 0, len(call.Args)-1)
- for j, a := range call.Args {
- if i == j {
- continue
- }
- nargs = append(nargs, a)
- }
- ncall := *call
- ncall.Args = nargs
- report.Report(pass, arg, "syscall.SIGSTOP cannot be trapped", report.Fixes(edit.Fix("remove syscall.SIGSTOP from list of arguments", edit.ReplaceWithNode(pass.Fset, call, &ncall))))
+ ReportNodef(pass, arg, "%s signal cannot be trapped", Render(pass, arg))
}
}
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
@@ -884,23 +780,23 @@ func CheckTemplate(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
var kind string
- switch code.CallNameAST(pass, call) {
- case "(*text/template.Template).Parse":
+ if IsCallToAST(pass, call, "(*text/template.Template).Parse") {
kind = "text"
- case "(*html/template.Template).Parse":
+ } else if IsCallToAST(pass, call, "(*html/template.Template).Parse") {
kind = "html"
- default:
+ } else {
return
}
sel := call.Fun.(*ast.SelectorExpr)
- if !code.IsCallToAnyAST(pass, sel.X, "text/template.New", "html/template.New") {
+ if !IsCallToAST(pass, sel.X, "text/template.New") &&
+ !IsCallToAST(pass, sel.X, "html/template.New") {
// TODO(dh): this is a cheap workaround for templates with
// different delims. A better solution with less false
// negatives would use data flow analysis to see where the
// template comes from and where it has been
return
}
- s, ok := code.ExprToString(pass, call.Args[Arg("(*text/template.Template).Parse.text")])
+ s, ok := ExprToString(pass, call.Args[Arg("(*text/template.Template).Parse.text")])
if !ok {
return
}
@@ -914,23 +810,18 @@ func CheckTemplate(pass *analysis.Pass) (interface{}, error) {
if err != nil {
// TODO(dominikh): whitelist other parse errors, if any
if strings.Contains(err.Error(), "unexpected") {
- report.Report(pass, call.Args[Arg("(*text/template.Template).Parse.text")], err.Error())
+ ReportNodef(pass, call.Args[Arg("(*text/template.Template).Parse.text")], "%s", err)
}
}
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkTimeSleepConstantPatternRns = pattern.MustParse(`(BinaryExpr duration "*" (SelectorExpr (Ident "time") (Ident "Nanosecond")))`)
- checkTimeSleepConstantPatternRs = pattern.MustParse(`(BinaryExpr duration "*" (SelectorExpr (Ident "time") (Ident "Second")))`)
-)
-
func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
- if !code.IsCallToAST(pass, call, "time.Sleep") {
+ if !IsCallToAST(pass, call, "time.Sleep") {
return
}
lit, ok := call.Args[Arg("time.Sleep.d")].(*ast.BasicLit)
@@ -947,31 +838,37 @@ func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) {
// because the user could've meant 2 minutes.
return
}
-
- report.Report(pass, lit,
- fmt.Sprintf("sleeping for %d nanoseconds is probably a bug; be explicit if it isn't", n), report.Fixes(
- edit.Fix("explicitly use nanoseconds", edit.ReplaceWithPattern(pass, checkTimeSleepConstantPatternRns, pattern.State{"duration": lit}, lit)),
- edit.Fix("use seconds", edit.ReplaceWithPattern(pass, checkTimeSleepConstantPatternRs, pattern.State{"duration": lit}, lit))))
+ recommendation := "time.Sleep(time.Nanosecond)"
+ if n != 1 {
+ recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n)
+ }
+ ReportNodef(pass, call.Args[Arg("time.Sleep.d")],
+ "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation)
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var checkWaitgroupAddQ = pattern.MustParse(`
- (GoStmt
- (CallExpr
- (FuncLit
- _
- call@(CallExpr (Function "(*sync.WaitGroup).Add") _):_) _))`)
-
func CheckWaitgroupAdd(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if m, ok := Match(pass, checkWaitgroupAddQ, node); ok {
- call := m.State["call"].(ast.Node)
- report.Report(pass, call, fmt.Sprintf("should call %s before starting the goroutine to avoid a race", report.Render(pass, call)))
+ g := node.(*ast.GoStmt)
+ fun, ok := g.Call.Fun.(*ast.FuncLit)
+ if !ok {
+ return
+ }
+ if len(fun.Body.List) == 0 {
+ return
+ }
+ stmt, ok := fun.Body.List[0].(*ast.ExprStmt)
+ if !ok {
+ return
+ }
+ if IsCallToAST(pass, stmt.X, "(*sync.WaitGroup).Add") {
+ ReportNodef(pass, stmt, "should call %s before starting the goroutine to avoid a race",
+ Render(pass, stmt))
}
}
- code.Preorder(pass, fn, (*ast.GoStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn)
return nil, nil
}
@@ -1000,7 +897,7 @@ func CheckInfiniteEmptyLoop(pass *analysis.Pass) (interface{}, error) {
// channel receives.
if loop.Cond != nil {
- if code.MayHaveSideEffects(pass, loop.Cond, nil) {
+ if hasSideEffects(loop.Cond) {
return
}
if ident, ok := loop.Cond.(*ast.Ident); ok {
@@ -1011,11 +908,11 @@ func CheckInfiniteEmptyLoop(pass *analysis.Pass) (interface{}, error) {
}
}
}
- report.Report(pass, loop, "loop condition never changes or has a race condition")
+ ReportNodef(pass, loop, "loop condition never changes or has a race condition")
}
- report.Report(pass, loop, "this loop will spin, using 100%% CPU", report.ShortRange())
+ ReportNodef(pass, loop, "this loop will spin, using 100%% CPU")
}
- code.Preorder(pass, fn, (*ast.ForStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn)
return nil, nil
}
@@ -1054,10 +951,10 @@ func CheckDeferInInfiniteLoop(pass *analysis.Pass) (interface{}, error) {
return
}
for _, stmt := range defers {
- report.Report(pass, stmt, "defers in this infinite loop will never run")
+ ReportNodef(pass, stmt, "defers in this infinite loop will never run")
}
}
- code.Preorder(pass, fn, (*ast.ForStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn)
return nil, nil
}
@@ -1072,7 +969,7 @@ func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, erro
fn2 := func(node ast.Node) bool {
switch stmt := node.(type) {
case *ast.DeferStmt:
- report.Report(pass, stmt, "defers in this range loop won't run unless the channel gets closed")
+ ReportNodef(pass, stmt, "defers in this range loop won't run unless the channel gets closed")
case *ast.FuncLit:
// Don't look into function bodies
return false
@@ -1081,7 +978,7 @@ func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, erro
}
ast.Inspect(loop.Body, fn2)
}
- code.Preorder(pass, fn, (*ast.RangeStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn)
return nil, nil
}
@@ -1096,7 +993,7 @@ func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) {
if !push {
if fnmain != nil && node == fnmain {
if !callsExit && callsRun {
- report.Report(pass, fnmain, "TestMain should call os.Exit to set exit code")
+ ReportNodef(pass, fnmain, "TestMain should call os.Exit to set exit code")
}
fnmain = nil
callsExit = false
@@ -1118,7 +1015,7 @@ func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) {
arg = pass.TypesInfo.ObjectOf(node.Type.Params.List[0].Names[0])
return true
case *ast.CallExpr:
- if code.IsCallToAST(pass, node, "os.Exit") {
+ if IsCallToAST(pass, node, "os.Exit") {
callsExit = true
return false
}
@@ -1139,7 +1036,7 @@ func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) {
}
return true
default:
- ExhaustiveTypeSwitch(node)
+ // unreachable
return true
}
}
@@ -1158,26 +1055,26 @@ func isTestMain(pass *analysis.Pass, decl *ast.FuncDecl) bool {
if len(arg.Names) != 1 {
return false
}
- return code.IsOfType(pass, arg.Type, "*testing.M")
+ return IsOfType(pass, arg.Type, "*testing.M")
}
func CheckExec(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
- if !code.IsCallToAST(pass, call, "os/exec.Command") {
+ if !IsCallToAST(pass, call, "os/exec.Command") {
return
}
- val, ok := code.ExprToString(pass, call.Args[Arg("os/exec.Command.name")])
+ val, ok := ExprToString(pass, call.Args[Arg("os/exec.Command.name")])
if !ok {
return
}
if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") {
return
}
- report.Report(pass, call.Args[Arg("os/exec.Command.name")],
+ ReportNodef(pass, call.Args[Arg("os/exec.Command.name")],
"first argument to exec.Command looks like a shell command, but a program name or path are expected")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
@@ -1192,53 +1089,25 @@ func CheckLoopEmptyDefault(pass *analysis.Pass) (interface{}, error) {
return
}
for _, c := range sel.Body.List {
- // FIXME this leaves behind an empty line, and possibly
- // comments in the default branch. We can't easily fix
- // either.
if comm, ok := c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 {
- report.Report(pass, comm, "should not have an empty default case in a for+select loop; the loop will spin",
- report.Fixes(edit.Fix("remove empty default branch", edit.Delete(comm))))
- // there can only be one default case
- break
+ ReportNodef(pass, comm, "should not have an empty default case in a for+select loop. The loop will spin.")
}
}
}
- code.Preorder(pass, fn, (*ast.ForStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn)
return nil, nil
}
func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) {
- var isFloat func(T types.Type) bool
- isFloat = func(T types.Type) bool {
- switch T := T.Underlying().(type) {
- case *types.Basic:
- kind := T.Kind()
- return kind == types.Float32 || kind == types.Float64
- case *types.Array:
- return isFloat(T.Elem())
- case *types.Struct:
- for i := 0; i < T.NumFields(); i++ {
- if !isFloat(T.Field(i).Type()) {
- return false
- }
- }
- return true
- default:
- return false
- }
- }
-
- // TODO(dh): this check ignores the existence of side-effects and
- // happily flags fn() == fn() – so far, we've had nobody complain
- // about a false positive, and it's caught several bugs in real
- // code.
fn := func(node ast.Node) {
op := node.(*ast.BinaryExpr)
switch op.Op {
case token.EQL, token.NEQ:
- if isFloat(pass.TypesInfo.TypeOf(op.X)) {
- // f == f and f != f might be used to check for NaN
- return
+ if basic, ok := pass.TypesInfo.TypeOf(op.X).Underlying().(*types.Basic); ok {
+ if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 {
+ // f == f and f != f might be used to check for NaN
+ return
+ }
}
case token.SUB, token.QUO, token.AND, token.REM, token.OR, token.XOR, token.AND_NOT,
token.LAND, token.LOR, token.LSS, token.GTR, token.LEQ, token.GEQ:
@@ -1248,15 +1117,12 @@ func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) {
return
}
- if reflect.TypeOf(op.X) != reflect.TypeOf(op.Y) {
- return
- }
- if report.Render(pass, op.X) != report.Render(pass, op.Y) {
+ if Render(pass, op.X) != Render(pass, op.Y) {
return
}
l1, ok1 := op.X.(*ast.BasicLit)
l2, ok2 := op.Y.(*ast.BasicLit)
- if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && code.IsGenerated(pass, l1.Pos()) {
+ if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && IsGenerated(pass, l1.Pos()) {
// cgo generates the following function call:
// _cgoCheckPointer(_cgoBase0, 0 == 0) – it uses 0 == 0
// instead of true in case the user shadowed the
@@ -1269,9 +1135,9 @@ func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) {
// 0 == 0 are slim.
return
}
- report.Report(pass, op, fmt.Sprintf("identical expressions on the left and right side of the '%s' operator", op.Op))
+ ReportNodef(pass, op, "identical expressions on the left and right side of the '%s' operator", op.Op)
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
@@ -1284,7 +1150,7 @@ func CheckScopedBreak(pass *analysis.Pass) (interface{}, error) {
case *ast.RangeStmt:
body = node.Body
default:
- ExhaustiveTypeSwitch(node)
+ panic(fmt.Sprintf("unreachable: %T", node))
}
for _, stmt := range body.List {
var blocks [][]ast.Stmt
@@ -1325,27 +1191,24 @@ func CheckScopedBreak(pass *analysis.Pass) (interface{}, error) {
if !ok || branch.Tok != token.BREAK || branch.Label != nil {
continue
}
- report.Report(pass, branch, "ineffective break statement. Did you mean to break out of the outer loop?")
+ ReportNodef(pass, branch, "ineffective break statement. Did you mean to break out of the outer loop?")
}
}
}
}
- code.Preorder(pass, fn, (*ast.ForStmt)(nil), (*ast.RangeStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn)
return nil, nil
}
func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
- name := code.CallNameAST(pass, call)
var arg int
-
- switch name {
- case "fmt.Printf", "fmt.Sprintf", "log.Printf":
+ if IsCallToAnyAST(pass, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") {
arg = Arg("fmt.Printf.format")
- case "fmt.Fprintf":
+ } else if IsCallToAnyAST(pass, call, "fmt.Fprintf") {
arg = Arg("fmt.Fprintf.format")
- default:
+ } else {
return
}
if len(call.Args) != arg+1 {
@@ -1356,13 +1219,10 @@ func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) {
default:
return
}
-
- alt := name[:len(name)-1]
- report.Report(pass, call,
- "printf-style function with dynamic format string and no further arguments should use print-style function instead",
- report.Fixes(edit.Fix(fmt.Sprintf("use %s instead of %s", alt, name), edit.ReplaceWithString(pass.Fset, call.Fun, alt))))
+ ReportNodef(pass, call.Args[arg],
+ "printf-style function with dynamic format string and no further arguments should use print-style function instead")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
@@ -1428,10 +1288,10 @@ func CheckEarlyDefer(pass *analysis.Pass) (interface{}, error) {
if sel.Sel.Name != "Close" {
continue
}
- report.Report(pass, def, fmt.Sprintf("should check returned error before deferring %s", report.Render(pass, def.Call)))
+ ReportNodef(pass, def, "should check returned error before deferring %s", Render(pass, def.Call))
}
}
- code.Preorder(pass, fn, (*ast.BlockStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn)
return nil, nil
}
@@ -1445,20 +1305,15 @@ func selectorX(sel *ast.SelectorExpr) ast.Node {
}
func CheckEmptyCriticalSection(pass *analysis.Pass) (interface{}, error) {
- if pass.Pkg.Path() == "sync_test" {
- // exception for the sync package's tests
- return nil, nil
- }
-
// Initially it might seem like this check would be easier to
- // implement using IR. After all, we're only checking for two
+ // implement in SSA. After all, we're only checking for two
// consecutive method calls. In reality, however, there may be any
// number of other instructions between the lock and unlock, while
// still constituting an empty critical section. For example,
// given `m.x().Lock(); m.x().Unlock()`, there will be a call to
// x(). In the AST-based approach, this has a tiny potential for a
// false positive (the second call to x might be doing work that
- // is protected by the mutex). In an IR-based approach, however,
+ // is protected by the mutex). In an SSA-based approach, however,
// it would miss a lot of real bugs.
mutexParams := func(s ast.Stmt) (x ast.Expr, funcName string, ok bool) {
@@ -1496,38 +1351,73 @@ func CheckEmptyCriticalSection(pass *analysis.Pass) (interface{}, error) {
sel1, method1, ok1 := mutexParams(block.List[i])
sel2, method2, ok2 := mutexParams(block.List[i+1])
- if !ok1 || !ok2 || report.Render(pass, sel1) != report.Render(pass, sel2) {
+ if !ok1 || !ok2 || Render(pass, sel1) != Render(pass, sel2) {
continue
}
if (method1 == "Lock" && method2 == "Unlock") ||
(method1 == "RLock" && method2 == "RUnlock") {
- report.Report(pass, block.List[i+1], "empty critical section")
+ ReportNodef(pass, block.List[i+1], "empty critical section")
}
}
}
- code.Preorder(pass, fn, (*ast.BlockStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn)
return nil, nil
}
-var (
- // cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't
- // want to flag.
- cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`)
- checkIneffectiveCopyQ1 = pattern.MustParse(`(UnaryExpr "&" (StarExpr obj))`)
- checkIneffectiveCopyQ2 = pattern.MustParse(`(StarExpr (UnaryExpr "&" _))`)
-)
+// cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't
+// want to flag.
+var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`)
func CheckIneffectiveCopy(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if m, ok := Match(pass, checkIneffectiveCopyQ1, node); ok {
- if ident, ok := m.State["obj"].(*ast.Ident); !ok || !cgoIdent.MatchString(ident.Name) {
- report.Report(pass, node, "&*x will be simplified to x. It will not copy x.")
+ if unary, ok := node.(*ast.UnaryExpr); ok {
+ if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND {
+ ident, ok := star.X.(*ast.Ident)
+ if !ok || !cgoIdent.MatchString(ident.Name) {
+ ReportNodef(pass, unary, "&*x will be simplified to x. It will not copy x.")
+ }
+ }
+ }
+
+ if star, ok := node.(*ast.StarExpr); ok {
+ if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND {
+ ReportNodef(pass, star, "*&x will be simplified to x. It will not copy x.")
+ }
+ }
+ }
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn)
+ return nil, nil
+}
+
+func CheckDiffSizeComparison(pass *analysis.Pass) (interface{}, error) {
+ ranges := pass.ResultOf[valueRangesAnalyzer].(map[*ssa.Function]vrp.Ranges)
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, b := range ssafn.Blocks {
+ for _, ins := range b.Instrs {
+ binop, ok := ins.(*ssa.BinOp)
+ if !ok {
+ continue
+ }
+ if binop.Op != token.EQL && binop.Op != token.NEQ {
+ continue
+ }
+ _, ok1 := binop.X.(*ssa.Slice)
+ _, ok2 := binop.Y.(*ssa.Slice)
+ if !ok1 && !ok2 {
+ continue
+ }
+ r := ranges[ssafn]
+ r1, ok1 := r.Get(binop.X).(vrp.StringInterval)
+ r2, ok2 := r.Get(binop.Y).(vrp.StringInterval)
+ if !ok1 || !ok2 {
+ continue
+ }
+ if r1.Length.Intersection(r2.Length).Empty() {
+ pass.Reportf(binop.Pos(), "comparing strings of different sizes for equality will always return false")
+ }
}
- } else if _, ok := Match(pass, checkIneffectiveCopyQ2, node); ok {
- report.Report(pass, node, "*&x will be simplified to x. It will not copy x.")
}
}
- code.Preorder(pass, fn, (*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil))
return nil, nil
}
@@ -1546,7 +1436,7 @@ func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) {
if !ok {
continue
}
- if code.IsOfType(pass, op.X, "net/http.Header") {
+ if IsOfType(pass, op.X, "net/http.Header") {
return false
}
}
@@ -1556,34 +1446,17 @@ func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) {
if !ok {
return true
}
- if !code.IsOfType(pass, op.X, "net/http.Header") {
+ if !IsOfType(pass, op.X, "net/http.Header") {
return true
}
- s, ok := code.ExprToString(pass, op.Index)
+ s, ok := ExprToString(pass, op.Index)
if !ok {
return true
}
- canonical := http.CanonicalHeaderKey(s)
- if s == canonical {
+ if s == http.CanonicalHeaderKey(s) {
return true
}
- var fix analysis.SuggestedFix
- switch op.Index.(type) {
- case *ast.BasicLit:
- fix = edit.Fix("canonicalize header key", edit.ReplaceWithString(pass.Fset, op.Index, strconv.Quote(canonical)))
- case *ast.Ident:
- call := &ast.CallExpr{
- Fun: Selector("http", "CanonicalHeaderKey"),
- Args: []ast.Expr{op.Index},
- }
- fix = edit.Fix("wrap in http.CanonicalHeaderKey", edit.ReplaceWithNode(pass.Fset, op.Index, call))
- }
- msg := fmt.Sprintf("keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s)
- if fix.Message != "" {
- report.Report(pass, op, msg, report.Fixes(fix))
- } else {
- report.Report(pass, op, msg)
- }
+ ReportNodef(pass, op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s)
return true
}
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn)
@@ -1603,43 +1476,43 @@ func CheckBenchmarkN(pass *analysis.Pass) (interface{}, error) {
if sel.Sel.Name != "N" {
return
}
- if !code.IsOfType(pass, sel.X, "*testing.B") {
+ if !IsOfType(pass, sel.X, "*testing.B") {
return
}
- report.Report(pass, assign, fmt.Sprintf("should not assign to %s", report.Render(pass, sel)))
+ ReportNodef(pass, assign, "should not assign to %s", Render(pass, sel))
}
- code.Preorder(pass, fn, (*ast.AssignStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn)
return nil, nil
}
func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- if code.IsExample(fn) {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ if IsExample(ssafn) {
continue
}
- node := fn.Source()
+ node := ssafn.Syntax()
if node == nil {
continue
}
- if gen, ok := code.Generator(pass, node.Pos()); ok && gen == facts.Goyacc {
+ if gen, ok := Generator(pass, node.Pos()); ok && gen == facts.Goyacc {
// Don't flag unused values in code generated by goyacc.
// There may be hundreds of those due to the way the state
// machine is constructed.
continue
}
- switchTags := map[ir.Value]struct{}{}
+ switchTags := map[ssa.Value]struct{}{}
ast.Inspect(node, func(node ast.Node) bool {
s, ok := node.(*ast.SwitchStmt)
if !ok {
return true
}
- v, _ := fn.ValueForExpr(s.Tag)
+ v, _ := ssafn.ValueForExpr(s.Tag)
switchTags[v] = struct{}{}
return true
})
- hasUse := func(v ir.Value) bool {
+ hasUse := func(v ssa.Value) bool {
if _, ok := switchTags[v]; ok {
return true
}
@@ -1648,7 +1521,7 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
// TODO investigate why refs can be nil
return true
}
- return len(code.FilterDebug(*refs)) > 0
+ return len(FilterDebug(*refs)) > 0
}
ast.Inspect(node, func(node ast.Node) bool {
@@ -1660,7 +1533,7 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
// Either a function call with multiple return values,
// or a comma-ok assignment
- val, _ := fn.ValueForExpr(assign.Rhs[0])
+ val, _ := ssafn.ValueForExpr(assign.Rhs[0])
if val == nil {
return true
}
@@ -1669,7 +1542,7 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
return true
}
for _, ref := range *refs {
- ex, ok := ref.(*ir.Extract)
+ ex, ok := ref.(*ssa.Extract)
if !ok {
continue
}
@@ -1678,7 +1551,7 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" {
continue
}
- report.Report(pass, assign, fmt.Sprintf("this value of %s is never used", lhs))
+ ReportNodef(pass, lhs, "this value of %s is never used", lhs)
}
}
return true
@@ -1688,13 +1561,13 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" {
continue
}
- val, _ := fn.ValueForExpr(rhs)
+ val, _ := ssafn.ValueForExpr(rhs)
if val == nil {
continue
}
if !hasUse(val) {
- report.Report(pass, assign, fmt.Sprintf("this value of %s is never used", lhs))
+ ReportNodef(pass, lhs, "this value of %s is never used", lhs)
}
}
return true
@@ -1704,21 +1577,21 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
}
func CheckPredeterminedBooleanExprs(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
- binop, ok := ins.(*ir.BinOp)
+ ssabinop, ok := ins.(*ssa.BinOp)
if !ok {
continue
}
- switch binop.Op {
+ switch ssabinop.Op {
case token.GTR, token.LSS, token.EQL, token.NEQ, token.LEQ, token.GEQ:
default:
continue
}
- xs, ok1 := consts(binop.X, nil, nil)
- ys, ok2 := consts(binop.Y, nil, nil)
+ xs, ok1 := consts(ssabinop.X, nil, nil)
+ ys, ok2 := consts(ssabinop.Y, nil, nil)
if !ok1 || !ok2 || len(xs) == 0 || len(ys) == 0 {
continue
}
@@ -1732,14 +1605,15 @@ func CheckPredeterminedBooleanExprs(pass *analysis.Pass) (interface{}, error) {
}
continue
}
- if constant.Compare(x.Value, binop.Op, y.Value) {
+ if constant.Compare(x.Value, ssabinop.Op, y.Value) {
trues++
}
}
}
b := trues != 0
if trues == 0 || trues == len(xs)*len(ys) {
- report.Report(pass, binop, fmt.Sprintf("binary expression is always %t for all possible values (%s %s %s)", b, xs, binop.Op, ys))
+ pass.Reportf(ssabinop.Pos(), "binary expression is always %t for all possible values (%s %s %s)",
+ b, xs, ssabinop.Op, ys)
}
}
}
@@ -1748,21 +1622,21 @@ func CheckPredeterminedBooleanExprs(pass *analysis.Pass) (interface{}, error) {
}
func CheckNilMaps(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
- mu, ok := ins.(*ir.MapUpdate)
+ mu, ok := ins.(*ssa.MapUpdate)
if !ok {
continue
}
- c, ok := mu.Map.(*ir.Const)
+ c, ok := mu.Map.(*ssa.Const)
if !ok {
continue
}
if c.Value != nil {
continue
}
- report.Report(pass, mu, "assignment to nil map")
+ pass.Reportf(mu.Pos(), "assignment to nil map")
}
}
}
@@ -1775,7 +1649,7 @@ func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) {
if !ok {
return false
}
- return code.IsObject(pass.TypesInfo.ObjectOf(sel.Sel), name)
+ return IsObject(pass.TypesInfo.ObjectOf(sel.Sel), name)
}
fn := func(node ast.Node) {
@@ -1820,45 +1694,45 @@ func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) {
if (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.Y, max) ||
(expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.X, max) {
- report.Report(pass, expr, fmt.Sprintf("no value of type %s is greater than %s", basic, max))
+ ReportNodef(pass, expr, "no value of type %s is greater than %s", basic, max)
}
if expr.Op == token.LEQ && isobj(expr.Y, max) ||
expr.Op == token.GEQ && isobj(expr.X, max) {
- report.Report(pass, expr, fmt.Sprintf("every value of type %s is <= %s", basic, max))
+ ReportNodef(pass, expr, "every value of type %s is <= %s", basic, max)
}
if (basic.Info() & types.IsUnsigned) != 0 {
- if (expr.Op == token.LSS && code.IsIntLiteral(expr.Y, "0")) ||
- (expr.Op == token.GTR && code.IsIntLiteral(expr.X, "0")) {
- report.Report(pass, expr, fmt.Sprintf("no value of type %s is less than 0", basic))
+ if (expr.Op == token.LSS || expr.Op == token.LEQ) && IsIntLiteral(expr.Y, "0") ||
+ (expr.Op == token.GTR || expr.Op == token.GEQ) && IsIntLiteral(expr.X, "0") {
+ ReportNodef(pass, expr, "no value of type %s is less than 0", basic)
}
- if expr.Op == token.GEQ && code.IsIntLiteral(expr.Y, "0") ||
- expr.Op == token.LEQ && code.IsIntLiteral(expr.X, "0") {
- report.Report(pass, expr, fmt.Sprintf("every value of type %s is >= 0", basic))
+ if expr.Op == token.GEQ && IsIntLiteral(expr.Y, "0") ||
+ expr.Op == token.LEQ && IsIntLiteral(expr.X, "0") {
+ ReportNodef(pass, expr, "every value of type %s is >= 0", basic)
}
} else {
if (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.Y, min) ||
(expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.X, min) {
- report.Report(pass, expr, fmt.Sprintf("no value of type %s is less than %s", basic, min))
+ ReportNodef(pass, expr, "no value of type %s is less than %s", basic, min)
}
if expr.Op == token.GEQ && isobj(expr.Y, min) ||
expr.Op == token.LEQ && isobj(expr.X, min) {
- report.Report(pass, expr, fmt.Sprintf("every value of type %s is >= %s", basic, min))
+ ReportNodef(pass, expr, "every value of type %s is >= %s", basic, min)
}
}
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
-func consts(val ir.Value, out []*ir.Const, visitedPhis map[string]bool) ([]*ir.Const, bool) {
+func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) {
if visitedPhis == nil {
visitedPhis = map[string]bool{}
}
var ok bool
switch val := val.(type) {
- case *ir.Phi:
+ case *ssa.Phi:
if visitedPhis[val.Name()] {
break
}
@@ -1870,9 +1744,9 @@ func consts(val ir.Value, out []*ir.Const, visitedPhis map[string]bool) ([]*ir.C
return nil, false
}
}
- case *ir.Const:
+ case *ssa.Const:
out = append(out, val)
- case *ir.Convert:
+ case *ssa.Convert:
out, ok = consts(val.X, out, visitedPhis)
if !ok {
return nil, false
@@ -1883,7 +1757,7 @@ func consts(val ir.Value, out []*ir.Const, visitedPhis map[string]bool) ([]*ir.C
if len(out) < 2 {
return out, true
}
- uniq := []*ir.Const{out[0]}
+ uniq := []*ssa.Const{out[0]}
for _, val := range out[1:] {
if val.Value == uniq[len(uniq)-1].Value {
continue
@@ -1894,8 +1768,8 @@ func consts(val ir.Value, out []*ir.Const, visitedPhis map[string]bool) ([]*ir.C
}
func CheckLoopCondition(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- cb := func(node ast.Node) bool {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ fn := func(node ast.Node) bool {
loop, ok := node.(*ast.ForStmt)
if !ok {
return true
@@ -1926,42 +1800,42 @@ func CheckLoopCondition(pass *analysis.Pass) (interface{}, error) {
return true
}
- v, isAddr := fn.ValueForExpr(cond.X)
+ v, isAddr := ssafn.ValueForExpr(cond.X)
if v == nil || isAddr {
return true
}
switch v := v.(type) {
- case *ir.Phi:
+ case *ssa.Phi:
ops := v.Operands(nil)
if len(ops) != 2 {
return true
}
- _, ok := (*ops[0]).(*ir.Const)
+ _, ok := (*ops[0]).(*ssa.Const)
if !ok {
return true
}
- sigma, ok := (*ops[1]).(*ir.Sigma)
+ sigma, ok := (*ops[1]).(*ssa.Sigma)
if !ok {
return true
}
if sigma.X != v {
return true
}
- case *ir.Load:
+ case *ssa.UnOp:
return true
}
- report.Report(pass, cond, "variable in loop condition never changes")
+ ReportNodef(pass, cond, "variable in loop condition never changes")
return true
}
- Inspect(fn.Source(), cb)
+ Inspect(ssafn.Syntax(), fn)
}
return nil, nil
}
func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- cb := func(node ast.Node) bool {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ fn := func(node ast.Node) bool {
var typ *ast.FuncType
var body *ast.BlockStmt
switch fn := node.(type) {
@@ -1981,29 +1855,26 @@ func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) {
for _, field := range typ.Params.List {
for _, arg := range field.Names {
obj := pass.TypesInfo.ObjectOf(arg)
- var irobj *ir.Parameter
- for _, param := range fn.Params {
+ var ssaobj *ssa.Parameter
+ for _, param := range ssafn.Params {
if param.Object() == obj {
- irobj = param
+ ssaobj = param
break
}
}
- if irobj == nil {
+ if ssaobj == nil {
continue
}
- refs := irobj.Referrers()
+ refs := ssaobj.Referrers()
if refs == nil {
continue
}
- if len(code.FilterDebug(*refs)) != 0 {
+ if len(FilterDebug(*refs)) != 0 {
continue
}
- var assignment ast.Node
+ assigned := false
ast.Inspect(body, func(node ast.Node) bool {
- if assignment != nil {
- return false
- }
assign, ok := node.(*ast.AssignStmt)
if !ok {
return true
@@ -2014,21 +1885,20 @@ func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) {
continue
}
if pass.TypesInfo.ObjectOf(ident) == obj {
- assignment = assign
+ assigned = true
return false
}
}
return true
})
- if assignment != nil {
- report.Report(pass, arg, fmt.Sprintf("argument %s is overwritten before first use", arg),
- report.Related(assignment, fmt.Sprintf("assignment to %s", arg)))
+ if assigned {
+ ReportNodef(pass, arg, "argument %s is overwritten before first use", arg)
}
}
}
return true
}
- Inspect(fn.Source(), cb)
+ Inspect(ssafn.Syntax(), fn)
}
return nil, nil
}
@@ -2050,7 +1920,7 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) {
case *ast.FuncLit:
body = fn.Body
default:
- ExhaustiveTypeSwitch(node)
+ panic(fmt.Sprintf("unreachable: %T", node))
}
if body == nil {
return
@@ -2133,133 +2003,136 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) {
return true
})
if unconditionalExit != nil {
- report.Report(pass, unconditionalExit, "the surrounding loop is unconditionally terminated")
+ ReportNodef(pass, unconditionalExit, "the surrounding loop is unconditionally terminated")
}
return true
})
}
- code.Preorder(pass, fn, (*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn)
return nil, nil
}
-var checkNilContextQ = pattern.MustParse(`(CallExpr fun@(Function _) (Builtin "nil"):_)`)
-
func CheckNilContext(pass *analysis.Pass) (interface{}, error) {
- todo := &ast.CallExpr{
- Fun: Selector("context", "TODO"),
- }
- bg := &ast.CallExpr{
- Fun: Selector("context", "Background"),
- }
fn := func(node ast.Node) {
- m, ok := Match(pass, checkNilContextQ, node)
- if !ok {
+ call := node.(*ast.CallExpr)
+ if len(call.Args) == 0 {
return
}
-
- call := node.(*ast.CallExpr)
- fun, ok := m.State["fun"].(*types.Func)
+ if typ, ok := pass.TypesInfo.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil {
+ return
+ }
+ sig, ok := pass.TypesInfo.TypeOf(call.Fun).(*types.Signature)
if !ok {
- // it might also be a builtin
return
}
- sig := fun.Type().(*types.Signature)
if sig.Params().Len() == 0 {
- // Our CallExpr might've matched a method expression, like
- // (*T).Foo(nil) – here, nil isn't the first argument of
- // the Foo method, but the method receiver.
return
}
- if !code.IsType(sig.Params().At(0).Type(), "context.Context") {
+ if !IsType(sig.Params().At(0).Type(), "context.Context") {
return
}
- report.Report(pass, call.Args[0],
- "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use", report.Fixes(
- edit.Fix("use context.TODO", edit.ReplaceWithNode(pass.Fset, call.Args[0], todo)),
- edit.Fix("use context.Background", edit.ReplaceWithNode(pass.Fset, call.Args[0], bg))))
+ ReportNodef(pass, call.Args[0],
+ "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
-var (
- checkSeekerQ = pattern.MustParse(`(CallExpr fun@(SelectorExpr _ (Ident "Seek")) [arg1@(SelectorExpr (Ident "io") (Ident (Or "SeekStart" "SeekCurrent" "SeekEnd"))) arg2])`)
- checkSeekerR = pattern.MustParse(`(CallExpr fun [arg2 arg1])`)
-)
-
func CheckSeeker(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if _, edits, ok := MatchAndEdit(pass, checkSeekerQ, checkSeekerR, node); ok {
- report.Report(pass, node, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead",
- report.Fixes(edit.Fix("swap arguments", edits...)))
+ call := node.(*ast.CallExpr)
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return
+ }
+ if sel.Sel.Name != "Seek" {
+ return
}
+ if len(call.Args) != 2 {
+ return
+ }
+ arg0, ok := call.Args[Arg("(io.Seeker).Seek.offset")].(*ast.SelectorExpr)
+ if !ok {
+ return
+ }
+ switch arg0.Sel.Name {
+ case "SeekStart", "SeekCurrent", "SeekEnd":
+ default:
+ return
+ }
+ pkg, ok := arg0.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ if pkg.Name != "io" {
+ return
+ }
+ ReportNodef(pass, call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
func CheckIneffectiveAppend(pass *analysis.Pass) (interface{}, error) {
- isAppend := func(ins ir.Value) bool {
- call, ok := ins.(*ir.Call)
+ isAppend := func(ins ssa.Value) bool {
+ call, ok := ins.(*ssa.Call)
if !ok {
return false
}
if call.Call.IsInvoke() {
return false
}
- if builtin, ok := call.Call.Value.(*ir.Builtin); !ok || builtin.Name() != "append" {
+ if builtin, ok := call.Call.Value.(*ssa.Builtin); !ok || builtin.Name() != "append" {
return false
}
return true
}
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
- val, ok := ins.(ir.Value)
+ val, ok := ins.(ssa.Value)
if !ok || !isAppend(val) {
continue
}
isUsed := false
- visited := map[ir.Instruction]bool{}
- var walkRefs func(refs []ir.Instruction)
- walkRefs = func(refs []ir.Instruction) {
+ visited := map[ssa.Instruction]bool{}
+ var walkRefs func(refs []ssa.Instruction)
+ walkRefs = func(refs []ssa.Instruction) {
loop:
for _, ref := range refs {
if visited[ref] {
continue
}
visited[ref] = true
- if _, ok := ref.(*ir.DebugRef); ok {
+ if _, ok := ref.(*ssa.DebugRef); ok {
continue
}
switch ref := ref.(type) {
- case *ir.Phi:
+ case *ssa.Phi:
walkRefs(*ref.Referrers())
- case *ir.Sigma:
+ case *ssa.Sigma:
walkRefs(*ref.Referrers())
- case ir.Value:
+ case ssa.Value:
if !isAppend(ref) {
isUsed = true
} else {
walkRefs(*ref.Referrers())
}
- case ir.Instruction:
+ case ssa.Instruction:
isUsed = true
break loop
}
}
}
-
refs := val.Referrers()
if refs == nil {
continue
}
walkRefs(*refs)
-
if !isUsed {
- report.Report(pass, ins, "this result of append is never used, except maybe in other appends")
+ pass.Reportf(ins.Pos(), "this result of append is never used, except maybe in other appends")
}
}
}
@@ -2268,19 +2141,19 @@ func CheckIneffectiveAppend(pass *analysis.Pass) (interface{}, error) {
}
func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
- gostmt, ok := ins.(*ir.Go)
+ gostmt, ok := ins.(*ssa.Go)
if !ok {
continue
}
- var fn *ir.Function
+ var fn *ssa.Function
switch val := gostmt.Call.Value.(type) {
- case *ir.Function:
+ case *ssa.Function:
fn = val
- case *ir.MakeClosure:
- fn = val.Fn.(*ir.Function)
+ case *ssa.MakeClosure:
+ fn = val.Fn.(*ssa.Function)
default:
continue
}
@@ -2289,7 +2162,7 @@ func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) {
}
for _, block := range fn.Blocks {
for _, ins := range block.Instrs {
- call, ok := ins.(*ir.Call)
+ call, ok := ins.(*ssa.Call)
if !ok {
continue
}
@@ -2304,7 +2177,7 @@ func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) {
if recv == nil {
continue
}
- if !code.IsType(recv.Type(), "*testing.common") {
+ if !IsType(recv.Type(), "*testing.common") {
continue
}
fn, ok := call.Call.StaticCallee().Object().(*types.Func)
@@ -2317,11 +2190,7 @@ func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) {
default:
continue
}
- // TODO(dh): don't report multiple diagnostics
- // for multiple calls to T.Fatal, but do
- // collect all of them as related information
- report.Report(pass, gostmt, fmt.Sprintf("the goroutine calls T.%s, which must be called in the same goroutine as the test", name),
- report.Related(call, fmt.Sprintf("call to T.%s", name)))
+ pass.Reportf(gostmt.Pos(), "the goroutine calls T.%s, which must be called in the same goroutine as the test", name)
}
}
}
@@ -2330,12 +2199,12 @@ func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) {
return nil, nil
}
-func eachCall(fn *ir.Function, cb func(caller *ir.Function, site ir.CallInstruction, callee *ir.Function)) {
- for _, b := range fn.Blocks {
+func eachCall(ssafn *ssa.Function, fn func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function)) {
+ for _, b := range ssafn.Blocks {
for _, instr := range b.Instrs {
- if site, ok := instr.(ir.CallInstruction); ok {
+ if site, ok := instr.(ssa.CallInstruction); ok {
if g := site.Common().StaticCallee(); g != nil {
- cb(fn, site, g)
+ fn(ssafn, site, g)
}
}
}
@@ -2343,62 +2212,62 @@ func eachCall(fn *ir.Function, cb func(caller *ir.Function, site ir.CallInstruct
}
func CheckCyclicFinalizer(pass *analysis.Pass) (interface{}, error) {
- cb := func(caller *ir.Function, site ir.CallInstruction, callee *ir.Function) {
+ fn := func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) {
if callee.RelString(nil) != "runtime.SetFinalizer" {
return
}
arg0 := site.Common().Args[Arg("runtime.SetFinalizer.obj")]
- if iface, ok := arg0.(*ir.MakeInterface); ok {
+ if iface, ok := arg0.(*ssa.MakeInterface); ok {
arg0 = iface.X
}
- load, ok := arg0.(*ir.Load)
+ unop, ok := arg0.(*ssa.UnOp)
if !ok {
return
}
- v, ok := load.X.(*ir.Alloc)
+ v, ok := unop.X.(*ssa.Alloc)
if !ok {
return
}
arg1 := site.Common().Args[Arg("runtime.SetFinalizer.finalizer")]
- if iface, ok := arg1.(*ir.MakeInterface); ok {
+ if iface, ok := arg1.(*ssa.MakeInterface); ok {
arg1 = iface.X
}
- mc, ok := arg1.(*ir.MakeClosure)
+ mc, ok := arg1.(*ssa.MakeClosure)
if !ok {
return
}
for _, b := range mc.Bindings {
if b == v {
pos := lint.DisplayPosition(pass.Fset, mc.Fn.Pos())
- report.Report(pass, site, fmt.Sprintf("the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos))
+ pass.Reportf(site.Pos(), "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos)
}
}
}
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- eachCall(fn, cb)
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ eachCall(ssafn, fn)
}
return nil, nil
}
/*
func CheckSliceOutOfBounds(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
- ia, ok := ins.(*ir.IndexAddr)
+ ia, ok := ins.(*ssa.IndexAddr)
if !ok {
continue
}
if _, ok := ia.X.Type().Underlying().(*types.Slice); !ok {
continue
}
- sr, ok1 := c.funcDescs.Get(fn).Ranges[ia.X].(vrp.SliceInterval)
- idxr, ok2 := c.funcDescs.Get(fn).Ranges[ia.Index].(vrp.IntInterval)
+ sr, ok1 := c.funcDescs.Get(ssafn).Ranges[ia.X].(vrp.SliceInterval)
+ idxr, ok2 := c.funcDescs.Get(ssafn).Ranges[ia.Index].(vrp.IntInterval)
if !ok1 || !ok2 || !sr.IsKnown() || !idxr.IsKnown() || sr.Length.Empty() || idxr.Empty() {
continue
}
if idxr.Lower.Cmp(sr.Length.Upper) >= 0 {
- report.Nodef(pass, ia, "index out of bounds")
+ ReportNodef(pass, ia, "index out of bounds")
}
}
}
@@ -2408,25 +2277,25 @@ func CheckSliceOutOfBounds(pass *analysis.Pass) (interface{}, error) {
*/
func CheckDeferLock(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
- instrs := code.FilterDebug(block.Instrs)
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
+ instrs := FilterDebug(block.Instrs)
if len(instrs) < 2 {
continue
}
for i, ins := range instrs[:len(instrs)-1] {
- call, ok := ins.(*ir.Call)
+ call, ok := ins.(*ssa.Call)
if !ok {
continue
}
- if !code.IsCallToAny(call.Common(), "(*sync.Mutex).Lock", "(*sync.RWMutex).RLock") {
+ if !IsCallTo(call.Common(), "(*sync.Mutex).Lock") && !IsCallTo(call.Common(), "(*sync.RWMutex).RLock") {
continue
}
- nins, ok := instrs[i+1].(*ir.Defer)
+ nins, ok := instrs[i+1].(*ssa.Defer)
if !ok {
continue
}
- if !code.IsCallToAny(&nins.Call, "(*sync.Mutex).Lock", "(*sync.RWMutex).RLock") {
+ if !IsCallTo(&nins.Call, "(*sync.Mutex).Lock") && !IsCallTo(&nins.Call, "(*sync.RWMutex).RLock") {
continue
}
if call.Common().Args[0] != nins.Call.Args[0] {
@@ -2440,7 +2309,7 @@ func CheckDeferLock(pass *analysis.Pass) (interface{}, error) {
case "RLock":
alt = "RUnlock"
}
- report.Report(pass, nins, fmt.Sprintf("deferring %s right after having locked already; did you mean to defer %s?", name, alt))
+ pass.Reportf(nins.Pos(), "deferring %s right after having locked already; did you mean to defer %s?", name, alt)
}
}
}
@@ -2448,22 +2317,22 @@ func CheckDeferLock(pass *analysis.Pass) (interface{}, error) {
}
func CheckNaNComparison(pass *analysis.Pass) (interface{}, error) {
- isNaN := func(v ir.Value) bool {
- call, ok := v.(*ir.Call)
+ isNaN := func(v ssa.Value) bool {
+ call, ok := v.(*ssa.Call)
if !ok {
return false
}
- return code.IsCallTo(call.Common(), "math.NaN")
+ return IsCallTo(call.Common(), "math.NaN")
}
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
- ins, ok := ins.(*ir.BinOp)
+ ins, ok := ins.(*ssa.BinOp)
if !ok {
continue
}
if isNaN(ins.X) || isNaN(ins.Y) {
- report.Report(pass, ins, "no value is equal to NaN, not even NaN itself")
+ pass.Reportf(ins.Pos(), "no value is equal to NaN, not even NaN itself")
}
}
}
@@ -2472,12 +2341,12 @@ func CheckNaNComparison(pass *analysis.Pass) (interface{}, error) {
}
func CheckInfiniteRecursion(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- eachCall(fn, func(caller *ir.Function, site ir.CallInstruction, callee *ir.Function) {
- if callee != fn {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ eachCall(ssafn, func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) {
+ if callee != ssafn {
return
}
- if _, ok := site.(*ir.Go); ok {
+ if _, ok := site.(*ssa.Go); ok {
// Recursively spawning goroutines doesn't consume
// stack space infinitely, so don't flag it.
return
@@ -2485,14 +2354,14 @@ func CheckInfiniteRecursion(pass *analysis.Pass) (interface{}, error) {
block := site.Block()
canReturn := false
- for _, b := range fn.Blocks {
+ for _, b := range ssafn.Blocks {
if block.Dominates(b) {
continue
}
if len(b.Instrs) == 0 {
continue
}
- if _, ok := b.Control().(*ir.Return); ok {
+ if _, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return); ok {
canReturn = true
break
}
@@ -2500,7 +2369,7 @@ func CheckInfiniteRecursion(pass *analysis.Pass) (interface{}, error) {
if canReturn {
return
}
- report.Report(pass, site, "infinite recursive call")
+ pass.Reportf(site.Pos(), "infinite recursive call")
})
}
return nil, nil
@@ -2533,211 +2402,141 @@ func isName(pass *analysis.Pass, expr ast.Expr, name string) bool {
}
func CheckLeakyTimeTick(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- if code.IsMainLike(pass) || code.IsInTest(pass, fn) {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ if IsInMain(pass, ssafn) || IsInTest(pass, ssafn) {
continue
}
- for _, block := range fn.Blocks {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
- call, ok := ins.(*ir.Call)
- if !ok || !code.IsCallTo(call.Common(), "time.Tick") {
+ call, ok := ins.(*ssa.Call)
+ if !ok || !IsCallTo(call.Common(), "time.Tick") {
continue
}
if !functions.Terminates(call.Parent()) {
continue
}
- report.Report(pass, call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here")
+ pass.Reportf(call.Pos(), "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here")
}
}
}
return nil, nil
}
-var checkDoubleNegationQ = pattern.MustParse(`(UnaryExpr "!" single@(UnaryExpr "!" x))`)
-
func CheckDoubleNegation(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if m, ok := Match(pass, checkDoubleNegationQ, node); ok {
- report.Report(pass, node, "negating a boolean twice has no effect; is this a typo?", report.Fixes(
- edit.Fix("turn into single negation", edit.ReplaceWithNode(pass.Fset, node, m.State["single"].(ast.Node))),
- edit.Fix("remove double negation", edit.ReplaceWithNode(pass.Fset, node, m.State["x"].(ast.Node)))))
+ unary1 := node.(*ast.UnaryExpr)
+ unary2, ok := unary1.X.(*ast.UnaryExpr)
+ if !ok {
+ return
+ }
+ if unary1.Op != token.NOT || unary2.Op != token.NOT {
+ return
}
+ ReportNodef(pass, unary1, "negating a boolean twice has no effect; is this a typo?")
}
- code.Preorder(pass, fn, (*ast.UnaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn)
return nil, nil
}
+func hasSideEffects(node ast.Node) bool {
+ dynamic := false
+ ast.Inspect(node, func(node ast.Node) bool {
+ switch node := node.(type) {
+ case *ast.CallExpr:
+ dynamic = true
+ return false
+ case *ast.UnaryExpr:
+ if node.Op == token.ARROW {
+ dynamic = true
+ return false
+ }
+ }
+ return true
+ })
+ return dynamic
+}
+
func CheckRepeatedIfElse(pass *analysis.Pass) (interface{}, error) {
seen := map[ast.Node]bool{}
- var collectConds func(ifstmt *ast.IfStmt, conds []ast.Expr) ([]ast.Expr, bool)
- collectConds = func(ifstmt *ast.IfStmt, conds []ast.Expr) ([]ast.Expr, bool) {
+ var collectConds func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr)
+ collectConds = func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) {
seen[ifstmt] = true
- // Bail if any if-statement has an Init statement or side effects in its condition
if ifstmt.Init != nil {
- return nil, false
- }
- if code.MayHaveSideEffects(pass, ifstmt.Cond, nil) {
- return nil, false
+ inits = append(inits, ifstmt.Init)
}
-
conds = append(conds, ifstmt.Cond)
if elsestmt, ok := ifstmt.Else.(*ast.IfStmt); ok {
- return collectConds(elsestmt, conds)
+ return collectConds(elsestmt, inits, conds)
}
- return conds, true
+ return inits, conds
}
fn := func(node ast.Node) {
ifstmt := node.(*ast.IfStmt)
if seen[ifstmt] {
- // this if-statement is part of an if/else-if chain that we've already processed
- return
- }
- if ifstmt.Else == nil {
- // there can be at most one condition
return
}
- conds, ok := collectConds(ifstmt, nil)
- if !ok {
+ inits, conds := collectConds(ifstmt, nil, nil)
+ if len(inits) > 0 {
return
}
- if len(conds) < 2 {
- return
+ for _, cond := range conds {
+ if hasSideEffects(cond) {
+ return
+ }
}
counts := map[string]int{}
for _, cond := range conds {
- s := report.Render(pass, cond)
+ s := Render(pass, cond)
counts[s]++
if counts[s] == 2 {
- report.Report(pass, cond, "this condition occurs multiple times in this if/else if chain")
+ ReportNodef(pass, cond, "this condition occurs multiple times in this if/else if chain")
}
}
}
- code.Preorder(pass, fn, (*ast.IfStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn)
return nil, nil
}
func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) {
- // FIXME(dh): what happened here?
- if false {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, block := range fn.Blocks {
- for _, ins := range block.Instrs {
- ins, ok := ins.(*ir.BinOp)
- if !ok {
- continue
- }
-
- if c, ok := ins.Y.(*ir.Const); !ok || c.Value == nil || c.Value.Kind() != constant.Int || c.Uint64() != 0 {
- continue
- }
- switch ins.Op {
- case token.AND, token.OR, token.XOR:
- default:
- // we do not flag shifts because too often, x<<0 is part
- // of a pattern, x<<0, x<<8, x<<16, ...
- continue
- }
- path, _ := astutil.PathEnclosingInterval(code.File(pass, ins), ins.Pos(), ins.Pos())
- if len(path) == 0 {
- continue
- }
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ for _, block := range ssafn.Blocks {
+ for _, ins := range block.Instrs {
+ ins, ok := ins.(*ssa.BinOp)
+ if !ok {
+ continue
+ }
- if node, ok := path[0].(*ast.BinaryExpr); !ok || !code.IsIntLiteral(node.Y, "0") {
- continue
- }
+ if c, ok := ins.Y.(*ssa.Const); !ok || c.Value == nil || c.Value.Kind() != constant.Int || c.Uint64() != 0 {
+ continue
+ }
+ switch ins.Op {
+ case token.AND, token.OR, token.XOR:
+ default:
+ // we do not flag shifts because too often, x<<0 is part
+ // of a pattern, x<<0, x<<8, x<<16, ...
+ continue
+ }
+ path, _ := astutil.PathEnclosingInterval(File(pass, ins), ins.Pos(), ins.Pos())
+ if len(path) == 0 {
+ continue
+ }
+ if node, ok := path[0].(*ast.BinaryExpr); !ok || !IsZero(node.Y) {
+ continue
+ }
- switch ins.Op {
- case token.AND:
- report.Report(pass, ins, "x & 0 always equals 0")
- case token.OR, token.XOR:
- report.Report(pass, ins, fmt.Sprintf("x %s 0 always equals x", ins.Op))
- }
+ switch ins.Op {
+ case token.AND:
+ pass.Reportf(ins.Pos(), "x & 0 always equals 0")
+ case token.OR, token.XOR:
+ pass.Reportf(ins.Pos(), "x %s 0 always equals x", ins.Op)
}
}
}
}
- fn := func(node ast.Node) {
- binop := node.(*ast.BinaryExpr)
- b, ok := pass.TypesInfo.TypeOf(binop).Underlying().(*types.Basic)
- if !ok {
- return
- }
- if (b.Info() & types.IsInteger) == 0 {
- return
- }
- switch binop.Op {
- case token.AND, token.OR, token.XOR:
- default:
- // we do not flag shifts because too often, x<<0 is part
- // of a pattern, x<<0, x<<8, x<<16, ...
- return
- }
- switch y := binop.Y.(type) {
- case *ast.Ident:
- obj, ok := pass.TypesInfo.ObjectOf(y).(*types.Const)
- if !ok {
- return
- }
- if v, _ := constant.Int64Val(obj.Val()); v != 0 {
- return
- }
- path, _ := astutil.PathEnclosingInterval(code.File(pass, obj), obj.Pos(), obj.Pos())
- if len(path) < 2 {
- return
- }
- spec, ok := path[1].(*ast.ValueSpec)
- if !ok {
- return
- }
- if len(spec.Names) != 1 || len(spec.Values) != 1 {
- // TODO(dh): we could support this
- return
- }
- ident, ok := spec.Values[0].(*ast.Ident)
- if !ok {
- return
- }
- if !isIota(pass.TypesInfo.ObjectOf(ident)) {
- return
- }
- switch binop.Op {
- case token.AND:
- report.Report(pass, node,
- fmt.Sprintf("%s always equals 0; %s is defined as iota and has value 0, maybe %s is meant to be 1 << iota?", report.Render(pass, binop), report.Render(pass, binop.Y), report.Render(pass, binop.Y)))
- case token.OR, token.XOR:
- report.Report(pass, node,
- fmt.Sprintf("%s always equals %s; %s is defined as iota and has value 0, maybe %s is meant to be 1 << iota?", report.Render(pass, binop), report.Render(pass, binop.X), report.Render(pass, binop.Y), report.Render(pass, binop.Y)))
- }
- case *ast.BasicLit:
- if !code.IsIntLiteral(binop.Y, "0") {
- return
- }
- switch binop.Op {
- case token.AND:
- report.Report(pass, node, fmt.Sprintf("%s always equals 0", report.Render(pass, binop)))
- case token.OR, token.XOR:
- report.Report(pass, node, fmt.Sprintf("%s always equals %s", report.Render(pass, binop), report.Render(pass, binop.X)))
- }
- default:
- return
- }
- }
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
return nil, nil
}
-func isIota(obj types.Object) bool {
- if obj.Name() != "iota" {
- return false
- }
- c, ok := obj.(*types.Const)
- if !ok {
- return false
- }
- return c.Pkg() == nil
-}
-
func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
@@ -2746,12 +2545,14 @@ func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) {
return
}
n := sig.Params().Len()
+ var args []int
for i := 0; i < n; i++ {
typ := sig.Params().At(i).Type()
- if !code.IsType(typ, "os.FileMode") {
- continue
+ if IsType(typ, "os.FileMode") {
+ args = append(args, i)
}
-
+ }
+ for _, i := range args {
lit, ok := call.Args[i].(*ast.BasicLit)
if !ok {
continue
@@ -2766,12 +2567,11 @@ func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) {
if err != nil {
continue
}
- report.Report(pass, call.Args[i], fmt.Sprintf("file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value),
- report.Fixes(edit.Fix("fix octal literal", edit.ReplaceWithString(pass.Fset, call.Args[i], "0"+lit.Value))))
+ ReportNodef(pass, call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value)
}
}
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
@@ -2779,12 +2579,12 @@ func CheckPureFunctions(pass *analysis.Pass) (interface{}, error) {
pure := pass.ResultOf[facts.Purity].(facts.PurityResult)
fnLoop:
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- if code.IsInTest(pass, fn) {
- params := fn.Signature.Params()
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ if IsInTest(pass, ssafn) {
+ params := ssafn.Signature.Params()
for i := 0; i < params.Len(); i++ {
param := params.At(i)
- if code.IsType(param.Type(), "*testing.B") {
+ if IsType(param.Type(), "*testing.B") {
// Ignore discarded pure functions in code related
// to benchmarks. Instead of matching BenchmarkFoo
// functions, we match any function accepting a
@@ -2797,17 +2597,16 @@ fnLoop:
}
}
- for _, b := range fn.Blocks {
+ for _, b := range ssafn.Blocks {
for _, ins := range b.Instrs {
- ins, ok := ins.(*ir.Call)
+ ins, ok := ins.(*ssa.Call)
if !ok {
continue
}
refs := ins.Referrers()
- if refs == nil || len(code.FilterDebug(*refs)) > 0 {
+ if refs == nil || len(FilterDebug(*refs)) > 0 {
continue
}
-
callee := ins.Common().StaticCallee()
if callee == nil {
continue
@@ -2817,11 +2616,8 @@ fnLoop:
continue
}
if _, ok := pure[callee.Object().(*types.Func)]; ok {
- if pass.Pkg.Path() == "fmt_test" && callee.Object().(*types.Func).FullName() == "fmt.Sprintf" {
- // special case for benchmarks in the fmt package
- continue
- }
- report.Report(pass, ins, fmt.Sprintf("%s is a pure function but its return value is ignored", callee.Name()))
+ pass.Reportf(ins.Pos(), "%s is a pure function but its return value is ignored", callee.Name())
+ continue
}
}
}
@@ -2869,8 +2665,8 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) {
// already in 1.0, and we're targeting 1.2, it still
// makes sense to use the alternative from 1.0, to be
// future-proof.
- minVersion := deprecated.Stdlib[code.SelectorName(pass, sel)].AlternativeAvailableSince
- if !code.IsGoVersion(pass, minVersion) {
+ minVersion := deprecated.Stdlib[SelectorName(pass, sel)].AlternativeAvailableSince
+ if !IsGoVersion(pass, minVersion) {
return true
}
@@ -2881,29 +2677,27 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) {
return true
}
}
- report.Report(pass, sel, fmt.Sprintf("%s is deprecated: %s", report.Render(pass, sel), depr.Msg))
+ ReportNodef(pass, sel, "%s is deprecated: %s", Render(pass, sel), depr.Msg)
return true
}
return true
}
+ imps := map[string]*types.Package{}
+ for _, imp := range pass.Pkg.Imports() {
+ imps[imp.Path()] = imp
+ }
fn2 := func(node ast.Node) {
spec := node.(*ast.ImportSpec)
- var imp *types.Package
- if spec.Name != nil {
- imp = pass.TypesInfo.ObjectOf(spec.Name).(*types.PkgName).Imported()
- } else {
- imp = pass.TypesInfo.Implicits[spec].(*types.PkgName).Imported()
- }
-
p := spec.Path.Value
path := p[1 : len(p)-1]
+ imp := imps[path]
if depr, ok := deprs.Packages[imp]; ok {
- report.Report(pass, spec, fmt.Sprintf("package %s is deprecated: %s", path, depr.Msg))
+ ReportNodef(pass, spec, "Package %s is deprecated: %s", path, depr.Msg)
}
}
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes(nil, fn)
- code.Preorder(pass, fn2, (*ast.ImportSpec)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, fn2)
return nil, nil
}
@@ -2914,7 +2708,8 @@ func callChecker(rules map[string]CallCheck) func(pass *analysis.Pass) (interfac
}
func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, error) {
- cb := func(caller *ir.Function, site ir.CallInstruction, callee *ir.Function) {
+ ranges := pass.ResultOf[valueRangesAnalyzer].(map[*ssa.Function]vrp.Ranges)
+ fn := func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) {
obj, ok := callee.Object().(*types.Func)
if !ok {
return
@@ -2925,15 +2720,16 @@ func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, e
return
}
var args []*Argument
- irargs := site.Common().Args
+ ssaargs := site.Common().Args
if callee.Signature.Recv() != nil {
- irargs = irargs[1:]
+ ssaargs = ssaargs[1:]
}
- for _, arg := range irargs {
- if iarg, ok := arg.(*ir.MakeInterface); ok {
+ for _, arg := range ssaargs {
+ if iarg, ok := arg.(*ssa.MakeInterface); ok {
arg = iarg.X
}
- args = append(args, &Argument{Value: Value{arg}})
+ vr := ranges[site.Parent()][arg]
+ args = append(args, &Argument{Value: Value{arg, vr}})
}
call := &Call{
Pass: pass,
@@ -2942,45 +2738,44 @@ func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, e
Parent: site.Parent(),
}
r(call)
- path, _ := astutil.PathEnclosingInterval(code.File(pass, site), site.Pos(), site.Pos())
- var astcall *ast.CallExpr
- for _, el := range path {
- if expr, ok := el.(*ast.CallExpr); ok {
- astcall = expr
- break
- }
- }
for idx, arg := range call.Args {
+ _ = idx
for _, e := range arg.invalids {
- if astcall != nil {
- report.Report(pass, astcall.Args[idx], e)
- } else {
- report.Report(pass, site, e)
- }
+ // path, _ := astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos())
+ // if len(path) < 2 {
+ // continue
+ // }
+ // astcall, ok := path[0].(*ast.CallExpr)
+ // if !ok {
+ // continue
+ // }
+ // pass.Reportf(astcall.Args[idx], "%s", e)
+
+ pass.Reportf(site.Pos(), "%s", e)
}
}
for _, e := range call.invalids {
- report.Report(pass, call.Instr, e)
+ pass.Reportf(call.Instr.Common().Pos(), "%s", e)
}
}
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- eachCall(fn, cb)
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ eachCall(ssafn, fn)
}
return nil, nil
}
-func shortCallName(call *ir.CallCommon) string {
+func shortCallName(call *ssa.CallCommon) string {
if call.IsInvoke() {
return ""
}
switch v := call.Value.(type) {
- case *ir.Function:
+ case *ssa.Function:
fn, ok := v.Object().(*types.Func)
if !ok {
return ""
}
return fn.Name()
- case *ir.Builtin:
+ case *ssa.Builtin:
return v.Name()
}
return ""
@@ -2991,9 +2786,9 @@ func CheckWriterBufferModified(pass *analysis.Pass) (interface{}, error) {
// Taint the argument as MUST_NOT_MODIFY, then propagate that
// through functions like bytes.Split
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- sig := fn.Signature
- if fn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ sig := ssafn.Signature
+ if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 {
continue
}
tArg, ok := sig.Params().At(0).Type().(*types.Slice)
@@ -3006,30 +2801,30 @@ func CheckWriterBufferModified(pass *analysis.Pass) (interface{}, error) {
if basic, ok := sig.Results().At(0).Type().(*types.Basic); !ok || basic.Kind() != types.Int {
continue
}
- if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || !code.IsType(named, "error") {
+ if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || !IsType(named, "error") {
continue
}
- for _, block := range fn.Blocks {
+ for _, block := range ssafn.Blocks {
for _, ins := range block.Instrs {
switch ins := ins.(type) {
- case *ir.Store:
- addr, ok := ins.Addr.(*ir.IndexAddr)
+ case *ssa.Store:
+ addr, ok := ins.Addr.(*ssa.IndexAddr)
if !ok {
continue
}
- if addr.X != fn.Params[1] {
+ if addr.X != ssafn.Params[1] {
continue
}
- report.Report(pass, ins, "io.Writer.Write must not modify the provided buffer, not even temporarily")
- case *ir.Call:
- if !code.IsCallTo(ins.Common(), "append") {
+ pass.Reportf(ins.Pos(), "io.Writer.Write must not modify the provided buffer, not even temporarily")
+ case *ssa.Call:
+ if !IsCallTo(ins.Common(), "append") {
continue
}
- if ins.Common().Args[0] != fn.Params[1] {
+ if ins.Common().Args[0] != ssafn.Params[1] {
continue
}
- report.Report(pass, ins, "io.Writer.Write must not modify the provided buffer, not even temporarily")
+ pass.Reportf(ins.Pos(), "io.Writer.Write must not modify the provided buffer, not even temporarily")
}
}
}
@@ -3050,14 +2845,14 @@ func loopedRegexp(name string) CallCheck {
}
func CheckEmptyBranch(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- if fn.Source() == nil {
+ for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ if ssafn.Syntax() == nil {
continue
}
- if code.IsExample(fn) {
+ if IsExample(ssafn) {
continue
}
- cb := func(node ast.Node) bool {
+ fn := func(node ast.Node) bool {
ifstmt, ok := node.(*ast.IfStmt)
if !ok {
return true
@@ -3067,26 +2862,26 @@ func CheckEmptyBranch(pass *analysis.Pass) (interface{}, error) {
if !ok || len(b.List) != 0 {
return true
}
- report.Report(pass, ifstmt.Else, "empty branch", report.FilterGenerated(), report.ShortRange())
+ ReportfFG(pass, ifstmt.Else.Pos(), "empty branch")
}
if len(ifstmt.Body.List) != 0 {
return true
}
- report.Report(pass, ifstmt, "empty branch", report.FilterGenerated(), report.ShortRange())
+ ReportfFG(pass, ifstmt.Pos(), "empty branch")
return true
}
- Inspect(fn.Source(), cb)
+ Inspect(ssafn.Syntax(), fn)
}
return nil, nil
}
func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
for _, b := range fn.Blocks {
insLoop:
for _, ins := range b.Instrs {
// find []byte -> string conversions
- conv, ok := ins.(*ir.Convert)
+ conv, ok := ins.(*ssa.Convert)
if !ok || conv.Type() != types.Universe.Lookup("string").Type() {
continue
}
@@ -3103,7 +2898,7 @@ func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) {
// skip first reference, that's the conversion itself
for _, ref := range (*refs)[1:] {
switch ref := ref.(type) {
- case *ir.DebugRef:
+ case *ssa.DebugRef:
if _, ok := ref.Expr.(*ast.Ident); !ok {
// the string seems to be used somewhere
// unexpected; the default branch should
@@ -3112,7 +2907,7 @@ func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) {
} else {
ident = true
}
- case *ir.MapLookup:
+ case *ssa.Lookup:
default:
// the string is used somewhere else than a
// map lookup
@@ -3125,7 +2920,7 @@ func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) {
if !ident {
continue
}
- report.Report(pass, conv, "m[string(key)] would be more efficient than k := string(key); m[k]")
+ pass.Reportf(conv.Pos(), "m[string(key)] would be more efficient than k := string(key); m[k]")
}
}
}
@@ -3137,30 +2932,20 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
}
func CheckSelfAssignment(pass *analysis.Pass) (interface{}, error) {
- pure := pass.ResultOf[facts.Purity].(facts.PurityResult)
-
fn := func(node ast.Node) {
assign := node.(*ast.AssignStmt)
if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) {
return
}
- for i, lhs := range assign.Lhs {
- rhs := assign.Rhs[i]
- if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) {
- continue
- }
- if code.MayHaveSideEffects(pass, lhs, pure) || code.MayHaveSideEffects(pass, rhs, pure) {
- continue
- }
-
- rlh := report.Render(pass, lhs)
- rrh := report.Render(pass, rhs)
+ for i, stmt := range assign.Lhs {
+ rlh := Render(pass, stmt)
+ rrh := Render(pass, assign.Rhs[i])
if rlh == rrh {
- report.Report(pass, assign, fmt.Sprintf("self-assignment of %s to %s", rrh, rlh), report.FilterGenerated())
+ ReportfFG(pass, assign.Pos(), "self-assignment of %s to %s", rrh, rlh)
}
}
}
- code.Preorder(pass, fn, (*ast.AssignStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn)
return nil, nil
}
@@ -3191,10 +2976,9 @@ func CheckDuplicateBuildConstraints(pass *analysis.Pass) (interface{}, error) {
continue
}
if buildTagsIdentical(constraint1, constraint2) {
- msg := fmt.Sprintf("identical build constraints %q and %q",
+ ReportfFG(pass, f.Pos(), "identical build constraints %q and %q",
strings.Join(constraint1, " "),
strings.Join(constraint2, " "))
- report.Report(pass, f, msg, report.FilterGenerated(), report.ShortRange())
}
}
}
@@ -3205,17 +2989,19 @@ func CheckDuplicateBuildConstraints(pass *analysis.Pass) (interface{}, error) {
func CheckSillyRegexp(pass *analysis.Pass) (interface{}, error) {
// We could use the rule checking engine for this, but the
// arguments aren't really invalid.
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
for _, b := range fn.Blocks {
for _, ins := range b.Instrs {
- call, ok := ins.(*ir.Call)
+ call, ok := ins.(*ssa.Call)
if !ok {
continue
}
- if !code.IsCallToAny(call.Common(), "regexp.MustCompile", "regexp.Compile", "regexp.Match", "regexp.MatchReader", "regexp.MatchString") {
+ switch CallName(call.Common()) {
+ case "regexp.MustCompile", "regexp.Compile", "regexp.Match", "regexp.MatchReader", "regexp.MatchString":
+ default:
continue
}
- c, ok := call.Common().Args[0].(*ir.Const)
+ c, ok := call.Common().Args[0].(*ssa.Const)
if !ok {
continue
}
@@ -3227,7 +3013,7 @@ func CheckSillyRegexp(pass *analysis.Pass) (interface{}, error) {
if re.Op != syntax.OpLiteral && re.Op != syntax.OpEmptyMatch {
continue
}
- report.Report(pass, call, "regular expression does not contain any meta characters")
+ pass.Reportf(call.Pos(), "regular expression does not contain any meta characters")
}
}
}
@@ -3244,7 +3030,7 @@ func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error
return
}
- groups := code.GroupSpecs(pass.Fset, decl.Specs)
+ groups := GroupSpecs(pass.Fset, decl.Specs)
groupLoop:
for _, group := range groups {
if len(group) < 2 {
@@ -3277,37 +3063,30 @@ func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error
continue groupLoop
}
}
- var edits []analysis.TextEdit
- typ := group[0].(*ast.ValueSpec).Type
- for _, spec := range group[1:] {
- nspec := *spec.(*ast.ValueSpec)
- nspec.Type = typ
- edits = append(edits, edit.ReplaceWithNode(pass.Fset, spec, &nspec))
- }
- report.Report(pass, group[0], "only the first constant in this group has an explicit type", report.Fixes(edit.Fix("add type to all constants in group", edits...)))
+ ReportNodef(pass, group[0], "only the first constant in this group has an explicit type")
}
}
- code.Preorder(pass, fn, (*ast.GenDecl)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn)
return nil, nil
}
func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
for _, block := range fn.Blocks {
for _, ins := range block.Instrs {
- call, ok := ins.(*ir.Call)
+ call, ok := ins.(*ssa.Call)
if !ok {
continue
}
- if !code.IsCallTo(call.Common(), "(*time.Timer).Reset") {
+ if !IsCallTo(call.Common(), "(*time.Timer).Reset") {
continue
}
refs := call.Referrers()
if refs == nil {
continue
}
- for _, ref := range code.FilterDebug(*refs) {
- ifstmt, ok := ref.(*ir.If)
+ for _, ref := range FilterDebug(*refs) {
+ ifstmt, ok := ref.(*ssa.If)
if !ok {
continue
}
@@ -3322,7 +3101,7 @@ func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) {
// statements a la "if x || y"
continue
}
- irutil.Walk(succ, func(b *ir.BasicBlock) bool {
+ ssautil.Walk(succ, func(b *ssa.BasicBlock) bool {
if !succ.Dominates(b) {
// We've reached the end of the branch
return false
@@ -3335,7 +3114,7 @@ func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) {
// priority, considering the rarity of
// Reset and the tiny likeliness of a
// false positive
- if ins, ok := ins.(*ir.Recv); ok && code.IsType(ins.Chan.Type(), "<-chan time.Time") {
+ if ins, ok := ins.(*ssa.UnOp); ok && ins.Op == token.ARROW && IsType(ins.X.Type(), "<-chan time.Time") {
found = true
return false
}
@@ -3345,7 +3124,7 @@ func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) {
}
if found {
- report.Report(pass, call, "it is not possible to use Reset's return value correctly, as there is a race condition between draining the channel and the new timer expiring")
+ pass.Reportf(call.Pos(), "it is not possible to use Reset's return value correctly, as there is a race condition between draining the channel and the new timer expiring")
}
}
}
@@ -3354,33 +3133,43 @@ func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) {
return nil, nil
}
-var (
- checkToLowerToUpperComparisonQ = pattern.MustParse(`
- (BinaryExpr
- (CallExpr fun@(Function (Or "strings.ToLower" "strings.ToUpper")) [a])
- tok@(Or "==" "!=")
- (CallExpr fun [b]))`)
- checkToLowerToUpperComparisonR = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "strings") (Ident "EqualFold")) [a b])`)
-)
-
func CheckToLowerToUpperComparison(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- m, ok := Match(pass, checkToLowerToUpperComparisonQ, node)
- if !ok {
+ binExpr := node.(*ast.BinaryExpr)
+
+ var negative bool
+ switch binExpr.Op {
+ case token.EQL:
+ negative = false
+ case token.NEQ:
+ negative = true
+ default:
return
}
- rn := pattern.NodeToAST(checkToLowerToUpperComparisonR.Root, m.State).(ast.Expr)
- if m.State["tok"].(token.Token) == token.NEQ {
- rn = &ast.UnaryExpr{
- Op: token.NOT,
- X: rn,
- }
+
+ const (
+ lo = "strings.ToLower"
+ up = "strings.ToUpper"
+ )
+
+ var call string
+ if IsCallToAST(pass, binExpr.X, lo) && IsCallToAST(pass, binExpr.Y, lo) {
+ call = lo
+ } else if IsCallToAST(pass, binExpr.X, up) && IsCallToAST(pass, binExpr.Y, up) {
+ call = up
+ } else {
+ return
+ }
+
+ bang := ""
+ if negative {
+ bang = "!"
}
- report.Report(pass, node, "should use strings.EqualFold instead", report.Fixes(edit.Fix("replace with strings.EqualFold", edit.ReplaceWithNode(pass.Fset, node, rn))))
+ ReportNodef(pass, binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call)
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
@@ -3442,47 +3231,32 @@ func CheckUnreachableTypeCases(pass *analysis.Pass) (interface{}, error) {
for i, cc := range ccs[:len(ccs)-1] {
for _, next := range ccs[i+1:] {
if T, V, yes := subsumesAny(cc.types, next.types); yes {
- report.Report(pass, next.cc, fmt.Sprintf("unreachable case clause: %s will always match before %s", T.String(), V.String()),
- report.ShortRange())
+ ReportNodef(pass, next.cc, "unreachable case clause: %s will always match before %s", T.String(), V.String())
}
}
}
}
- code.Preorder(pass, fn, (*ast.TypeSwitchStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn)
return nil, nil
}
-var checkSingleArgAppendQ = pattern.MustParse(`(CallExpr (Builtin "append") [_])`)
-
func CheckSingleArgAppend(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- _, ok := Match(pass, checkSingleArgAppendQ, node)
- if !ok {
+ if !IsCallToAST(pass, node, "append") {
return
}
- report.Report(pass, node, "x = append(y) is equivalent to x = y", report.FilterGenerated())
+ call := node.(*ast.CallExpr)
+ if len(call.Args) != 1 {
+ return
+ }
+ ReportfFG(pass, call.Pos(), "x = append(y) is equivalent to x = y")
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn)
return nil, nil
}
func CheckStructTags(pass *analysis.Pass) (interface{}, error) {
- importsGoFlags := false
-
- // we use the AST instead of (*types.Package).Imports to work
- // around vendored packages in GOPATH mode. A vendored package's
- // path will include the vendoring subtree as a prefix.
- for _, f := range pass.Files {
- for _, imp := range f.Imports {
- v := imp.Path.Value
- if v[1:len(v)-1] == "github.com/jessevdk/go-flags" {
- importsGoFlags = true
- break
- }
- }
- }
-
fn := func(node ast.Node) {
for _, field := range node.(*ast.StructType).Fields.List {
if field.Tag == nil {
@@ -3490,16 +3264,13 @@ func CheckStructTags(pass *analysis.Pass) (interface{}, error) {
}
tags, err := parseStructTag(field.Tag.Value[1 : len(field.Tag.Value)-1])
if err != nil {
- report.Report(pass, field.Tag, fmt.Sprintf("unparseable struct tag: %s", err))
+ ReportNodef(pass, field.Tag, "unparseable struct tag: %s", err)
continue
}
for k, v := range tags {
if len(v) > 1 {
- isGoFlagsTag := importsGoFlags &&
- (k == "choice" || k == "optional-value" || k == "default")
- if !isGoFlagsTag {
- report.Report(pass, field.Tag, fmt.Sprintf("duplicate struct tag %q", k))
- }
+ ReportNodef(pass, field.Tag, "duplicate struct tag %q", k)
+ continue
}
switch k {
@@ -3511,24 +3282,18 @@ func CheckStructTags(pass *analysis.Pass) (interface{}, error) {
}
}
}
- code.Preorder(pass, fn, (*ast.StructType)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.StructType)(nil)}, fn)
return nil, nil
}
func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) {
- if pass.Pkg.Path() == "encoding/json" || pass.Pkg.Path() == "encoding/json_test" {
- // don't flag malformed JSON tags in the encoding/json
- // package; it knows what it is doing, and it is testing
- // itself.
- return
- }
//lint:ignore SA9003 TODO(dh): should we flag empty tags?
if len(tag) == 0 {
}
fields := strings.Split(tag, ",")
for _, r := range fields[0] {
if !unicode.IsLetter(r) && !unicode.IsDigit(r) && !strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", r) {
- report.Report(pass, field.Tag, fmt.Sprintf("invalid JSON field name %q", fields[0]))
+ ReportNodef(pass, field.Tag, "invalid JSON field name %q", fields[0])
}
}
var co, cs, ci int
@@ -3541,25 +3306,25 @@ func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) {
case "string":
cs++
// only for string, floating point, integer and bool
- T := code.Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying()
+ T := Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying()
basic, ok := T.(*types.Basic)
if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 {
- report.Report(pass, field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those")
+ ReportNodef(pass, field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those")
}
case "inline":
ci++
default:
- report.Report(pass, field.Tag, fmt.Sprintf("unknown JSON option %q", s))
+ ReportNodef(pass, field.Tag, "unknown JSON option %q", s)
}
}
if co > 1 {
- report.Report(pass, field.Tag, `duplicate JSON option "omitempty"`)
+ ReportNodef(pass, field.Tag, `duplicate JSON option "omitempty"`)
}
if cs > 1 {
- report.Report(pass, field.Tag, `duplicate JSON option "string"`)
+ ReportNodef(pass, field.Tag, `duplicate JSON option "string"`)
}
if ci > 1 {
- report.Report(pass, field.Tag, `duplicate JSON option "inline"`)
+ ReportNodef(pass, field.Tag, `duplicate JSON option "inline"`)
}
}
@@ -3581,211 +3346,15 @@ func checkXMLTag(pass *analysis.Pass, field *ast.Field, tag string) {
counts[s]++
case "":
default:
- report.Report(pass, field.Tag, fmt.Sprintf("unknown XML option %q", s))
+ ReportNodef(pass, field.Tag, "unknown XML option %q", s)
}
}
for k, v := range counts {
if v > 1 {
- report.Report(pass, field.Tag, fmt.Sprintf("duplicate XML option %q", k))
+ ReportNodef(pass, field.Tag, "duplicate XML option %q", k)
}
}
if len(exclusives) > 1 {
- report.Report(pass, field.Tag, fmt.Sprintf("XML options %s are mutually exclusive", strings.Join(exclusives, " and ")))
- }
-}
-
-func CheckImpossibleTypeAssertion(pass *analysis.Pass) (interface{}, error) {
- type entry struct {
- l, r *types.Func
- }
-
- msc := &pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg.Prog.MethodSets
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- for _, b := range fn.Blocks {
- for _, instr := range b.Instrs {
- assert, ok := instr.(*ir.TypeAssert)
- if !ok {
- continue
- }
- var wrong []entry
- left := assert.X.Type()
- right := assert.AssertedType
- righti, ok := right.Underlying().(*types.Interface)
-
- if !ok {
- // We only care about interface->interface
- // assertions. The Go compiler already catches
- // impossible interface->concrete assertions.
- continue
- }
-
- ms := msc.MethodSet(left)
- for i := 0; i < righti.NumMethods(); i++ {
- mr := righti.Method(i)
- sel := ms.Lookup(mr.Pkg(), mr.Name())
- if sel == nil {
- continue
- }
- ml := sel.Obj().(*types.Func)
- if types.AssignableTo(ml.Type(), mr.Type()) {
- continue
- }
-
- wrong = append(wrong, entry{ml, mr})
- }
-
- if len(wrong) != 0 {
- s := fmt.Sprintf("impossible type assertion; %s and %s contradict each other:",
- types.TypeString(left, types.RelativeTo(pass.Pkg)),
- types.TypeString(right, types.RelativeTo(pass.Pkg)))
- for _, e := range wrong {
- s += fmt.Sprintf("\n\twrong type for %s method", e.l.Name())
- s += fmt.Sprintf("\n\t\thave %s", e.l.Type())
- s += fmt.Sprintf("\n\t\twant %s", e.r.Type())
- }
- report.Report(pass, assert, s)
- }
- }
- }
- }
- return nil, nil
-}
-
-func checkWithValueKey(call *Call) {
- arg := call.Args[1]
- T := arg.Value.Value.Type()
- if T, ok := T.(*types.Basic); ok {
- arg.Invalid(
- fmt.Sprintf("should not use built-in type %s as key for value; define your own type to avoid collisions", T))
- }
- if !types.Comparable(T) {
- arg.Invalid(fmt.Sprintf("keys used with context.WithValue must be comparable, but type %s is not comparable", T))
- }
-}
-
-func CheckMaybeNil(pass *analysis.Pass) (interface{}, error) {
- // This is an extremely trivial check that doesn't try to reason
- // about control flow. That is, phis and sigmas do not propagate
- // any information. As such, we can flag this:
- //
- // _ = *x
- // if x == nil { return }
- //
- // but we cannot flag this:
- //
- // if x == nil { println(x) }
- // _ = *x
- //
- // nor many other variations of conditional uses of or assignments to x.
- //
- // However, even this trivial implementation finds plenty of
- // real-world bugs, such as dereference before nil pointer check,
- // or using t.Error instead of t.Fatal when encountering nil
- // pointers.
- //
- // On the flip side, our naive implementation avoids false positives in branches, such as
- //
- // if x != nil { _ = *x }
- //
- // due to the same lack of propagating information through sigma
- // nodes. x inside the branch will be independent of the x in the
- // nil pointer check.
- //
- //
- // We could implement a more powerful check, but then we'd be
- // getting false positives instead of false negatives because
- // we're incapable of deducing relationships between variables.
- // For example, a function might return a pointer and an error,
- // and the error being nil guarantees that the pointer is not nil.
- // Depending on the surrounding code, the pointer may still end up
- // being checked against nil in one place, and guarded by a check
- // on the error in another, which would lead to us marking some
- // loads as unsafe.
- //
- // Unfortunately, simply hard-coding the relationship between
- // return values wouldn't eliminate all false positives, either.
- // Many other more subtle relationships exist. An abridged example
- // from real code:
- //
- // if a == nil && b == nil { return }
- // c := fn(a)
- // if c != "" { _ = *a }
- //
- // where `fn` is guaranteed to return a non-empty string if a
- // isn't nil.
- //
- // We choose to err on the side of false negatives.
-
- isNilConst := func(v ir.Value) bool {
- if code.IsPointerLike(v.Type()) {
- if k, ok := v.(*ir.Const); ok {
- return k.IsNil()
- }
- }
- return false
- }
-
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- maybeNil := map[ir.Value]ir.Instruction{}
- for _, b := range fn.Blocks {
- for _, instr := range b.Instrs {
- if instr, ok := instr.(*ir.BinOp); ok {
- var ptr ir.Value
- if isNilConst(instr.X) {
- ptr = instr.Y
- } else if isNilConst(instr.Y) {
- ptr = instr.X
- }
- maybeNil[ptr] = instr
- }
- }
- }
-
- for _, b := range fn.Blocks {
- for _, instr := range b.Instrs {
- var ptr ir.Value
- switch instr := instr.(type) {
- case *ir.Load:
- ptr = instr.X
- case *ir.Store:
- ptr = instr.Addr
- case *ir.IndexAddr:
- ptr = instr.X
- case *ir.FieldAddr:
- ptr = instr.X
- }
- if ptr != nil {
- switch ptr.(type) {
- case *ir.Alloc, *ir.FieldAddr, *ir.IndexAddr:
- // these cannot be nil
- continue
- }
- if r, ok := maybeNil[ptr]; ok {
- report.Report(pass, instr, "possible nil pointer dereference",
- report.Related(r, "this check suggests that the pointer can be nil"))
- }
- }
- }
- }
+ ReportNodef(pass, field.Tag, "XML options %s are mutually exclusive", strings.Join(exclusives, " and "))
}
-
- return nil, nil
-}
-
-var checkAddressIsNilQ = pattern.MustParse(
- `(BinaryExpr
- (UnaryExpr "&" _)
- (Or "==" "!=")
- (Builtin "nil"))`)
-
-func CheckAddressIsNil(pass *analysis.Pass) (interface{}, error) {
- fn := func(node ast.Node) {
- _, ok := Match(pass, checkAddressIsNilQ, node)
- if !ok {
- return
- }
- report.Report(pass, node, "the address of a variable cannot be nil")
- }
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
- return nil, nil
}
diff --git a/vendor/honnef.co/go/tools/staticcheck/rules.go b/vendor/honnef.co/go/tools/staticcheck/rules.go
index 57f7282de..0152cac1a 100644
--- a/vendor/honnef.co/go/tools/staticcheck/rules.go
+++ b/vendor/honnef.co/go/tools/staticcheck/rules.go
@@ -14,8 +14,9 @@ import (
"unicode/utf8"
"golang.org/x/tools/go/analysis"
- "honnef.co/go/tools/code"
- "honnef.co/go/tools/ir"
+ . "honnef.co/go/tools/lint/lintdsl"
+ "honnef.co/go/tools/ssa"
+ "honnef.co/go/tools/staticcheck/vrp"
)
const (
@@ -26,10 +27,10 @@ const (
type Call struct {
Pass *analysis.Pass
- Instr ir.CallInstruction
+ Instr ssa.CallInstruction
Args []*Argument
- Parent *ir.Function
+ Parent *ssa.Function
invalids []string
}
@@ -43,21 +44,22 @@ type Argument struct {
invalids []string
}
-type Value struct {
- Value ir.Value
-}
-
func (arg *Argument) Invalid(msg string) {
arg.invalids = append(arg.invalids, msg)
}
+type Value struct {
+ Value ssa.Value
+ Range vrp.Range
+}
+
type CallCheck func(call *Call)
-func extractConsts(v ir.Value) []*ir.Const {
+func extractConsts(v ssa.Value) []*ssa.Const {
switch v := v.(type) {
- case *ir.Const:
- return []*ir.Const{v}
- case *ir.MakeInterface:
+ case *ssa.Const:
+ return []*ssa.Const{v}
+ case *ssa.MakeInterface:
return extractConsts(v.X)
default:
return nil
@@ -116,6 +118,20 @@ func ValidateURL(v Value) error {
return nil
}
+func IntValue(v Value, z vrp.Z) bool {
+ r, ok := v.Range.(vrp.IntInterval)
+ if !ok || !r.IsKnown() {
+ return false
+ }
+ if r.Lower != r.Upper {
+ return false
+ }
+ if r.Lower.Cmp(z) == 0 {
+ return true
+ }
+ return false
+}
+
func InvalidUTF8(v Value) bool {
for _, c := range extractConsts(v.Value) {
if c.Value == nil {
@@ -133,21 +149,13 @@ func InvalidUTF8(v Value) bool {
}
func UnbufferedChannel(v Value) bool {
- // TODO(dh): this check of course misses many cases of unbuffered
- // channels, such as any in phi or sigma nodes. We'll eventually
- // replace this function.
- val := v.Value
- if ct, ok := val.(*ir.ChangeType); ok {
- val = ct.X
- }
- mk, ok := val.(*ir.MakeChan)
- if !ok {
+ r, ok := v.Range.(vrp.ChannelInterval)
+ if !ok || !r.IsKnown() {
return false
}
- if k, ok := mk.Size.(*ir.Const); ok && k.Value.Kind() == constant.Int {
- if v, ok := constant.Int64Val(k.Value); ok && v == 0 {
- return true
- }
+ if r.Size.Lower.Cmp(vrp.NewZ(0)) == 0 &&
+ r.Size.Upper.Cmp(vrp.NewZ(0)) == 0 {
+ return true
}
return false
}
@@ -161,7 +169,7 @@ func Pointer(v Value) bool {
}
func ConvertedFromInt(v Value) bool {
- conv, ok := v.Value.(*ir.Convert)
+ conv, ok := v.Value.(*ssa.Convert)
if !ok {
return false
}
@@ -185,7 +193,7 @@ func validEncodingBinaryType(pass *analysis.Pass, typ types.Type) bool {
types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid:
return true
case types.Bool:
- return code.IsGoVersion(pass, 8)
+ return IsGoVersion(pass, 8)
}
return false
case *types.Struct:
@@ -224,10 +232,8 @@ func CanBinaryMarshal(pass *analysis.Pass, v Value) bool {
func RepeatZeroTimes(name string, arg int) CallCheck {
return func(call *Call) {
arg := call.Args[arg]
- if k, ok := arg.Value.Value.(*ir.Const); ok && k.Value.Kind() == constant.Int {
- if v, ok := constant.Int64Val(k.Value); ok && v == 0 {
- arg.Invalid(fmt.Sprintf("calling %s with n == 0 will return no results, did you mean -1?", name))
- }
+ if IntValue(arg.Value, vrp.NewZ(0)) {
+ arg.Invalid(fmt.Sprintf("calling %s with n == 0 will return no results, did you mean -1?", name))
}
}
}
@@ -287,8 +293,8 @@ func ValidHostPort(v Value) bool {
// ConvertedFrom reports whether value v was converted from type typ.
func ConvertedFrom(v Value, typ string) bool {
- change, ok := v.Value.(*ir.ChangeType)
- return ok && code.IsType(change.X.Type(), typ)
+ change, ok := v.Value.(*ssa.ChangeType)
+ return ok && IsType(change.X.Type(), typ)
}
func UniqueStringCutset(v Value) bool {
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go b/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go
new file mode 100644
index 000000000..0ef73787b
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go
@@ -0,0 +1,73 @@
+package vrp
+
+import (
+ "fmt"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type ChannelInterval struct {
+ Size IntInterval
+}
+
+func (c ChannelInterval) Union(other Range) Range {
+ i, ok := other.(ChannelInterval)
+ if !ok {
+ i = ChannelInterval{EmptyIntInterval}
+ }
+ if c.Size.Empty() || !c.Size.IsKnown() {
+ return i
+ }
+ if i.Size.Empty() || !i.Size.IsKnown() {
+ return c
+ }
+ return ChannelInterval{
+ Size: c.Size.Union(i.Size).(IntInterval),
+ }
+}
+
+func (c ChannelInterval) String() string {
+ return c.Size.String()
+}
+
+func (c ChannelInterval) IsKnown() bool {
+ return c.Size.IsKnown()
+}
+
+type MakeChannelConstraint struct {
+ aConstraint
+ Buffer ssa.Value
+}
+type ChannelChangeTypeConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+func NewMakeChannelConstraint(buffer, y ssa.Value) Constraint {
+ return &MakeChannelConstraint{NewConstraint(y), buffer}
+}
+func NewChannelChangeTypeConstraint(x, y ssa.Value) Constraint {
+ return &ChannelChangeTypeConstraint{NewConstraint(y), x}
+}
+
+func (c *MakeChannelConstraint) Operands() []ssa.Value { return []ssa.Value{c.Buffer} }
+func (c *ChannelChangeTypeConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+
+func (c *MakeChannelConstraint) String() string {
+ return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name(), c.Buffer.Name())
+}
+func (c *ChannelChangeTypeConstraint) String() string {
+ return fmt.Sprintf("%s = changetype(%s)", c.Y().Name(), c.X.Name())
+}
+
+func (c *MakeChannelConstraint) Eval(g *Graph) Range {
+ i, ok := g.Range(c.Buffer).(IntInterval)
+ if !ok {
+ return ChannelInterval{NewIntInterval(NewZ(0), PInfinity)}
+ }
+ if i.Lower.Sign() == -1 {
+ i.Lower = NewZ(0)
+ }
+ return ChannelInterval{i}
+}
+func (c *ChannelChangeTypeConstraint) Eval(g *Graph) Range { return g.Range(c.X) }
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/int.go b/vendor/honnef.co/go/tools/staticcheck/vrp/int.go
new file mode 100644
index 000000000..926bb7af3
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/int.go
@@ -0,0 +1,476 @@
+package vrp
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+ "math/big"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type Zs []Z
+
+func (zs Zs) Len() int {
+ return len(zs)
+}
+
+func (zs Zs) Less(i int, j int) bool {
+ return zs[i].Cmp(zs[j]) == -1
+}
+
+func (zs Zs) Swap(i int, j int) {
+ zs[i], zs[j] = zs[j], zs[i]
+}
+
+type Z struct {
+ infinity int8
+ integer *big.Int
+}
+
+func NewZ(n int64) Z {
+ return NewBigZ(big.NewInt(n))
+}
+
+func NewBigZ(n *big.Int) Z {
+ return Z{integer: n}
+}
+
+func (z1 Z) Infinite() bool {
+ return z1.infinity != 0
+}
+
+func (z1 Z) Add(z2 Z) Z {
+ if z2.Sign() == -1 {
+ return z1.Sub(z2.Negate())
+ }
+ if z1 == NInfinity {
+ return NInfinity
+ }
+ if z1 == PInfinity {
+ return PInfinity
+ }
+ if z2 == PInfinity {
+ return PInfinity
+ }
+
+ if !z1.Infinite() && !z2.Infinite() {
+ n := &big.Int{}
+ n.Add(z1.integer, z2.integer)
+ return NewBigZ(n)
+ }
+
+ panic(fmt.Sprintf("%s + %s is not defined", z1, z2))
+}
+
+func (z1 Z) Sub(z2 Z) Z {
+ if z2.Sign() == -1 {
+ return z1.Add(z2.Negate())
+ }
+ if !z1.Infinite() && !z2.Infinite() {
+ n := &big.Int{}
+ n.Sub(z1.integer, z2.integer)
+ return NewBigZ(n)
+ }
+
+ if z1 != PInfinity && z2 == PInfinity {
+ return NInfinity
+ }
+ if z1.Infinite() && !z2.Infinite() {
+ return Z{infinity: z1.infinity}
+ }
+ if z1 == PInfinity && z2 == PInfinity {
+ return PInfinity
+ }
+ panic(fmt.Sprintf("%s - %s is not defined", z1, z2))
+}
+
+func (z1 Z) Mul(z2 Z) Z {
+ if (z1.integer != nil && z1.integer.Sign() == 0) ||
+ (z2.integer != nil && z2.integer.Sign() == 0) {
+ return NewBigZ(&big.Int{})
+ }
+
+ if z1.infinity != 0 || z2.infinity != 0 {
+ return Z{infinity: int8(z1.Sign() * z2.Sign())}
+ }
+
+ n := &big.Int{}
+ n.Mul(z1.integer, z2.integer)
+ return NewBigZ(n)
+}
+
+func (z1 Z) Negate() Z {
+ if z1.infinity == 1 {
+ return NInfinity
+ }
+ if z1.infinity == -1 {
+ return PInfinity
+ }
+ n := &big.Int{}
+ n.Neg(z1.integer)
+ return NewBigZ(n)
+}
+
+func (z1 Z) Sign() int {
+ if z1.infinity != 0 {
+ return int(z1.infinity)
+ }
+ return z1.integer.Sign()
+}
+
+func (z1 Z) String() string {
+ if z1 == NInfinity {
+ return "-∞"
+ }
+ if z1 == PInfinity {
+ return "∞"
+ }
+ return fmt.Sprintf("%d", z1.integer)
+}
+
+func (z1 Z) Cmp(z2 Z) int {
+ if z1.infinity == z2.infinity && z1.infinity != 0 {
+ return 0
+ }
+ if z1 == PInfinity {
+ return 1
+ }
+ if z1 == NInfinity {
+ return -1
+ }
+ if z2 == NInfinity {
+ return 1
+ }
+ if z2 == PInfinity {
+ return -1
+ }
+ return z1.integer.Cmp(z2.integer)
+}
+
+func MaxZ(zs ...Z) Z {
+ if len(zs) == 0 {
+ panic("Max called with no arguments")
+ }
+ if len(zs) == 1 {
+ return zs[0]
+ }
+ ret := zs[0]
+ for _, z := range zs[1:] {
+ if z.Cmp(ret) == 1 {
+ ret = z
+ }
+ }
+ return ret
+}
+
+func MinZ(zs ...Z) Z {
+ if len(zs) == 0 {
+ panic("Min called with no arguments")
+ }
+ if len(zs) == 1 {
+ return zs[0]
+ }
+ ret := zs[0]
+ for _, z := range zs[1:] {
+ if z.Cmp(ret) == -1 {
+ ret = z
+ }
+ }
+ return ret
+}
+
+var NInfinity = Z{infinity: -1}
+var PInfinity = Z{infinity: 1}
+var EmptyIntInterval = IntInterval{true, PInfinity, NInfinity}
+
+func InfinityFor(v ssa.Value) IntInterval {
+ if b, ok := v.Type().Underlying().(*types.Basic); ok {
+ if (b.Info() & types.IsUnsigned) != 0 {
+ return NewIntInterval(NewZ(0), PInfinity)
+ }
+ }
+ return NewIntInterval(NInfinity, PInfinity)
+}
+
+type IntInterval struct {
+ known bool
+ Lower Z
+ Upper Z
+}
+
+func NewIntInterval(l, u Z) IntInterval {
+ if u.Cmp(l) == -1 {
+ return EmptyIntInterval
+ }
+ return IntInterval{known: true, Lower: l, Upper: u}
+}
+
+func (i IntInterval) IsKnown() bool {
+ return i.known
+}
+
+func (i IntInterval) Empty() bool {
+ return i.Lower == PInfinity && i.Upper == NInfinity
+}
+
+func (i IntInterval) IsMaxRange() bool {
+ return i.Lower == NInfinity && i.Upper == PInfinity
+}
+
+func (i1 IntInterval) Intersection(i2 IntInterval) IntInterval {
+ if !i1.IsKnown() {
+ return i2
+ }
+ if !i2.IsKnown() {
+ return i1
+ }
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ i3 := NewIntInterval(MaxZ(i1.Lower, i2.Lower), MinZ(i1.Upper, i2.Upper))
+ if i3.Lower.Cmp(i3.Upper) == 1 {
+ return EmptyIntInterval
+ }
+ return i3
+}
+
+func (i1 IntInterval) Union(other Range) Range {
+ i2, ok := other.(IntInterval)
+ if !ok {
+ i2 = EmptyIntInterval
+ }
+ if i1.Empty() || !i1.IsKnown() {
+ return i2
+ }
+ if i2.Empty() || !i2.IsKnown() {
+ return i1
+ }
+ return NewIntInterval(MinZ(i1.Lower, i2.Lower), MaxZ(i1.Upper, i2.Upper))
+}
+
+func (i1 IntInterval) Add(i2 IntInterval) IntInterval {
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper
+ return NewIntInterval(l1.Add(l2), u1.Add(u2))
+}
+
+func (i1 IntInterval) Sub(i2 IntInterval) IntInterval {
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper
+ return NewIntInterval(l1.Sub(u2), u1.Sub(l2))
+}
+
+func (i1 IntInterval) Mul(i2 IntInterval) IntInterval {
+ if i1.Empty() || i2.Empty() {
+ return EmptyIntInterval
+ }
+ x1, x2 := i1.Lower, i1.Upper
+ y1, y2 := i2.Lower, i2.Upper
+ return NewIntInterval(
+ MinZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)),
+ MaxZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)),
+ )
+}
+
+func (i1 IntInterval) String() string {
+ if !i1.IsKnown() {
+ return "[⊥, ⊥]"
+ }
+ if i1.Empty() {
+ return "{}"
+ }
+ return fmt.Sprintf("[%s, %s]", i1.Lower, i1.Upper)
+}
+
+type IntArithmeticConstraint struct {
+ aConstraint
+ A ssa.Value
+ B ssa.Value
+ Op token.Token
+ Fn func(IntInterval, IntInterval) IntInterval
+}
+
+type IntAddConstraint struct{ *IntArithmeticConstraint }
+type IntSubConstraint struct{ *IntArithmeticConstraint }
+type IntMulConstraint struct{ *IntArithmeticConstraint }
+
+type IntConversionConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+type IntIntersectionConstraint struct {
+ aConstraint
+ ranges Ranges
+ A ssa.Value
+ B ssa.Value
+ Op token.Token
+ I IntInterval
+ resolved bool
+}
+
+type IntIntervalConstraint struct {
+ aConstraint
+ I IntInterval
+}
+
+func NewIntArithmeticConstraint(a, b, y ssa.Value, op token.Token, fn func(IntInterval, IntInterval) IntInterval) *IntArithmeticConstraint {
+ return &IntArithmeticConstraint{NewConstraint(y), a, b, op, fn}
+}
+func NewIntAddConstraint(a, b, y ssa.Value) Constraint {
+ return &IntAddConstraint{NewIntArithmeticConstraint(a, b, y, token.ADD, IntInterval.Add)}
+}
+func NewIntSubConstraint(a, b, y ssa.Value) Constraint {
+ return &IntSubConstraint{NewIntArithmeticConstraint(a, b, y, token.SUB, IntInterval.Sub)}
+}
+func NewIntMulConstraint(a, b, y ssa.Value) Constraint {
+ return &IntMulConstraint{NewIntArithmeticConstraint(a, b, y, token.MUL, IntInterval.Mul)}
+}
+func NewIntConversionConstraint(x, y ssa.Value) Constraint {
+ return &IntConversionConstraint{NewConstraint(y), x}
+}
+func NewIntIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint {
+ return &IntIntersectionConstraint{
+ aConstraint: NewConstraint(y),
+ ranges: ranges,
+ A: a,
+ B: b,
+ Op: op,
+ }
+}
+func NewIntIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
+ return &IntIntervalConstraint{NewConstraint(y), i}
+}
+
+func (c *IntArithmeticConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} }
+func (c *IntConversionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (c *IntIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} }
+func (s *IntIntervalConstraint) Operands() []ssa.Value { return nil }
+
+func (c *IntArithmeticConstraint) String() string {
+ return fmt.Sprintf("%s = %s %s %s", c.Y().Name(), c.A.Name(), c.Op, c.B.Name())
+}
+func (c *IntConversionConstraint) String() string {
+ return fmt.Sprintf("%s = %s(%s)", c.Y().Name(), c.Y().Type(), c.X.Name())
+}
+func (c *IntIntersectionConstraint) String() string {
+ return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch)
+}
+func (c *IntIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
+
+func (c *IntArithmeticConstraint) Eval(g *Graph) Range {
+ i1, i2 := g.Range(c.A).(IntInterval), g.Range(c.B).(IntInterval)
+ if !i1.IsKnown() || !i2.IsKnown() {
+ return IntInterval{}
+ }
+ return c.Fn(i1, i2)
+}
+func (c *IntConversionConstraint) Eval(g *Graph) Range {
+ s := &types.StdSizes{
+ // XXX is it okay to assume the largest word size, or do we
+ // need to be platform specific?
+ WordSize: 8,
+ MaxAlign: 1,
+ }
+ fromI := g.Range(c.X).(IntInterval)
+ toI := g.Range(c.Y()).(IntInterval)
+ fromT := c.X.Type().Underlying().(*types.Basic)
+ toT := c.Y().Type().Underlying().(*types.Basic)
+ fromB := s.Sizeof(c.X.Type())
+ toB := s.Sizeof(c.Y().Type())
+
+ if !fromI.IsKnown() {
+ return toI
+ }
+ if !toI.IsKnown() {
+ return fromI
+ }
+
+ // uint<N> -> sint/uint<M>, M > N: [max(0, l1), min(2**N-1, u2)]
+ if (fromT.Info()&types.IsUnsigned != 0) &&
+ toB > fromB {
+
+ n := big.NewInt(1)
+ n.Lsh(n, uint(fromB*8))
+ n.Sub(n, big.NewInt(1))
+ return NewIntInterval(
+ MaxZ(NewZ(0), fromI.Lower),
+ MinZ(NewBigZ(n), toI.Upper),
+ )
+ }
+
+ // sint<N> -> sint<M>, M > N; [max(-∞, l1), min(2**N-1, u2)]
+ if (fromT.Info()&types.IsUnsigned == 0) &&
+ (toT.Info()&types.IsUnsigned == 0) &&
+ toB > fromB {
+
+ n := big.NewInt(1)
+ n.Lsh(n, uint(fromB*8))
+ n.Sub(n, big.NewInt(1))
+ return NewIntInterval(
+ MaxZ(NInfinity, fromI.Lower),
+ MinZ(NewBigZ(n), toI.Upper),
+ )
+ }
+
+ return fromI
+}
+func (c *IntIntersectionConstraint) Eval(g *Graph) Range {
+ xi := g.Range(c.A).(IntInterval)
+ if !xi.IsKnown() {
+ return c.I
+ }
+ return xi.Intersection(c.I)
+}
+func (c *IntIntervalConstraint) Eval(*Graph) Range { return c.I }
+
+func (c *IntIntersectionConstraint) Futures() []ssa.Value {
+ return []ssa.Value{c.B}
+}
+
+func (c *IntIntersectionConstraint) Resolve() {
+ r, ok := c.ranges[c.B].(IntInterval)
+ if !ok {
+ c.I = InfinityFor(c.Y())
+ return
+ }
+
+ switch c.Op {
+ case token.EQL:
+ c.I = r
+ case token.GTR:
+ c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity)
+ case token.GEQ:
+ c.I = NewIntInterval(r.Lower, PInfinity)
+ case token.LSS:
+ // TODO(dh): do we need 0 instead of NInfinity for uints?
+ c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1)))
+ case token.LEQ:
+ c.I = NewIntInterval(NInfinity, r.Upper)
+ case token.NEQ:
+ c.I = InfinityFor(c.Y())
+ default:
+ panic("unsupported op " + c.Op.String())
+ }
+}
+
+func (c *IntIntersectionConstraint) IsKnown() bool {
+ return c.I.IsKnown()
+}
+
+func (c *IntIntersectionConstraint) MarkUnresolved() {
+ c.resolved = false
+}
+
+func (c *IntIntersectionConstraint) MarkResolved() {
+ c.resolved = true
+}
+
+func (c *IntIntersectionConstraint) IsResolved() bool {
+ return c.resolved
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go b/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go
new file mode 100644
index 000000000..40658dd8d
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go
@@ -0,0 +1,273 @@
+package vrp
+
+// TODO(dh): most of the constraints have implementations identical to
+// that of strings. Consider reusing them.
+
+import (
+ "fmt"
+ "go/types"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type SliceInterval struct {
+ Length IntInterval
+}
+
+func (s SliceInterval) Union(other Range) Range {
+ i, ok := other.(SliceInterval)
+ if !ok {
+ i = SliceInterval{EmptyIntInterval}
+ }
+ if s.Length.Empty() || !s.Length.IsKnown() {
+ return i
+ }
+ if i.Length.Empty() || !i.Length.IsKnown() {
+ return s
+ }
+ return SliceInterval{
+ Length: s.Length.Union(i.Length).(IntInterval),
+ }
+}
+func (s SliceInterval) String() string { return s.Length.String() }
+func (s SliceInterval) IsKnown() bool { return s.Length.IsKnown() }
+
+type SliceAppendConstraint struct {
+ aConstraint
+ A ssa.Value
+ B ssa.Value
+}
+
+type SliceSliceConstraint struct {
+ aConstraint
+ X ssa.Value
+ Lower ssa.Value
+ Upper ssa.Value
+}
+
+type ArraySliceConstraint struct {
+ aConstraint
+ X ssa.Value
+ Lower ssa.Value
+ Upper ssa.Value
+}
+
+type SliceIntersectionConstraint struct {
+ aConstraint
+ X ssa.Value
+ I IntInterval
+}
+
+type SliceLengthConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+type MakeSliceConstraint struct {
+ aConstraint
+ Size ssa.Value
+}
+
+type SliceIntervalConstraint struct {
+ aConstraint
+ I IntInterval
+}
+
+func NewSliceAppendConstraint(a, b, y ssa.Value) Constraint {
+ return &SliceAppendConstraint{NewConstraint(y), a, b}
+}
+func NewSliceSliceConstraint(x, lower, upper, y ssa.Value) Constraint {
+ return &SliceSliceConstraint{NewConstraint(y), x, lower, upper}
+}
+func NewArraySliceConstraint(x, lower, upper, y ssa.Value) Constraint {
+ return &ArraySliceConstraint{NewConstraint(y), x, lower, upper}
+}
+func NewSliceIntersectionConstraint(x ssa.Value, i IntInterval, y ssa.Value) Constraint {
+ return &SliceIntersectionConstraint{NewConstraint(y), x, i}
+}
+func NewSliceLengthConstraint(x, y ssa.Value) Constraint {
+ return &SliceLengthConstraint{NewConstraint(y), x}
+}
+func NewMakeSliceConstraint(size, y ssa.Value) Constraint {
+ return &MakeSliceConstraint{NewConstraint(y), size}
+}
+func NewSliceIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
+ return &SliceIntervalConstraint{NewConstraint(y), i}
+}
+
+func (c *SliceAppendConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} }
+func (c *SliceSliceConstraint) Operands() []ssa.Value {
+ ops := []ssa.Value{c.X}
+ if c.Lower != nil {
+ ops = append(ops, c.Lower)
+ }
+ if c.Upper != nil {
+ ops = append(ops, c.Upper)
+ }
+ return ops
+}
+func (c *ArraySliceConstraint) Operands() []ssa.Value {
+ ops := []ssa.Value{c.X}
+ if c.Lower != nil {
+ ops = append(ops, c.Lower)
+ }
+ if c.Upper != nil {
+ ops = append(ops, c.Upper)
+ }
+ return ops
+}
+func (c *SliceIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (c *SliceLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (c *MakeSliceConstraint) Operands() []ssa.Value { return []ssa.Value{c.Size} }
+func (s *SliceIntervalConstraint) Operands() []ssa.Value { return nil }
+
+func (c *SliceAppendConstraint) String() string {
+ return fmt.Sprintf("%s = append(%s, %s)", c.Y().Name(), c.A.Name(), c.B.Name())
+}
+func (c *SliceSliceConstraint) String() string {
+ var lname, uname string
+ if c.Lower != nil {
+ lname = c.Lower.Name()
+ }
+ if c.Upper != nil {
+ uname = c.Upper.Name()
+ }
+ return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
+}
+func (c *ArraySliceConstraint) String() string {
+ var lname, uname string
+ if c.Lower != nil {
+ lname = c.Lower.Name()
+ }
+ if c.Upper != nil {
+ uname = c.Upper.Name()
+ }
+ return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
+}
+func (c *SliceIntersectionConstraint) String() string {
+ return fmt.Sprintf("%s = %s.%t ⊓ %s", c.Y().Name(), c.X.Name(), c.Y().(*ssa.Sigma).Branch, c.I)
+}
+func (c *SliceLengthConstraint) String() string {
+ return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name())
+}
+func (c *MakeSliceConstraint) String() string {
+ return fmt.Sprintf("%s = make(slice, %s)", c.Y().Name(), c.Size.Name())
+}
+func (c *SliceIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
+
+func (c *SliceAppendConstraint) Eval(g *Graph) Range {
+ l1 := g.Range(c.A).(SliceInterval).Length
+ var l2 IntInterval
+ switch r := g.Range(c.B).(type) {
+ case SliceInterval:
+ l2 = r.Length
+ case StringInterval:
+ l2 = r.Length
+ default:
+ return SliceInterval{}
+ }
+ if !l1.IsKnown() || !l2.IsKnown() {
+ return SliceInterval{}
+ }
+ return SliceInterval{
+ Length: l1.Add(l2),
+ }
+}
+func (c *SliceSliceConstraint) Eval(g *Graph) Range {
+ lr := NewIntInterval(NewZ(0), NewZ(0))
+ if c.Lower != nil {
+ lr = g.Range(c.Lower).(IntInterval)
+ }
+ ur := g.Range(c.X).(SliceInterval).Length
+ if c.Upper != nil {
+ ur = g.Range(c.Upper).(IntInterval)
+ }
+ if !lr.IsKnown() || !ur.IsKnown() {
+ return SliceInterval{}
+ }
+
+ ls := []Z{
+ ur.Lower.Sub(lr.Lower),
+ ur.Upper.Sub(lr.Lower),
+ ur.Lower.Sub(lr.Upper),
+ ur.Upper.Sub(lr.Upper),
+ }
+ // TODO(dh): if we don't truncate lengths to 0 we might be able to
+ // easily detect slices with high < low. we'd need to treat -∞
+ // specially, though.
+ for i, l := range ls {
+ if l.Sign() == -1 {
+ ls[i] = NewZ(0)
+ }
+ }
+
+ return SliceInterval{
+ Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
+ }
+}
+func (c *ArraySliceConstraint) Eval(g *Graph) Range {
+ lr := NewIntInterval(NewZ(0), NewZ(0))
+ if c.Lower != nil {
+ lr = g.Range(c.Lower).(IntInterval)
+ }
+ var l int64
+ switch typ := c.X.Type().(type) {
+ case *types.Array:
+ l = typ.Len()
+ case *types.Pointer:
+ l = typ.Elem().(*types.Array).Len()
+ }
+ ur := NewIntInterval(NewZ(l), NewZ(l))
+ if c.Upper != nil {
+ ur = g.Range(c.Upper).(IntInterval)
+ }
+ if !lr.IsKnown() || !ur.IsKnown() {
+ return SliceInterval{}
+ }
+
+ ls := []Z{
+ ur.Lower.Sub(lr.Lower),
+ ur.Upper.Sub(lr.Lower),
+ ur.Lower.Sub(lr.Upper),
+ ur.Upper.Sub(lr.Upper),
+ }
+ // TODO(dh): if we don't truncate lengths to 0 we might be able to
+ // easily detect slices with high < low. we'd need to treat -∞
+ // specially, though.
+ for i, l := range ls {
+ if l.Sign() == -1 {
+ ls[i] = NewZ(0)
+ }
+ }
+
+ return SliceInterval{
+ Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
+ }
+}
+func (c *SliceIntersectionConstraint) Eval(g *Graph) Range {
+ xi := g.Range(c.X).(SliceInterval)
+ if !xi.IsKnown() {
+ return c.I
+ }
+ return SliceInterval{
+ Length: xi.Length.Intersection(c.I),
+ }
+}
+func (c *SliceLengthConstraint) Eval(g *Graph) Range {
+ i := g.Range(c.X).(SliceInterval).Length
+ if !i.IsKnown() {
+ return NewIntInterval(NewZ(0), PInfinity)
+ }
+ return i
+}
+func (c *MakeSliceConstraint) Eval(g *Graph) Range {
+ i, ok := g.Range(c.Size).(IntInterval)
+ if !ok {
+ return SliceInterval{NewIntInterval(NewZ(0), PInfinity)}
+ }
+ if i.Lower.Sign() == -1 {
+ i.Lower = NewZ(0)
+ }
+ return SliceInterval{i}
+}
+func (c *SliceIntervalConstraint) Eval(*Graph) Range { return SliceInterval{c.I} }
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/string.go b/vendor/honnef.co/go/tools/staticcheck/vrp/string.go
new file mode 100644
index 000000000..e05877f9f
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/string.go
@@ -0,0 +1,258 @@
+package vrp
+
+import (
+ "fmt"
+ "go/token"
+ "go/types"
+
+ "honnef.co/go/tools/ssa"
+)
+
+type StringInterval struct {
+ Length IntInterval
+}
+
+func (s StringInterval) Union(other Range) Range {
+ i, ok := other.(StringInterval)
+ if !ok {
+ i = StringInterval{EmptyIntInterval}
+ }
+ if s.Length.Empty() || !s.Length.IsKnown() {
+ return i
+ }
+ if i.Length.Empty() || !i.Length.IsKnown() {
+ return s
+ }
+ return StringInterval{
+ Length: s.Length.Union(i.Length).(IntInterval),
+ }
+}
+
+func (s StringInterval) String() string {
+ return s.Length.String()
+}
+
+func (s StringInterval) IsKnown() bool {
+ return s.Length.IsKnown()
+}
+
+type StringSliceConstraint struct {
+ aConstraint
+ X ssa.Value
+ Lower ssa.Value
+ Upper ssa.Value
+}
+
+type StringIntersectionConstraint struct {
+ aConstraint
+ ranges Ranges
+ A ssa.Value
+ B ssa.Value
+ Op token.Token
+ I IntInterval
+ resolved bool
+}
+
+type StringConcatConstraint struct {
+ aConstraint
+ A ssa.Value
+ B ssa.Value
+}
+
+type StringLengthConstraint struct {
+ aConstraint
+ X ssa.Value
+}
+
+type StringIntervalConstraint struct {
+ aConstraint
+ I IntInterval
+}
+
+func NewStringSliceConstraint(x, lower, upper, y ssa.Value) Constraint {
+ return &StringSliceConstraint{NewConstraint(y), x, lower, upper}
+}
+func NewStringIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint {
+ return &StringIntersectionConstraint{
+ aConstraint: NewConstraint(y),
+ ranges: ranges,
+ A: a,
+ B: b,
+ Op: op,
+ }
+}
+func NewStringConcatConstraint(a, b, y ssa.Value) Constraint {
+ return &StringConcatConstraint{NewConstraint(y), a, b}
+}
+func NewStringLengthConstraint(x ssa.Value, y ssa.Value) Constraint {
+ return &StringLengthConstraint{NewConstraint(y), x}
+}
+func NewStringIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
+ return &StringIntervalConstraint{NewConstraint(y), i}
+}
+
+func (c *StringSliceConstraint) Operands() []ssa.Value {
+ vs := []ssa.Value{c.X}
+ if c.Lower != nil {
+ vs = append(vs, c.Lower)
+ }
+ if c.Upper != nil {
+ vs = append(vs, c.Upper)
+ }
+ return vs
+}
+func (c *StringIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} }
+func (c StringConcatConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} }
+func (c *StringLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
+func (s *StringIntervalConstraint) Operands() []ssa.Value { return nil }
+
+func (c *StringSliceConstraint) String() string {
+ var lname, uname string
+ if c.Lower != nil {
+ lname = c.Lower.Name()
+ }
+ if c.Upper != nil {
+ uname = c.Upper.Name()
+ }
+ return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
+}
+func (c *StringIntersectionConstraint) String() string {
+ return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch)
+}
+func (c StringConcatConstraint) String() string {
+ return fmt.Sprintf("%s = %s + %s", c.Y().Name(), c.A.Name(), c.B.Name())
+}
+func (c *StringLengthConstraint) String() string {
+ return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name())
+}
+func (c *StringIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
+
+func (c *StringSliceConstraint) Eval(g *Graph) Range {
+ lr := NewIntInterval(NewZ(0), NewZ(0))
+ if c.Lower != nil {
+ lr = g.Range(c.Lower).(IntInterval)
+ }
+ ur := g.Range(c.X).(StringInterval).Length
+ if c.Upper != nil {
+ ur = g.Range(c.Upper).(IntInterval)
+ }
+ if !lr.IsKnown() || !ur.IsKnown() {
+ return StringInterval{}
+ }
+
+ ls := []Z{
+ ur.Lower.Sub(lr.Lower),
+ ur.Upper.Sub(lr.Lower),
+ ur.Lower.Sub(lr.Upper),
+ ur.Upper.Sub(lr.Upper),
+ }
+ // TODO(dh): if we don't truncate lengths to 0 we might be able to
+ // easily detect slices with high < low. we'd need to treat -∞
+ // specially, though.
+ for i, l := range ls {
+ if l.Sign() == -1 {
+ ls[i] = NewZ(0)
+ }
+ }
+
+ return StringInterval{
+ Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
+ }
+}
+func (c *StringIntersectionConstraint) Eval(g *Graph) Range {
+ var l IntInterval
+ switch r := g.Range(c.A).(type) {
+ case StringInterval:
+ l = r.Length
+ case IntInterval:
+ l = r
+ }
+
+ if !l.IsKnown() {
+ return StringInterval{c.I}
+ }
+ return StringInterval{
+ Length: l.Intersection(c.I),
+ }
+}
+func (c StringConcatConstraint) Eval(g *Graph) Range {
+ i1, i2 := g.Range(c.A).(StringInterval), g.Range(c.B).(StringInterval)
+ if !i1.Length.IsKnown() || !i2.Length.IsKnown() {
+ return StringInterval{}
+ }
+ return StringInterval{
+ Length: i1.Length.Add(i2.Length),
+ }
+}
+func (c *StringLengthConstraint) Eval(g *Graph) Range {
+ i := g.Range(c.X).(StringInterval).Length
+ if !i.IsKnown() {
+ return NewIntInterval(NewZ(0), PInfinity)
+ }
+ return i
+}
+func (c *StringIntervalConstraint) Eval(*Graph) Range { return StringInterval{c.I} }
+
+func (c *StringIntersectionConstraint) Futures() []ssa.Value {
+ return []ssa.Value{c.B}
+}
+
+func (c *StringIntersectionConstraint) Resolve() {
+ if (c.A.Type().Underlying().(*types.Basic).Info() & types.IsString) != 0 {
+ // comparing two strings
+ r, ok := c.ranges[c.B].(StringInterval)
+ if !ok {
+ c.I = NewIntInterval(NewZ(0), PInfinity)
+ return
+ }
+ switch c.Op {
+ case token.EQL:
+ c.I = r.Length
+ case token.GTR, token.GEQ:
+ c.I = NewIntInterval(r.Length.Lower, PInfinity)
+ case token.LSS, token.LEQ:
+ c.I = NewIntInterval(NewZ(0), r.Length.Upper)
+ case token.NEQ:
+ default:
+ panic("unsupported op " + c.Op.String())
+ }
+ } else {
+ r, ok := c.ranges[c.B].(IntInterval)
+ if !ok {
+ c.I = NewIntInterval(NewZ(0), PInfinity)
+ return
+ }
+ // comparing two lengths
+ switch c.Op {
+ case token.EQL:
+ c.I = r
+ case token.GTR:
+ c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity)
+ case token.GEQ:
+ c.I = NewIntInterval(r.Lower, PInfinity)
+ case token.LSS:
+ c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1)))
+ case token.LEQ:
+ c.I = NewIntInterval(NInfinity, r.Upper)
+ case token.NEQ:
+ default:
+ panic("unsupported op " + c.Op.String())
+ }
+ }
+}
+
+func (c *StringIntersectionConstraint) IsKnown() bool {
+ return c.I.IsKnown()
+}
+
+func (c *StringIntersectionConstraint) MarkUnresolved() {
+ c.resolved = false
+}
+
+func (c *StringIntersectionConstraint) MarkResolved() {
+ c.resolved = true
+}
+
+func (c *StringIntersectionConstraint) IsResolved() bool {
+ return c.resolved
+}
diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go
new file mode 100644
index 000000000..3c138e512
--- /dev/null
+++ b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go
@@ -0,0 +1,1056 @@
+package vrp
+
+// TODO(dh) widening and narrowing have a lot of code in common. Make
+// it reusable.
+
+import (
+ "fmt"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "math/big"
+ "sort"
+ "strings"
+
+ "honnef.co/go/tools/lint"
+ "honnef.co/go/tools/ssa"
+)
+
+type Future interface {
+ Constraint
+ Futures() []ssa.Value
+ Resolve()
+ IsKnown() bool
+ MarkUnresolved()
+ MarkResolved()
+ IsResolved() bool
+}
+
+type Range interface {
+ Union(other Range) Range
+ IsKnown() bool
+}
+
+type Constraint interface {
+ Y() ssa.Value
+ isConstraint()
+ String() string
+ Eval(*Graph) Range
+ Operands() []ssa.Value
+}
+
+type aConstraint struct {
+ y ssa.Value
+}
+
+func NewConstraint(y ssa.Value) aConstraint {
+ return aConstraint{y}
+}
+
+func (aConstraint) isConstraint() {}
+func (c aConstraint) Y() ssa.Value { return c.y }
+
+type PhiConstraint struct {
+ aConstraint
+ Vars []ssa.Value
+}
+
+func NewPhiConstraint(vars []ssa.Value, y ssa.Value) Constraint {
+ uniqm := map[ssa.Value]struct{}{}
+ for _, v := range vars {
+ uniqm[v] = struct{}{}
+ }
+ var uniq []ssa.Value
+ for v := range uniqm {
+ uniq = append(uniq, v)
+ }
+ return &PhiConstraint{
+ aConstraint: NewConstraint(y),
+ Vars: uniq,
+ }
+}
+
+func (c *PhiConstraint) Operands() []ssa.Value {
+ return c.Vars
+}
+
+func (c *PhiConstraint) Eval(g *Graph) Range {
+ i := Range(nil)
+ for _, v := range c.Vars {
+ i = g.Range(v).Union(i)
+ }
+ return i
+}
+
+func (c *PhiConstraint) String() string {
+ names := make([]string, len(c.Vars))
+ for i, v := range c.Vars {
+ names[i] = v.Name()
+ }
+ return fmt.Sprintf("%s = φ(%s)", c.Y().Name(), strings.Join(names, ", "))
+}
+
+func isSupportedType(typ types.Type) bool {
+ switch typ := typ.Underlying().(type) {
+ case *types.Basic:
+ switch typ.Kind() {
+ case types.String, types.UntypedString:
+ return true
+ default:
+ if (typ.Info() & types.IsInteger) == 0 {
+ return false
+ }
+ }
+ case *types.Chan:
+ return true
+ case *types.Slice:
+ return true
+ default:
+ return false
+ }
+ return true
+}
+
+func ConstantToZ(c constant.Value) Z {
+ s := constant.ToInt(c).ExactString()
+ n := &big.Int{}
+ n.SetString(s, 10)
+ return NewBigZ(n)
+}
+
+func sigmaInteger(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint {
+ op := cond.Op
+ if !ins.Branch {
+ op = (invertToken(op))
+ }
+
+ switch op {
+ case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ:
+ default:
+ return nil
+ }
+ var a, b ssa.Value
+ if (*ops[0]) == ins.X {
+ a = *ops[0]
+ b = *ops[1]
+ } else {
+ a = *ops[1]
+ b = *ops[0]
+ op = flipToken(op)
+ }
+ return NewIntIntersectionConstraint(a, b, op, g.ranges, ins)
+}
+
+func sigmaString(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint {
+ op := cond.Op
+ if !ins.Branch {
+ op = (invertToken(op))
+ }
+
+ switch op {
+ case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ:
+ default:
+ return nil
+ }
+
+ if ((*ops[0]).Type().Underlying().(*types.Basic).Info() & types.IsString) == 0 {
+ var a, b ssa.Value
+ call, ok := (*ops[0]).(*ssa.Call)
+ if ok && call.Common().Args[0] == ins.X {
+ a = *ops[0]
+ b = *ops[1]
+ } else {
+ a = *ops[1]
+ b = *ops[0]
+ op = flipToken(op)
+ }
+ return NewStringIntersectionConstraint(a, b, op, g.ranges, ins)
+ }
+ var a, b ssa.Value
+ if (*ops[0]) == ins.X {
+ a = *ops[0]
+ b = *ops[1]
+ } else {
+ a = *ops[1]
+ b = *ops[0]
+ op = flipToken(op)
+ }
+ return NewStringIntersectionConstraint(a, b, op, g.ranges, ins)
+}
+
+func sigmaSlice(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint {
+ // TODO(dh) sigmaSlice and sigmaString are a lot alike. Can they
+ // be merged?
+ //
+ // XXX support futures
+
+ op := cond.Op
+ if !ins.Branch {
+ op = (invertToken(op))
+ }
+
+ k, ok := (*ops[1]).(*ssa.Const)
+ // XXX investigate in what cases this wouldn't be a Const
+ //
+ // XXX what if left and right are swapped?
+ if !ok {
+ return nil
+ }
+
+ call, ok := (*ops[0]).(*ssa.Call)
+ if !ok {
+ return nil
+ }
+ builtin, ok := call.Common().Value.(*ssa.Builtin)
+ if !ok {
+ return nil
+ }
+ if builtin.Name() != "len" {
+ return nil
+ }
+ callops := call.Operands(nil)
+
+ v := ConstantToZ(k.Value)
+ c := NewSliceIntersectionConstraint(*callops[1], IntInterval{}, ins).(*SliceIntersectionConstraint)
+ switch op {
+ case token.EQL:
+ c.I = NewIntInterval(v, v)
+ case token.GTR, token.GEQ:
+ off := int64(0)
+ if cond.Op == token.GTR {
+ off = 1
+ }
+ c.I = NewIntInterval(
+ v.Add(NewZ(off)),
+ PInfinity,
+ )
+ case token.LSS, token.LEQ:
+ off := int64(0)
+ if cond.Op == token.LSS {
+ off = -1
+ }
+ c.I = NewIntInterval(
+ NInfinity,
+ v.Add(NewZ(off)),
+ )
+ default:
+ return nil
+ }
+ return c
+}
+
+func BuildGraph(f *ssa.Function) *Graph {
+ g := &Graph{
+ Vertices: map[interface{}]*Vertex{},
+ ranges: Ranges{},
+ }
+
+ var cs []Constraint
+
+ ops := make([]*ssa.Value, 16)
+ seen := map[ssa.Value]bool{}
+ for _, block := range f.Blocks {
+ for _, ins := range block.Instrs {
+ ops = ins.Operands(ops[:0])
+ for _, op := range ops {
+ if c, ok := (*op).(*ssa.Const); ok {
+ if seen[c] {
+ continue
+ }
+ seen[c] = true
+ if c.Value == nil {
+ switch c.Type().Underlying().(type) {
+ case *types.Slice:
+ cs = append(cs, NewSliceIntervalConstraint(NewIntInterval(NewZ(0), NewZ(0)), c))
+ }
+ continue
+ }
+ switch c.Value.Kind() {
+ case constant.Int:
+ v := ConstantToZ(c.Value)
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(v, v), c))
+ case constant.String:
+ s := constant.StringVal(c.Value)
+ n := NewZ(int64(len(s)))
+ cs = append(cs, NewStringIntervalConstraint(NewIntInterval(n, n), c))
+ }
+ }
+ }
+ }
+ }
+ for _, block := range f.Blocks {
+ for _, ins := range block.Instrs {
+ switch ins := ins.(type) {
+ case *ssa.Convert:
+ switch v := ins.Type().Underlying().(type) {
+ case *types.Basic:
+ if (v.Info() & types.IsInteger) == 0 {
+ continue
+ }
+ cs = append(cs, NewIntConversionConstraint(ins.X, ins))
+ }
+ case *ssa.Call:
+ if static := ins.Common().StaticCallee(); static != nil {
+ if fn, ok := static.Object().(*types.Func); ok {
+ switch lint.FuncName(fn) {
+ case "bytes.Index", "bytes.IndexAny", "bytes.IndexByte",
+ "bytes.IndexFunc", "bytes.IndexRune", "bytes.LastIndex",
+ "bytes.LastIndexAny", "bytes.LastIndexByte", "bytes.LastIndexFunc",
+ "strings.Index", "strings.IndexAny", "strings.IndexByte",
+ "strings.IndexFunc", "strings.IndexRune", "strings.LastIndex",
+ "strings.LastIndexAny", "strings.LastIndexByte", "strings.LastIndexFunc":
+ // TODO(dh): instead of limiting by +∞,
+ // limit by the upper bound of the passed
+ // string
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), PInfinity), ins))
+ case "bytes.Title", "bytes.ToLower", "bytes.ToTitle", "bytes.ToUpper",
+ "strings.Title", "strings.ToLower", "strings.ToTitle", "strings.ToUpper":
+ cs = append(cs, NewCopyConstraint(ins.Common().Args[0], ins))
+ case "bytes.ToLowerSpecial", "bytes.ToTitleSpecial", "bytes.ToUpperSpecial",
+ "strings.ToLowerSpecial", "strings.ToTitleSpecial", "strings.ToUpperSpecial":
+ cs = append(cs, NewCopyConstraint(ins.Common().Args[1], ins))
+ case "bytes.Compare", "strings.Compare":
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), NewZ(1)), ins))
+ case "bytes.Count", "strings.Count":
+ // TODO(dh): instead of limiting by +∞,
+ // limit by the upper bound of the passed
+ // string.
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins))
+ case "bytes.Map", "bytes.TrimFunc", "bytes.TrimLeft", "bytes.TrimLeftFunc",
+ "bytes.TrimRight", "bytes.TrimRightFunc", "bytes.TrimSpace",
+ "strings.Map", "strings.TrimFunc", "strings.TrimLeft", "strings.TrimLeftFunc",
+ "strings.TrimRight", "strings.TrimRightFunc", "strings.TrimSpace":
+ // TODO(dh): lower = 0, upper = upper of passed string
+ case "bytes.TrimPrefix", "bytes.TrimSuffix",
+ "strings.TrimPrefix", "strings.TrimSuffix":
+ // TODO(dh) range between "unmodified" and len(cutset) removed
+ case "(*bytes.Buffer).Cap", "(*bytes.Buffer).Len", "(*bytes.Reader).Len", "(*bytes.Reader).Size":
+ cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins))
+ }
+ }
+ }
+ builtin, ok := ins.Common().Value.(*ssa.Builtin)
+ ops := ins.Operands(nil)
+ if !ok {
+ continue
+ }
+ switch builtin.Name() {
+ case "len":
+ switch op1 := (*ops[1]).Type().Underlying().(type) {
+ case *types.Basic:
+ if op1.Kind() == types.String || op1.Kind() == types.UntypedString {
+ cs = append(cs, NewStringLengthConstraint(*ops[1], ins))
+ }
+ case *types.Slice:
+ cs = append(cs, NewSliceLengthConstraint(*ops[1], ins))
+ }
+
+ case "append":
+ cs = append(cs, NewSliceAppendConstraint(ins.Common().Args[0], ins.Common().Args[1], ins))
+ }
+ case *ssa.BinOp:
+ ops := ins.Operands(nil)
+ basic, ok := (*ops[0]).Type().Underlying().(*types.Basic)
+ if !ok {
+ continue
+ }
+ switch basic.Kind() {
+ case types.Int, types.Int8, types.Int16, types.Int32, types.Int64,
+ types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt:
+ fns := map[token.Token]func(ssa.Value, ssa.Value, ssa.Value) Constraint{
+ token.ADD: NewIntAddConstraint,
+ token.SUB: NewIntSubConstraint,
+ token.MUL: NewIntMulConstraint,
+ // XXX support QUO, REM, SHL, SHR
+ }
+ fn, ok := fns[ins.Op]
+ if ok {
+ cs = append(cs, fn(*ops[0], *ops[1], ins))
+ }
+ case types.String, types.UntypedString:
+ if ins.Op == token.ADD {
+ cs = append(cs, NewStringConcatConstraint(*ops[0], *ops[1], ins))
+ }
+ }
+ case *ssa.Slice:
+ typ := ins.X.Type().Underlying()
+ switch typ := typ.(type) {
+ case *types.Basic:
+ cs = append(cs, NewStringSliceConstraint(ins.X, ins.Low, ins.High, ins))
+ case *types.Slice:
+ cs = append(cs, NewSliceSliceConstraint(ins.X, ins.Low, ins.High, ins))
+ case *types.Array:
+ cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins))
+ case *types.Pointer:
+ if _, ok := typ.Elem().(*types.Array); !ok {
+ continue
+ }
+ cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins))
+ }
+ case *ssa.Phi:
+ if !isSupportedType(ins.Type()) {
+ continue
+ }
+ ops := ins.Operands(nil)
+ dops := make([]ssa.Value, len(ops))
+ for i, op := range ops {
+ dops[i] = *op
+ }
+ cs = append(cs, NewPhiConstraint(dops, ins))
+ case *ssa.Sigma:
+ pred := ins.Block().Preds[0]
+ instrs := pred.Instrs
+ cond, ok := instrs[len(instrs)-1].(*ssa.If).Cond.(*ssa.BinOp)
+ ops := cond.Operands(nil)
+ if !ok {
+ continue
+ }
+ switch typ := ins.Type().Underlying().(type) {
+ case *types.Basic:
+ var c Constraint
+ switch typ.Kind() {
+ case types.Int, types.Int8, types.Int16, types.Int32, types.Int64,
+ types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt:
+ c = sigmaInteger(g, ins, cond, ops)
+ case types.String, types.UntypedString:
+ c = sigmaString(g, ins, cond, ops)
+ }
+ if c != nil {
+ cs = append(cs, c)
+ }
+ case *types.Slice:
+ c := sigmaSlice(g, ins, cond, ops)
+ if c != nil {
+ cs = append(cs, c)
+ }
+ default:
+ //log.Printf("unsupported sigma type %T", typ) // XXX
+ }
+ case *ssa.MakeChan:
+ cs = append(cs, NewMakeChannelConstraint(ins.Size, ins))
+ case *ssa.MakeSlice:
+ cs = append(cs, NewMakeSliceConstraint(ins.Len, ins))
+ case *ssa.ChangeType:
+ switch ins.X.Type().Underlying().(type) {
+ case *types.Chan:
+ cs = append(cs, NewChannelChangeTypeConstraint(ins.X, ins))
+ }
+ }
+ }
+ }
+
+ for _, c := range cs {
+ if c == nil {
+ panic("nil constraint")
+ }
+ // If V is used in constraint C, then we create an edge V->C
+ for _, op := range c.Operands() {
+ g.AddEdge(op, c, false)
+ }
+ if c, ok := c.(Future); ok {
+ for _, op := range c.Futures() {
+ g.AddEdge(op, c, true)
+ }
+ }
+ // If constraint C defines variable V, then we create an edge
+ // C->V
+ g.AddEdge(c, c.Y(), false)
+ }
+
+ g.FindSCCs()
+ g.sccEdges = make([][]Edge, len(g.SCCs))
+ g.futures = make([][]Future, len(g.SCCs))
+ for _, e := range g.Edges {
+ g.sccEdges[e.From.SCC] = append(g.sccEdges[e.From.SCC], e)
+ if !e.control {
+ continue
+ }
+ if c, ok := e.To.Value.(Future); ok {
+ g.futures[e.From.SCC] = append(g.futures[e.From.SCC], c)
+ }
+ }
+ return g
+}
+
+func (g *Graph) Solve() Ranges {
+ var consts []Z
+ off := NewZ(1)
+ for _, n := range g.Vertices {
+ if c, ok := n.Value.(*ssa.Const); ok {
+ basic, ok := c.Type().Underlying().(*types.Basic)
+ if !ok {
+ continue
+ }
+ if (basic.Info() & types.IsInteger) != 0 {
+ z := ConstantToZ(c.Value)
+ consts = append(consts, z)
+ consts = append(consts, z.Add(off))
+ consts = append(consts, z.Sub(off))
+ }
+ }
+
+ }
+ sort.Sort(Zs(consts))
+
+ for scc, vertices := range g.SCCs {
+ n := 0
+ n = len(vertices)
+ if n == 1 {
+ g.resolveFutures(scc)
+ v := vertices[0]
+ if v, ok := v.Value.(ssa.Value); ok {
+ switch typ := v.Type().Underlying().(type) {
+ case *types.Basic:
+ switch typ.Kind() {
+ case types.String, types.UntypedString:
+ if !g.Range(v).(StringInterval).IsKnown() {
+ g.SetRange(v, StringInterval{NewIntInterval(NewZ(0), PInfinity)})
+ }
+ default:
+ if !g.Range(v).(IntInterval).IsKnown() {
+ g.SetRange(v, InfinityFor(v))
+ }
+ }
+ case *types.Chan:
+ if !g.Range(v).(ChannelInterval).IsKnown() {
+ g.SetRange(v, ChannelInterval{NewIntInterval(NewZ(0), PInfinity)})
+ }
+ case *types.Slice:
+ if !g.Range(v).(SliceInterval).IsKnown() {
+ g.SetRange(v, SliceInterval{NewIntInterval(NewZ(0), PInfinity)})
+ }
+ }
+ }
+ if c, ok := v.Value.(Constraint); ok {
+ g.SetRange(c.Y(), c.Eval(g))
+ }
+ } else {
+ uses := g.uses(scc)
+ entries := g.entries(scc)
+ for len(entries) > 0 {
+ v := entries[len(entries)-1]
+ entries = entries[:len(entries)-1]
+ for _, use := range uses[v] {
+ if g.widen(use, consts) {
+ entries = append(entries, use.Y())
+ }
+ }
+ }
+
+ g.resolveFutures(scc)
+
+ // XXX this seems to be necessary, but shouldn't be.
+ // removing it leads to nil pointer derefs; investigate
+ // where we're not setting values correctly.
+ for _, n := range vertices {
+ if v, ok := n.Value.(ssa.Value); ok {
+ i, ok := g.Range(v).(IntInterval)
+ if !ok {
+ continue
+ }
+ if !i.IsKnown() {
+ g.SetRange(v, InfinityFor(v))
+ }
+ }
+ }
+
+ actives := g.actives(scc)
+ for len(actives) > 0 {
+ v := actives[len(actives)-1]
+ actives = actives[:len(actives)-1]
+ for _, use := range uses[v] {
+ if g.narrow(use) {
+ actives = append(actives, use.Y())
+ }
+ }
+ }
+ }
+ // propagate scc
+ for _, edge := range g.sccEdges[scc] {
+ if edge.control {
+ continue
+ }
+ if edge.From.SCC == edge.To.SCC {
+ continue
+ }
+ if c, ok := edge.To.Value.(Constraint); ok {
+ g.SetRange(c.Y(), c.Eval(g))
+ }
+ if c, ok := edge.To.Value.(Future); ok {
+ if !c.IsKnown() {
+ c.MarkUnresolved()
+ }
+ }
+ }
+ }
+
+ for v, r := range g.ranges {
+ i, ok := r.(IntInterval)
+ if !ok {
+ continue
+ }
+ if (v.Type().Underlying().(*types.Basic).Info() & types.IsUnsigned) == 0 {
+ if i.Upper != PInfinity {
+ s := &types.StdSizes{
+ // XXX is it okay to assume the largest word size, or do we
+ // need to be platform specific?
+ WordSize: 8,
+ MaxAlign: 1,
+ }
+ bits := (s.Sizeof(v.Type()) * 8) - 1
+ n := big.NewInt(1)
+ n = n.Lsh(n, uint(bits))
+ upper, lower := &big.Int{}, &big.Int{}
+ upper.Sub(n, big.NewInt(1))
+ lower.Neg(n)
+
+ if i.Upper.Cmp(NewBigZ(upper)) == 1 {
+ i = NewIntInterval(NInfinity, PInfinity)
+ } else if i.Lower.Cmp(NewBigZ(lower)) == -1 {
+ i = NewIntInterval(NInfinity, PInfinity)
+ }
+ }
+ }
+
+ g.ranges[v] = i
+ }
+
+ return g.ranges
+}
+
+func VertexString(v *Vertex) string {
+ switch v := v.Value.(type) {
+ case Constraint:
+ return v.String()
+ case ssa.Value:
+ return v.Name()
+ case nil:
+ return "BUG: nil vertex value"
+ default:
+ panic(fmt.Sprintf("unexpected type %T", v))
+ }
+}
+
// Vertex is a node in the constraint graph, wrapping either a
// Constraint or an ssa.Value. The unexported fields are bookkeeping
// for Tarjan's SCC algorithm in FindSCCs.
type Vertex struct {
	Value   interface{} // one of Constraint or ssa.Value
	SCC     int         // component index, assigned by FindSCCs
	index   int         // Tarjan: visitation order; 0 means unvisited
	lowlink int         // Tarjan: smallest index reachable from here
	stack   bool        // Tarjan: whether currently on the vertex stack

	Succs []Edge // outgoing edges
}
+
+type Ranges map[ssa.Value]Range
+
+func (r Ranges) Get(x ssa.Value) Range {
+ if x == nil {
+ return nil
+ }
+ i, ok := r[x]
+ if !ok {
+ switch x := x.Type().Underlying().(type) {
+ case *types.Basic:
+ switch x.Kind() {
+ case types.String, types.UntypedString:
+ return StringInterval{}
+ default:
+ return IntInterval{}
+ }
+ case *types.Chan:
+ return ChannelInterval{}
+ case *types.Slice:
+ return SliceInterval{}
+ }
+ }
+ return i
+}
+
// Graph is the constraint graph of the range analysis. Vertices wrap
// constraints and SSA values; edges connect values to the constraints
// that use them and constraints to the values they define.
type Graph struct {
	Vertices map[interface{}]*Vertex
	Edges    []Edge
	SCCs     [][]*Vertex
	ranges   Ranges

	// map SCCs to futures
	futures [][]Future
	// map SCCs to edges
	sccEdges [][]Edge
}
+
+func (g Graph) Graphviz() string {
+ var lines []string
+ lines = append(lines, "digraph{")
+ ids := map[interface{}]int{}
+ i := 1
+ for _, v := range g.Vertices {
+ ids[v] = i
+ shape := "box"
+ if _, ok := v.Value.(ssa.Value); ok {
+ shape = "oval"
+ }
+ lines = append(lines, fmt.Sprintf(`n%d [shape="%s", label=%q, colorscheme=spectral11, style="filled", fillcolor="%d"]`,
+ i, shape, VertexString(v), (v.SCC%11)+1))
+ i++
+ }
+ for _, e := range g.Edges {
+ style := "solid"
+ if e.control {
+ style = "dashed"
+ }
+ lines = append(lines, fmt.Sprintf(`n%d -> n%d [style="%s"]`, ids[e.From], ids[e.To], style))
+ }
+ lines = append(lines, "}")
+ return strings.Join(lines, "\n")
+}
+
// SetRange records r as the range of x.
func (g *Graph) SetRange(x ssa.Value, r Range) {
	g.ranges[x] = r
}
+
// Range returns the recorded range of x, or a type-appropriate
// unknown range when none has been recorded (see Ranges.Get).
func (g *Graph) Range(x ssa.Value) Range {
	return g.ranges.Get(x)
}
+
// widen applies jump-set widening to the range of c's output value:
// the newly evaluated interval is compared against the stored one,
// and any bound that grew is jumped to a constant threshold from
// consts, or to ±∞ when no threshold applies. String and slice
// ranges widen via their length interval. Reports whether the stored
// range changed.
func (g *Graph) widen(c Constraint, consts []Z) bool {
	setRange := func(i Range) {
		g.SetRange(c.Y(), i)
	}
	widenIntInterval := func(oi, ni IntInterval) (IntInterval, bool) {
		if !ni.IsKnown() {
			return oi, false
		}
		// Candidate bounds to jump to when a bound moves outward.
		nlc := NInfinity
		nuc := PInfinity

		// Don't get stuck widening for an absurd amount of time due
		// to an excess number of constants, as may be present in
		// table-based scanners.
		if len(consts) < 1000 {
			for _, co := range consts {
				if co.Cmp(ni.Lower) <= 0 {
					nlc = co
					break
				}
			}
			for _, co := range consts {
				if co.Cmp(ni.Upper) >= 0 {
					nuc = co
					break
				}
			}
		}

		if !oi.IsKnown() {
			return ni, true
		}
		// Jump whichever bounds moved outward to their thresholds.
		if ni.Lower.Cmp(oi.Lower) == -1 && ni.Upper.Cmp(oi.Upper) == 1 {
			return NewIntInterval(nlc, nuc), true
		}
		if ni.Lower.Cmp(oi.Lower) == -1 {
			return NewIntInterval(nlc, oi.Upper), true
		}
		if ni.Upper.Cmp(oi.Upper) == 1 {
			return NewIntInterval(oi.Lower, nuc), true
		}
		return oi, false
	}
	switch oi := g.Range(c.Y()).(type) {
	case IntInterval:
		ni := c.Eval(g).(IntInterval)
		si, changed := widenIntInterval(oi, ni)
		if changed {
			setRange(si)
			return true
		}
		return false
	case StringInterval:
		ni := c.Eval(g).(StringInterval)
		si, changed := widenIntInterval(oi.Length, ni.Length)
		if changed {
			setRange(StringInterval{si})
			return true
		}
		return false
	case SliceInterval:
		ni := c.Eval(g).(SliceInterval)
		si, changed := widenIntInterval(oi.Length, ni.Length)
		if changed {
			setRange(SliceInterval{si})
			return true
		}
		return false
	default:
		return false
	}
}
+
// narrow updates the range of c's output value after widening,
// preferring finite bounds over infinite ones and otherwise adopting
// the newly evaluated bounds when they differ. String and slice
// ranges narrow via their length interval. Reports whether the stored
// range changed.
func (g *Graph) narrow(c Constraint) bool {
	narrowIntInterval := func(oi, ni IntInterval) (IntInterval, bool) {
		oLower := oi.Lower
		oUpper := oi.Upper
		nLower := ni.Lower
		nUpper := ni.Upper

		// Replace infinite bounds with finite ones first.
		if oLower == NInfinity && nLower != NInfinity {
			return NewIntInterval(nLower, oUpper), true
		}
		if oUpper == PInfinity && nUpper != PInfinity {
			return NewIntInterval(oLower, nUpper), true
		}
		if oLower.Cmp(nLower) == 1 {
			return NewIntInterval(nLower, oUpper), true
		}
		if oUpper.Cmp(nUpper) == -1 {
			return NewIntInterval(oLower, nUpper), true
		}
		return oi, false
	}
	switch oi := g.Range(c.Y()).(type) {
	case IntInterval:
		ni := c.Eval(g).(IntInterval)
		si, changed := narrowIntInterval(oi, ni)
		if changed {
			g.SetRange(c.Y(), si)
			return true
		}
		return false
	case StringInterval:
		ni := c.Eval(g).(StringInterval)
		si, changed := narrowIntInterval(oi.Length, ni.Length)
		if changed {
			g.SetRange(c.Y(), StringInterval{si})
			return true
		}
		return false
	case SliceInterval:
		ni := c.Eval(g).(SliceInterval)
		si, changed := narrowIntInterval(oi.Length, ni.Length)
		if changed {
			g.SetRange(c.Y(), SliceInterval{si})
			return true
		}
		return false
	default:
		return false
	}
}
+
// resolveFutures resolves every future constraint recorded for the
// given SCC.
func (g *Graph) resolveFutures(scc int) {
	for _, c := range g.futures[scc] {
		c.Resolve()
	}
}
+
// entries returns the SSA values in the given SCC whose range is
// already known; these seed the widening phase in Solve. Unresolved
// futures whose output is one of these values are evaluated and
// marked resolved along the way.
func (g *Graph) entries(scc int) []ssa.Value {
	var entries []ssa.Value
	for _, n := range g.Vertices {
		if n.SCC != scc {
			continue
		}
		if v, ok := n.Value.(ssa.Value); ok {
			// XXX avoid quadratic runtime
			//
			// XXX I cannot think of any code where the future and its
			// variables aren't in the same SCC, in which case this
			// code isn't very useful (the variables won't be resolved
			// yet). Before we have a cross-SCC example, however, we
			// can't really verify that this code is working
			// correctly, or indeed doing anything useful.
			for _, on := range g.Vertices {
				if c, ok := on.Value.(Future); ok {
					if c.Y() == v {
						if !c.IsResolved() {
							g.SetRange(c.Y(), c.Eval(g))
							c.MarkResolved()
						}
						break
					}
				}
			}
			if g.Range(v).IsKnown() {
				entries = append(entries, v)
			}
		}
	}
	return entries
}
+
+func (g *Graph) uses(scc int) map[ssa.Value][]Constraint {
+ m := map[ssa.Value][]Constraint{}
+ for _, e := range g.sccEdges[scc] {
+ if e.control {
+ continue
+ }
+ if v, ok := e.From.Value.(ssa.Value); ok {
+ c := e.To.Value.(Constraint)
+ sink := c.Y()
+ if g.Vertices[sink].SCC == scc {
+ m[v] = append(m[v], c)
+ }
+ }
+ }
+ return m
+}
+
+func (g *Graph) actives(scc int) []ssa.Value {
+ var actives []ssa.Value
+ for _, n := range g.Vertices {
+ if n.SCC != scc {
+ continue
+ }
+ if v, ok := n.Value.(ssa.Value); ok {
+ if _, ok := v.(*ssa.Const); !ok {
+ actives = append(actives, v)
+ }
+ }
+ }
+ return actives
+}
+
+func (g *Graph) AddEdge(from, to interface{}, ctrl bool) {
+ vf, ok := g.Vertices[from]
+ if !ok {
+ vf = &Vertex{Value: from}
+ g.Vertices[from] = vf
+ }
+ vt, ok := g.Vertices[to]
+ if !ok {
+ vt = &Vertex{Value: to}
+ g.Vertices[to] = vt
+ }
+ e := Edge{From: vf, To: vt, control: ctrl}
+ g.Edges = append(g.Edges, e)
+ vf.Succs = append(vf.Succs, e)
+}
+
// Edge connects two vertices in the constraint graph. control marks
// edges that track future constraints rather than data flow.
type Edge struct {
	From, To *Vertex
	control  bool
}
+
// String renders the edge as "from -> to" using the vertex labels.
func (e Edge) String() string {
	return fmt.Sprintf("%s -> %s", VertexString(e.From), VertexString(e.To))
}
+
// FindSCCs computes the strongly connected components of the graph
// using Tarjan's algorithm and populates g.SCCs. The component
// numbering is reversed at the end: Tarjan emits SCCs in reverse
// topological order, so after the reversal g.SCCs is in topological
// order, which Solve relies on.
func (g *Graph) FindSCCs() {
	// use Tarjan to find the SCCs

	index := 1
	var s []*Vertex // vertex stack

	scc := 0
	var strongconnect func(v *Vertex)
	strongconnect = func(v *Vertex) {
		// set the depth index for v to the smallest unused index
		v.index = index
		v.lowlink = index
		index++
		s = append(s, v)
		v.stack = true

		for _, e := range v.Succs {
			w := e.To
			if w.index == 0 {
				// successor w has not yet been visited; recurse on it
				strongconnect(w)
				if w.lowlink < v.lowlink {
					v.lowlink = w.lowlink
				}
			} else if w.stack {
				// successor w is in stack s and hence in the current scc
				if w.index < v.lowlink {
					v.lowlink = w.index
				}
			}
		}

		// If v is a root node, pop the stack down to v; the popped
		// vertices form one SCC.
		if v.lowlink == v.index {
			for {
				w := s[len(s)-1]
				s = s[:len(s)-1]
				w.stack = false
				w.SCC = scc
				if w == v {
					break
				}
			}
			scc++
		}
	}
	for _, v := range g.Vertices {
		if v.index == 0 {
			strongconnect(v)
		}
	}

	// Reverse the SCC numbering and group vertices by component.
	g.SCCs = make([][]*Vertex, scc)
	for _, n := range g.Vertices {
		n.SCC = scc - n.SCC - 1
		g.SCCs[n.SCC] = append(g.SCCs[n.SCC], n)
	}
}
+
+func invertToken(tok token.Token) token.Token {
+ switch tok {
+ case token.LSS:
+ return token.GEQ
+ case token.GTR:
+ return token.LEQ
+ case token.EQL:
+ return token.NEQ
+ case token.NEQ:
+ return token.EQL
+ case token.GEQ:
+ return token.LSS
+ case token.LEQ:
+ return token.GTR
+ default:
+ panic(fmt.Sprintf("unsupported token %s", tok))
+ }
+}
+
+func flipToken(tok token.Token) token.Token {
+ switch tok {
+ case token.LSS:
+ return token.GTR
+ case token.GTR:
+ return token.LSS
+ case token.EQL:
+ return token.EQL
+ case token.NEQ:
+ return token.NEQ
+ case token.GEQ:
+ return token.LEQ
+ case token.LEQ:
+ return token.GEQ
+ default:
+ panic(fmt.Sprintf("unsupported token %s", tok))
+ }
+}
+
// CopyConstraint propagates the range of X unchanged to the
// constraint's output value.
type CopyConstraint struct {
	aConstraint
	X ssa.Value
}

// String implements Constraint.
func (c *CopyConstraint) String() string {
	return fmt.Sprintf("%s = copy(%s)", c.Y().Name(), c.X.Name())
}

// Eval returns the current range of X.
func (c *CopyConstraint) Eval(g *Graph) Range {
	return g.Range(c.X)
}

// Operands returns the values this constraint depends on.
func (c *CopyConstraint) Operands() []ssa.Value {
	return []ssa.Value{c.X}
}

// NewCopyConstraint returns a constraint that copies the range of x
// to y.
func NewCopyConstraint(x, y ssa.Value) Constraint {
	return &CopyConstraint{
		aConstraint: aConstraint{
			y: y,
		},
		X: x,
	}
}
diff --git a/vendor/honnef.co/go/tools/stylecheck/analysis.go b/vendor/honnef.co/go/tools/stylecheck/analysis.go
index 0f93f4436..f252487f7 100644
--- a/vendor/honnef.co/go/tools/stylecheck/analysis.go
+++ b/vendor/honnef.co/go/tools/stylecheck/analysis.go
@@ -1,81 +1,111 @@
package stylecheck
import (
+ "flag"
+
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"honnef.co/go/tools/config"
"honnef.co/go/tools/facts"
- "honnef.co/go/tools/internal/passes/buildir"
+ "honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/lint/lintutil"
)
-var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{
+func newFlagSet() flag.FlagSet {
+ fs := flag.NewFlagSet("", flag.PanicOnError)
+ fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
+ return *fs
+}
+
+var Analyzers = map[string]*analysis.Analyzer{
"ST1000": {
- Run: CheckPackageComment,
+ Name: "ST1000",
+ Run: CheckPackageComment,
+ Doc: Docs["ST1000"].String(),
+ Requires: []*analysis.Analyzer{},
+ Flags: newFlagSet(),
},
"ST1001": {
+ Name: "ST1001",
Run: CheckDotImports,
+ Doc: Docs["ST1001"].String(),
Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
+ Flags: newFlagSet(),
},
"ST1003": {
+ Name: "ST1003",
Run: CheckNames,
- Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, config.Analyzer},
+ Doc: Docs["ST1003"].String(),
+ Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
+ Flags: newFlagSet(),
},
"ST1005": {
+ Name: "ST1005",
Run: CheckErrorStrings,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["ST1005"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"ST1006": {
+ Name: "ST1006",
Run: CheckReceiverNames,
- Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Generated},
+ Doc: Docs["ST1006"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
+ Flags: newFlagSet(),
},
"ST1008": {
+ Name: "ST1008",
Run: CheckErrorReturn,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Doc: Docs["ST1008"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"ST1011": {
- Run: CheckTimeNames,
- Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Name: "ST1011",
+ Run: CheckTimeNames,
+ Doc: Docs["ST1011"].String(),
+ Flags: newFlagSet(),
},
"ST1012": {
+ Name: "ST1012",
Run: CheckErrorVarNames,
+ Doc: Docs["ST1012"].String(),
Requires: []*analysis.Analyzer{config.Analyzer},
+ Flags: newFlagSet(),
},
"ST1013": {
- Run: CheckHTTPStatusCodes,
- // TODO(dh): why does this depend on facts.TokenFile?
- Requires: []*analysis.Analyzer{facts.Generated, facts.TokenFile, config.Analyzer, inspect.Analyzer},
+ Name: "ST1013",
+ Run: CheckHTTPStatusCodes,
+ Doc: Docs["ST1013"].String(),
+ Requires: []*analysis.Analyzer{facts.Generated, facts.TokenFile, config.Analyzer},
+ Flags: newFlagSet(),
},
"ST1015": {
+ Name: "ST1015",
Run: CheckDefaultCaseOrder,
+ Doc: Docs["ST1015"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
+ Flags: newFlagSet(),
},
"ST1016": {
+ Name: "ST1016",
Run: CheckReceiverNamesIdentical,
- Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Generated},
+ Doc: Docs["ST1016"].String(),
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
+ Flags: newFlagSet(),
},
"ST1017": {
+ Name: "ST1017",
Run: CheckYodaConditions,
+ Doc: Docs["ST1017"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
+ Flags: newFlagSet(),
},
"ST1018": {
+ Name: "ST1018",
Run: CheckInvisibleCharacters,
+ Doc: Docs["ST1018"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Flags: newFlagSet(),
},
- "ST1019": {
- Run: CheckDuplicatedImports,
- Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
- },
- "ST1020": {
- Run: CheckExportedFunctionDocs,
- Requires: []*analysis.Analyzer{facts.Generated, inspect.Analyzer},
- },
- "ST1021": {
- Run: CheckExportedTypeDocs,
- Requires: []*analysis.Analyzer{facts.Generated, inspect.Analyzer},
- },
- "ST1022": {
- Run: CheckExportedVarDocs,
- Requires: []*analysis.Analyzer{facts.Generated, inspect.Analyzer},
- },
-})
+}
diff --git a/vendor/honnef.co/go/tools/stylecheck/doc.go b/vendor/honnef.co/go/tools/stylecheck/doc.go
index c3420ade7..9097214d9 100644
--- a/vendor/honnef.co/go/tools/stylecheck/doc.go
+++ b/vendor/honnef.co/go/tools/stylecheck/doc.go
@@ -3,7 +3,7 @@ package stylecheck
import "honnef.co/go/tools/lint"
var Docs = map[string]*lint.Documentation{
- "ST1000": {
+ "ST1000": &lint.Documentation{
Title: `Incorrect or missing package comment`,
Text: `Packages must have a package comment that is formatted according to
the guidelines laid out in
@@ -12,7 +12,7 @@ https://github.com/golang/go/wiki/CodeReviewComments#package-comments.`,
NonDefault: true,
},
- "ST1001": {
+ "ST1001": &lint.Documentation{
Title: `Dot imports are discouraged`,
Text: `Dot imports that aren't in external test packages are discouraged.
@@ -42,7 +42,7 @@ Quoting Go Code Review Comments:
Options: []string{"dot_import_whitelist"},
},
- "ST1003": {
+ "ST1003": &lint.Documentation{
Title: `Poorly chosen identifier`,
Text: `Identifiers, such as variable and package names, follow certain rules.
@@ -57,7 +57,7 @@ See the following links for details:
Options: []string{"initialisms"},
},
- "ST1005": {
+ "ST1005": &lint.Documentation{
Title: `Incorrectly formatted error string`,
Text: `Error strings follow a set of guidelines to ensure uniformity and good
composability.
@@ -73,7 +73,7 @@ Quoting Go Code Review Comments:
Since: "2019.1",
},
- "ST1006": {
+ "ST1006": &lint.Documentation{
Title: `Poorly chosen receiver name`,
Text: `Quoting Go Code Review Comments:
@@ -91,13 +91,13 @@ Quoting Go Code Review Comments:
Since: "2019.1",
},
- "ST1008": {
+ "ST1008": &lint.Documentation{
Title: `A function's error value should be its last return value`,
Text: `A function's error value should be its last return value.`,
Since: `2019.1`,
},
- "ST1011": {
+ "ST1011": &lint.Documentation{
Title: `Poorly chosen name for variable of type time.Duration`,
Text: `time.Duration values represent an amount of time, which is represented
as a count of nanoseconds. An expression like 5 * time.Microsecond
@@ -107,14 +107,14 @@ Milli.`,
Since: `2019.1`,
},
- "ST1012": {
+ "ST1012": &lint.Documentation{
Title: `Poorly chosen name for error variable`,
Text: `Error variables that are part of an API should be called errFoo or
ErrFoo.`,
Since: "2019.1",
},
- "ST1013": {
+ "ST1013": &lint.Documentation{
Title: `Should use constants for HTTP error codes, not magic numbers`,
Text: `HTTP has a tremendous number of status codes. While some of those are
well known (200, 400, 404, 500), most of them are not. The net/http
@@ -126,18 +126,18 @@ readability of your code.`,
Options: []string{"http_status_code_whitelist"},
},
- "ST1015": {
+ "ST1015": &lint.Documentation{
Title: `A switch's default case should be the first or last case`,
Since: "2019.1",
},
- "ST1016": {
+ "ST1016": &lint.Documentation{
Title: `Use consistent method receiver names`,
Since: "2019.1",
NonDefault: true,
},
- "ST1017": {
+ "ST1017": &lint.Documentation{
Title: `Don't use Yoda conditions`,
Text: `Yoda conditions are conditions of the kind 'if 42 == x', where the
literal is on the left side of the comparison. These are a common
@@ -147,64 +147,8 @@ bug, we prefer the more idiomatic 'if x == 42'.`,
Since: "2019.2",
},
- "ST1018": {
+ "ST1018": &lint.Documentation{
Title: `Avoid zero-width and control characters in string literals`,
Since: "2019.2",
},
-
- "ST1019": {
- Title: `Importing the same package multiple times`,
- Since: "Unreleased",
- },
-
- "ST1020": {
- Title: "The documentation of an exported function should start with the function's name",
- Text: `Doc comments work best as complete sentences, which
-allow a wide variety of automated presentations. The first sentence
-should be a one-sentence summary that starts with the name being
-declared.
-
-If every doc comment begins with the name of the item it describes,
-you can use the doc subcommand of the go tool and run the output
-through grep.
-
-See https://golang.org/doc/effective_go.html#commentary for more
-information on how to write good documentation.`,
- Since: "Unreleased",
- NonDefault: true,
- },
-
- "ST1021": {
- Title: "The documentation of an exported type should start with type's name",
- Text: `Doc comments work best as complete sentences, which
-allow a wide variety of automated presentations. The first sentence
-should be a one-sentence summary that starts with the name being
-declared.
-
-If every doc comment begins with the name of the item it describes,
-you can use the doc subcommand of the go tool and run the output
-through grep.
-
-See https://golang.org/doc/effective_go.html#commentary for more
-information on how to write good documentation.`,
- Since: "Unreleased",
- NonDefault: true,
- },
-
- "ST1022": {
- Title: "The documentation of an exported variable or constant should start with variable's name",
- Text: `Doc comments work best as complete sentences, which
-allow a wide variety of automated presentations. The first sentence
-should be a one-sentence summary that starts with the name being
-declared.
-
-If every doc comment begins with the name of the item it describes,
-you can use the doc subcommand of the go tool and run the output
-through grep.
-
-See https://golang.org/doc/effective_go.html#commentary for more
-information on how to write good documentation.`,
- Since: "Unreleased",
- NonDefault: true,
- },
}
diff --git a/vendor/honnef.co/go/tools/stylecheck/lint.go b/vendor/honnef.co/go/tools/stylecheck/lint.go
index c23f22a56..1699d5898 100644
--- a/vendor/honnef.co/go/tools/stylecheck/lint.go
+++ b/vendor/honnef.co/go/tools/stylecheck/lint.go
@@ -1,4 +1,4 @@
-package stylecheck
+package stylecheck // import "honnef.co/go/tools/stylecheck"
import (
"fmt"
@@ -6,20 +6,15 @@ import (
"go/constant"
"go/token"
"go/types"
- "sort"
"strconv"
"strings"
"unicode"
"unicode/utf8"
- "honnef.co/go/tools/code"
"honnef.co/go/tools/config"
- "honnef.co/go/tools/edit"
- "honnef.co/go/tools/internal/passes/buildir"
- "honnef.co/go/tools/ir"
+ "honnef.co/go/tools/internal/passes/buildssa"
. "honnef.co/go/tools/lint/lintdsl"
- "honnef.co/go/tools/pattern"
- "honnef.co/go/tools/report"
+ "honnef.co/go/tools/ssa"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
@@ -41,14 +36,14 @@ func CheckPackageComment(pass *analysis.Pass) (interface{}, error) {
}
hasDocs := false
for _, f := range pass.Files {
- if code.IsInTest(pass, f) {
+ if IsInTest(pass, f) {
continue
}
if f.Doc != nil && len(f.Doc.List) > 0 {
hasDocs = true
prefix := "Package " + f.Name.Name + " "
if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) {
- report.Report(pass, f.Doc, fmt.Sprintf(`package comment should be of the form "%s..."`, prefix))
+ ReportNodef(pass, f.Doc, `package comment should be of the form "%s..."`, prefix)
}
f.Doc.Text()
}
@@ -56,10 +51,10 @@ func CheckPackageComment(pass *analysis.Pass) (interface{}, error) {
if !hasDocs {
for _, f := range pass.Files {
- if code.IsInTest(pass, f) {
+ if IsInTest(pass, f) {
continue
}
- report.Report(pass, f, "at least one file in a package should have a package comment", report.ShortRange())
+ ReportNodef(pass, f, "at least one file in a package should have a package comment")
}
}
return nil, nil
@@ -77,38 +72,8 @@ func CheckDotImports(pass *analysis.Pass) (interface{}, error) {
}
}
- if imp.Name != nil && imp.Name.Name == "." && !code.IsInTest(pass, f) {
- report.Report(pass, imp, "should not use dot imports", report.FilterGenerated())
- }
- }
- }
- return nil, nil
-}
-
-func CheckDuplicatedImports(pass *analysis.Pass) (interface{}, error) {
- for _, f := range pass.Files {
- // Collect all imports by their import path
- imports := make(map[string][]*ast.ImportSpec, len(f.Imports))
- for _, imp := range f.Imports {
- imports[imp.Path.Value] = append(imports[imp.Path.Value], imp)
- }
-
- for path, value := range imports {
- if path[1:len(path)-1] == "unsafe" {
- // Don't flag unsafe. Cgo generated code imports
- // unsafe using the blank identifier, and most
- // user-written cgo code also imports unsafe
- // explicitly.
- continue
- }
- // If there's more than one import per path, we flag that
- if len(value) > 1 {
- s := fmt.Sprintf("package %s is being imported more than once", path)
- opts := []report.Option{report.FilterGenerated()}
- for _, imp := range value[1:] {
- opts = append(opts, report.Related(imp, fmt.Sprintf("other import of %s", path)))
- }
- report.Report(pass, value[0], s, opts...)
+ if imp.Name != nil && imp.Name.Name == "." && !IsInTest(pass, f) {
+ ReportNodefFG(pass, imp, "should not use dot imports")
}
}
}
@@ -118,7 +83,7 @@ func CheckDuplicatedImports(pass *analysis.Pass) (interface{}, error) {
func CheckBlankImports(pass *analysis.Pass) (interface{}, error) {
fset := pass.Fset
for _, f := range pass.Files {
- if code.IsMainLike(pass) || code.IsInTest(pass, f) {
+ if IsInMain(pass, f) || IsInTest(pass, f) {
continue
}
@@ -152,7 +117,7 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) {
for i, imp := range f.Imports {
pos := fset.Position(imp.Pos())
- if !code.IsBlank(imp.Name) {
+ if !IsBlank(imp.Name) {
continue
}
// Only flag the first blank import in a group of imports,
@@ -161,13 +126,13 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) {
if i > 0 {
prev := f.Imports[i-1]
prevPos := fset.Position(prev.Pos())
- if pos.Line-1 == prevPos.Line && code.IsBlank(prev.Name) {
+ if pos.Line-1 == prevPos.Line && IsBlank(prev.Name) {
continue
}
}
if imp.Doc == nil && imp.Comment == nil && !skip[imp] {
- report.Report(pass, imp, "a blank import should be only in a main or test package, or have a comment justifying it")
+ ReportNodef(pass, imp, "a blank import should be only in a main or test package, or have a comment justifying it")
}
}
}
@@ -187,7 +152,7 @@ func CheckIncDec(pass *analysis.Pass) (interface{}, error) {
return
}
if (len(assign.Lhs) != 1 || len(assign.Rhs) != 1) ||
- !code.IsIntLiteral(assign.Rhs[0], "1") {
+ !IsIntLiteral(assign.Rhs[0], "1") {
return
}
@@ -199,15 +164,15 @@ func CheckIncDec(pass *analysis.Pass) (interface{}, error) {
suffix = "--"
}
- report.Report(pass, assign, fmt.Sprintf("should replace %s with %s%s", report.Render(pass, assign), report.Render(pass, assign.Lhs[0]), suffix))
+ ReportNodef(pass, assign, "should replace %s with %s%s", Render(pass, assign), Render(pass, assign.Lhs[0]), suffix)
}
- code.Preorder(pass, fn, (*ast.AssignStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn)
return nil, nil
}
func CheckErrorReturn(pass *analysis.Pass) (interface{}, error) {
fnLoop:
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
sig := fn.Type().(*types.Signature)
rets := sig.Results()
if rets == nil || rets.Len() < 2 {
@@ -221,7 +186,7 @@ fnLoop:
}
for i := rets.Len() - 2; i >= 0; i-- {
if rets.At(i).Type() == types.Universe.Lookup("error").Type() {
- report.Report(pass, rets.At(i), "error should be returned as the last argument", report.ShortRange())
+ pass.Reportf(rets.At(i).Pos(), "error should be returned as the last argument")
continue fnLoop
}
}
@@ -232,23 +197,23 @@ fnLoop:
// CheckUnexportedReturn checks that exported functions on exported
// types do not return unexported types.
func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) {
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
if fn.Synthetic != "" || fn.Parent() != nil {
continue
}
- if !ast.IsExported(fn.Name()) || code.IsMain(pass) || code.IsInTest(pass, fn) {
+ if !ast.IsExported(fn.Name()) || IsInMain(pass, fn) || IsInTest(pass, fn) {
continue
}
sig := fn.Type().(*types.Signature)
- if sig.Recv() != nil && !ast.IsExported(code.Dereference(sig.Recv().Type()).(*types.Named).Obj().Name()) {
+ if sig.Recv() != nil && !ast.IsExported(Dereference(sig.Recv().Type()).(*types.Named).Obj().Name()) {
continue
}
res := sig.Results()
for i := 0; i < res.Len(); i++ {
- if named, ok := code.DereferenceR(res.At(i).Type()).(*types.Named); ok &&
+ if named, ok := DereferenceR(res.At(i).Type()).(*types.Named); ok &&
!ast.IsExported(named.Obj().Name()) &&
named != types.Universe.Lookup("error").Type() {
- report.Report(pass, fn, "should not return unexported type")
+ pass.Reportf(fn.Pos(), "should not return unexported type")
}
}
}
@@ -256,22 +221,22 @@ func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) {
}
func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) {
- irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
- for _, m := range irpkg.Members {
+ ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
+ for _, m := range ssapkg.Members {
if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() {
ms := typeutil.IntuitiveMethodSet(T.Type(), nil)
for _, sel := range ms {
fn := sel.Obj().(*types.Func)
recv := fn.Type().(*types.Signature).Recv()
- if code.Dereference(recv.Type()) != T.Type() {
+ if Dereference(recv.Type()) != T.Type() {
// skip embedded methods
continue
}
if recv.Name() == "self" || recv.Name() == "this" {
- report.Report(pass, recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`, report.FilterGenerated())
+ ReportfFG(pass, recv.Pos(), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`)
}
if recv.Name() == "_" {
- report.Report(pass, recv, "receiver name should not be an underscore, omit the name if it is unused", report.FilterGenerated())
+ ReportfFG(pass, recv.Pos(), "receiver name should not be an underscore, omit the name if it is unused")
}
}
}
@@ -280,8 +245,8 @@ func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) {
}
func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) {
- irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
- for _, m := range irpkg.Members {
+ ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
+ for _, m := range ssapkg.Members {
names := map[string]int{}
var firstFn *types.Func
@@ -290,11 +255,7 @@ func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) {
for _, sel := range ms {
fn := sel.Obj().(*types.Func)
recv := fn.Type().(*types.Signature).Recv()
- if code.IsGenerated(pass, recv.Pos()) {
- // Don't concern ourselves with methods in generated code
- continue
- }
- if code.Dereference(recv.Type()) != T.Type() {
+ if Dereference(recv.Type()) != T.Type() {
// skip embedded methods
continue
}
@@ -312,9 +273,8 @@ func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) {
for name, count := range names {
seen = append(seen, fmt.Sprintf("%dx %q", count, name))
}
- sort.Strings(seen)
- report.Report(pass, firstFn, fmt.Sprintf("methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")))
+ pass.Reportf(firstFn.Pos(), "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", "))
}
}
return nil, nil
@@ -324,7 +284,7 @@ func CheckContextFirstArg(pass *analysis.Pass) (interface{}, error) {
// TODO(dh): this check doesn't apply to test helpers. Example from the stdlib:
// func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) {
fnLoop:
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
if fn.Synthetic != "" || fn.Parent() != nil {
continue
}
@@ -338,7 +298,7 @@ fnLoop:
for i := 1; i < params.Len(); i++ {
param := params.At(i)
if types.TypeString(param.Type(), nil) == "context.Context" {
- report.Report(pass, param, "context.Context should be the first argument of a function", report.ShortRange())
+ pass.Reportf(param.Pos(), "context.Context should be the first argument of a function")
continue fnLoop
}
}
@@ -347,20 +307,20 @@ fnLoop:
}
func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
- objNames := map[*ir.Package]map[string]bool{}
- irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
- objNames[irpkg] = map[string]bool{}
- for _, m := range irpkg.Members {
- if typ, ok := m.(*ir.Type); ok {
- objNames[irpkg][typ.Name()] = true
+ objNames := map[*ssa.Package]map[string]bool{}
+ ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
+ objNames[ssapkg] = map[string]bool{}
+ for _, m := range ssapkg.Members {
+ if typ, ok := m.(*ssa.Type); ok {
+ objNames[ssapkg][typ.Name()] = true
}
}
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
objNames[fn.Package()][fn.Name()] = true
}
- for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
- if code.IsInTest(pass, fn) {
+ for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
+ if IsInTest(pass, fn) {
// We don't care about malformed error messages in tests;
// they're usually for direct human consumption, not part
// of an API
@@ -369,15 +329,15 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
for _, block := range fn.Blocks {
instrLoop:
for _, ins := range block.Instrs {
- call, ok := ins.(*ir.Call)
+ call, ok := ins.(*ssa.Call)
if !ok {
continue
}
- if !code.IsCallToAny(call.Common(), "errors.New", "fmt.Errorf") {
+ if !IsCallTo(call.Common(), "errors.New") && !IsCallTo(call.Common(), "fmt.Errorf") {
continue
}
- k, ok := call.Common().Args[0].(*ir.Const)
+ k, ok := call.Common().Args[0].(*ssa.Const)
if !ok {
continue
}
@@ -388,7 +348,7 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
}
switch s[len(s)-1] {
case '.', ':', '!', '\n':
- report.Report(pass, call, "error strings should not end with punctuation or a newline")
+ pass.Reportf(call.Pos(), "error strings should not end with punctuation or a newline")
}
idx := strings.IndexByte(s, ' ')
if idx == -1 {
@@ -422,7 +382,7 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
//
// It could still be a proper noun, though.
- report.Report(pass, call, "error strings should not be capitalized")
+ pass.Reportf(call.Pos(), "error strings should not be capitalized")
}
}
}
@@ -437,47 +397,34 @@ func CheckTimeNames(pass *analysis.Pass) (interface{}, error) {
"Usec", "Usecs", "Microseconds",
"MS", "Ms",
}
- fn := func(names []*ast.Ident) {
+ fn := func(T types.Type, names []*ast.Ident) {
+ if !IsType(T, "time.Duration") && !IsType(T, "*time.Duration") {
+ return
+ }
for _, name := range names {
- if _, ok := pass.TypesInfo.Defs[name]; !ok {
- continue
- }
- T := pass.TypesInfo.TypeOf(name)
- if !code.IsType(T, "time.Duration") && !code.IsType(T, "*time.Duration") {
- continue
- }
for _, suffix := range suffixes {
if strings.HasSuffix(name.Name, suffix) {
- report.Report(pass, name, fmt.Sprintf("var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix))
+ ReportNodef(pass, name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix)
break
}
}
}
}
-
- fn2 := func(node ast.Node) {
- switch node := node.(type) {
- case *ast.ValueSpec:
- fn(node.Names)
- case *ast.FieldList:
- for _, field := range node.List {
- fn(field.Names)
- }
- case *ast.AssignStmt:
- if node.Tok != token.DEFINE {
- break
- }
- var names []*ast.Ident
- for _, lhs := range node.Lhs {
- if lhs, ok := lhs.(*ast.Ident); ok {
- names = append(names, lhs)
+ for _, f := range pass.Files {
+ ast.Inspect(f, func(node ast.Node) bool {
+ switch node := node.(type) {
+ case *ast.ValueSpec:
+ T := pass.TypesInfo.TypeOf(node.Type)
+ fn(T, node.Names)
+ case *ast.FieldList:
+ for _, field := range node.List {
+ T := pass.TypesInfo.TypeOf(field.Type)
+ fn(T, field.Names)
}
}
- fn(names)
- }
+ return true
+ })
}
-
- code.Preorder(pass, fn2, (*ast.ValueSpec)(nil), (*ast.FieldList)(nil), (*ast.AssignStmt)(nil))
return nil, nil
}
@@ -496,21 +443,16 @@ func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) {
for i, name := range spec.Names {
val := spec.Values[i]
- if !code.IsCallToAnyAST(pass, val, "errors.New", "fmt.Errorf") {
+ if !IsCallToAST(pass, val, "errors.New") && !IsCallToAST(pass, val, "fmt.Errorf") {
continue
}
- if pass.Pkg.Path() == "net/http" && strings.HasPrefix(name.Name, "http2err") {
- // special case for internal variable names of
- // bundled HTTP 2 code in net/http
- continue
- }
prefix := "err"
if name.IsExported() {
prefix = "Err"
}
if !strings.HasPrefix(name.Name, prefix) {
- report.Report(pass, name, fmt.Sprintf("error var %s should have name of the form %sFoo", name.Name, prefix))
+ ReportNodef(pass, name, "error var %s should have name of the form %sFoo", name.Name, prefix)
}
}
}
@@ -586,11 +528,17 @@ func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) {
for _, code := range config.For(pass).HTTPStatusCodeWhitelist {
whitelist[code] = true
}
- fn := func(node ast.Node) {
- call := node.(*ast.CallExpr)
+ fn := func(node ast.Node) bool {
+ if node == nil {
+ return true
+ }
+ call, ok := node.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
var arg int
- switch code.CallNameAST(pass, call) {
+ switch CallNameAST(pass, call) {
case "net/http.Error":
arg = 2
case "net/http.Redirect":
@@ -600,29 +548,31 @@ func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) {
case "net/http.RedirectHandler":
arg = 1
default:
- return
+ return true
}
lit, ok := call.Args[arg].(*ast.BasicLit)
if !ok {
- return
+ return true
}
if whitelist[lit.Value] {
- return
+ return true
}
n, err := strconv.Atoi(lit.Value)
if err != nil {
- return
+ return true
}
s, ok := httpStatusCodes[n]
if !ok {
- return
+ return true
}
- report.Report(pass, lit, fmt.Sprintf("should use constant http.%s instead of numeric literal %d", s, n),
- report.FilterGenerated(),
- report.Fixes(edit.Fix(fmt.Sprintf("use http.%s instead of %d", s, n), edit.ReplaceWithString(pass.Fset, lit, "http."+s))))
+ ReportNodefFG(pass, lit, "should use constant http.%s instead of numeric literal %d", s, n)
+ return true
+ }
+ // OPT(dh): replace with inspector
+ for _, f := range pass.Files {
+ ast.Inspect(f, fn)
}
- code.Preorder(pass, fn, (*ast.CallExpr)(nil))
return nil, nil
}
@@ -632,29 +582,31 @@ func CheckDefaultCaseOrder(pass *analysis.Pass) (interface{}, error) {
list := stmt.Body.List
for i, c := range list {
if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 {
- report.Report(pass, c, "default case should be first or last in switch statement", report.FilterGenerated())
+ ReportNodefFG(pass, c, "default case should be first or last in switch statement")
break
}
}
}
- code.Preorder(pass, fn, (*ast.SwitchStmt)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn)
return nil, nil
}
-var (
- checkYodaConditionsQ = pattern.MustParse(`(BinaryExpr left@(BasicLit _ _) tok@(Or "==" "!=") right@(Not (BasicLit _ _)))`)
- checkYodaConditionsR = pattern.MustParse(`(BinaryExpr right tok left)`)
-)
-
func CheckYodaConditions(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
- if _, edits, ok := MatchAndEdit(pass, checkYodaConditionsQ, checkYodaConditionsR, node); ok {
- report.Report(pass, node, "don't use Yoda conditions",
- report.FilterGenerated(),
- report.Fixes(edit.Fix("un-Yoda-fy", edits...)))
+ cond := node.(*ast.BinaryExpr)
+ if cond.Op != token.EQL && cond.Op != token.NEQ {
+ return
+ }
+ if _, ok := cond.X.(*ast.BasicLit); !ok {
+ return
}
+ if _, ok := cond.Y.(*ast.BasicLit); ok {
+ // Don't flag lit == lit conditions, just in case
+ return
+ }
+ ReportNodefFG(pass, cond, "don't use Yoda conditions")
}
- code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
return nil, nil
}
@@ -664,251 +616,14 @@ func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) {
if lit.Kind != token.STRING {
return
}
-
- type invalid struct {
- r rune
- off int
- }
- var invalids []invalid
- hasFormat := false
- hasControl := false
- for off, r := range lit.Value {
+ for _, r := range lit.Value {
if unicode.Is(unicode.Cf, r) {
- invalids = append(invalids, invalid{r, off})
- hasFormat = true
+ ReportNodef(pass, lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r)
} else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' {
- invalids = append(invalids, invalid{r, off})
- hasControl = true
+ ReportNodef(pass, lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r)
}
}
-
- switch len(invalids) {
- case 0:
- return
- case 1:
- var kind string
- if hasFormat {
- kind = "format"
- } else if hasControl {
- kind = "control"
- } else {
- panic("unreachable")
- }
-
- r := invalids[0]
- msg := fmt.Sprintf("string literal contains the Unicode %s character %U, consider using the %q escape sequence instead", kind, r.r, r.r)
-
- replacement := strconv.QuoteRune(r.r)
- replacement = replacement[1 : len(replacement)-1]
- edit := analysis.SuggestedFix{
- Message: fmt.Sprintf("replace %s character %U with %q", kind, r.r, r.r),
- TextEdits: []analysis.TextEdit{{
- Pos: lit.Pos() + token.Pos(r.off),
- End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
- NewText: []byte(replacement),
- }},
- }
- delete := analysis.SuggestedFix{
- Message: fmt.Sprintf("delete %s character %U", kind, r),
- TextEdits: []analysis.TextEdit{{
- Pos: lit.Pos() + token.Pos(r.off),
- End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
- }},
- }
- report.Report(pass, lit, msg, report.Fixes(edit, delete))
- default:
- var kind string
- if hasFormat && hasControl {
- kind = "format and control"
- } else if hasFormat {
- kind = "format"
- } else if hasControl {
- kind = "control"
- } else {
- panic("unreachable")
- }
-
- msg := fmt.Sprintf("string literal contains Unicode %s characters, consider using escape sequences instead", kind)
- var edits []analysis.TextEdit
- var deletions []analysis.TextEdit
- for _, r := range invalids {
- replacement := strconv.QuoteRune(r.r)
- replacement = replacement[1 : len(replacement)-1]
- edits = append(edits, analysis.TextEdit{
- Pos: lit.Pos() + token.Pos(r.off),
- End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
- NewText: []byte(replacement),
- })
- deletions = append(deletions, analysis.TextEdit{
- Pos: lit.Pos() + token.Pos(r.off),
- End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
- })
- }
- edit := analysis.SuggestedFix{
- Message: fmt.Sprintf("replace all %s characters with escape sequences", kind),
- TextEdits: edits,
- }
- delete := analysis.SuggestedFix{
- Message: fmt.Sprintf("delete all %s characters", kind),
- TextEdits: deletions,
- }
- report.Report(pass, lit, msg, report.Fixes(edit, delete))
- }
}
- code.Preorder(pass, fn, (*ast.BasicLit)(nil))
- return nil, nil
-}
-
-func CheckExportedFunctionDocs(pass *analysis.Pass) (interface{}, error) {
- fn := func(node ast.Node) {
- if code.IsInTest(pass, node) {
- return
- }
-
- decl := node.(*ast.FuncDecl)
- if decl.Doc == nil {
- return
- }
- if !ast.IsExported(decl.Name.Name) {
- return
- }
- kind := "function"
- if decl.Recv != nil {
- kind = "method"
- switch T := decl.Recv.List[0].Type.(type) {
- case *ast.StarExpr:
- if !ast.IsExported(T.X.(*ast.Ident).Name) {
- return
- }
- case *ast.Ident:
- if !ast.IsExported(T.Name) {
- return
- }
- default:
- ExhaustiveTypeSwitch(T)
- }
- }
- prefix := decl.Name.Name + " "
- if !strings.HasPrefix(decl.Doc.Text(), prefix) {
- report.Report(pass, decl.Doc, fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, decl.Name.Name, prefix), report.FilterGenerated())
- }
- }
-
- code.Preorder(pass, fn, (*ast.FuncDecl)(nil))
- return nil, nil
-}
-
-func CheckExportedTypeDocs(pass *analysis.Pass) (interface{}, error) {
- var genDecl *ast.GenDecl
- fn := func(node ast.Node, push bool) bool {
- if !push {
- genDecl = nil
- return false
- }
- if code.IsInTest(pass, node) {
- return false
- }
-
- switch node := node.(type) {
- case *ast.GenDecl:
- if node.Tok == token.IMPORT {
- return false
- }
- genDecl = node
- return true
- case *ast.TypeSpec:
- if !ast.IsExported(node.Name.Name) {
- return false
- }
-
- doc := node.Doc
- if doc == nil {
- if len(genDecl.Specs) != 1 {
- // more than one spec in the GenDecl, don't validate the
- // docstring
- return false
- }
- if genDecl.Lparen.IsValid() {
- // 'type ( T )' is weird, don't guess the user's intention
- return false
- }
- doc = genDecl.Doc
- if doc == nil {
- return false
- }
- }
-
- s := doc.Text()
- articles := [...]string{"A", "An", "The"}
- for _, a := range articles {
- if strings.HasPrefix(s, a+" ") {
- s = s[len(a)+1:]
- break
- }
- }
- if !strings.HasPrefix(s, node.Name.Name+" ") {
- report.Report(pass, doc, fmt.Sprintf(`comment on exported type %s should be of the form "%s ..." (with optional leading article)`, node.Name.Name, node.Name.Name), report.FilterGenerated())
- }
- return false
- case *ast.FuncLit, *ast.FuncDecl:
- return false
- default:
- ExhaustiveTypeSwitch(node)
- return false
- }
- }
-
- pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.GenDecl)(nil), (*ast.TypeSpec)(nil), (*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn)
- return nil, nil
-}
-
-func CheckExportedVarDocs(pass *analysis.Pass) (interface{}, error) {
- var genDecl *ast.GenDecl
- fn := func(node ast.Node, push bool) bool {
- if !push {
- genDecl = nil
- return false
- }
- if code.IsInTest(pass, node) {
- return false
- }
-
- switch node := node.(type) {
- case *ast.GenDecl:
- if node.Tok == token.IMPORT {
- return false
- }
- genDecl = node
- return true
- case *ast.ValueSpec:
- if genDecl.Lparen.IsValid() || len(node.Names) > 1 {
- // Don't try to guess the user's intention
- return false
- }
- name := node.Names[0].Name
- if !ast.IsExported(name) {
- return false
- }
- if genDecl.Doc == nil {
- return false
- }
- prefix := name + " "
- if !strings.HasPrefix(genDecl.Doc.Text(), prefix) {
- kind := "var"
- if genDecl.Tok == token.CONST {
- kind = "const"
- }
- report.Report(pass, genDecl.Doc, fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), report.FilterGenerated())
- }
- return false
- case *ast.FuncLit, *ast.FuncDecl:
- return false
- default:
- ExhaustiveTypeSwitch(node)
- return false
- }
- }
-
- pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.GenDecl)(nil), (*ast.ValueSpec)(nil), (*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn)
+ pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn)
return nil, nil
}
diff --git a/vendor/honnef.co/go/tools/stylecheck/names.go b/vendor/honnef.co/go/tools/stylecheck/names.go
index ffc689e98..160f9d7ff 100644
--- a/vendor/honnef.co/go/tools/stylecheck/names.go
+++ b/vendor/honnef.co/go/tools/stylecheck/names.go
@@ -4,16 +4,14 @@
package stylecheck
import (
- "fmt"
"go/ast"
"go/token"
"strings"
"unicode"
"golang.org/x/tools/go/analysis"
- "honnef.co/go/tools/code"
"honnef.co/go/tools/config"
- "honnef.co/go/tools/report"
+ . "honnef.co/go/tools/lint/lintdsl"
)
// knownNameExceptions is a set of names that are known to be exempt from naming checks.
@@ -48,7 +46,7 @@ func CheckNames(pass *analysis.Pass) (interface{}, error) {
// Handle two common styles from other languages that don't belong in Go.
if len(id.Name) >= 5 && allCaps(id.Name) && strings.Contains(id.Name, "_") {
- report.Report(pass, id, "should not use ALL_CAPS in Go names; use CamelCase instead", report.FilterGenerated())
+ ReportfFG(pass, id.Pos(), "should not use ALL_CAPS in Go names; use CamelCase instead")
return
}
@@ -58,10 +56,10 @@ func CheckNames(pass *analysis.Pass) (interface{}, error) {
}
if len(id.Name) > 2 && strings.Contains(id.Name[1:len(id.Name)-1], "_") {
- report.Report(pass, id, fmt.Sprintf("should not use underscores in Go names; %s %s should be %s", thing, id.Name, should), report.FilterGenerated())
+ ReportfFG(pass, id.Pos(), "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should)
return
}
- report.Report(pass, id, fmt.Sprintf("%s %s should be %s", thing, id.Name, should), report.FilterGenerated())
+ ReportfFG(pass, id.Pos(), "%s %s should be %s", thing, id.Name, should)
}
checkList := func(fl *ast.FieldList, thing string, initialisms map[string]bool) {
if fl == nil {
@@ -82,111 +80,101 @@ func CheckNames(pass *analysis.Pass) (interface{}, error) {
for _, f := range pass.Files {
// Package names need slightly different handling than other names.
if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") {
- report.Report(pass, f, "should not use underscores in package names", report.FilterGenerated())
+ ReportfFG(pass, f.Pos(), "should not use underscores in package names")
}
if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 {
- report.Report(pass, f, fmt.Sprintf("should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)), report.FilterGenerated())
+ ReportfFG(pass, f.Pos(), "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name))
}
- }
- fn := func(node ast.Node) {
- switch v := node.(type) {
- case *ast.AssignStmt:
- if v.Tok != token.DEFINE {
- return
- }
- for _, exp := range v.Lhs {
- if id, ok := exp.(*ast.Ident); ok {
- check(id, "var", initialisms)
+ ast.Inspect(f, func(node ast.Node) bool {
+ switch v := node.(type) {
+ case *ast.AssignStmt:
+ if v.Tok != token.DEFINE {
+ return true
+ }
+ for _, exp := range v.Lhs {
+ if id, ok := exp.(*ast.Ident); ok {
+ check(id, "var", initialisms)
+ }
+ }
+ case *ast.FuncDecl:
+ // Functions with no body are defined elsewhere (in
+ // assembly, or via go:linkname). These are likely to
+ // be something very low level (such as the runtime),
+ // where our rules don't apply.
+ if v.Body == nil {
+ return true
}
- }
- case *ast.FuncDecl:
- // Functions with no body are defined elsewhere (in
- // assembly, or via go:linkname). These are likely to
- // be something very low level (such as the runtime),
- // where our rules don't apply.
- if v.Body == nil {
- return
- }
- if code.IsInTest(pass, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
- return
- }
+ if IsInTest(pass, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
+ return true
+ }
- thing := "func"
- if v.Recv != nil {
- thing = "method"
- }
+ thing := "func"
+ if v.Recv != nil {
+ thing = "method"
+ }
- if !isTechnicallyExported(v) {
- check(v.Name, thing, initialisms)
- }
+ if !isTechnicallyExported(v) {
+ check(v.Name, thing, initialisms)
+ }
- checkList(v.Type.Params, thing+" parameter", initialisms)
- checkList(v.Type.Results, thing+" result", initialisms)
- case *ast.GenDecl:
- if v.Tok == token.IMPORT {
- return
- }
- var thing string
- switch v.Tok {
- case token.CONST:
- thing = "const"
- case token.TYPE:
- thing = "type"
- case token.VAR:
- thing = "var"
- }
- for _, spec := range v.Specs {
- switch s := spec.(type) {
- case *ast.TypeSpec:
- check(s.Name, thing, initialisms)
- case *ast.ValueSpec:
- for _, id := range s.Names {
- check(id, thing, initialisms)
+ checkList(v.Type.Params, thing+" parameter", initialisms)
+ checkList(v.Type.Results, thing+" result", initialisms)
+ case *ast.GenDecl:
+ if v.Tok == token.IMPORT {
+ return true
+ }
+ var thing string
+ switch v.Tok {
+ case token.CONST:
+ thing = "const"
+ case token.TYPE:
+ thing = "type"
+ case token.VAR:
+ thing = "var"
+ }
+ for _, spec := range v.Specs {
+ switch s := spec.(type) {
+ case *ast.TypeSpec:
+ check(s.Name, thing, initialisms)
+ case *ast.ValueSpec:
+ for _, id := range s.Names {
+ check(id, thing, initialisms)
+ }
}
}
- }
- case *ast.InterfaceType:
- // Do not check interface method names.
- // They are often constrained by the method names of concrete types.
- for _, x := range v.Methods.List {
- ft, ok := x.Type.(*ast.FuncType)
- if !ok { // might be an embedded interface name
- continue
+ case *ast.InterfaceType:
+ // Do not check interface method names.
+ // They are often constrainted by the method names of concrete types.
+ for _, x := range v.Methods.List {
+ ft, ok := x.Type.(*ast.FuncType)
+ if !ok { // might be an embedded interface name
+ continue
+ }
+ checkList(ft.Params, "interface method parameter", initialisms)
+ checkList(ft.Results, "interface method result", initialisms)
}
- checkList(ft.Params, "interface method parameter", initialisms)
- checkList(ft.Results, "interface method result", initialisms)
- }
- case *ast.RangeStmt:
- if v.Tok == token.ASSIGN {
- return
- }
- if id, ok := v.Key.(*ast.Ident); ok {
- check(id, "range var", initialisms)
- }
- if id, ok := v.Value.(*ast.Ident); ok {
- check(id, "range var", initialisms)
- }
- case *ast.StructType:
- for _, f := range v.Fields.List {
- for _, id := range f.Names {
- check(id, "struct field", initialisms)
+ case *ast.RangeStmt:
+ if v.Tok == token.ASSIGN {
+ return true
+ }
+ if id, ok := v.Key.(*ast.Ident); ok {
+ check(id, "range var", initialisms)
+ }
+ if id, ok := v.Value.(*ast.Ident); ok {
+ check(id, "range var", initialisms)
+ }
+ case *ast.StructType:
+ for _, f := range v.Fields.List {
+ for _, id := range f.Names {
+ check(id, "struct field", initialisms)
+ }
}
}
- }
- }
-
- needle := []ast.Node{
- (*ast.AssignStmt)(nil),
- (*ast.FuncDecl)(nil),
- (*ast.GenDecl)(nil),
- (*ast.InterfaceType)(nil),
- (*ast.RangeStmt)(nil),
- (*ast.StructType)(nil),
+ return true
+ })
}
-
- code.Preorder(pass, fn, needle...)
return nil, nil
}
diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go
index 0df5fc8ff..152d3692d 100644
--- a/vendor/honnef.co/go/tools/unused/unused.go
+++ b/vendor/honnef.co/go/tools/unused/unused.go
@@ -11,11 +11,11 @@ import (
"sync/atomic"
"golang.org/x/tools/go/analysis"
- "honnef.co/go/tools/code"
"honnef.co/go/tools/go/types/typeutil"
- "honnef.co/go/tools/internal/passes/buildir"
- "honnef.co/go/tools/ir"
+ "honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/lint"
+ "honnef.co/go/tools/lint/lintdsl"
+ "honnef.co/go/tools/ssa"
)
// The graph we construct omits nodes along a path that do not
@@ -103,7 +103,7 @@ import (
from method to type.
- (8.3) All interface methods are marked as used, even if they never get
- called. This is to accommodate sum types (unexported interface
+ called. This is to accomodate sum types (unexported interface
method that must exist but never gets called.)
- (8.4) All embedded interfaces are marked as used. This is an
@@ -419,8 +419,8 @@ type pkg struct {
Pkg *types.Package
TypesInfo *types.Info
TypesSizes types.Sizes
- IR *ir.Package
- SrcFuncs []*ir.Function
+ SSA *ssa.Package
+ SrcFuncs []*ssa.Function
}
type Checker struct {
@@ -450,7 +450,7 @@ func (c *Checker) Analyzer() *analysis.Analyzer {
Name: name,
Doc: "Unused code",
Run: c.Run,
- Requires: []*analysis.Analyzer{buildir.Analyzer},
+ Requires: []*analysis.Analyzer{buildssa.Analyzer},
}
}
@@ -477,15 +477,15 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) {
c.initialPackages[pass.Pkg] = struct{}{}
c.mu.Unlock()
- irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR)
+ ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
pkg := &pkg{
Fset: pass.Fset,
Files: pass.Files,
Pkg: pass.Pkg,
TypesInfo: pass.TypesInfo,
TypesSizes: pass.TypesSizes,
- IR: irpkg.Pkg,
- SrcFuncs: irpkg.SrcFuncs,
+ SSA: ssapkg.Pkg,
+ SrcFuncs: ssapkg.SrcFuncs,
}
c.processPkg(c.graph, pkg)
@@ -638,9 +638,10 @@ func (c *Checker) results() []types.Object {
c.debugf("digraph{\n")
debugNode(c.graph.Root)
- for _, v := range c.graph.Nodes {
- debugNode(v)
- }
+ c.graph.Nodes.Range(func(k, v interface{}) bool {
+ debugNode(v.(*Node))
+ return true
+ })
c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) {
debugNode(value.(*Node))
})
@@ -654,9 +655,10 @@ func (c *Checker) results() []types.Object {
// don't flag its receiver. if a named type is unused, don't
// flag its methods.
- for _, v := range c.graph.Nodes {
- c.graph.quieten(v)
- }
+ c.graph.Nodes.Range(func(k, v interface{}) bool {
+ c.graph.quieten(v.(*Node))
+ return true
+ })
c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) {
c.graph.quieten(value.(*Node))
})
@@ -686,9 +688,10 @@ func (c *Checker) results() []types.Object {
}
c.debugf("n%d [color=gray];\n", node.id)
}
- for _, v := range c.graph.Nodes {
- report(v)
- }
+ c.graph.Nodes.Range(func(k, v interface{}) bool {
+ report(v.(*Node))
+ return true
+ })
c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) {
report(value.(*Node))
})
@@ -776,6 +779,8 @@ type Graph struct {
fset *token.FileSet
Root *Node
seenTypes typeutil.Map
+ Nodes sync.Map // map[interface{}]*Node
+ objNodes sync.Map // map[objNodeKey]*Node
// read-only
wholeProgram bool
@@ -783,8 +788,6 @@ type Graph struct {
// need synchronisation
mu sync.Mutex
TypeNodes typeutil.Map
- Nodes map[interface{}]*Node
- objNodes map[objNodeKey]*Node
}
type context struct {
@@ -793,13 +796,13 @@ type context struct {
seenFns map[string]struct{}
seenTypes *typeutil.Map
nodeCounter uint64
+
+ // local cache for the map in Graph
+ typeNodes typeutil.Map
}
func NewGraph() *Graph {
- g := &Graph{
- Nodes: map[interface{}]*Node{},
- objNodes: map[objNodeKey]*Node{},
- }
+ g := &Graph{}
g.Root = g.newNode(&context{}, nil)
return g
}
@@ -841,48 +844,49 @@ type Node struct {
}
func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) {
- g.mu.Lock()
- defer g.mu.Unlock()
- if node, ok := g.Nodes[obj]; ok {
- return node, true
+ if node, ok := g.Nodes.Load(obj); ok {
+ return node.(*Node), true
}
return nil, false
}
func (g *Graph) node(ctx *context, obj interface{}) (node *Node, new bool) {
- g.mu.Lock()
- defer g.mu.Unlock()
- switch obj := obj.(type) {
- case types.Type:
- if v := g.TypeNodes.At(obj); v != nil {
+ if t, ok := obj.(types.Type); ok {
+ if v := ctx.typeNodes.At(t); v != nil {
return v.(*Node), false
}
- node := g.newNode(ctx, obj)
- g.TypeNodes.Set(obj, node)
- return node, true
- case types.Object:
- if node, ok := g.Nodes[obj]; ok {
- return node, false
+ g.mu.Lock()
+ defer g.mu.Unlock()
+
+ if v := g.TypeNodes.At(t); v != nil {
+ return v.(*Node), false
}
+ node := g.newNode(ctx, t)
+ g.TypeNodes.Set(t, node)
+ ctx.typeNodes.Set(t, node)
+ return node, true
+ }
+ if node, ok := g.Nodes.Load(obj); ok {
+ return node.(*Node), false
+ }
+
+ if obj, ok := obj.(types.Object); ok {
key := objNodeKeyFor(g.fset, obj)
- if onode, ok := g.objNodes[key]; ok {
+ if o, ok := g.objNodes.Load(key); ok {
+ onode := o.(*Node)
return onode, false
}
node = g.newNode(ctx, obj)
- g.Nodes[obj] = node
- g.objNodes[key] = node
- return node, true
- default:
- if node, ok := g.Nodes[obj]; ok {
- return node, false
- }
-
- node = g.newNode(ctx, obj)
- g.Nodes[obj] = node
+ g.Nodes.Store(obj, node)
+ g.objNodes.Store(key, node)
return node, true
}
+
+ node = g.newNode(ctx, obj)
+ g.Nodes.Store(obj, node)
+ return node, true
}
func (g *Graph) newNode(ctx *context, obj interface{}) *Node {
@@ -1063,7 +1067,7 @@ func (g *Graph) entry(pkg *pkg) {
ctx.seenTypes = &typeutil.Map{}
}
- scopes := map[*types.Scope]*ir.Function{}
+ scopes := map[*types.Scope]*ssa.Function{}
for _, fn := range pkg.SrcFuncs {
if fn.Object() != nil {
scope := fn.Object().(*types.Func).Scope()
@@ -1083,12 +1087,12 @@ func (g *Graph) entry(pkg *pkg) {
// (1.8) packages use symbols linked via go:linkname
fields := strings.Fields(c.Text)
if len(fields) == 3 {
- if m, ok := pkg.IR.Members[fields[1]]; ok {
+ if m, ok := pkg.SSA.Members[fields[1]]; ok {
var obj types.Object
switch m := m.(type) {
- case *ir.Global:
+ case *ssa.Global:
obj = m.Object()
- case *ir.Function:
+ case *ssa.Function:
obj = m.Object()
default:
panic(fmt.Sprintf("unhandled type: %T", m))
@@ -1102,7 +1106,7 @@ func (g *Graph) entry(pkg *pkg) {
}
}
- surroundingFunc := func(obj types.Object) *ir.Function {
+ surroundingFunc := func(obj types.Object) *ssa.Function {
scope := obj.Parent()
for scope != nil {
if fn := scopes[scope]; fn != nil {
@@ -1113,10 +1117,10 @@ func (g *Graph) entry(pkg *pkg) {
return nil
}
- // IR form won't tell us about locally scoped types that aren't
+ // SSA form won't tell us about locally scoped types that aren't
// being used. Walk the list of Defs to get all named types.
//
- // IR form also won't tell us about constants; use Defs and Uses
+ // SSA form also won't tell us about constants; use Defs and Uses
// to determine which constants exist and which are being used.
for _, obj := range pkg.TypesInfo.Defs {
switch obj := obj.(type) {
@@ -1139,7 +1143,7 @@ func (g *Graph) entry(pkg *pkg) {
if fn.Object() != nil {
ctx.see(fn.Object())
}
- node := fn.Source()
+ node := fn.Syntax()
if node == nil {
continue
}
@@ -1217,7 +1221,7 @@ func (g *Graph) entry(pkg *pkg) {
case *ast.GenDecl:
switch n.Tok {
case token.CONST:
- groups := code.GroupSpecs(pkg.Fset, n.Specs)
+ groups := lintdsl.GroupSpecs(pkg.Fset, n.Specs)
for _, specs := range groups {
if len(specs) > 1 {
cg := &ConstGroup{}
@@ -1289,11 +1293,11 @@ func (g *Graph) entry(pkg *pkg) {
})
}
- for _, m := range pkg.IR.Members {
+ for _, m := range pkg.SSA.Members {
switch m := m.(type) {
- case *ir.NamedConst:
+ case *ssa.NamedConst:
// nothing to do, we collect all constants from Defs
- case *ir.Global:
+ case *ssa.Global:
if m.Object() != nil {
ctx.see(m.Object())
if g.trackExportedIdentifier(ctx, m.Object()) {
@@ -1301,7 +1305,7 @@ func (g *Graph) entry(pkg *pkg) {
ctx.use(m.Object(), nil, edgeExportedVariable)
}
}
- case *ir.Function:
+ case *ssa.Function:
mObj := owningObject(m)
if mObj != nil {
ctx.see(mObj)
@@ -1327,8 +1331,8 @@ func (g *Graph) entry(pkg *pkg) {
// (9.8) runtime functions that may be called from user code via the compiler
ctx.use(mObj, nil, edgeRuntimeFunction)
}
- if m.Source() != nil {
- doc := m.Source().(*ast.FuncDecl).Doc
+ if m.Syntax() != nil {
+ doc := m.Syntax().(*ast.FuncDecl).Doc
if doc != nil {
for _, cmt := range doc.List {
if strings.HasPrefix(cmt.Text, "//go:cgo_export_") {
@@ -1339,7 +1343,7 @@ func (g *Graph) entry(pkg *pkg) {
}
}
g.function(ctx, m)
- case *ir.Type:
+ case *ssa.Type:
if m.Object() != nil {
ctx.see(m.Object())
if g.trackExportedIdentifier(ctx, m.Object()) {
@@ -1379,7 +1383,7 @@ func (g *Graph) entry(pkg *pkg) {
// (8.0) handle interfaces
for _, t := range notIfaces {
- ms := pkg.IR.Prog.MethodSets.MethodSet(t)
+ ms := pkg.SSA.Prog.MethodSets.MethodSet(t)
for _, iface := range ifaces {
if sels, ok := g.implements(t, iface, ms); ok {
for _, sel := range sels {
@@ -1396,19 +1400,19 @@ func (g *Graph) useMethod(ctx *context, t types.Type, sel *types.Selection, by i
path := sel.Index()
assert(obj != nil)
if len(path) > 1 {
- base := code.Dereference(t).Underlying().(*types.Struct)
+ base := lintdsl.Dereference(t).Underlying().(*types.Struct)
for _, idx := range path[:len(path)-1] {
next := base.Field(idx)
// (6.3) structs use embedded fields that help implement interfaces
ctx.see(base)
ctx.seeAndUse(next, base, edgeProvidesMethod)
- base, _ = code.Dereference(next.Type()).Underlying().(*types.Struct)
+ base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct)
}
}
ctx.seeAndUse(obj, by, kind)
}
-func owningObject(fn *ir.Function) types.Object {
+func owningObject(fn *ssa.Function) types.Object {
if fn.Object() != nil {
return fn.Object()
}
@@ -1418,8 +1422,8 @@ func owningObject(fn *ir.Function) types.Object {
return nil
}
-func (g *Graph) function(ctx *context, fn *ir.Function) {
- if fn.Package() != nil && fn.Package() != ctx.pkg.IR {
+func (g *Graph) function(ctx *context, fn *ssa.Function) {
+ if fn.Package() != nil && fn.Package() != ctx.pkg.SSA {
return
}
@@ -1499,7 +1503,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) {
// the pointer type to get the full method set
T = types.NewPointer(T)
}
- ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(T)
+ ms := ctx.pkg.SSA.Prog.MethodSets.MethodSet(T)
for j := 0; j < ms.Len(); j++ {
if ms.At(j).Obj().Exported() {
// (6.4) structs use embedded fields that have exported methods (recursively)
@@ -1512,7 +1516,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) {
seen := map[*types.Struct]struct{}{}
var hasExportedField func(t types.Type) bool
hasExportedField = func(T types.Type) bool {
- t, ok := code.Dereference(T).Underlying().(*types.Struct)
+ t, ok := lintdsl.Dereference(T).Underlying().(*types.Struct)
if !ok {
return false
}
@@ -1561,7 +1565,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) {
// (2.1) named types use exported methods
ctx.use(t.Method(i), t, edgeExportedMethod)
}
- g.function(ctx, ctx.pkg.IR.Prog.FuncValue(t.Method(i)))
+ g.function(ctx, ctx.pkg.SSA.Prog.FuncValue(t.Method(i)))
}
g.typ(ctx, t.Underlying(), t)
@@ -1642,22 +1646,22 @@ func (g *Graph) signature(ctx *context, sig *types.Signature, fn types.Object) {
}
}
-func (g *Graph) instructions(ctx *context, fn *ir.Function) {
+func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
fnObj := owningObject(fn)
for _, b := range fn.Blocks {
for _, instr := range b.Instrs {
ops := instr.Operands(nil)
switch instr.(type) {
- case *ir.Store:
+ case *ssa.Store:
// (9.7) variable _reads_ use variables, writes do not
ops = ops[1:]
- case *ir.DebugRef:
+ case *ssa.DebugRef:
ops = nil
}
for _, arg := range ops {
- walkPhi(*arg, func(v ir.Value) {
+ walkPhi(*arg, func(v ssa.Value) {
switch v := v.(type) {
- case *ir.Function:
+ case *ssa.Function:
// (4.3) functions use closures and bound methods.
// (4.5) functions use functions they call
// (9.5) instructions use their operands
@@ -1666,11 +1670,11 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) {
ctx.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand)
}
g.function(ctx, v)
- case *ir.Const:
+ case *ssa.Const:
// (9.6) instructions use their operands' types
ctx.seeAndUse(v.Type(), fnObj, edgeType)
g.typ(ctx, v.Type(), nil)
- case *ir.Global:
+ case *ssa.Global:
if v.Object() != nil {
// (9.5) instructions use their operands
ctx.seeAndUse(v.Object(), fnObj, edgeInstructionOperand)
@@ -1678,8 +1682,8 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) {
}
})
}
- if v, ok := instr.(ir.Value); ok {
- if _, ok := v.(*ir.Range); !ok {
+ if v, ok := instr.(ssa.Value); ok {
+ if _, ok := v.(*ssa.Range); !ok {
// See https://github.com/golang/go/issues/19670
// (4.8) instructions use their types
@@ -1689,29 +1693,29 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) {
}
}
switch instr := instr.(type) {
- case *ir.Field:
+ case *ssa.Field:
st := instr.X.Type().Underlying().(*types.Struct)
field := st.Field(instr.Field)
// (4.7) functions use fields they access
ctx.seeAndUse(field, fnObj, edgeFieldAccess)
- case *ir.FieldAddr:
- st := code.Dereference(instr.X.Type()).Underlying().(*types.Struct)
+ case *ssa.FieldAddr:
+ st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct)
field := st.Field(instr.Field)
// (4.7) functions use fields they access
ctx.seeAndUse(field, fnObj, edgeFieldAccess)
- case *ir.Store:
+ case *ssa.Store:
// nothing to do, handled generically by operands
- case *ir.Call:
+ case *ssa.Call:
c := instr.Common()
if !c.IsInvoke() {
// handled generically as an instruction operand
if g.wholeProgram {
// (e3) special case known reflection-based method callers
- switch code.CallName(c) {
+ switch lintdsl.CallName(c) {
case "net/rpc.Register", "net/rpc.RegisterName", "(*net/rpc.Server).Register", "(*net/rpc.Server).RegisterName":
- var arg ir.Value
- switch code.CallName(c) {
+ var arg ssa.Value
+ switch lintdsl.CallName(c) {
case "net/rpc.Register":
arg = c.Args[0]
case "net/rpc.RegisterName":
@@ -1721,10 +1725,10 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) {
case "(*net/rpc.Server).RegisterName":
arg = c.Args[2]
}
- walkPhi(arg, func(v ir.Value) {
- if v, ok := v.(*ir.MakeInterface); ok {
- walkPhi(v.X, func(vv ir.Value) {
- ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(vv.Type())
+ walkPhi(arg, func(v ssa.Value) {
+ if v, ok := v.(*ssa.MakeInterface); ok {
+ walkPhi(v.X, func(vv ssa.Value) {
+ ms := ctx.pkg.SSA.Prog.MethodSets.MethodSet(vv.Type())
for i := 0; i < ms.Len(); i++ {
if ms.At(i).Obj().Exported() {
g.useMethod(ctx, vv.Type(), ms.At(i), fnObj, edgeNetRPCRegister)
@@ -1739,13 +1743,13 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) {
// (4.5) functions use functions/interface methods they call
ctx.seeAndUse(c.Method, fnObj, edgeInterfaceCall)
}
- case *ir.Return:
+ case *ssa.Return:
// nothing to do, handled generically by operands
- case *ir.ChangeType:
+ case *ssa.ChangeType:
// conversion type handled generically
- s1, ok1 := code.Dereference(instr.Type()).Underlying().(*types.Struct)
- s2, ok2 := code.Dereference(instr.X.Type()).Underlying().(*types.Struct)
+ s1, ok1 := lintdsl.Dereference(instr.Type()).Underlying().(*types.Struct)
+ s2, ok2 := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct)
if ok1 && ok2 {
// Converting between two structs. The fields are
// relevant for the conversion, but only if the
@@ -1764,13 +1768,13 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) {
ctx.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion)
}
}
- case *ir.MakeInterface:
+ case *ssa.MakeInterface:
// nothing to do, handled generically by operands
- case *ir.Slice:
+ case *ssa.Slice:
// nothing to do, handled generically by operands
- case *ir.RunDefers:
+ case *ssa.RunDefers:
// nothing to do, the deferred functions are already marked use by defering them.
- case *ir.Convert:
+ case *ssa.Convert:
// to unsafe.Pointer
if typ, ok := instr.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer {
if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok {
@@ -1793,79 +1797,61 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) {
}
}
}
- case *ir.TypeAssert:
+ case *ssa.TypeAssert:
// nothing to do, handled generically by instruction
// type (possibly a tuple, which contains the asserted
// to type). redundantly handled by the type of
- // ir.Extract, too
- case *ir.MakeClosure:
+ // ssa.Extract, too
+ case *ssa.MakeClosure:
// nothing to do, handled generically by operands
- case *ir.Alloc:
- // nothing to do
- case *ir.UnOp:
- // nothing to do
- case *ir.BinOp:
- // nothing to do
- case *ir.If:
- // nothing to do
- case *ir.Jump:
+ case *ssa.Alloc:
// nothing to do
- case *ir.Unreachable:
+ case *ssa.UnOp:
// nothing to do
- case *ir.IndexAddr:
+ case *ssa.BinOp:
// nothing to do
- case *ir.Extract:
+ case *ssa.If:
// nothing to do
- case *ir.Panic:
+ case *ssa.Jump:
// nothing to do
- case *ir.DebugRef:
+ case *ssa.IndexAddr:
// nothing to do
- case *ir.BlankStore:
+ case *ssa.Extract:
// nothing to do
- case *ir.Phi:
+ case *ssa.Panic:
// nothing to do
- case *ir.Sigma:
+ case *ssa.DebugRef:
// nothing to do
- case *ir.MakeMap:
+ case *ssa.BlankStore:
// nothing to do
- case *ir.MapUpdate:
+ case *ssa.Phi:
// nothing to do
- case *ir.MapLookup:
+ case *ssa.MakeMap:
// nothing to do
- case *ir.StringLookup:
+ case *ssa.MapUpdate:
// nothing to do
- case *ir.MakeSlice:
+ case *ssa.Lookup:
// nothing to do
- case *ir.Send:
+ case *ssa.MakeSlice:
// nothing to do
- case *ir.MakeChan:
+ case *ssa.Send:
// nothing to do
- case *ir.Range:
+ case *ssa.MakeChan:
// nothing to do
- case *ir.Next:
+ case *ssa.Range:
// nothing to do
- case *ir.Index:
+ case *ssa.Next:
// nothing to do
- case *ir.Select:
+ case *ssa.Index:
// nothing to do
- case *ir.ChangeInterface:
+ case *ssa.Select:
// nothing to do
- case *ir.Load:
- // nothing to do
- case *ir.Go:
- // nothing to do
- case *ir.Defer:
- // nothing to do
- case *ir.Parameter:
- // nothing to do
- case *ir.Const:
- // nothing to do
- case *ir.Recv:
- // nothing to do
- case *ir.TypeSwitch:
- // nothing to do
- case *ir.ConstantSwitch:
+ case *ssa.ChangeInterface:
// nothing to do
+ case *ssa.Go:
+ // nothing to do, handled generically by operands
+ case *ssa.Defer:
+ // nothing to do, handled generically by operands
default:
panic(fmt.Sprintf("unreachable: %T", instr))
}
@@ -1906,22 +1892,22 @@ func isNoCopyType(typ types.Type) bool {
return true
}
-func walkPhi(v ir.Value, fn func(v ir.Value)) {
- phi, ok := v.(*ir.Phi)
+func walkPhi(v ssa.Value, fn func(v ssa.Value)) {
+ phi, ok := v.(*ssa.Phi)
if !ok {
fn(v)
return
}
- seen := map[ir.Value]struct{}{}
- var impl func(v *ir.Phi)
- impl = func(v *ir.Phi) {
+ seen := map[ssa.Value]struct{}{}
+ var impl func(v *ssa.Phi)
+ impl = func(v *ssa.Phi) {
if _, ok := seen[v]; ok {
return
}
seen[v] = struct{}{}
for _, e := range v.Edges {
- if ev, ok := e.(*ir.Phi); ok {
+ if ev, ok := e.(*ssa.Phi); ok {
impl(ev)
} else {
fn(e)
diff --git a/vendor/honnef.co/go/tools/version/version.go b/vendor/honnef.co/go/tools/version/version.go
index a12f70fb4..468e8efd6 100644
--- a/vendor/honnef.co/go/tools/version/version.go
+++ b/vendor/honnef.co/go/tools/version/version.go
@@ -7,7 +7,7 @@ import (
"runtime"
)
-const Version = "devel"
+const Version = "2019.2.3"
// version returns a version descriptor and reports whether the
// version is a known release.
diff --git a/vendor/modules.txt b/vendor/modules.txt
new file mode 100644
index 000000000..a5edd91f2
--- /dev/null
+++ b/vendor/modules.txt
@@ -0,0 +1,229 @@
+# cloud.google.com/go v0.52.1-0.20200128230113-ad7cd32b39df
+## explicit
+cloud.google.com/go
+cloud.google.com/go/compute/metadata
+cloud.google.com/go/iam
+cloud.google.com/go/internal
+cloud.google.com/go/internal/optional
+cloud.google.com/go/internal/trace
+cloud.google.com/go/internal/version
+# cloud.google.com/go/storage v1.0.0
+## explicit
+cloud.google.com/go/storage
+# github.com/BurntSushi/toml v0.3.1
+## explicit
+github.com/BurntSushi/toml
+# github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e
+## explicit
+github.com/golang/groupcache/lru
+# github.com/golang/protobuf v1.3.3
+## explicit
+github.com/golang/protobuf/proto
+github.com/golang/protobuf/protoc-gen-go
+github.com/golang/protobuf/protoc-gen-go/descriptor
+github.com/golang/protobuf/protoc-gen-go/generator
+github.com/golang/protobuf/protoc-gen-go/generator/internal/remap
+github.com/golang/protobuf/protoc-gen-go/grpc
+github.com/golang/protobuf/protoc-gen-go/plugin
+github.com/golang/protobuf/ptypes
+github.com/golang/protobuf/ptypes/any
+github.com/golang/protobuf/ptypes/duration
+github.com/golang/protobuf/ptypes/timestamp
+# github.com/google/go-cmp v0.4.0
+## explicit
+github.com/google/go-cmp/cmp
+github.com/google/go-cmp/cmp/internal/diff
+github.com/google/go-cmp/cmp/internal/flags
+github.com/google/go-cmp/cmp/internal/function
+github.com/google/go-cmp/cmp/internal/value
+# github.com/googleapis/gax-go v1.0.4-0.20191018151119-b443e5a67ec8
+## explicit
+# github.com/googleapis/gax-go/v2 v2.0.5
+github.com/googleapis/gax-go/v2
+# github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6
+## explicit
+github.com/ianlancetaylor/demangle
+# github.com/jstemmer/go-junit-report v0.9.2-0.20191008195320-984a47ca6b0a
+## explicit
+github.com/jstemmer/go-junit-report
+github.com/jstemmer/go-junit-report/formatter
+github.com/jstemmer/go-junit-report/parser
+# go.opencensus.io v0.22.3-0.20200113180412-d851005f548f
+## explicit
+go.opencensus.io
+go.opencensus.io/internal
+go.opencensus.io/internal/tagencoding
+go.opencensus.io/metric/metricdata
+go.opencensus.io/metric/metricproducer
+go.opencensus.io/plugin/ochttp
+go.opencensus.io/plugin/ochttp/propagation/b3
+go.opencensus.io/resource
+go.opencensus.io/stats
+go.opencensus.io/stats/internal
+go.opencensus.io/stats/view
+go.opencensus.io/tag
+go.opencensus.io/trace
+go.opencensus.io/trace/internal
+go.opencensus.io/trace/propagation
+go.opencensus.io/trace/tracestate
+# golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a
+## explicit
+golang.org/x/exp/apidiff
+golang.org/x/exp/cmd/apidiff
+# golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f
+## explicit
+golang.org/x/lint
+golang.org/x/lint/golint
+# golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa
+## explicit
+golang.org/x/net/context
+golang.org/x/net/context/ctxhttp
+golang.org/x/net/http/httpguts
+golang.org/x/net/http2
+golang.org/x/net/http2/hpack
+golang.org/x/net/idna
+golang.org/x/net/internal/timeseries
+golang.org/x/net/trace
+# golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d
+## explicit
+golang.org/x/oauth2
+golang.org/x/oauth2/google
+golang.org/x/oauth2/internal
+golang.org/x/oauth2/jws
+golang.org/x/oauth2/jwt
+# golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae
+## explicit
+golang.org/x/sys/internal/unsafeheader
+golang.org/x/sys/unix
+# golang.org/x/text v0.3.3-0.20191230102452-929e72ca90de
+## explicit
+golang.org/x/text/secure/bidirule
+golang.org/x/text/transform
+golang.org/x/text/unicode/bidi
+golang.org/x/text/unicode/norm
+# golang.org/x/tools v0.0.0-20200128002243-345141a36859
+## explicit
+golang.org/x/tools/cmd/goimports
+golang.org/x/tools/go/analysis
+golang.org/x/tools/go/analysis/passes/inspect
+golang.org/x/tools/go/ast/astutil
+golang.org/x/tools/go/ast/inspector
+golang.org/x/tools/go/buildutil
+golang.org/x/tools/go/gcexportdata
+golang.org/x/tools/go/internal/gcimporter
+golang.org/x/tools/go/internal/packagesdriver
+golang.org/x/tools/go/packages
+golang.org/x/tools/go/types/objectpath
+golang.org/x/tools/go/types/typeutil
+golang.org/x/tools/internal/fastwalk
+golang.org/x/tools/internal/gopathwalk
+golang.org/x/tools/internal/imports
+golang.org/x/tools/internal/module
+golang.org/x/tools/internal/packagesinternal
+golang.org/x/tools/internal/semver
+# google.golang.org/api v0.15.1-0.20200128000756-b6b5836d7694
+## explicit
+google.golang.org/api/compute/v0.beta
+google.golang.org/api/googleapi
+google.golang.org/api/googleapi/transport
+google.golang.org/api/internal
+google.golang.org/api/internal/gensupport
+google.golang.org/api/internal/third_party/uritemplates
+google.golang.org/api/iterator
+google.golang.org/api/option
+google.golang.org/api/storage/v1
+google.golang.org/api/transport/http
+google.golang.org/api/transport/http/internal/propagation
+# google.golang.org/appengine v1.6.6-0.20191219230319-b6ce0843b556
+## explicit
+google.golang.org/appengine
+google.golang.org/appengine/aetest
+google.golang.org/appengine/datastore
+google.golang.org/appengine/datastore/internal/cloudkey
+google.golang.org/appengine/datastore/internal/cloudpb
+google.golang.org/appengine/internal
+google.golang.org/appengine/internal/app_identity
+google.golang.org/appengine/internal/base
+google.golang.org/appengine/internal/datastore
+google.golang.org/appengine/internal/log
+google.golang.org/appengine/internal/mail
+google.golang.org/appengine/internal/modules
+google.golang.org/appengine/internal/remote_api
+google.golang.org/appengine/internal/urlfetch
+google.golang.org/appengine/internal/user
+google.golang.org/appengine/log
+google.golang.org/appengine/mail
+google.golang.org/appengine/urlfetch
+google.golang.org/appengine/user
+# google.golang.org/genproto v0.0.0-20200128133413-58ce757ed39b
+## explicit
+google.golang.org/genproto/googleapis/api/annotations
+google.golang.org/genproto/googleapis/iam/v1
+google.golang.org/genproto/googleapis/rpc/code
+google.golang.org/genproto/googleapis/rpc/status
+google.golang.org/genproto/googleapis/type/expr
+# google.golang.org/grpc v1.28.0-pre.0.20200128222427-f97821dd2fec
+## explicit
+google.golang.org/grpc
+google.golang.org/grpc/attributes
+google.golang.org/grpc/backoff
+google.golang.org/grpc/balancer
+google.golang.org/grpc/balancer/base
+google.golang.org/grpc/balancer/roundrobin
+google.golang.org/grpc/binarylog/grpc_binarylog_v1
+google.golang.org/grpc/codes
+google.golang.org/grpc/connectivity
+google.golang.org/grpc/credentials
+google.golang.org/grpc/credentials/internal
+google.golang.org/grpc/encoding
+google.golang.org/grpc/encoding/proto
+google.golang.org/grpc/grpclog
+google.golang.org/grpc/internal
+google.golang.org/grpc/internal/backoff
+google.golang.org/grpc/internal/balancerload
+google.golang.org/grpc/internal/binarylog
+google.golang.org/grpc/internal/buffer
+google.golang.org/grpc/internal/channelz
+google.golang.org/grpc/internal/envconfig
+google.golang.org/grpc/internal/grpcrand
+google.golang.org/grpc/internal/grpcsync
+google.golang.org/grpc/internal/resolver/dns
+google.golang.org/grpc/internal/resolver/passthrough
+google.golang.org/grpc/internal/syscall
+google.golang.org/grpc/internal/transport
+google.golang.org/grpc/keepalive
+google.golang.org/grpc/metadata
+google.golang.org/grpc/naming
+google.golang.org/grpc/peer
+google.golang.org/grpc/resolver
+google.golang.org/grpc/serviceconfig
+google.golang.org/grpc/stats
+google.golang.org/grpc/status
+google.golang.org/grpc/tap
+# honnef.co/go/tools v0.0.1-2019.2.3
+## explicit
+honnef.co/go/tools/arg
+honnef.co/go/tools/cmd/staticcheck
+honnef.co/go/tools/config
+honnef.co/go/tools/deprecated
+honnef.co/go/tools/facts
+honnef.co/go/tools/functions
+honnef.co/go/tools/go/types/typeutil
+honnef.co/go/tools/internal/cache
+honnef.co/go/tools/internal/passes/buildssa
+honnef.co/go/tools/internal/renameio
+honnef.co/go/tools/internal/sharedcheck
+honnef.co/go/tools/lint
+honnef.co/go/tools/lint/lintdsl
+honnef.co/go/tools/lint/lintutil
+honnef.co/go/tools/lint/lintutil/format
+honnef.co/go/tools/loader
+honnef.co/go/tools/printf
+honnef.co/go/tools/simple
+honnef.co/go/tools/ssa
+honnef.co/go/tools/ssautil
+honnef.co/go/tools/staticcheck
+honnef.co/go/tools/staticcheck/vrp
+honnef.co/go/tools/stylecheck
+honnef.co/go/tools/unused
+honnef.co/go/tools/version