From 23b8c7b4eab0375b3d59cf4b2a1f3d7356515f95 Mon Sep 17 00:00:00 2001 From: Joseph Richey Date: Sun, 11 Feb 2018 20:34:07 -0800 Subject: [PATCH] vendor: include source for tools This change vendors the source for all our build, formatting, and linting tools. Generated by running "dep ensure". --- Gopkg.lock | 69 +- vendor/github.com/golang/lint/LICENSE | 27 + .../github.com/golang/lint/golint/golint.go | 159 + .../github.com/golang/lint/golint/import.go | 310 + vendor/github.com/golang/lint/lint.go | 1697 +++ .../protoc-gen-go/descriptor/descriptor.pb.go | 2215 ++++ .../golang/protobuf/protoc-gen-go/doc.go | 51 + .../protoc-gen-go/generator/generator.go | 2866 +++++ .../protobuf/protoc-gen-go/grpc/grpc.go | 463 + .../protobuf/protoc-gen-go/link_grpc.go | 34 + .../golang/protobuf/protoc-gen-go/main.go | 98 + .../protoc-gen-go/plugin/plugin.pb.go | 293 + vendor/github.com/kisielk/gotool/LEGAL | 32 + vendor/github.com/kisielk/gotool/LICENSE | 20 + vendor/github.com/kisielk/gotool/go13.go | 15 + vendor/github.com/kisielk/gotool/go14-15.go | 15 + vendor/github.com/kisielk/gotool/go16-18.go | 15 + .../kisielk/gotool/internal/load/path.go | 27 + .../kisielk/gotool/internal/load/pkg.go | 25 + .../kisielk/gotool/internal/load/search.go | 354 + vendor/github.com/kisielk/gotool/match.go | 56 + vendor/github.com/kisielk/gotool/match18.go | 317 + vendor/github.com/kisielk/gotool/tool.go | 48 + vendor/github.com/wadey/gocovmerge/LICENSE | 22 + .../github.com/wadey/gocovmerge/gocovmerge.go | 111 + vendor/golang.org/x/tools/AUTHORS | 3 + vendor/golang.org/x/tools/CONTRIBUTORS | 3 + vendor/golang.org/x/tools/LICENSE | 27 + vendor/golang.org/x/tools/PATENTS | 22 + vendor/golang.org/x/tools/cmd/getgo/LICENSE | 27 + .../golang.org/x/tools/cmd/goimports/doc.go | 45 + .../x/tools/cmd/goimports/goimports.go | 369 + .../x/tools/cmd/goimports/goimports_gc.go | 26 + .../x/tools/cmd/goimports/goimports_not_gc.go | 11 + vendor/golang.org/x/tools/cover/profile.go | 213 + .../x/tools/go/ast/astutil/enclosing.go | 627 ++ .../x/tools/go/ast/astutil/imports.go | 470 + .../x/tools/go/ast/astutil/rewrite.go | 477 + .../golang.org/x/tools/go/ast/astutil/util.go | 14 + .../x/tools/go/buildutil/allpackages.go | 198 + .../x/tools/go/buildutil/fakecontext.go | 108 + .../x/tools/go/buildutil/overlay.go | 103 + .../golang.org/x/tools/go/buildutil/tags.go | 75 + .../golang.org/x/tools/go/buildutil/util.go | 212 + .../x/tools/go/gcexportdata/gcexportdata.go | 100 + .../x/tools/go/gcexportdata/importer.go | 73 + .../x/tools/go/gcexportdata/main.go | 81 + .../x/tools/go/gcimporter15/bexport.go | 828 ++ .../x/tools/go/gcimporter15/bimport.go | 993 ++ .../x/tools/go/gcimporter15/exportdata.go | 93 + .../x/tools/go/gcimporter15/gcimporter.go | 1041 ++ .../x/tools/go/gcimporter15/isAlias18.go | 13 + .../x/tools/go/gcimporter15/isAlias19.go | 13 + vendor/golang.org/x/tools/go/loader/cgo.go | 207 + .../x/tools/go/loader/cgo_pkgconfig.go | 39 + vendor/golang.org/x/tools/go/loader/doc.go | 205 + vendor/golang.org/x/tools/go/loader/loader.go | 1077 ++ vendor/golang.org/x/tools/go/loader/util.go | 124 + .../x/tools/go/types/typeutil/imports.go | 31 + .../x/tools/go/types/typeutil/map.go | 313 + .../tools/go/types/typeutil/methodsetcache.go | 72 + .../x/tools/go/types/typeutil/ui.go | 52 + vendor/golang.org/x/tools/imports/fastwalk.go | 187 + .../x/tools/imports/fastwalk_dirent_fileno.go | 13 + .../x/tools/imports/fastwalk_dirent_ino.go | 14 + .../x/tools/imports/fastwalk_portable.go | 29 + .../x/tools/imports/fastwalk_unix.go | 123 + 
vendor/golang.org/x/tools/imports/fix.go | 1082 ++ vendor/golang.org/x/tools/imports/imports.go | 297 + vendor/golang.org/x/tools/imports/mkindex.go | 173 + vendor/golang.org/x/tools/imports/mkstdlib.go | 107 + .../golang.org/x/tools/imports/sortimports.go | 212 + vendor/golang.org/x/tools/imports/zstdlib.go | 9734 +++++++++++++++++ .../x/tools/third_party/moduleloader/LICENSE | 22 + .../x/tools/third_party/typescript/LICENSE | 55 + .../x/tools/third_party/webcomponents/LICENSE | 27 + vendor/honnef.co/go/tools/LICENSE | 20 + .../honnef.co/go/tools/callgraph/callgraph.go | 129 + .../go/tools/callgraph/static/static.go | 35 + vendor/honnef.co/go/tools/callgraph/util.go | 181 + .../go/tools/cmd/megacheck/megacheck.go | 122 + .../honnef.co/go/tools/deprecated/stdlib.go | 54 + .../honnef.co/go/tools/functions/concrete.go | 56 + .../honnef.co/go/tools/functions/functions.go | 150 + vendor/honnef.co/go/tools/functions/loops.go | 50 + vendor/honnef.co/go/tools/functions/pure.go | 123 + .../go/tools/functions/terminates.go | 24 + vendor/honnef.co/go/tools/gcsizes/LICENSE | 27 + .../go/tools/internal/sharedcheck/lint.go | 70 + vendor/honnef.co/go/tools/lint/LICENSE | 28 + vendor/honnef.co/go/tools/lint/lint.go | 844 ++ .../honnef.co/go/tools/lint/lintutil/util.go | 349 + vendor/honnef.co/go/tools/simple/lint.go | 1771 +++ vendor/honnef.co/go/tools/simple/lint17.go | 7 + vendor/honnef.co/go/tools/simple/lint18.go | 7 + vendor/honnef.co/go/tools/ssa/LICENSE | 28 + vendor/honnef.co/go/tools/ssa/blockopt.go | 195 + vendor/honnef.co/go/tools/ssa/builder.go | 2383 ++++ vendor/honnef.co/go/tools/ssa/const.go | 171 + vendor/honnef.co/go/tools/ssa/const15.go | 171 + vendor/honnef.co/go/tools/ssa/create.go | 263 + vendor/honnef.co/go/tools/ssa/doc.go | 123 + vendor/honnef.co/go/tools/ssa/dom.go | 341 + vendor/honnef.co/go/tools/ssa/emit.go | 475 + vendor/honnef.co/go/tools/ssa/func.go | 703 ++ vendor/honnef.co/go/tools/ssa/identical.go | 7 + vendor/honnef.co/go/tools/ssa/identical_17.go | 7 + vendor/honnef.co/go/tools/ssa/lift.go | 608 + vendor/honnef.co/go/tools/ssa/lvalue.go | 125 + vendor/honnef.co/go/tools/ssa/methods.go | 241 + vendor/honnef.co/go/tools/ssa/mode.go | 100 + vendor/honnef.co/go/tools/ssa/print.go | 433 + vendor/honnef.co/go/tools/ssa/sanity.go | 523 + vendor/honnef.co/go/tools/ssa/source.go | 299 + vendor/honnef.co/go/tools/ssa/ssa.go | 1751 +++ vendor/honnef.co/go/tools/ssa/ssautil/load.go | 97 + .../honnef.co/go/tools/ssa/ssautil/switch.go | 236 + .../honnef.co/go/tools/ssa/ssautil/visit.go | 79 + vendor/honnef.co/go/tools/ssa/testmain.go | 266 + vendor/honnef.co/go/tools/ssa/util.go | 121 + vendor/honnef.co/go/tools/ssa/wrappers.go | 296 + vendor/honnef.co/go/tools/ssa/write.go | 5 + .../go/tools/staticcheck/buildtag.go | 21 + vendor/honnef.co/go/tools/staticcheck/lint.go | 2738 +++++ .../honnef.co/go/tools/staticcheck/rules.go | 321 + .../go/tools/staticcheck/vrp/channel.go | 73 + .../honnef.co/go/tools/staticcheck/vrp/int.go | 476 + .../go/tools/staticcheck/vrp/slice.go | 273 + .../go/tools/staticcheck/vrp/string.go | 258 + .../honnef.co/go/tools/staticcheck/vrp/vrp.go | 1049 ++ vendor/honnef.co/go/tools/unused/unused.go | 1064 ++ vendor/honnef.co/go/tools/version/version.go | 17 + 132 files changed, 50182 insertions(+), 1 deletion(-) create mode 100644 vendor/github.com/golang/lint/LICENSE create mode 100644 vendor/github.com/golang/lint/golint/golint.go create mode 100644 vendor/github.com/golang/lint/golint/import.go create mode 100644 vendor/github.com/golang/lint/lint.go create 
mode 100644 vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go create mode 100644 vendor/github.com/golang/protobuf/protoc-gen-go/doc.go create mode 100644 vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go create mode 100644 vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go create mode 100644 vendor/github.com/golang/protobuf/protoc-gen-go/link_grpc.go create mode 100644 vendor/github.com/golang/protobuf/protoc-gen-go/main.go create mode 100644 vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go create mode 100644 vendor/github.com/kisielk/gotool/LEGAL create mode 100644 vendor/github.com/kisielk/gotool/LICENSE create mode 100644 vendor/github.com/kisielk/gotool/go13.go create mode 100644 vendor/github.com/kisielk/gotool/go14-15.go create mode 100644 vendor/github.com/kisielk/gotool/go16-18.go create mode 100644 vendor/github.com/kisielk/gotool/internal/load/path.go create mode 100644 vendor/github.com/kisielk/gotool/internal/load/pkg.go create mode 100644 vendor/github.com/kisielk/gotool/internal/load/search.go create mode 100644 vendor/github.com/kisielk/gotool/match.go create mode 100644 vendor/github.com/kisielk/gotool/match18.go create mode 100644 vendor/github.com/kisielk/gotool/tool.go create mode 100644 vendor/github.com/wadey/gocovmerge/LICENSE create mode 100644 vendor/github.com/wadey/gocovmerge/gocovmerge.go create mode 100644 vendor/golang.org/x/tools/AUTHORS create mode 100644 vendor/golang.org/x/tools/CONTRIBUTORS create mode 100644 vendor/golang.org/x/tools/LICENSE create mode 100644 vendor/golang.org/x/tools/PATENTS create mode 100644 vendor/golang.org/x/tools/cmd/getgo/LICENSE create mode 100644 vendor/golang.org/x/tools/cmd/goimports/doc.go create mode 100644 vendor/golang.org/x/tools/cmd/goimports/goimports.go create mode 100644 vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go create mode 100644 vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go create mode 100644 vendor/golang.org/x/tools/cover/profile.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/enclosing.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/imports.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/rewrite.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/util.go create mode 100644 vendor/golang.org/x/tools/go/buildutil/allpackages.go create mode 100644 vendor/golang.org/x/tools/go/buildutil/fakecontext.go create mode 100644 vendor/golang.org/x/tools/go/buildutil/overlay.go create mode 100644 vendor/golang.org/x/tools/go/buildutil/tags.go create mode 100644 vendor/golang.org/x/tools/go/buildutil/util.go create mode 100644 vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go create mode 100644 vendor/golang.org/x/tools/go/gcexportdata/importer.go create mode 100644 vendor/golang.org/x/tools/go/gcexportdata/main.go create mode 100644 vendor/golang.org/x/tools/go/gcimporter15/bexport.go create mode 100644 vendor/golang.org/x/tools/go/gcimporter15/bimport.go create mode 100644 vendor/golang.org/x/tools/go/gcimporter15/exportdata.go create mode 100644 vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go create mode 100644 vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go create mode 100644 vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go create mode 100644 vendor/golang.org/x/tools/go/loader/cgo.go create mode 100644 vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go create mode 100644 vendor/golang.org/x/tools/go/loader/doc.go create mode 100644 
vendor/golang.org/x/tools/go/loader/loader.go create mode 100644 vendor/golang.org/x/tools/go/loader/util.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/imports.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/map.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/ui.go create mode 100644 vendor/golang.org/x/tools/imports/fastwalk.go create mode 100644 vendor/golang.org/x/tools/imports/fastwalk_dirent_fileno.go create mode 100644 vendor/golang.org/x/tools/imports/fastwalk_dirent_ino.go create mode 100644 vendor/golang.org/x/tools/imports/fastwalk_portable.go create mode 100644 vendor/golang.org/x/tools/imports/fastwalk_unix.go create mode 100644 vendor/golang.org/x/tools/imports/fix.go create mode 100644 vendor/golang.org/x/tools/imports/imports.go create mode 100644 vendor/golang.org/x/tools/imports/mkindex.go create mode 100644 vendor/golang.org/x/tools/imports/mkstdlib.go create mode 100644 vendor/golang.org/x/tools/imports/sortimports.go create mode 100644 vendor/golang.org/x/tools/imports/zstdlib.go create mode 100644 vendor/golang.org/x/tools/third_party/moduleloader/LICENSE create mode 100644 vendor/golang.org/x/tools/third_party/typescript/LICENSE create mode 100644 vendor/golang.org/x/tools/third_party/webcomponents/LICENSE create mode 100644 vendor/honnef.co/go/tools/LICENSE create mode 100644 vendor/honnef.co/go/tools/callgraph/callgraph.go create mode 100644 vendor/honnef.co/go/tools/callgraph/static/static.go create mode 100644 vendor/honnef.co/go/tools/callgraph/util.go create mode 100644 vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go create mode 100644 vendor/honnef.co/go/tools/deprecated/stdlib.go create mode 100644 vendor/honnef.co/go/tools/functions/concrete.go create mode 100644 vendor/honnef.co/go/tools/functions/functions.go create mode 100644 vendor/honnef.co/go/tools/functions/loops.go create mode 100644 vendor/honnef.co/go/tools/functions/pure.go create mode 100644 vendor/honnef.co/go/tools/functions/terminates.go create mode 100644 vendor/honnef.co/go/tools/gcsizes/LICENSE create mode 100644 vendor/honnef.co/go/tools/internal/sharedcheck/lint.go create mode 100644 vendor/honnef.co/go/tools/lint/LICENSE create mode 100644 vendor/honnef.co/go/tools/lint/lint.go create mode 100644 vendor/honnef.co/go/tools/lint/lintutil/util.go create mode 100644 vendor/honnef.co/go/tools/simple/lint.go create mode 100644 vendor/honnef.co/go/tools/simple/lint17.go create mode 100644 vendor/honnef.co/go/tools/simple/lint18.go create mode 100644 vendor/honnef.co/go/tools/ssa/LICENSE create mode 100644 vendor/honnef.co/go/tools/ssa/blockopt.go create mode 100644 vendor/honnef.co/go/tools/ssa/builder.go create mode 100644 vendor/honnef.co/go/tools/ssa/const.go create mode 100644 vendor/honnef.co/go/tools/ssa/const15.go create mode 100644 vendor/honnef.co/go/tools/ssa/create.go create mode 100644 vendor/honnef.co/go/tools/ssa/doc.go create mode 100644 vendor/honnef.co/go/tools/ssa/dom.go create mode 100644 vendor/honnef.co/go/tools/ssa/emit.go create mode 100644 vendor/honnef.co/go/tools/ssa/func.go create mode 100644 vendor/honnef.co/go/tools/ssa/identical.go create mode 100644 vendor/honnef.co/go/tools/ssa/identical_17.go create mode 100644 vendor/honnef.co/go/tools/ssa/lift.go create mode 100644 vendor/honnef.co/go/tools/ssa/lvalue.go create mode 100644 vendor/honnef.co/go/tools/ssa/methods.go create mode 100644 vendor/honnef.co/go/tools/ssa/mode.go 
create mode 100644 vendor/honnef.co/go/tools/ssa/print.go create mode 100644 vendor/honnef.co/go/tools/ssa/sanity.go create mode 100644 vendor/honnef.co/go/tools/ssa/source.go create mode 100644 vendor/honnef.co/go/tools/ssa/ssa.go create mode 100644 vendor/honnef.co/go/tools/ssa/ssautil/load.go create mode 100644 vendor/honnef.co/go/tools/ssa/ssautil/switch.go create mode 100644 vendor/honnef.co/go/tools/ssa/ssautil/visit.go create mode 100644 vendor/honnef.co/go/tools/ssa/testmain.go create mode 100644 vendor/honnef.co/go/tools/ssa/util.go create mode 100644 vendor/honnef.co/go/tools/ssa/wrappers.go create mode 100644 vendor/honnef.co/go/tools/ssa/write.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/buildtag.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/lint.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/rules.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/vrp/channel.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/vrp/int.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/vrp/slice.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/vrp/string.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go create mode 100644 vendor/honnef.co/go/tools/unused/unused.go create mode 100644 vendor/honnef.co/go/tools/version/version.go diff --git a/Gopkg.lock b/Gopkg.lock index 44d6e66..2a5953a 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -1,16 +1,39 @@ # This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. +[[projects]] + branch = "master" + name = "github.com/golang/lint" + packages = [ + ".", + "golint" + ] + revision = "e14d9b0f1d332b1420c1ffa32562ad2dc84d645d" + [[projects]] name = "github.com/golang/protobuf" packages = [ "jsonpb", "proto", + "protoc-gen-go", + "protoc-gen-go/descriptor", + "protoc-gen-go/generator", + "protoc-gen-go/grpc", + "protoc-gen-go/plugin", "ptypes/struct" ] revision = "925541529c1fa6821df4e44ce2723319eb2be768" version = "v1.0.0" +[[projects]] + branch = "master" + name = "github.com/kisielk/gotool" + packages = [ + ".", + "internal/load" + ] + revision = "d6ce6262d87e3a4e153e86023ff56ae771554a41" + [[projects]] name = "github.com/pkg/errors" packages = ["."] @@ -23,6 +46,12 @@ revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1" version = "v1.20.0" +[[projects]] + branch = "master" + name = "github.com/wadey/gocovmerge" + packages = ["."] + revision = "b5bfa59ec0adc420475f97f89b58045c721d761c" + [[projects]] branch = "master" name = "golang.org/x/crypto" @@ -43,9 +72,47 @@ ] revision = "37707fdb30a5b38865cfb95e5aab41707daec7fd" +[[projects]] + branch = "master" + name = "golang.org/x/tools" + packages = [ + "cmd/goimports", + "cover", + "go/ast/astutil", + "go/buildutil", + "go/gcexportdata", + "go/gcimporter15", + "go/loader", + "go/types/typeutil", + "imports" + ] + revision = "95b47aa5df4eda9bfbc0133f77c0e320f0275eba" + +[[projects]] + name = "honnef.co/go/tools" + packages = [ + "callgraph", + "callgraph/static", + "cmd/megacheck", + "deprecated", + "functions", + "internal/sharedcheck", + "lint", + "lint/lintutil", + "simple", + "ssa", + "ssa/ssautil", + "staticcheck", + "staticcheck/vrp", + "unused", + "version" + ] + revision = "d73ab98e7c39fdcf9ba65062e43d34310f198353" + version = "2017.2.2" + [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "bc88b18a26bdcbfb3778570bf06f7d44787a463c6a64c5cded849c5ed941fb52" + inputs-digest = "65c9e40abcc4d3679513c43ba962f87115e94ced6497ac13e9ef5685815c7da8" solver-name = 
"gps-cdcl" solver-version = 1 diff --git a/vendor/github.com/golang/lint/LICENSE b/vendor/github.com/golang/lint/LICENSE new file mode 100644 index 0000000..65d761b --- /dev/null +++ b/vendor/github.com/golang/lint/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golang/lint/golint/golint.go b/vendor/github.com/golang/lint/golint/golint.go new file mode 100644 index 0000000..d8360ad --- /dev/null +++ b/vendor/github.com/golang/lint/golint/golint.go @@ -0,0 +1,159 @@ +// Copyright (c) 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file or at +// https://developers.google.com/open-source/licenses/bsd. + +// golint lints the Go source files named on its command line. +package main + +import ( + "flag" + "fmt" + "go/build" + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/golang/lint" +) + +var ( + minConfidence = flag.Float64("min_confidence", 0.8, "minimum confidence of a problem to print it") + setExitStatus = flag.Bool("set_exit_status", false, "set exit status to 1 if any issues are found") + suggestions int +) + +func usage() { + fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) + fmt.Fprintf(os.Stderr, "\tgolint [flags] # runs on package in current directory\n") + fmt.Fprintf(os.Stderr, "\tgolint [flags] [packages]\n") + fmt.Fprintf(os.Stderr, "\tgolint [flags] [directories] # where a '/...' suffix includes all sub-directories\n") + fmt.Fprintf(os.Stderr, "\tgolint [flags] [files] # all must belong to a single package\n") + fmt.Fprintf(os.Stderr, "Flags:\n") + flag.PrintDefaults() +} + +func main() { + flag.Usage = usage + flag.Parse() + + if flag.NArg() == 0 { + lintDir(".") + } else { + // dirsRun, filesRun, and pkgsRun indicate whether golint is applied to + // directory, file or package targets. The distinction affects which + // checks are run. It is no valid to mix target types. 
+ var dirsRun, filesRun, pkgsRun int + var args []string + for _, arg := range flag.Args() { + if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) { + dirsRun = 1 + for _, dirname := range allPackagesInFS(arg) { + args = append(args, dirname) + } + } else if isDir(arg) { + dirsRun = 1 + args = append(args, arg) + } else if exists(arg) { + filesRun = 1 + args = append(args, arg) + } else { + pkgsRun = 1 + args = append(args, arg) + } + } + + if dirsRun+filesRun+pkgsRun != 1 { + usage() + os.Exit(2) + } + switch { + case dirsRun == 1: + for _, dir := range args { + lintDir(dir) + } + case filesRun == 1: + lintFiles(args...) + case pkgsRun == 1: + for _, pkg := range importPaths(args) { + lintPackage(pkg) + } + } + } + + if *setExitStatus && suggestions > 0 { + fmt.Fprintf(os.Stderr, "Found %d lint suggestions; failing.\n", suggestions) + os.Exit(1) + } +} + +func isDir(filename string) bool { + fi, err := os.Stat(filename) + return err == nil && fi.IsDir() +} + +func exists(filename string) bool { + _, err := os.Stat(filename) + return err == nil +} + +func lintFiles(filenames ...string) { + files := make(map[string][]byte) + for _, filename := range filenames { + src, err := ioutil.ReadFile(filename) + if err != nil { + fmt.Fprintln(os.Stderr, err) + continue + } + files[filename] = src + } + + l := new(lint.Linter) + ps, err := l.LintFiles(files) + if err != nil { + fmt.Fprintf(os.Stderr, "%v\n", err) + return + } + for _, p := range ps { + if p.Confidence >= *minConfidence { + fmt.Printf("%v: %s\n", p.Position, p.Text) + suggestions++ + } + } +} + +func lintDir(dirname string) { + pkg, err := build.ImportDir(dirname, 0) + lintImportedPackage(pkg, err) +} + +func lintPackage(pkgname string) { + pkg, err := build.Import(pkgname, ".", 0) + lintImportedPackage(pkg, err) +} + +func lintImportedPackage(pkg *build.Package, err error) { + if err != nil { + if _, nogo := err.(*build.NoGoError); nogo { + // Don't complain if the failure is due to no Go source files. + return + } + fmt.Fprintln(os.Stderr, err) + return + } + + var files []string + files = append(files, pkg.GoFiles...) + files = append(files, pkg.CgoFiles...) + files = append(files, pkg.TestGoFiles...) + if pkg.Dir != "." { + for i, f := range files { + files[i] = filepath.Join(pkg.Dir, f) + } + } + // TODO(dsymonds): Do foo_test too (pkg.XTestGoFiles) + + lintFiles(files...) +} diff --git a/vendor/github.com/golang/lint/golint/import.go b/vendor/github.com/golang/lint/golint/import.go new file mode 100644 index 0000000..02a0daa --- /dev/null +++ b/vendor/github.com/golang/lint/golint/import.go @@ -0,0 +1,310 @@ +package main + +/* + +This file holds a direct copy of the import path matching code of +https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be +replaced when https://golang.org/issue/8768 is resolved. + +It has been updated to follow upstream changes in a few ways. + +*/ + +import ( + "fmt" + "go/build" + "log" + "os" + "path" + "path/filepath" + "regexp" + "runtime" + "strings" +) + +var buildContext = build.Default + +var ( + goroot = filepath.Clean(runtime.GOROOT()) + gorootSrc = filepath.Join(goroot, "src") +) + +// importPathsNoDotExpansion returns the import paths to use for the given +// command line, but it does no ... expansion. 
+func importPathsNoDotExpansion(args []string) []string { + if len(args) == 0 { + return []string{"."} + } + var out []string + for _, a := range args { + // Arguments are supposed to be import paths, but + // as a courtesy to Windows developers, rewrite \ to / + // in command-line arguments. Handles .\... and so on. + if filepath.Separator == '\\' { + a = strings.Replace(a, `\`, `/`, -1) + } + + // Put argument in canonical form, but preserve leading ./. + if strings.HasPrefix(a, "./") { + a = "./" + path.Clean(a) + if a == "./." { + a = "." + } + } else { + a = path.Clean(a) + } + if a == "all" || a == "std" { + out = append(out, allPackages(a)...) + continue + } + out = append(out, a) + } + return out +} + +// importPaths returns the import paths to use for the given command line. +func importPaths(args []string) []string { + args = importPathsNoDotExpansion(args) + var out []string + for _, a := range args { + if strings.Contains(a, "...") { + if build.IsLocalImport(a) { + out = append(out, allPackagesInFS(a)...) + } else { + out = append(out, allPackages(a)...) + } + continue + } + out = append(out, a) + } + return out +} + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +func matchPattern(pattern string) func(name string) bool { + re := regexp.QuoteMeta(pattern) + re = strings.Replace(re, `\.\.\.`, `.*`, -1) + // Special case: foo/... matches foo too. + if strings.HasSuffix(re, `/.*`) { + re = re[:len(re)-len(`/.*`)] + `(/.*)?` + } + reg := regexp.MustCompile(`^` + re + `$`) + return func(name string) bool { + return reg.MatchString(name) + } +} + +// hasPathPrefix reports whether the path s begins with the +// elements in prefix. +func hasPathPrefix(s, prefix string) bool { + switch { + default: + return false + case len(s) == len(prefix): + return s == prefix + case len(s) > len(prefix): + if prefix != "" && prefix[len(prefix)-1] == '/' { + return strings.HasPrefix(s, prefix) + } + return s[len(prefix)] == '/' && s[:len(prefix)] == prefix + } +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. +// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} + +// allPackages returns all the packages that can be found +// under the $GOPATH directories and $GOROOT matching pattern. +// The pattern is either "all" (all packages), "std" (standard packages) +// or a path including "...". 
+func allPackages(pattern string) []string { + pkgs := matchPackages(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +func matchPackages(pattern string) []string { + match := func(string) bool { return true } + treeCanMatch := func(string) bool { return true } + if pattern != "all" && pattern != "std" { + match = matchPattern(pattern) + treeCanMatch = treeCanMatchPattern(pattern) + } + + have := map[string]bool{ + "builtin": true, // ignore pseudo-package that exists only for documentation + } + if !buildContext.CgoEnabled { + have["runtime/cgo"] = true // ignore during walk + } + var pkgs []string + + // Commands + cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator) + filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == cmd { + return nil + } + name := path[len(cmd):] + if !treeCanMatch(name) { + return filepath.SkipDir + } + // Commands are all in cmd/, not in subdirectories. + if strings.Contains(name, string(filepath.Separator)) { + return filepath.SkipDir + } + + // We use, e.g., cmd/gofmt as the pseudo import path for gofmt. + name = "cmd/" + name + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = buildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + + for _, src := range buildContext.SrcDirs() { + if (pattern == "std" || pattern == "cmd") && src != gorootSrc { + continue + } + src = filepath.Clean(src) + string(filepath.Separator) + root := src + if pattern == "cmd" { + root += "cmd" + string(filepath.Separator) + } + filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == src { + return nil + } + + // Avoid .foo, _foo, and testdata directory trees. + _, elem := filepath.Split(path) + if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := filepath.ToSlash(path[len(src):]) + if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") { + // The name "std" is only the standard library. + // If the name is cmd, it's the root of the command tree. + return filepath.SkipDir + } + if !treeCanMatch(name) { + return filepath.SkipDir + } + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = buildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); noGo { + return nil + } + } + pkgs = append(pkgs, name) + return nil + }) + } + return pkgs +} + +// allPackagesInFS is like allPackages but is passed a pattern +// beginning ./ or ../, meaning it should scan the tree rooted +// at the given directory. There are ... in the pattern too. +func allPackagesInFS(pattern string) []string { + pkgs := matchPackagesInFS(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +func matchPackagesInFS(pattern string) []string { + // Find directory to begin the scan. + // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + // pattern begins with ./ or ../. + // path.Clean will discard the ./ but not the ../. 
+ // We need to preserve the ./ for pattern matching + // and in the returned import paths. + prefix := "" + if strings.HasPrefix(pattern, "./") { + prefix = "./" + } + match := matchPattern(pattern) + + var pkgs []string + filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() { + return nil + } + if path == dir { + // filepath.Walk starts at dir and recurses. For the recursive case, + // the path is the result of filepath.Join, which calls filepath.Clean. + // The initial case is not Cleaned, though, so we do this explicitly. + // + // This converts a path like "./io/" to "io". Without this step, running + // "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io + // package, because prepending the prefix "./" to the unclean path would + // result in "././io", and match("././io") returns false. + path = filepath.Clean(path) + } + + // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := prefix + filepath.ToSlash(path) + if !match(name) { + return nil + } + if _, err = build.ImportDir(path, 0); err != nil { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + return pkgs +} diff --git a/vendor/github.com/golang/lint/lint.go b/vendor/github.com/golang/lint/lint.go new file mode 100644 index 0000000..8bb1faa --- /dev/null +++ b/vendor/github.com/golang/lint/lint.go @@ -0,0 +1,1697 @@ +// Copyright (c) 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file or at +// https://developers.google.com/open-source/licenses/bsd. + +// Package lint contains a linter for Go source code. +package lint + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/printer" + "go/token" + "go/types" + "regexp" + "sort" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/gcexportdata" +) + +const styleGuideBase = "https://golang.org/wiki/CodeReviewComments" + +// A Linter lints Go source code. +type Linter struct { +} + +// Problem represents a problem in some source code. +type Problem struct { + Position token.Position // position in source file + Text string // the prose that describes the problem + Link string // (optional) the link to the style guide for the problem + Confidence float64 // a value in (0,1] estimating the confidence in this problem's correctness + LineText string // the source line + Category string // a short name for the general category of the problem + + // If the problem has a suggested fix (the minority case), + // ReplacementLine is a full replacement for the relevant line of the source file. 
+ ReplacementLine string +} + +func (p *Problem) String() string { + if p.Link != "" { + return p.Text + "\n\n" + p.Link + } + return p.Text +} + +type byPosition []Problem + +func (p byPosition) Len() int { return len(p) } +func (p byPosition) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +func (p byPosition) Less(i, j int) bool { + pi, pj := p[i].Position, p[j].Position + + if pi.Filename != pj.Filename { + return pi.Filename < pj.Filename + } + if pi.Line != pj.Line { + return pi.Line < pj.Line + } + if pi.Column != pj.Column { + return pi.Column < pj.Column + } + + return p[i].Text < p[j].Text +} + +// Lint lints src. +func (l *Linter) Lint(filename string, src []byte) ([]Problem, error) { + return l.LintFiles(map[string][]byte{filename: src}) +} + +// LintFiles lints a set of files of a single package. +// The argument is a map of filename to source. +func (l *Linter) LintFiles(files map[string][]byte) ([]Problem, error) { + pkg := &pkg{ + fset: token.NewFileSet(), + files: make(map[string]*file), + } + var pkgName string + for filename, src := range files { + if isGenerated(src) { + continue // See issue #239 + } + f, err := parser.ParseFile(pkg.fset, filename, src, parser.ParseComments) + if err != nil { + return nil, err + } + if pkgName == "" { + pkgName = f.Name.Name + } else if f.Name.Name != pkgName { + return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName) + } + pkg.files[filename] = &file{ + pkg: pkg, + f: f, + fset: pkg.fset, + src: src, + filename: filename, + } + } + if len(pkg.files) == 0 { + return nil, nil + } + return pkg.lint(), nil +} + +var ( + genHdr = []byte("// Code generated ") + genFtr = []byte(" DO NOT EDIT.") +) + +// isGenerated reports whether the source file is generated code +// according the rules from https://golang.org/s/generatedcode. +func isGenerated(src []byte) bool { + sc := bufio.NewScanner(bytes.NewReader(src)) + for sc.Scan() { + b := sc.Bytes() + if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) { + return true + } + } + return false +} + +// pkg represents a package being linted. +type pkg struct { + fset *token.FileSet + files map[string]*file + + typesPkg *types.Package + typesInfo *types.Info + + // sortable is the set of types in the package that implement sort.Interface. + sortable map[string]bool + // main is whether this is a "main" package. + main bool + + problems []Problem +} + +func (p *pkg) lint() []Problem { + if err := p.typeCheck(); err != nil { + /* TODO(dsymonds): Consider reporting these errors when golint operates on entire packages. + if e, ok := err.(types.Error); ok { + pos := p.fset.Position(e.Pos) + conf := 1.0 + if strings.Contains(e.Msg, "can't find import: ") { + // Golint is probably being run in a context that doesn't support + // typechecking (e.g. package files aren't found), so don't warn about it. + conf = 0 + } + if conf > 0 { + p.errorfAt(pos, conf, category("typechecking"), e.Msg) + } + + // TODO(dsymonds): Abort if !e.Soft? + } + */ + } + + p.scanSortable() + p.main = p.isMain() + + for _, f := range p.files { + f.lint() + } + + sort.Sort(byPosition(p.problems)) + + return p.problems +} + +// file represents a file being linted. 
+type file struct { + pkg *pkg + f *ast.File + fset *token.FileSet + src []byte + filename string +} + +func (f *file) isTest() bool { return strings.HasSuffix(f.filename, "_test.go") } + +func (f *file) lint() { + f.lintPackageComment() + f.lintImports() + f.lintBlankImports() + f.lintExported() + f.lintNames() + f.lintVarDecls() + f.lintElses() + f.lintIfError() + f.lintRanges() + f.lintErrorf() + f.lintErrors() + f.lintErrorStrings() + f.lintReceiverNames() + f.lintIncDec() + f.lintErrorReturn() + f.lintUnexportedReturn() + f.lintTimeNames() + f.lintContextKeyTypes() + f.lintContextArgs() +} + +type link string +type category string + +// The variadic arguments may start with link and category types, +// and must end with a format string and any arguments. +// It returns the new Problem. +func (f *file) errorf(n ast.Node, confidence float64, args ...interface{}) *Problem { + pos := f.fset.Position(n.Pos()) + if pos.Filename == "" { + pos.Filename = f.filename + } + return f.pkg.errorfAt(pos, confidence, args...) +} + +func (p *pkg) errorfAt(pos token.Position, confidence float64, args ...interface{}) *Problem { + problem := Problem{ + Position: pos, + Confidence: confidence, + } + if pos.Filename != "" { + // The file might not exist in our mapping if a //line directive was encountered. + if f, ok := p.files[pos.Filename]; ok { + problem.LineText = srcLine(f.src, pos) + } + } + +argLoop: + for len(args) > 1 { // always leave at least the format string in args + switch v := args[0].(type) { + case link: + problem.Link = string(v) + case category: + problem.Category = string(v) + default: + break argLoop + } + args = args[1:] + } + + problem.Text = fmt.Sprintf(args[0].(string), args[1:]...) + + p.problems = append(p.problems, problem) + return &p.problems[len(p.problems)-1] +} + +var newImporter = func(fset *token.FileSet) types.ImporterFrom { + return gcexportdata.NewImporter(fset, make(map[string]*types.Package)) +} + +func (p *pkg) typeCheck() error { + config := &types.Config{ + // By setting a no-op error reporter, the type checker does as much work as possible. + Error: func(error) {}, + Importer: newImporter(p.fset), + } + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + } + var anyFile *file + var astFiles []*ast.File + for _, f := range p.files { + anyFile = f + astFiles = append(astFiles, f.f) + } + pkg, err := config.Check(anyFile.f.Name.Name, p.fset, astFiles, info) + // Remember the typechecking info, even if config.Check failed, + // since we will get partial information. + p.typesPkg = pkg + p.typesInfo = info + return err +} + +func (p *pkg) typeOf(expr ast.Expr) types.Type { + if p.typesInfo == nil { + return nil + } + return p.typesInfo.TypeOf(expr) +} + +func (p *pkg) isNamedType(typ types.Type, importPath, name string) bool { + n, ok := typ.(*types.Named) + if !ok { + return false + } + tn := n.Obj() + return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name +} + +// scopeOf returns the tightest scope encompassing id. +func (p *pkg) scopeOf(id *ast.Ident) *types.Scope { + var scope *types.Scope + if obj := p.typesInfo.ObjectOf(id); obj != nil { + scope = obj.Parent() + } + if scope == p.typesPkg.Scope() { + // We were given a top-level identifier. + // Use the file-level scope instead of the package-level scope. 
+ pos := id.Pos() + for _, f := range p.files { + if f.f.Pos() <= pos && pos < f.f.End() { + scope = p.typesInfo.Scopes[f.f] + break + } + } + } + return scope +} + +func (p *pkg) scanSortable() { + p.sortable = make(map[string]bool) + + // bitfield for which methods exist on each type. + const ( + Len = 1 << iota + Less + Swap + ) + nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} + has := make(map[string]int) + for _, f := range p.files { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { + return true + } + // TODO(dsymonds): We could check the signature to be more precise. + recv := receiverType(fn) + if i, ok := nmap[fn.Name.Name]; ok { + has[recv] |= i + } + return false + }) + } + for typ, ms := range has { + if ms == Len|Less|Swap { + p.sortable[typ] = true + } + } +} + +func (p *pkg) isMain() bool { + for _, f := range p.files { + if f.isMain() { + return true + } + } + return false +} + +func (f *file) isMain() bool { + if f.f.Name.Name == "main" { + return true + } + return false +} + +// lintPackageComment checks package comments. It complains if +// there is no package comment, or if it is not of the right form. +// This has a notable false positive in that a package comment +// could rightfully appear in a different file of the same package, +// but that's not easy to fix since this linter is file-oriented. +func (f *file) lintPackageComment() { + if f.isTest() { + return + } + + const ref = styleGuideBase + "#package-comments" + prefix := "Package " + f.f.Name.Name + " " + + // Look for a detached package comment. + // First, scan for the last comment that occurs before the "package" keyword. + var lastCG *ast.CommentGroup + for _, cg := range f.f.Comments { + if cg.Pos() > f.f.Package { + // Gone past "package" keyword. + break + } + lastCG = cg + } + if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) { + endPos := f.fset.Position(lastCG.End()) + pkgPos := f.fset.Position(f.f.Package) + if endPos.Line+1 < pkgPos.Line { + // There isn't a great place to anchor this error; + // the start of the blank lines between the doc and the package statement + // is at least pointing at the location of the problem. + pos := token.Position{ + Filename: endPos.Filename, + // Offset not set; it is non-trivial, and doesn't appear to be needed. + Line: endPos.Line + 1, + Column: 1, + } + f.pkg.errorfAt(pos, 0.9, link(ref), category("comments"), "package comment is detached; there should be no blank lines between it and the package statement") + return + } + } + + if f.f.Doc == nil { + f.errorf(f.f, 0.2, link(ref), category("comments"), "should have a package comment, unless it's in another file for this package") + return + } + s := f.f.Doc.Text() + if ts := strings.TrimLeft(s, " \t"); ts != s { + f.errorf(f.f.Doc, 1, link(ref), category("comments"), "package comment should not have leading space") + s = ts + } + // Only non-main packages need to keep to this form. + if !f.pkg.main && !strings.HasPrefix(s, prefix) { + f.errorf(f.f.Doc, 1, link(ref), category("comments"), `package comment should be of the form "%s..."`, prefix) + } +} + +// lintBlankImports complains if a non-main package has blank imports that are +// not documented. +func (f *file) lintBlankImports() { + // In package main and in tests, we don't complain about blank imports. 
+ if f.pkg.main || f.isTest() { + return + } + + // The first element of each contiguous group of blank imports should have + // an explanatory comment of some kind. + for i, imp := range f.f.Imports { + pos := f.fset.Position(imp.Pos()) + + if !isBlank(imp.Name) { + continue // Ignore non-blank imports. + } + if i > 0 { + prev := f.f.Imports[i-1] + prevPos := f.fset.Position(prev.Pos()) + if isBlank(prev.Name) && prevPos.Line+1 == pos.Line { + continue // A subsequent blank in a group. + } + } + + // This is the first blank import of a group. + if imp.Doc == nil && imp.Comment == nil { + ref := "" + f.errorf(imp, 1, link(ref), category("imports"), "a blank import should be only in a main or test package, or have a comment justifying it") + } + } +} + +// lintImports examines import blocks. +func (f *file) lintImports() { + for i, is := range f.f.Imports { + _ = i + if is.Name != nil && is.Name.Name == "." && !f.isTest() { + f.errorf(is, 1, link(styleGuideBase+"#import-dot"), category("imports"), "should not use dot imports") + } + + } +} + +const docCommentsLink = styleGuideBase + "#doc-comments" + +// lintExported examines the exported names. +// It complains if any required doc comments are missing, +// or if they are not of the right form. The exact rules are in +// lintFuncDoc, lintTypeDoc and lintValueSpecDoc; this function +// also tracks the GenDecl structure being traversed to permit +// doc comments for constants to be on top of the const block. +// It also complains if the names stutter when combined with +// the package name. +func (f *file) lintExported() { + if f.isTest() { + return + } + + var lastGen *ast.GenDecl // last GenDecl entered. + + // Set of GenDecls that have already had missing comments flagged. + genDeclMissingComments := make(map[*ast.GenDecl]bool) + + f.walk(func(node ast.Node) bool { + switch v := node.(type) { + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return false + } + // token.CONST, token.TYPE or token.VAR + lastGen = v + return true + case *ast.FuncDecl: + f.lintFuncDoc(v) + if v.Recv == nil { + // Only check for stutter on functions, not methods. + // Method names are not used package-qualified. + f.checkStutter(v.Name, "func") + } + // Don't proceed inside funcs. + return false + case *ast.TypeSpec: + // inside a GenDecl, which usually has the doc + doc := v.Doc + if doc == nil { + doc = lastGen.Doc + } + f.lintTypeDoc(v, doc) + f.checkStutter(v.Name, "type") + // Don't proceed inside types. + return false + case *ast.ValueSpec: + f.lintValueSpecDoc(v, lastGen, genDeclMissingComments) + return false + } + return true + }) +} + +var allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`) + +// knownNameExceptions is a set of names that are known to be exempt from naming checks. +// This is usually because they are constrained by having to match names in the +// standard library. +var knownNameExceptions = map[string]bool{ + "LastInsertId": true, // must match database/sql + "kWh": true, +} + +// lintNames examines all names in the file. +// It complains if any use underscores or incorrect known initialisms. +func (f *file) lintNames() { + // Package names need slightly different handling than other names. 
+ if strings.Contains(f.f.Name.Name, "_") && !strings.HasSuffix(f.f.Name.Name, "_test") { + f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("naming"), "don't use an underscore in package name") + } + + check := func(id *ast.Ident, thing string) { + if id.Name == "_" { + return + } + if knownNameExceptions[id.Name] { + return + } + + // Handle two common styles from other languages that don't belong in Go. + if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") { + f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use ALL_CAPS in Go names; use CamelCase") + return + } + if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' { + should := string(id.Name[1]+'a'-'A') + id.Name[2:] + f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use leading k in Go names; %s %s should be %s", thing, id.Name, should) + } + + should := lintName(id.Name) + if id.Name == should { + return + } + + if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") { + f.errorf(id, 0.9, link("http://golang.org/doc/effective_go.html#mixed-caps"), category("naming"), "don't use underscores in Go names; %s %s should be %s", thing, id.Name, should) + return + } + f.errorf(id, 0.8, link(styleGuideBase+"#initialisms"), category("naming"), "%s %s should be %s", thing, id.Name, should) + } + checkList := func(fl *ast.FieldList, thing string) { + if fl == nil { + return + } + for _, f := range fl.List { + for _, id := range f.Names { + check(id, thing) + } + } + } + f.walk(func(node ast.Node) bool { + switch v := node.(type) { + case *ast.AssignStmt: + if v.Tok == token.ASSIGN { + return true + } + for _, exp := range v.Lhs { + if id, ok := exp.(*ast.Ident); ok { + check(id, "var") + } + } + case *ast.FuncDecl: + if f.isTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + return true + } + + thing := "func" + if v.Recv != nil { + thing = "method" + } + + // Exclude naming warnings for functions that are exported to C but + // not exported in the Go API. + // See https://github.com/golang/lint/issues/144. + if ast.IsExported(v.Name.Name) || !isCgoExported(v) { + check(v.Name, thing) + } + + checkList(v.Type.Params, thing+" parameter") + checkList(v.Type.Results, thing+" result") + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return true + } + var thing string + switch v.Tok { + case token.CONST: + thing = "const" + case token.TYPE: + thing = "type" + case token.VAR: + thing = "var" + } + for _, spec := range v.Specs { + switch s := spec.(type) { + case *ast.TypeSpec: + check(s.Name, thing) + case *ast.ValueSpec: + for _, id := range s.Names { + check(id, thing) + } + } + } + case *ast.InterfaceType: + // Do not check interface method names. + // They are often constrainted by the method names of concrete types. 
+ for _, x := range v.Methods.List { + ft, ok := x.Type.(*ast.FuncType) + if !ok { // might be an embedded interface name + continue + } + checkList(ft.Params, "interface method parameter") + checkList(ft.Results, "interface method result") + } + case *ast.RangeStmt: + if v.Tok == token.ASSIGN { + return true + } + if id, ok := v.Key.(*ast.Ident); ok { + check(id, "range var") + } + if id, ok := v.Value.(*ast.Ident); ok { + check(id, "range var") + } + case *ast.StructType: + for _, f := range v.Fields.List { + for _, id := range f.Names { + check(id, "struct field") + } + } + } + return true + }) +} + +// lintName returns a different name if it should be different. +func lintName(name string) (should string) { + // Fast path for simple cases: "_" and all lowercase. + if name == "_" { + return name + } + allLower := true + for _, r := range name { + if !unicode.IsLower(r) { + allLower = false + break + } + } + if allLower { + return name + } + + // Split camelCase at any lower->upper transition, and split on underscores. + // Check each word for common initialisms. + runes := []rune(name) + w, i := 0, 0 // index of start of word, scan + for i+1 <= len(runes) { + eow := false // whether we hit the end of a word + if i+1 == len(runes) { + eow = true + } else if runes[i+1] == '_' { + // underscore; shift the remainder forward over any run of underscores + eow = true + n := 1 + for i+n+1 < len(runes) && runes[i+n+1] == '_' { + n++ + } + + // Leave at most one underscore if the underscore is between two digits + if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { + n-- + } + + copy(runes[i+1:], runes[i+n+1:]) + runes = runes[:len(runes)-n] + } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { + // lower->non-lower + eow = true + } + i++ + if !eow { + continue + } + + // [w,i) is a word. + word := string(runes[w:i]) + if u := strings.ToUpper(word); commonInitialisms[u] { + // Keep consistent case, which is lowercase only at the start. + if w == 0 && unicode.IsLower(runes[w]) { + u = strings.ToLower(u) + } + // All the common initialisms are ASCII, + // so we can replace the bytes exactly. + copy(runes[w:], []rune(u)) + } else if w > 0 && strings.ToLower(word) == word { + // already all lowercase, and not the first word, so uppercase the first character. + runes[w] = unicode.ToUpper(runes[w]) + } + w = i + } + return string(runes) +} + +// commonInitialisms is a set of common initialisms. +// Only add entries that are highly unlikely to be non-initialisms. +// For instance, "ID" is fine (Freudian code is rare), but "AND" is not. +var commonInitialisms = map[string]bool{ + "ACL": true, + "API": true, + "ASCII": true, + "CPU": true, + "CSS": true, + "DNS": true, + "EOF": true, + "GUID": true, + "HTML": true, + "HTTP": true, + "HTTPS": true, + "ID": true, + "IP": true, + "JSON": true, + "LHS": true, + "QPS": true, + "RAM": true, + "RHS": true, + "RPC": true, + "SLA": true, + "SMTP": true, + "SQL": true, + "SSH": true, + "TCP": true, + "TLS": true, + "TTL": true, + "UDP": true, + "UI": true, + "UID": true, + "UUID": true, + "URI": true, + "URL": true, + "UTF8": true, + "VM": true, + "XML": true, + "XMPP": true, + "XSRF": true, + "XSS": true, +} + +// lintTypeDoc examines the doc comment on a type. +// It complains if they are missing from an exported type, +// or if they are not of the standard form. 
+func (f *file) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { + if !ast.IsExported(t.Name.Name) { + return + } + if doc == nil { + f.errorf(t, 1, link(docCommentsLink), category("comments"), "exported type %v should have comment or be unexported", t.Name) + return + } + + s := doc.Text() + articles := [...]string{"A", "An", "The"} + for _, a := range articles { + if strings.HasPrefix(s, a+" ") { + s = s[len(a)+1:] + break + } + } + if !strings.HasPrefix(s, t.Name.Name+" ") { + f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name) + } +} + +var commonMethods = map[string]bool{ + "Error": true, + "Read": true, + "ServeHTTP": true, + "String": true, + "Write": true, +} + +// lintFuncDoc examines doc comments on functions and methods. +// It complains if they are missing, or not of the right form. +// It has specific exclusions for well-known methods (see commonMethods above). +func (f *file) lintFuncDoc(fn *ast.FuncDecl) { + if !ast.IsExported(fn.Name.Name) { + // func is unexported + return + } + kind := "function" + name := fn.Name.Name + if fn.Recv != nil && len(fn.Recv.List) > 0 { + // method + kind = "method" + recv := receiverType(fn) + if !ast.IsExported(recv) { + // receiver is unexported + return + } + if commonMethods[name] { + return + } + switch name { + case "Len", "Less", "Swap": + if f.pkg.sortable[recv] { + return + } + } + name = recv + "." + name + } + if fn.Doc == nil { + f.errorf(fn, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment or be unexported", kind, name) + return + } + s := fn.Doc.Text() + prefix := fn.Name.Name + " " + if !strings.HasPrefix(s, prefix) { + f.errorf(fn.Doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) + } +} + +// lintValueSpecDoc examines package-global variables and constants. +// It complains if they are not individually declared, +// or if they are not suitably documented in the right form (unless they are in a block that is commented). +func (f *file) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { + kind := "var" + if gd.Tok == token.CONST { + kind = "const" + } + + if len(vs.Names) > 1 { + // Check that none are exported except for the first. + for _, n := range vs.Names[1:] { + if ast.IsExported(n.Name) { + f.errorf(vs, 1, category("comments"), "exported %s %s should have its own declaration", kind, n.Name) + return + } + } + } + + // Only one name. + name := vs.Names[0].Name + if !ast.IsExported(name) { + return + } + + if vs.Doc == nil && gd.Doc == nil { + if genDeclMissingComments[gd] { + return + } + block := "" + if kind == "const" && gd.Lparen.IsValid() { + block = " (or a comment on this block)" + } + f.errorf(vs, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment%s or be unexported", kind, name, block) + genDeclMissingComments[gd] = true + return + } + // If this GenDecl has parens and a comment, we don't check its comment form. + if gd.Lparen.IsValid() && gd.Doc != nil { + return + } + // The relevant text to check will be on either vs.Doc or gd.Doc. + // Use vs.Doc preferentially. 
+ doc := vs.Doc + if doc == nil { + doc = gd.Doc + } + prefix := name + " " + if !strings.HasPrefix(doc.Text(), prefix) { + f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) + } +} + +func (f *file) checkStutter(id *ast.Ident, thing string) { + pkg, name := f.f.Name.Name, id.Name + if !ast.IsExported(name) { + // unexported name + return + } + // A name stutters if the package name is a strict prefix + // and the next character of the name starts a new word. + if len(name) <= len(pkg) { + // name is too short to stutter. + // This permits the name to be the same as the package name. + return + } + if !strings.EqualFold(pkg, name[:len(pkg)]) { + return + } + // We can assume the name is well-formed UTF-8. + // If the next rune after the package name is uppercase or an underscore + // the it's starting a new word and thus this name stutters. + rem := name[len(pkg):] + if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) { + f.errorf(id, 0.8, link(styleGuideBase+"#package-names"), category("naming"), "%s name will be used as %s.%s by other packages, and that stutters; consider calling this %s", thing, pkg, name, rem) + } +} + +// zeroLiteral is a set of ast.BasicLit values that are zero values. +// It is not exhaustive. +var zeroLiteral = map[string]bool{ + "false": true, // bool + // runes + `'\x00'`: true, + `'\000'`: true, + // strings + `""`: true, + "``": true, + // numerics + "0": true, + "0.": true, + "0.0": true, + "0i": true, +} + +// lintVarDecls examines variable declarations. It complains about declarations with +// redundant LHS types that can be inferred from the RHS. +func (f *file) lintVarDecls() { + var lastGen *ast.GenDecl // last GenDecl entered. + + f.walk(func(node ast.Node) bool { + switch v := node.(type) { + case *ast.GenDecl: + if v.Tok != token.CONST && v.Tok != token.VAR { + return false + } + lastGen = v + return true + case *ast.ValueSpec: + if lastGen.Tok == token.CONST { + return false + } + if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 { + return false + } + rhs := v.Values[0] + // An underscore var appears in a common idiom for compile-time interface satisfaction, + // as in "var _ Interface = (*Concrete)(nil)". + if isIdent(v.Names[0], "_") { + return false + } + // If the RHS is a zero value, suggest dropping it. + zero := false + if lit, ok := rhs.(*ast.BasicLit); ok { + zero = zeroLiteral[lit.Value] + } else if isIdent(rhs, "nil") { + zero = true + } + if zero { + f.errorf(rhs, 0.9, category("zero-value"), "should drop = %s from declaration of var %s; it is the zero value", f.render(rhs), v.Names[0]) + return false + } + lhsTyp := f.pkg.typeOf(v.Type) + rhsTyp := f.pkg.typeOf(rhs) + + if !validType(lhsTyp) || !validType(rhsTyp) { + // Type checking failed (often due to missing imports). + return false + } + + if !types.Identical(lhsTyp, rhsTyp) { + // Assignment to a different type is not redundant. + return false + } + + // The next three conditions are for suppressing the warning in situations + // where we were unable to typecheck. + + // If the LHS type is an interface, don't warn, since it is probably a + // concrete type on the RHS. Note that our feeble lexical check here + // will only pick up interface{} and other literal interface types; + // that covers most of the cases we care to exclude right now. 
+ if _, ok := v.Type.(*ast.InterfaceType); ok { + return false + } + // If the RHS is an untyped const, only warn if the LHS type is its default type. + if defType, ok := f.isUntypedConst(rhs); ok && !isIdent(v.Type, defType) { + return false + } + + f.errorf(v.Type, 0.8, category("type-inference"), "should omit type %s from declaration of var %s; it will be inferred from the right-hand side", f.render(v.Type), v.Names[0]) + return false + } + return true + }) +} + +func validType(T types.Type) bool { + return T != nil && + T != types.Typ[types.Invalid] && + !strings.Contains(T.String(), "invalid type") // good but not foolproof +} + +// lintElses examines else blocks. It complains about any else block whose if block ends in a return. +func (f *file) lintElses() { + // We don't want to flag if { } else if { } else { } constructions. + // They will appear as an IfStmt whose Else field is also an IfStmt. + // Record such a node so we ignore it when we visit it. + ignore := make(map[*ast.IfStmt]bool) + + f.walk(func(node ast.Node) bool { + ifStmt, ok := node.(*ast.IfStmt) + if !ok || ifStmt.Else == nil { + return true + } + if ignore[ifStmt] { + return true + } + if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { + ignore[elseif] = true + return true + } + if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok { + // only care about elses without conditions + return true + } + if len(ifStmt.Body.List) == 0 { + return true + } + shortDecl := false // does the if statement have a ":=" initialization statement? + if ifStmt.Init != nil { + if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { + shortDecl = true + } + } + lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1] + if _, ok := lastStmt.(*ast.ReturnStmt); ok { + extra := "" + if shortDecl { + extra = " (move short variable declaration to its own line if necessary)" + } + f.errorf(ifStmt.Else, 1, link(styleGuideBase+"#indent-error-flow"), category("indent"), "if block ends with a return statement, so drop this else and outdent its block"+extra) + } + return true + }) +} + +// lintRanges examines range clauses. It complains about redundant constructions. +func (f *file) lintRanges() { + f.walk(func(node ast.Node) bool { + rs, ok := node.(*ast.RangeStmt) + if !ok { + return true + } + if rs.Value == nil { + // for x = range m { ... } + return true // single var form + } + if !isIdent(rs.Value, "_") { + // for ?, y = range m { ... } + return true + } + + p := f.errorf(rs.Value, 1, category("range-loop"), "should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", f.render(rs.Key), rs.Tok) + + newRS := *rs // shallow copy + newRS.Value = nil + p.ReplacementLine = f.firstLineOf(&newRS, rs) + + return true + }) +} + +// lintErrorf examines errors.New and testing.Error calls. It complains if its only argument is an fmt.Sprintf invocation. 
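+// For example (illustrative), it suggests replacing
+//
+//	errors.New(fmt.Sprintf("no such key %q", k))
+//
+// with
+//
+//	fmt.Errorf("no such key %q", k)
+//
+// and, analogously, t.Error(fmt.Sprintf(...)) with t.Errorf(...).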
+func (f *file) lintErrorf() { + f.walk(func(node ast.Node) bool { + ce, ok := node.(*ast.CallExpr) + if !ok || len(ce.Args) != 1 { + return true + } + isErrorsNew := isPkgDot(ce.Fun, "errors", "New") + var isTestingError bool + se, ok := ce.Fun.(*ast.SelectorExpr) + if ok && se.Sel.Name == "Error" { + if typ := f.pkg.typeOf(se.X); typ != nil { + isTestingError = typ.String() == "*testing.T" + } + } + if !isErrorsNew && !isTestingError { + return true + } + arg := ce.Args[0] + ce, ok = arg.(*ast.CallExpr) + if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") { + return true + } + errorfPrefix := "fmt" + if isTestingError { + errorfPrefix = f.render(se.X) + } + p := f.errorf(node, 1, category("errors"), "should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", f.render(se), errorfPrefix) + + m := f.srcLineWithMatch(ce, `^(.*)`+f.render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`) + if m != nil { + p.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3] + } + + return true + }) +} + +// lintErrors examines global error vars. It complains if they aren't named in the standard way. +func (f *file) lintErrors() { + for _, decl := range f.f.Decls { + gd, ok := decl.(*ast.GenDecl) + if !ok || gd.Tok != token.VAR { + continue + } + for _, spec := range gd.Specs { + spec := spec.(*ast.ValueSpec) + if len(spec.Names) != 1 || len(spec.Values) != 1 { + continue + } + ce, ok := spec.Values[0].(*ast.CallExpr) + if !ok { + continue + } + if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { + continue + } + + id := spec.Names[0] + prefix := "err" + if id.IsExported() { + prefix = "Err" + } + if !strings.HasPrefix(id.Name, prefix) { + f.errorf(id, 0.9, category("naming"), "error var %s should have name of the form %sFoo", id.Name, prefix) + } + } + } +} + +func lintErrorString(s string) (isClean bool, conf float64) { + const basicConfidence = 0.8 + const capConfidence = basicConfidence - 0.2 + first, firstN := utf8.DecodeRuneInString(s) + last, _ := utf8.DecodeLastRuneInString(s) + if last == '.' || last == ':' || last == '!' || last == '\n' { + return false, basicConfidence + } + if unicode.IsUpper(first) { + // People use proper nouns and exported Go identifiers in error strings, + // so decrease the confidence of warnings for capitalization. + if len(s) <= firstN { + return false, capConfidence + } + // Flag strings starting with something that doesn't look like an initialism. + if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) { + return false, capConfidence + } + } + return true, 0 +} + +// lintErrorStrings examines error strings. +// It complains if they are capitalized or end in punctuation or a newline. +func (f *file) lintErrorStrings() { + f.walk(func(node ast.Node) bool { + ce, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { + return true + } + if len(ce.Args) < 1 { + return true + } + str, ok := ce.Args[0].(*ast.BasicLit) + if !ok || str.Kind != token.STRING { + return true + } + s, _ := strconv.Unquote(str.Value) // can assume well-formed Go + if s == "" { + return true + } + clean, conf := lintErrorString(s) + if clean { + return true + } + + f.errorf(str, conf, link(styleGuideBase+"#error-strings"), category("errors"), + "error strings should not be capitalized or end with punctuation or a newline") + return true + }) +} + +// lintReceiverNames examines receiver names. 
It complains about inconsistent +// names used for the same type and names such as "this". +func (f *file) lintReceiverNames() { + typeReceiver := map[string]string{} + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { + return true + } + names := fn.Recv.List[0].Names + if len(names) < 1 { + return true + } + name := names[0].Name + const ref = styleGuideBase + "#receiver-names" + if name == "_" { + f.errorf(n, 1, link(ref), category("naming"), `receiver name should not be an underscore, omit the name if it is unused`) + return true + } + if name == "this" || name == "self" { + f.errorf(n, 1, link(ref), category("naming"), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) + return true + } + recv := receiverType(fn) + if prev, ok := typeReceiver[recv]; ok && prev != name { + f.errorf(n, 1, link(ref), category("naming"), "receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv) + return true + } + typeReceiver[recv] = name + return true + }) +} + +// lintIncDec examines statements that increment or decrement a variable. +// It complains if they don't use x++ or x--. +func (f *file) lintIncDec() { + f.walk(func(n ast.Node) bool { + as, ok := n.(*ast.AssignStmt) + if !ok { + return true + } + if len(as.Lhs) != 1 { + return true + } + if !isOne(as.Rhs[0]) { + return true + } + var suffix string + switch as.Tok { + case token.ADD_ASSIGN: + suffix = "++" + case token.SUB_ASSIGN: + suffix = "--" + default: + return true + } + f.errorf(as, 0.8, category("unary-op"), "should replace %s with %s%s", f.render(as), f.render(as.Lhs[0]), suffix) + return true + }) +} + +// lintErrorReturn examines function declarations that return an error. +// It complains if the error isn't the last parameter. +func (f *file) lintErrorReturn() { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Type.Results == nil { + return true + } + ret := fn.Type.Results.List + if len(ret) <= 1 { + return true + } + // An error return parameter should be the last parameter. + // Flag any error parameters found before the last. + for _, r := range ret[:len(ret)-1] { + if isIdent(r.Type, "error") { + f.errorf(fn, 0.9, category("arg-order"), "error should be the last type when returning multiple items") + break // only flag one + } + } + return true + }) +} + +// lintUnexportedReturn examines exported function declarations. +// It complains if any return an unexported type. +func (f *file) lintUnexportedReturn() { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok { + return true + } + if fn.Type.Results == nil { + return false + } + if !fn.Name.IsExported() { + return false + } + thing := "func" + if fn.Recv != nil && len(fn.Recv.List) > 0 { + thing = "method" + if !ast.IsExported(receiverType(fn)) { + // Don't report exported methods of unexported types, + // such as private implementations of sort.Interface. + return false + } + } + for _, ret := range fn.Type.Results.List { + typ := f.pkg.typeOf(ret.Type) + if exportedType(typ) { + continue + } + f.errorf(ret.Type, 0.8, category("unexported-type-in-api"), + "exported %s %s returns unexported type %s, which can be annoying to use", + thing, fn.Name.Name, typ) + break // only flag one + } + return false + }) +} + +// exportedType reports whether typ is an exported type. +// It is imprecise, and will err on the side of returning true, +// such as for composite types. 
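+// For example (illustrative), it reports false for map[string]unexportedT,
+// because the map's element type is unexported, while an unnamed struct or
+// interface type is conservatively reported as exported.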
+func exportedType(typ types.Type) bool { + switch T := typ.(type) { + case *types.Named: + // Builtin types have no package. + return T.Obj().Pkg() == nil || T.Obj().Exported() + case *types.Map: + return exportedType(T.Key()) && exportedType(T.Elem()) + case interface { + Elem() types.Type + }: // array, slice, pointer, chan + return exportedType(T.Elem()) + } + // Be conservative about other types, such as struct, interface, etc. + return true +} + +// timeSuffixes is a list of name suffixes that imply a time unit. +// This is not an exhaustive list. +var timeSuffixes = []string{ + "Sec", "Secs", "Seconds", + "Msec", "Msecs", + "Milli", "Millis", "Milliseconds", + "Usec", "Usecs", "Microseconds", + "MS", "Ms", +} + +func (f *file) lintTimeNames() { + f.walk(func(node ast.Node) bool { + v, ok := node.(*ast.ValueSpec) + if !ok { + return true + } + for _, name := range v.Names { + origTyp := f.pkg.typeOf(name) + // Look for time.Duration or *time.Duration; + // the latter is common when using flag.Duration. + typ := origTyp + if pt, ok := typ.(*types.Pointer); ok { + typ = pt.Elem() + } + if !f.pkg.isNamedType(typ, "time", "Duration") { + continue + } + suffix := "" + for _, suf := range timeSuffixes { + if strings.HasSuffix(name.Name, suf) { + suffix = suf + break + } + } + if suffix == "" { + continue + } + f.errorf(v, 0.9, category("time"), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix) + } + return true + }) +} + +// lintContextKeyTypes checks for call expressions to context.WithValue with +// basic types used for the key argument. +// See: https://golang.org/issue/17293 +func (f *file) lintContextKeyTypes() { + f.walk(func(node ast.Node) bool { + switch node := node.(type) { + case *ast.CallExpr: + f.checkContextKeyType(node) + } + + return true + }) +} + +// checkContextKeyType reports an error if the call expression calls +// context.WithValue with a key argument of basic type. +func (f *file) checkContextKeyType(x *ast.CallExpr) { + sel, ok := x.Fun.(*ast.SelectorExpr) + if !ok { + return + } + pkg, ok := sel.X.(*ast.Ident) + if !ok || pkg.Name != "context" { + return + } + if sel.Sel.Name != "WithValue" { + return + } + + // key is second argument to context.WithValue + if len(x.Args) != 3 { + return + } + key := f.pkg.typesInfo.Types[x.Args[1]] + + if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid { + f.errorf(x, 1.0, category("context"), fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type)) + } +} + +// lintContextArgs examines function declarations that contain an +// argument with a type of context.Context +// It complains if that argument isn't the first parameter. +func (f *file) lintContextArgs() { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || len(fn.Type.Params.List) <= 1 { + return true + } + // A context.Context should be the first parameter of a function. + // Flag any that show up after the first. + for _, arg := range fn.Type.Params.List[1:] { + if isPkgDot(arg.Type, "context", "Context") { + f.errorf(fn, 0.9, link("https://golang.org/pkg/context/"), category("arg-order"), "context.Context should be the first parameter of a function") + break // only flag one + } + } + return true + }) +} + +// containsComments returns whether the interval [start, end) contains any +// comments without "// MATCH " prefix. 
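+// lintIfError uses this so that an "if ...; err != nil" block that carries
+// an explanatory comment is not reported as redundant.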
+func (f *file) containsComments(start, end token.Pos) bool { + for _, cgroup := range f.f.Comments { + comments := cgroup.List + if comments[0].Slash >= end { + // All comments starting with this group are after end pos. + return false + } + if comments[len(comments)-1].Slash < start { + // Comments group ends before start pos. + continue + } + for _, c := range comments { + if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") { + return true + } + } + } + return false +} + +func (f *file) lintIfError() { + f.walk(func(node ast.Node) bool { + switch v := node.(type) { + case *ast.BlockStmt: + for i := 0; i < len(v.List)-1; i++ { + // if var := whatever; var != nil { return var } + s, ok := v.List[i].(*ast.IfStmt) + if !ok || s.Body == nil || len(s.Body.List) != 1 || s.Else != nil { + continue + } + assign, ok := s.Init.(*ast.AssignStmt) + if !ok || len(assign.Lhs) != 1 || !(assign.Tok == token.DEFINE || assign.Tok == token.ASSIGN) { + continue + } + id, ok := assign.Lhs[0].(*ast.Ident) + if !ok { + continue + } + expr, ok := s.Cond.(*ast.BinaryExpr) + if !ok || expr.Op != token.NEQ { + continue + } + if lhs, ok := expr.X.(*ast.Ident); !ok || lhs.Name != id.Name { + continue + } + if rhs, ok := expr.Y.(*ast.Ident); !ok || rhs.Name != "nil" { + continue + } + r, ok := s.Body.List[0].(*ast.ReturnStmt) + if !ok || len(r.Results) != 1 { + continue + } + if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != id.Name { + continue + } + + // return nil + r, ok = v.List[i+1].(*ast.ReturnStmt) + if !ok || len(r.Results) != 1 { + continue + } + if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != "nil" { + continue + } + + // check if there are any comments explaining the construct, don't emit an error if there are some. + if f.containsComments(s.Pos(), r.Pos()) { + continue + } + + f.errorf(v.List[i], 0.9, "redundant if ...; err != nil check, just return error instead.") + } + } + return true + }) +} + +// receiverType returns the named type of the method receiver, sans "*", +// or "invalid-type" if fn.Recv is ill formed. +func receiverType(fn *ast.FuncDecl) string { + switch e := fn.Recv.List[0].Type.(type) { + case *ast.Ident: + return e.Name + case *ast.StarExpr: + if id, ok := e.X.(*ast.Ident); ok { + return id.Name + } + } + // The parser accepts much more than just the legal forms. + return "invalid-type" +} + +func (f *file) walk(fn func(ast.Node) bool) { + ast.Walk(walker(fn), f.f) +} + +func (f *file) render(x interface{}) string { + var buf bytes.Buffer + if err := printer.Fprint(&buf, f.fset, x); err != nil { + panic(err) + } + return buf.String() +} + +func (f *file) debugRender(x interface{}) string { + var buf bytes.Buffer + if err := ast.Fprint(&buf, f.fset, x, nil); err != nil { + panic(err) + } + return buf.String() +} + +// walker adapts a function to satisfy the ast.Visitor interface. +// The function return whether the walk should proceed into the node's children. +type walker func(ast.Node) bool + +func (w walker) Visit(node ast.Node) ast.Visitor { + if w(node) { + return w + } + return nil +} + +func isIdent(expr ast.Expr, ident string) bool { + id, ok := expr.(*ast.Ident) + return ok && id.Name == ident +} + +// isBlank returns whether id is the blank identifier "_". +// If id == nil, the answer is false. 
+func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" } + +func isPkgDot(expr ast.Expr, pkg, name string) bool { + sel, ok := expr.(*ast.SelectorExpr) + return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name) +} + +func isOne(expr ast.Expr) bool { + lit, ok := expr.(*ast.BasicLit) + return ok && lit.Kind == token.INT && lit.Value == "1" +} + +func isCgoExported(f *ast.FuncDecl) bool { + if f.Recv != nil || f.Doc == nil { + return false + } + + cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name))) + for _, c := range f.Doc.List { + if cgoExport.MatchString(c.Text) { + return true + } + } + return false +} + +var basicTypeKinds = map[types.BasicKind]string{ + types.UntypedBool: "bool", + types.UntypedInt: "int", + types.UntypedRune: "rune", + types.UntypedFloat: "float64", + types.UntypedComplex: "complex128", + types.UntypedString: "string", +} + +// isUntypedConst reports whether expr is an untyped constant, +// and indicates what its default type is. +// scope may be nil. +func (f *file) isUntypedConst(expr ast.Expr) (defType string, ok bool) { + // Re-evaluate expr outside of its context to see if it's untyped. + // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) + exprStr := f.render(expr) + tv, err := types.Eval(f.fset, f.pkg.typesPkg, expr.Pos(), exprStr) + if err != nil { + return "", false + } + if b, ok := tv.Type.(*types.Basic); ok { + if dt, ok := basicTypeKinds[b.Kind()]; ok { + return dt, true + } + } + + return "", false +} + +// firstLineOf renders the given node and returns its first line. +// It will also match the indentation of another node. +func (f *file) firstLineOf(node, match ast.Node) string { + line := f.render(node) + if i := strings.Index(line, "\n"); i >= 0 { + line = line[:i] + } + return f.indentOf(match) + line +} + +func (f *file) indentOf(node ast.Node) string { + line := srcLine(f.src, f.fset.Position(node.Pos())) + for i, r := range line { + switch r { + case ' ', '\t': + default: + return line[:i] + } + } + return line // unusual or empty line +} + +func (f *file) srcLineWithMatch(node ast.Node, pattern string) (m []string) { + line := srcLine(f.src, f.fset.Position(node.Pos())) + line = strings.TrimSuffix(line, "\n") + rx := regexp.MustCompile(pattern) + return rx.FindStringSubmatch(line) +} + +// srcLine returns the complete line at p, including the terminating newline. +func srcLine(src []byte, p token.Position) string { + // Run to end of line in both directions if not at line start/end. + lo, hi := p.Offset, p.Offset+1 + for lo > 0 && src[lo-1] != '\n' { + lo-- + } + for hi < len(src) && src[hi-1] != '\n' { + hi++ + } + return string(src[lo:hi]) +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go new file mode 100644 index 0000000..c6a91bc --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor/descriptor.pb.go @@ -0,0 +1,2215 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/descriptor.proto + +/* +Package descriptor is a generated protocol buffer package. 
+ +It is generated from these files: + google/protobuf/descriptor.proto + +It has these top-level messages: + FileDescriptorSet + FileDescriptorProto + DescriptorProto + ExtensionRangeOptions + FieldDescriptorProto + OneofDescriptorProto + EnumDescriptorProto + EnumValueDescriptorProto + ServiceDescriptorProto + MethodDescriptorProto + FileOptions + MessageOptions + FieldOptions + OneofOptions + EnumOptions + EnumValueOptions + ServiceOptions + MethodOptions + UninterpretedOption + SourceCodeInfo + GeneratedCodeInfo +*/ +package descriptor + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type FieldDescriptorProto_Type int32 + +const ( + // 0 is reserved for errors. + // Order is weird for historical reasons. + FieldDescriptorProto_TYPE_DOUBLE FieldDescriptorProto_Type = 1 + FieldDescriptorProto_TYPE_FLOAT FieldDescriptorProto_Type = 2 + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + FieldDescriptorProto_TYPE_INT64 FieldDescriptorProto_Type = 3 + FieldDescriptorProto_TYPE_UINT64 FieldDescriptorProto_Type = 4 + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + FieldDescriptorProto_TYPE_INT32 FieldDescriptorProto_Type = 5 + FieldDescriptorProto_TYPE_FIXED64 FieldDescriptorProto_Type = 6 + FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7 + FieldDescriptorProto_TYPE_BOOL FieldDescriptorProto_Type = 8 + FieldDescriptorProto_TYPE_STRING FieldDescriptorProto_Type = 9 + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + FieldDescriptorProto_TYPE_GROUP FieldDescriptorProto_Type = 10 + FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11 + // New in version 2. 
+ FieldDescriptorProto_TYPE_BYTES FieldDescriptorProto_Type = 12 + FieldDescriptorProto_TYPE_UINT32 FieldDescriptorProto_Type = 13 + FieldDescriptorProto_TYPE_ENUM FieldDescriptorProto_Type = 14 + FieldDescriptorProto_TYPE_SFIXED32 FieldDescriptorProto_Type = 15 + FieldDescriptorProto_TYPE_SFIXED64 FieldDescriptorProto_Type = 16 + FieldDescriptorProto_TYPE_SINT32 FieldDescriptorProto_Type = 17 + FieldDescriptorProto_TYPE_SINT64 FieldDescriptorProto_Type = 18 +) + +var FieldDescriptorProto_Type_name = map[int32]string{ + 1: "TYPE_DOUBLE", + 2: "TYPE_FLOAT", + 3: "TYPE_INT64", + 4: "TYPE_UINT64", + 5: "TYPE_INT32", + 6: "TYPE_FIXED64", + 7: "TYPE_FIXED32", + 8: "TYPE_BOOL", + 9: "TYPE_STRING", + 10: "TYPE_GROUP", + 11: "TYPE_MESSAGE", + 12: "TYPE_BYTES", + 13: "TYPE_UINT32", + 14: "TYPE_ENUM", + 15: "TYPE_SFIXED32", + 16: "TYPE_SFIXED64", + 17: "TYPE_SINT32", + 18: "TYPE_SINT64", +} +var FieldDescriptorProto_Type_value = map[string]int32{ + "TYPE_DOUBLE": 1, + "TYPE_FLOAT": 2, + "TYPE_INT64": 3, + "TYPE_UINT64": 4, + "TYPE_INT32": 5, + "TYPE_FIXED64": 6, + "TYPE_FIXED32": 7, + "TYPE_BOOL": 8, + "TYPE_STRING": 9, + "TYPE_GROUP": 10, + "TYPE_MESSAGE": 11, + "TYPE_BYTES": 12, + "TYPE_UINT32": 13, + "TYPE_ENUM": 14, + "TYPE_SFIXED32": 15, + "TYPE_SFIXED64": 16, + "TYPE_SINT32": 17, + "TYPE_SINT64": 18, +} + +func (x FieldDescriptorProto_Type) Enum() *FieldDescriptorProto_Type { + p := new(FieldDescriptorProto_Type) + *p = x + return p +} +func (x FieldDescriptorProto_Type) String() string { + return proto.EnumName(FieldDescriptorProto_Type_name, int32(x)) +} +func (x *FieldDescriptorProto_Type) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Type_value, data, "FieldDescriptorProto_Type") + if err != nil { + return err + } + *x = FieldDescriptorProto_Type(value) + return nil +} +func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0} } + +type FieldDescriptorProto_Label int32 + +const ( + // 0 is reserved for errors + FieldDescriptorProto_LABEL_OPTIONAL FieldDescriptorProto_Label = 1 + FieldDescriptorProto_LABEL_REQUIRED FieldDescriptorProto_Label = 2 + FieldDescriptorProto_LABEL_REPEATED FieldDescriptorProto_Label = 3 +) + +var FieldDescriptorProto_Label_name = map[int32]string{ + 1: "LABEL_OPTIONAL", + 2: "LABEL_REQUIRED", + 3: "LABEL_REPEATED", +} +var FieldDescriptorProto_Label_value = map[string]int32{ + "LABEL_OPTIONAL": 1, + "LABEL_REQUIRED": 2, + "LABEL_REPEATED": 3, +} + +func (x FieldDescriptorProto_Label) Enum() *FieldDescriptorProto_Label { + p := new(FieldDescriptorProto_Label) + *p = x + return p +} +func (x FieldDescriptorProto_Label) String() string { + return proto.EnumName(FieldDescriptorProto_Label_name, int32(x)) +} +func (x *FieldDescriptorProto_Label) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldDescriptorProto_Label_value, data, "FieldDescriptorProto_Label") + if err != nil { + return err + } + *x = FieldDescriptorProto_Label(value) + return nil +} +func (FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{4, 1} +} + +// Generated classes can be optimized for speed or code size. +type FileOptions_OptimizeMode int32 + +const ( + FileOptions_SPEED FileOptions_OptimizeMode = 1 + // etc. 
+ FileOptions_CODE_SIZE FileOptions_OptimizeMode = 2 + FileOptions_LITE_RUNTIME FileOptions_OptimizeMode = 3 +) + +var FileOptions_OptimizeMode_name = map[int32]string{ + 1: "SPEED", + 2: "CODE_SIZE", + 3: "LITE_RUNTIME", +} +var FileOptions_OptimizeMode_value = map[string]int32{ + "SPEED": 1, + "CODE_SIZE": 2, + "LITE_RUNTIME": 3, +} + +func (x FileOptions_OptimizeMode) Enum() *FileOptions_OptimizeMode { + p := new(FileOptions_OptimizeMode) + *p = x + return p +} +func (x FileOptions_OptimizeMode) String() string { + return proto.EnumName(FileOptions_OptimizeMode_name, int32(x)) +} +func (x *FileOptions_OptimizeMode) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FileOptions_OptimizeMode_value, data, "FileOptions_OptimizeMode") + if err != nil { + return err + } + *x = FileOptions_OptimizeMode(value) + return nil +} +func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{10, 0} } + +type FieldOptions_CType int32 + +const ( + // Default mode. + FieldOptions_STRING FieldOptions_CType = 0 + FieldOptions_CORD FieldOptions_CType = 1 + FieldOptions_STRING_PIECE FieldOptions_CType = 2 +) + +var FieldOptions_CType_name = map[int32]string{ + 0: "STRING", + 1: "CORD", + 2: "STRING_PIECE", +} +var FieldOptions_CType_value = map[string]int32{ + "STRING": 0, + "CORD": 1, + "STRING_PIECE": 2, +} + +func (x FieldOptions_CType) Enum() *FieldOptions_CType { + p := new(FieldOptions_CType) + *p = x + return p +} +func (x FieldOptions_CType) String() string { + return proto.EnumName(FieldOptions_CType_name, int32(x)) +} +func (x *FieldOptions_CType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldOptions_CType_value, data, "FieldOptions_CType") + if err != nil { + return err + } + *x = FieldOptions_CType(value) + return nil +} +func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{12, 0} } + +type FieldOptions_JSType int32 + +const ( + // Use the default type. + FieldOptions_JS_NORMAL FieldOptions_JSType = 0 + // Use JavaScript strings. + FieldOptions_JS_STRING FieldOptions_JSType = 1 + // Use JavaScript numbers. + FieldOptions_JS_NUMBER FieldOptions_JSType = 2 +) + +var FieldOptions_JSType_name = map[int32]string{ + 0: "JS_NORMAL", + 1: "JS_STRING", + 2: "JS_NUMBER", +} +var FieldOptions_JSType_value = map[string]int32{ + "JS_NORMAL": 0, + "JS_STRING": 1, + "JS_NUMBER": 2, +} + +func (x FieldOptions_JSType) Enum() *FieldOptions_JSType { + p := new(FieldOptions_JSType) + *p = x + return p +} +func (x FieldOptions_JSType) String() string { + return proto.EnumName(FieldOptions_JSType_name, int32(x)) +} +func (x *FieldOptions_JSType) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(FieldOptions_JSType_value, data, "FieldOptions_JSType") + if err != nil { + return err + } + *x = FieldOptions_JSType(value) + return nil +} +func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{12, 1} } + +// Is this method side-effect-free (or safe in HTTP parlance), or idempotent, +// or neither? HTTP based RPC implementation may choose GET verb for safe +// methods, and PUT verb for idempotent methods instead of the default POST. 
+type MethodOptions_IdempotencyLevel int32 + +const ( + MethodOptions_IDEMPOTENCY_UNKNOWN MethodOptions_IdempotencyLevel = 0 + MethodOptions_NO_SIDE_EFFECTS MethodOptions_IdempotencyLevel = 1 + MethodOptions_IDEMPOTENT MethodOptions_IdempotencyLevel = 2 +) + +var MethodOptions_IdempotencyLevel_name = map[int32]string{ + 0: "IDEMPOTENCY_UNKNOWN", + 1: "NO_SIDE_EFFECTS", + 2: "IDEMPOTENT", +} +var MethodOptions_IdempotencyLevel_value = map[string]int32{ + "IDEMPOTENCY_UNKNOWN": 0, + "NO_SIDE_EFFECTS": 1, + "IDEMPOTENT": 2, +} + +func (x MethodOptions_IdempotencyLevel) Enum() *MethodOptions_IdempotencyLevel { + p := new(MethodOptions_IdempotencyLevel) + *p = x + return p +} +func (x MethodOptions_IdempotencyLevel) String() string { + return proto.EnumName(MethodOptions_IdempotencyLevel_name, int32(x)) +} +func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(MethodOptions_IdempotencyLevel_value, data, "MethodOptions_IdempotencyLevel") + if err != nil { + return err + } + *x = MethodOptions_IdempotencyLevel(value) + return nil +} +func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{17, 0} +} + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +type FileDescriptorSet struct { + File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FileDescriptorSet) Reset() { *m = FileDescriptorSet{} } +func (m *FileDescriptorSet) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorSet) ProtoMessage() {} +func (*FileDescriptorSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *FileDescriptorSet) GetFile() []*FileDescriptorProto { + if m != nil { + return m.File + } + return nil +} + +// Describes a complete .proto file. +type FileDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Package *string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"` + // Names of files imported by this file. + Dependency []string `protobuf:"bytes,3,rep,name=dependency" json:"dependency,omitempty"` + // Indexes of the public imported files in the dependency list above. + PublicDependency []int32 `protobuf:"varint,10,rep,name=public_dependency,json=publicDependency" json:"public_dependency,omitempty"` + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + WeakDependency []int32 `protobuf:"varint,11,rep,name=weak_dependency,json=weakDependency" json:"weak_dependency,omitempty"` + // All top-level definitions in this file. + MessageType []*DescriptorProto `protobuf:"bytes,4,rep,name=message_type,json=messageType" json:"message_type,omitempty"` + EnumType []*EnumDescriptorProto `protobuf:"bytes,5,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` + Service []*ServiceDescriptorProto `protobuf:"bytes,6,rep,name=service" json:"service,omitempty"` + Extension []*FieldDescriptorProto `protobuf:"bytes,7,rep,name=extension" json:"extension,omitempty"` + Options *FileOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. 
+ SourceCodeInfo *SourceCodeInfo `protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"` + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FileDescriptorProto) Reset() { *m = FileDescriptorProto{} } +func (m *FileDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorProto) ProtoMessage() {} +func (*FileDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *FileDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *FileDescriptorProto) GetPackage() string { + if m != nil && m.Package != nil { + return *m.Package + } + return "" +} + +func (m *FileDescriptorProto) GetDependency() []string { + if m != nil { + return m.Dependency + } + return nil +} + +func (m *FileDescriptorProto) GetPublicDependency() []int32 { + if m != nil { + return m.PublicDependency + } + return nil +} + +func (m *FileDescriptorProto) GetWeakDependency() []int32 { + if m != nil { + return m.WeakDependency + } + return nil +} + +func (m *FileDescriptorProto) GetMessageType() []*DescriptorProto { + if m != nil { + return m.MessageType + } + return nil +} + +func (m *FileDescriptorProto) GetEnumType() []*EnumDescriptorProto { + if m != nil { + return m.EnumType + } + return nil +} + +func (m *FileDescriptorProto) GetService() []*ServiceDescriptorProto { + if m != nil { + return m.Service + } + return nil +} + +func (m *FileDescriptorProto) GetExtension() []*FieldDescriptorProto { + if m != nil { + return m.Extension + } + return nil +} + +func (m *FileDescriptorProto) GetOptions() *FileOptions { + if m != nil { + return m.Options + } + return nil +} + +func (m *FileDescriptorProto) GetSourceCodeInfo() *SourceCodeInfo { + if m != nil { + return m.SourceCodeInfo + } + return nil +} + +func (m *FileDescriptorProto) GetSyntax() string { + if m != nil && m.Syntax != nil { + return *m.Syntax + } + return "" +} + +// Describes a message type. +type DescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Field []*FieldDescriptorProto `protobuf:"bytes,2,rep,name=field" json:"field,omitempty"` + Extension []*FieldDescriptorProto `protobuf:"bytes,6,rep,name=extension" json:"extension,omitempty"` + NestedType []*DescriptorProto `protobuf:"bytes,3,rep,name=nested_type,json=nestedType" json:"nested_type,omitempty"` + EnumType []*EnumDescriptorProto `protobuf:"bytes,4,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` + ExtensionRange []*DescriptorProto_ExtensionRange `protobuf:"bytes,5,rep,name=extension_range,json=extensionRange" json:"extension_range,omitempty"` + OneofDecl []*OneofDescriptorProto `protobuf:"bytes,8,rep,name=oneof_decl,json=oneofDecl" json:"oneof_decl,omitempty"` + Options *MessageOptions `protobuf:"bytes,7,opt,name=options" json:"options,omitempty"` + ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. 
+ ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DescriptorProto) Reset() { *m = DescriptorProto{} } +func (m *DescriptorProto) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto) ProtoMessage() {} +func (*DescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *DescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *DescriptorProto) GetField() []*FieldDescriptorProto { + if m != nil { + return m.Field + } + return nil +} + +func (m *DescriptorProto) GetExtension() []*FieldDescriptorProto { + if m != nil { + return m.Extension + } + return nil +} + +func (m *DescriptorProto) GetNestedType() []*DescriptorProto { + if m != nil { + return m.NestedType + } + return nil +} + +func (m *DescriptorProto) GetEnumType() []*EnumDescriptorProto { + if m != nil { + return m.EnumType + } + return nil +} + +func (m *DescriptorProto) GetExtensionRange() []*DescriptorProto_ExtensionRange { + if m != nil { + return m.ExtensionRange + } + return nil +} + +func (m *DescriptorProto) GetOneofDecl() []*OneofDescriptorProto { + if m != nil { + return m.OneofDecl + } + return nil +} + +func (m *DescriptorProto) GetOptions() *MessageOptions { + if m != nil { + return m.Options + } + return nil +} + +func (m *DescriptorProto) GetReservedRange() []*DescriptorProto_ReservedRange { + if m != nil { + return m.ReservedRange + } + return nil +} + +func (m *DescriptorProto) GetReservedName() []string { + if m != nil { + return m.ReservedName + } + return nil +} + +type DescriptorProto_ExtensionRange struct { + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + Options *ExtensionRangeOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DescriptorProto_ExtensionRange) Reset() { *m = DescriptorProto_ExtensionRange{} } +func (m *DescriptorProto_ExtensionRange) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto_ExtensionRange) ProtoMessage() {} +func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{2, 0} +} + +func (m *DescriptorProto_ExtensionRange) GetStart() int32 { + if m != nil && m.Start != nil { + return *m.Start + } + return 0 +} + +func (m *DescriptorProto_ExtensionRange) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + +func (m *DescriptorProto_ExtensionRange) GetOptions() *ExtensionRangeOptions { + if m != nil { + return m.Options + } + return nil +} + +// Range of reserved tag numbers. Reserved tag numbers may not be used by +// fields or extension ranges in the same message. Reserved ranges may +// not overlap. 
+type DescriptorProto_ReservedRange struct { + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DescriptorProto_ReservedRange) Reset() { *m = DescriptorProto_ReservedRange{} } +func (m *DescriptorProto_ReservedRange) String() string { return proto.CompactTextString(m) } +func (*DescriptorProto_ReservedRange) ProtoMessage() {} +func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{2, 1} +} + +func (m *DescriptorProto_ReservedRange) GetStart() int32 { + if m != nil && m.Start != nil { + return *m.Start + } + return 0 +} + +func (m *DescriptorProto_ReservedRange) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + +type ExtensionRangeOptions struct { + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ExtensionRangeOptions) Reset() { *m = ExtensionRangeOptions{} } +func (m *ExtensionRangeOptions) String() string { return proto.CompactTextString(m) } +func (*ExtensionRangeOptions) ProtoMessage() {} +func (*ExtensionRangeOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +var extRange_ExtensionRangeOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*ExtensionRangeOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ExtensionRangeOptions +} + +func (m *ExtensionRangeOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +// Describes a field within a message. +type FieldDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,3,opt,name=number" json:"number,omitempty"` + Label *FieldDescriptorProto_Label `protobuf:"varint,4,opt,name=label,enum=google.protobuf.FieldDescriptorProto_Label" json:"label,omitempty"` + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + Type *FieldDescriptorProto_Type `protobuf:"varint,5,opt,name=type,enum=google.protobuf.FieldDescriptorProto_Type" json:"type,omitempty"` + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + TypeName *string `protobuf:"bytes,6,opt,name=type_name,json=typeName" json:"type_name,omitempty"` + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + Extendee *string `protobuf:"bytes,2,opt,name=extendee" json:"extendee,omitempty"` + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? 
+ DefaultValue *string `protobuf:"bytes,7,opt,name=default_value,json=defaultValue" json:"default_value,omitempty"` + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + OneofIndex *int32 `protobuf:"varint,9,opt,name=oneof_index,json=oneofIndex" json:"oneof_index,omitempty"` + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` + Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FieldDescriptorProto) Reset() { *m = FieldDescriptorProto{} } +func (m *FieldDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*FieldDescriptorProto) ProtoMessage() {} +func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *FieldDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *FieldDescriptorProto) GetNumber() int32 { + if m != nil && m.Number != nil { + return *m.Number + } + return 0 +} + +func (m *FieldDescriptorProto) GetLabel() FieldDescriptorProto_Label { + if m != nil && m.Label != nil { + return *m.Label + } + return FieldDescriptorProto_LABEL_OPTIONAL +} + +func (m *FieldDescriptorProto) GetType() FieldDescriptorProto_Type { + if m != nil && m.Type != nil { + return *m.Type + } + return FieldDescriptorProto_TYPE_DOUBLE +} + +func (m *FieldDescriptorProto) GetTypeName() string { + if m != nil && m.TypeName != nil { + return *m.TypeName + } + return "" +} + +func (m *FieldDescriptorProto) GetExtendee() string { + if m != nil && m.Extendee != nil { + return *m.Extendee + } + return "" +} + +func (m *FieldDescriptorProto) GetDefaultValue() string { + if m != nil && m.DefaultValue != nil { + return *m.DefaultValue + } + return "" +} + +func (m *FieldDescriptorProto) GetOneofIndex() int32 { + if m != nil && m.OneofIndex != nil { + return *m.OneofIndex + } + return 0 +} + +func (m *FieldDescriptorProto) GetJsonName() string { + if m != nil && m.JsonName != nil { + return *m.JsonName + } + return "" +} + +func (m *FieldDescriptorProto) GetOptions() *FieldOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a oneof. +type OneofDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OneofDescriptorProto) Reset() { *m = OneofDescriptorProto{} } +func (m *OneofDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*OneofDescriptorProto) ProtoMessage() {} +func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *OneofDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *OneofDescriptorProto) GetOptions() *OneofOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes an enum type. 
+type EnumDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` + Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumDescriptorProto) Reset() { *m = EnumDescriptorProto{} } +func (m *EnumDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumDescriptorProto) ProtoMessage() {} +func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *EnumDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *EnumDescriptorProto) GetValue() []*EnumValueDescriptorProto { + if m != nil { + return m.Value + } + return nil +} + +func (m *EnumDescriptorProto) GetOptions() *EnumOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a value within an enum. +type EnumValueDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` + Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumValueDescriptorProto) Reset() { *m = EnumValueDescriptorProto{} } +func (m *EnumValueDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*EnumValueDescriptorProto) ProtoMessage() {} +func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *EnumValueDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *EnumValueDescriptorProto) GetNumber() int32 { + if m != nil && m.Number != nil { + return *m.Number + } + return 0 +} + +func (m *EnumValueDescriptorProto) GetOptions() *EnumValueOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a service. +type ServiceDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` + Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ServiceDescriptorProto) Reset() { *m = ServiceDescriptorProto{} } +func (m *ServiceDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*ServiceDescriptorProto) ProtoMessage() {} +func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *ServiceDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *ServiceDescriptorProto) GetMethod() []*MethodDescriptorProto { + if m != nil { + return m.Method + } + return nil +} + +func (m *ServiceDescriptorProto) GetOptions() *ServiceOptions { + if m != nil { + return m.Options + } + return nil +} + +// Describes a method of a service. +type MethodDescriptorProto struct { + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. 
+ InputType *string `protobuf:"bytes,2,opt,name=input_type,json=inputType" json:"input_type,omitempty"` + OutputType *string `protobuf:"bytes,3,opt,name=output_type,json=outputType" json:"output_type,omitempty"` + Options *MethodOptions `protobuf:"bytes,4,opt,name=options" json:"options,omitempty"` + // Identifies if client streams multiple client messages + ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"` + // Identifies if server streams multiple server messages + ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MethodDescriptorProto) Reset() { *m = MethodDescriptorProto{} } +func (m *MethodDescriptorProto) String() string { return proto.CompactTextString(m) } +func (*MethodDescriptorProto) ProtoMessage() {} +func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +const Default_MethodDescriptorProto_ClientStreaming bool = false +const Default_MethodDescriptorProto_ServerStreaming bool = false + +func (m *MethodDescriptorProto) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MethodDescriptorProto) GetInputType() string { + if m != nil && m.InputType != nil { + return *m.InputType + } + return "" +} + +func (m *MethodDescriptorProto) GetOutputType() string { + if m != nil && m.OutputType != nil { + return *m.OutputType + } + return "" +} + +func (m *MethodDescriptorProto) GetOptions() *MethodOptions { + if m != nil { + return m.Options + } + return nil +} + +func (m *MethodDescriptorProto) GetClientStreaming() bool { + if m != nil && m.ClientStreaming != nil { + return *m.ClientStreaming + } + return Default_MethodDescriptorProto_ClientStreaming +} + +func (m *MethodDescriptorProto) GetServerStreaming() bool { + if m != nil && m.ServerStreaming != nil { + return *m.ServerStreaming + } + return Default_MethodDescriptorProto_ServerStreaming +} + +type FileOptions struct { + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + JavaPackage *string `protobuf:"bytes,1,opt,name=java_package,json=javaPackage" json:"java_package,omitempty"` + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + JavaOuterClassname *string `protobuf:"bytes,8,opt,name=java_outer_classname,json=javaOuterClassname" json:"java_outer_classname,omitempty"` + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"` + // This option does nothing. 
+ JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + JavaStringCheckUtf8 *bool `protobuf:"varint,27,opt,name=java_string_check_utf8,json=javaStringCheckUtf8,def=0" json:"java_string_check_utf8,omitempty"` + OptimizeFor *FileOptions_OptimizeMode `protobuf:"varint,9,opt,name=optimize_for,json=optimizeFor,enum=google.protobuf.FileOptions_OptimizeMode,def=1" json:"optimize_for,omitempty"` + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + GoPackage *string `protobuf:"bytes,11,opt,name=go_package,json=goPackage" json:"go_package,omitempty"` + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + CcGenericServices *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"` + JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"` + PyGenericServices *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"` + PhpGenericServices *bool `protobuf:"varint,42,opt,name=php_generic_services,json=phpGenericServices,def=0" json:"php_generic_services,omitempty"` + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + Deprecated *bool `protobuf:"varint,23,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + CcEnableArenas *bool `protobuf:"varint,31,opt,name=cc_enable_arenas,json=ccEnableArenas,def=0" json:"cc_enable_arenas,omitempty"` + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + ObjcClassPrefix *string `protobuf:"bytes,36,opt,name=objc_class_prefix,json=objcClassPrefix" json:"objc_class_prefix,omitempty"` + // Namespace for generated classes; defaults to the package. 
+ CsharpNamespace *string `protobuf:"bytes,37,opt,name=csharp_namespace,json=csharpNamespace" json:"csharp_namespace,omitempty"` + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + SwiftPrefix *string `protobuf:"bytes,39,opt,name=swift_prefix,json=swiftPrefix" json:"swift_prefix,omitempty"` + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + PhpClassPrefix *string `protobuf:"bytes,40,opt,name=php_class_prefix,json=phpClassPrefix" json:"php_class_prefix,omitempty"` + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + PhpNamespace *string `protobuf:"bytes,41,opt,name=php_namespace,json=phpNamespace" json:"php_namespace,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FileOptions) Reset() { *m = FileOptions{} } +func (m *FileOptions) String() string { return proto.CompactTextString(m) } +func (*FileOptions) ProtoMessage() {} +func (*FileOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +var extRange_FileOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_FileOptions +} + +const Default_FileOptions_JavaMultipleFiles bool = false +const Default_FileOptions_JavaStringCheckUtf8 bool = false +const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPEED +const Default_FileOptions_CcGenericServices bool = false +const Default_FileOptions_JavaGenericServices bool = false +const Default_FileOptions_PyGenericServices bool = false +const Default_FileOptions_PhpGenericServices bool = false +const Default_FileOptions_Deprecated bool = false +const Default_FileOptions_CcEnableArenas bool = false + +func (m *FileOptions) GetJavaPackage() string { + if m != nil && m.JavaPackage != nil { + return *m.JavaPackage + } + return "" +} + +func (m *FileOptions) GetJavaOuterClassname() string { + if m != nil && m.JavaOuterClassname != nil { + return *m.JavaOuterClassname + } + return "" +} + +func (m *FileOptions) GetJavaMultipleFiles() bool { + if m != nil && m.JavaMultipleFiles != nil { + return *m.JavaMultipleFiles + } + return Default_FileOptions_JavaMultipleFiles +} + +func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool { + if m != nil && m.JavaGenerateEqualsAndHash != nil { + return *m.JavaGenerateEqualsAndHash + } + return false +} + +func (m *FileOptions) GetJavaStringCheckUtf8() bool { + if m != nil && m.JavaStringCheckUtf8 != nil { + return *m.JavaStringCheckUtf8 + } + return Default_FileOptions_JavaStringCheckUtf8 +} + +func (m *FileOptions) GetOptimizeFor() FileOptions_OptimizeMode { + if m != nil && m.OptimizeFor != nil { + return *m.OptimizeFor + } + return Default_FileOptions_OptimizeFor +} + +func (m *FileOptions) GetGoPackage() string { + if m != nil && m.GoPackage != nil { + return *m.GoPackage + } + return "" +} + +func (m *FileOptions) 
GetCcGenericServices() bool { + if m != nil && m.CcGenericServices != nil { + return *m.CcGenericServices + } + return Default_FileOptions_CcGenericServices +} + +func (m *FileOptions) GetJavaGenericServices() bool { + if m != nil && m.JavaGenericServices != nil { + return *m.JavaGenericServices + } + return Default_FileOptions_JavaGenericServices +} + +func (m *FileOptions) GetPyGenericServices() bool { + if m != nil && m.PyGenericServices != nil { + return *m.PyGenericServices + } + return Default_FileOptions_PyGenericServices +} + +func (m *FileOptions) GetPhpGenericServices() bool { + if m != nil && m.PhpGenericServices != nil { + return *m.PhpGenericServices + } + return Default_FileOptions_PhpGenericServices +} + +func (m *FileOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_FileOptions_Deprecated +} + +func (m *FileOptions) GetCcEnableArenas() bool { + if m != nil && m.CcEnableArenas != nil { + return *m.CcEnableArenas + } + return Default_FileOptions_CcEnableArenas +} + +func (m *FileOptions) GetObjcClassPrefix() string { + if m != nil && m.ObjcClassPrefix != nil { + return *m.ObjcClassPrefix + } + return "" +} + +func (m *FileOptions) GetCsharpNamespace() string { + if m != nil && m.CsharpNamespace != nil { + return *m.CsharpNamespace + } + return "" +} + +func (m *FileOptions) GetSwiftPrefix() string { + if m != nil && m.SwiftPrefix != nil { + return *m.SwiftPrefix + } + return "" +} + +func (m *FileOptions) GetPhpClassPrefix() string { + if m != nil && m.PhpClassPrefix != nil { + return *m.PhpClassPrefix + } + return "" +} + +func (m *FileOptions) GetPhpNamespace() string { + if m != nil && m.PhpNamespace != nil { + return *m.PhpNamespace + } + return "" +} + +func (m *FileOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type MessageOptions struct { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + MessageSetWireFormat *bool `protobuf:"varint,1,opt,name=message_set_wire_format,json=messageSetWireFormat,def=0" json:"message_set_wire_format,omitempty"` + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + NoStandardDescriptorAccessor *bool `protobuf:"varint,2,opt,name=no_standard_descriptor_accessor,json=noStandardDescriptorAccessor,def=0" json:"no_standard_descriptor_accessor,omitempty"` + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. 
+ Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementions still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MessageOptions) Reset() { *m = MessageOptions{} } +func (m *MessageOptions) String() string { return proto.CompactTextString(m) } +func (*MessageOptions) ProtoMessage() {} +func (*MessageOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +var extRange_MessageOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*MessageOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MessageOptions +} + +const Default_MessageOptions_MessageSetWireFormat bool = false +const Default_MessageOptions_NoStandardDescriptorAccessor bool = false +const Default_MessageOptions_Deprecated bool = false + +func (m *MessageOptions) GetMessageSetWireFormat() bool { + if m != nil && m.MessageSetWireFormat != nil { + return *m.MessageSetWireFormat + } + return Default_MessageOptions_MessageSetWireFormat +} + +func (m *MessageOptions) GetNoStandardDescriptorAccessor() bool { + if m != nil && m.NoStandardDescriptorAccessor != nil { + return *m.NoStandardDescriptorAccessor + } + return Default_MessageOptions_NoStandardDescriptorAccessor +} + +func (m *MessageOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_MessageOptions_Deprecated +} + +func (m *MessageOptions) GetMapEntry() bool { + if m != nil && m.MapEntry != nil { + return *m.MapEntry + } + return false +} + +func (m *MessageOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type FieldOptions struct { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + Ctype *FieldOptions_CType `protobuf:"varint,1,opt,name=ctype,enum=google.protobuf.FieldOptions_CType,def=0" json:"ctype,omitempty"` + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. 
Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"` + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"` + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + Lazy *bool `protobuf:"varint,5,opt,name=lazy,def=0" json:"lazy,omitempty"` + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // For Google-internal migration only. Do not use. + Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *FieldOptions) Reset() { *m = FieldOptions{} } +func (m *FieldOptions) String() string { return proto.CompactTextString(m) } +func (*FieldOptions) ProtoMessage() {} +func (*FieldOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +var extRange_FieldOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*FieldOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_FieldOptions +} + +const Default_FieldOptions_Ctype FieldOptions_CType = FieldOptions_STRING +const Default_FieldOptions_Jstype FieldOptions_JSType = FieldOptions_JS_NORMAL +const Default_FieldOptions_Lazy bool = false +const Default_FieldOptions_Deprecated bool = false +const Default_FieldOptions_Weak bool = false + +func (m *FieldOptions) GetCtype() FieldOptions_CType { + if m != nil && m.Ctype != nil { + return *m.Ctype + } + return Default_FieldOptions_Ctype +} + +func (m *FieldOptions) GetPacked() bool { + if m != nil && m.Packed != nil { + return *m.Packed + } + return false +} + +func (m *FieldOptions) GetJstype() FieldOptions_JSType { + if m != nil && m.Jstype != nil { + return *m.Jstype + } + return Default_FieldOptions_Jstype +} + +func (m *FieldOptions) GetLazy() bool { + if m != nil && m.Lazy != nil { + return *m.Lazy + } + return Default_FieldOptions_Lazy +} + +func (m *FieldOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_FieldOptions_Deprecated +} + +func (m *FieldOptions) GetWeak() bool { + if m != nil && m.Weak != nil { + return *m.Weak + } + return Default_FieldOptions_Weak +} + +func (m *FieldOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type OneofOptions struct { + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *OneofOptions) Reset() { *m = OneofOptions{} } +func (m *OneofOptions) String() string { return proto.CompactTextString(m) } +func (*OneofOptions) ProtoMessage() {} +func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } + +var extRange_OneofOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_OneofOptions +} + +func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type EnumOptions struct { + // Set this option to true to allow mapping different tag names to the same + // value. + AllowAlias *bool `protobuf:"varint,2,opt,name=allow_alias,json=allowAlias" json:"allow_alias,omitempty"` + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumOptions) Reset() { *m = EnumOptions{} } +func (m *EnumOptions) String() string { return proto.CompactTextString(m) } +func (*EnumOptions) ProtoMessage() {} +func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +var extRange_EnumOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*EnumOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_EnumOptions +} + +const Default_EnumOptions_Deprecated bool = false + +func (m *EnumOptions) GetAllowAlias() bool { + if m != nil && m.AllowAlias != nil { + return *m.AllowAlias + } + return false +} + +func (m *EnumOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_EnumOptions_Deprecated +} + +func (m *EnumOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type EnumValueOptions struct { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} } +func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) } +func (*EnumValueOptions) ProtoMessage() {} +func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +var extRange_EnumValueOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*EnumValueOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_EnumValueOptions +} + +const Default_EnumValueOptions_Deprecated bool = false + +func (m *EnumValueOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_EnumValueOptions_Deprecated +} + +func (m *EnumValueOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type ServiceOptions struct { + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ServiceOptions) Reset() { *m = ServiceOptions{} } +func (m *ServiceOptions) String() string { return proto.CompactTextString(m) } +func (*ServiceOptions) ProtoMessage() {} +func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +var extRange_ServiceOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*ServiceOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ServiceOptions +} + +const Default_ServiceOptions_Deprecated bool = false + +func (m *ServiceOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_ServiceOptions_Deprecated +} + +func (m *ServiceOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +type MethodOptions struct { + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MethodOptions) Reset() { *m = MethodOptions{} } +func (m *MethodOptions) String() string { return proto.CompactTextString(m) } +func (*MethodOptions) ProtoMessage() {} +func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +var extRange_MethodOptions = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_MethodOptions +} + +const Default_MethodOptions_Deprecated bool = false +const Default_MethodOptions_IdempotencyLevel MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN + +func (m *MethodOptions) GetDeprecated() bool { + if m != nil && m.Deprecated != nil { + return *m.Deprecated + } + return Default_MethodOptions_Deprecated +} + +func (m *MethodOptions) GetIdempotencyLevel() MethodOptions_IdempotencyLevel { + if m != nil && m.IdempotencyLevel != nil { + return *m.IdempotencyLevel + } + return Default_MethodOptions_IdempotencyLevel +} + +func (m *MethodOptions) GetUninterpretedOption() []*UninterpretedOption { + if m != nil { + return m.UninterpretedOption + } + return nil +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. 
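Before the UninterpretedOption definition below, it is worth noting the accessor pattern shared by every *Options type above: the generated Get* methods tolerate a nil receiver and fall back to the matching Default_* constant (or the type's zero value) when the optional field is unset. A minimal sketch of that behaviour, using only packages vendored by this change; the expected values in the comments follow from the defaults declared above:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	// A nil *MethodOptions is safe to query; every getter returns its default.
	var opts *descriptor.MethodOptions
	fmt.Println(opts.GetDeprecated())       // false (Default_MethodOptions_Deprecated)
	fmt.Println(opts.GetIdempotencyLevel()) // IDEMPOTENCY_UNKNOWN

	// Once a field is set via the proto pointer helpers, the getter returns
	// the stored value instead of the default.
	opts = &descriptor.MethodOptions{Deprecated: proto.Bool(true)}
	fmt.Println(opts.GetDeprecated()) // true
}

This nil-safety is what lets callers chain lookups such as file.GetOptions().GetGoPackage() without first checking whether the options message is present at all.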
+type UninterpretedOption struct { + Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"` + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` + PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` + NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` + DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} } +func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) } +func (*UninterpretedOption) ProtoMessage() {} +func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } + +func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart { + if m != nil { + return m.Name + } + return nil +} + +func (m *UninterpretedOption) GetIdentifierValue() string { + if m != nil && m.IdentifierValue != nil { + return *m.IdentifierValue + } + return "" +} + +func (m *UninterpretedOption) GetPositiveIntValue() uint64 { + if m != nil && m.PositiveIntValue != nil { + return *m.PositiveIntValue + } + return 0 +} + +func (m *UninterpretedOption) GetNegativeIntValue() int64 { + if m != nil && m.NegativeIntValue != nil { + return *m.NegativeIntValue + } + return 0 +} + +func (m *UninterpretedOption) GetDoubleValue() float64 { + if m != nil && m.DoubleValue != nil { + return *m.DoubleValue + } + return 0 +} + +func (m *UninterpretedOption) GetStringValue() []byte { + if m != nil { + return m.StringValue + } + return nil +} + +func (m *UninterpretedOption) GetAggregateValue() string { + if m != nil && m.AggregateValue != nil { + return *m.AggregateValue + } + return "" +} + +// The name of the uninterpreted option. Each string represents a segment in +// a dot-separated name. is_extension is true iff a segment represents an +// extension (denoted with parentheses in options specs in .proto files). +// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents +// "foo.(bar.baz).qux". 
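The NamePart message that follows is easiest to read with a concrete round trip. The sketch below builds the { ["foo", false], ["bar.baz", true], ["qux", false] } example from the comment above and joins it back into "foo.(bar.baz).qux". optionName is a hypothetical helper written for illustration, not part of the generated package:

package main

import (
	"fmt"
	"strings"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/protoc-gen-go/descriptor"
)

// optionName joins the name segments with dots, wrapping extension
// segments in parentheses the way the .proto options syntax does.
func optionName(o *descriptor.UninterpretedOption) string {
	var parts []string
	for _, p := range o.GetName() {
		if p.GetIsExtension() {
			parts = append(parts, "("+p.GetNamePart()+")")
		} else {
			parts = append(parts, p.GetNamePart())
		}
	}
	return strings.Join(parts, ".")
}

func main() {
	opt := &descriptor.UninterpretedOption{
		Name: []*descriptor.UninterpretedOption_NamePart{
			{NamePart: proto.String("foo"), IsExtension: proto.Bool(false)},
			{NamePart: proto.String("bar.baz"), IsExtension: proto.Bool(true)},
			{NamePart: proto.String("qux"), IsExtension: proto.Bool(false)},
		},
		IdentifierValue: proto.String("SPEED"),
	}
	fmt.Println(optionName(opt)) // foo.(bar.baz).qux
}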
+type UninterpretedOption_NamePart struct { + NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` + IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *UninterpretedOption_NamePart) Reset() { *m = UninterpretedOption_NamePart{} } +func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) } +func (*UninterpretedOption_NamePart) ProtoMessage() {} +func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{18, 0} +} + +func (m *UninterpretedOption_NamePart) GetNamePart() string { + if m != nil && m.NamePart != nil { + return *m.NamePart + } + return "" +} + +func (m *UninterpretedOption_NamePart) GetIsExtension() bool { + if m != nil && m.IsExtension != nil { + return *m.IsExtension + } + return false +} + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +type SourceCodeInfo struct { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendent. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. 
+ Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} } +func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo) ProtoMessage() {} +func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location { + if m != nil { + return m.Location + } + return nil +} + +type SourceCodeInfo_Location struct { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + Span []int32 `protobuf:"varint,2,rep,packed,name=span" json:"span,omitempty"` + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. 
Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + LeadingComments *string `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"` + TrailingComments *string `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"` + LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location{} } +func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) } +func (*SourceCodeInfo_Location) ProtoMessage() {} +func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19, 0} } + +func (m *SourceCodeInfo_Location) GetPath() []int32 { + if m != nil { + return m.Path + } + return nil +} + +func (m *SourceCodeInfo_Location) GetSpan() []int32 { + if m != nil { + return m.Span + } + return nil +} + +func (m *SourceCodeInfo_Location) GetLeadingComments() string { + if m != nil && m.LeadingComments != nil { + return *m.LeadingComments + } + return "" +} + +func (m *SourceCodeInfo_Location) GetTrailingComments() string { + if m != nil && m.TrailingComments != nil { + return *m.TrailingComments + } + return "" +} + +func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string { + if m != nil { + return m.LeadingDetachedComments + } + return nil +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +type GeneratedCodeInfo struct { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} } +func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) } +func (*GeneratedCodeInfo) ProtoMessage() {} +func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation { + if m != nil { + return m.Annotation + } + return nil +} + +type GeneratedCodeInfo_Annotation struct { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` + // Identifies the filesystem path to the original source .proto. + SourceFile *string `protobuf:"bytes,2,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"` + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + Begin *int32 `protobuf:"varint,3,opt,name=begin" json:"begin,omitempty"` + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). 
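Together with the End field declared just below, Begin identifies a half-open byte range [begin, end) in the generated file, so the annotated text has length end - begin. A short illustrative sketch of slicing that range out of generated source; generatedSrc and the offsets are made-up values, only the descriptor types are taken from this file:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/protoc-gen-go/descriptor"
)

func main() {
	// generatedSrc stands in for the contents of a generated .pb.go file.
	generatedSrc := []byte("package example // generated code ...")

	ann := &descriptor.GeneratedCodeInfo_Annotation{
		Path:       []int32{4, 0}, // first message_type in the source .proto
		SourceFile: proto.String("example.proto"),
		Begin:      proto.Int32(8),
		End:        proto.Int32(15),
	}

	// The annotated region is the half-open byte range [Begin, End).
	text := generatedSrc[ann.GetBegin():ann.GetEnd()]
	fmt.Printf("%q refers to %s at path %v\n", text, ann.GetSourceFile(), ann.GetPath())
	// Output: "example" refers to example.proto at path [4 0]
}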
+ End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GeneratedCodeInfo_Annotation) Reset() { *m = GeneratedCodeInfo_Annotation{} } +func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) } +func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} +func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{20, 0} +} + +func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 { + if m != nil { + return m.Path + } + return nil +} + +func (m *GeneratedCodeInfo_Annotation) GetSourceFile() string { + if m != nil && m.SourceFile != nil { + return *m.SourceFile + } + return "" +} + +func (m *GeneratedCodeInfo_Annotation) GetBegin() int32 { + if m != nil && m.Begin != nil { + return *m.Begin + } + return 0 +} + +func (m *GeneratedCodeInfo_Annotation) GetEnd() int32 { + if m != nil && m.End != nil { + return *m.End + } + return 0 +} + +func init() { + proto.RegisterType((*FileDescriptorSet)(nil), "google.protobuf.FileDescriptorSet") + proto.RegisterType((*FileDescriptorProto)(nil), "google.protobuf.FileDescriptorProto") + proto.RegisterType((*DescriptorProto)(nil), "google.protobuf.DescriptorProto") + proto.RegisterType((*DescriptorProto_ExtensionRange)(nil), "google.protobuf.DescriptorProto.ExtensionRange") + proto.RegisterType((*DescriptorProto_ReservedRange)(nil), "google.protobuf.DescriptorProto.ReservedRange") + proto.RegisterType((*ExtensionRangeOptions)(nil), "google.protobuf.ExtensionRangeOptions") + proto.RegisterType((*FieldDescriptorProto)(nil), "google.protobuf.FieldDescriptorProto") + proto.RegisterType((*OneofDescriptorProto)(nil), "google.protobuf.OneofDescriptorProto") + proto.RegisterType((*EnumDescriptorProto)(nil), "google.protobuf.EnumDescriptorProto") + proto.RegisterType((*EnumValueDescriptorProto)(nil), "google.protobuf.EnumValueDescriptorProto") + proto.RegisterType((*ServiceDescriptorProto)(nil), "google.protobuf.ServiceDescriptorProto") + proto.RegisterType((*MethodDescriptorProto)(nil), "google.protobuf.MethodDescriptorProto") + proto.RegisterType((*FileOptions)(nil), "google.protobuf.FileOptions") + proto.RegisterType((*MessageOptions)(nil), "google.protobuf.MessageOptions") + proto.RegisterType((*FieldOptions)(nil), "google.protobuf.FieldOptions") + proto.RegisterType((*OneofOptions)(nil), "google.protobuf.OneofOptions") + proto.RegisterType((*EnumOptions)(nil), "google.protobuf.EnumOptions") + proto.RegisterType((*EnumValueOptions)(nil), "google.protobuf.EnumValueOptions") + proto.RegisterType((*ServiceOptions)(nil), "google.protobuf.ServiceOptions") + proto.RegisterType((*MethodOptions)(nil), "google.protobuf.MethodOptions") + proto.RegisterType((*UninterpretedOption)(nil), "google.protobuf.UninterpretedOption") + proto.RegisterType((*UninterpretedOption_NamePart)(nil), "google.protobuf.UninterpretedOption.NamePart") + proto.RegisterType((*SourceCodeInfo)(nil), "google.protobuf.SourceCodeInfo") + proto.RegisterType((*SourceCodeInfo_Location)(nil), "google.protobuf.SourceCodeInfo.Location") + proto.RegisterType((*GeneratedCodeInfo)(nil), "google.protobuf.GeneratedCodeInfo") + proto.RegisterType((*GeneratedCodeInfo_Annotation)(nil), "google.protobuf.GeneratedCodeInfo.Annotation") + proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value) + proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, 
FieldDescriptorProto_Label_value) + proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value) + proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value) + proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value) + proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value) +} + +func init() { proto.RegisterFile("google/protobuf/descriptor.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 2519 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xdd, 0x6e, 0x1b, 0xc7, + 0x15, 0x0e, 0x7f, 0x45, 0x1e, 0x52, 0xd4, 0x68, 0xa4, 0xd8, 0x6b, 0xe5, 0xc7, 0x32, 0xf3, 0x63, + 0xd9, 0x69, 0xa8, 0x40, 0xb1, 0x1d, 0x47, 0x29, 0xd2, 0x52, 0xe4, 0x5a, 0xa1, 0x4a, 0x91, 0xec, + 0x92, 0x6a, 0x7e, 0x6e, 0x16, 0xa3, 0xdd, 0x21, 0xb9, 0xf6, 0x72, 0x77, 0xb3, 0xbb, 0xb4, 0xad, + 0xa0, 0x17, 0x06, 0x7a, 0x55, 0xa0, 0x0f, 0x50, 0x14, 0x45, 0x2f, 0x72, 0x13, 0xa0, 0x0f, 0x50, + 0x20, 0x77, 0x7d, 0x82, 0x02, 0x79, 0x83, 0xa2, 0x28, 0xd0, 0x3e, 0x46, 0x31, 0x33, 0xbb, 0xcb, + 0x5d, 0xfe, 0xc4, 0x6a, 0x80, 0x38, 0x57, 0xe4, 0x7c, 0xe7, 0x3b, 0x67, 0xce, 0x9c, 0x39, 0x33, + 0x73, 0x66, 0x16, 0x76, 0x47, 0xb6, 0x3d, 0x32, 0xe9, 0xbe, 0xe3, 0xda, 0xbe, 0x7d, 0x3e, 0x1d, + 0xee, 0xeb, 0xd4, 0xd3, 0x5c, 0xc3, 0xf1, 0x6d, 0xb7, 0xc6, 0x31, 0xbc, 0x21, 0x18, 0xb5, 0x90, + 0x51, 0x3d, 0x85, 0xcd, 0x07, 0x86, 0x49, 0x9b, 0x11, 0xb1, 0x4f, 0x7d, 0x7c, 0x1f, 0xb2, 0x43, + 0xc3, 0xa4, 0x52, 0x6a, 0x37, 0xb3, 0x57, 0x3a, 0x78, 0xb3, 0x36, 0xa7, 0x54, 0x4b, 0x6a, 0xf4, + 0x18, 0xac, 0x70, 0x8d, 0xea, 0xbf, 0xb3, 0xb0, 0xb5, 0x44, 0x8a, 0x31, 0x64, 0x2d, 0x32, 0x61, + 0x16, 0x53, 0x7b, 0x45, 0x85, 0xff, 0xc7, 0x12, 0xac, 0x39, 0x44, 0x7b, 0x44, 0x46, 0x54, 0x4a, + 0x73, 0x38, 0x6c, 0xe2, 0xd7, 0x01, 0x74, 0xea, 0x50, 0x4b, 0xa7, 0x96, 0x76, 0x21, 0x65, 0x76, + 0x33, 0x7b, 0x45, 0x25, 0x86, 0xe0, 0x77, 0x60, 0xd3, 0x99, 0x9e, 0x9b, 0x86, 0xa6, 0xc6, 0x68, + 0xb0, 0x9b, 0xd9, 0xcb, 0x29, 0x48, 0x08, 0x9a, 0x33, 0xf2, 0x4d, 0xd8, 0x78, 0x42, 0xc9, 0xa3, + 0x38, 0xb5, 0xc4, 0xa9, 0x15, 0x06, 0xc7, 0x88, 0x0d, 0x28, 0x4f, 0xa8, 0xe7, 0x91, 0x11, 0x55, + 0xfd, 0x0b, 0x87, 0x4a, 0x59, 0x3e, 0xfa, 0xdd, 0x85, 0xd1, 0xcf, 0x8f, 0xbc, 0x14, 0x68, 0x0d, + 0x2e, 0x1c, 0x8a, 0xeb, 0x50, 0xa4, 0xd6, 0x74, 0x22, 0x2c, 0xe4, 0x56, 0xc4, 0x4f, 0xb6, 0xa6, + 0x93, 0x79, 0x2b, 0x05, 0xa6, 0x16, 0x98, 0x58, 0xf3, 0xa8, 0xfb, 0xd8, 0xd0, 0xa8, 0x94, 0xe7, + 0x06, 0x6e, 0x2e, 0x18, 0xe8, 0x0b, 0xf9, 0xbc, 0x8d, 0x50, 0x0f, 0x37, 0xa0, 0x48, 0x9f, 0xfa, + 0xd4, 0xf2, 0x0c, 0xdb, 0x92, 0xd6, 0xb8, 0x91, 0xb7, 0x96, 0xcc, 0x22, 0x35, 0xf5, 0x79, 0x13, + 0x33, 0x3d, 0x7c, 0x0f, 0xd6, 0x6c, 0xc7, 0x37, 0x6c, 0xcb, 0x93, 0x0a, 0xbb, 0xa9, 0xbd, 0xd2, + 0xc1, 0xab, 0x4b, 0x13, 0xa1, 0x2b, 0x38, 0x4a, 0x48, 0xc6, 0x2d, 0x40, 0x9e, 0x3d, 0x75, 0x35, + 0xaa, 0x6a, 0xb6, 0x4e, 0x55, 0xc3, 0x1a, 0xda, 0x52, 0x91, 0x1b, 0xb8, 0xbe, 0x38, 0x10, 0x4e, + 0x6c, 0xd8, 0x3a, 0x6d, 0x59, 0x43, 0x5b, 0xa9, 0x78, 0x89, 0x36, 0xbe, 0x02, 0x79, 0xef, 0xc2, + 0xf2, 0xc9, 0x53, 0xa9, 0xcc, 0x33, 0x24, 0x68, 0x55, 0xbf, 0xcd, 0xc3, 0xc6, 0x65, 0x52, 0xec, + 0x23, 0xc8, 0x0d, 0xd9, 0x28, 0xa5, 0xf4, 0xff, 0x13, 0x03, 0xa1, 0x93, 0x0c, 0x62, 0xfe, 0x07, + 0x06, 0xb1, 0x0e, 0x25, 0x8b, 0x7a, 0x3e, 0xd5, 0x45, 0x46, 0x64, 0x2e, 0x99, 0x53, 0x20, 0x94, + 
0x16, 0x53, 0x2a, 0xfb, 0x83, 0x52, 0xea, 0x33, 0xd8, 0x88, 0x5c, 0x52, 0x5d, 0x62, 0x8d, 0xc2, + 0xdc, 0xdc, 0x7f, 0x9e, 0x27, 0x35, 0x39, 0xd4, 0x53, 0x98, 0x9a, 0x52, 0xa1, 0x89, 0x36, 0x6e, + 0x02, 0xd8, 0x16, 0xb5, 0x87, 0xaa, 0x4e, 0x35, 0x53, 0x2a, 0xac, 0x88, 0x52, 0x97, 0x51, 0x16, + 0xa2, 0x64, 0x0b, 0x54, 0x33, 0xf1, 0x87, 0xb3, 0x54, 0x5b, 0x5b, 0x91, 0x29, 0xa7, 0x62, 0x91, + 0x2d, 0x64, 0xdb, 0x19, 0x54, 0x5c, 0xca, 0xf2, 0x9e, 0xea, 0xc1, 0xc8, 0x8a, 0xdc, 0x89, 0xda, + 0x73, 0x47, 0xa6, 0x04, 0x6a, 0x62, 0x60, 0xeb, 0x6e, 0xbc, 0x89, 0xdf, 0x80, 0x08, 0x50, 0x79, + 0x5a, 0x01, 0xdf, 0x85, 0xca, 0x21, 0xd8, 0x21, 0x13, 0xba, 0xf3, 0x15, 0x54, 0x92, 0xe1, 0xc1, + 0xdb, 0x90, 0xf3, 0x7c, 0xe2, 0xfa, 0x3c, 0x0b, 0x73, 0x8a, 0x68, 0x60, 0x04, 0x19, 0x6a, 0xe9, + 0x7c, 0x97, 0xcb, 0x29, 0xec, 0x2f, 0xfe, 0xe5, 0x6c, 0xc0, 0x19, 0x3e, 0xe0, 0xb7, 0x17, 0x67, + 0x34, 0x61, 0x79, 0x7e, 0xdc, 0x3b, 0x1f, 0xc0, 0x7a, 0x62, 0x00, 0x97, 0xed, 0xba, 0xfa, 0x5b, + 0x78, 0x79, 0xa9, 0x69, 0xfc, 0x19, 0x6c, 0x4f, 0x2d, 0xc3, 0xf2, 0xa9, 0xeb, 0xb8, 0x94, 0x65, + 0xac, 0xe8, 0x4a, 0xfa, 0xcf, 0xda, 0x8a, 0x9c, 0x3b, 0x8b, 0xb3, 0x85, 0x15, 0x65, 0x6b, 0xba, + 0x08, 0xde, 0x2e, 0x16, 0xfe, 0xbb, 0x86, 0x9e, 0x3d, 0x7b, 0xf6, 0x2c, 0x5d, 0xfd, 0x63, 0x1e, + 0xb6, 0x97, 0xad, 0x99, 0xa5, 0xcb, 0xf7, 0x0a, 0xe4, 0xad, 0xe9, 0xe4, 0x9c, 0xba, 0x3c, 0x48, + 0x39, 0x25, 0x68, 0xe1, 0x3a, 0xe4, 0x4c, 0x72, 0x4e, 0x4d, 0x29, 0xbb, 0x9b, 0xda, 0xab, 0x1c, + 0xbc, 0x73, 0xa9, 0x55, 0x59, 0x6b, 0x33, 0x15, 0x45, 0x68, 0xe2, 0x8f, 0x21, 0x1b, 0x6c, 0xd1, + 0xcc, 0xc2, 0xed, 0xcb, 0x59, 0x60, 0x6b, 0x49, 0xe1, 0x7a, 0xf8, 0x15, 0x28, 0xb2, 0x5f, 0x91, + 0x1b, 0x79, 0xee, 0x73, 0x81, 0x01, 0x2c, 0x2f, 0xf0, 0x0e, 0x14, 0xf8, 0x32, 0xd1, 0x69, 0x78, + 0xb4, 0x45, 0x6d, 0x96, 0x58, 0x3a, 0x1d, 0x92, 0xa9, 0xe9, 0xab, 0x8f, 0x89, 0x39, 0xa5, 0x3c, + 0xe1, 0x8b, 0x4a, 0x39, 0x00, 0x7f, 0xc3, 0x30, 0x7c, 0x1d, 0x4a, 0x62, 0x55, 0x19, 0x96, 0x4e, + 0x9f, 0xf2, 0xdd, 0x33, 0xa7, 0x88, 0x85, 0xd6, 0x62, 0x08, 0xeb, 0xfe, 0xa1, 0x67, 0x5b, 0x61, + 0x6a, 0xf2, 0x2e, 0x18, 0xc0, 0xbb, 0xff, 0x60, 0x7e, 0xe3, 0x7e, 0x6d, 0xf9, 0xf0, 0xe6, 0x73, + 0xaa, 0xfa, 0xb7, 0x34, 0x64, 0xf9, 0x7e, 0xb1, 0x01, 0xa5, 0xc1, 0xe7, 0x3d, 0x59, 0x6d, 0x76, + 0xcf, 0x8e, 0xda, 0x32, 0x4a, 0xe1, 0x0a, 0x00, 0x07, 0x1e, 0xb4, 0xbb, 0xf5, 0x01, 0x4a, 0x47, + 0xed, 0x56, 0x67, 0x70, 0xef, 0x0e, 0xca, 0x44, 0x0a, 0x67, 0x02, 0xc8, 0xc6, 0x09, 0xef, 0x1f, + 0xa0, 0x1c, 0x46, 0x50, 0x16, 0x06, 0x5a, 0x9f, 0xc9, 0xcd, 0x7b, 0x77, 0x50, 0x3e, 0x89, 0xbc, + 0x7f, 0x80, 0xd6, 0xf0, 0x3a, 0x14, 0x39, 0x72, 0xd4, 0xed, 0xb6, 0x51, 0x21, 0xb2, 0xd9, 0x1f, + 0x28, 0xad, 0xce, 0x31, 0x2a, 0x46, 0x36, 0x8f, 0x95, 0xee, 0x59, 0x0f, 0x41, 0x64, 0xe1, 0x54, + 0xee, 0xf7, 0xeb, 0xc7, 0x32, 0x2a, 0x45, 0x8c, 0xa3, 0xcf, 0x07, 0x72, 0x1f, 0x95, 0x13, 0x6e, + 0xbd, 0x7f, 0x80, 0xd6, 0xa3, 0x2e, 0xe4, 0xce, 0xd9, 0x29, 0xaa, 0xe0, 0x4d, 0x58, 0x17, 0x5d, + 0x84, 0x4e, 0x6c, 0xcc, 0x41, 0xf7, 0xee, 0x20, 0x34, 0x73, 0x44, 0x58, 0xd9, 0x4c, 0x00, 0xf7, + 0xee, 0x20, 0x5c, 0x6d, 0x40, 0x8e, 0x67, 0x17, 0xc6, 0x50, 0x69, 0xd7, 0x8f, 0xe4, 0xb6, 0xda, + 0xed, 0x0d, 0x5a, 0xdd, 0x4e, 0xbd, 0x8d, 0x52, 0x33, 0x4c, 0x91, 0x7f, 0x7d, 0xd6, 0x52, 0xe4, + 0x26, 0x4a, 0xc7, 0xb1, 0x9e, 0x5c, 0x1f, 0xc8, 0x4d, 0x94, 0xa9, 0x6a, 0xb0, 0xbd, 0x6c, 0x9f, + 0x5c, 0xba, 0x32, 0x62, 0x53, 0x9c, 0x5e, 0x31, 0xc5, 0xdc, 0xd6, 0xc2, 0x14, 0x7f, 0x9d, 0x82, + 0xad, 0x25, 0x67, 0xc5, 0xd2, 0x4e, 0x7e, 0x01, 0x39, 0x91, 0xa2, 0xe2, 0xf4, 0xbc, 0xb5, 0xf4, + 0xd0, 0xe1, 0x09, 0xbb, 
0x70, 0x82, 0x72, 0xbd, 0x78, 0x05, 0x91, 0x59, 0x51, 0x41, 0x30, 0x13, + 0x0b, 0x4e, 0xfe, 0x2e, 0x05, 0xd2, 0x2a, 0xdb, 0xcf, 0xd9, 0x28, 0xd2, 0x89, 0x8d, 0xe2, 0xa3, + 0x79, 0x07, 0x6e, 0xac, 0x1e, 0xc3, 0x82, 0x17, 0xdf, 0xa4, 0xe0, 0xca, 0xf2, 0x42, 0x6b, 0xa9, + 0x0f, 0x1f, 0x43, 0x7e, 0x42, 0xfd, 0xb1, 0x1d, 0x16, 0x1b, 0x6f, 0x2f, 0x39, 0xc2, 0x98, 0x78, + 0x3e, 0x56, 0x81, 0x56, 0xfc, 0x0c, 0xcc, 0xac, 0xaa, 0x96, 0x84, 0x37, 0x0b, 0x9e, 0xfe, 0x3e, + 0x0d, 0x2f, 0x2f, 0x35, 0xbe, 0xd4, 0xd1, 0xd7, 0x00, 0x0c, 0xcb, 0x99, 0xfa, 0xa2, 0xa0, 0x10, + 0xfb, 0x53, 0x91, 0x23, 0x7c, 0xed, 0xb3, 0xbd, 0x67, 0xea, 0x47, 0xf2, 0x0c, 0x97, 0x83, 0x80, + 0x38, 0xe1, 0xfe, 0xcc, 0xd1, 0x2c, 0x77, 0xf4, 0xf5, 0x15, 0x23, 0x5d, 0x38, 0xab, 0xdf, 0x03, + 0xa4, 0x99, 0x06, 0xb5, 0x7c, 0xd5, 0xf3, 0x5d, 0x4a, 0x26, 0x86, 0x35, 0xe2, 0x1b, 0x70, 0xe1, + 0x30, 0x37, 0x24, 0xa6, 0x47, 0x95, 0x0d, 0x21, 0xee, 0x87, 0x52, 0xa6, 0xc1, 0xcf, 0x38, 0x37, + 0xa6, 0x91, 0x4f, 0x68, 0x08, 0x71, 0xa4, 0x51, 0xfd, 0xb6, 0x00, 0xa5, 0x58, 0x59, 0x8a, 0x6f, + 0x40, 0xf9, 0x21, 0x79, 0x4c, 0xd4, 0xf0, 0xaa, 0x21, 0x22, 0x51, 0x62, 0x58, 0x2f, 0xb8, 0x6e, + 0xbc, 0x07, 0xdb, 0x9c, 0x62, 0x4f, 0x7d, 0xea, 0xaa, 0x9a, 0x49, 0x3c, 0x8f, 0x07, 0xad, 0xc0, + 0xa9, 0x98, 0xc9, 0xba, 0x4c, 0xd4, 0x08, 0x25, 0xf8, 0x2e, 0x6c, 0x71, 0x8d, 0xc9, 0xd4, 0xf4, + 0x0d, 0xc7, 0xa4, 0x2a, 0xbb, 0xfc, 0x78, 0x7c, 0x23, 0x8e, 0x3c, 0xdb, 0x64, 0x8c, 0xd3, 0x80, + 0xc0, 0x3c, 0xf2, 0x70, 0x13, 0x5e, 0xe3, 0x6a, 0x23, 0x6a, 0x51, 0x97, 0xf8, 0x54, 0xa5, 0x5f, + 0x4e, 0x89, 0xe9, 0xa9, 0xc4, 0xd2, 0xd5, 0x31, 0xf1, 0xc6, 0xd2, 0x36, 0x33, 0x70, 0x94, 0x96, + 0x52, 0xca, 0x35, 0x46, 0x3c, 0x0e, 0x78, 0x32, 0xa7, 0xd5, 0x2d, 0xfd, 0x13, 0xe2, 0x8d, 0xf1, + 0x21, 0x5c, 0xe1, 0x56, 0x3c, 0xdf, 0x35, 0xac, 0x91, 0xaa, 0x8d, 0xa9, 0xf6, 0x48, 0x9d, 0xfa, + 0xc3, 0xfb, 0xd2, 0x2b, 0xf1, 0xfe, 0xb9, 0x87, 0x7d, 0xce, 0x69, 0x30, 0xca, 0x99, 0x3f, 0xbc, + 0x8f, 0xfb, 0x50, 0x66, 0x93, 0x31, 0x31, 0xbe, 0xa2, 0xea, 0xd0, 0x76, 0xf9, 0xc9, 0x52, 0x59, + 0xb2, 0xb2, 0x63, 0x11, 0xac, 0x75, 0x03, 0x85, 0x53, 0x5b, 0xa7, 0x87, 0xb9, 0x7e, 0x4f, 0x96, + 0x9b, 0x4a, 0x29, 0xb4, 0xf2, 0xc0, 0x76, 0x59, 0x42, 0x8d, 0xec, 0x28, 0xc0, 0x25, 0x91, 0x50, + 0x23, 0x3b, 0x0c, 0xef, 0x5d, 0xd8, 0xd2, 0x34, 0x31, 0x66, 0x43, 0x53, 0x83, 0x2b, 0x8a, 0x27, + 0xa1, 0x44, 0xb0, 0x34, 0xed, 0x58, 0x10, 0x82, 0x1c, 0xf7, 0xf0, 0x87, 0xf0, 0xf2, 0x2c, 0x58, + 0x71, 0xc5, 0xcd, 0x85, 0x51, 0xce, 0xab, 0xde, 0x85, 0x2d, 0xe7, 0x62, 0x51, 0x11, 0x27, 0x7a, + 0x74, 0x2e, 0xe6, 0xd5, 0x3e, 0x80, 0x6d, 0x67, 0xec, 0x2c, 0xea, 0xdd, 0x8e, 0xeb, 0x61, 0x67, + 0xec, 0xcc, 0x2b, 0xbe, 0xc5, 0xef, 0xab, 0x2e, 0xd5, 0x88, 0x4f, 0x75, 0xe9, 0x6a, 0x9c, 0x1e, + 0x13, 0xe0, 0x7d, 0x40, 0x9a, 0xa6, 0x52, 0x8b, 0x9c, 0x9b, 0x54, 0x25, 0x2e, 0xb5, 0x88, 0x27, + 0x5d, 0x8f, 0x93, 0x2b, 0x9a, 0x26, 0x73, 0x69, 0x9d, 0x0b, 0xf1, 0x6d, 0xd8, 0xb4, 0xcf, 0x1f, + 0x6a, 0x22, 0x25, 0x55, 0xc7, 0xa5, 0x43, 0xe3, 0xa9, 0xf4, 0x26, 0x8f, 0xef, 0x06, 0x13, 0xf0, + 0x84, 0xec, 0x71, 0x18, 0xdf, 0x02, 0xa4, 0x79, 0x63, 0xe2, 0x3a, 0xbc, 0x26, 0xf0, 0x1c, 0xa2, + 0x51, 0xe9, 0x2d, 0x41, 0x15, 0x78, 0x27, 0x84, 0xd9, 0x92, 0xf0, 0x9e, 0x18, 0x43, 0x3f, 0xb4, + 0x78, 0x53, 0x2c, 0x09, 0x8e, 0x05, 0xd6, 0xf6, 0x00, 0xb1, 0x50, 0x24, 0x3a, 0xde, 0xe3, 0xb4, + 0x8a, 0x33, 0x76, 0xe2, 0xfd, 0xbe, 0x01, 0xeb, 0x8c, 0x39, 0xeb, 0xf4, 0x96, 0xa8, 0x67, 0x9c, + 0x71, 0xac, 0xc7, 0x1f, 0xad, 0xb4, 0xac, 0x1e, 0x42, 0x39, 0x9e, 0x9f, 0xb8, 0x08, 0x22, 0x43, + 0x51, 0x8a, 0x9d, 0xf5, 0x8d, 0x6e, 0x93, 0x9d, 
0xd2, 0x5f, 0xc8, 0x28, 0xcd, 0xaa, 0x85, 0x76, + 0x6b, 0x20, 0xab, 0xca, 0x59, 0x67, 0xd0, 0x3a, 0x95, 0x51, 0x26, 0x56, 0x96, 0x9e, 0x64, 0x0b, + 0x6f, 0xa3, 0x9b, 0xd5, 0xef, 0xd2, 0x50, 0x49, 0xde, 0x33, 0xf0, 0xcf, 0xe1, 0x6a, 0xf8, 0x28, + 0xe0, 0x51, 0x5f, 0x7d, 0x62, 0xb8, 0x7c, 0xe1, 0x4c, 0x88, 0xa8, 0xb3, 0xa3, 0xa9, 0xdb, 0x0e, + 0x58, 0x7d, 0xea, 0x7f, 0x6a, 0xb8, 0x6c, 0x59, 0x4c, 0x88, 0x8f, 0xdb, 0x70, 0xdd, 0xb2, 0x55, + 0xcf, 0x27, 0x96, 0x4e, 0x5c, 0x5d, 0x9d, 0x3d, 0xc7, 0xa8, 0x44, 0xd3, 0xa8, 0xe7, 0xd9, 0xe2, + 0xc0, 0x8a, 0xac, 0xbc, 0x6a, 0xd9, 0xfd, 0x80, 0x3c, 0xdb, 0xc9, 0xeb, 0x01, 0x75, 0x2e, 0xcd, + 0x32, 0xab, 0xd2, 0xec, 0x15, 0x28, 0x4e, 0x88, 0xa3, 0x52, 0xcb, 0x77, 0x2f, 0x78, 0x75, 0x59, + 0x50, 0x0a, 0x13, 0xe2, 0xc8, 0xac, 0xfd, 0x42, 0x8a, 0xfc, 0x93, 0x6c, 0xa1, 0x80, 0x8a, 0x27, + 0xd9, 0x42, 0x11, 0x41, 0xf5, 0x5f, 0x19, 0x28, 0xc7, 0xab, 0x4d, 0x56, 0xbc, 0x6b, 0xfc, 0x64, + 0x49, 0xf1, 0xbd, 0xe7, 0x8d, 0xef, 0xad, 0x4d, 0x6b, 0x0d, 0x76, 0xe4, 0x1c, 0xe6, 0x45, 0x0d, + 0xa8, 0x08, 0x4d, 0x76, 0xdc, 0xb3, 0xdd, 0x86, 0x8a, 0x7b, 0x4d, 0x41, 0x09, 0x5a, 0xf8, 0x18, + 0xf2, 0x0f, 0x3d, 0x6e, 0x3b, 0xcf, 0x6d, 0xbf, 0xf9, 0xfd, 0xb6, 0x4f, 0xfa, 0xdc, 0x78, 0xf1, + 0xa4, 0xaf, 0x76, 0xba, 0xca, 0x69, 0xbd, 0xad, 0x04, 0xea, 0xf8, 0x1a, 0x64, 0x4d, 0xf2, 0xd5, + 0x45, 0xf2, 0x70, 0xe2, 0xd0, 0x65, 0x27, 0xe1, 0x1a, 0x64, 0x9f, 0x50, 0xf2, 0x28, 0x79, 0x24, + 0x70, 0xe8, 0x47, 0x5c, 0x0c, 0xfb, 0x90, 0xe3, 0xf1, 0xc2, 0x00, 0x41, 0xc4, 0xd0, 0x4b, 0xb8, + 0x00, 0xd9, 0x46, 0x57, 0x61, 0x0b, 0x02, 0x41, 0x59, 0xa0, 0x6a, 0xaf, 0x25, 0x37, 0x64, 0x94, + 0xae, 0xde, 0x85, 0xbc, 0x08, 0x02, 0x5b, 0x2c, 0x51, 0x18, 0xd0, 0x4b, 0x41, 0x33, 0xb0, 0x91, + 0x0a, 0xa5, 0x67, 0xa7, 0x47, 0xb2, 0x82, 0xd2, 0xc9, 0xa9, 0xce, 0xa2, 0x5c, 0xd5, 0x83, 0x72, + 0xbc, 0xdc, 0x7c, 0x31, 0x57, 0xc9, 0xbf, 0xa7, 0xa0, 0x14, 0x2b, 0x1f, 0x59, 0xe1, 0x42, 0x4c, + 0xd3, 0x7e, 0xa2, 0x12, 0xd3, 0x20, 0x5e, 0x90, 0x1a, 0xc0, 0xa1, 0x3a, 0x43, 0x2e, 0x3b, 0x75, + 0x2f, 0x68, 0x89, 0xe4, 0x50, 0xbe, 0xfa, 0x97, 0x14, 0xa0, 0xf9, 0x02, 0x74, 0xce, 0xcd, 0xd4, + 0x4f, 0xe9, 0x66, 0xf5, 0xcf, 0x29, 0xa8, 0x24, 0xab, 0xce, 0x39, 0xf7, 0x6e, 0xfc, 0xa4, 0xee, + 0xfd, 0x33, 0x0d, 0xeb, 0x89, 0x5a, 0xf3, 0xb2, 0xde, 0x7d, 0x09, 0x9b, 0x86, 0x4e, 0x27, 0x8e, + 0xed, 0x53, 0x4b, 0xbb, 0x50, 0x4d, 0xfa, 0x98, 0x9a, 0x52, 0x95, 0x6f, 0x1a, 0xfb, 0xdf, 0x5f, + 0xcd, 0xd6, 0x5a, 0x33, 0xbd, 0x36, 0x53, 0x3b, 0xdc, 0x6a, 0x35, 0xe5, 0xd3, 0x5e, 0x77, 0x20, + 0x77, 0x1a, 0x9f, 0xab, 0x67, 0x9d, 0x5f, 0x75, 0xba, 0x9f, 0x76, 0x14, 0x64, 0xcc, 0xd1, 0x7e, + 0xc4, 0x65, 0xdf, 0x03, 0x34, 0xef, 0x14, 0xbe, 0x0a, 0xcb, 0xdc, 0x42, 0x2f, 0xe1, 0x2d, 0xd8, + 0xe8, 0x74, 0xd5, 0x7e, 0xab, 0x29, 0xab, 0xf2, 0x83, 0x07, 0x72, 0x63, 0xd0, 0x17, 0xd7, 0xfb, + 0x88, 0x3d, 0x48, 0x2c, 0xf0, 0xea, 0x9f, 0x32, 0xb0, 0xb5, 0xc4, 0x13, 0x5c, 0x0f, 0x6e, 0x16, + 0xe2, 0xb2, 0xf3, 0xee, 0x65, 0xbc, 0xaf, 0xb1, 0x82, 0xa0, 0x47, 0x5c, 0x3f, 0xb8, 0x88, 0xdc, + 0x02, 0x16, 0x25, 0xcb, 0x37, 0x86, 0x06, 0x75, 0x83, 0xd7, 0x10, 0x71, 0xdd, 0xd8, 0x98, 0xe1, + 0xe2, 0x41, 0xe4, 0x67, 0x80, 0x1d, 0xdb, 0x33, 0x7c, 0xe3, 0x31, 0x55, 0x0d, 0x2b, 0x7c, 0x3a, + 0x61, 0xd7, 0x8f, 0xac, 0x82, 0x42, 0x49, 0xcb, 0xf2, 0x23, 0xb6, 0x45, 0x47, 0x64, 0x8e, 0xcd, + 0x36, 0xf3, 0x8c, 0x82, 0x42, 0x49, 0xc4, 0xbe, 0x01, 0x65, 0xdd, 0x9e, 0xb2, 0x9a, 0x4c, 0xf0, + 0xd8, 0xd9, 0x91, 0x52, 0x4a, 0x02, 0x8b, 0x28, 0x41, 0xb5, 0x3d, 0x7b, 0xb3, 0x29, 0x2b, 0x25, + 0x81, 0x09, 0xca, 0x4d, 0xd8, 0x20, 0xa3, 0x91, 0xcb, 0x8c, 0x87, 0x86, 
0xc4, 0xfd, 0xa1, 0x12, + 0xc1, 0x9c, 0xb8, 0x73, 0x02, 0x85, 0x30, 0x0e, 0xec, 0xa8, 0x66, 0x91, 0x50, 0x1d, 0xf1, 0x6e, + 0x97, 0xde, 0x2b, 0x2a, 0x05, 0x2b, 0x14, 0xde, 0x80, 0xb2, 0xe1, 0xa9, 0xb3, 0x27, 0xe8, 0xf4, + 0x6e, 0x7a, 0xaf, 0xa0, 0x94, 0x0c, 0x2f, 0x7a, 0xbe, 0xab, 0x7e, 0x93, 0x86, 0x4a, 0xf2, 0x09, + 0x1d, 0x37, 0xa1, 0x60, 0xda, 0x1a, 0xe1, 0xa9, 0x25, 0xbe, 0xdf, 0xec, 0x3d, 0xe7, 0xd5, 0xbd, + 0xd6, 0x0e, 0xf8, 0x4a, 0xa4, 0xb9, 0xf3, 0x8f, 0x14, 0x14, 0x42, 0x18, 0x5f, 0x81, 0xac, 0x43, + 0xfc, 0x31, 0x37, 0x97, 0x3b, 0x4a, 0xa3, 0x94, 0xc2, 0xdb, 0x0c, 0xf7, 0x1c, 0x62, 0xf1, 0x14, + 0x08, 0x70, 0xd6, 0x66, 0xf3, 0x6a, 0x52, 0xa2, 0xf3, 0xcb, 0x89, 0x3d, 0x99, 0x50, 0xcb, 0xf7, + 0xc2, 0x79, 0x0d, 0xf0, 0x46, 0x00, 0xe3, 0x77, 0x60, 0xd3, 0x77, 0x89, 0x61, 0x26, 0xb8, 0x59, + 0xce, 0x45, 0xa1, 0x20, 0x22, 0x1f, 0xc2, 0xb5, 0xd0, 0xae, 0x4e, 0x7d, 0xa2, 0x8d, 0xa9, 0x3e, + 0x53, 0xca, 0xf3, 0xf7, 0xd9, 0xab, 0x01, 0xa1, 0x19, 0xc8, 0x43, 0xdd, 0xea, 0x77, 0x29, 0xd8, + 0x0c, 0xaf, 0x53, 0x7a, 0x14, 0xac, 0x53, 0x00, 0x62, 0x59, 0xb6, 0x1f, 0x0f, 0xd7, 0x62, 0x2a, + 0x2f, 0xe8, 0xd5, 0xea, 0x91, 0x92, 0x12, 0x33, 0xb0, 0x33, 0x01, 0x98, 0x49, 0x56, 0x86, 0xed, + 0x3a, 0x94, 0x82, 0xef, 0x23, 0xfc, 0x23, 0x9b, 0xb8, 0x80, 0x83, 0x80, 0xd8, 0xbd, 0x0b, 0x6f, + 0x43, 0xee, 0x9c, 0x8e, 0x0c, 0x2b, 0x78, 0xf5, 0x14, 0x8d, 0xf0, 0x25, 0x37, 0x1b, 0xbd, 0xe4, + 0x1e, 0xfd, 0x21, 0x05, 0x5b, 0x9a, 0x3d, 0x99, 0xf7, 0xf7, 0x08, 0xcd, 0xbd, 0x02, 0x78, 0x9f, + 0xa4, 0xbe, 0xf8, 0x78, 0x64, 0xf8, 0xe3, 0xe9, 0x79, 0x4d, 0xb3, 0x27, 0xfb, 0x23, 0xdb, 0x24, + 0xd6, 0x68, 0xf6, 0x95, 0x90, 0xff, 0xd1, 0xde, 0x1d, 0x51, 0xeb, 0xdd, 0x91, 0x1d, 0xfb, 0x66, + 0xf8, 0xd1, 0xec, 0xef, 0xd7, 0xe9, 0xcc, 0x71, 0xef, 0xe8, 0xaf, 0xe9, 0x9d, 0x63, 0xd1, 0x57, + 0x2f, 0x8c, 0x8d, 0x42, 0x87, 0x26, 0xd5, 0xd8, 0x78, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x0c, + 0xab, 0xb6, 0x37, 0x7e, 0x1c, 0x00, 0x00, +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/doc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/doc.go new file mode 100644 index 0000000..0d6055d --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/doc.go @@ -0,0 +1,51 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* + A plugin for the Google protocol buffer compiler to generate Go code. + Run it by building this program and putting it in your path with the name + protoc-gen-go + That word 'go' at the end becomes part of the option string set for the + protocol compiler, so once the protocol compiler (protoc) is installed + you can run + protoc --go_out=output_directory input_directory/file.proto + to generate Go bindings for the protocol defined by file.proto. + With that input, the output will be written to + output_directory/file.pb.go + + The generated code is documented in the package comment for + the library. + + See the README and documentation for protocol buffers to learn more: + https://developers.google.com/protocol-buffers/ + +*/ +package documentation diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go new file mode 100644 index 0000000..60d5246 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/generator/generator.go @@ -0,0 +1,2866 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* + The code generator for the plugin for the Google protocol buffer compiler. + It generates Go code from the protocol buffer description files read by the + main routine. 
+*/ +package generator + +import ( + "bufio" + "bytes" + "compress/gzip" + "fmt" + "go/parser" + "go/printer" + "go/token" + "log" + "os" + "path" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/golang/protobuf/proto" + + "github.com/golang/protobuf/protoc-gen-go/descriptor" + plugin "github.com/golang/protobuf/protoc-gen-go/plugin" +) + +// generatedCodeVersion indicates a version of the generated code. +// It is incremented whenever an incompatibility between the generated code and +// proto package is introduced; the generated code references +// a constant, proto.ProtoPackageIsVersionN (where N is generatedCodeVersion). +const generatedCodeVersion = 2 + +// A Plugin provides functionality to add to the output during Go code generation, +// such as to produce RPC stubs. +type Plugin interface { + // Name identifies the plugin. + Name() string + // Init is called once after data structures are built but before + // code generation begins. + Init(g *Generator) + // Generate produces the code generated by the plugin for this file, + // except for the imports, by calling the generator's methods P, In, and Out. + Generate(file *FileDescriptor) + // GenerateImports produces the import declarations for this file. + // It is called after Generate. + GenerateImports(file *FileDescriptor) +} + +var plugins []Plugin + +// RegisterPlugin installs a (second-order) plugin to be run when the Go output is generated. +// It is typically called during initialization. +func RegisterPlugin(p Plugin) { + plugins = append(plugins, p) +} + +// Each type we import as a protocol buffer (other than FileDescriptorProto) needs +// a pointer to the FileDescriptorProto that represents it. These types achieve that +// wrapping by placing each Proto inside a struct with the pointer to its File. The +// structs have the same names as their contents, with "Proto" removed. +// FileDescriptor is used to store the things that it points to. + +// The file and package name method are common to messages and enums. +type common struct { + file *descriptor.FileDescriptorProto // File this object comes from. +} + +// PackageName is name in the package clause in the generated file. +func (c *common) PackageName() string { return uniquePackageOf(c.file) } + +func (c *common) File() *descriptor.FileDescriptorProto { return c.file } + +func fileIsProto3(file *descriptor.FileDescriptorProto) bool { + return file.GetSyntax() == "proto3" +} + +func (c *common) proto3() bool { return fileIsProto3(c.file) } + +// Descriptor represents a protocol buffer message. +type Descriptor struct { + common + *descriptor.DescriptorProto + parent *Descriptor // The containing message, if any. + nested []*Descriptor // Inner messages, if any. + enums []*EnumDescriptor // Inner enums, if any. + ext []*ExtensionDescriptor // Extensions, if any. + typename []string // Cached typename vector. + index int // The index into the container, whether the file or another message. + path string // The SourceCodeInfo path as comma-separated integers. + group bool +} + +// TypeName returns the elements of the dotted type name. +// The package name is not part of this name. +func (d *Descriptor) TypeName() []string { + if d.typename != nil { + return d.typename + } + n := 0 + for parent := d; parent != nil; parent = parent.parent { + n++ + } + s := make([]string, n, n) + for parent := d; parent != nil; parent = parent.parent { + n-- + s[n] = parent.GetName() + } + d.typename = s + return s +} + +// EnumDescriptor describes an enum. 
If it's at top level, its parent will be nil. +// Otherwise it will be the descriptor of the message in which it is defined. +type EnumDescriptor struct { + common + *descriptor.EnumDescriptorProto + parent *Descriptor // The containing message, if any. + typename []string // Cached typename vector. + index int // The index into the container, whether the file or a message. + path string // The SourceCodeInfo path as comma-separated integers. +} + +// TypeName returns the elements of the dotted type name. +// The package name is not part of this name. +func (e *EnumDescriptor) TypeName() (s []string) { + if e.typename != nil { + return e.typename + } + name := e.GetName() + if e.parent == nil { + s = make([]string, 1) + } else { + pname := e.parent.TypeName() + s = make([]string, len(pname)+1) + copy(s, pname) + } + s[len(s)-1] = name + e.typename = s + return s +} + +// Everything but the last element of the full type name, CamelCased. +// The values of type Foo.Bar are call Foo_value1... not Foo_Bar_value1... . +func (e *EnumDescriptor) prefix() string { + if e.parent == nil { + // If the enum is not part of a message, the prefix is just the type name. + return CamelCase(*e.Name) + "_" + } + typeName := e.TypeName() + return CamelCaseSlice(typeName[0:len(typeName)-1]) + "_" +} + +// The integer value of the named constant in this enumerated type. +func (e *EnumDescriptor) integerValueAsString(name string) string { + for _, c := range e.Value { + if c.GetName() == name { + return fmt.Sprint(c.GetNumber()) + } + } + log.Fatal("cannot find value for enum constant") + return "" +} + +// ExtensionDescriptor describes an extension. If it's at top level, its parent will be nil. +// Otherwise it will be the descriptor of the message in which it is defined. +type ExtensionDescriptor struct { + common + *descriptor.FieldDescriptorProto + parent *Descriptor // The containing message, if any. +} + +// TypeName returns the elements of the dotted type name. +// The package name is not part of this name. +func (e *ExtensionDescriptor) TypeName() (s []string) { + name := e.GetName() + if e.parent == nil { + // top-level extension + s = make([]string, 1) + } else { + pname := e.parent.TypeName() + s = make([]string, len(pname)+1) + copy(s, pname) + } + s[len(s)-1] = name + return s +} + +// DescName returns the variable name used for the generated descriptor. +func (e *ExtensionDescriptor) DescName() string { + // The full type name. + typeName := e.TypeName() + // Each scope of the extension is individually CamelCased, and all are joined with "_" with an "E_" prefix. + for i, s := range typeName { + typeName[i] = CamelCase(s) + } + return "E_" + strings.Join(typeName, "_") +} + +// ImportedDescriptor describes a type that has been publicly imported from another file. +type ImportedDescriptor struct { + common + o Object +} + +func (id *ImportedDescriptor) TypeName() []string { return id.o.TypeName() } + +// FileDescriptor describes an protocol buffer descriptor file (.proto). +// It includes slices of all the messages and enums defined within it. +// Those slices are constructed by WrapTypes. +type FileDescriptor struct { + *descriptor.FileDescriptorProto + desc []*Descriptor // All the messages defined in this file. + enum []*EnumDescriptor // All the enums defined in this file. + ext []*ExtensionDescriptor // All the top-level extensions defined in this file. + imp []*ImportedDescriptor // All types defined in files publicly imported by this file. 
+ + // Comments, stored as a map of path (comma-separated integers) to the comment. + comments map[string]*descriptor.SourceCodeInfo_Location + + // The full list of symbols that are exported, + // as a map from the exported object to its symbols. + // This is used for supporting public imports. + exported map[Object][]symbol + + index int // The index of this file in the list of files to generate code for + + proto3 bool // whether to generate proto3 code for this file +} + +// PackageName is the package name we'll use in the generated code to refer to this file. +func (d *FileDescriptor) PackageName() string { return uniquePackageOf(d.FileDescriptorProto) } + +// VarName is the variable name we'll use in the generated code to refer +// to the compressed bytes of this descriptor. It is not exported, so +// it is only valid inside the generated package. +func (d *FileDescriptor) VarName() string { return fmt.Sprintf("fileDescriptor%d", d.index) } + +// goPackageOption interprets the file's go_package option. +// If there is no go_package, it returns ("", "", false). +// If there's a simple name, it returns ("", pkg, true). +// If the option implies an import path, it returns (impPath, pkg, true). +func (d *FileDescriptor) goPackageOption() (impPath, pkg string, ok bool) { + pkg = d.GetOptions().GetGoPackage() + if pkg == "" { + return + } + ok = true + // The presence of a slash implies there's an import path. + slash := strings.LastIndex(pkg, "/") + if slash < 0 { + return + } + impPath, pkg = pkg, pkg[slash+1:] + // A semicolon-delimited suffix overrides the package name. + sc := strings.IndexByte(impPath, ';') + if sc < 0 { + return + } + impPath, pkg = impPath[:sc], impPath[sc+1:] + return +} + +// goPackageName returns the Go package name to use in the +// generated Go file. The result explicit reports whether the name +// came from an option go_package statement. If explicit is false, +// the name was derived from the protocol buffer's package statement +// or the input file name. +func (d *FileDescriptor) goPackageName() (name string, explicit bool) { + // Does the file have a "go_package" option? + if _, pkg, ok := d.goPackageOption(); ok { + return pkg, true + } + + // Does the file have a package clause? + if pkg := d.GetPackage(); pkg != "" { + return pkg, false + } + // Use the file base name. + return baseName(d.GetName()), false +} + +// goFileName returns the output name for the generated Go file. +func (d *FileDescriptor) goFileName() string { + name := *d.Name + if ext := path.Ext(name); ext == ".proto" || ext == ".protodevel" { + name = name[:len(name)-len(ext)] + } + name += ".pb.go" + + // Does the file have a "go_package" option? + // If it does, it may override the filename. + if impPath, _, ok := d.goPackageOption(); ok && impPath != "" { + // Replace the existing dirname with the declared import path. + _, name = path.Split(name) + name = path.Join(impPath, name) + return name + } + + return name +} + +func (d *FileDescriptor) addExport(obj Object, sym symbol) { + d.exported[obj] = append(d.exported[obj], sym) +} + +// symbol is an interface representing an exported Go symbol. +type symbol interface { + // GenerateAlias should generate an appropriate alias + // for the symbol from the named package. 
+ GenerateAlias(g *Generator, pkg string) +} + +type messageSymbol struct { + sym string + hasExtensions, isMessageSet bool + hasOneof bool + getters []getterSymbol +} + +type getterSymbol struct { + name string + typ string + typeName string // canonical name in proto world; empty for proto.Message and similar + genType bool // whether typ contains a generated type (message/group/enum) +} + +func (ms *messageSymbol) GenerateAlias(g *Generator, pkg string) { + remoteSym := pkg + "." + ms.sym + + g.P("type ", ms.sym, " ", remoteSym) + g.P("func (m *", ms.sym, ") Reset() { (*", remoteSym, ")(m).Reset() }") + g.P("func (m *", ms.sym, ") String() string { return (*", remoteSym, ")(m).String() }") + g.P("func (*", ms.sym, ") ProtoMessage() {}") + if ms.hasExtensions { + g.P("func (*", ms.sym, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange ", + "{ return (*", remoteSym, ")(nil).ExtensionRangeArray() }") + if ms.isMessageSet { + g.P("func (m *", ms.sym, ") Marshal() ([]byte, error) ", + "{ return (*", remoteSym, ")(m).Marshal() }") + g.P("func (m *", ms.sym, ") Unmarshal(buf []byte) error ", + "{ return (*", remoteSym, ")(m).Unmarshal(buf) }") + } + } + if ms.hasOneof { + // Oneofs and public imports do not mix well. + // We can make them work okay for the binary format, + // but they're going to break weirdly for text/JSON. + enc := "_" + ms.sym + "_OneofMarshaler" + dec := "_" + ms.sym + "_OneofUnmarshaler" + size := "_" + ms.sym + "_OneofSizer" + encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" + decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" + sizeSig := "(msg " + g.Pkg["proto"] + ".Message) int" + g.P("func (m *", ms.sym, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") + g.P("return ", enc, ", ", dec, ", ", size, ", nil") + g.P("}") + + g.P("func ", enc, encSig, " {") + g.P("m := msg.(*", ms.sym, ")") + g.P("m0 := (*", remoteSym, ")(m)") + g.P("enc, _, _, _ := m0.XXX_OneofFuncs()") + g.P("return enc(m0, b)") + g.P("}") + + g.P("func ", dec, decSig, " {") + g.P("m := msg.(*", ms.sym, ")") + g.P("m0 := (*", remoteSym, ")(m)") + g.P("_, dec, _, _ := m0.XXX_OneofFuncs()") + g.P("return dec(m0, tag, wire, b)") + g.P("}") + + g.P("func ", size, sizeSig, " {") + g.P("m := msg.(*", ms.sym, ")") + g.P("m0 := (*", remoteSym, ")(m)") + g.P("_, _, size, _ := m0.XXX_OneofFuncs()") + g.P("return size(m0)") + g.P("}") + } + for _, get := range ms.getters { + + if get.typeName != "" { + g.RecordTypeUse(get.typeName) + } + typ := get.typ + val := "(*" + remoteSym + ")(m)." + get.name + "()" + if get.genType { + // typ will be "*pkg.T" (message/group) or "pkg.T" (enum) + // or "map[t]*pkg.T" (map to message/enum). + // The first two of those might have a "[]" prefix if it is repeated. + // Drop any package qualifier since we have hoisted the type into this package. + rep := strings.HasPrefix(typ, "[]") + if rep { + typ = typ[2:] + } + isMap := strings.HasPrefix(typ, "map[") + star := typ[0] == '*' + if !isMap { // map types handled lower down + typ = typ[strings.Index(typ, ".")+1:] + } + if star { + typ = "*" + typ + } + if rep { + // Go does not permit conversion between slice types where both + // element types are named. That means we need to generate a bit + // of code in this situation. + // typ is the element type. + // val is the expression to get the slice from the imported type. 
+ + ctyp := typ // conversion type expression; "Foo" or "(*Foo)" + if star { + ctyp = "(" + typ + ")" + } + + g.P("func (m *", ms.sym, ") ", get.name, "() []", typ, " {") + g.In() + g.P("o := ", val) + g.P("if o == nil {") + g.In() + g.P("return nil") + g.Out() + g.P("}") + g.P("s := make([]", typ, ", len(o))") + g.P("for i, x := range o {") + g.In() + g.P("s[i] = ", ctyp, "(x)") + g.Out() + g.P("}") + g.P("return s") + g.Out() + g.P("}") + continue + } + if isMap { + // Split map[keyTyp]valTyp. + bra, ket := strings.Index(typ, "["), strings.Index(typ, "]") + keyTyp, valTyp := typ[bra+1:ket], typ[ket+1:] + // Drop any package qualifier. + // Only the value type may be foreign. + star := valTyp[0] == '*' + valTyp = valTyp[strings.Index(valTyp, ".")+1:] + if star { + valTyp = "*" + valTyp + } + + typ := "map[" + keyTyp + "]" + valTyp + g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " {") + g.P("o := ", val) + g.P("if o == nil { return nil }") + g.P("s := make(", typ, ", len(o))") + g.P("for k, v := range o {") + g.P("s[k] = (", valTyp, ")(v)") + g.P("}") + g.P("return s") + g.P("}") + continue + } + // Convert imported type into the forwarding type. + val = "(" + typ + ")(" + val + ")" + } + + g.P("func (m *", ms.sym, ") ", get.name, "() ", typ, " { return ", val, " }") + } + +} + +type enumSymbol struct { + name string + proto3 bool // Whether this came from a proto3 file. +} + +func (es enumSymbol) GenerateAlias(g *Generator, pkg string) { + s := es.name + g.P("type ", s, " ", pkg, ".", s) + g.P("var ", s, "_name = ", pkg, ".", s, "_name") + g.P("var ", s, "_value = ", pkg, ".", s, "_value") + g.P("func (x ", s, ") String() string { return (", pkg, ".", s, ")(x).String() }") + if !es.proto3 { + g.P("func (x ", s, ") Enum() *", s, "{ return (*", s, ")((", pkg, ".", s, ")(x).Enum()) }") + g.P("func (x *", s, ") UnmarshalJSON(data []byte) error { return (*", pkg, ".", s, ")(x).UnmarshalJSON(data) }") + } +} + +type constOrVarSymbol struct { + sym string + typ string // either "const" or "var" + cast string // if non-empty, a type cast is required (used for enums) +} + +func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg string) { + v := pkg + "." + cs.sym + if cs.cast != "" { + v = cs.cast + "(" + v + ")" + } + g.P(cs.typ, " ", cs.sym, " = ", v) +} + +// Object is an interface abstracting the abilities shared by enums, messages, extensions and imported objects. +type Object interface { + PackageName() string // The name we use in our output (a_b_c), possibly renamed for uniqueness. + TypeName() []string + File() *descriptor.FileDescriptorProto +} + +// Each package name we generate must be unique. The package we're generating +// gets its own name but every other package must have a unique name that does +// not conflict in the code we generate. These names are chosen globally (although +// they don't have to be, it simplifies things to do them globally). +func uniquePackageOf(fd *descriptor.FileDescriptorProto) string { + s, ok := uniquePackageName[fd] + if !ok { + log.Fatal("internal error: no package name defined for " + fd.GetName()) + } + return s +} + +// Generator is the type whose methods generate the output, stored in the associated response structure. +type Generator struct { + *bytes.Buffer + + Request *plugin.CodeGeneratorRequest // The input. + Response *plugin.CodeGeneratorResponse // The output. + + Param map[string]string // Command-line parameters. 
+ PackageImportPath string // Go import path of the package we're generating code for + ImportPrefix string // String to prefix to imported package file names. + ImportMap map[string]string // Mapping from .proto file name to import path + + Pkg map[string]string // The names under which we import support packages + + packageName string // What we're calling ourselves. + allFiles []*FileDescriptor // All files in the tree + allFilesByName map[string]*FileDescriptor // All files by filename. + genFiles []*FileDescriptor // Those files we will generate output for. + file *FileDescriptor // The file we are compiling now. + usedPackages map[string]bool // Names of packages used in current file. + typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax. + init []string // Lines to emit in the init function. + indent string + writeOutput bool +} + +// New creates a new generator and allocates the request and response protobufs. +func New() *Generator { + g := new(Generator) + g.Buffer = new(bytes.Buffer) + g.Request = new(plugin.CodeGeneratorRequest) + g.Response = new(plugin.CodeGeneratorResponse) + return g +} + +// Error reports a problem, including an error, and exits the program. +func (g *Generator) Error(err error, msgs ...string) { + s := strings.Join(msgs, " ") + ":" + err.Error() + log.Print("protoc-gen-go: error:", s) + os.Exit(1) +} + +// Fail reports a problem and exits the program. +func (g *Generator) Fail(msgs ...string) { + s := strings.Join(msgs, " ") + log.Print("protoc-gen-go: error:", s) + os.Exit(1) +} + +// CommandLineParameters breaks the comma-separated list of key=value pairs +// in the parameter (a member of the request protobuf) into a key/value map. +// It then sets file name mappings defined by those entries. +func (g *Generator) CommandLineParameters(parameter string) { + g.Param = make(map[string]string) + for _, p := range strings.Split(parameter, ",") { + if i := strings.Index(p, "="); i < 0 { + g.Param[p] = "" + } else { + g.Param[p[0:i]] = p[i+1:] + } + } + + g.ImportMap = make(map[string]string) + pluginList := "none" // Default list of plugin names to enable (empty means all). + for k, v := range g.Param { + switch k { + case "import_prefix": + g.ImportPrefix = v + case "import_path": + g.PackageImportPath = v + case "plugins": + pluginList = v + default: + if len(k) > 0 && k[0] == 'M' { + g.ImportMap[k[1:]] = v + } + } + } + if pluginList != "" { + // Amend the set of plugins. + enabled := make(map[string]bool) + for _, name := range strings.Split(pluginList, "+") { + enabled[name] = true + } + var nplugins []Plugin + for _, p := range plugins { + if enabled[p.Name()] { + nplugins = append(nplugins, p) + } + } + plugins = nplugins + } +} + +// DefaultPackageName returns the package name printed for the object. +// If its file is in a different package, it returns the package name we're using for this file, plus ".". +// Otherwise it returns the empty string. +func (g *Generator) DefaultPackageName(obj Object) string { + pkg := obj.PackageName() + if pkg == g.packageName { + return "" + } + return pkg + "." +} + +// For each input file, the unique package name to use, underscored. +var uniquePackageName = make(map[*descriptor.FileDescriptorProto]string) + +// Package names already registered. Key is the name from the .proto file; +// value is the name that appears in the generated code. +var pkgNamesInUse = make(map[string]bool) + +// Create and remember a guaranteed unique package name for this file descriptor. 
+// Pkg is the candidate name. If f is nil, it's a builtin package like "proto" and +// has no file descriptor. +func RegisterUniquePackageName(pkg string, f *FileDescriptor) string { + // Convert dots to underscores before finding a unique alias. + pkg = strings.Map(badToUnderscore, pkg) + + for i, orig := 1, pkg; pkgNamesInUse[pkg]; i++ { + // It's a duplicate; must rename. + pkg = orig + strconv.Itoa(i) + } + // Install it. + pkgNamesInUse[pkg] = true + if f != nil { + uniquePackageName[f.FileDescriptorProto] = pkg + } + return pkg +} + +var isGoKeyword = map[string]bool{ + "break": true, + "case": true, + "chan": true, + "const": true, + "continue": true, + "default": true, + "else": true, + "defer": true, + "fallthrough": true, + "for": true, + "func": true, + "go": true, + "goto": true, + "if": true, + "import": true, + "interface": true, + "map": true, + "package": true, + "range": true, + "return": true, + "select": true, + "struct": true, + "switch": true, + "type": true, + "var": true, +} + +// defaultGoPackage returns the package name to use, +// derived from the import path of the package we're building code for. +func (g *Generator) defaultGoPackage() string { + p := g.PackageImportPath + if i := strings.LastIndex(p, "/"); i >= 0 { + p = p[i+1:] + } + if p == "" { + return "" + } + + p = strings.Map(badToUnderscore, p) + // Identifier must not be keyword: insert _. + if isGoKeyword[p] { + p = "_" + p + } + // Identifier must not begin with digit: insert _. + if r, _ := utf8.DecodeRuneInString(p); unicode.IsDigit(r) { + p = "_" + p + } + return p +} + +// SetPackageNames sets the package name for this run. +// The package name must agree across all files being generated. +// It also defines unique package names for all imported files. +func (g *Generator) SetPackageNames() { + // Register the name for this package. It will be the first name + // registered so is guaranteed to be unmodified. + pkg, explicit := g.genFiles[0].goPackageName() + + // Check all files for an explicit go_package option. + for _, f := range g.genFiles { + thisPkg, thisExplicit := f.goPackageName() + if thisExplicit { + if !explicit { + // Let this file's go_package option serve for all input files. + pkg, explicit = thisPkg, true + } else if thisPkg != pkg { + g.Fail("inconsistent package names:", thisPkg, pkg) + } + } + } + + // If we don't have an explicit go_package option but we have an + // import path, use that. + if !explicit { + p := g.defaultGoPackage() + if p != "" { + pkg, explicit = p, true + } + } + + // If there was no go_package and no import path to use, + // double-check that all the inputs have the same implicit + // Go package name. + if !explicit { + for _, f := range g.genFiles { + thisPkg, _ := f.goPackageName() + if thisPkg != pkg { + g.Fail("inconsistent package names:", thisPkg, pkg) + } + } + } + + g.packageName = RegisterUniquePackageName(pkg, g.genFiles[0]) + + // Register the support package names. They might collide with the + // name of a package we import. + g.Pkg = map[string]string{ + "fmt": RegisterUniquePackageName("fmt", nil), + "math": RegisterUniquePackageName("math", nil), + "proto": RegisterUniquePackageName("proto", nil), + } + +AllFiles: + for _, f := range g.allFiles { + for _, genf := range g.genFiles { + if f == genf { + // In this package already. 
+ uniquePackageName[f.FileDescriptorProto] = g.packageName + continue AllFiles + } + } + // The file is a dependency, so we want to ignore its go_package option + // because that is only relevant for its specific generated output. + pkg := f.GetPackage() + if pkg == "" { + pkg = baseName(*f.Name) + } + RegisterUniquePackageName(pkg, f) + } +} + +// WrapTypes walks the incoming data, wrapping DescriptorProtos, EnumDescriptorProtos +// and FileDescriptorProtos into file-referenced objects within the Generator. +// It also creates the list of files to generate and so should be called before GenerateAllFiles. +func (g *Generator) WrapTypes() { + g.allFiles = make([]*FileDescriptor, 0, len(g.Request.ProtoFile)) + g.allFilesByName = make(map[string]*FileDescriptor, len(g.allFiles)) + for _, f := range g.Request.ProtoFile { + // We must wrap the descriptors before we wrap the enums + descs := wrapDescriptors(f) + g.buildNestedDescriptors(descs) + enums := wrapEnumDescriptors(f, descs) + g.buildNestedEnums(descs, enums) + exts := wrapExtensions(f) + fd := &FileDescriptor{ + FileDescriptorProto: f, + desc: descs, + enum: enums, + ext: exts, + exported: make(map[Object][]symbol), + proto3: fileIsProto3(f), + } + extractComments(fd) + g.allFiles = append(g.allFiles, fd) + g.allFilesByName[f.GetName()] = fd + } + for _, fd := range g.allFiles { + fd.imp = wrapImported(fd.FileDescriptorProto, g) + } + + g.genFiles = make([]*FileDescriptor, 0, len(g.Request.FileToGenerate)) + for _, fileName := range g.Request.FileToGenerate { + fd := g.allFilesByName[fileName] + if fd == nil { + g.Fail("could not find file named", fileName) + } + fd.index = len(g.genFiles) + g.genFiles = append(g.genFiles, fd) + } +} + +// Scan the descriptors in this file. For each one, build the slice of nested descriptors +func (g *Generator) buildNestedDescriptors(descs []*Descriptor) { + for _, desc := range descs { + if len(desc.NestedType) != 0 { + for _, nest := range descs { + if nest.parent == desc { + desc.nested = append(desc.nested, nest) + } + } + if len(desc.nested) != len(desc.NestedType) { + g.Fail("internal error: nesting failure for", desc.GetName()) + } + } + } +} + +func (g *Generator) buildNestedEnums(descs []*Descriptor, enums []*EnumDescriptor) { + for _, desc := range descs { + if len(desc.EnumType) != 0 { + for _, enum := range enums { + if enum.parent == desc { + desc.enums = append(desc.enums, enum) + } + } + if len(desc.enums) != len(desc.EnumType) { + g.Fail("internal error: enum nesting failure for", desc.GetName()) + } + } + } +} + +// Construct the Descriptor +func newDescriptor(desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *Descriptor { + d := &Descriptor{ + common: common{file}, + DescriptorProto: desc, + parent: parent, + index: index, + } + if parent == nil { + d.path = fmt.Sprintf("%d,%d", messagePath, index) + } else { + d.path = fmt.Sprintf("%s,%d,%d", parent.path, messageMessagePath, index) + } + + // The only way to distinguish a group from a message is whether + // the containing message has a TYPE_GROUP field that matches. + if parent != nil { + parts := d.TypeName() + if file.Package != nil { + parts = append([]string{*file.Package}, parts...) + } + exp := "." 
+ strings.Join(parts, ".") + for _, field := range parent.Field { + if field.GetType() == descriptor.FieldDescriptorProto_TYPE_GROUP && field.GetTypeName() == exp { + d.group = true + break + } + } + } + + for _, field := range desc.Extension { + d.ext = append(d.ext, &ExtensionDescriptor{common{file}, field, d}) + } + + return d +} + +// Return a slice of all the Descriptors defined within this file +func wrapDescriptors(file *descriptor.FileDescriptorProto) []*Descriptor { + sl := make([]*Descriptor, 0, len(file.MessageType)+10) + for i, desc := range file.MessageType { + sl = wrapThisDescriptor(sl, desc, nil, file, i) + } + return sl +} + +// Wrap this Descriptor, recursively +func wrapThisDescriptor(sl []*Descriptor, desc *descriptor.DescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) []*Descriptor { + sl = append(sl, newDescriptor(desc, parent, file, index)) + me := sl[len(sl)-1] + for i, nested := range desc.NestedType { + sl = wrapThisDescriptor(sl, nested, me, file, i) + } + return sl +} + +// Construct the EnumDescriptor +func newEnumDescriptor(desc *descriptor.EnumDescriptorProto, parent *Descriptor, file *descriptor.FileDescriptorProto, index int) *EnumDescriptor { + ed := &EnumDescriptor{ + common: common{file}, + EnumDescriptorProto: desc, + parent: parent, + index: index, + } + if parent == nil { + ed.path = fmt.Sprintf("%d,%d", enumPath, index) + } else { + ed.path = fmt.Sprintf("%s,%d,%d", parent.path, messageEnumPath, index) + } + return ed +} + +// Return a slice of all the EnumDescriptors defined within this file +func wrapEnumDescriptors(file *descriptor.FileDescriptorProto, descs []*Descriptor) []*EnumDescriptor { + sl := make([]*EnumDescriptor, 0, len(file.EnumType)+10) + // Top-level enums. + for i, enum := range file.EnumType { + sl = append(sl, newEnumDescriptor(enum, nil, file, i)) + } + // Enums within messages. Enums within embedded messages appear in the outer-most message. + for _, nested := range descs { + for i, enum := range nested.EnumType { + sl = append(sl, newEnumDescriptor(enum, nested, file, i)) + } + } + return sl +} + +// Return a slice of all the top-level ExtensionDescriptors defined within this file. +func wrapExtensions(file *descriptor.FileDescriptorProto) []*ExtensionDescriptor { + var sl []*ExtensionDescriptor + for _, field := range file.Extension { + sl = append(sl, &ExtensionDescriptor{common{file}, field, nil}) + } + return sl +} + +// Return a slice of all the types that are publicly imported into this file. +func wrapImported(file *descriptor.FileDescriptorProto, g *Generator) (sl []*ImportedDescriptor) { + for _, index := range file.PublicDependency { + df := g.fileByName(file.Dependency[index]) + for _, d := range df.desc { + if d.GetOptions().GetMapEntry() { + continue + } + sl = append(sl, &ImportedDescriptor{common{file}, d}) + } + for _, e := range df.enum { + sl = append(sl, &ImportedDescriptor{common{file}, e}) + } + for _, ext := range df.ext { + sl = append(sl, &ImportedDescriptor{common{file}, ext}) + } + } + return +} + +func extractComments(file *FileDescriptor) { + file.comments = make(map[string]*descriptor.SourceCodeInfo_Location) + for _, loc := range file.GetSourceCodeInfo().GetLocation() { + if loc.LeadingComments == nil { + continue + } + var p []string + for _, n := range loc.Path { + p = append(p, strconv.Itoa(int(n))) + } + file.comments[strings.Join(p, ",")] = loc + } +} + +// BuildTypeNameMap builds the map from fully qualified type names to objects. 
+// The key names for the map come from the input data, which puts a period at the beginning. +// It should be called after SetPackageNames and before GenerateAllFiles. +func (g *Generator) BuildTypeNameMap() { + g.typeNameToObject = make(map[string]Object) + for _, f := range g.allFiles { + // The names in this loop are defined by the proto world, not us, so the + // package name may be empty. If so, the dotted package name of X will + // be ".X"; otherwise it will be ".pkg.X". + dottedPkg := "." + f.GetPackage() + if dottedPkg != "." { + dottedPkg += "." + } + for _, enum := range f.enum { + name := dottedPkg + dottedSlice(enum.TypeName()) + g.typeNameToObject[name] = enum + } + for _, desc := range f.desc { + name := dottedPkg + dottedSlice(desc.TypeName()) + g.typeNameToObject[name] = desc + } + } +} + +// ObjectNamed, given a fully-qualified input type name as it appears in the input data, +// returns the descriptor for the message or enum with that name. +func (g *Generator) ObjectNamed(typeName string) Object { + o, ok := g.typeNameToObject[typeName] + if !ok { + g.Fail("can't find object with type", typeName) + } + + // If the file of this object isn't a direct dependency of the current file, + // or in the current file, then this object has been publicly imported into + // a dependency of the current file. + // We should return the ImportedDescriptor object for it instead. + direct := *o.File().Name == *g.file.Name + if !direct { + for _, dep := range g.file.Dependency { + if *g.fileByName(dep).Name == *o.File().Name { + direct = true + break + } + } + } + if !direct { + found := false + Loop: + for _, dep := range g.file.Dependency { + df := g.fileByName(*g.fileByName(dep).Name) + for _, td := range df.imp { + if td.o == o { + // Found it! + o = td + found = true + break Loop + } + } + } + if !found { + log.Printf("protoc-gen-go: WARNING: failed finding publicly imported dependency for %v, used in %v", typeName, *g.file.Name) + } + } + + return o +} + +// P prints the arguments to the generated output. It handles strings and int32s, plus +// handling indirections because they may be *string, etc. +func (g *Generator) P(str ...interface{}) { + if !g.writeOutput { + return + } + g.WriteString(g.indent) + for _, v := range str { + switch s := v.(type) { + case string: + g.WriteString(s) + case *string: + g.WriteString(*s) + case bool: + fmt.Fprintf(g, "%t", s) + case *bool: + fmt.Fprintf(g, "%t", *s) + case int: + fmt.Fprintf(g, "%d", s) + case *int32: + fmt.Fprintf(g, "%d", *s) + case *int64: + fmt.Fprintf(g, "%d", *s) + case float64: + fmt.Fprintf(g, "%g", s) + case *float64: + fmt.Fprintf(g, "%g", *s) + default: + g.Fail(fmt.Sprintf("unknown type in printer: %T", v)) + } + } + g.WriteByte('\n') +} + +// addInitf stores the given statement to be printed inside the file's init function. +// The statement is given as a format specifier and arguments. +func (g *Generator) addInitf(stmt string, a ...interface{}) { + g.init = append(g.init, fmt.Sprintf(stmt, a...)) +} + +// In Indents the output one tab stop. +func (g *Generator) In() { g.indent += "\t" } + +// Out unindents the output one tab stop. +func (g *Generator) Out() { + if len(g.indent) > 0 { + g.indent = g.indent[1:] + } +} + +// GenerateAllFiles generates the output for all the files we're outputting. +func (g *Generator) GenerateAllFiles() { + // Initialize the plugins + for _, p := range plugins { + p.Init(g) + } + // Generate the output. 
The generator runs for every file, even the files + // that we don't generate output for, so that we can collate the full list + // of exported symbols to support public imports. + genFileMap := make(map[*FileDescriptor]bool, len(g.genFiles)) + for _, file := range g.genFiles { + genFileMap[file] = true + } + for _, file := range g.allFiles { + g.Reset() + g.writeOutput = genFileMap[file] + g.generate(file) + if !g.writeOutput { + continue + } + g.Response.File = append(g.Response.File, &plugin.CodeGeneratorResponse_File{ + Name: proto.String(file.goFileName()), + Content: proto.String(g.String()), + }) + } +} + +// Run all the plugins associated with the file. +func (g *Generator) runPlugins(file *FileDescriptor) { + for _, p := range plugins { + p.Generate(file) + } +} + +// FileOf return the FileDescriptor for this FileDescriptorProto. +func (g *Generator) FileOf(fd *descriptor.FileDescriptorProto) *FileDescriptor { + for _, file := range g.allFiles { + if file.FileDescriptorProto == fd { + return file + } + } + g.Fail("could not find file in table:", fd.GetName()) + return nil +} + +// Fill the response protocol buffer with the generated output for all the files we're +// supposed to generate. +func (g *Generator) generate(file *FileDescriptor) { + g.file = g.FileOf(file.FileDescriptorProto) + g.usedPackages = make(map[string]bool) + + if g.file.index == 0 { + // For one file in the package, assert version compatibility. + g.P("// This is a compile-time assertion to ensure that this generated file") + g.P("// is compatible with the proto package it is being compiled against.") + g.P("// A compilation error at this line likely means your copy of the") + g.P("// proto package needs to be updated.") + g.P("const _ = ", g.Pkg["proto"], ".ProtoPackageIsVersion", generatedCodeVersion, " // please upgrade the proto package") + g.P() + } + for _, td := range g.file.imp { + g.generateImported(td) + } + for _, enum := range g.file.enum { + g.generateEnum(enum) + } + for _, desc := range g.file.desc { + // Don't generate virtual messages for maps. + if desc.GetOptions().GetMapEntry() { + continue + } + g.generateMessage(desc) + } + for _, ext := range g.file.ext { + g.generateExtension(ext) + } + g.generateInitFunction() + + // Run the plugins before the imports so we know which imports are necessary. + g.runPlugins(file) + + g.generateFileDescriptor(file) + + // Generate header and imports last, though they appear first in the output. + rem := g.Buffer + g.Buffer = new(bytes.Buffer) + g.generateHeader() + g.generateImports() + if !g.writeOutput { + return + } + g.Write(rem.Bytes()) + + // Reformat generated code. + fset := token.NewFileSet() + raw := g.Bytes() + ast, err := parser.ParseFile(fset, "", g, parser.ParseComments) + if err != nil { + // Print out the bad code with line numbers. + // This should never happen in practice, but it can while changing generated code, + // so consider this a debugging aid. 
+ var src bytes.Buffer + s := bufio.NewScanner(bytes.NewReader(raw)) + for line := 1; s.Scan(); line++ { + fmt.Fprintf(&src, "%5d\t%s\n", line, s.Bytes()) + } + g.Fail("bad Go source code was generated:", err.Error(), "\n"+src.String()) + } + g.Reset() + err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, ast) + if err != nil { + g.Fail("generated Go source code could not be reformatted:", err.Error()) + } +} + +// Generate the header, including package definition +func (g *Generator) generateHeader() { + g.P("// Code generated by protoc-gen-go. DO NOT EDIT.") + g.P("// source: ", g.file.Name) + g.P() + + name := g.file.PackageName() + + if g.file.index == 0 { + // Generate package docs for the first file in the package. + g.P("/*") + g.P("Package ", name, " is a generated protocol buffer package.") + g.P() + if loc, ok := g.file.comments[strconv.Itoa(packagePath)]; ok { + // not using g.PrintComments because this is a /* */ comment block. + text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") + for _, line := range strings.Split(text, "\n") { + line = strings.TrimPrefix(line, " ") + // ensure we don't escape from the block comment + line = strings.Replace(line, "*/", "* /", -1) + g.P(line) + } + g.P() + } + var topMsgs []string + g.P("It is generated from these files:") + for _, f := range g.genFiles { + g.P("\t", f.Name) + for _, msg := range f.desc { + if msg.parent != nil { + continue + } + topMsgs = append(topMsgs, CamelCaseSlice(msg.TypeName())) + } + } + g.P() + g.P("It has these top-level messages:") + for _, msg := range topMsgs { + g.P("\t", msg) + } + g.P("*/") + } + + g.P("package ", name) + g.P() +} + +// PrintComments prints any comments from the source .proto file. +// The path is a comma-separated list of integers. +// It returns an indication of whether any comments were printed. +// See descriptor.proto for its format. +func (g *Generator) PrintComments(path string) bool { + if !g.writeOutput { + return false + } + if loc, ok := g.file.comments[path]; ok { + text := strings.TrimSuffix(loc.GetLeadingComments(), "\n") + for _, line := range strings.Split(text, "\n") { + g.P("// ", strings.TrimPrefix(line, " ")) + } + return true + } + return false +} + +func (g *Generator) fileByName(filename string) *FileDescriptor { + return g.allFilesByName[filename] +} + +// weak returns whether the ith import of the current file is a weak import. +func (g *Generator) weak(i int32) bool { + for _, j := range g.file.WeakDependency { + if j == i { + return true + } + } + return false +} + +// Generate the imports +func (g *Generator) generateImports() { + // We almost always need a proto import. Rather than computing when we + // do, which is tricky when there's a plugin, just import it and + // reference it later. The same argument applies to the fmt and math packages. + g.P("import " + g.Pkg["proto"] + " " + strconv.Quote(g.ImportPrefix+"github.com/golang/protobuf/proto")) + g.P("import " + g.Pkg["fmt"] + ` "fmt"`) + g.P("import " + g.Pkg["math"] + ` "math"`) + for i, s := range g.file.Dependency { + fd := g.fileByName(s) + // Do not import our own package. + if fd.PackageName() == g.packageName { + continue + } + filename := fd.goFileName() + // By default, import path is the dirname of the Go filename. + importPath := path.Dir(filename) + if substitution, ok := g.ImportMap[s]; ok { + importPath = substitution + } + importPath = g.ImportPrefix + importPath + // Skip weak imports. 
+ if g.weak(int32(i)) { + g.P("// skipping weak import ", fd.PackageName(), " ", strconv.Quote(importPath)) + continue + } + // We need to import all the dependencies, even if we don't reference them, + // because other code and tools depend on having the full transitive closure + // of protocol buffer types in the binary. + pname := fd.PackageName() + if _, ok := g.usedPackages[pname]; !ok { + pname = "_" + } + g.P("import ", pname, " ", strconv.Quote(importPath)) + } + g.P() + // TODO: may need to worry about uniqueness across plugins + for _, p := range plugins { + p.GenerateImports(g.file) + g.P() + } + g.P("// Reference imports to suppress errors if they are not otherwise used.") + g.P("var _ = ", g.Pkg["proto"], ".Marshal") + g.P("var _ = ", g.Pkg["fmt"], ".Errorf") + g.P("var _ = ", g.Pkg["math"], ".Inf") + g.P() +} + +func (g *Generator) generateImported(id *ImportedDescriptor) { + // Don't generate public import symbols for files that we are generating + // code for, since those symbols will already be in this package. + // We can't simply avoid creating the ImportedDescriptor objects, + // because g.genFiles isn't populated at that stage. + tn := id.TypeName() + sn := tn[len(tn)-1] + df := g.FileOf(id.o.File()) + filename := *df.Name + for _, fd := range g.genFiles { + if *fd.Name == filename { + g.P("// Ignoring public import of ", sn, " from ", filename) + g.P() + return + } + } + g.P("// ", sn, " from public import ", filename) + g.usedPackages[df.PackageName()] = true + + for _, sym := range df.exported[id.o] { + sym.GenerateAlias(g, df.PackageName()) + } + + g.P() +} + +// Generate the enum definitions for this EnumDescriptor. +func (g *Generator) generateEnum(enum *EnumDescriptor) { + // The full type name + typeName := enum.TypeName() + // The full type name, CamelCased. 
+ ccTypeName := CamelCaseSlice(typeName) + ccPrefix := enum.prefix() + + g.PrintComments(enum.path) + g.P("type ", ccTypeName, " int32") + g.file.addExport(enum, enumSymbol{ccTypeName, enum.proto3()}) + g.P("const (") + g.In() + for i, e := range enum.Value { + g.PrintComments(fmt.Sprintf("%s,%d,%d", enum.path, enumValuePath, i)) + + name := ccPrefix + *e.Name + g.P(name, " ", ccTypeName, " = ", e.Number) + g.file.addExport(enum, constOrVarSymbol{name, "const", ccTypeName}) + } + g.Out() + g.P(")") + g.P("var ", ccTypeName, "_name = map[int32]string{") + g.In() + generated := make(map[int32]bool) // avoid duplicate values + for _, e := range enum.Value { + duplicate := "" + if _, present := generated[*e.Number]; present { + duplicate = "// Duplicate value: " + } + g.P(duplicate, e.Number, ": ", strconv.Quote(*e.Name), ",") + generated[*e.Number] = true + } + g.Out() + g.P("}") + g.P("var ", ccTypeName, "_value = map[string]int32{") + g.In() + for _, e := range enum.Value { + g.P(strconv.Quote(*e.Name), ": ", e.Number, ",") + } + g.Out() + g.P("}") + + if !enum.proto3() { + g.P("func (x ", ccTypeName, ") Enum() *", ccTypeName, " {") + g.In() + g.P("p := new(", ccTypeName, ")") + g.P("*p = x") + g.P("return p") + g.Out() + g.P("}") + } + + g.P("func (x ", ccTypeName, ") String() string {") + g.In() + g.P("return ", g.Pkg["proto"], ".EnumName(", ccTypeName, "_name, int32(x))") + g.Out() + g.P("}") + + if !enum.proto3() { + g.P("func (x *", ccTypeName, ") UnmarshalJSON(data []byte) error {") + g.In() + g.P("value, err := ", g.Pkg["proto"], ".UnmarshalJSONEnum(", ccTypeName, `_value, data, "`, ccTypeName, `")`) + g.P("if err != nil {") + g.In() + g.P("return err") + g.Out() + g.P("}") + g.P("*x = ", ccTypeName, "(value)") + g.P("return nil") + g.Out() + g.P("}") + } + + var indexes []string + for m := enum.parent; m != nil; m = m.parent { + // XXX: skip groups? + indexes = append([]string{strconv.Itoa(m.index)}, indexes...) + } + indexes = append(indexes, strconv.Itoa(enum.index)) + g.P("func (", ccTypeName, ") EnumDescriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") + if enum.file.GetPackage() == "google.protobuf" && enum.GetName() == "NullValue" { + g.P("func (", ccTypeName, `) XXX_WellKnownType() string { return "`, enum.GetName(), `" }`) + } + + g.P() +} + +// The tag is a string like "varint,2,opt,name=fieldname,def=7" that +// identifies details of the field for the protocol buffer marshaling and unmarshaling +// code. The fields are: +// wire encoding +// protocol tag number +// opt,req,rep for optional, required, or repeated +// packed whether the encoding is "packed" (optional; repeated primitives only) +// name= the original declared name +// enum= the name of the enum type if it is an enum-typed field. +// proto3 if this field is in a proto3 message +// def= string representation of the default value, if any. +// The default value must be in a representation that can be used at run-time +// to generate the default value. Thus bools become 0 and 1, for instance. +func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptorProto, wiretype string) string { + optrepreq := "" + switch { + case isOptional(field): + optrepreq = "opt" + case isRequired(field): + optrepreq = "req" + case isRepeated(field): + optrepreq = "rep" + } + var defaultValue string + if dv := field.DefaultValue; dv != nil { // set means an explicit default + defaultValue = *dv + // Some types need tweaking. 
+ switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BOOL: + if defaultValue == "true" { + defaultValue = "1" + } else { + defaultValue = "0" + } + case descriptor.FieldDescriptorProto_TYPE_STRING, + descriptor.FieldDescriptorProto_TYPE_BYTES: + // Nothing to do. Quoting is done for the whole tag. + case descriptor.FieldDescriptorProto_TYPE_ENUM: + // For enums we need to provide the integer constant. + obj := g.ObjectNamed(field.GetTypeName()) + if id, ok := obj.(*ImportedDescriptor); ok { + // It is an enum that was publicly imported. + // We need the underlying type. + obj = id.o + } + enum, ok := obj.(*EnumDescriptor) + if !ok { + log.Printf("obj is a %T", obj) + if id, ok := obj.(*ImportedDescriptor); ok { + log.Printf("id.o is a %T", id.o) + } + g.Fail("unknown enum type", CamelCaseSlice(obj.TypeName())) + } + defaultValue = enum.integerValueAsString(defaultValue) + } + defaultValue = ",def=" + defaultValue + } + enum := "" + if *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM { + // We avoid using obj.PackageName(), because we want to use the + // original (proto-world) package name. + obj := g.ObjectNamed(field.GetTypeName()) + if id, ok := obj.(*ImportedDescriptor); ok { + obj = id.o + } + enum = ",enum=" + if pkg := obj.File().GetPackage(); pkg != "" { + enum += pkg + "." + } + enum += CamelCaseSlice(obj.TypeName()) + } + packed := "" + if (field.Options != nil && field.Options.GetPacked()) || + // Per https://developers.google.com/protocol-buffers/docs/proto3#simple: + // "In proto3, repeated fields of scalar numeric types use packed encoding by default." + (message.proto3() && (field.Options == nil || field.Options.Packed == nil) && + isRepeated(field) && isScalar(field)) { + packed = ",packed" + } + fieldName := field.GetName() + name := fieldName + if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { + // We must use the type name for groups instead of + // the field name to preserve capitalization. + // type_name in FieldDescriptorProto is fully-qualified, + // but we only want the local part. + name = *field.TypeName + if i := strings.LastIndex(name, "."); i >= 0 { + name = name[i+1:] + } + } + if json := field.GetJsonName(); json != "" && json != name { + // TODO: escaping might be needed, in which case + // perhaps this should be in its own "json" tag. + name += ",json=" + json + } + name = ",name=" + name + if message.proto3() { + // We only need the extra tag for []byte fields; + // no need to add noise for the others. + if *field.Type == descriptor.FieldDescriptorProto_TYPE_BYTES { + name += ",proto3" + } + + } + oneof := "" + if field.OneofIndex != nil { + oneof = ",oneof" + } + return strconv.Quote(fmt.Sprintf("%s,%d,%s%s%s%s%s%s", + wiretype, + field.GetNumber(), + optrepreq, + packed, + name, + enum, + oneof, + defaultValue)) +} + +func needsStar(typ descriptor.FieldDescriptorProto_Type) bool { + switch typ { + case descriptor.FieldDescriptorProto_TYPE_GROUP: + return false + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + return false + case descriptor.FieldDescriptorProto_TYPE_BYTES: + return false + } + return true +} + +// TypeName is the printed name appropriate for an item. If the object is in the current file, +// TypeName drops the package name and underscores the rest. +// Otherwise the object is from another package; and the result is the underscored +// package name followed by the item name. +// The result always has an initial capital. 
+func (g *Generator) TypeName(obj Object) string { + return g.DefaultPackageName(obj) + CamelCaseSlice(obj.TypeName()) +} + +// TypeNameWithPackage is like TypeName, but always includes the package +// name even if the object is in our own package. +func (g *Generator) TypeNameWithPackage(obj Object) string { + return obj.PackageName() + CamelCaseSlice(obj.TypeName()) +} + +// GoType returns a string representing the type name, and the wire type +func (g *Generator) GoType(message *Descriptor, field *descriptor.FieldDescriptorProto) (typ string, wire string) { + // TODO: Options. + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + typ, wire = "float64", "fixed64" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + typ, wire = "float32", "fixed32" + case descriptor.FieldDescriptorProto_TYPE_INT64: + typ, wire = "int64", "varint" + case descriptor.FieldDescriptorProto_TYPE_UINT64: + typ, wire = "uint64", "varint" + case descriptor.FieldDescriptorProto_TYPE_INT32: + typ, wire = "int32", "varint" + case descriptor.FieldDescriptorProto_TYPE_UINT32: + typ, wire = "uint32", "varint" + case descriptor.FieldDescriptorProto_TYPE_FIXED64: + typ, wire = "uint64", "fixed64" + case descriptor.FieldDescriptorProto_TYPE_FIXED32: + typ, wire = "uint32", "fixed32" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + typ, wire = "bool", "varint" + case descriptor.FieldDescriptorProto_TYPE_STRING: + typ, wire = "string", "bytes" + case descriptor.FieldDescriptorProto_TYPE_GROUP: + desc := g.ObjectNamed(field.GetTypeName()) + typ, wire = "*"+g.TypeName(desc), "group" + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + desc := g.ObjectNamed(field.GetTypeName()) + typ, wire = "*"+g.TypeName(desc), "bytes" + case descriptor.FieldDescriptorProto_TYPE_BYTES: + typ, wire = "[]byte", "bytes" + case descriptor.FieldDescriptorProto_TYPE_ENUM: + desc := g.ObjectNamed(field.GetTypeName()) + typ, wire = g.TypeName(desc), "varint" + case descriptor.FieldDescriptorProto_TYPE_SFIXED32: + typ, wire = "int32", "fixed32" + case descriptor.FieldDescriptorProto_TYPE_SFIXED64: + typ, wire = "int64", "fixed64" + case descriptor.FieldDescriptorProto_TYPE_SINT32: + typ, wire = "int32", "zigzag32" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + typ, wire = "int64", "zigzag64" + default: + g.Fail("unknown type for", field.GetName()) + } + if isRepeated(field) { + typ = "[]" + typ + } else if message != nil && message.proto3() { + return + } else if field.OneofIndex != nil && message != nil { + return + } else if needsStar(*field.Type) { + typ = "*" + typ + } + return +} + +func (g *Generator) RecordTypeUse(t string) { + if obj, ok := g.typeNameToObject[t]; ok { + // Call ObjectNamed to get the true object to record the use. + obj = g.ObjectNamed(t) + g.usedPackages[obj.PackageName()] = true + } +} + +// Method names that may be generated. Fields with these names get an +// underscore appended. Any change to this set is a potential incompatible +// API change because it changes generated field names. +var methodNames = [...]string{ + "Reset", + "String", + "ProtoMessage", + "Marshal", + "Unmarshal", + "ExtensionRangeArray", + "ExtensionMap", + "Descriptor", +} + +// Names of messages in the `google.protobuf` package for which +// we will generate XXX_WellKnownType methods. 
+var wellKnownTypes = map[string]bool{ + "Any": true, + "Duration": true, + "Empty": true, + "Struct": true, + "Timestamp": true, + + "Value": true, + "ListValue": true, + "DoubleValue": true, + "FloatValue": true, + "Int64Value": true, + "UInt64Value": true, + "Int32Value": true, + "UInt32Value": true, + "BoolValue": true, + "StringValue": true, + "BytesValue": true, +} + +// Generate the type and default constant definitions for this Descriptor. +func (g *Generator) generateMessage(message *Descriptor) { + // The full type name + typeName := message.TypeName() + // The full type name, CamelCased. + ccTypeName := CamelCaseSlice(typeName) + + usedNames := make(map[string]bool) + for _, n := range methodNames { + usedNames[n] = true + } + fieldNames := make(map[*descriptor.FieldDescriptorProto]string) + fieldGetterNames := make(map[*descriptor.FieldDescriptorProto]string) + fieldTypes := make(map[*descriptor.FieldDescriptorProto]string) + mapFieldTypes := make(map[*descriptor.FieldDescriptorProto]string) + + oneofFieldName := make(map[int32]string) // indexed by oneof_index field of FieldDescriptorProto + oneofDisc := make(map[int32]string) // name of discriminator method + oneofTypeName := make(map[*descriptor.FieldDescriptorProto]string) // without star + oneofInsertPoints := make(map[int32]int) // oneof_index => offset of g.Buffer + + g.PrintComments(message.path) + g.P("type ", ccTypeName, " struct {") + g.In() + + // allocNames finds a conflict-free variation of the given strings, + // consistently mutating their suffixes. + // It returns the same number of strings. + allocNames := func(ns ...string) []string { + Loop: + for { + for _, n := range ns { + if usedNames[n] { + for i := range ns { + ns[i] += "_" + } + continue Loop + } + } + for _, n := range ns { + usedNames[n] = true + } + return ns + } + } + + for i, field := range message.Field { + // Allocate the getter and the field at the same time so name + // collisions create field/method consistent names. + // TODO: This allocation occurs based on the order of the fields + // in the proto file, meaning that a change in the field + // ordering can change generated Method/Field names. + base := CamelCase(*field.Name) + ns := allocNames(base, "Get"+base) + fieldName, fieldGetterName := ns[0], ns[1] + typename, wiretype := g.GoType(message, field) + jsonName := *field.Name + tag := fmt.Sprintf("protobuf:%s json:%q", g.goTag(message, field, wiretype), jsonName+",omitempty") + + fieldNames[field] = fieldName + fieldGetterNames[field] = fieldGetterName + + oneof := field.OneofIndex != nil + if oneof && oneofFieldName[*field.OneofIndex] == "" { + odp := message.OneofDecl[int(*field.OneofIndex)] + fname := allocNames(CamelCase(odp.GetName()))[0] + + // This is the first field of a oneof we haven't seen before. + // Generate the union field. + com := g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageOneofPath, *field.OneofIndex)) + if com { + g.P("//") + } + g.P("// Types that are valid to be assigned to ", fname, ":") + // Generate the rest of this comment later, + // when we've computed any disambiguation. 
+ oneofInsertPoints[*field.OneofIndex] = g.Buffer.Len() + + dname := "is" + ccTypeName + "_" + fname + oneofFieldName[*field.OneofIndex] = fname + oneofDisc[*field.OneofIndex] = dname + tag := `protobuf_oneof:"` + odp.GetName() + `"` + g.P(fname, " ", dname, " `", tag, "`") + } + + if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE { + desc := g.ObjectNamed(field.GetTypeName()) + if d, ok := desc.(*Descriptor); ok && d.GetOptions().GetMapEntry() { + // Figure out the Go types and tags for the key and value types. + keyField, valField := d.Field[0], d.Field[1] + keyType, keyWire := g.GoType(d, keyField) + valType, valWire := g.GoType(d, valField) + keyTag, valTag := g.goTag(d, keyField, keyWire), g.goTag(d, valField, valWire) + + // We don't use stars, except for message-typed values. + // Message and enum types are the only two possibly foreign types used in maps, + // so record their use. They are not permitted as map keys. + keyType = strings.TrimPrefix(keyType, "*") + switch *valField.Type { + case descriptor.FieldDescriptorProto_TYPE_ENUM: + valType = strings.TrimPrefix(valType, "*") + g.RecordTypeUse(valField.GetTypeName()) + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + g.RecordTypeUse(valField.GetTypeName()) + default: + valType = strings.TrimPrefix(valType, "*") + } + + typename = fmt.Sprintf("map[%s]%s", keyType, valType) + mapFieldTypes[field] = typename // record for the getter generation + + tag += fmt.Sprintf(" protobuf_key:%s protobuf_val:%s", keyTag, valTag) + } + } + + fieldTypes[field] = typename + + if oneof { + tname := ccTypeName + "_" + fieldName + // It is possible for this to collide with a message or enum + // nested in this message. Check for collisions. + for { + ok := true + for _, desc := range message.nested { + if CamelCaseSlice(desc.TypeName()) == tname { + ok = false + break + } + } + for _, enum := range message.enums { + if CamelCaseSlice(enum.TypeName()) == tname { + ok = false + break + } + } + if !ok { + tname += "_" + continue + } + break + } + + oneofTypeName[field] = tname + continue + } + + g.PrintComments(fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i)) + g.P(fieldName, "\t", typename, "\t`", tag, "`") + g.RecordTypeUse(field.GetTypeName()) + } + if len(message.ExtensionRange) > 0 { + g.P(g.Pkg["proto"], ".XXX_InternalExtensions `json:\"-\"`") + } + if !message.proto3() { + g.P("XXX_unrecognized\t[]byte `json:\"-\"`") + } + g.Out() + g.P("}") + + // Update g.Buffer to list valid oneof types. + // We do this down here, after we've disambiguated the oneof type names. + // We go in reverse order of insertion point to avoid invalidating offsets. + for oi := int32(len(message.OneofDecl)); oi >= 0; oi-- { + ip := oneofInsertPoints[oi] + all := g.Buffer.Bytes() + rem := all[ip:] + g.Buffer = bytes.NewBuffer(all[:ip:ip]) // set cap so we don't scribble on rem + for _, field := range message.Field { + if field.OneofIndex == nil || *field.OneofIndex != oi { + continue + } + g.P("//\t*", oneofTypeName[field]) + } + g.Buffer.Write(rem) + } + + // Reset, String and ProtoMessage methods. + g.P("func (m *", ccTypeName, ") Reset() { *m = ", ccTypeName, "{} }") + g.P("func (m *", ccTypeName, ") String() string { return ", g.Pkg["proto"], ".CompactTextString(m) }") + g.P("func (*", ccTypeName, ") ProtoMessage() {}") + var indexes []string + for m := message; m != nil; m = m.parent { + indexes = append([]string{strconv.Itoa(m.index)}, indexes...) 
+ } + g.P("func (*", ccTypeName, ") Descriptor() ([]byte, []int) { return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "} }") + // TODO: Revisit the decision to use a XXX_WellKnownType method + // if we change proto.MessageName to work with multiple equivalents. + if message.file.GetPackage() == "google.protobuf" && wellKnownTypes[message.GetName()] { + g.P("func (*", ccTypeName, `) XXX_WellKnownType() string { return "`, message.GetName(), `" }`) + } + + // Extension support methods + var hasExtensions, isMessageSet bool + if len(message.ExtensionRange) > 0 { + hasExtensions = true + // message_set_wire_format only makes sense when extensions are defined. + if opts := message.Options; opts != nil && opts.GetMessageSetWireFormat() { + isMessageSet = true + g.P() + g.P("func (m *", ccTypeName, ") Marshal() ([]byte, error) {") + g.In() + g.P("return ", g.Pkg["proto"], ".MarshalMessageSet(&m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("func (m *", ccTypeName, ") Unmarshal(buf []byte) error {") + g.In() + g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSet(buf, &m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("func (m *", ccTypeName, ") MarshalJSON() ([]byte, error) {") + g.In() + g.P("return ", g.Pkg["proto"], ".MarshalMessageSetJSON(&m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("func (m *", ccTypeName, ") UnmarshalJSON(buf []byte) error {") + g.In() + g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions)") + g.Out() + g.P("}") + g.P("// ensure ", ccTypeName, " satisfies proto.Marshaler and proto.Unmarshaler") + g.P("var _ ", g.Pkg["proto"], ".Marshaler = (*", ccTypeName, ")(nil)") + g.P("var _ ", g.Pkg["proto"], ".Unmarshaler = (*", ccTypeName, ")(nil)") + } + + g.P() + g.P("var extRange_", ccTypeName, " = []", g.Pkg["proto"], ".ExtensionRange{") + g.In() + for _, r := range message.ExtensionRange { + end := fmt.Sprint(*r.End - 1) // make range inclusive on both ends + g.P("{", r.Start, ", ", end, "},") + } + g.Out() + g.P("}") + g.P("func (*", ccTypeName, ") ExtensionRangeArray() []", g.Pkg["proto"], ".ExtensionRange {") + g.In() + g.P("return extRange_", ccTypeName) + g.Out() + g.P("}") + } + + // Default constants + defNames := make(map[*descriptor.FieldDescriptorProto]string) + for _, field := range message.Field { + def := field.GetDefaultValue() + if def == "" { + continue + } + fieldname := "Default_" + ccTypeName + "_" + CamelCase(*field.Name) + defNames[field] = fieldname + typename, _ := g.GoType(message, field) + if typename[0] == '*' { + typename = typename[1:] + } + kind := "const " + switch { + case typename == "bool": + case typename == "string": + def = strconv.Quote(def) + case typename == "[]byte": + def = "[]byte(" + strconv.Quote(unescape(def)) + ")" + kind = "var " + case def == "inf", def == "-inf", def == "nan": + // These names are known to, and defined by, the protocol language. + switch def { + case "inf": + def = "math.Inf(1)" + case "-inf": + def = "math.Inf(-1)" + case "nan": + def = "math.NaN()" + } + if *field.Type == descriptor.FieldDescriptorProto_TYPE_FLOAT { + def = "float32(" + def + ")" + } + kind = "var " + case *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM: + // Must be an enum. Need to construct the prefixed name. + obj := g.ObjectNamed(field.GetTypeName()) + var enum *EnumDescriptor + if id, ok := obj.(*ImportedDescriptor); ok { + // The enum type has been publicly imported. 
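For proto2 fields that carry an explicit default, this loop emits package-level declarations. Assuming a hypothetical message Conf with int32, bytes and float defaults, the emitted code has roughly this shape:

package confshape // hypothetical package; illustrates the emitted declarations only

import "math"

// optional int32 retries = 1 [default = 3];
const Default_Conf_Retries int32 = 3

// optional bytes seed = 2 [default = "\001\002"];  (bytes defaults become vars)
var Default_Conf_Seed []byte = []byte("\x01\x02")

// optional float ratio = 3 [default = inf];  (inf, -inf and nan also become vars)
var Default_Conf_Ratio float32 = float32(math.Inf(1))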
+ enum, _ = id.o.(*EnumDescriptor) + } else { + enum, _ = obj.(*EnumDescriptor) + } + if enum == nil { + log.Printf("don't know how to generate constant for %s", fieldname) + continue + } + def = g.DefaultPackageName(obj) + enum.prefix() + def + } + g.P(kind, fieldname, " ", typename, " = ", def) + g.file.addExport(message, constOrVarSymbol{fieldname, kind, ""}) + } + g.P() + + // Oneof per-field types, discriminants and getters. + // + // Generate unexported named types for the discriminant interfaces. + // We shouldn't have to do this, but there was (~19 Aug 2015) a compiler/linker bug + // that was triggered by using anonymous interfaces here. + // TODO: Revisit this and consider reverting back to anonymous interfaces. + for oi := range message.OneofDecl { + dname := oneofDisc[int32(oi)] + g.P("type ", dname, " interface { ", dname, "() }") + } + g.P() + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + _, wiretype := g.GoType(message, field) + tag := "protobuf:" + g.goTag(message, field, wiretype) + g.P("type ", oneofTypeName[field], " struct{ ", fieldNames[field], " ", fieldTypes[field], " `", tag, "` }") + g.RecordTypeUse(field.GetTypeName()) + } + g.P() + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + g.P("func (*", oneofTypeName[field], ") ", oneofDisc[*field.OneofIndex], "() {}") + } + g.P() + for oi := range message.OneofDecl { + fname := oneofFieldName[int32(oi)] + g.P("func (m *", ccTypeName, ") Get", fname, "() ", oneofDisc[int32(oi)], " {") + g.P("if m != nil { return m.", fname, " }") + g.P("return nil") + g.P("}") + } + g.P() + + // Field getters + var getters []getterSymbol + for _, field := range message.Field { + oneof := field.OneofIndex != nil + + fname := fieldNames[field] + typename, _ := g.GoType(message, field) + if t, ok := mapFieldTypes[field]; ok { + typename = t + } + mname := fieldGetterNames[field] + star := "" + if needsStar(*field.Type) && typename[0] == '*' { + typename = typename[1:] + star = "*" + } + + // Only export getter symbols for basic types, + // and for messages and enums in the same package. + // Groups are not exported. + // Foreign types can't be hoisted through a public import because + // the importer may not already be importing the defining .proto. + // As an example, imagine we have an import tree like this: + // A.proto -> B.proto -> C.proto + // If A publicly imports B, we need to generate the getters from B in A's output, + // but if one such getter returns something from C then we cannot do that + // because A is not importing C already. + var getter, genType bool + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_GROUP: + getter = false + case descriptor.FieldDescriptorProto_TYPE_MESSAGE, descriptor.FieldDescriptorProto_TYPE_ENUM: + // Only export getter if its return type is in this package. 
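The getter bodies generated below are nil-receiver safe and fall back to the field default when one exists. For a hypothetical proto2 string field `name` with a default, the emitted code is approximately:

package gettershape // hypothetical; shape of an emitted proto2 getter

const Default_Profile_Name string = "anon"

// Profile stands in for a generated message with `optional string name = 1 [default = "anon"];`.
type Profile struct {
	Name *string
}

func (m *Profile) GetName() string {
	if m != nil && m.Name != nil {
		return *m.Name
	}
	return Default_Profile_Name
}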
+ getter = g.ObjectNamed(field.GetTypeName()).PackageName() == message.PackageName() + genType = true + default: + getter = true + } + if getter { + getters = append(getters, getterSymbol{ + name: mname, + typ: typename, + typeName: field.GetTypeName(), + genType: genType, + }) + } + + g.P("func (m *", ccTypeName, ") "+mname+"() "+typename+" {") + g.In() + def, hasDef := defNames[field] + typeDefaultIsNil := false // whether this field type's default value is a literal nil unless specified + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BYTES: + typeDefaultIsNil = !hasDef + case descriptor.FieldDescriptorProto_TYPE_GROUP, descriptor.FieldDescriptorProto_TYPE_MESSAGE: + typeDefaultIsNil = true + } + if isRepeated(field) { + typeDefaultIsNil = true + } + if typeDefaultIsNil && !oneof { + // A bytes field with no explicit default needs less generated code, + // as does a message or group field, or a repeated field. + g.P("if m != nil {") + g.In() + g.P("return m." + fname) + g.Out() + g.P("}") + g.P("return nil") + g.Out() + g.P("}") + g.P() + continue + } + if !oneof { + if message.proto3() { + g.P("if m != nil {") + } else { + g.P("if m != nil && m." + fname + " != nil {") + } + g.In() + g.P("return " + star + "m." + fname) + g.Out() + g.P("}") + } else { + uname := oneofFieldName[*field.OneofIndex] + tname := oneofTypeName[field] + g.P("if x, ok := m.Get", uname, "().(*", tname, "); ok {") + g.P("return x.", fname) + g.P("}") + } + if hasDef { + if *field.Type != descriptor.FieldDescriptorProto_TYPE_BYTES { + g.P("return " + def) + } else { + // The default is a []byte var. + // Make a copy when returning it to be safe. + g.P("return append([]byte(nil), ", def, "...)") + } + } else { + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BOOL: + g.P("return false") + case descriptor.FieldDescriptorProto_TYPE_STRING: + g.P(`return ""`) + case descriptor.FieldDescriptorProto_TYPE_GROUP, + descriptor.FieldDescriptorProto_TYPE_MESSAGE, + descriptor.FieldDescriptorProto_TYPE_BYTES: + // This is only possible for oneof fields. + g.P("return nil") + case descriptor.FieldDescriptorProto_TYPE_ENUM: + // The default default for an enum is the first value in the enum, + // not zero. + obj := g.ObjectNamed(field.GetTypeName()) + var enum *EnumDescriptor + if id, ok := obj.(*ImportedDescriptor); ok { + // The enum type has been publicly imported. 
+ enum, _ = id.o.(*EnumDescriptor) + } else { + enum, _ = obj.(*EnumDescriptor) + } + if enum == nil { + log.Printf("don't know how to generate getter for %s", field.GetName()) + continue + } + if len(enum.Value) == 0 { + g.P("return 0 // empty enum") + } else { + first := enum.Value[0].GetName() + g.P("return ", g.DefaultPackageName(obj)+enum.prefix()+first) + } + default: + g.P("return 0") + } + } + g.Out() + g.P("}") + g.P() + } + + if !message.group { + ms := &messageSymbol{ + sym: ccTypeName, + hasExtensions: hasExtensions, + isMessageSet: isMessageSet, + hasOneof: len(message.OneofDecl) > 0, + getters: getters, + } + g.file.addExport(message, ms) + } + + // Oneof functions + if len(message.OneofDecl) > 0 { + fieldWire := make(map[*descriptor.FieldDescriptorProto]string) + + // method + enc := "_" + ccTypeName + "_OneofMarshaler" + dec := "_" + ccTypeName + "_OneofUnmarshaler" + size := "_" + ccTypeName + "_OneofSizer" + encSig := "(msg " + g.Pkg["proto"] + ".Message, b *" + g.Pkg["proto"] + ".Buffer) error" + decSig := "(msg " + g.Pkg["proto"] + ".Message, tag, wire int, b *" + g.Pkg["proto"] + ".Buffer) (bool, error)" + sizeSig := "(msg " + g.Pkg["proto"] + ".Message) (n int)" + + g.P("// XXX_OneofFuncs is for the internal use of the proto package.") + g.P("func (*", ccTypeName, ") XXX_OneofFuncs() (func", encSig, ", func", decSig, ", func", sizeSig, ", []interface{}) {") + g.P("return ", enc, ", ", dec, ", ", size, ", []interface{}{") + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + g.P("(*", oneofTypeName[field], ")(nil),") + } + g.P("}") + g.P("}") + g.P() + + // marshaler + g.P("func ", enc, encSig, " {") + g.P("m := msg.(*", ccTypeName, ")") + for oi, odp := range message.OneofDecl { + g.P("// ", odp.GetName()) + fname := oneofFieldName[int32(oi)] + g.P("switch x := m.", fname, ".(type) {") + for _, field := range message.Field { + if field.OneofIndex == nil || int(*field.OneofIndex) != oi { + continue + } + g.P("case *", oneofTypeName[field], ":") + var wire, pre, post string + val := "x." + fieldNames[field] // overridden for TYPE_BOOL + canFail := false // only TYPE_MESSAGE and TYPE_GROUP can fail + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + wire = "WireFixed64" + pre = "b.EncodeFixed64(" + g.Pkg["math"] + ".Float64bits(" + post = "))" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + wire = "WireFixed32" + pre = "b.EncodeFixed32(uint64(" + g.Pkg["math"] + ".Float32bits(" + post = ")))" + case descriptor.FieldDescriptorProto_TYPE_INT64, + descriptor.FieldDescriptorProto_TYPE_UINT64: + wire = "WireVarint" + pre, post = "b.EncodeVarint(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_INT32, + descriptor.FieldDescriptorProto_TYPE_UINT32, + descriptor.FieldDescriptorProto_TYPE_ENUM: + wire = "WireVarint" + pre, post = "b.EncodeVarint(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_FIXED64, + descriptor.FieldDescriptorProto_TYPE_SFIXED64: + wire = "WireFixed64" + pre, post = "b.EncodeFixed64(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_FIXED32, + descriptor.FieldDescriptorProto_TYPE_SFIXED32: + wire = "WireFixed32" + pre, post = "b.EncodeFixed32(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + // bool needs special handling. 
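The bool branch below writes the usual field key (number<<3 | wire type) followed by a single 0/1 varint. A small runnable illustration of those wire bytes, assuming a field number of 4:

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	b := proto.NewBuffer(nil)
	flag := true
	t := uint64(0)
	if flag {
		t = 1
	}
	b.EncodeVarint(4<<3 | proto.WireVarint) // key: field 4, varint wire type
	b.EncodeVarint(t)                       // value: bool as 0 or 1
	fmt.Printf("% x\n", b.Bytes())          // prints: 20 01
}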
+ g.P("t := uint64(0)") + g.P("if ", val, " { t = 1 }") + val = "t" + wire = "WireVarint" + pre, post = "b.EncodeVarint(", ")" + case descriptor.FieldDescriptorProto_TYPE_STRING: + wire = "WireBytes" + pre, post = "b.EncodeStringBytes(", ")" + case descriptor.FieldDescriptorProto_TYPE_GROUP: + wire = "WireStartGroup" + pre, post = "b.Marshal(", ")" + canFail = true + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + wire = "WireBytes" + pre, post = "b.EncodeMessage(", ")" + canFail = true + case descriptor.FieldDescriptorProto_TYPE_BYTES: + wire = "WireBytes" + pre, post = "b.EncodeRawBytes(", ")" + case descriptor.FieldDescriptorProto_TYPE_SINT32: + wire = "WireVarint" + pre, post = "b.EncodeZigzag32(uint64(", "))" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + wire = "WireVarint" + pre, post = "b.EncodeZigzag64(uint64(", "))" + default: + g.Fail("unhandled oneof field type ", field.Type.String()) + } + fieldWire[field] = wire + g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") + if !canFail { + g.P(pre, val, post) + } else { + g.P("if err := ", pre, val, post, "; err != nil {") + g.P("return err") + g.P("}") + } + if *field.Type == descriptor.FieldDescriptorProto_TYPE_GROUP { + g.P("b.EncodeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") + } + } + g.P("case nil:") + g.P("default: return ", g.Pkg["fmt"], `.Errorf("`, ccTypeName, ".", fname, ` has unexpected type %T", x)`) + g.P("}") + } + g.P("return nil") + g.P("}") + g.P() + + // unmarshaler + g.P("func ", dec, decSig, " {") + g.P("m := msg.(*", ccTypeName, ")") + g.P("switch tag {") + for _, field := range message.Field { + if field.OneofIndex == nil { + continue + } + odp := message.OneofDecl[int(*field.OneofIndex)] + g.P("case ", field.Number, ": // ", odp.GetName(), ".", *field.Name) + g.P("if wire != ", g.Pkg["proto"], ".", fieldWire[field], " {") + g.P("return true, ", g.Pkg["proto"], ".ErrInternalBadWireType") + g.P("}") + lhs := "x, err" // overridden for TYPE_MESSAGE and TYPE_GROUP + var dec, cast, cast2 string + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + dec, cast = "b.DecodeFixed64()", g.Pkg["math"]+".Float64frombits" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + dec, cast, cast2 = "b.DecodeFixed32()", "uint32", g.Pkg["math"]+".Float32frombits" + case descriptor.FieldDescriptorProto_TYPE_INT64: + dec, cast = "b.DecodeVarint()", "int64" + case descriptor.FieldDescriptorProto_TYPE_UINT64: + dec = "b.DecodeVarint()" + case descriptor.FieldDescriptorProto_TYPE_INT32: + dec, cast = "b.DecodeVarint()", "int32" + case descriptor.FieldDescriptorProto_TYPE_FIXED64: + dec = "b.DecodeFixed64()" + case descriptor.FieldDescriptorProto_TYPE_FIXED32: + dec, cast = "b.DecodeFixed32()", "uint32" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + dec = "b.DecodeVarint()" + // handled specially below + case descriptor.FieldDescriptorProto_TYPE_STRING: + dec = "b.DecodeStringBytes()" + case descriptor.FieldDescriptorProto_TYPE_GROUP: + g.P("msg := new(", fieldTypes[field][1:], ")") // drop star + lhs = "err" + dec = "b.DecodeGroup(msg)" + // handled specially below + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + g.P("msg := new(", fieldTypes[field][1:], ")") // drop star + lhs = "err" + dec = "b.DecodeMessage(msg)" + // handled specially below + case descriptor.FieldDescriptorProto_TYPE_BYTES: + dec = "b.DecodeRawBytes(true)" + case descriptor.FieldDescriptorProto_TYPE_UINT32: + dec, cast = "b.DecodeVarint()", "uint32" + case 
descriptor.FieldDescriptorProto_TYPE_ENUM: + dec, cast = "b.DecodeVarint()", fieldTypes[field] + case descriptor.FieldDescriptorProto_TYPE_SFIXED32: + dec, cast = "b.DecodeFixed32()", "int32" + case descriptor.FieldDescriptorProto_TYPE_SFIXED64: + dec, cast = "b.DecodeFixed64()", "int64" + case descriptor.FieldDescriptorProto_TYPE_SINT32: + dec, cast = "b.DecodeZigzag32()", "int32" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + dec, cast = "b.DecodeZigzag64()", "int64" + default: + g.Fail("unhandled oneof field type ", field.Type.String()) + } + g.P(lhs, " := ", dec) + val := "x" + if cast != "" { + val = cast + "(" + val + ")" + } + if cast2 != "" { + val = cast2 + "(" + val + ")" + } + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_BOOL: + val += " != 0" + case descriptor.FieldDescriptorProto_TYPE_GROUP, + descriptor.FieldDescriptorProto_TYPE_MESSAGE: + val = "msg" + } + g.P("m.", oneofFieldName[*field.OneofIndex], " = &", oneofTypeName[field], "{", val, "}") + g.P("return true, err") + } + g.P("default: return false, nil") + g.P("}") + g.P("}") + g.P() + + // sizer + g.P("func ", size, sizeSig, " {") + g.P("m := msg.(*", ccTypeName, ")") + for oi, odp := range message.OneofDecl { + g.P("// ", odp.GetName()) + fname := oneofFieldName[int32(oi)] + g.P("switch x := m.", fname, ".(type) {") + for _, field := range message.Field { + if field.OneofIndex == nil || int(*field.OneofIndex) != oi { + continue + } + g.P("case *", oneofTypeName[field], ":") + val := "x." + fieldNames[field] + var wire, varint, fixed string + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE: + wire = "WireFixed64" + fixed = "8" + case descriptor.FieldDescriptorProto_TYPE_FLOAT: + wire = "WireFixed32" + fixed = "4" + case descriptor.FieldDescriptorProto_TYPE_INT64, + descriptor.FieldDescriptorProto_TYPE_UINT64, + descriptor.FieldDescriptorProto_TYPE_INT32, + descriptor.FieldDescriptorProto_TYPE_UINT32, + descriptor.FieldDescriptorProto_TYPE_ENUM: + wire = "WireVarint" + varint = val + case descriptor.FieldDescriptorProto_TYPE_FIXED64, + descriptor.FieldDescriptorProto_TYPE_SFIXED64: + wire = "WireFixed64" + fixed = "8" + case descriptor.FieldDescriptorProto_TYPE_FIXED32, + descriptor.FieldDescriptorProto_TYPE_SFIXED32: + wire = "WireFixed32" + fixed = "4" + case descriptor.FieldDescriptorProto_TYPE_BOOL: + wire = "WireVarint" + fixed = "1" + case descriptor.FieldDescriptorProto_TYPE_STRING: + wire = "WireBytes" + fixed = "len(" + val + ")" + varint = fixed + case descriptor.FieldDescriptorProto_TYPE_GROUP: + wire = "WireStartGroup" + fixed = g.Pkg["proto"] + ".Size(" + val + ")" + case descriptor.FieldDescriptorProto_TYPE_MESSAGE: + wire = "WireBytes" + g.P("s := ", g.Pkg["proto"], ".Size(", val, ")") + fixed = "s" + varint = fixed + case descriptor.FieldDescriptorProto_TYPE_BYTES: + wire = "WireBytes" + fixed = "len(" + val + ")" + varint = fixed + case descriptor.FieldDescriptorProto_TYPE_SINT32: + wire = "WireVarint" + varint = "(uint32(" + val + ") << 1) ^ uint32((int32(" + val + ") >> 31))" + case descriptor.FieldDescriptorProto_TYPE_SINT64: + wire = "WireVarint" + varint = "uint64(" + val + " << 1) ^ uint64((int64(" + val + ") >> 63))" + default: + g.Fail("unhandled oneof field type ", field.Type.String()) + } + g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".", wire, ")") + if varint != "" { + g.P("n += ", g.Pkg["proto"], ".SizeVarint(uint64(", varint, "))") + } + if fixed != "" { + g.P("n += ", fixed) + } + if *field.Type == 
descriptor.FieldDescriptorProto_TYPE_GROUP { + g.P("n += ", g.Pkg["proto"], ".SizeVarint(", field.Number, "<<3|", g.Pkg["proto"], ".WireEndGroup)") + } + } + g.P("case nil:") + g.P("default:") + g.P("panic(", g.Pkg["fmt"], ".Sprintf(\"proto: unexpected type %T in oneof\", x))") + g.P("}") + } + g.P("return n") + g.P("}") + g.P() + } + + for _, ext := range message.ext { + g.generateExtension(ext) + } + + fullName := strings.Join(message.TypeName(), ".") + if g.file.Package != nil { + fullName = *g.file.Package + "." + fullName + } + + g.addInitf("%s.RegisterType((*%s)(nil), %q)", g.Pkg["proto"], ccTypeName, fullName) +} + +var escapeChars = [256]byte{ + 'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', 'v': '\v', '\\': '\\', '"': '"', '\'': '\'', '?': '?', +} + +// unescape reverses the "C" escaping that protoc does for default values of bytes fields. +// It is best effort in that it effectively ignores malformed input. Seemingly invalid escape +// sequences are conveyed, unmodified, into the decoded result. +func unescape(s string) string { + // NB: Sadly, we can't use strconv.Unquote because protoc will escape both + // single and double quotes, but strconv.Unquote only allows one or the + // other (based on actual surrounding quotes of its input argument). + + var out []byte + for len(s) > 0 { + // regular character, or too short to be valid escape + if s[0] != '\\' || len(s) < 2 { + out = append(out, s[0]) + s = s[1:] + } else if c := escapeChars[s[1]]; c != 0 { + // escape sequence + out = append(out, c) + s = s[2:] + } else if s[1] == 'x' || s[1] == 'X' { + // hex escape, e.g. "\x80 + if len(s) < 4 { + // too short to be valid + out = append(out, s[:2]...) + s = s[2:] + continue + } + v, err := strconv.ParseUint(s[2:4], 16, 8) + if err != nil { + out = append(out, s[:4]...) + } else { + out = append(out, byte(v)) + } + s = s[4:] + } else if '0' <= s[1] && s[1] <= '7' { + // octal escape, can vary from 1 to 3 octal digits; e.g., "\0" "\40" or "\164" + // so consume up to 2 more bytes or up to end-of-string + n := len(s[1:]) - len(strings.TrimLeft(s[1:], "01234567")) + if n > 3 { + n = 3 + } + v, err := strconv.ParseUint(s[1:1+n], 8, 8) + if err != nil { + out = append(out, s[:1+n]...) + } else { + out = append(out, byte(v)) + } + s = s[1+n:] + } else { + // bad escape, just propagate the slash as-is + out = append(out, s[0]) + s = s[1:] + } + } + + return string(out) +} + +func (g *Generator) generateExtension(ext *ExtensionDescriptor) { + ccTypeName := ext.DescName() + + extObj := g.ObjectNamed(*ext.Extendee) + var extDesc *Descriptor + if id, ok := extObj.(*ImportedDescriptor); ok { + // This is extending a publicly imported message. + // We need the underlying type for goTag. + extDesc = id.o.(*Descriptor) + } else { + extDesc = extObj.(*Descriptor) + } + extendedType := "*" + g.TypeName(extObj) // always use the original + field := ext.FieldDescriptorProto + fieldType, wireType := g.GoType(ext.parent, field) + tag := g.goTag(extDesc, field, wireType) + g.RecordTypeUse(*ext.Extendee) + if n := ext.FieldDescriptorProto.TypeName; n != nil { + // foreign extension type + g.RecordTypeUse(*n) + } + + typeName := ext.TypeName() + + // Special case for proto2 message sets: If this extension is extending + // proto2_bridge.MessageSet, and its final name component is "message_set_extension", + // then drop that last component. 
+ mset := false + if extendedType == "*proto2_bridge.MessageSet" && typeName[len(typeName)-1] == "message_set_extension" { + typeName = typeName[:len(typeName)-1] + mset = true + } + + // For text formatting, the package must be exactly what the .proto file declares, + // ignoring overrides such as the go_package option, and with no dot/underscore mapping. + extName := strings.Join(typeName, ".") + if g.file.Package != nil { + extName = *g.file.Package + "." + extName + } + + g.P("var ", ccTypeName, " = &", g.Pkg["proto"], ".ExtensionDesc{") + g.In() + g.P("ExtendedType: (", extendedType, ")(nil),") + g.P("ExtensionType: (", fieldType, ")(nil),") + g.P("Field: ", field.Number, ",") + g.P(`Name: "`, extName, `",`) + g.P("Tag: ", tag, ",") + g.P(`Filename: "`, g.file.GetName(), `",`) + + g.Out() + g.P("}") + g.P() + + if mset { + // Generate a bit more code to register with message_set.go. + g.addInitf("%s.RegisterMessageSetType((%s)(nil), %d, %q)", g.Pkg["proto"], fieldType, *field.Number, extName) + } + + g.file.addExport(ext, constOrVarSymbol{ccTypeName, "var", ""}) +} + +func (g *Generator) generateInitFunction() { + for _, enum := range g.file.enum { + g.generateEnumRegistration(enum) + } + for _, d := range g.file.desc { + for _, ext := range d.ext { + g.generateExtensionRegistration(ext) + } + } + for _, ext := range g.file.ext { + g.generateExtensionRegistration(ext) + } + if len(g.init) == 0 { + return + } + g.P("func init() {") + g.In() + for _, l := range g.init { + g.P(l) + } + g.Out() + g.P("}") + g.init = nil +} + +func (g *Generator) generateFileDescriptor(file *FileDescriptor) { + // Make a copy and trim source_code_info data. + // TODO: Trim this more when we know exactly what we need. + pb := proto.Clone(file.FileDescriptorProto).(*descriptor.FileDescriptorProto) + pb.SourceCodeInfo = nil + + b, err := proto.Marshal(pb) + if err != nil { + g.Fail(err.Error()) + } + + var buf bytes.Buffer + w, _ := gzip.NewWriterLevel(&buf, gzip.BestCompression) + w.Write(b) + w.Close() + b = buf.Bytes() + + v := file.VarName() + g.P() + g.P("func init() { ", g.Pkg["proto"], ".RegisterFile(", strconv.Quote(*file.Name), ", ", v, ") }") + g.P("var ", v, " = []byte{") + g.In() + g.P("// ", len(b), " bytes of a gzipped FileDescriptorProto") + for len(b) > 0 { + n := 16 + if n > len(b) { + n = len(b) + } + + s := "" + for _, c := range b[:n] { + s += fmt.Sprintf("0x%02x,", c) + } + g.P(s) + + b = b[n:] + } + g.Out() + g.P("}") +} + +func (g *Generator) generateEnumRegistration(enum *EnumDescriptor) { + // // We always print the full (proto-world) package name here. + pkg := enum.File().GetPackage() + if pkg != "" { + pkg += "." + } + // The full type name + typeName := enum.TypeName() + // The full type name, CamelCased. + ccTypeName := CamelCaseSlice(typeName) + g.addInitf("%s.RegisterEnum(%q, %[3]s_name, %[3]s_value)", g.Pkg["proto"], pkg+ccTypeName, ccTypeName) +} + +func (g *Generator) generateExtensionRegistration(ext *ExtensionDescriptor) { + g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName()) +} + +// And now lots of helper functions. + +// Is c an ASCII lower-case letter? +func isASCIILower(c byte) bool { + return 'a' <= c && c <= 'z' +} + +// Is c an ASCII digit? +func isASCIIDigit(c byte) bool { + return '0' <= c && c <= '9' +} + +// CamelCase returns the CamelCased name. +// If there is an interior underscore followed by a lower case letter, +// drop the underscore and convert the letter to upper case. 
+// There is a remote possibility of this rewrite causing a name collision, +// but it's so remote we're prepared to pretend it's nonexistent - since the +// C++ generator lowercases names, it's extremely unlikely to have two fields +// with different capitalizations. +// In short, _my_field_name_2 becomes XMyFieldName_2. +func CamelCase(s string) string { + if s == "" { + return "" + } + t := make([]byte, 0, 32) + i := 0 + if s[0] == '_' { + // Need a capital letter; drop the '_'. + t = append(t, 'X') + i++ + } + // Invariant: if the next letter is lower case, it must be converted + // to upper case. + // That is, we process a word at a time, where words are marked by _ or + // upper case letter. Digits are treated as words. + for ; i < len(s); i++ { + c := s[i] + if c == '_' && i+1 < len(s) && isASCIILower(s[i+1]) { + continue // Skip the underscore in s. + } + if isASCIIDigit(c) { + t = append(t, c) + continue + } + // Assume we have a letter now - if not, it's a bogus identifier. + // The next word is a sequence of characters that must start upper case. + if isASCIILower(c) { + c ^= ' ' // Make it a capital letter. + } + t = append(t, c) // Guaranteed not lower case. + // Accept lower case sequence that follows. + for i+1 < len(s) && isASCIILower(s[i+1]) { + i++ + t = append(t, s[i]) + } + } + return string(t) +} + +// CamelCaseSlice is like CamelCase, but the argument is a slice of strings to +// be joined with "_". +func CamelCaseSlice(elem []string) string { return CamelCase(strings.Join(elem, "_")) } + +// dottedSlice turns a sliced name into a dotted name. +func dottedSlice(elem []string) string { return strings.Join(elem, ".") } + +// Is this field optional? +func isOptional(field *descriptor.FieldDescriptorProto) bool { + return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_OPTIONAL +} + +// Is this field required? +func isRequired(field *descriptor.FieldDescriptorProto) bool { + return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REQUIRED +} + +// Is this field repeated? +func isRepeated(field *descriptor.FieldDescriptorProto) bool { + return field.Label != nil && *field.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED +} + +// Is this field a scalar numeric type? +func isScalar(field *descriptor.FieldDescriptorProto) bool { + if field.Type == nil { + return false + } + switch *field.Type { + case descriptor.FieldDescriptorProto_TYPE_DOUBLE, + descriptor.FieldDescriptorProto_TYPE_FLOAT, + descriptor.FieldDescriptorProto_TYPE_INT64, + descriptor.FieldDescriptorProto_TYPE_UINT64, + descriptor.FieldDescriptorProto_TYPE_INT32, + descriptor.FieldDescriptorProto_TYPE_FIXED64, + descriptor.FieldDescriptorProto_TYPE_FIXED32, + descriptor.FieldDescriptorProto_TYPE_BOOL, + descriptor.FieldDescriptorProto_TYPE_UINT32, + descriptor.FieldDescriptorProto_TYPE_ENUM, + descriptor.FieldDescriptorProto_TYPE_SFIXED32, + descriptor.FieldDescriptorProto_TYPE_SFIXED64, + descriptor.FieldDescriptorProto_TYPE_SINT32, + descriptor.FieldDescriptorProto_TYPE_SINT64: + return true + default: + return false + } +} + +// badToUnderscore is the mapping function used to generate Go names from package names, +// which can be dotted in the input .proto file. It replaces non-identifier characters such as +// dot or dash with underscore. 
+func badToUnderscore(r rune) rune { + if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' { + return r + } + return '_' +} + +// baseName returns the last path element of the name, with the last dotted suffix removed. +func baseName(name string) string { + // First, find the last element + if i := strings.LastIndex(name, "/"); i >= 0 { + name = name[i+1:] + } + // Now drop the suffix + if i := strings.LastIndex(name, "."); i >= 0 { + name = name[0:i] + } + return name +} + +// The SourceCodeInfo message describes the location of elements of a parsed +// .proto file by way of a "path", which is a sequence of integers that +// describe the route from a FileDescriptorProto to the relevant submessage. +// The path alternates between a field number of a repeated field, and an index +// into that repeated field. The constants below define the field numbers that +// are used. +// +// See descriptor.proto for more information about this. +const ( + // tag numbers in FileDescriptorProto + packagePath = 2 // package + messagePath = 4 // message_type + enumPath = 5 // enum_type + // tag numbers in DescriptorProto + messageFieldPath = 2 // field + messageMessagePath = 3 // nested_type + messageEnumPath = 4 // enum_type + messageOneofPath = 8 // oneof_decl + // tag numbers in EnumDescriptorProto + enumValuePath = 2 // value +) diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go new file mode 100644 index 0000000..2660e47 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/grpc/grpc.go @@ -0,0 +1,463 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Package grpc outputs gRPC service descriptions in Go code. +// It runs as a plugin for the Go protocol buffer compiler plugin. +// It is linked in to protoc-gen-go. 
+package grpc + +import ( + "fmt" + "path" + "strconv" + "strings" + + pb "github.com/golang/protobuf/protoc-gen-go/descriptor" + "github.com/golang/protobuf/protoc-gen-go/generator" +) + +// generatedCodeVersion indicates a version of the generated code. +// It is incremented whenever an incompatibility between the generated code and +// the grpc package is introduced; the generated code references +// a constant, grpc.SupportPackageIsVersionN (where N is generatedCodeVersion). +const generatedCodeVersion = 4 + +// Paths for packages used by code generated in this file, +// relative to the import_prefix of the generator.Generator. +const ( + contextPkgPath = "golang.org/x/net/context" + grpcPkgPath = "google.golang.org/grpc" +) + +func init() { + generator.RegisterPlugin(new(grpc)) +} + +// grpc is an implementation of the Go protocol buffer compiler's +// plugin architecture. It generates bindings for gRPC support. +type grpc struct { + gen *generator.Generator +} + +// Name returns the name of this plugin, "grpc". +func (g *grpc) Name() string { + return "grpc" +} + +// The names for packages imported in the generated code. +// They may vary from the final path component of the import path +// if the name is used by other packages. +var ( + contextPkg string + grpcPkg string +) + +// Init initializes the plugin. +func (g *grpc) Init(gen *generator.Generator) { + g.gen = gen + contextPkg = generator.RegisterUniquePackageName("context", nil) + grpcPkg = generator.RegisterUniquePackageName("grpc", nil) +} + +// Given a type name defined in a .proto, return its object. +// Also record that we're using it, to guarantee the associated import. +func (g *grpc) objectNamed(name string) generator.Object { + g.gen.RecordTypeUse(name) + return g.gen.ObjectNamed(name) +} + +// Given a type name defined in a .proto, return its name as we will print it. +func (g *grpc) typeName(str string) string { + return g.gen.TypeName(g.objectNamed(str)) +} + +// P forwards to g.gen.P. +func (g *grpc) P(args ...interface{}) { g.gen.P(args...) } + +// Generate generates code for the services in the given file. +func (g *grpc) Generate(file *generator.FileDescriptor) { + if len(file.FileDescriptorProto.Service) == 0 { + return + } + + g.P("// Reference imports to suppress errors if they are not otherwise used.") + g.P("var _ ", contextPkg, ".Context") + g.P("var _ ", grpcPkg, ".ClientConn") + g.P() + + // Assert version compatibility. + g.P("// This is a compile-time assertion to ensure that this generated file") + g.P("// is compatible with the grpc package it is being compiled against.") + g.P("const _ = ", grpcPkg, ".SupportPackageIsVersion", generatedCodeVersion) + g.P() + + for i, service := range file.FileDescriptorProto.Service { + g.generateService(file, service, i) + } +} + +// GenerateImports generates the import declaration for this file. +func (g *grpc) GenerateImports(file *generator.FileDescriptor) { + if len(file.FileDescriptorProto.Service) == 0 { + return + } + g.P("import (") + g.P(contextPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, contextPkgPath))) + g.P(grpcPkg, " ", strconv.Quote(path.Join(g.gen.ImportPrefix, grpcPkgPath))) + g.P(")") + g.P() +} + +// reservedClientName records whether a client name is reserved on the client side. +var reservedClientName = map[string]bool{ +// TODO: do we need any in gRPC? +} + +func unexport(s string) string { return strings.ToLower(s[:1]) + s[1:] } + +// generateService generates all the code for the named service. 
+func (g *grpc) generateService(file *generator.FileDescriptor, service *pb.ServiceDescriptorProto, index int) { + path := fmt.Sprintf("6,%d", index) // 6 means service. + + origServName := service.GetName() + fullServName := origServName + if pkg := file.GetPackage(); pkg != "" { + fullServName = pkg + "." + fullServName + } + servName := generator.CamelCase(origServName) + + g.P() + g.P("// Client API for ", servName, " service") + g.P() + + // Client interface. + g.P("type ", servName, "Client interface {") + for i, method := range service.Method { + g.gen.PrintComments(fmt.Sprintf("%s,2,%d", path, i)) // 2 means method in a service. + g.P(g.generateClientSignature(servName, method)) + } + g.P("}") + g.P() + + // Client structure. + g.P("type ", unexport(servName), "Client struct {") + g.P("cc *", grpcPkg, ".ClientConn") + g.P("}") + g.P() + + // NewClient factory. + g.P("func New", servName, "Client (cc *", grpcPkg, ".ClientConn) ", servName, "Client {") + g.P("return &", unexport(servName), "Client{cc}") + g.P("}") + g.P() + + var methodIndex, streamIndex int + serviceDescVar := "_" + servName + "_serviceDesc" + // Client method implementations. + for _, method := range service.Method { + var descExpr string + if !method.GetServerStreaming() && !method.GetClientStreaming() { + // Unary RPC method + descExpr = fmt.Sprintf("&%s.Methods[%d]", serviceDescVar, methodIndex) + methodIndex++ + } else { + // Streaming RPC method + descExpr = fmt.Sprintf("&%s.Streams[%d]", serviceDescVar, streamIndex) + streamIndex++ + } + g.generateClientMethod(servName, fullServName, serviceDescVar, method, descExpr) + } + + g.P("// Server API for ", servName, " service") + g.P() + + // Server interface. + serverType := servName + "Server" + g.P("type ", serverType, " interface {") + for i, method := range service.Method { + g.gen.PrintComments(fmt.Sprintf("%s,2,%d", path, i)) // 2 means method in a service. + g.P(g.generateServerSignature(servName, method)) + } + g.P("}") + g.P() + + // Server registration. + g.P("func Register", servName, "Server(s *", grpcPkg, ".Server, srv ", serverType, ") {") + g.P("s.RegisterService(&", serviceDescVar, `, srv)`) + g.P("}") + g.P() + + // Server handler implementations. + var handlerNames []string + for _, method := range service.Method { + hname := g.generateServerMethod(servName, fullServName, method) + handlerNames = append(handlerNames, hname) + } + + // Service descriptor. + g.P("var ", serviceDescVar, " = ", grpcPkg, ".ServiceDesc {") + g.P("ServiceName: ", strconv.Quote(fullServName), ",") + g.P("HandlerType: (*", serverType, ")(nil),") + g.P("Methods: []", grpcPkg, ".MethodDesc{") + for i, method := range service.Method { + if method.GetServerStreaming() || method.GetClientStreaming() { + continue + } + g.P("{") + g.P("MethodName: ", strconv.Quote(method.GetName()), ",") + g.P("Handler: ", handlerNames[i], ",") + g.P("},") + } + g.P("},") + g.P("Streams: []", grpcPkg, ".StreamDesc{") + for i, method := range service.Method { + if !method.GetServerStreaming() && !method.GetClientStreaming() { + continue + } + g.P("{") + g.P("StreamName: ", strconv.Quote(method.GetName()), ",") + g.P("Handler: ", handlerNames[i], ",") + if method.GetServerStreaming() { + g.P("ServerStreams: true,") + } + if method.GetClientStreaming() { + g.P("ClientStreams: true,") + } + g.P("},") + } + g.P("},") + g.P("Metadata: \"", file.GetName(), "\",") + g.P("}") + g.P() +} + +// generateClientSignature returns the client-side signature for a method. 
+func (g *grpc) generateClientSignature(servName string, method *pb.MethodDescriptorProto) string { + origMethName := method.GetName() + methName := generator.CamelCase(origMethName) + if reservedClientName[methName] { + methName += "_" + } + reqArg := ", in *" + g.typeName(method.GetInputType()) + if method.GetClientStreaming() { + reqArg = "" + } + respName := "*" + g.typeName(method.GetOutputType()) + if method.GetServerStreaming() || method.GetClientStreaming() { + respName = servName + "_" + generator.CamelCase(origMethName) + "Client" + } + return fmt.Sprintf("%s(ctx %s.Context%s, opts ...%s.CallOption) (%s, error)", methName, contextPkg, reqArg, grpcPkg, respName) +} + +func (g *grpc) generateClientMethod(servName, fullServName, serviceDescVar string, method *pb.MethodDescriptorProto, descExpr string) { + sname := fmt.Sprintf("/%s/%s", fullServName, method.GetName()) + methName := generator.CamelCase(method.GetName()) + inType := g.typeName(method.GetInputType()) + outType := g.typeName(method.GetOutputType()) + + g.P("func (c *", unexport(servName), "Client) ", g.generateClientSignature(servName, method), "{") + if !method.GetServerStreaming() && !method.GetClientStreaming() { + g.P("out := new(", outType, ")") + // TODO: Pass descExpr to Invoke. + g.P("err := ", grpcPkg, `.Invoke(ctx, "`, sname, `", in, out, c.cc, opts...)`) + g.P("if err != nil { return nil, err }") + g.P("return out, nil") + g.P("}") + g.P() + return + } + streamType := unexport(servName) + methName + "Client" + g.P("stream, err := ", grpcPkg, ".NewClientStream(ctx, ", descExpr, `, c.cc, "`, sname, `", opts...)`) + g.P("if err != nil { return nil, err }") + g.P("x := &", streamType, "{stream}") + if !method.GetClientStreaming() { + g.P("if err := x.ClientStream.SendMsg(in); err != nil { return nil, err }") + g.P("if err := x.ClientStream.CloseSend(); err != nil { return nil, err }") + } + g.P("return x, nil") + g.P("}") + g.P() + + genSend := method.GetClientStreaming() + genRecv := method.GetServerStreaming() + genCloseAndRecv := !method.GetServerStreaming() + + // Stream auxiliary types and methods. + g.P("type ", servName, "_", methName, "Client interface {") + if genSend { + g.P("Send(*", inType, ") error") + } + if genRecv { + g.P("Recv() (*", outType, ", error)") + } + if genCloseAndRecv { + g.P("CloseAndRecv() (*", outType, ", error)") + } + g.P(grpcPkg, ".ClientStream") + g.P("}") + g.P() + + g.P("type ", streamType, " struct {") + g.P(grpcPkg, ".ClientStream") + g.P("}") + g.P() + + if genSend { + g.P("func (x *", streamType, ") Send(m *", inType, ") error {") + g.P("return x.ClientStream.SendMsg(m)") + g.P("}") + g.P() + } + if genRecv { + g.P("func (x *", streamType, ") Recv() (*", outType, ", error) {") + g.P("m := new(", outType, ")") + g.P("if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err }") + g.P("return m, nil") + g.P("}") + g.P() + } + if genCloseAndRecv { + g.P("func (x *", streamType, ") CloseAndRecv() (*", outType, ", error) {") + g.P("if err := x.ClientStream.CloseSend(); err != nil { return nil, err }") + g.P("m := new(", outType, ")") + g.P("if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err }") + g.P("return m, nil") + g.P("}") + g.P() + } +} + +// generateServerSignature returns the server-side signature for a method. 
+func (g *grpc) generateServerSignature(servName string, method *pb.MethodDescriptorProto) string { + origMethName := method.GetName() + methName := generator.CamelCase(origMethName) + if reservedClientName[methName] { + methName += "_" + } + + var reqArgs []string + ret := "error" + if !method.GetServerStreaming() && !method.GetClientStreaming() { + reqArgs = append(reqArgs, contextPkg+".Context") + ret = "(*" + g.typeName(method.GetOutputType()) + ", error)" + } + if !method.GetClientStreaming() { + reqArgs = append(reqArgs, "*"+g.typeName(method.GetInputType())) + } + if method.GetServerStreaming() || method.GetClientStreaming() { + reqArgs = append(reqArgs, servName+"_"+generator.CamelCase(origMethName)+"Server") + } + + return methName + "(" + strings.Join(reqArgs, ", ") + ") " + ret +} + +func (g *grpc) generateServerMethod(servName, fullServName string, method *pb.MethodDescriptorProto) string { + methName := generator.CamelCase(method.GetName()) + hname := fmt.Sprintf("_%s_%s_Handler", servName, methName) + inType := g.typeName(method.GetInputType()) + outType := g.typeName(method.GetOutputType()) + + if !method.GetServerStreaming() && !method.GetClientStreaming() { + g.P("func ", hname, "(srv interface{}, ctx ", contextPkg, ".Context, dec func(interface{}) error, interceptor ", grpcPkg, ".UnaryServerInterceptor) (interface{}, error) {") + g.P("in := new(", inType, ")") + g.P("if err := dec(in); err != nil { return nil, err }") + g.P("if interceptor == nil { return srv.(", servName, "Server).", methName, "(ctx, in) }") + g.P("info := &", grpcPkg, ".UnaryServerInfo{") + g.P("Server: srv,") + g.P("FullMethod: ", strconv.Quote(fmt.Sprintf("/%s/%s", fullServName, methName)), ",") + g.P("}") + g.P("handler := func(ctx ", contextPkg, ".Context, req interface{}) (interface{}, error) {") + g.P("return srv.(", servName, "Server).", methName, "(ctx, req.(*", inType, "))") + g.P("}") + g.P("return interceptor(ctx, in, info, handler)") + g.P("}") + g.P() + return hname + } + streamType := unexport(servName) + methName + "Server" + g.P("func ", hname, "(srv interface{}, stream ", grpcPkg, ".ServerStream) error {") + if !method.GetClientStreaming() { + g.P("m := new(", inType, ")") + g.P("if err := stream.RecvMsg(m); err != nil { return err }") + g.P("return srv.(", servName, "Server).", methName, "(m, &", streamType, "{stream})") + } else { + g.P("return srv.(", servName, "Server).", methName, "(&", streamType, "{stream})") + } + g.P("}") + g.P() + + genSend := method.GetServerStreaming() + genSendAndClose := !method.GetServerStreaming() + genRecv := method.GetClientStreaming() + + // Stream auxiliary types and methods. 
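The auxiliary declarations generated below hide grpc.ServerStream behind a typed per-method interface. For a hypothetical server-streaming RPC `Watch(WatchRequest) returns (stream WatchReply)` on a `Monitor` service, the emitted shape is roughly:

package streamshape // hypothetical; shape of the emitted server-streaming helpers

import "google.golang.org/grpc"

// WatchReply stands in for the generated response message type.
type WatchReply struct{}

type Monitor_WatchServer interface {
	Send(*WatchReply) error
	grpc.ServerStream
}

type monitorWatchServer struct {
	grpc.ServerStream
}

func (x *monitorWatchServer) Send(m *WatchReply) error {
	return x.ServerStream.SendMsg(m)
}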
+ g.P("type ", servName, "_", methName, "Server interface {") + if genSend { + g.P("Send(*", outType, ") error") + } + if genSendAndClose { + g.P("SendAndClose(*", outType, ") error") + } + if genRecv { + g.P("Recv() (*", inType, ", error)") + } + g.P(grpcPkg, ".ServerStream") + g.P("}") + g.P() + + g.P("type ", streamType, " struct {") + g.P(grpcPkg, ".ServerStream") + g.P("}") + g.P() + + if genSend { + g.P("func (x *", streamType, ") Send(m *", outType, ") error {") + g.P("return x.ServerStream.SendMsg(m)") + g.P("}") + g.P() + } + if genSendAndClose { + g.P("func (x *", streamType, ") SendAndClose(m *", outType, ") error {") + g.P("return x.ServerStream.SendMsg(m)") + g.P("}") + g.P() + } + if genRecv { + g.P("func (x *", streamType, ") Recv() (*", inType, ", error) {") + g.P("m := new(", inType, ")") + g.P("if err := x.ServerStream.RecvMsg(m); err != nil { return nil, err }") + g.P("return m, nil") + g.P("}") + g.P() + } + + return hname +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/link_grpc.go b/vendor/github.com/golang/protobuf/protoc-gen-go/link_grpc.go new file mode 100644 index 0000000..532a550 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/link_grpc.go @@ -0,0 +1,34 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2015 The Go Authors. All rights reserved. +// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package main + +import _ "github.com/golang/protobuf/protoc-gen-go/grpc" diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/main.go b/vendor/github.com/golang/protobuf/protoc-gen-go/main.go new file mode 100644 index 0000000..8e2486d --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/main.go @@ -0,0 +1,98 @@ +// Go support for Protocol Buffers - Google's data interchange format +// +// Copyright 2010 The Go Authors. All rights reserved. 
+// https://github.com/golang/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// protoc-gen-go is a plugin for the Google protocol buffer compiler to generate +// Go code. Run it by building this program and putting it in your path with +// the name +// protoc-gen-go +// That word 'go' at the end becomes part of the option string set for the +// protocol compiler, so once the protocol compiler (protoc) is installed +// you can run +// protoc --go_out=output_directory input_directory/file.proto +// to generate Go bindings for the protocol defined by file.proto. +// With that input, the output will be written to +// output_directory/file.pb.go +// +// The generated code is documented in the package comment for +// the library. +// +// See the README and documentation for protocol buffers to learn more: +// https://developers.google.com/protocol-buffers/ +package main + +import ( + "io/ioutil" + "os" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/protoc-gen-go/generator" +) + +func main() { + // Begin by allocating a generator. The request and response structures are stored there + // so we can do error handling easily - the response structure contains the field to + // report failure. + g := generator.New() + + data, err := ioutil.ReadAll(os.Stdin) + if err != nil { + g.Error(err, "reading input") + } + + if err := proto.Unmarshal(data, g.Request); err != nil { + g.Error(err, "parsing input proto") + } + + if len(g.Request.FileToGenerate) == 0 { + g.Fail("no files to generate") + } + + g.CommandLineParameters(g.Request.GetParameter()) + + // Create a wrapped version of the Descriptors and EnumDescriptors that + // point to the file that defines them. + g.WrapTypes() + + g.SetPackageNames() + g.BuildTypeNameMap() + + g.GenerateAllFiles() + + // Send back the results. 
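main() follows the standard protoc plugin contract: a serialized CodeGeneratorRequest arrives on stdin and a serialized CodeGeneratorResponse must be written to stdout. A minimal independent plugin honouring the same contract (the ".txt" output name and placeholder content are invented for illustration) could look like:

package main

import (
	"io/ioutil"
	"log"
	"os"

	"github.com/golang/protobuf/proto"
	plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
)

func main() {
	in, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		log.Fatal(err)
	}
	req := new(plugin.CodeGeneratorRequest)
	if err := proto.Unmarshal(in, req); err != nil {
		log.Fatal(err)
	}
	resp := new(plugin.CodeGeneratorResponse)
	for _, f := range req.FileToGenerate {
		resp.File = append(resp.File, &plugin.CodeGeneratorResponse_File{
			Name:    proto.String(f + ".txt"),
			Content: proto.String("placeholder output\n"),
		})
	}
	out, err := proto.Marshal(resp)
	if err != nil {
		log.Fatal(err)
	}
	os.Stdout.Write(out)
}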
+ data, err = proto.Marshal(g.Response) + if err != nil { + g.Error(err, "failed to marshal output proto") + } + _, err = os.Stdout.Write(data) + if err != nil { + g.Error(err, "failed to write output proto") + } +} diff --git a/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go new file mode 100644 index 0000000..c608a24 --- /dev/null +++ b/vendor/github.com/golang/protobuf/protoc-gen-go/plugin/plugin.pb.go @@ -0,0 +1,293 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/compiler/plugin.proto + +/* +Package plugin_go is a generated protocol buffer package. + +It is generated from these files: + google/protobuf/compiler/plugin.proto + +It has these top-level messages: + Version + CodeGeneratorRequest + CodeGeneratorResponse +*/ +package plugin_go + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import google_protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The version number of protocol compiler. +type Version struct { + Major *int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"` + Minor *int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"` + Patch *int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"` + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + Suffix *string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Version) Reset() { *m = Version{} } +func (m *Version) String() string { return proto.CompactTextString(m) } +func (*Version) ProtoMessage() {} +func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Version) GetMajor() int32 { + if m != nil && m.Major != nil { + return *m.Major + } + return 0 +} + +func (m *Version) GetMinor() int32 { + if m != nil && m.Minor != nil { + return *m.Minor + } + return 0 +} + +func (m *Version) GetPatch() int32 { + if m != nil && m.Patch != nil { + return *m.Patch + } + return 0 +} + +func (m *Version) GetSuffix() string { + if m != nil && m.Suffix != nil { + return *m.Suffix + } + return "" +} + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +type CodeGeneratorRequest struct { + // The .proto files that were explicitly listed on the command-line. The + // code generator should generate code only for these files. Each file's + // descriptor will be included in proto_file, below. + FileToGenerate []string `protobuf:"bytes,1,rep,name=file_to_generate,json=fileToGenerate" json:"file_to_generate,omitempty"` + // The generator parameter passed on the command-line. + Parameter *string `protobuf:"bytes,2,opt,name=parameter" json:"parameter,omitempty"` + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. 
+ // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + // + // Type names of fields and extensions in the FileDescriptorProto are always + // fully qualified. + ProtoFile []*google_protobuf.FileDescriptorProto `protobuf:"bytes,15,rep,name=proto_file,json=protoFile" json:"proto_file,omitempty"` + // The version number of protocol compiler. + CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} } +func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) } +func (*CodeGeneratorRequest) ProtoMessage() {} +func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *CodeGeneratorRequest) GetFileToGenerate() []string { + if m != nil { + return m.FileToGenerate + } + return nil +} + +func (m *CodeGeneratorRequest) GetParameter() string { + if m != nil && m.Parameter != nil { + return *m.Parameter + } + return "" +} + +func (m *CodeGeneratorRequest) GetProtoFile() []*google_protobuf.FileDescriptorProto { + if m != nil { + return m.ProtoFile + } + return nil +} + +func (m *CodeGeneratorRequest) GetCompilerVersion() *Version { + if m != nil { + return m.CompilerVersion + } + return nil +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +type CodeGeneratorResponse struct { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. + // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + Error *string `protobuf:"bytes,1,opt,name=error" json:"error,omitempty"` + File []*CodeGeneratorResponse_File `protobuf:"bytes,15,rep,name=file" json:"file,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} } +func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) } +func (*CodeGeneratorResponse) ProtoMessage() {} +func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *CodeGeneratorResponse) GetError() string { + if m != nil && m.Error != nil { + return *m.Error + } + return "" +} + +func (m *CodeGeneratorResponse) GetFile() []*CodeGeneratorResponse_File { + if m != nil { + return m.File + } + return nil +} + +// Represents a single generated file. +type CodeGeneratorResponse_File struct { + // The file name, relative to the output directory. The name must not + // contain "." or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". 
+ // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). + // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + InsertionPoint *string `protobuf:"bytes,2,opt,name=insertion_point,json=insertionPoint" json:"insertion_point,omitempty"` + // The file contents. 
+ Content *string `protobuf:"bytes,15,opt,name=content" json:"content,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorResponse_File{} } +func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) } +func (*CodeGeneratorResponse_File) ProtoMessage() {} +func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} } + +func (m *CodeGeneratorResponse_File) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *CodeGeneratorResponse_File) GetInsertionPoint() string { + if m != nil && m.InsertionPoint != nil { + return *m.InsertionPoint + } + return "" +} + +func (m *CodeGeneratorResponse_File) GetContent() string { + if m != nil && m.Content != nil { + return *m.Content + } + return "" +} + +func init() { + proto.RegisterType((*Version)(nil), "google.protobuf.compiler.Version") + proto.RegisterType((*CodeGeneratorRequest)(nil), "google.protobuf.compiler.CodeGeneratorRequest") + proto.RegisterType((*CodeGeneratorResponse)(nil), "google.protobuf.compiler.CodeGeneratorResponse") + proto.RegisterType((*CodeGeneratorResponse_File)(nil), "google.protobuf.compiler.CodeGeneratorResponse.File") +} + +func init() { proto.RegisterFile("google/protobuf/compiler/plugin.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 417 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xcf, 0x6a, 0x14, 0x41, + 0x10, 0xc6, 0x19, 0x77, 0x63, 0x98, 0x8a, 0x64, 0x43, 0x13, 0xa5, 0x09, 0x39, 0x8c, 0x8b, 0xe2, + 0x5c, 0x32, 0x0b, 0xc1, 0x8b, 0x78, 0x4b, 0x44, 0x3d, 0x78, 0x58, 0x1a, 0xf1, 0x20, 0xc8, 0x30, + 0x99, 0xd4, 0x74, 0x5a, 0x66, 0xba, 0xc6, 0xee, 0x1e, 0xf1, 0x49, 0x7d, 0x0f, 0xdf, 0x40, 0xfa, + 0xcf, 0x24, 0xb2, 0xb8, 0xa7, 0xee, 0xef, 0x57, 0xd5, 0xd5, 0x55, 0x1f, 0x05, 0x2f, 0x25, 0x91, + 0xec, 0x71, 0x33, 0x1a, 0x72, 0x74, 0x33, 0x75, 0x9b, 0x96, 0x86, 0x51, 0xf5, 0x68, 0x36, 0x63, + 0x3f, 0x49, 0xa5, 0xab, 0x10, 0x60, 0x3c, 0xa6, 0x55, 0x73, 0x5a, 0x35, 0xa7, 0x9d, 0x15, 0xbb, + 0x05, 0x6e, 0xd1, 0xb6, 0x46, 0x8d, 0x8e, 0x4c, 0xcc, 0x5e, 0xb7, 0x70, 0xf8, 0x05, 0x8d, 0x55, + 0xa4, 0xd9, 0x29, 0x1c, 0x0c, 0xcd, 0x77, 0x32, 0x3c, 0x2b, 0xb2, 0xf2, 0x40, 0x44, 0x11, 0xa8, + 0xd2, 0x64, 0xf8, 0xa3, 0x44, 0xbd, 0xf0, 0x74, 0x6c, 0x5c, 0x7b, 0xc7, 0x17, 0x91, 0x06, 0xc1, + 0x9e, 0xc1, 0x63, 0x3b, 0x75, 0x9d, 0xfa, 0xc5, 0x97, 0x45, 0x56, 0xe6, 0x22, 0xa9, 0xf5, 0x9f, + 0x0c, 0x4e, 0xaf, 0xe9, 0x16, 0x3f, 0xa0, 0x46, 0xd3, 0x38, 0x32, 0x02, 0x7f, 0x4c, 0x68, 0x1d, + 0x2b, 0xe1, 0xa4, 0x53, 0x3d, 0xd6, 0x8e, 0x6a, 0x19, 0x63, 0xc8, 0xb3, 0x62, 0x51, 0xe6, 0xe2, + 0xd8, 0xf3, 0xcf, 0x94, 0x5e, 0x20, 0x3b, 0x87, 0x7c, 0x6c, 0x4c, 0x33, 0xa0, 0xc3, 0xd8, 0x4a, + 0x2e, 0x1e, 0x00, 0xbb, 0x06, 0x08, 0xe3, 0xd4, 0xfe, 0x15, 0x5f, 0x15, 0x8b, 0xf2, 0xe8, 0xf2, + 0x45, 0xb5, 0x6b, 0xcb, 0x7b, 0xd5, 0xe3, 0xbb, 0x7b, 0x03, 0xb6, 0x1e, 0x8b, 0x3c, 0x44, 0x7d, + 0x84, 0x7d, 0x82, 0x93, 0xd9, 0xb8, 0xfa, 0x67, 0xf4, 0x24, 0x8c, 0x77, 0x74, 0xf9, 0xbc, 0xda, + 0xe7, 0x70, 0x95, 0xcc, 0x13, 0xab, 0x99, 0x24, 0xb0, 0xfe, 0x9d, 0xc1, 0xd3, 0x9d, 0x99, 0xed, + 0x48, 0xda, 0xa2, 0xf7, 0x0e, 0x8d, 0x49, 0x3e, 0xe7, 0x22, 0x0a, 0xf6, 0x11, 0x96, 0xff, 0x34, + 0xff, 0x7a, 0xff, 0x8f, 0xff, 0x2d, 0x1a, 0x66, 0x13, 0xa1, 0xc2, 0xd9, 0x37, 0x58, 0x86, 0x79, + 0x18, 0x2c, 0x75, 0x33, 0x60, 0xfa, 0x26, 0xdc, 0xd9, 0x2b, 0x58, 0x29, 0x6d, 0xd1, 0x38, 0x45, + 0xba, 0x1e, 0x49, 0x69, 
0x97, 0xcc, 0x3c, 0xbe, 0xc7, 0x5b, 0x4f, 0x19, 0x87, 0xc3, 0x96, 0xb4, + 0x43, 0xed, 0xf8, 0x2a, 0x24, 0xcc, 0xf2, 0x4a, 0xc2, 0x79, 0x4b, 0xc3, 0xde, 0xfe, 0xae, 0x9e, + 0x6c, 0xc3, 0x6e, 0x06, 0x7b, 0xed, 0xd7, 0x37, 0x52, 0xb9, 0xbb, 0xe9, 0xc6, 0x87, 0x37, 0x92, + 0xfa, 0x46, 0xcb, 0x87, 0x65, 0x0c, 0x97, 0xf6, 0x42, 0xa2, 0xbe, 0x90, 0x94, 0x56, 0xfa, 0x6d, + 0x3c, 0x6a, 0x49, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xf7, 0x15, 0x40, 0xc5, 0xfe, 0x02, 0x00, + 0x00, +} diff --git a/vendor/github.com/kisielk/gotool/LEGAL b/vendor/github.com/kisielk/gotool/LEGAL new file mode 100644 index 0000000..72b859c --- /dev/null +++ b/vendor/github.com/kisielk/gotool/LEGAL @@ -0,0 +1,32 @@ +All the files in this distribution are covered under either the MIT +license (see the file LICENSE) except some files mentioned below. + +match.go, match_test.go: + + Copyright (c) 2009 The Go Authors. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/kisielk/gotool/LICENSE b/vendor/github.com/kisielk/gotool/LICENSE new file mode 100644 index 0000000..1cbf651 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013 Kamil Kisiel + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kisielk/gotool/go13.go b/vendor/github.com/kisielk/gotool/go13.go new file mode 100644 index 0000000..2dd9b3f --- /dev/null +++ b/vendor/github.com/kisielk/gotool/go13.go @@ -0,0 +1,15 @@ +// +build !go1.4 + +package gotool + +import ( + "go/build" + "path/filepath" + "runtime" +) + +var gorootSrc = filepath.Join(runtime.GOROOT(), "src", "pkg") + +func shouldIgnoreImport(p *build.Package) bool { + return true +} diff --git a/vendor/github.com/kisielk/gotool/go14-15.go b/vendor/github.com/kisielk/gotool/go14-15.go new file mode 100644 index 0000000..aa99a32 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/go14-15.go @@ -0,0 +1,15 @@ +// +build go1.4,!go1.6 + +package gotool + +import ( + "go/build" + "path/filepath" + "runtime" +) + +var gorootSrc = filepath.Join(runtime.GOROOT(), "src") + +func shouldIgnoreImport(p *build.Package) bool { + return true +} diff --git a/vendor/github.com/kisielk/gotool/go16-18.go b/vendor/github.com/kisielk/gotool/go16-18.go new file mode 100644 index 0000000..f25cec1 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/go16-18.go @@ -0,0 +1,15 @@ +// +build go1.6,!go1.9 + +package gotool + +import ( + "go/build" + "path/filepath" + "runtime" +) + +var gorootSrc = filepath.Join(runtime.GOROOT(), "src") + +func shouldIgnoreImport(p *build.Package) bool { + return p == nil || len(p.InvalidGoFiles) == 0 +} diff --git a/vendor/github.com/kisielk/gotool/internal/load/path.go b/vendor/github.com/kisielk/gotool/internal/load/path.go new file mode 100644 index 0000000..74e15b9 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/internal/load/path.go @@ -0,0 +1,27 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.9 + +package load + +import ( + "strings" +) + +// hasPathPrefix reports whether the path s begins with the +// elements in prefix. +func hasPathPrefix(s, prefix string) bool { + switch { + default: + return false + case len(s) == len(prefix): + return s == prefix + case len(s) > len(prefix): + if prefix != "" && prefix[len(prefix)-1] == '/' { + return strings.HasPrefix(s, prefix) + } + return s[len(prefix)] == '/' && s[:len(prefix)] == prefix + } +} diff --git a/vendor/github.com/kisielk/gotool/internal/load/pkg.go b/vendor/github.com/kisielk/gotool/internal/load/pkg.go new file mode 100644 index 0000000..b937ede --- /dev/null +++ b/vendor/github.com/kisielk/gotool/internal/load/pkg.go @@ -0,0 +1,25 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.9 + +// Package load loads packages. +package load + +import ( + "strings" +) + +// isStandardImportPath reports whether $GOROOT/src/path should be considered +// part of the standard distribution. For historical reasons we allow people to add +// their own code to $GOROOT instead of using $GOPATH, but we assume that +// code will start with a domain name (dot in the first element). 
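+//
+// Illustrative examples, not part of the upstream source:
+//
+//	isStandardImportPath("fmt")            // true  ("fmt" has no dot)
+//	isStandardImportPath("net/http")       // true  (first element is "net")
+//	isStandardImportPath("github.com/x/y") // false ("github.com" contains a dot)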
+func isStandardImportPath(path string) bool { + i := strings.Index(path, "/") + if i < 0 { + i = len(path) + } + elem := path[:i] + return !strings.Contains(elem, ".") +} diff --git a/vendor/github.com/kisielk/gotool/internal/load/search.go b/vendor/github.com/kisielk/gotool/internal/load/search.go new file mode 100644 index 0000000..17ed62d --- /dev/null +++ b/vendor/github.com/kisielk/gotool/internal/load/search.go @@ -0,0 +1,354 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.9 + +package load + +import ( + "fmt" + "go/build" + "log" + "os" + "path" + "path/filepath" + "regexp" + "strings" +) + +// Context specifies values for operation of ImportPaths that would +// otherwise come from cmd/go/internal/cfg package. +// +// This is a construct added for gotool purposes and doesn't have +// an equivalent upstream in cmd/go. +type Context struct { + // BuildContext is the build context to use. + BuildContext build.Context + + // GOROOTsrc is the location of the src directory in GOROOT. + // At this time, it's used only in MatchPackages to skip + // GOOROOT/src entry from BuildContext.SrcDirs output. + GOROOTsrc string +} + +// allPackages returns all the packages that can be found +// under the $GOPATH directories and $GOROOT matching pattern. +// The pattern is either "all" (all packages), "std" (standard packages), +// "cmd" (standard commands), or a path including "...". +func (c *Context) allPackages(pattern string) []string { + pkgs := c.MatchPackages(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// allPackagesInFS is like allPackages but is passed a pattern +// beginning ./ or ../, meaning it should scan the tree rooted +// at the given directory. There are ... in the pattern too. +func (c *Context) allPackagesInFS(pattern string) []string { + pkgs := c.MatchPackagesInFS(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// MatchPackages returns a list of package paths matching pattern +// (see go help packages for pattern syntax). +func (c *Context) MatchPackages(pattern string) []string { + match := func(string) bool { return true } + treeCanMatch := func(string) bool { return true } + if !IsMetaPackage(pattern) { + match = matchPattern(pattern) + treeCanMatch = treeCanMatchPattern(pattern) + } + + have := map[string]bool{ + "builtin": true, // ignore pseudo-package that exists only for documentation + } + if !c.BuildContext.CgoEnabled { + have["runtime/cgo"] = true // ignore during walk + } + var pkgs []string + + for _, src := range c.BuildContext.SrcDirs() { + if (pattern == "std" || pattern == "cmd") && src != c.GOROOTsrc { + continue + } + src = filepath.Clean(src) + string(filepath.Separator) + root := src + if pattern == "cmd" { + root += "cmd" + string(filepath.Separator) + } + filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + if err != nil || path == src { + return nil + } + + want := true + // Avoid .foo, _foo, and testdata directory trees. + _, elem := filepath.Split(path) + if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { + want = false + } + + name := filepath.ToSlash(path[len(src):]) + if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { + // The name "std" is only the standard library. 
+ // If the name is cmd, it's the root of the command tree. + want = false + } + if !treeCanMatch(name) { + want = false + } + + if !fi.IsDir() { + if fi.Mode()&os.ModeSymlink != 0 && want { + if target, err := os.Stat(path); err == nil && target.IsDir() { + fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path) + } + } + return nil + } + if !want { + return filepath.SkipDir + } + + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + pkg, err := c.BuildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); noGo { + return nil + } + } + + // If we are expanding "cmd", skip main + // packages under cmd/vendor. At least as of + // March, 2017, there is one there for the + // vendored pprof tool. + if pattern == "cmd" && strings.HasPrefix(pkg.ImportPath, "cmd/vendor") && pkg.Name == "main" { + return nil + } + + pkgs = append(pkgs, name) + return nil + }) + } + return pkgs +} + +// MatchPackagesInFS returns a list of package paths matching pattern, +// which must begin with ./ or ../ +// (see go help packages for pattern syntax). +func (c *Context) MatchPackagesInFS(pattern string) []string { + // Find directory to begin the scan. + // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + // pattern begins with ./ or ../. + // path.Clean will discard the ./ but not the ../. + // We need to preserve the ./ for pattern matching + // and in the returned import paths. + prefix := "" + if strings.HasPrefix(pattern, "./") { + prefix = "./" + } + match := matchPattern(pattern) + + var pkgs []string + filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() { + return nil + } + if path == dir { + // filepath.Walk starts at dir and recurses. For the recursive case, + // the path is the result of filepath.Join, which calls filepath.Clean. + // The initial case is not Cleaned, though, so we do this explicitly. + // + // This converts a path like "./io/" to "io". Without this step, running + // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io + // package, because prepending the prefix "./" to the unclean path would + // result in "././io", and match("././io") returns false. + path = filepath.Clean(path) + } + + // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := prefix + filepath.ToSlash(path) + if !match(name) { + return nil + } + + // We keep the directory if we can import it, or if we can't import it + // due to invalid Go source files. This means that directories containing + // parse errors will be built (and fail) instead of being silently skipped + // as not matching the pattern. Go 1.5 and earlier skipped, but that + // behavior means people miss serious mistakes. + // See golang.org/issue/11407. + if p, err := c.BuildContext.ImportDir(path, 0); err != nil && (p == nil || len(p.InvalidGoFiles) == 0) { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + return pkgs +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. 
+// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +// Unfortunately, there are two special cases. Quoting "go help packages": +// +// First, /... at the end of the pattern can match an empty string, +// so that net/... matches both net and packages in its subdirectories, like net/http. +// Second, any slash-separted pattern element containing a wildcard never +// participates in a match of the "vendor" element in the path of a vendored +// package, so that ./... does not match packages in subdirectories of +// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do. +// Note, however, that a directory named vendor that itself contains code +// is not a vendored package: cmd/vendor would be a command named vendor, +// and the pattern cmd/... matches it. +func matchPattern(pattern string) func(name string) bool { + // Convert pattern to regular expression. + // The strategy for the trailing /... is to nest it in an explicit ? expression. + // The strategy for the vendor exclusion is to change the unmatchable + // vendor strings to a disallowed code point (vendorChar) and to use + // "(anything but that codepoint)*" as the implementation of the ... wildcard. + // This is a bit complicated but the obvious alternative, + // namely a hand-written search like in most shell glob matchers, + // is too easy to make accidentally exponential. + // Using package regexp guarantees linear-time matching. + + const vendorChar = "\x00" + + if strings.Contains(pattern, vendorChar) { + return func(name string) bool { return false } + } + + re := regexp.QuoteMeta(pattern) + re = replaceVendor(re, vendorChar) + switch { + case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`): + re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)` + case re == vendorChar+`/\.\.\.`: + re = `(/vendor|/` + vendorChar + `/\.\.\.)` + case strings.HasSuffix(re, `/\.\.\.`): + re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?` + } + re = strings.Replace(re, `\.\.\.`, `[^`+vendorChar+`]*`, -1) + + reg := regexp.MustCompile(`^` + re + `$`) + + return func(name string) bool { + if strings.Contains(name, vendorChar) { + return false + } + return reg.MatchString(replaceVendor(name, vendorChar)) + } +} + +// replaceVendor returns the result of replacing +// non-trailing vendor path elements in x with repl. +func replaceVendor(x, repl string) string { + if !strings.Contains(x, "vendor") { + return x + } + elem := strings.Split(x, "/") + for i := 0; i < len(elem)-1; i++ { + if elem[i] == "vendor" { + elem[i] = repl + } + } + return strings.Join(elem, "/") +} + +// ImportPaths returns the import paths to use for the given command line. +func (c *Context) ImportPaths(args []string) []string { + args = c.ImportPathsNoDotExpansion(args) + var out []string + for _, a := range args { + if strings.Contains(a, "...") { + if build.IsLocalImport(a) { + out = append(out, c.allPackagesInFS(a)...) + } else { + out = append(out, c.allPackages(a)...) 
+ } + continue + } + out = append(out, a) + } + return out +} + +// ImportPathsNoDotExpansion returns the import paths to use for the given +// command line, but it does no ... expansion. +func (c *Context) ImportPathsNoDotExpansion(args []string) []string { + if len(args) == 0 { + return []string{"."} + } + var out []string + for _, a := range args { + // Arguments are supposed to be import paths, but + // as a courtesy to Windows developers, rewrite \ to / + // in command-line arguments. Handles .\... and so on. + if filepath.Separator == '\\' { + a = strings.Replace(a, `\`, `/`, -1) + } + + // Put argument in canonical form, but preserve leading ./. + if strings.HasPrefix(a, "./") { + a = "./" + path.Clean(a) + if a == "./." { + a = "." + } + } else { + a = path.Clean(a) + } + if IsMetaPackage(a) { + out = append(out, c.allPackages(a)...) + continue + } + out = append(out, a) + } + return out +} + +// IsMetaPackage checks if name is a reserved package name that expands to multiple packages. +func IsMetaPackage(name string) bool { + return name == "std" || name == "cmd" || name == "all" +} diff --git a/vendor/github.com/kisielk/gotool/match.go b/vendor/github.com/kisielk/gotool/match.go new file mode 100644 index 0000000..4dbdbff --- /dev/null +++ b/vendor/github.com/kisielk/gotool/match.go @@ -0,0 +1,56 @@ +// Copyright (c) 2009 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build go1.9 + +package gotool + +import ( + "path/filepath" + + "github.com/kisielk/gotool/internal/load" +) + +// importPaths returns the import paths to use for the given command line. +func (c *Context) importPaths(args []string) []string { + lctx := load.Context{ + BuildContext: c.BuildContext, + GOROOTsrc: c.joinPath(c.BuildContext.GOROOT, "src"), + } + return lctx.ImportPaths(args) +} + +// joinPath calls c.BuildContext.JoinPath (if not nil) or else filepath.Join. +// +// It's a copy of the unexported build.Context.joinPath helper. 
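+//
+// Illustrative example, not part of the upstream source: with the default
+// Context, c.BuildContext.JoinPath is nil, so
+//
+//	c.joinPath(c.BuildContext.GOROOT, "src")
+//
+// is simply filepath.Join of those elements.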
+func (c *Context) joinPath(elem ...string) string { + if f := c.BuildContext.JoinPath; f != nil { + return f(elem...) + } + return filepath.Join(elem...) +} diff --git a/vendor/github.com/kisielk/gotool/match18.go b/vendor/github.com/kisielk/gotool/match18.go new file mode 100644 index 0000000..6d6b136 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/match18.go @@ -0,0 +1,317 @@ +// Copyright (c) 2009 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !go1.9 + +package gotool + +import ( + "fmt" + "go/build" + "log" + "os" + "path" + "path/filepath" + "regexp" + "strings" +) + +// This file contains code from the Go distribution. + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +func matchPattern(pattern string) func(name string) bool { + re := regexp.QuoteMeta(pattern) + re = strings.Replace(re, `\.\.\.`, `.*`, -1) + // Special case: foo/... matches foo too. + if strings.HasSuffix(re, `/.*`) { + re = re[:len(re)-len(`/.*`)] + `(/.*)?` + } + reg := regexp.MustCompile(`^` + re + `$`) + return reg.MatchString +} + +// matchPackages returns a list of package paths matching pattern +// (see go help packages for pattern syntax). 
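+//
+// Illustrative examples, not part of the upstream source:
+//
+//	c.matchPackages("std")                     // standard-library packages under $GOROOT/src
+//	c.matchPackages("golang.org/x/tools/...")  // packages under that prefix in $GOPATH (and $GOROOT)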
+func (c *Context) matchPackages(pattern string) []string { + match := func(string) bool { return true } + treeCanMatch := func(string) bool { return true } + if !isMetaPackage(pattern) { + match = matchPattern(pattern) + treeCanMatch = treeCanMatchPattern(pattern) + } + + have := map[string]bool{ + "builtin": true, // ignore pseudo-package that exists only for documentation + } + if !c.BuildContext.CgoEnabled { + have["runtime/cgo"] = true // ignore during walk + } + var pkgs []string + + for _, src := range c.BuildContext.SrcDirs() { + if (pattern == "std" || pattern == "cmd") && src != gorootSrc { + continue + } + src = filepath.Clean(src) + string(filepath.Separator) + root := src + if pattern == "cmd" { + root += "cmd" + string(filepath.Separator) + } + filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == src { + return nil + } + + // Avoid .foo, _foo, and testdata directory trees. + _, elem := filepath.Split(path) + if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := filepath.ToSlash(path[len(src):]) + if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { + // The name "std" is only the standard library. + // If the name is cmd, it's the root of the command tree. + return filepath.SkipDir + } + if !treeCanMatch(name) { + return filepath.SkipDir + } + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = c.BuildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); noGo { + return nil + } + } + pkgs = append(pkgs, name) + return nil + }) + } + return pkgs +} + +// importPathsNoDotExpansion returns the import paths to use for the given +// command line, but it does no ... expansion. +func (c *Context) importPathsNoDotExpansion(args []string) []string { + if len(args) == 0 { + return []string{"."} + } + var out []string + for _, a := range args { + // Arguments are supposed to be import paths, but + // as a courtesy to Windows developers, rewrite \ to / + // in command-line arguments. Handles .\... and so on. + if filepath.Separator == '\\' { + a = strings.Replace(a, `\`, `/`, -1) + } + + // Put argument in canonical form, but preserve leading ./. + if strings.HasPrefix(a, "./") { + a = "./" + path.Clean(a) + if a == "./." { + a = "." + } + } else { + a = path.Clean(a) + } + if isMetaPackage(a) { + out = append(out, c.allPackages(a)...) + continue + } + out = append(out, a) + } + return out +} + +// importPaths returns the import paths to use for the given command line. +func (c *Context) importPaths(args []string) []string { + args = c.importPathsNoDotExpansion(args) + var out []string + for _, a := range args { + if strings.Contains(a, "...") { + if build.IsLocalImport(a) { + out = append(out, c.allPackagesInFS(a)...) + } else { + out = append(out, c.allPackages(a)...) + } + continue + } + out = append(out, a) + } + return out +} + +// allPackages returns all the packages that can be found +// under the $GOPATH directories and $GOROOT matching pattern. +// The pattern is either "all" (all packages), "std" (standard packages), +// "cmd" (standard commands), or a path including "...". 
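+//
+// A pattern that matches nothing is not an error: a warning is printed to
+// stderr and the empty result is returned unchanged.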
+func (c *Context) allPackages(pattern string) []string { + pkgs := c.matchPackages(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// allPackagesInFS is like allPackages but is passed a pattern +// beginning ./ or ../, meaning it should scan the tree rooted +// at the given directory. There are ... in the pattern too. +func (c *Context) allPackagesInFS(pattern string) []string { + pkgs := c.matchPackagesInFS(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// matchPackagesInFS returns a list of package paths matching pattern, +// which must begin with ./ or ../ +// (see go help packages for pattern syntax). +func (c *Context) matchPackagesInFS(pattern string) []string { + // Find directory to begin the scan. + // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + // pattern begins with ./ or ../. + // path.Clean will discard the ./ but not the ../. + // We need to preserve the ./ for pattern matching + // and in the returned import paths. + prefix := "" + if strings.HasPrefix(pattern, "./") { + prefix = "./" + } + match := matchPattern(pattern) + + var pkgs []string + filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() { + return nil + } + if path == dir { + // filepath.Walk starts at dir and recurses. For the recursive case, + // the path is the result of filepath.Join, which calls filepath.Clean. + // The initial case is not Cleaned, though, so we do this explicitly. + // + // This converts a path like "./io/" to "io". Without this step, running + // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io + // package, because prepending the prefix "./" to the unclean path would + // result in "././io", and match("././io") returns false. + path = filepath.Clean(path) + } + + // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := prefix + filepath.ToSlash(path) + if !match(name) { + return nil + } + + // We keep the directory if we can import it, or if we can't import it + // due to invalid Go source files. This means that directories containing + // parse errors will be built (and fail) instead of being silently skipped + // as not matching the pattern. Go 1.5 and earlier skipped, but that + // behavior means people miss serious mistakes. + // See golang.org/issue/11407. + if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + return pkgs +} + +// isMetaPackage checks if name is a reserved package name that expands to multiple packages. +func isMetaPackage(name string) bool { + return name == "std" || name == "cmd" || name == "all" +} + +// isStandardImportPath reports whether $GOROOT/src/path should be considered +// part of the standard distribution. For historical reasons we allow people to add +// their own code to $GOROOT instead of using $GOPATH, but we assume that +// code will start with a domain name (dot in the first element). 
+func isStandardImportPath(path string) bool { + i := strings.Index(path, "/") + if i < 0 { + i = len(path) + } + elem := path[:i] + return !strings.Contains(elem, ".") +} + +// hasPathPrefix reports whether the path s begins with the +// elements in prefix. +func hasPathPrefix(s, prefix string) bool { + switch { + default: + return false + case len(s) == len(prefix): + return s == prefix + case len(s) > len(prefix): + if prefix != "" && prefix[len(prefix)-1] == '/' { + return strings.HasPrefix(s, prefix) + } + return s[len(prefix)] == '/' && s[:len(prefix)] == prefix + } +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. +// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} diff --git a/vendor/github.com/kisielk/gotool/tool.go b/vendor/github.com/kisielk/gotool/tool.go new file mode 100644 index 0000000..c7409e1 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/tool.go @@ -0,0 +1,48 @@ +// Package gotool contains utility functions used to implement the standard +// "cmd/go" tool, provided as a convenience to developers who want to write +// tools with similar semantics. +package gotool + +import "go/build" + +// Export functions here to make it easier to keep the implementations up to date with upstream. + +// DefaultContext is the default context that uses build.Default. +var DefaultContext = Context{ + BuildContext: build.Default, +} + +// A Context specifies the supporting context. +type Context struct { + // BuildContext is the build.Context that is used when computing import paths. + BuildContext build.Context +} + +// ImportPaths returns the import paths to use for the given command line. +// +// The path "all" is expanded to all packages in $GOPATH and $GOROOT. +// The path "std" is expanded to all packages in the Go standard library. +// The path "cmd" is expanded to all Go standard commands. +// The string "..." is treated as a wildcard within a path. +// When matching recursively, directories are ignored if they are prefixed with +// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". +// Relative import paths are not converted to full import paths. +// If args is empty, a single element "." is returned. +func (c *Context) ImportPaths(args []string) []string { + return c.importPaths(args) +} + +// ImportPaths returns the import paths to use for the given command line +// using default context. +// +// The path "all" is expanded to all packages in $GOPATH and $GOROOT. +// The path "std" is expanded to all packages in the Go standard library. +// The path "cmd" is expanded to all Go standard commands. +// The string "..." is treated as a wildcard within a path. +// When matching recursively, directories are ignored if they are prefixed with +// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". +// Relative import paths are not converted to full import paths. +// If args is empty, a single element "." is returned. 
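+//
+// Illustrative example, not part of the upstream source:
+//
+//	pkgs := gotool.ImportPaths([]string{"std", "./..."})
+//	// "std" expands to the standard-library packages and "./..." to the
+//	// packages found by walking the tree rooted at the current directory.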
+func ImportPaths(args []string) []string { + return DefaultContext.importPaths(args) +} diff --git a/vendor/github.com/wadey/gocovmerge/LICENSE b/vendor/github.com/wadey/gocovmerge/LICENSE new file mode 100644 index 0000000..455fb10 --- /dev/null +++ b/vendor/github.com/wadey/gocovmerge/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2015, Wade Simmons +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/wadey/gocovmerge/gocovmerge.go b/vendor/github.com/wadey/gocovmerge/gocovmerge.go new file mode 100644 index 0000000..e809983 --- /dev/null +++ b/vendor/github.com/wadey/gocovmerge/gocovmerge.go @@ -0,0 +1,111 @@ +// gocovmerge takes the results from multiple `go test -coverprofile` runs and +// merges them into one profile +package main + +import ( + "flag" + "fmt" + "io" + "log" + "os" + "sort" + + "golang.org/x/tools/cover" +) + +func mergeProfiles(p *cover.Profile, merge *cover.Profile) { + if p.Mode != merge.Mode { + log.Fatalf("cannot merge profiles with different modes") + } + // Since the blocks are sorted, we can keep track of where the last block + // was inserted and only look at the blocks after that as targets for merge + startIndex := 0 + for _, b := range merge.Blocks { + startIndex = mergeProfileBlock(p, b, startIndex) + } +} + +func mergeProfileBlock(p *cover.Profile, pb cover.ProfileBlock, startIndex int) int { + sortFunc := func(i int) bool { + pi := p.Blocks[i+startIndex] + return pi.StartLine >= pb.StartLine && (pi.StartLine != pb.StartLine || pi.StartCol >= pb.StartCol) + } + + i := 0 + if sortFunc(i) != true { + i = sort.Search(len(p.Blocks)-startIndex, sortFunc) + } + i += startIndex + if i < len(p.Blocks) && p.Blocks[i].StartLine == pb.StartLine && p.Blocks[i].StartCol == pb.StartCol { + if p.Blocks[i].EndLine != pb.EndLine || p.Blocks[i].EndCol != pb.EndCol { + log.Fatalf("OVERLAP MERGE: %v %v %v", p.FileName, p.Blocks[i], pb) + } + switch p.Mode { + case "set": + p.Blocks[i].Count |= pb.Count + case "count", "atomic": + p.Blocks[i].Count += pb.Count + default: + log.Fatalf("unsupported covermode: '%s'", p.Mode) + } + } else { + if i > 0 { + pa := p.Blocks[i-1] + if pa.EndLine >= pb.EndLine && (pa.EndLine != pb.EndLine || pa.EndCol > pb.EndCol) { + log.Fatalf("OVERLAP BEFORE: %v %v %v", p.FileName, pa, pb) + } + } + if i < 
len(p.Blocks)-1 { + pa := p.Blocks[i+1] + if pa.StartLine <= pb.StartLine && (pa.StartLine != pb.StartLine || pa.StartCol < pb.StartCol) { + log.Fatalf("OVERLAP AFTER: %v %v %v", p.FileName, pa, pb) + } + } + p.Blocks = append(p.Blocks, cover.ProfileBlock{}) + copy(p.Blocks[i+1:], p.Blocks[i:]) + p.Blocks[i] = pb + } + return i + 1 +} + +func addProfile(profiles []*cover.Profile, p *cover.Profile) []*cover.Profile { + i := sort.Search(len(profiles), func(i int) bool { return profiles[i].FileName >= p.FileName }) + if i < len(profiles) && profiles[i].FileName == p.FileName { + mergeProfiles(profiles[i], p) + } else { + profiles = append(profiles, nil) + copy(profiles[i+1:], profiles[i:]) + profiles[i] = p + } + return profiles +} + +func dumpProfiles(profiles []*cover.Profile, out io.Writer) { + if len(profiles) == 0 { + return + } + fmt.Fprintf(out, "mode: %s\n", profiles[0].Mode) + for _, p := range profiles { + for _, b := range p.Blocks { + fmt.Fprintf(out, "%s:%d.%d,%d.%d %d %d\n", p.FileName, b.StartLine, b.StartCol, b.EndLine, b.EndCol, b.NumStmt, b.Count) + } + } +} + +func main() { + flag.Parse() + + var merged []*cover.Profile + + for _, file := range flag.Args() { + profiles, err := cover.ParseProfiles(file) + if err != nil { + log.Fatalf("failed to parse profiles: %v", err) + } + for _, p := range profiles { + merged = addProfile(merged, p) + } + } + + dumpProfiles(merged, os.Stdout) +} diff --git a/vendor/golang.org/x/tools/AUTHORS b/vendor/golang.org/x/tools/AUTHORS new file mode 100644 index 0000000..15167cd --- /dev/null +++ b/vendor/golang.org/x/tools/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/golang.org/x/tools/CONTRIBUTORS new file mode 100644 index 0000000..1c4577e --- /dev/null +++ b/vendor/golang.org/x/tools/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE new file mode 100644 index 0000000..6a66aea --- /dev/null +++ b/vendor/golang.org/x/tools/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS new file mode 100644 index 0000000..7330990 --- /dev/null +++ b/vendor/golang.org/x/tools/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/tools/cmd/getgo/LICENSE b/vendor/golang.org/x/tools/cmd/getgo/LICENSE new file mode 100644 index 0000000..32017f8 --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/getgo/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2017 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/cmd/goimports/doc.go b/vendor/golang.org/x/tools/cmd/goimports/doc.go new file mode 100644 index 0000000..f5ca3a0 --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/doc.go @@ -0,0 +1,45 @@ +/* + +Command goimports updates your Go import lines, +adding missing ones and removing unreferenced ones. + + $ go get golang.org/x/tools/cmd/goimports + +In addition to fixing imports, goimports also formats +your code in the same style as gofmt so it can be used +as a replacement for your editor's gofmt-on-save hook. + +For emacs, make sure you have the latest go-mode.el: + https://github.com/dominikh/go-mode.el +Then in your .emacs file: + (setq gofmt-command "goimports") + (add-to-list 'load-path "/home/you/somewhere/emacs/") + (require 'go-mode-autoloads) + (add-hook 'before-save-hook 'gofmt-before-save) + +For vim, set "gofmt_command" to "goimports": + https://golang.org/change/39c724dd7f252 + https://golang.org/wiki/IDEsAndTextEditorPlugins + etc + +For GoSublime, follow the steps described here: + http://michaelwhatcott.com/gosublime-goimports/ + +For other editors, you probably know what to do. + +To exclude directories in your $GOPATH from being scanned for Go +files, goimports respects a configuration file at +$GOPATH/src/.goimportsignore which may contain blank lines, comment +lines (beginning with '#'), or lines naming a directory relative to +the configuration file to ignore when scanning. No globbing or regex +patterns are allowed. Use the "-v" verbose flag to verify it's +working and see what goimports is doing. + +File bugs or feature requests at: + + https://golang.org/issues/new?title=x/tools/cmd/goimports:+ + +Happy hacking! + +*/ +package main // import "golang.org/x/tools/cmd/goimports" diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports.go b/vendor/golang.org/x/tools/cmd/goimports/goimports.go new file mode 100644 index 0000000..16e3083 --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/goimports.go @@ -0,0 +1,369 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "bufio" + "bytes" + "errors" + "flag" + "fmt" + "go/scanner" + "io" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "runtime" + "runtime/pprof" + "strings" + + "golang.org/x/tools/imports" +) + +var ( + // main operation modes + list = flag.Bool("l", false, "list files whose formatting differs from goimport's") + write = flag.Bool("w", false, "write result to (source) file instead of stdout") + doDiff = flag.Bool("d", false, "display diffs instead of rewriting files") + srcdir = flag.String("srcdir", "", "choose imports as if source code is from `dir`. 
When operating on a single file, dir may instead be the complete file name.") + verbose bool // verbose logging + + cpuProfile = flag.String("cpuprofile", "", "CPU profile output") + memProfile = flag.String("memprofile", "", "memory profile output") + memProfileRate = flag.Int("memrate", 0, "if > 0, sets runtime.MemProfileRate") + + options = &imports.Options{ + TabWidth: 8, + TabIndent: true, + Comments: true, + Fragment: true, + } + exitCode = 0 +) + +func init() { + flag.BoolVar(&options.AllErrors, "e", false, "report all errors (not just the first 10 on different lines)") + flag.StringVar(&imports.LocalPrefix, "local", "", "put imports beginning with this string after 3rd-party packages") +} + +func report(err error) { + scanner.PrintError(os.Stderr, err) + exitCode = 2 +} + +func usage() { + fmt.Fprintf(os.Stderr, "usage: goimports [flags] [path ...]\n") + flag.PrintDefaults() + os.Exit(2) +} + +func isGoFile(f os.FileInfo) bool { + // ignore non-Go files + name := f.Name() + return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") +} + +// argumentType is which mode goimports was invoked as. +type argumentType int + +const ( + // fromStdin means the user is piping their source into goimports. + fromStdin argumentType = iota + + // singleArg is the common case from editors, when goimports is run on + // a single file. + singleArg + + // multipleArg is when the user ran "goimports file1.go file2.go" + // or ran goimports on a directory tree. + multipleArg +) + +func processFile(filename string, in io.Reader, out io.Writer, argType argumentType) error { + opt := options + if argType == fromStdin { + nopt := *options + nopt.Fragment = true + opt = &nopt + } + + if in == nil { + f, err := os.Open(filename) + if err != nil { + return err + } + defer f.Close() + in = f + } + + src, err := ioutil.ReadAll(in) + if err != nil { + return err + } + + target := filename + if *srcdir != "" { + // Determine whether the provided -srcdirc is a directory or file + // and then use it to override the target. + // + // See https://github.com/dominikh/go-mode.el/issues/146 + if isFile(*srcdir) { + if argType == multipleArg { + return errors.New("-srcdir value can't be a file when passing multiple arguments or when walking directories") + } + target = *srcdir + } else if argType == singleArg && strings.HasSuffix(*srcdir, ".go") && !isDir(*srcdir) { + // For a file which doesn't exist on disk yet, but might shortly. + // e.g. user in editor opens $DIR/newfile.go and newfile.go doesn't yet exist on disk. + // The goimports on-save hook writes the buffer to a temp file + // first and runs goimports before the actual save to newfile.go. + // The editor's buffer is named "newfile.go" so that is passed to goimports as: + // goimports -srcdir=/gopath/src/pkg/newfile.go /tmp/gofmtXXXXXXXX.go + // and then the editor reloads the result from the tmp file and writes + // it to newfile.go. + target = *srcdir + } else { + // Pretend that file is from *srcdir in order to decide + // visible imports correctly. 
+ target = filepath.Join(*srcdir, filepath.Base(filename)) + } + } + + res, err := imports.Process(target, src, opt) + if err != nil { + return err + } + + if !bytes.Equal(src, res) { + // formatting has changed + if *list { + fmt.Fprintln(out, filename) + } + if *write { + if argType == fromStdin { + // filename is "" + return errors.New("can't use -w on stdin") + } + err = ioutil.WriteFile(filename, res, 0) + if err != nil { + return err + } + } + if *doDiff { + if argType == fromStdin { + filename = "stdin.go" // because .orig looks silly + } + data, err := diff(src, res, filename) + if err != nil { + return fmt.Errorf("computing diff: %s", err) + } + fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename)) + out.Write(data) + } + } + + if !*list && !*write && !*doDiff { + _, err = out.Write(res) + } + + return err +} + +func visitFile(path string, f os.FileInfo, err error) error { + if err == nil && isGoFile(f) { + err = processFile(path, nil, os.Stdout, multipleArg) + } + if err != nil { + report(err) + } + return nil +} + +func walkDir(path string) { + filepath.Walk(path, visitFile) +} + +func main() { + runtime.GOMAXPROCS(runtime.NumCPU()) + + // call gofmtMain in a separate function + // so that it can use defer and have them + // run before the exit. + gofmtMain() + os.Exit(exitCode) +} + +// parseFlags parses command line flags and returns the paths to process. +// It's a var so that custom implementations can replace it in other files. +var parseFlags = func() []string { + flag.BoolVar(&verbose, "v", false, "verbose logging") + + flag.Parse() + return flag.Args() +} + +func bufferedFileWriter(dest string) (w io.Writer, close func()) { + f, err := os.Create(dest) + if err != nil { + log.Fatal(err) + } + bw := bufio.NewWriter(f) + return bw, func() { + if err := bw.Flush(); err != nil { + log.Fatalf("error flushing %v: %v", dest, err) + } + if err := f.Close(); err != nil { + log.Fatal(err) + } + } +} + +func gofmtMain() { + flag.Usage = usage + paths := parseFlags() + + if *cpuProfile != "" { + bw, flush := bufferedFileWriter(*cpuProfile) + pprof.StartCPUProfile(bw) + defer flush() + defer pprof.StopCPUProfile() + } + // doTrace is a conditionally compiled wrapper around runtime/trace. It is + // used to allow goimports to compile under gccgo, which does not support + // runtime/trace. See https://golang.org/issue/15544. 
+ defer doTrace()() + if *memProfileRate > 0 { + runtime.MemProfileRate = *memProfileRate + bw, flush := bufferedFileWriter(*memProfile) + defer func() { + runtime.GC() // materialize all statistics + if err := pprof.WriteHeapProfile(bw); err != nil { + log.Fatal(err) + } + flush() + }() + } + + if verbose { + log.SetFlags(log.LstdFlags | log.Lmicroseconds) + imports.Debug = true + } + if options.TabWidth < 0 { + fmt.Fprintf(os.Stderr, "negative tabwidth %d\n", options.TabWidth) + exitCode = 2 + return + } + + if len(paths) == 0 { + if err := processFile("", os.Stdin, os.Stdout, fromStdin); err != nil { + report(err) + } + return + } + + argType := singleArg + if len(paths) > 1 { + argType = multipleArg + } + + for _, path := range paths { + switch dir, err := os.Stat(path); { + case err != nil: + report(err) + case dir.IsDir(): + walkDir(path) + default: + if err := processFile(path, nil, os.Stdout, argType); err != nil { + report(err) + } + } + } +} + +func writeTempFile(dir, prefix string, data []byte) (string, error) { + file, err := ioutil.TempFile(dir, prefix) + if err != nil { + return "", err + } + _, err = file.Write(data) + if err1 := file.Close(); err == nil { + err = err1 + } + if err != nil { + os.Remove(file.Name()) + return "", err + } + return file.Name(), nil +} + +func diff(b1, b2 []byte, filename string) (data []byte, err error) { + f1, err := writeTempFile("", "gofmt", b1) + if err != nil { + return + } + defer os.Remove(f1) + + f2, err := writeTempFile("", "gofmt", b2) + if err != nil { + return + } + defer os.Remove(f2) + + cmd := "diff" + if runtime.GOOS == "plan9" { + cmd = "/bin/ape/diff" + } + + data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. + return replaceTempFilename(data, filename) + } + return +} + +// replaceTempFilename replaces temporary filenames in diff with actual one. +// +// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 +// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 +// ... +// -> +// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 +// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 +// ... +func replaceTempFilename(diff []byte, filename string) ([]byte, error) { + bs := bytes.SplitN(diff, []byte{'\n'}, 3) + if len(bs) < 3 { + return nil, fmt.Errorf("got unexpected diff for %s", filename) + } + // Preserve timestamps. + var t0, t1 []byte + if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { + t0 = bs[0][i:] + } + if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { + t1 = bs[1][i:] + } + // Always print filepath with slash separator. + f := filepath.ToSlash(filename) + bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) + bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + return bytes.Join(bs, []byte{'\n'}), nil +} + +// isFile reports whether name is a file. +func isFile(name string) bool { + fi, err := os.Stat(name) + return err == nil && fi.Mode().IsRegular() +} + +// isDir reports whether name is a directory. +func isDir(name string) bool { + fi, err := os.Stat(name) + return err == nil && fi.IsDir() +} diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go new file mode 100644 index 0000000..21d867e --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go @@ -0,0 +1,26 @@ +// Copyright 2016 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build gc + +package main + +import ( + "flag" + "runtime/trace" +) + +var traceProfile = flag.String("trace", "", "trace profile output") + +func doTrace() func() { + if *traceProfile != "" { + bw, flush := bufferedFileWriter(*traceProfile) + trace.Start(bw) + return func() { + flush() + trace.Stop() + } + } + return func() {} +} diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go new file mode 100644 index 0000000..f5531ce --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !gc + +package main + +func doTrace() func() { + return func() {} +} diff --git a/vendor/golang.org/x/tools/cover/profile.go b/vendor/golang.org/x/tools/cover/profile.go new file mode 100644 index 0000000..b6c8120 --- /dev/null +++ b/vendor/golang.org/x/tools/cover/profile.go @@ -0,0 +1,213 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cover provides support for parsing coverage profiles +// generated by "go test -coverprofile=cover.out". +package cover // import "golang.org/x/tools/cover" + +import ( + "bufio" + "fmt" + "math" + "os" + "regexp" + "sort" + "strconv" + "strings" +) + +// Profile represents the profiling data for a specific file. +type Profile struct { + FileName string + Mode string + Blocks []ProfileBlock +} + +// ProfileBlock represents a single block of profiling data. +type ProfileBlock struct { + StartLine, StartCol int + EndLine, EndCol int + NumStmt, Count int +} + +type byFileName []*Profile + +func (p byFileName) Len() int { return len(p) } +func (p byFileName) Less(i, j int) bool { return p[i].FileName < p[j].FileName } +func (p byFileName) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +// ParseProfiles parses profile data in the specified file and returns a +// Profile for each source file described therein. +func ParseProfiles(fileName string) ([]*Profile, error) { + pf, err := os.Open(fileName) + if err != nil { + return nil, err + } + defer pf.Close() + + files := make(map[string]*Profile) + buf := bufio.NewReader(pf) + // First line is "mode: foo", where foo is "set", "count", or "atomic". 
+ // Rest of file is in the format + // encoding/base64/base64.go:34.44,37.40 3 1 + // where the fields are: name.go:line.column,line.column numberOfStatements count + s := bufio.NewScanner(buf) + mode := "" + for s.Scan() { + line := s.Text() + if mode == "" { + const p = "mode: " + if !strings.HasPrefix(line, p) || line == p { + return nil, fmt.Errorf("bad mode line: %v", line) + } + mode = line[len(p):] + continue + } + m := lineRe.FindStringSubmatch(line) + if m == nil { + return nil, fmt.Errorf("line %q doesn't match expected format: %v", line, lineRe) + } + fn := m[1] + p := files[fn] + if p == nil { + p = &Profile{ + FileName: fn, + Mode: mode, + } + files[fn] = p + } + p.Blocks = append(p.Blocks, ProfileBlock{ + StartLine: toInt(m[2]), + StartCol: toInt(m[3]), + EndLine: toInt(m[4]), + EndCol: toInt(m[5]), + NumStmt: toInt(m[6]), + Count: toInt(m[7]), + }) + } + if err := s.Err(); err != nil { + return nil, err + } + for _, p := range files { + sort.Sort(blocksByStart(p.Blocks)) + // Merge samples from the same location. + j := 1 + for i := 1; i < len(p.Blocks); i++ { + b := p.Blocks[i] + last := p.Blocks[j-1] + if b.StartLine == last.StartLine && + b.StartCol == last.StartCol && + b.EndLine == last.EndLine && + b.EndCol == last.EndCol { + if b.NumStmt != last.NumStmt { + return nil, fmt.Errorf("inconsistent NumStmt: changed from %d to %d", last.NumStmt, b.NumStmt) + } + if mode == "set" { + p.Blocks[j-1].Count |= b.Count + } else { + p.Blocks[j-1].Count += b.Count + } + continue + } + p.Blocks[j] = b + j++ + } + p.Blocks = p.Blocks[:j] + } + // Generate a sorted slice. + profiles := make([]*Profile, 0, len(files)) + for _, profile := range files { + profiles = append(profiles, profile) + } + sort.Sort(byFileName(profiles)) + return profiles, nil +} + +type blocksByStart []ProfileBlock + +func (b blocksByStart) Len() int { return len(b) } +func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b blocksByStart) Less(i, j int) bool { + bi, bj := b[i], b[j] + return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol +} + +var lineRe = regexp.MustCompile(`^(.+):([0-9]+).([0-9]+),([0-9]+).([0-9]+) ([0-9]+) ([0-9]+)$`) + +func toInt(s string) int { + i, err := strconv.Atoi(s) + if err != nil { + panic(err) + } + return i +} + +// Boundary represents the position in a source file of the beginning or end of a +// block as reported by the coverage profile. In HTML mode, it will correspond to +// the opening or closing of a tag and will be used to colorize the source +type Boundary struct { + Offset int // Location as a byte offset in the source file. + Start bool // Is this the start of a block? + Count int // Event count from the cover profile. + Norm float64 // Count normalized to [0..1]. +} + +// Boundaries returns a Profile as a set of Boundary objects within the provided src. +func (p *Profile) Boundaries(src []byte) (boundaries []Boundary) { + // Find maximum count. + max := 0 + for _, b := range p.Blocks { + if b.Count > max { + max = b.Count + } + } + // Divisor for normalization. + divisor := math.Log(float64(max)) + + // boundary returns a Boundary, populating the Norm field with a normalized Count. + boundary := func(offset int, start bool, count int) Boundary { + b := Boundary{Offset: offset, Start: start, Count: count} + if !start || count == 0 { + return b + } + if max <= 1 { + b.Norm = 0.8 // Profile is in"set" mode; we want a heat map. Use cov8 in the CSS. 
+ } else if count > 0 { + b.Norm = math.Log(float64(count)) / divisor + } + return b + } + + line, col := 1, 2 // TODO: Why is this 2? + for si, bi := 0, 0; si < len(src) && bi < len(p.Blocks); { + b := p.Blocks[bi] + if b.StartLine == line && b.StartCol == col { + boundaries = append(boundaries, boundary(si, true, b.Count)) + } + if b.EndLine == line && b.EndCol == col || line > b.EndLine { + boundaries = append(boundaries, boundary(si, false, 0)) + bi++ + continue // Don't advance through src; maybe the next block starts here. + } + if src[si] == '\n' { + line++ + col = 0 + } + col++ + si++ + } + sort.Sort(boundariesByPos(boundaries)) + return +} + +type boundariesByPos []Boundary + +func (b boundariesByPos) Len() int { return len(b) } +func (b boundariesByPos) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b boundariesByPos) Less(i, j int) bool { + if b[i].Offset == b[j].Offset { + return !b[i].Start && b[j].Start + } + return b[i].Offset < b[j].Offset +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go new file mode 100644 index 0000000..6b7052b --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go @@ -0,0 +1,627 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +// This file defines utilities for working with source positions. + +import ( + "fmt" + "go/ast" + "go/token" + "sort" +) + +// PathEnclosingInterval returns the node that encloses the source +// interval [start, end), and all its ancestors up to the AST root. +// +// The definition of "enclosing" used by this function considers +// additional whitespace abutting a node to be enclosed by it. +// In this example: +// +// z := x + y // add them +// <-A-> +// <----B-----> +// +// the ast.BinaryExpr(+) node is considered to enclose interval B +// even though its [Pos()..End()) is actually only interval A. +// This behaviour makes user interfaces more tolerant of imperfect +// input. +// +// This function treats tokens as nodes, though they are not included +// in the result. e.g. PathEnclosingInterval("+") returns the +// enclosing ast.BinaryExpr("x + y"). +// +// If start==end, the 1-char interval following start is used instead. +// +// The 'exact' result is true if the interval contains only path[0] +// and perhaps some adjacent whitespace. It is false if the interval +// overlaps multiple children of path[0], or if it contains only +// interior whitespace of path[0]. +// In this example: +// +// z := x + y // add them +// <--C--> <---E--> +// ^ +// D +// +// intervals C, D and E are inexact. C is contained by the +// z-assignment statement, because it spans three of its children (:=, +// x, +). So too is the 1-char interval D, because it contains only +// interior whitespace of the assignment. E is considered interior +// whitespace of the BlockStmt containing the assignment. +// +// Precondition: [start, end) both lie within the same file as root. +// TODO(adonovan): return (nil, false) in this case and remove precond. +// Requires FileSet; see loader.tokenFileContainsPos. +// +// Postcondition: path is never nil; it always contains at least 'root'. 
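+//
+// A minimal, illustrative sketch of a call (f, pos, and the printing are
+// hypothetical, not part of this package):
+//
+//	path, exact := PathEnclosingInterval(f, pos, pos+1)
+//	for _, n := range path {
+//		fmt.Printf("%T (exact=%v)\n", n, exact)
+//	}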
+// +func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { + // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging + + // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end). + var visit func(node ast.Node) bool + visit = func(node ast.Node) bool { + path = append(path, node) + + nodePos := node.Pos() + nodeEnd := node.End() + + // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging + + // Intersect [start, end) with interval of node. + if start < nodePos { + start = nodePos + } + if end > nodeEnd { + end = nodeEnd + } + + // Find sole child that contains [start, end). + children := childrenOf(node) + l := len(children) + for i, child := range children { + // [childPos, childEnd) is unaugmented interval of child. + childPos := child.Pos() + childEnd := child.End() + + // [augPos, augEnd) is whitespace-augmented interval of child. + augPos := childPos + augEnd := childEnd + if i > 0 { + augPos = children[i-1].End() // start of preceding whitespace + } + if i < l-1 { + nextChildPos := children[i+1].Pos() + // Does [start, end) lie between child and next child? + if start >= augEnd && end <= nextChildPos { + return false // inexact match + } + augEnd = nextChildPos // end of following whitespace + } + + // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n", + // i, augPos, augEnd, start, end) // debugging + + // Does augmented child strictly contain [start, end)? + if augPos <= start && end <= augEnd { + _, isToken := child.(tokenNode) + return isToken || visit(child) + } + + // Does [start, end) overlap multiple children? + // i.e. left-augmented child contains start + // but LR-augmented child does not contain end. + if start < childEnd && end > augEnd { + break + } + } + + // No single child contained [start, end), + // so node is the result. Is it exact? + + // (It's tempting to put this condition before the + // child loop, but it gives the wrong result in the + // case where a node (e.g. ExprStmt) and its sole + // child have equal intervals.) + if start == nodePos && end == nodeEnd { + return true // exact match + } + + return false // inexact: overlaps multiple children + } + + if start > end { + start, end = end, start + } + + if start < root.End() && end > root.Pos() { + if start == end { + end = start + 1 // empty interval => interval of size 1 + } + exact = visit(root) + + // Reverse the path: + for i, l := 0, len(path); i < l/2; i++ { + path[i], path[l-1-i] = path[l-1-i], path[i] + } + } else { + // Selection lies within whitespace preceding the + // first (or following the last) declaration in the file. + // The result nonetheless always includes the ast.File. + path = append(path, root) + } + + return +} + +// tokenNode is a dummy implementation of ast.Node for a single token. +// They are used transiently by PathEnclosingInterval but never escape +// this package. +// +type tokenNode struct { + pos token.Pos + end token.Pos +} + +func (n tokenNode) Pos() token.Pos { + return n.pos +} + +func (n tokenNode) End() token.Pos { + return n.end +} + +func tok(pos token.Pos, len int) ast.Node { + return tokenNode{pos, pos + token.Pos(len)} +} + +// childrenOf returns the direct non-nil children of ast.Node n. +// It may include fake ast.Node implementations for bare tokens. +// it is not safe to call (e.g.) ast.Walk on such nodes. +// +func childrenOf(n ast.Node) []ast.Node { + var children []ast.Node + + // First add nodes for all true subtrees. 
+ ast.Inspect(n, func(node ast.Node) bool { + if node == n { // push n + return true // recur + } + if node != nil { // push child + children = append(children, node) + } + return false // no recursion + }) + + // Then add fake Nodes for bare tokens. + switch n := n.(type) { + case *ast.ArrayType: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Elt.End(), len("]"))) + + case *ast.AssignStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.BasicLit: + children = append(children, + tok(n.ValuePos, len(n.Value))) + + case *ast.BinaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.BlockStmt: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("}"))) + + case *ast.BranchStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.CallExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + if n.Ellipsis != 0 { + children = append(children, tok(n.Ellipsis, len("..."))) + } + + case *ast.CaseClause: + if n.List == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.ChanType: + switch n.Dir { + case ast.RECV: + children = append(children, tok(n.Begin, len("<-chan"))) + case ast.SEND: + children = append(children, tok(n.Begin, len("chan<-"))) + case ast.RECV | ast.SEND: + children = append(children, tok(n.Begin, len("chan"))) + } + + case *ast.CommClause: + if n.Comm == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.Comment: + // nop + + case *ast.CommentGroup: + // nop + + case *ast.CompositeLit: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("{"))) + + case *ast.DeclStmt: + // nop + + case *ast.DeferStmt: + children = append(children, + tok(n.Defer, len("defer"))) + + case *ast.Ellipsis: + children = append(children, + tok(n.Ellipsis, len("..."))) + + case *ast.EmptyStmt: + // nop + + case *ast.ExprStmt: + // nop + + case *ast.Field: + // TODO(adonovan): Field.{Doc,Comment,Tag}? + + case *ast.FieldList: + children = append(children, + tok(n.Opening, len("(")), + tok(n.Closing, len(")"))) + + case *ast.File: + // TODO test: Doc + children = append(children, + tok(n.Package, len("package"))) + + case *ast.ForStmt: + children = append(children, + tok(n.For, len("for"))) + + case *ast.FuncDecl: + // TODO(adonovan): FuncDecl.Comment? + + // Uniquely, FuncDecl breaks the invariant that + // preorder traversal yields tokens in lexical order: + // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func. + // + // As a workaround, we inline the case for FuncType + // here and order things correctly. 
+ // + children = nil // discard ast.Walk(FuncDecl) info subtrees + children = append(children, tok(n.Type.Func, len("func"))) + if n.Recv != nil { + children = append(children, n.Recv) + } + children = append(children, n.Name) + if n.Type.Params != nil { + children = append(children, n.Type.Params) + } + if n.Type.Results != nil { + children = append(children, n.Type.Results) + } + if n.Body != nil { + children = append(children, n.Body) + } + + case *ast.FuncLit: + // nop + + case *ast.FuncType: + if n.Func != 0 { + children = append(children, + tok(n.Func, len("func"))) + } + + case *ast.GenDecl: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + if n.Lparen != 0 { + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + } + + case *ast.GoStmt: + children = append(children, + tok(n.Go, len("go"))) + + case *ast.Ident: + children = append(children, + tok(n.NamePos, len(n.Name))) + + case *ast.IfStmt: + children = append(children, + tok(n.If, len("if"))) + + case *ast.ImportSpec: + // TODO(adonovan): ImportSpec.{Doc,EndPos}? + + case *ast.IncDecStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.IndexExpr: + children = append(children, + tok(n.Lbrack, len("{")), + tok(n.Rbrack, len("}"))) + + case *ast.InterfaceType: + children = append(children, + tok(n.Interface, len("interface"))) + + case *ast.KeyValueExpr: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.LabeledStmt: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.MapType: + children = append(children, + tok(n.Map, len("map"))) + + case *ast.ParenExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.RangeStmt: + children = append(children, + tok(n.For, len("for")), + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.ReturnStmt: + children = append(children, + tok(n.Return, len("return"))) + + case *ast.SelectStmt: + children = append(children, + tok(n.Select, len("select"))) + + case *ast.SelectorExpr: + // nop + + case *ast.SendStmt: + children = append(children, + tok(n.Arrow, len("<-"))) + + case *ast.SliceExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.StarExpr: + children = append(children, tok(n.Star, len("*"))) + + case *ast.StructType: + children = append(children, tok(n.Struct, len("struct"))) + + case *ast.SwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.TypeAssertExpr: + children = append(children, + tok(n.Lparen-1, len(".")), + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.TypeSpec: + // TODO(adonovan): TypeSpec.{Doc,Comment}? + + case *ast.TypeSwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.UnaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.ValueSpec: + // TODO(adonovan): ValueSpec.{Doc,Comment}? + + case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: + // nop + } + + // TODO(adonovan): opt: merge the logic of ast.Inspect() into + // the switch above so we can make interleaved callbacks for + // both Nodes and Tokens in the right order and avoid the need + // to sort. 
+ sort.Sort(byPos(children)) + + return children +} + +type byPos []ast.Node + +func (sl byPos) Len() int { + return len(sl) +} +func (sl byPos) Less(i, j int) bool { + return sl[i].Pos() < sl[j].Pos() +} +func (sl byPos) Swap(i, j int) { + sl[i], sl[j] = sl[j], sl[i] +} + +// NodeDescription returns a description of the concrete type of n suitable +// for a user interface. +// +// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident, +// StarExpr) we could be much more specific given the path to the AST +// root. Perhaps we should do that. +// +func NodeDescription(n ast.Node) string { + switch n := n.(type) { + case *ast.ArrayType: + return "array type" + case *ast.AssignStmt: + return "assignment" + case *ast.BadDecl: + return "bad declaration" + case *ast.BadExpr: + return "bad expression" + case *ast.BadStmt: + return "bad statement" + case *ast.BasicLit: + return "basic literal" + case *ast.BinaryExpr: + return fmt.Sprintf("binary %s operation", n.Op) + case *ast.BlockStmt: + return "block" + case *ast.BranchStmt: + switch n.Tok { + case token.BREAK: + return "break statement" + case token.CONTINUE: + return "continue statement" + case token.GOTO: + return "goto statement" + case token.FALLTHROUGH: + return "fall-through statement" + } + case *ast.CallExpr: + if len(n.Args) == 1 && !n.Ellipsis.IsValid() { + return "function call (or conversion)" + } + return "function call" + case *ast.CaseClause: + return "case clause" + case *ast.ChanType: + return "channel type" + case *ast.CommClause: + return "communication clause" + case *ast.Comment: + return "comment" + case *ast.CommentGroup: + return "comment group" + case *ast.CompositeLit: + return "composite literal" + case *ast.DeclStmt: + return NodeDescription(n.Decl) + " statement" + case *ast.DeferStmt: + return "defer statement" + case *ast.Ellipsis: + return "ellipsis" + case *ast.EmptyStmt: + return "empty statement" + case *ast.ExprStmt: + return "expression statement" + case *ast.Field: + // Can be any of these: + // struct {x, y int} -- struct field(s) + // struct {T} -- anon struct field + // interface {I} -- interface embedding + // interface {f()} -- interface method + // func (A) func(B) C -- receiver, param(s), result(s) + return "field/method/parameter" + case *ast.FieldList: + return "field/method/parameter list" + case *ast.File: + return "source file" + case *ast.ForStmt: + return "for loop" + case *ast.FuncDecl: + return "function declaration" + case *ast.FuncLit: + return "function literal" + case *ast.FuncType: + return "function type" + case *ast.GenDecl: + switch n.Tok { + case token.IMPORT: + return "import declaration" + case token.CONST: + return "constant declaration" + case token.TYPE: + return "type declaration" + case token.VAR: + return "variable declaration" + } + case *ast.GoStmt: + return "go statement" + case *ast.Ident: + return "identifier" + case *ast.IfStmt: + return "if statement" + case *ast.ImportSpec: + return "import specification" + case *ast.IncDecStmt: + if n.Tok == token.INC { + return "increment statement" + } + return "decrement statement" + case *ast.IndexExpr: + return "index expression" + case *ast.InterfaceType: + return "interface type" + case *ast.KeyValueExpr: + return "key/value association" + case *ast.LabeledStmt: + return "statement label" + case *ast.MapType: + return "map type" + case *ast.Package: + return "package" + case *ast.ParenExpr: + return "parenthesized " + NodeDescription(n.X) + case *ast.RangeStmt: + return "range loop" + case *ast.ReturnStmt: + return 
"return statement" + case *ast.SelectStmt: + return "select statement" + case *ast.SelectorExpr: + return "selector" + case *ast.SendStmt: + return "channel send" + case *ast.SliceExpr: + return "slice expression" + case *ast.StarExpr: + return "*-operation" // load/store expr or pointer type + case *ast.StructType: + return "struct type" + case *ast.SwitchStmt: + return "switch statement" + case *ast.TypeAssertExpr: + return "type assertion" + case *ast.TypeSpec: + return "type specification" + case *ast.TypeSwitchStmt: + return "type switch" + case *ast.UnaryExpr: + return fmt.Sprintf("unary %s operation", n.Op) + case *ast.ValueSpec: + return "value specification" + + } + panic(fmt.Sprintf("unexpected node type: %T", n)) +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go new file mode 100644 index 0000000..83f196c --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go @@ -0,0 +1,470 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package astutil contains common utilities for working with the Go AST. +package astutil // import "golang.org/x/tools/go/ast/astutil" + +import ( + "fmt" + "go/ast" + "go/token" + "strconv" + "strings" +) + +// AddImport adds the import path to the file f, if absent. +func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) { + return AddNamedImport(fset, f, "", ipath) +} + +// AddNamedImport adds the import path to the file f, if absent. +// If name is not empty, it is used to rename the import. +// +// For example, calling +// AddNamedImport(fset, f, "pathpkg", "path") +// adds +// import pathpkg "path" +func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) { + if imports(f, ipath) { + return false + } + + newImport := &ast.ImportSpec{ + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(ipath), + }, + } + if name != "" { + newImport.Name = &ast.Ident{Name: name} + } + + // Find an import decl to add to. + // The goal is to find an existing import + // whose import path has the longest shared + // prefix with ipath. + var ( + bestMatch = -1 // length of longest shared prefix + lastImport = -1 // index in f.Decls of the file's final import decl + impDecl *ast.GenDecl // import decl containing the best match + impIndex = -1 // spec index in impDecl containing the best match + + isThirdPartyPath = isThirdParty(ipath) + ) + for i, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if ok && gen.Tok == token.IMPORT { + lastImport = i + // Do not add to import "C", to avoid disrupting the + // association with its doc comment, breaking cgo. + if declImports(gen, "C") { + continue + } + + // Match an empty import decl if that's all that is available. + if len(gen.Specs) == 0 && bestMatch == -1 { + impDecl = gen + } + + // Compute longest shared prefix with imports in this group and find best + // matched import spec. + // 1. Always prefer import spec with longest shared prefix. + // 2. While match length is 0, + // - for stdlib package: prefer first import spec. + // - for third party package: prefer first third party import spec. + // We cannot use last import spec as best match for third party package + // because grouped imports are usually placed last by goimports -local + // flag. + // See issue #19190. 
+ seenAnyThirdParty := false + for j, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + p := importPath(impspec) + n := matchLen(p, ipath) + if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) { + bestMatch = n + impDecl = gen + impIndex = j + } + seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p) + } + } + } + + // If no import decl found, add one after the last import. + if impDecl == nil { + impDecl = &ast.GenDecl{ + Tok: token.IMPORT, + } + if lastImport >= 0 { + impDecl.TokPos = f.Decls[lastImport].End() + } else { + // There are no existing imports. + // Our new import goes after the package declaration and after + // the comment, if any, that starts on the same line as the + // package declaration. + impDecl.TokPos = f.Package + + file := fset.File(f.Package) + pkgLine := file.Line(f.Package) + for _, c := range f.Comments { + if file.Line(c.Pos()) > pkgLine { + break + } + impDecl.TokPos = c.End() + } + } + f.Decls = append(f.Decls, nil) + copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:]) + f.Decls[lastImport+1] = impDecl + } + + // Insert new import at insertAt. + insertAt := 0 + if impIndex >= 0 { + // insert after the found import + insertAt = impIndex + 1 + } + impDecl.Specs = append(impDecl.Specs, nil) + copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:]) + impDecl.Specs[insertAt] = newImport + pos := impDecl.Pos() + if insertAt > 0 { + // If there is a comment after an existing import, preserve the comment + // position by adding the new import after the comment. + if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil { + pos = spec.Comment.End() + } else { + // Assign same position as the previous import, + // so that the sorter sees it as being in the same block. + pos = impDecl.Specs[insertAt-1].Pos() + } + } + if newImport.Name != nil { + newImport.Name.NamePos = pos + } + newImport.Path.ValuePos = pos + newImport.EndPos = pos + + // Clean up parens. impDecl contains at least one spec. + if len(impDecl.Specs) == 1 { + // Remove unneeded parens. + impDecl.Lparen = token.NoPos + } else if !impDecl.Lparen.IsValid() { + // impDecl needs parens added. + impDecl.Lparen = impDecl.Specs[0].Pos() + } + + f.Imports = append(f.Imports, newImport) + + if len(f.Decls) <= 1 { + return true + } + + // Merge all the import declarations into the first one. + var first *ast.GenDecl + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { + continue + } + if first == nil { + first = gen + continue // Don't touch the first one. + } + // We now know there is more than one package in this import + // declaration. Ensure that it ends up parenthesized. + first.Lparen = first.Pos() + // Move the imports of the other import declaration to the first one. + for _, spec := range gen.Specs { + spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() + first.Specs = append(first.Specs, spec) + } + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + i-- + } + + return true +} + +func isThirdParty(importPath string) bool { + // Third party package import path usually contains "." (".com", ".org", ...) + // This logic is taken from golang.org/x/tools/imports package. + return strings.Contains(importPath, ".") +} + +// DeleteImport deletes the import path from the file f, if present. 
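+//
+// A minimal, illustrative sketch (the file name "main.go" and the src
+// variable are hypothetical):
+//
+//	fset := token.NewFileSet()
+//	f, err := parser.ParseFile(fset, "main.go", src, parser.ParseComments)
+//	if err != nil {
+//		// handle the parse error
+//	}
+//	_ = DeleteImport(fset, f, "fmt")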
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) { + return DeleteNamedImport(fset, f, "", path) +} + +// DeleteNamedImport deletes the import with the given name and path from the file f, if present. +func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) { + var delspecs []*ast.ImportSpec + var delcomments []*ast.CommentGroup + + // Find the import nodes that import path, if any. + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT { + continue + } + for j := 0; j < len(gen.Specs); j++ { + spec := gen.Specs[j] + impspec := spec.(*ast.ImportSpec) + if impspec.Name == nil && name != "" { + continue + } + if impspec.Name != nil && impspec.Name.Name != name { + continue + } + if importPath(impspec) != path { + continue + } + + // We found an import spec that imports path. + // Delete it. + delspecs = append(delspecs, impspec) + deleted = true + copy(gen.Specs[j:], gen.Specs[j+1:]) + gen.Specs = gen.Specs[:len(gen.Specs)-1] + + // If this was the last import spec in this decl, + // delete the decl, too. + if len(gen.Specs) == 0 { + copy(f.Decls[i:], f.Decls[i+1:]) + f.Decls = f.Decls[:len(f.Decls)-1] + i-- + break + } else if len(gen.Specs) == 1 { + if impspec.Doc != nil { + delcomments = append(delcomments, impspec.Doc) + } + if impspec.Comment != nil { + delcomments = append(delcomments, impspec.Comment) + } + for _, cg := range f.Comments { + // Found comment on the same line as the import spec. + if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line { + delcomments = append(delcomments, cg) + break + } + } + + spec := gen.Specs[0].(*ast.ImportSpec) + + // Move the documentation right after the import decl. + if spec.Doc != nil { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + } + for _, cg := range f.Comments { + if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + break + } + } + } + if j > 0 { + lastImpspec := gen.Specs[j-1].(*ast.ImportSpec) + lastLine := fset.Position(lastImpspec.Path.ValuePos).Line + line := fset.Position(impspec.Path.ValuePos).Line + + // We deleted an entry but now there may be + // a blank line-sized hole where the import was. + if line-lastLine > 1 { + // There was a blank line immediately preceding the deleted import, + // so there's no need to close the hole. + // Do nothing. + } else if line != fset.File(gen.Rparen).LineCount() { + // There was no blank line. Close the hole. + fset.File(gen.Rparen).MergeLine(line) + } + } + j-- + } + } + + // Delete imports from f.Imports. + for i := 0; i < len(f.Imports); i++ { + imp := f.Imports[i] + for j, del := range delspecs { + if imp == del { + copy(f.Imports[i:], f.Imports[i+1:]) + f.Imports = f.Imports[:len(f.Imports)-1] + copy(delspecs[j:], delspecs[j+1:]) + delspecs = delspecs[:len(delspecs)-1] + i-- + break + } + } + } + + // Delete comments from f.Comments. 
+ for i := 0; i < len(f.Comments); i++ { + cg := f.Comments[i] + for j, del := range delcomments { + if cg == del { + copy(f.Comments[i:], f.Comments[i+1:]) + f.Comments = f.Comments[:len(f.Comments)-1] + copy(delcomments[j:], delcomments[j+1:]) + delcomments = delcomments[:len(delcomments)-1] + i-- + break + } + } + } + + if len(delspecs) > 0 { + panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) + } + + return +} + +// RewriteImport rewrites any import of path oldPath to path newPath. +func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) { + for _, imp := range f.Imports { + if importPath(imp) == oldPath { + rewrote = true + // record old End, because the default is to compute + // it using the length of imp.Path.Value. + imp.EndPos = imp.End() + imp.Path.Value = strconv.Quote(newPath) + } + } + return +} + +// UsesImport reports whether a given import is used. +func UsesImport(f *ast.File, path string) (used bool) { + spec := importSpec(f, path) + if spec == nil { + return + } + + name := spec.Name.String() + switch name { + case "": + // If the package name is not explicitly specified, + // make an educated guess. This is not guaranteed to be correct. + lastSlash := strings.LastIndex(path, "/") + if lastSlash == -1 { + name = path + } else { + name = path[lastSlash+1:] + } + case "_", ".": + // Not sure if this import is used - err on the side of caution. + return true + } + + ast.Walk(visitFn(func(n ast.Node) { + sel, ok := n.(*ast.SelectorExpr) + if ok && isTopName(sel.X, name) { + used = true + } + }), f) + + return +} + +type visitFn func(node ast.Node) + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + fn(node) + return fn +} + +// imports returns true if f imports path. +func imports(f *ast.File, path string) bool { + return importSpec(f, path) != nil +} + +// importSpec returns the import spec if f imports path, +// or nil otherwise. +func importSpec(f *ast.File, path string) *ast.ImportSpec { + for _, s := range f.Imports { + if importPath(s) == path { + return s + } + } + return nil +} + +// importPath returns the unquoted import path of s, +// or "" if the path is not properly quoted. +func importPath(s *ast.ImportSpec) string { + t, err := strconv.Unquote(s.Path.Value) + if err == nil { + return t + } + return "" +} + +// declImports reports whether gen contains an import of path. +func declImports(gen *ast.GenDecl, path string) bool { + if gen.Tok != token.IMPORT { + return false + } + for _, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + if importPath(impspec) == path { + return true + } + } + return false +} + +// matchLen returns the length of the longest path segment prefix shared by x and y. +func matchLen(x, y string) int { + n := 0 + for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ { + if x[i] == '/' { + n++ + } + } + return n +} + +// isTopName returns true if n is a top-level unresolved identifier with the given name. +func isTopName(n ast.Expr, name string) bool { + id, ok := n.(*ast.Ident) + return ok && id.Name == name && id.Obj == nil +} + +// Imports returns the file imports grouped by paragraph. 
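+//
+// A minimal, illustrative sketch of walking the result (f and fset are
+// assumed to come from go/parser):
+//
+//	for i, group := range Imports(fset, f) {
+//		for _, spec := range group {
+//			fmt.Printf("group %d: %s\n", i, spec.Path.Value)
+//		}
+//	}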
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec { + var groups [][]*ast.ImportSpec + + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.IMPORT { + break + } + + group := []*ast.ImportSpec{} + + var lastLine int + for _, spec := range genDecl.Specs { + importSpec := spec.(*ast.ImportSpec) + pos := importSpec.Path.ValuePos + line := fset.Position(pos).Line + if lastLine > 0 && pos > 0 && line-lastLine > 1 { + groups = append(groups, group) + group = []*ast.ImportSpec{} + } + group = append(group, importSpec) + lastLine = line + } + groups = append(groups, group) + } + + return groups +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go new file mode 100644 index 0000000..cf72ea9 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go @@ -0,0 +1,477 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "fmt" + "go/ast" + "reflect" + "sort" +) + +// An ApplyFunc is invoked by Apply for each node n, even if n is nil, +// before and/or after the node's children, using a Cursor describing +// the current node and providing operations on it. +// +// The return value of ApplyFunc controls the syntax tree traversal. +// See Apply for details. +type ApplyFunc func(*Cursor) bool + +// Apply traverses a syntax tree recursively, starting with root, +// and calling pre and post for each node as described below. +// Apply returns the syntax tree, possibly modified. +// +// If pre is not nil, it is called for each node before the node's +// children are traversed (pre-order). If pre returns false, no +// children are traversed, and post is not called for that node. +// +// If post is not nil, and a prior call of pre didn't return false, +// post is called for each node after its children are traversed +// (post-order). If post returns false, traversal is terminated and +// Apply returns immediately. +// +// Only fields that refer to AST nodes are considered children; +// i.e., token.Pos, Scopes, Objects, and fields of basic types +// (strings, etc.) are ignored. +// +// Children are traversed in the order in which they appear in the +// respective node's struct definition. A package's files are +// traversed in the filenames' alphabetical order. +// +func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { + parent := &struct{ ast.Node }{root} + defer func() { + if r := recover(); r != nil && r != abort { + panic(r) + } + result = parent.Node + }() + a := &application{pre: pre, post: post} + a.apply(parent, "Node", nil, root) + return +} + +var abort = new(int) // singleton, to signal termination of Apply + +// A Cursor describes a node encountered during Apply. +// Information about the node and its parent is available +// from the Node, Parent, Name, and Index methods. +// +// If p is a variable of type and value of the current parent node +// c.Parent(), and f is the field identifier with name c.Name(), +// the following invariants hold: +// +// p.f == c.Node() if c.Index() < 0 +// p.f[c.Index()] == c.Node() if c.Index() >= 0 +// +// The methods Replace, Delete, InsertBefore, and InsertAfter +// can be used to change the AST without disrupting Apply. +type Cursor struct { + parent ast.Node + name string + iter *iterator // valid if non-nil + node ast.Node +} + +// Node returns the current Node. 
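+//
+// A Cursor is typically consumed inside an ApplyFunc. A minimal,
+// illustrative sketch (the identifier rename below is hypothetical):
+//
+//	Apply(f, func(c *Cursor) bool {
+//		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "old" {
+//			c.Replace(&ast.Ident{Name: "new", NamePos: id.NamePos})
+//		}
+//		return true
+//	}, nil)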
+func (c *Cursor) Node() ast.Node { return c.node } + +// Parent returns the parent of the current Node. +func (c *Cursor) Parent() ast.Node { return c.parent } + +// Name returns the name of the parent Node field that contains the current Node. +// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns +// the filename for the current Node. +func (c *Cursor) Name() string { return c.name } + +// Index reports the index >= 0 of the current Node in the slice of Nodes that +// contains it, or a value < 0 if the current Node is not part of a slice. +// The index of the current node changes if InsertBefore is called while +// processing the current node. +func (c *Cursor) Index() int { + if c.iter != nil { + return c.iter.index + } + return -1 +} + +// field returns the current node's parent field value. +func (c *Cursor) field() reflect.Value { + return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name) +} + +// Replace replaces the current Node with n. +// The replacement node is not walked by Apply. +func (c *Cursor) Replace(n ast.Node) { + if _, ok := c.node.(*ast.File); ok { + file, ok := n.(*ast.File) + if !ok { + panic("attempt to replace *ast.File with non-*ast.File") + } + c.parent.(*ast.Package).Files[c.name] = file + return + } + + v := c.field() + if i := c.Index(); i >= 0 { + v = v.Index(i) + } + v.Set(reflect.ValueOf(n)) +} + +// Delete deletes the current Node from its containing slice. +// If the current Node is not part of a slice, Delete panics. +// As a special case, if the current node is a package file, +// Delete removes it from the package's Files map. +func (c *Cursor) Delete() { + if _, ok := c.node.(*ast.File); ok { + delete(c.parent.(*ast.Package).Files, c.name) + return + } + + i := c.Index() + if i < 0 { + panic("Delete node not contained in slice") + } + v := c.field() + l := v.Len() + reflect.Copy(v.Slice(i, l), v.Slice(i+1, l)) + v.Index(l - 1).Set(reflect.Zero(v.Type().Elem())) + v.SetLen(l - 1) + c.iter.step-- +} + +// InsertAfter inserts n after the current Node in its containing slice. +// If the current Node is not part of a slice, InsertAfter panics. +// Apply does not walk n. +func (c *Cursor) InsertAfter(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertAfter node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l)) + v.Index(i + 1).Set(reflect.ValueOf(n)) + c.iter.step++ +} + +// InsertBefore inserts n before the current Node in its containing slice. +// If the current Node is not part of a slice, InsertBefore panics. +// Apply will not walk n. +func (c *Cursor) InsertBefore(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertBefore node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+1, l), v.Slice(i, l)) + v.Index(i).Set(reflect.ValueOf(n)) + c.iter.index++ +} + +// application carries all the shared data so we can pass it around cheaply. 
+type application struct { + pre, post ApplyFunc + cursor Cursor + iter iterator +} + +func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { + // convert typed nil into untyped nil + if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { + n = nil + } + + // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead + saved := a.cursor + a.cursor.parent = parent + a.cursor.name = name + a.cursor.iter = iter + a.cursor.node = n + + if a.pre != nil && !a.pre(&a.cursor) { + a.cursor = saved + return + } + + // walk children + // (the order of the cases matches the order of the corresponding node types in go/ast) + switch n := n.(type) { + case nil: + // nothing to do + + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + if n != nil { + a.applyList(n, "List") + } + + case *ast.Field: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.FieldList: + a.applyList(n, "List") + + // Expressions + case *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.Ellipsis: + a.apply(n, "Elt", nil, n.Elt) + + case *ast.FuncLit: + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + case *ast.CompositeLit: + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Elts") + + case *ast.ParenExpr: + a.apply(n, "X", nil, n.X) + + case *ast.SelectorExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Sel", nil, n.Sel) + + case *ast.IndexExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Index", nil, n.Index) + + case *ast.SliceExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Low", nil, n.Low) + a.apply(n, "High", nil, n.High) + a.apply(n, "Max", nil, n.Max) + + case *ast.TypeAssertExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Type", nil, n.Type) + + case *ast.CallExpr: + a.apply(n, "Fun", nil, n.Fun) + a.applyList(n, "Args") + + case *ast.StarExpr: + a.apply(n, "X", nil, n.X) + + case *ast.UnaryExpr: + a.apply(n, "X", nil, n.X) + + case *ast.BinaryExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Y", nil, n.Y) + + case *ast.KeyValueExpr: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + // Types + case *ast.ArrayType: + a.apply(n, "Len", nil, n.Len) + a.apply(n, "Elt", nil, n.Elt) + + case *ast.StructType: + a.apply(n, "Fields", nil, n.Fields) + + case *ast.FuncType: + a.apply(n, "Params", nil, n.Params) + a.apply(n, "Results", nil, n.Results) + + case *ast.InterfaceType: + a.apply(n, "Methods", nil, n.Methods) + + case *ast.MapType: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + case *ast.ChanType: + a.apply(n, "Value", nil, n.Value) + + // Statements + case *ast.BadStmt: + // nothing to do + + case *ast.DeclStmt: + a.apply(n, "Decl", nil, n.Decl) + + case *ast.EmptyStmt: + // nothing to do + + case *ast.LabeledStmt: + a.apply(n, "Label", nil, n.Label) + a.apply(n, "Stmt", nil, n.Stmt) + + case *ast.ExprStmt: + a.apply(n, "X", nil, n.X) + + case *ast.SendStmt: + a.apply(n, "Chan", nil, n.Chan) + a.apply(n, "Value", nil, n.Value) + + case *ast.IncDecStmt: + a.apply(n, "X", nil, n.X) + + case *ast.AssignStmt: + a.applyList(n, "Lhs") + a.applyList(n, "Rhs") + + case *ast.GoStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.DeferStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.ReturnStmt: + a.applyList(n, "Results") + + case *ast.BranchStmt: + a.apply(n, "Label", nil, n.Label) + + case *ast.BlockStmt: + 
a.applyList(n, "List") + + case *ast.IfStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Body", nil, n.Body) + a.apply(n, "Else", nil, n.Else) + + case *ast.CaseClause: + a.applyList(n, "List") + a.applyList(n, "Body") + + case *ast.SwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Body", nil, n.Body) + + case *ast.TypeSwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Assign", nil, n.Assign) + a.apply(n, "Body", nil, n.Body) + + case *ast.CommClause: + a.apply(n, "Comm", nil, n.Comm) + a.applyList(n, "Body") + + case *ast.SelectStmt: + a.apply(n, "Body", nil, n.Body) + + case *ast.ForStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Post", nil, n.Post) + a.apply(n, "Body", nil, n.Body) + + case *ast.RangeStmt: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + a.apply(n, "X", nil, n.X) + a.apply(n, "Body", nil, n.Body) + + // Declarations + case *ast.ImportSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Path", nil, n.Path) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.ValueSpec: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Values") + a.apply(n, "Comment", nil, n.Comment) + + case *ast.TypeSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.BadDecl: + // nothing to do + + case *ast.GenDecl: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Specs") + + case *ast.FuncDecl: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Recv", nil, n.Recv) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + // Files and packages + case *ast.File: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.applyList(n, "Decls") + // Don't walk n.Comments; they have either been walked already if + // they are Doc comments, or they can be easily walked explicitly. + + case *ast.Package: + // collect and sort names for reproducible behavior + var names []string + for name := range n.Files { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + a.apply(n, name, nil, n.Files[name]) + } + + default: + panic(fmt.Sprintf("Apply: unexpected node type %T", n)) + } + + if a.post != nil && !a.post(&a.cursor) { + panic(abort) + } + + a.cursor = saved +} + +// An iterator controls iteration over a slice of nodes. 
+type iterator struct { + index, step int +} + +func (a *application) applyList(parent ast.Node, name string) { + // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead + saved := a.iter + a.iter.index = 0 + for { + // must reload parent.name each time, since cursor modifications might change it + v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name) + if a.iter.index >= v.Len() { + break + } + + // element x may be nil in a bad AST - be cautious + var x ast.Node + if e := v.Index(a.iter.index); e.IsValid() { + x = e.Interface().(ast.Node) + } + + a.iter.step = 1 + a.apply(parent, name, &a.iter, x) + a.iter.index += a.iter.step + } + a.iter = saved +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go new file mode 100644 index 0000000..7630629 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go @@ -0,0 +1,14 @@ +package astutil + +import "go/ast" + +// Unparen returns e with any enclosing parentheses stripped. +func Unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go new file mode 100644 index 0000000..c0cb03e --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/allpackages.go @@ -0,0 +1,198 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package buildutil provides utilities related to the go/build +// package in the standard library. +// +// All I/O is done via the build.Context file system interface, which must +// be concurrency-safe. +package buildutil // import "golang.org/x/tools/go/buildutil" + +import ( + "go/build" + "os" + "path/filepath" + "sort" + "strings" + "sync" +) + +// AllPackages returns the package path of each Go package in any source +// directory of the specified build context (e.g. $GOROOT or an element +// of $GOPATH). Errors are ignored. The results are sorted. +// All package paths are canonical, and thus may contain "/vendor/". +// +// The result may include import paths for directories that contain no +// *.go files, such as "archive" (in $GOROOT/src). +// +// All I/O is done via the build.Context file system interface, +// which must be concurrency-safe. +// +func AllPackages(ctxt *build.Context) []string { + var list []string + ForEachPackage(ctxt, func(pkg string, _ error) { + list = append(list, pkg) + }) + sort.Strings(list) + return list +} + +// ForEachPackage calls the found function with the package path of +// each Go package it finds in any source directory of the specified +// build context (e.g. $GOROOT or an element of $GOPATH). +// All package paths are canonical, and thus may contain "/vendor/". +// +// If the package directory exists but could not be read, the second +// argument to the found function provides the error. +// +// All I/O is done via the build.Context file system interface, +// which must be concurrency-safe. +// +func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) { + ch := make(chan item) + + var wg sync.WaitGroup + for _, root := range ctxt.SrcDirs() { + root := root + wg.Add(1) + go func() { + allPackages(ctxt, root, ch) + wg.Done() + }() + } + go func() { + wg.Wait() + close(ch) + }() + + // All calls to found occur in the caller's goroutine. 
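+	//
+	// For example (illustrative; shown package-qualified as a caller would
+	// write it, with hypothetical variable names):
+	//
+	//	var pkgs []string
+	//	buildutil.ForEachPackage(&build.Default, func(path string, err error) {
+	//		if err == nil {
+	//			pkgs = append(pkgs, path)
+	//		}
+	//	})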
+ for i := range ch { + found(i.importPath, i.err) + } +} + +type item struct { + importPath string + err error // (optional) +} + +// We use a process-wide counting semaphore to limit +// the number of parallel calls to ReadDir. +var ioLimit = make(chan bool, 20) + +func allPackages(ctxt *build.Context, root string, ch chan<- item) { + root = filepath.Clean(root) + string(os.PathSeparator) + + var wg sync.WaitGroup + + var walkDir func(dir string) + walkDir = func(dir string) { + // Avoid .foo, _foo, and testdata directory trees. + base := filepath.Base(dir) + if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" { + return + } + + pkg := filepath.ToSlash(strings.TrimPrefix(dir, root)) + + // Prune search if we encounter any of these import paths. + switch pkg { + case "builtin": + return + } + + ioLimit <- true + files, err := ReadDir(ctxt, dir) + <-ioLimit + if pkg != "" || err != nil { + ch <- item{pkg, err} + } + for _, fi := range files { + fi := fi + if fi.IsDir() { + wg.Add(1) + go func() { + walkDir(filepath.Join(dir, fi.Name())) + wg.Done() + }() + } + } + } + + walkDir(root) + wg.Wait() +} + +// ExpandPatterns returns the set of packages matched by patterns, +// which may have the following forms: +// +// golang.org/x/tools/cmd/guru # a single package +// golang.org/x/tools/... # all packages beneath dir +// ... # the entire workspace. +// +// Order is significant: a pattern preceded by '-' removes matching +// packages from the set. For example, these patterns match all encoding +// packages except encoding/xml: +// +// encoding/... -encoding/xml +// +// A trailing slash in a pattern is ignored. (Path components of Go +// package names are separated by slash, not the platform's path separator.) +// +func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool { + // TODO(adonovan): support other features of 'go list': + // - "std"/"cmd"/"all" meta-packages + // - "..." not at the end of a pattern + // - relative patterns using "./" or "../" prefix + + pkgs := make(map[string]bool) + doPkg := func(pkg string, neg bool) { + if neg { + delete(pkgs, pkg) + } else { + pkgs[pkg] = true + } + } + + // Scan entire workspace if wildcards are present. + // TODO(adonovan): opt: scan only the necessary subtrees of the workspace. + var all []string + for _, arg := range patterns { + if strings.HasSuffix(arg, "...") { + all = AllPackages(ctxt) + break + } + } + + for _, arg := range patterns { + if arg == "" { + continue + } + + neg := arg[0] == '-' + if neg { + arg = arg[1:] + } + + if arg == "..." { + // ... matches all packages + for _, pkg := range all { + doPkg(pkg, neg) + } + } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg { + // dir/... 
matches all packages beneath dir + for _, pkg := range all { + if strings.HasPrefix(pkg, dir) && + (len(pkg) == len(dir) || pkg[len(dir)] == '/') { + doPkg(pkg, neg) + } + } + } else { + // single package + doPkg(strings.TrimSuffix(arg, "/"), neg) + } + } + + return pkgs +} diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go new file mode 100644 index 0000000..24cbcbe --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go @@ -0,0 +1,108 @@ +package buildutil + +import ( + "fmt" + "go/build" + "io" + "io/ioutil" + "os" + "path" + "path/filepath" + "sort" + "strings" + "time" +) + +// FakeContext returns a build.Context for the fake file tree specified +// by pkgs, which maps package import paths to a mapping from file base +// names to contents. +// +// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides +// the necessary file access methods to read from memory instead of the +// real file system. +// +// Unlike a real file tree, the fake one has only two levels---packages +// and files---so ReadDir("/go/src/") returns all packages under +// /go/src/ including, for instance, "math" and "math/big". +// ReadDir("/go/src/math/big") would return all the files in the +// "math/big" package. +// +func FakeContext(pkgs map[string]map[string]string) *build.Context { + clean := func(filename string) string { + f := path.Clean(filepath.ToSlash(filename)) + // Removing "/go/src" while respecting segment + // boundaries has this unfortunate corner case: + if f == "/go/src" { + return "" + } + return strings.TrimPrefix(f, "/go/src/") + } + + ctxt := build.Default // copy + ctxt.GOROOT = "/go" + ctxt.GOPATH = "" + ctxt.IsDir = func(dir string) bool { + dir = clean(dir) + if dir == "" { + return true // needed by (*build.Context).SrcDirs + } + return pkgs[dir] != nil + } + ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) { + dir = clean(dir) + var fis []os.FileInfo + if dir == "" { + // enumerate packages + for importPath := range pkgs { + fis = append(fis, fakeDirInfo(importPath)) + } + } else { + // enumerate files of package + for basename := range pkgs[dir] { + fis = append(fis, fakeFileInfo(basename)) + } + } + sort.Sort(byName(fis)) + return fis, nil + } + ctxt.OpenFile = func(filename string) (io.ReadCloser, error) { + filename = clean(filename) + dir, base := path.Split(filename) + content, ok := pkgs[path.Clean(dir)][base] + if !ok { + return nil, fmt.Errorf("file not found: %s", filename) + } + return ioutil.NopCloser(strings.NewReader(content)), nil + } + ctxt.IsAbsPath = func(path string) bool { + path = filepath.ToSlash(path) + // Don't rely on the default (filepath.Path) since on + // Windows, it reports virtual paths as non-absolute. 
+ return strings.HasPrefix(path, "/") + } + return &ctxt +} + +type byName []os.FileInfo + +func (s byName) Len() int { return len(s) } +func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } + +type fakeFileInfo string + +func (fi fakeFileInfo) Name() string { return string(fi) } +func (fakeFileInfo) Sys() interface{} { return nil } +func (fakeFileInfo) ModTime() time.Time { return time.Time{} } +func (fakeFileInfo) IsDir() bool { return false } +func (fakeFileInfo) Size() int64 { return 0 } +func (fakeFileInfo) Mode() os.FileMode { return 0644 } + +type fakeDirInfo string + +func (fd fakeDirInfo) Name() string { return string(fd) } +func (fakeDirInfo) Sys() interface{} { return nil } +func (fakeDirInfo) ModTime() time.Time { return time.Time{} } +func (fakeDirInfo) IsDir() bool { return true } +func (fakeDirInfo) Size() int64 { return 0 } +func (fakeDirInfo) Mode() os.FileMode { return 0755 } diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go new file mode 100644 index 0000000..3f71c4f --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/overlay.go @@ -0,0 +1,103 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +import ( + "bufio" + "bytes" + "fmt" + "go/build" + "io" + "io/ioutil" + "path/filepath" + "strconv" + "strings" +) + +// OverlayContext overlays a build.Context with additional files from +// a map. Files in the map take precedence over other files. +// +// In addition to plain string comparison, two file names are +// considered equal if their base names match and their directory +// components point at the same directory on the file system. That is, +// symbolic links are followed for directories, but not files. +// +// A common use case for OverlayContext is to allow editors to pass in +// a set of unsaved, modified files. +// +// Currently, only the Context.OpenFile function will respect the +// overlay. This may change in the future. +func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context { + // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir + + rc := func(data []byte) (io.ReadCloser, error) { + return ioutil.NopCloser(bytes.NewBuffer(data)), nil + } + + copy := *orig // make a copy + ctxt := &copy + ctxt.OpenFile = func(path string) (io.ReadCloser, error) { + // Fast path: names match exactly. + if content, ok := overlay[path]; ok { + return rc(content) + } + + // Slow path: check for same file under a different + // alias, perhaps due to a symbolic link. + for filename, content := range overlay { + if sameFile(path, filename) { + return rc(content) + } + } + + return OpenFile(orig, path) + } + return ctxt +} + +// ParseOverlayArchive parses an archive containing Go files and their +// contents. The result is intended to be used with OverlayContext. +// +// +// Archive format +// +// The archive consists of a series of files. Each file consists of a +// name, a decimal file size and the file contents, separated by +// newlines. No newline follows after the file contents. +func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) { + overlay := make(map[string][]byte) + r := bufio.NewReader(archive) + for { + // Read file name.
+ filename, err := r.ReadString('\n') + if err != nil { + if err == io.EOF { + break // OK + } + return nil, fmt.Errorf("reading archive file name: %v", err) + } + filename = filepath.Clean(strings.TrimSpace(filename)) + + // Read file size. + sz, err := r.ReadString('\n') + if err != nil { + return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err) + } + sz = strings.TrimSpace(sz) + size, err := strconv.ParseUint(sz, 10, 32) + if err != nil { + return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err) + } + + // Read file content. + content := make([]byte, size) + if _, err := io.ReadFull(r, content); err != nil { + return nil, fmt.Errorf("reading archive file %s: %v", filename, err) + } + overlay[filename] = content + } + + return overlay, nil +} diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go new file mode 100644 index 0000000..486606f --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/tags.go @@ -0,0 +1,75 @@ +package buildutil + +// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go. + +import "fmt" + +const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " + + "For more information about build tags, see the description of " + + "build constraints in the documentation for the go/build package" + +// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses +// a flag value in the same manner as go build's -tags flag and +// populates a []string slice. +// +// See $GOROOT/src/go/build/doc.go for description of build tags. +// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag. +// +// Example: +// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc) +type TagsFlag []string + +func (v *TagsFlag) Set(s string) error { + var err error + *v, err = splitQuotedFields(s) + if *v == nil { + *v = []string{} + } + return err +} + +func (v *TagsFlag) Get() interface{} { return *v } + +func splitQuotedFields(s string) ([]string, error) { + // Split fields allowing '' or "" around elements. + // Quotes further inside the string do not count. + var f []string + for len(s) > 0 { + for len(s) > 0 && isSpaceByte(s[0]) { + s = s[1:] + } + if len(s) == 0 { + break + } + // Accepted quoted string. No unescaping inside. + if s[0] == '"' || s[0] == '\'' { + quote := s[0] + s = s[1:] + i := 0 + for i < len(s) && s[i] != quote { + i++ + } + if i >= len(s) { + return nil, fmt.Errorf("unterminated %c string", quote) + } + f = append(f, s[:i]) + s = s[i+1:] + continue + } + i := 0 + for i < len(s) && !isSpaceByte(s[i]) { + i++ + } + f = append(f, s[:i]) + s = s[i:] + } + return f, nil +} + +func (v *TagsFlag) String() string { + return "" +} + +func isSpaceByte(c byte) bool { + return c == ' ' || c == '\t' || c == '\n' || c == '\r' +} diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go new file mode 100644 index 0000000..fc923d7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/util.go @@ -0,0 +1,212 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package buildutil + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "io/ioutil" + "os" + "path" + "path/filepath" + "strings" +) + +// ParseFile behaves like parser.ParseFile, +// but uses the build context's file system interface, if any. +// +// If file is not absolute (as defined by IsAbsPath), the (dir, file) +// components are joined using JoinPath; dir must be absolute. +// +// The displayPath function, if provided, is used to transform the +// filename that will be attached to the ASTs. +// +// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws. +// +func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) { + if !IsAbsPath(ctxt, file) { + file = JoinPath(ctxt, dir, file) + } + rd, err := OpenFile(ctxt, file) + if err != nil { + return nil, err + } + defer rd.Close() // ignore error + if displayPath != nil { + file = displayPath(file) + } + return parser.ParseFile(fset, file, rd, mode) +} + +// ContainingPackage returns the package containing filename. +// +// If filename is not absolute, it is interpreted relative to working directory dir. +// All I/O is via the build context's file system interface, if any. +// +// The '...Files []string' fields of the resulting build.Package are not +// populated (build.FindOnly mode). +// +func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) { + if !IsAbsPath(ctxt, filename) { + filename = JoinPath(ctxt, dir, filename) + } + + // We must not assume the file tree uses + // "/" always, + // `\` always, + // or os.PathSeparator (which varies by platform), + // but to make any progress, we are forced to assume that + // paths will not use `\` unless the PathSeparator + // is also `\`, thus we can rely on filepath.ToSlash for some sanity. + + dirSlash := path.Dir(filepath.ToSlash(filename)) + "/" + + // We assume that no source root (GOPATH[i] or GOROOT) contains any other. + for _, srcdir := range ctxt.SrcDirs() { + srcdirSlash := filepath.ToSlash(srcdir) + "/" + if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok { + return ctxt.Import(importPath, dir, build.FindOnly) + } + } + + return nil, fmt.Errorf("can't find package containing %s", filename) +} + +// -- Effective methods of file system interface ------------------------- + +// (go/build.Context defines these as methods, but does not export them.) + +// hasSubdir calls ctxt.HasSubdir (if not nil) or else uses +// the local file system to answer the question. +func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) { + if f := ctxt.HasSubdir; f != nil { + return f(root, dir) + } + + // Try using paths we received. + if rel, ok = hasSubdir(root, dir); ok { + return + } + + // Try expanding symlinks and comparing + // expanded against unexpanded and + // expanded against expanded. 
+ rootSym, _ := filepath.EvalSymlinks(root) + dirSym, _ := filepath.EvalSymlinks(dir) + + if rel, ok = hasSubdir(rootSym, dir); ok { + return + } + if rel, ok = hasSubdir(root, dirSym); ok { + return + } + return hasSubdir(rootSym, dirSym) +} + +func hasSubdir(root, dir string) (rel string, ok bool) { + const sep = string(filepath.Separator) + root = filepath.Clean(root) + if !strings.HasSuffix(root, sep) { + root += sep + } + + dir = filepath.Clean(dir) + if !strings.HasPrefix(dir, root) { + return "", false + } + + return filepath.ToSlash(dir[len(root):]), true +} + +// FileExists returns true if the specified file exists, +// using the build context's file system interface. +func FileExists(ctxt *build.Context, path string) bool { + if ctxt.OpenFile != nil { + r, err := ctxt.OpenFile(path) + if err != nil { + return false + } + r.Close() // ignore error + return true + } + _, err := os.Stat(path) + return err == nil +} + +// OpenFile behaves like os.Open, +// but uses the build context's file system interface, if any. +func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) { + if ctxt.OpenFile != nil { + return ctxt.OpenFile(path) + } + return os.Open(path) +} + +// IsAbsPath behaves like filepath.IsAbs, +// but uses the build context's file system interface, if any. +func IsAbsPath(ctxt *build.Context, path string) bool { + if ctxt.IsAbsPath != nil { + return ctxt.IsAbsPath(path) + } + return filepath.IsAbs(path) +} + +// JoinPath behaves like filepath.Join, +// but uses the build context's file system interface, if any. +func JoinPath(ctxt *build.Context, path ...string) string { + if ctxt.JoinPath != nil { + return ctxt.JoinPath(path...) + } + return filepath.Join(path...) +} + +// IsDir behaves like os.Stat plus IsDir, +// but uses the build context's file system interface, if any. +func IsDir(ctxt *build.Context, path string) bool { + if ctxt.IsDir != nil { + return ctxt.IsDir(path) + } + fi, err := os.Stat(path) + return err == nil && fi.IsDir() +} + +// ReadDir behaves like ioutil.ReadDir, +// but uses the build context's file system interface, if any. +func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) { + if ctxt.ReadDir != nil { + return ctxt.ReadDir(path) + } + return ioutil.ReadDir(path) +} + +// SplitPathList behaves like filepath.SplitList, +// but uses the build context's file system interface, if any. +func SplitPathList(ctxt *build.Context, s string) []string { + if ctxt.SplitPathList != nil { + return ctxt.SplitPathList(s) + } + return filepath.SplitList(s) +} + +// sameFile returns true if x and y have the same basename and denote +// the same file. +// +func sameFile(x, y string) bool { + if path.Clean(x) == path.Clean(y) { + return true + } + if filepath.Base(x) == filepath.Base(y) { // (optimisation) + if xi, err := os.Stat(x); err == nil { + if yi, err := os.Stat(y); err == nil { + return os.SameFile(xi, yi) + } + } + } + return false +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go new file mode 100644 index 0000000..e53270e --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go @@ -0,0 +1,100 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gcexportdata provides functions for locating, reading, and +// writing export data files containing type information produced by the +// gc compiler. 
This package supports go1.7 export data format and all +// later versions. +// +// This package replaces the deprecated golang.org/x/tools/go/gcimporter15 +// package, which will be deleted in October 2017. +// +// Although it might seem convenient for this package to live alongside +// go/types in the standard library, this would cause version skew +// problems for developer tools that use it, since they must be able to +// consume the outputs of the gc compiler both before and after a Go +// update such as from Go 1.7 to Go 1.8. Because this package lives in +// golang.org/x/tools, sites can update their version of this repo some +// time before the Go 1.8 release and rebuild and redeploy their +// developer tools, which will then be able to consume both Go 1.7 and +// Go 1.8 export data files, so they will work before and after the +// Go update. (See discussion at https://github.com/golang/go/issues/15651.) +// +package gcexportdata // import "golang.org/x/tools/go/gcexportdata" + +import ( + "bufio" + "bytes" + "fmt" + "go/token" + "go/types" + "io" + "io/ioutil" + + gcimporter "golang.org/x/tools/go/gcimporter15" +) + +// Find returns the name of an object (.o) or archive (.a) file +// containing type information for the specified import path, +// using the workspace layout conventions of go/build. +// If no file was found, an empty filename is returned. +// +// A relative srcDir is interpreted relative to the current working directory. +// +// Find also returns the package's resolved (canonical) import path, +// reflecting the effects of srcDir and vendoring on importPath. +func Find(importPath, srcDir string) (filename, path string) { + return gcimporter.FindPkg(importPath, srcDir) +} + +// NewReader returns a reader for the export data section of an object +// (.o) or archive (.a) file read from r. The new reader may provide +// additional trailing data beyond the end of the export data. +func NewReader(r io.Reader) (io.Reader, error) { + buf := bufio.NewReader(r) + _, err := gcimporter.FindExportData(buf) + // If we ever switch to a zip-like archive format with the ToC + // at the end, we can return the correct portion of export data, + // but for now we must return the entire rest of the file. + return buf, err +} + +// Read reads export data from in, decodes it, and returns type +// information for the package. +// The package name is specified by path. +// File position information is added to fset. +// +// Read may inspect and add to the imports map to ensure that references +// within the export data to other packages are consistent. The caller +// must ensure that imports[path] does not exist, or exists but is +// incomplete (see types.Package.Complete), and Read inserts the +// resulting package into this map entry. +// +// On return, the state of the reader is undefined. +func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { + data, err := ioutil.ReadAll(in) + if err != nil { + return nil, fmt.Errorf("reading export data for %q: %v", path, err) + } + + if bytes.HasPrefix(data, []byte("!")) { + return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) + } + + // The App Engine Go runtime v1.6 uses the old export data format. + // TODO(adonovan): delete once v1.7 has been around for a while. 
+ if bytes.HasPrefix(data, []byte("package ")) { + return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) + } + + _, pkg, err := gcimporter.BImportData(fset, imports, data, path) + return pkg, err +} + +// Write writes encoded type information for the specified package to out. +// The FileSet provides file position information for named objects. +func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { + _, err := out.Write(gcimporter.BExportData(fset, pkg)) + return err +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go new file mode 100644 index 0000000..efe221e --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go @@ -0,0 +1,73 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcexportdata + +import ( + "fmt" + "go/token" + "go/types" + "os" +) + +// NewImporter returns a new instance of the types.Importer interface +// that reads type information from export data files written by gc. +// The Importer also satisfies types.ImporterFrom. +// +// Export data files are located using "go build" workspace conventions +// and the build.Default context. +// +// Use this importer instead of go/importer.For("gc", ...) to avoid the +// version-skew problems described in the documentation of this package, +// or to control the FileSet or access the imports map populated during +// package loading. +// +func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { + return importer{fset, imports} +} + +type importer struct { + fset *token.FileSet + imports map[string]*types.Package +} + +func (imp importer) Import(importPath string) (*types.Package, error) { + return imp.ImportFrom(importPath, "", 0) +} + +func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { + filename, path := Find(importPath, srcDir) + if filename == "" { + if importPath == "unsafe" { + // Even for unsafe, call Find first in case + // the package was vendored. + return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %s", importPath) + } + + if pkg, ok := imp.imports[path]; ok && pkg.Complete() { + return pkg, nil // cache hit + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + f.Close() + if err != nil { + // add file name to error + err = fmt.Errorf("reading export data: %s: %v", filename, err) + } + }() + + r, err := NewReader(f) + if err != nil { + return nil, err + } + + return Read(r, imp.fset, imp.imports, path) +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/main.go b/vendor/golang.org/x/tools/go/gcexportdata/main.go new file mode 100644 index 0000000..106046c --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/main.go @@ -0,0 +1,81 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +// The gcexportdata command is a diagnostic tool that displays the +// contents of gc export data files. 
+package main + +import ( + "flag" + "fmt" + "go/token" + "go/types" + "log" + "os" + + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/types/typeutil" +) + +func main() { + log.SetPrefix("gcexportdata: ") + log.SetFlags(0) + flag.Usage = func() { + fmt.Fprintln(os.Stderr, "usage: gcexportdata file.a") + } + flag.Parse() + if flag.NArg() != 1 { + flag.Usage() + os.Exit(2) + } + filename := flag.Args()[0] + + f, err := os.Open(filename) + if err != nil { + log.Fatal(err) + } + + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Decode the package. + imports := make(map[string]*types.Package) + fset := token.NewFileSet() + pkg, err := gcexportdata.Read(r, fset, imports, "dummy") + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Print all package-level declarations, including non-exported ones. + fmt.Printf("package %s\n", pkg.Name()) + for _, imp := range pkg.Imports() { + fmt.Printf("import %q\n", imp.Path()) + } + qual := func(p *types.Package) string { + if pkg == p { + return "" + } + return p.Name() + } + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + fmt.Printf("%s: %s\n", + fset.Position(obj.Pos()), + types.ObjectString(obj, qual)) + + // For types, print each method. + if _, ok := obj.(*types.TypeName); ok { + for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { + fmt.Printf("%s: %s\n", + fset.Position(method.Obj().Pos()), + types.SelectionString(method, qual)) + } + } + } +} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/bexport.go b/vendor/golang.org/x/tools/go/gcimporter15/bexport.go new file mode 100644 index 0000000..cbf8bc0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcimporter15/bexport.go @@ -0,0 +1,828 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; +// see that file for specification of the format. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "log" + "math" + "math/big" + "sort" + "strings" +) + +// If debugFormat is set, each integer and string value is preceded by a marker +// and position information in the encoding. This mechanism permits an importer +// to recognize immediately when it is out of sync. The importer recognizes this +// mode automatically (i.e., it can import export data produced with debugging +// support even if debugFormat is not set at the time of import). This mode will +// lead to massively larger export data (by a factor of 2 to 3) and should only +// be enabled during development and debugging. +// +// NOTE: This flag is the first flag to enable if importing dies because of +// (suspected) format errors, and whenever a change is made to the format. +const debugFormat = false // default: false + +// If trace is set, debugging output is printed to std out. +const trace = false // default: false + +// Current export format version. Increase with each format change.
+// 4: type name objects support type aliases, uses aliasTag +// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used) +// 2: removed unused bool in ODCL export (compiler only) +// 1: header format change (more regular), export package for _ struct fields +// 0: Go1.7 encoding +const exportVersion = 4 + +// trackAllTypes enables cycle tracking for all types, not just named +// types. The existing compiler invariants assume that unnamed types +// that are not completely set up are not used, or else there are spurious +// errors. +// If disabled, only named types are tracked, possibly leading to slightly +// less efficient encoding in rare cases. It also prevents the export of +// some corner-case type declarations (but those are not handled correctly +// with the textual export format either). +// TODO(gri) enable and remove once issues caused by it are fixed +const trackAllTypes = false + +type exporter struct { + fset *token.FileSet + out bytes.Buffer + + // object -> index maps, indexed in order of serialization + strIndex map[string]int + pkgIndex map[*types.Package]int + typIndex map[types.Type]int + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + + // debugging support + written int // bytes written + indent int // for trace +} + +// BExportData returns binary export data for pkg. +// If no file set is provided, position info will be missing. +func BExportData(fset *token.FileSet, pkg *types.Package) []byte { + p := exporter{ + fset: fset, + strIndex: map[string]int{"": 0}, // empty string is mapped to 0 + pkgIndex: make(map[*types.Package]int), + typIndex: make(map[types.Type]int), + posInfoFormat: true, // TODO(gri) might become a flag, eventually + } + + // write version info + // The version string must start with "version %d" where %d is the version + // number. Additional debugging information may follow after a blank; that + // text is ignored by the importer.
+ p.rawStringln(fmt.Sprintf("version %d", exportVersion)) + var debug string + if debugFormat { + debug = "debug" + } + p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly + p.bool(trackAllTypes) + p.bool(p.posInfoFormat) + + // --- generic export data --- + + // populate type map with predeclared "known" types + for index, typ := range predeclared { + p.typIndex[typ] = index + } + if len(p.typIndex) != len(predeclared) { + log.Fatalf("gcimporter: duplicate entries in type map?") + } + + // write package data + p.pkg(pkg, true) + if trace { + p.tracef("\n") + } + + // write objects + objcount := 0 + scope := pkg.Scope() + for _, name := range scope.Names() { + if !ast.IsExported(name) { + continue + } + if trace { + p.tracef("\n") + } + p.obj(scope.Lookup(name)) + objcount++ + } + + // indicate end of list + if trace { + p.tracef("\n") + } + p.tag(endTag) + + // for self-verification only (redundant) + p.int(objcount) + + if trace { + p.tracef("\n") + } + + // --- end of export data --- + + return p.out.Bytes() +} + +func (p *exporter) pkg(pkg *types.Package, emptypath bool) { + if pkg == nil { + log.Fatalf("gcimporter: unexpected nil pkg") + } + + // if we saw the package before, write its index (>= 0) + if i, ok := p.pkgIndex[pkg]; ok { + p.index('P', i) + return + } + + // otherwise, remember the package, write the package tag (< 0) and package data + if trace { + p.tracef("P%d = { ", len(p.pkgIndex)) + defer p.tracef("} ") + } + p.pkgIndex[pkg] = len(p.pkgIndex) + + p.tag(packageTag) + p.string(pkg.Name()) + if emptypath { + p.string("") + } else { + p.string(pkg.Path()) + } +} + +func (p *exporter) obj(obj types.Object) { + switch obj := obj.(type) { + case *types.Const: + p.tag(constTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + p.value(obj.Val()) + + case *types.TypeName: + if isAlias(obj) { + p.tag(aliasTag) + p.pos(obj) + p.qualifiedName(obj) + } else { + p.tag(typeTag) + } + p.typ(obj.Type()) + + case *types.Var: + p.tag(varTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + + case *types.Func: + p.tag(funcTag) + p.pos(obj) + p.qualifiedName(obj) + sig := obj.Type().(*types.Signature) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + + default: + log.Fatalf("gcimporter: unexpected object %v (%T)", obj, obj) + } +} + +func (p *exporter) pos(obj types.Object) { + if !p.posInfoFormat { + return + } + + file, line := p.fileLine(obj) + if file == p.prevFile { + // common case: write line delta + // delta == 0 means different file or no line change + delta := line - p.prevLine + p.int(delta) + if delta == 0 { + p.int(-1) // -1 means no file change + } + } else { + // different file + p.int(0) + // Encode filename as length of common prefix with previous + // filename, followed by (possibly empty) suffix. Filenames + // frequently share path prefixes, so this can save a lot + // of space and make export data size less dependent on file + // path length. The suffix is unlikely to be empty because + // file names tend to end in ".go". 
+ n := commonPrefixLen(p.prevFile, file) + p.int(n) // n >= 0 + p.string(file[n:]) // write suffix only + p.prevFile = file + p.int(line) + } + p.prevLine = line +} + +func (p *exporter) fileLine(obj types.Object) (file string, line int) { + if p.fset != nil { + pos := p.fset.Position(obj.Pos()) + file = pos.Filename + line = pos.Line + } + return +} + +func commonPrefixLen(a, b string) int { + if len(a) > len(b) { + a, b = b, a + } + // len(a) <= len(b) + i := 0 + for i < len(a) && a[i] == b[i] { + i++ + } + return i +} + +func (p *exporter) qualifiedName(obj types.Object) { + p.string(obj.Name()) + p.pkg(obj.Pkg(), false) +} + +func (p *exporter) typ(t types.Type) { + if t == nil { + log.Fatalf("gcimporter: nil type") + } + + // Possible optimization: Anonymous pointer types *T where + // T is a named type are common. We could canonicalize all + // such types *T to a single type PT = *T. This would lead + // to at most one *T entry in typIndex, and all future *T's + // would be encoded as the respective index directly. Would + // save 1 byte (pointerTag) per *T and reduce the typIndex + // size (at the cost of a canonicalization map). We can do + // this later, without encoding format change. + + // if we saw the type before, write its index (>= 0) + if i, ok := p.typIndex[t]; ok { + p.index('T', i) + return + } + + // otherwise, remember the type, write the type tag (< 0) and type data + if trackAllTypes { + if trace { + p.tracef("T%d = {>\n", len(p.typIndex)) + defer p.tracef("<\n} ") + } + p.typIndex[t] = len(p.typIndex) + } + + switch t := t.(type) { + case *types.Named: + if !trackAllTypes { + // if we don't track all types, track named types now + p.typIndex[t] = len(p.typIndex) + } + + p.tag(namedTag) + p.pos(t.Obj()) + p.qualifiedName(t.Obj()) + p.typ(t.Underlying()) + if !types.IsInterface(t) { + p.assocMethods(t) + } + + case *types.Array: + p.tag(arrayTag) + p.int64(t.Len()) + p.typ(t.Elem()) + + case *types.Slice: + p.tag(sliceTag) + p.typ(t.Elem()) + + case *dddSlice: + p.tag(dddTag) + p.typ(t.elem) + + case *types.Struct: + p.tag(structTag) + p.fieldList(t) + + case *types.Pointer: + p.tag(pointerTag) + p.typ(t.Elem()) + + case *types.Signature: + p.tag(signatureTag) + p.paramList(t.Params(), t.Variadic()) + p.paramList(t.Results(), false) + + case *types.Interface: + p.tag(interfaceTag) + p.iface(t) + + case *types.Map: + p.tag(mapTag) + p.typ(t.Key()) + p.typ(t.Elem()) + + case *types.Chan: + p.tag(chanTag) + p.int(int(3 - t.Dir())) // hack + p.typ(t.Elem()) + + default: + log.Fatalf("gcimporter: unexpected type %T: %s", t, t) + } +} + +func (p *exporter) assocMethods(named *types.Named) { + // Sort methods (for determinism). 
+ var methods []*types.Func + for i := 0; i < named.NumMethods(); i++ { + methods = append(methods, named.Method(i)) + } + sort.Sort(methodsByName(methods)) + + p.int(len(methods)) + + if trace && methods != nil { + p.tracef("associated methods {>\n") + } + + for i, m := range methods { + if trace && i > 0 { + p.tracef("\n") + } + + p.pos(m) + name := m.Name() + p.string(name) + if !exported(name) { + p.pkg(m.Pkg(), false) + } + + sig := m.Type().(*types.Signature) + p.paramList(types.NewTuple(sig.Recv()), false) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + p.int(0) // dummy value for go:nointerface pragma - ignored by importer + } + + if trace && methods != nil { + p.tracef("<\n} ") + } +} + +type methodsByName []*types.Func + +func (x methodsByName) Len() int { return len(x) } +func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } + +func (p *exporter) fieldList(t *types.Struct) { + if trace && t.NumFields() > 0 { + p.tracef("fields {>\n") + defer p.tracef("<\n} ") + } + + p.int(t.NumFields()) + for i := 0; i < t.NumFields(); i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.field(t.Field(i)) + p.string(t.Tag(i)) + } +} + +func (p *exporter) field(f *types.Var) { + if !f.IsField() { + log.Fatalf("gcimporter: field expected") + } + + p.pos(f) + p.fieldName(f) + p.typ(f.Type()) +} + +func (p *exporter) iface(t *types.Interface) { + // TODO(gri): enable importer to load embedded interfaces, + // then emit Embeddeds and ExplicitMethods separately here. + p.int(0) + + n := t.NumMethods() + if trace && n > 0 { + p.tracef("methods {>\n") + defer p.tracef("<\n} ") + } + p.int(n) + for i := 0; i < n; i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.method(t.Method(i)) + } +} + +func (p *exporter) method(m *types.Func) { + sig := m.Type().(*types.Signature) + if sig.Recv() == nil { + log.Fatalf("gcimporter: method expected") + } + + p.pos(m) + p.string(m.Name()) + if m.Name() != "_" && !ast.IsExported(m.Name()) { + p.pkg(m.Pkg(), false) + } + + // interface method; no need to encode receiver. + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) +} + +func (p *exporter) fieldName(f *types.Var) { + name := f.Name() + + if f.Anonymous() { + // anonymous field - we distinguish between 3 cases: + // 1) field name matches base type name and is exported + // 2) field name matches base type name and is not exported + // 3) field name doesn't match base type name (alias name) + bname := basetypeName(f.Type()) + if name == bname { + if ast.IsExported(name) { + name = "" // 1) we don't need to know the field name or package + } else { + name = "?" // 2) use unexported name "?" 
to force package export + } + } else { + // 3) indicate alias and export name as is + // (this requires an extra "@" but this is a rare case) + p.string("@") + } + } + + p.string(name) + if name != "" && !ast.IsExported(name) { + p.pkg(f.Pkg(), false) + } +} + +func basetypeName(typ types.Type) string { + switch typ := deref(typ).(type) { + case *types.Basic: + return typ.Name() + case *types.Named: + return typ.Obj().Name() + default: + return "" // unnamed type + } +} + +func (p *exporter) paramList(params *types.Tuple, variadic bool) { + // use negative length to indicate unnamed parameters + // (look at the first parameter only since either all + // names are present or all are absent) + n := params.Len() + if n > 0 && params.At(0).Name() == "" { + n = -n + } + p.int(n) + for i := 0; i < params.Len(); i++ { + q := params.At(i) + t := q.Type() + if variadic && i == params.Len()-1 { + t = &dddSlice{t.(*types.Slice).Elem()} + } + p.typ(t) + if n > 0 { + name := q.Name() + p.string(name) + if name != "_" { + p.pkg(q.Pkg(), false) + } + } + p.string("") // no compiler-specific info + } +} + +func (p *exporter) value(x constant.Value) { + if trace { + p.tracef("= ") + } + + switch x.Kind() { + case constant.Bool: + tag := falseTag + if constant.BoolVal(x) { + tag = trueTag + } + p.tag(tag) + + case constant.Int: + if v, exact := constant.Int64Val(x); exact { + // common case: x fits into an int64 - use compact encoding + p.tag(int64Tag) + p.int64(v) + return + } + // uncommon case: large x - use float encoding + // (powers of 2 will be encoded efficiently with exponent) + p.tag(floatTag) + p.float(constant.ToFloat(x)) + + case constant.Float: + p.tag(floatTag) + p.float(x) + + case constant.Complex: + p.tag(complexTag) + p.float(constant.Real(x)) + p.float(constant.Imag(x)) + + case constant.String: + p.tag(stringTag) + p.string(constant.StringVal(x)) + + case constant.Unknown: + // package contains type errors + p.tag(unknownTag) + + default: + log.Fatalf("gcimporter: unexpected value %v (%T)", x, x) + } +} + +func (p *exporter) float(x constant.Value) { + if x.Kind() != constant.Float { + log.Fatalf("gcimporter: unexpected constant %v, want float", x) + } + // extract sign (there is no -0) + sign := constant.Sign(x) + if sign == 0 { + // x == 0 + p.int(0) + return + } + // x != 0 + + var f big.Float + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + r := valueToRat(num) + f.SetRat(r.Quo(r, valueToRat(denom))) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + f.SetFloat64(math.MaxFloat64) // FIXME + } + + // extract exponent such that 0.5 <= m < 1.0 + var m big.Float + exp := f.MantExp(&m) + + // extract mantissa as *big.Int + // - set exponent large enough so mant satisfies mant.IsInt() + // - get *big.Int from mant + m.SetMantExp(&m, int(m.MinPrec())) + mant, acc := m.Int(nil) + if acc != big.Exact { + log.Fatalf("gcimporter: internal error") + } + + p.int(sign) + p.int(exp) + p.string(string(mant.Bytes())) +} + +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. + // I can't believe this is necessary. 
+ bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} + +func (p *exporter) bool(b bool) bool { + if trace { + p.tracef("[") + defer p.tracef("= %v] ", b) + } + + x := 0 + if b { + x = 1 + } + p.int(x) + return b +} + +// ---------------------------------------------------------------------------- +// Low-level encoders + +func (p *exporter) index(marker byte, index int) { + if index < 0 { + log.Fatalf("gcimporter: invalid index < 0") + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%c%d ", marker, index) + } + p.rawInt64(int64(index)) +} + +func (p *exporter) tag(tag int) { + if tag >= 0 { + log.Fatalf("gcimporter: invalid tag >= 0") + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%s ", tagString[-tag]) + } + p.rawInt64(int64(tag)) +} + +func (p *exporter) int(x int) { + p.int64(int64(x)) +} + +func (p *exporter) int64(x int64) { + if debugFormat { + p.marker('i') + } + if trace { + p.tracef("%d ", x) + } + p.rawInt64(x) +} + +func (p *exporter) string(s string) { + if debugFormat { + p.marker('s') + } + if trace { + p.tracef("%q ", s) + } + // if we saw the string before, write its index (>= 0) + // (the empty string is mapped to 0) + if i, ok := p.strIndex[s]; ok { + p.rawInt64(int64(i)) + return + } + // otherwise, remember string and write its negative length and bytes + p.strIndex[s] = len(p.strIndex) + p.rawInt64(-int64(len(s))) + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } +} + +// marker emits a marker byte and position information which makes +// it easy for a reader to detect if it is "out of sync". Used for +// debugFormat format only. +func (p *exporter) marker(m byte) { + p.rawByte(m) + // Enable this for help tracking down the location + // of an incorrect marker when running in debugFormat. + if false && trace { + p.tracef("#%d ", p.written) + } + p.rawInt64(int64(p.written)) +} + +// rawInt64 should only be used by low-level encoders. +func (p *exporter) rawInt64(x int64) { + var tmp [binary.MaxVarintLen64]byte + n := binary.PutVarint(tmp[:], x) + for i := 0; i < n; i++ { + p.rawByte(tmp[i]) + } +} + +// rawStringln should only be used to emit the initial version string. +func (p *exporter) rawStringln(s string) { + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } + p.rawByte('\n') +} + +// rawByte is the bottleneck interface to write to p.out. +// rawByte escapes b as follows (any encoding does that +// hides '$'): +// +// '$' => '|' 'S' +// '|' => '|' '|' +// +// Necessary so other tools can find the end of the +// export data by searching for "$$". +// rawByte should only be used by low-level encoders. +func (p *exporter) rawByte(b byte) { + switch b { + case '$': + // write '$' as '|' 'S' + b = 'S' + fallthrough + case '|': + // write '|' as '|' '|' + p.out.WriteByte('|') + p.written++ + } + p.out.WriteByte(b) + p.written++ +} + +// tracef is like fmt.Printf but it rewrites the format string +// to take care of indentation. +func (p *exporter) tracef(format string, args ...interface{}) { + if strings.ContainsAny(format, "<>\n") { + var buf bytes.Buffer + for i := 0; i < len(format); i++ { + // no need to deal with runes + ch := format[i] + switch ch { + case '>': + p.indent++ + continue + case '<': + p.indent-- + continue + } + buf.WriteByte(ch) + if ch == '\n' { + for j := p.indent; j > 0; j-- { + buf.WriteString(". 
") + } + } + } + format = buf.String() + } + fmt.Printf(format, args...) +} + +// Debugging support. +// (tagString is only used when tracing is enabled) +var tagString = [...]string{ + // Packages + -packageTag: "package", + + // Types + -namedTag: "named type", + -arrayTag: "array", + -sliceTag: "slice", + -dddTag: "ddd", + -structTag: "struct", + -pointerTag: "pointer", + -signatureTag: "signature", + -interfaceTag: "interface", + -mapTag: "map", + -chanTag: "chan", + + // Values + -falseTag: "false", + -trueTag: "true", + -int64Tag: "int64", + -floatTag: "float", + -fractionTag: "fraction", + -complexTag: "complex", + -stringTag: "string", + -unknownTag: "unknown", + + // Type aliases + -aliasTag: "alias", +} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/bimport.go b/vendor/golang.org/x/tools/go/gcimporter15/bimport.go new file mode 100644 index 0000000..1936a7f --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcimporter15/bimport.go @@ -0,0 +1,993 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go. + +package gcimporter + +import ( + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +type importer struct { + imports map[string]*types.Package + data []byte + importpath string + buf []byte // for reading strings + version int // export format version + + // object lists + strList []string // in order of appearance + pathList []string // in order of appearance + pkgList []*types.Package // in order of appearance + typList []types.Type // in order of appearance + interfaceList []*types.Interface // for delayed completion only + trackAllTypes bool + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + fset *token.FileSet + files map[string]*token.File + + // debugging support + debugFormat bool + read int // bytes read +} + +// BImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. +func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + // catch panics and return them as errors + defer func() { + if e := recover(); e != nil { + // The package (filename) causing the problem is added to this + // error by a wrapper in the caller (Import in gcimporter.go). + // Return a (possibly nil or incomplete) package unchanged (see #16088). + err = fmt.Errorf("cannot import, possibly version skew (%v) - reinstall package", e) + } + }() + + p := importer{ + imports: imports, + data: data, + importpath: path, + version: -1, // unknown version + strList: []string{""}, // empty string is mapped to 0 + pathList: []string{""}, // empty string is mapped to 0 + fset: fset, + files: make(map[string]*token.File), + } + + // read version info + var versionstr string + if b := p.rawByte(); b == 'c' || b == 'd' { + // Go1.7 encoding; first byte encodes low-level + // encoding format (compact vs debug). + // For backward-compatibility only (avoid problems with + // old installed packages). Newly compiled packages use + // the extensible format string. + // TODO(gri) Remove this support eventually; after Go1.8. 
+ if b == 'd' { + p.debugFormat = true + } + p.trackAllTypes = p.rawByte() == 'a' + p.posInfoFormat = p.int() != 0 + versionstr = p.string() + if versionstr == "v1" { + p.version = 0 + } + } else { + // Go1.8 extensible encoding + // read version string and extract version number (ignore anything after the version number) + versionstr = p.rawStringln(b) + if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { + if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { + p.version = v + } + } + } + + // read version specific flags - extend as necessary + switch p.version { + // case 6: + // ... + // fallthrough + case 5, 4, 3, 2, 1: + p.debugFormat = p.rawStringln(p.rawByte()) == "debug" + p.trackAllTypes = p.int() != 0 + p.posInfoFormat = p.int() != 0 + case 0: + // Go1.7 encoding format - nothing to do here + default: + errorf("unknown export format version %d (%q)", p.version, versionstr) + } + + // --- generic export data --- + + // populate typList with predeclared "known" types + p.typList = append(p.typList, predeclared...) + + // read package data + pkg = p.pkg() + + // read objects of phase 1 only (see cmd/compiler/internal/gc/bexport.go) + objcount := 0 + for { + tag := p.tagOrIndex() + if tag == endTag { + break + } + p.obj(tag) + objcount++ + } + + // self-verification + if count := p.int(); count != objcount { + errorf("got %d objects; want %d", objcount, count) + } + + // ignore compiler-specific import data + + // complete interfaces + // TODO(gri) re-investigate if we still need to do this in a delayed fashion + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), p.pkgList[1:]...) + sort.Sort(byPath(list)) + pkg.SetImports(list) + + // package was imported completely and without errors + pkg.MarkComplete() + + return p.read, pkg, nil +} + +func errorf(format string, args ...interface{}) { + panic(fmt.Sprintf(format, args...)) +} + +func (p *importer) pkg() *types.Package { + // if the package was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.pkgList[i] + } + + // otherwise, i is the package tag (< 0) + if i != packageTag { + errorf("unexpected package tag %d version %d", i, p.version) + } + + // read package data + name := p.string() + var path string + if p.version >= 5 { + path = p.path() + } else { + path = p.string() + } + + // we should never see an empty package name + if name == "" { + errorf("empty package name in import") + } + + // an empty path denotes the package we are currently importing; + // it must be the first package we see + if (path == "") != (len(p.pkgList) == 0) { + errorf("package path %q for pkg index %d", path, len(p.pkgList)) + } + + // if the package was imported before, use that one; otherwise create a new one + if path == "" { + path = p.importpath + } + pkg := p.imports[path] + if pkg == nil { + pkg = types.NewPackage(path, name) + p.imports[path] = pkg + } else if pkg.Name() != name { + errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) + } + p.pkgList = append(p.pkgList, pkg) + + return pkg +} + +// objTag returns the tag value for each object kind. 
+func objTag(obj types.Object) int { + switch obj.(type) { + case *types.Const: + return constTag + case *types.TypeName: + return typeTag + case *types.Var: + return varTag + case *types.Func: + return funcTag + default: + errorf("unexpected object: %v (%T)", obj, obj) // panics + panic("unreachable") + } +} + +func sameObj(a, b types.Object) bool { + // Because unnamed types are not canonicalized, we cannot simply compare types for + // (pointer) identity. + // Ideally we'd check equality of constant values as well, but this is good enough. + return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type()) +} + +func (p *importer) declare(obj types.Object) { + pkg := obj.Pkg() + if alt := pkg.Scope().Insert(obj); alt != nil { + // This can only trigger if we import a (non-type) object a second time. + // Excluding type aliases, this cannot happen because 1) we only import a package + // once; and b) we ignore compiler-specific export data which may contain + // functions whose inlined function bodies refer to other functions that + // were already imported. + // However, type aliases require reexporting the original type, so we need + // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go, + // method importer.obj, switch case importing functions). + // TODO(gri) review/update this comment once the gc compiler handles type aliases. + if !sameObj(obj, alt) { + errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) + } + } +} + +func (p *importer) obj(tag int) { + switch tag { + case constTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil) + val := p.value() + p.declare(types.NewConst(pos, pkg, name, typ, val)) + + case aliasTag: + // TODO(gri) verify type alias hookup is correct + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil) + p.declare(types.NewTypeName(pos, pkg, name, typ)) + + case typeTag: + p.typ(nil) + + case varTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil) + p.declare(types.NewVar(pos, pkg, name, typ)) + + case funcTag: + pos := p.pos() + pkg, name := p.qualifiedName() + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(nil, params, result, isddd) + p.declare(types.NewFunc(pos, pkg, name, sig)) + + default: + errorf("unexpected object tag %d", tag) + } +} + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +func (p *importer) pos() token.Pos { + if !p.posInfoFormat { + return token.NoPos + } + + file := p.prevFile + line := p.prevLine + delta := p.int() + line += delta + if p.version >= 5 { + if delta == deltaNewFile { + if n := p.int(); n >= 0 { + // file changed + file = p.path() + line = n + } + } + } else { + if delta == 0 { + if n := p.int(); n >= 0 { + // file changed + file = p.prevFile[:n] + p.string() + line = p.int() + } + } + } + p.prevFile = file + p.prevLine = line + + // Synthesize a token.Pos + + // Since we don't know the set of needed file positions, we + // reserve maxlines positions per file. + const maxlines = 64 * 1024 + f := p.files[file] + if f == nil { + f = p.fset.AddFile(file, -1, maxlines) + p.files[file] = f + // Allocate the fake linebreak indices on first use. + // TODO(adonovan): opt: save ~512KB using a more complex scheme? 
+ fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + f.SetLines(fakeLines) + } + + if line > maxlines { + line = 1 + } + + // Treat the file as if it contained only newlines + // and column=1: use the line number as the offset. + return f.Pos(line - 1) +} + +var ( + fakeLines []int + fakeLinesOnce sync.Once +) + +func (p *importer) qualifiedName() (pkg *types.Package, name string) { + name = p.string() + pkg = p.pkg() + return +} + +func (p *importer) record(t types.Type) { + p.typList = append(p.typList, t) +} + +// A dddSlice is a types.Type representing ...T parameters. +// It only appears for parameter types and does not escape +// the importer. +type dddSlice struct { + elem types.Type +} + +func (t *dddSlice) Underlying() types.Type { return t } +func (t *dddSlice) String() string { return "..." + t.elem.String() } + +// parent is the package which declared the type; parent == nil means +// the package currently imported. The parent package is needed for +// exported struct fields and interface methods which don't contain +// explicit package information in the export data. +func (p *importer) typ(parent *types.Package) types.Type { + // if the type was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.typList[i] + } + + // otherwise, i is the type tag (< 0) + switch i { + case namedTag: + // read type object + pos := p.pos() + parent, name := p.qualifiedName() + scope := parent.Scope() + obj := scope.Lookup(name) + + // if the object doesn't exist yet, create and insert it + if obj == nil { + obj = types.NewTypeName(pos, parent, name, nil) + scope.Insert(obj) + } + + if _, ok := obj.(*types.TypeName); !ok { + errorf("pkg = %s, name = %s => %s", parent, name, obj) + } + + // associate new named type with obj if it doesn't exist yet + t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) + + // but record the existing type, if any + t := obj.Type().(*types.Named) + p.record(t) + + // read underlying type + t0.SetUnderlying(p.typ(parent)) + + // interfaces don't have associated methods + if types.IsInterface(t0) { + return t + } + + // read associated methods + for i := p.int(); i > 0; i-- { + // TODO(gri) replace this with something closer to fieldName + pos := p.pos() + name := p.string() + if !exported(name) { + p.pkg() + } + + recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? 
+ params, isddd := p.paramList() + result, _ := p.paramList() + p.int() // go:nointerface pragma - discarded + + sig := types.NewSignature(recv.At(0), params, result, isddd) + t0.AddMethod(types.NewFunc(pos, parent, name, sig)) + } + + return t + + case arrayTag: + t := new(types.Array) + if p.trackAllTypes { + p.record(t) + } + + n := p.int64() + *t = *types.NewArray(p.typ(parent), n) + return t + + case sliceTag: + t := new(types.Slice) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewSlice(p.typ(parent)) + return t + + case dddTag: + t := new(dddSlice) + if p.trackAllTypes { + p.record(t) + } + + t.elem = p.typ(parent) + return t + + case structTag: + t := new(types.Struct) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewStruct(p.fieldList(parent)) + return t + + case pointerTag: + t := new(types.Pointer) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewPointer(p.typ(parent)) + return t + + case signatureTag: + t := new(types.Signature) + if p.trackAllTypes { + p.record(t) + } + + params, isddd := p.paramList() + result, _ := p.paramList() + *t = *types.NewSignature(nil, params, result, isddd) + return t + + case interfaceTag: + // Create a dummy entry in the type list. This is safe because we + // cannot expect the interface type to appear in a cycle, as any + // such cycle must contain a named type which would have been + // first defined earlier. + n := len(p.typList) + if p.trackAllTypes { + p.record(nil) + } + + var embeddeds []*types.Named + for n := p.int(); n > 0; n-- { + p.pos() + embeddeds = append(embeddeds, p.typ(parent).(*types.Named)) + } + + t := types.NewInterface(p.methodList(parent), embeddeds) + p.interfaceList = append(p.interfaceList, t) + if p.trackAllTypes { + p.typList[n] = t + } + return t + + case mapTag: + t := new(types.Map) + if p.trackAllTypes { + p.record(t) + } + + key := p.typ(parent) + val := p.typ(parent) + *t = *types.NewMap(key, val) + return t + + case chanTag: + t := new(types.Chan) + if p.trackAllTypes { + p.record(t) + } + + var dir types.ChanDir + // tag values must match the constants in cmd/compile/internal/gc/go.go + switch d := p.int(); d { + case 1 /* Crecv */ : + dir = types.RecvOnly + case 2 /* Csend */ : + dir = types.SendOnly + case 3 /* Cboth */ : + dir = types.SendRecv + default: + errorf("unexpected channel dir %d", d) + } + val := p.typ(parent) + *t = *types.NewChan(dir, val) + return t + + default: + errorf("unexpected type tag %d", i) // panics + panic("unreachable") + } +} + +func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { + if n := p.int(); n > 0 { + fields = make([]*types.Var, n) + tags = make([]string, n) + for i := range fields { + fields[i], tags[i] = p.field(parent) + } + } + return +} + +func (p *importer) field(parent *types.Package) (*types.Var, string) { + pos := p.pos() + pkg, name, alias := p.fieldName(parent) + typ := p.typ(parent) + tag := p.string() + + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + errorf("named base type expected") + } + anonymous = true + } else if alias { + // anonymous field: we have an explicit name because it's an alias + anonymous = true + } + + return types.NewField(pos, pkg, name, typ, anonymous), tag +} + +func (p 
*importer) methodList(parent *types.Package) (methods []*types.Func) { + if n := p.int(); n > 0 { + methods = make([]*types.Func, n) + for i := range methods { + methods[i] = p.method(parent) + } + } + return +} + +func (p *importer) method(parent *types.Package) *types.Func { + pos := p.pos() + pkg, name, _ := p.fieldName(parent) + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(nil, params, result, isddd) + return types.NewFunc(pos, pkg, name, sig) +} + +func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { + name = p.string() + pkg = parent + if pkg == nil { + // use the imported package instead + pkg = p.pkgList[0] + } + if p.version == 0 && name == "_" { + // version 0 didn't export a package for _ fields + return + } + switch name { + case "": + // 1) field name matches base type name and is exported: nothing to do + case "?": + // 2) field name matches base type name and is not exported: need package + name = "" + pkg = p.pkg() + case "@": + // 3) field name doesn't match type name (alias) + name = p.string() + alias = true + fallthrough + default: + if !exported(name) { + pkg = p.pkg() + } + } + return +} + +func (p *importer) paramList() (*types.Tuple, bool) { + n := p.int() + if n == 0 { + return nil, false + } + // negative length indicates unnamed parameters + named := true + if n < 0 { + n = -n + named = false + } + // n > 0 + params := make([]*types.Var, n) + isddd := false + for i := range params { + params[i], isddd = p.param(named) + } + return types.NewTuple(params...), isddd +} + +func (p *importer) param(named bool) (*types.Var, bool) { + t := p.typ(nil) + td, isddd := t.(*dddSlice) + if isddd { + t = types.NewSlice(td.elem) + } + + var pkg *types.Package + var name string + if named { + name = p.string() + if name == "" { + errorf("expected named parameter") + } + if name != "_" { + pkg = p.pkg() + } + if i := strings.Index(name, "·"); i > 0 { + name = name[:i] // cut off gc-specific parameter numbering + } + } + + // read and discard compiler-specific info + p.string() + + return types.NewVar(token.NoPos, pkg, name, t), isddd +} + +func exported(name string) bool { + ch, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(ch) +} + +func (p *importer) value() constant.Value { + switch tag := p.tagOrIndex(); tag { + case falseTag: + return constant.MakeBool(false) + case trueTag: + return constant.MakeBool(true) + case int64Tag: + return constant.MakeInt64(p.int64()) + case floatTag: + return p.float() + case complexTag: + re := p.float() + im := p.float() + return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + case stringTag: + return constant.MakeString(p.string()) + case unknownTag: + return constant.MakeUnknown() + default: + errorf("unexpected value tag %d", tag) // panics + panic("unreachable") + } +} + +func (p *importer) float() constant.Value { + sign := p.int() + if sign == 0 { + return constant.MakeInt64(0) + } + + exp := p.int() + mant := []byte(p.string()) // big endian + + // remove leading 0's if any + for len(mant) > 0 && mant[0] == 0 { + mant = mant[1:] + } + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { + mant[i], mant[j] = mant[j], mant[i] + } + + // adjust exponent (constant.MakeFromBytes creates an integer value, + // but mant represents the mantissa bits such that 0.5 <= 
mant < 1.0) + exp -= len(mant) << 3 + if len(mant) > 0 { + for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { + exp++ + } + } + + x := constant.MakeFromBytes(mant) + switch { + case exp < 0: + d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + } + + if sign < 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +// ---------------------------------------------------------------------------- +// Low-level decoders + +func (p *importer) tagOrIndex() int { + if p.debugFormat { + p.marker('t') + } + + return int(p.rawInt64()) +} + +func (p *importer) int() int { + x := p.int64() + if int64(int(x)) != x { + errorf("exported integer too large") + } + return int(x) +} + +func (p *importer) int64() int64 { + if p.debugFormat { + p.marker('i') + } + + return p.rawInt64() +} + +func (p *importer) path() string { + if p.debugFormat { + p.marker('p') + } + // if the path was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.pathList[i] + } + // otherwise, i is the negative path length (< 0) + a := make([]string, -i) + for n := range a { + a[n] = p.string() + } + s := strings.Join(a, "/") + p.pathList = append(p.pathList, s) + return s +} + +func (p *importer) string() string { + if p.debugFormat { + p.marker('s') + } + // if the string was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.strList[i] + } + // otherwise, i is the negative string length (< 0) + if n := int(-i); n <= cap(p.buf) { + p.buf = p.buf[:n] + } else { + p.buf = make([]byte, n) + } + for i := range p.buf { + p.buf[i] = p.rawByte() + } + s := string(p.buf) + p.strList = append(p.strList, s) + return s +} + +func (p *importer) marker(want byte) { + if got := p.rawByte(); got != want { + errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) + } + + pos := p.read + if n := int(p.rawInt64()); n != pos { + errorf("incorrect position: got %d; want %d", n, pos) + } +} + +// rawInt64 should only be used by low-level decoders. +func (p *importer) rawInt64() int64 { + i, err := binary.ReadVarint(p) + if err != nil { + errorf("read error: %v", err) + } + return i +} + +// rawStringln should only be used to read the initial version string. +func (p *importer) rawStringln(b byte) string { + p.buf = p.buf[:0] + for b != '\n' { + p.buf = append(p.buf, b) + b = p.rawByte() + } + return string(p.buf) +} + +// needed for binary.ReadVarint in rawInt64 +func (p *importer) ReadByte() (byte, error) { + return p.rawByte(), nil +} + +// byte is the bottleneck interface for reading p.data. +// It unescapes '|' 'S' to '$' and '|' '|' to '|'. +// rawByte should only be used by low-level decoders. +func (p *importer) rawByte() byte { + b := p.data[0] + r := 1 + if b == '|' { + b = p.data[1] + r = 2 + switch b { + case 'S': + b = '$' + case '|': + // nothing to do + default: + errorf("unexpected escape sequence in export data") + } + } + p.data = p.data[r:] + p.read += r + return b + +} + +// ---------------------------------------------------------------------------- +// Export format + +// Tags. Must be < 0. 
+const ( + // Objects + packageTag = -(iota + 1) + constTag + typeTag + varTag + funcTag + endTag + + // Types + namedTag + arrayTag + sliceTag + dddTag + structTag + pointerTag + signatureTag + interfaceTag + mapTag + chanTag + + // Values + falseTag + trueTag + int64Tag + floatTag + fractionTag // not used by gc + complexTag + stringTag + nilTag // only used by gc (appears in exported inlined function bodies) + unknownTag // not used by gc (only appears in packages with errors) + + // Type aliases + aliasTag +) + +var predeclared = []types.Type{ + // basic types + types.Typ[types.Bool], + types.Typ[types.Int], + types.Typ[types.Int8], + types.Typ[types.Int16], + types.Typ[types.Int32], + types.Typ[types.Int64], + types.Typ[types.Uint], + types.Typ[types.Uint8], + types.Typ[types.Uint16], + types.Typ[types.Uint32], + types.Typ[types.Uint64], + types.Typ[types.Uintptr], + types.Typ[types.Float32], + types.Typ[types.Float64], + types.Typ[types.Complex64], + types.Typ[types.Complex128], + types.Typ[types.String], + + // basic type aliases + types.Universe.Lookup("byte").Type(), + types.Universe.Lookup("rune").Type(), + + // error + types.Universe.Lookup("error").Type(), + + // untyped types + types.Typ[types.UntypedBool], + types.Typ[types.UntypedInt], + types.Typ[types.UntypedRune], + types.Typ[types.UntypedFloat], + types.Typ[types.UntypedComplex], + types.Typ[types.UntypedString], + types.Typ[types.UntypedNil], + + // package unsafe + types.Typ[types.UnsafePointer], + + // invalid type + types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + anyType{}, +} + +type anyType struct{} + +func (t anyType) Underlying() types.Type { return t } +func (t anyType) String() string { return "any" } diff --git a/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go b/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go new file mode 100644 index 0000000..f33dc56 --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go @@ -0,0 +1,93 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. + +// This file implements FindExportData. + +package gcimporter + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" +) + +func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { + // See $GOROOT/include/ar.h. + hdr := make([]byte, 16+12+6+6+8+10+2) + _, err = io.ReadFull(r, hdr) + if err != nil { + return + } + // leave for debugging + if false { + fmt.Printf("header: %s", hdr) + } + s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) + size, err = strconv.Atoi(s) + if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { + err = fmt.Errorf("invalid archive header") + return + } + name = strings.TrimSpace(string(hdr[:16])) + return +} + +// FindExportData positions the reader r at the beginning of the +// export data section of an underlying GC-created object/archive +// file by reading from it. The reader must be positioned at the +// start of the file before calling this function. The hdr result +// is the string before the export data, either "$$" or "$$B". +// +func FindExportData(r *bufio.Reader) (hdr string, err error) { + // Read first line to make sure this is an object file. 
+ line, err := r.ReadSlice('\n') + if err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + + if string(line) == "!\n" { + // Archive file. Scan to __.PKGDEF. + var name string + if name, _, err = readGopackHeader(r); err != nil { + return + } + + // First entry should be __.PKGDEF. + if name != "__.PKGDEF" { + err = fmt.Errorf("go archive is missing __.PKGDEF") + return + } + + // Read first line of __.PKGDEF data, so that line + // is once again the first line of the input. + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + + // Now at __.PKGDEF in archive or still at beginning of file. + // Either way, line should begin with "go object ". + if !strings.HasPrefix(string(line), "go object ") { + err = fmt.Errorf("not a Go object file") + return + } + + // Skip over object header to export data. + // Begins after first line starting with $$. + for line[0] != '$' { + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + hdr = string(line) + + return +} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go b/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go new file mode 100644 index 0000000..6fbc9d7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go @@ -0,0 +1,1041 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go, +// but it also contains the original source-based importer code for Go1.6. +// Once we stop supporting 1.6, we can remove that code. + +// Package gcimporter15 provides various functions for reading +// gc-generated object files that can be used to implement the +// Importer interface defined by the Go 1.5 standard library package. +// +// Deprecated: this package will be deleted in October 2017. +// New code should use golang.org/x/tools/go/gcexportdata. +// +package gcimporter // import "golang.org/x/tools/go/gcimporter15" + +import ( + "bufio" + "errors" + "fmt" + "go/build" + exact "go/constant" + "go/token" + "go/types" + "io" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "text/scanner" +) + +// debugging/development support +const debug = false + +var pkgExts = [...]string{".a", ".o"} + +// FindPkg returns the filename and unique package id for an import +// path based on package information provided by build.Import (using +// the build.Default build.Context). A relative srcDir is interpreted +// relative to the current working directory. +// If no file was found, an empty filename is returned. +// +func FindPkg(path, srcDir string) (filename, id string) { + if path == "" { + return + } + + var noext string + switch { + default: + // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" + // Don't require the source files to be present. 
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 + srcDir = abs + } + bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary) + if bp.PkgObj == "" { + return + } + noext = strings.TrimSuffix(bp.PkgObj, ".a") + id = bp.ImportPath + + case build.IsLocalImport(path): + // "./x" -> "/this/directory/x.ext", "/this/directory/x" + noext = filepath.Join(srcDir, path) + id = noext + + case filepath.IsAbs(path): + // for completeness only - go/build.Import + // does not support absolute imports + // "/x" -> "/x.ext", "/x" + noext = path + id = path + } + + if false { // for debugging + if path != id { + fmt.Printf("%s -> %s\n", path, id) + } + } + + // try extensions + for _, ext := range pkgExts { + filename = noext + ext + if f, err := os.Stat(filename); err == nil && !f.IsDir() { + return + } + } + + filename = "" // not found + return +} + +// ImportData imports a package by reading the gc-generated export data, +// adds the corresponding package object to the packages map indexed by id, +// and returns the object. +// +// The packages map must contains all packages already imported. The data +// reader position must be the beginning of the export data section. The +// filename is only used in error messages. +// +// If packages[id] contains the completely imported package, that package +// can be used directly, and there is no need to call this function (but +// there is also no harm but for extra time used). +// +func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) { + // support for parser error handling + defer func() { + switch r := recover().(type) { + case nil: + // nothing to do + case importError: + err = r + default: + panic(r) // internal error + } + }() + + var p parser + p.init(filename, id, data, packages) + pkg = p.parseExport() + + return +} + +// Import imports a gc-generated package given its import path and srcDir, adds +// the corresponding package object to the packages map, and returns the object. +// The packages map must contain all packages already imported. +// +func Import(packages map[string]*types.Package, path, srcDir string) (pkg *types.Package, err error) { + filename, id := FindPkg(path, srcDir) + if filename == "" { + if path == "unsafe" { + return types.Unsafe, nil + } + err = fmt.Errorf("can't find import: %s", id) + return + } + + // no need to re-import if the package was imported completely before + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + + // open file + f, err := os.Open(filename) + if err != nil { + return + } + defer func() { + f.Close() + if err != nil { + // add file name to error + err = fmt.Errorf("reading export data: %s: %v", filename, err) + } + }() + + var hdr string + buf := bufio.NewReader(f) + if hdr, err = FindExportData(buf); err != nil { + return + } + + switch hdr { + case "$$\n": + return ImportData(packages, filename, id, buf) + case "$$B\n": + var data []byte + data, err = ioutil.ReadAll(buf) + if err == nil { + fset := token.NewFileSet() + _, pkg, err = BImportData(fset, packages, data, id) + return + } + default: + err = fmt.Errorf("unknown export data header: %q", hdr) + } + + return +} + +// ---------------------------------------------------------------------------- +// Parser + +// TODO(gri) Imported objects don't have position information. +// Ideally use the debug table line info; alternatively +// create some fake position (or the position of the +// import). 
That way error messages referring to imported +// objects can print meaningful information. + +// parser parses the exports inside a gc compiler-produced +// object/archive file and populates its scope with the results. +type parser struct { + scanner scanner.Scanner + tok rune // current token + lit string // literal string; only valid for Ident, Int, String tokens + id string // package id of imported package + sharedPkgs map[string]*types.Package // package id -> package object (across importer) + localPkgs map[string]*types.Package // package id -> package object (just this package) +} + +func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) { + p.scanner.Init(src) + p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } + p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments + p.scanner.Whitespace = 1<<'\t' | 1<<' ' + p.scanner.Filename = filename // for good error messages + p.next() + p.id = id + p.sharedPkgs = packages + if debug { + // check consistency of packages map + for _, pkg := range packages { + if pkg.Name() == "" { + fmt.Printf("no package name for %s\n", pkg.Path()) + } + } + } +} + +func (p *parser) next() { + p.tok = p.scanner.Scan() + switch p.tok { + case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·': + p.lit = p.scanner.TokenText() + default: + p.lit = "" + } + if debug { + fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit) + } +} + +func declTypeName(pkg *types.Package, name string) *types.TypeName { + scope := pkg.Scope() + if obj := scope.Lookup(name); obj != nil { + return obj.(*types.TypeName) + } + obj := types.NewTypeName(token.NoPos, pkg, name, nil) + // a named type may be referred to before the underlying type + // is known - set it up + types.NewNamed(obj, nil, nil) + scope.Insert(obj) + return obj +} + +// ---------------------------------------------------------------------------- +// Error handling + +// Internal errors are boxed as importErrors. +type importError struct { + pos scanner.Position + err error +} + +func (e importError) Error() string { + return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) +} + +func (p *parser) error(err interface{}) { + if s, ok := err.(string); ok { + err = errors.New(s) + } + // panic with a runtime.Error if err is not an error + panic(importError{p.scanner.Pos(), err.(error)}) +} + +func (p *parser) errorf(format string, args ...interface{}) { + p.error(fmt.Sprintf(format, args...)) +} + +func (p *parser) expect(tok rune) string { + lit := p.lit + if p.tok != tok { + p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit) + } + p.next() + return lit +} + +func (p *parser) expectSpecial(tok string) { + sep := 'x' // not white space + i := 0 + for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' { + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + i++ + } + if i < len(tok) { + p.errorf("expected %q, got %q", tok, tok[0:i]) + } +} + +func (p *parser) expectKeyword(keyword string) { + lit := p.expect(scanner.Ident) + if lit != keyword { + p.errorf("expected keyword %s, got %q", keyword, lit) + } +} + +// ---------------------------------------------------------------------------- +// Qualified and unqualified names + +// PackageId = string_lit . 
+// +func (p *parser) parsePackageId() string { + id, err := strconv.Unquote(p.expect(scanner.String)) + if err != nil { + p.error(err) + } + // id == "" stands for the imported package id + // (only known at time of package installation) + if id == "" { + id = p.id + } + return id +} + +// PackageName = ident . +// +func (p *parser) parsePackageName() string { + return p.expect(scanner.Ident) +} + +// dotIdentifier = ( ident | '·' ) { ident | int | '·' } . +func (p *parser) parseDotIdent() string { + ident := "" + if p.tok != scanner.Int { + sep := 'x' // not white space + for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { + ident += p.lit + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + } + } + if ident == "" { + p.expect(scanner.Ident) // use expect() for error handling + } + return ident +} + +// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) . +// +func (p *parser) parseQualifiedName() (id, name string) { + p.expect('@') + id = p.parsePackageId() + p.expect('.') + // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields. + if p.tok == '?' { + p.next() + } else { + name = p.parseDotIdent() + } + return +} + +// getPkg returns the package for a given id. If the package is +// not found, create the package and add it to the p.localPkgs +// and p.sharedPkgs maps. name is the (expected) name of the +// package. If name == "", the package name is expected to be +// set later via an import clause in the export data. +// +// id identifies a package, usually by a canonical package path like +// "encoding/json" but possibly by a non-canonical import path like +// "./json". +// +func (p *parser) getPkg(id, name string) *types.Package { + // package unsafe is not in the packages maps - handle explicitly + if id == "unsafe" { + return types.Unsafe + } + + pkg := p.localPkgs[id] + if pkg == nil { + // first import of id from this package + pkg = p.sharedPkgs[id] + if pkg == nil { + // first import of id by this importer; + // add (possibly unnamed) pkg to shared packages + pkg = types.NewPackage(id, name) + p.sharedPkgs[id] = pkg + } + // add (possibly unnamed) pkg to local packages + if p.localPkgs == nil { + p.localPkgs = make(map[string]*types.Package) + } + p.localPkgs[id] = pkg + } else if name != "" { + // package exists already and we have an expected package name; + // make sure names match or set package name if necessary + if pname := pkg.Name(); pname == "" { + pkg.SetName(name) + } else if pname != name { + p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name) + } + } + return pkg +} + +// parseExportedName is like parseQualifiedName, but +// the package id is resolved to an imported *types.Package. +// +func (p *parser) parseExportedName() (pkg *types.Package, name string) { + id, name := p.parseQualifiedName() + pkg = p.getPkg(id, "") + return +} + +// ---------------------------------------------------------------------------- +// Types + +// BasicType = identifier . +// +func (p *parser) parseBasicType() types.Type { + id := p.expect(scanner.Ident) + obj := types.Universe.Lookup(id) + if obj, ok := obj.(*types.TypeName); ok { + return obj.Type() + } + p.errorf("not a basic type: %s", id) + return nil +} + +// ArrayType = "[" int_lit "]" Type . 
+// +func (p *parser) parseArrayType(parent *types.Package) types.Type { + // "[" already consumed and lookahead known not to be "]" + lit := p.expect(scanner.Int) + p.expect(']') + elem := p.parseType(parent) + n, err := strconv.ParseInt(lit, 10, 64) + if err != nil { + p.error(err) + } + return types.NewArray(elem, n) +} + +// MapType = "map" "[" Type "]" Type . +// +func (p *parser) parseMapType(parent *types.Package) types.Type { + p.expectKeyword("map") + p.expect('[') + key := p.parseType(parent) + p.expect(']') + elem := p.parseType(parent) + return types.NewMap(key, elem) +} + +// Name = identifier | "?" | QualifiedName . +// +// For unqualified and anonymous names, the returned package is the parent +// package unless parent == nil, in which case the returned package is the +// package being imported. (The parent package is not nil if the the name +// is an unqualified struct field or interface method name belonging to a +// type declared in another package.) +// +// For qualified names, the returned package is nil (and not created if +// it doesn't exist yet) unless materializePkg is set (which creates an +// unnamed package with valid package path). In the latter case, a +// subsequent import clause is expected to provide a name for the package. +// +func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) { + pkg = parent + if pkg == nil { + pkg = p.sharedPkgs[p.id] + } + switch p.tok { + case scanner.Ident: + name = p.lit + p.next() + case '?': + // anonymous + p.next() + case '@': + // exported name prefixed with package path + pkg = nil + var id string + id, name = p.parseQualifiedName() + if materializePkg { + pkg = p.getPkg(id, "") + } + default: + p.error("name expected") + } + return +} + +func deref(typ types.Type) types.Type { + if p, _ := typ.(*types.Pointer); p != nil { + return p.Elem() + } + return typ +} + +// Field = Name Type [ string_lit ] . +// +func (p *parser) parseField(parent *types.Package) (*types.Var, string) { + pkg, name := p.parseName(parent, true) + + if name == "_" { + // Blank fields should be package-qualified because they + // are unexported identifiers, but gc does not qualify them. + // Assuming that the ident belongs to the current package + // causes types to change during re-exporting, leading + // to spurious "can't assign A to B" errors from go/types. + // As a workaround, pretend all blank fields belong + // to the same unique dummy package. + const blankpkg = "<_>" + pkg = p.getPkg(blankpkg, blankpkg) + } + + typ := p.parseType(parent) + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + p.errorf("anonymous field expected") + } + anonymous = true + } + tag := "" + if p.tok == scanner.String { + s := p.expect(scanner.String) + var err error + tag, err = strconv.Unquote(s) + if err != nil { + p.errorf("invalid struct tag %s: %s", s, err) + } + } + return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag +} + +// StructType = "struct" "{" [ FieldList ] "}" . +// FieldList = Field { ";" Field } . 
+// +func (p *parser) parseStructType(parent *types.Package) types.Type { + var fields []*types.Var + var tags []string + + p.expectKeyword("struct") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + fld, tag := p.parseField(parent) + if tag != "" && tags == nil { + tags = make([]string, i) + } + if tags != nil { + tags = append(tags, tag) + } + fields = append(fields, fld) + } + p.expect('}') + + return types.NewStruct(fields, tags) +} + +// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . +// +func (p *parser) parseParameter() (par *types.Var, isVariadic bool) { + _, name := p.parseName(nil, false) + // remove gc-specific parameter numbering + if i := strings.Index(name, "·"); i >= 0 { + name = name[:i] + } + if p.tok == '.' { + p.expectSpecial("...") + isVariadic = true + } + typ := p.parseType(nil) + if isVariadic { + typ = types.NewSlice(typ) + } + // ignore argument tag (e.g. "noescape") + if p.tok == scanner.String { + p.next() + } + // TODO(gri) should we provide a package? + par = types.NewVar(token.NoPos, nil, name, typ) + return +} + +// Parameters = "(" [ ParameterList ] ")" . +// ParameterList = { Parameter "," } Parameter . +// +func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) { + p.expect('(') + for p.tok != ')' && p.tok != scanner.EOF { + if len(list) > 0 { + p.expect(',') + } + par, variadic := p.parseParameter() + list = append(list, par) + if variadic { + if isVariadic { + p.error("... not on final argument") + } + isVariadic = true + } + } + p.expect(')') + + return +} + +// Signature = Parameters [ Result ] . +// Result = Type | Parameters . +// +func (p *parser) parseSignature(recv *types.Var) *types.Signature { + params, isVariadic := p.parseParameters() + + // optional result type + var results []*types.Var + if p.tok == '(' { + var variadic bool + results, variadic = p.parseParameters() + if variadic { + p.error("... not permitted on result type") + } + } + + return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic) +} + +// InterfaceType = "interface" "{" [ MethodList ] "}" . +// MethodList = Method { ";" Method } . +// Method = Name Signature . +// +// The methods of embedded interfaces are always "inlined" +// by the compiler and thus embedded interfaces are never +// visible in the export data. +// +func (p *parser) parseInterfaceType(parent *types.Package) types.Type { + var methods []*types.Func + + p.expectKeyword("interface") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + pkg, name := p.parseName(parent, true) + sig := p.parseSignature(nil) + methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig)) + } + p.expect('}') + + // Complete requires the type's embedded interfaces to be fully defined, + // but we do not define any + return types.NewInterface(methods, nil).Complete() +} + +// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . 
+// +func (p *parser) parseChanType(parent *types.Package) types.Type { + dir := types.SendRecv + if p.tok == scanner.Ident { + p.expectKeyword("chan") + if p.tok == '<' { + p.expectSpecial("<-") + dir = types.SendOnly + } + } else { + p.expectSpecial("<-") + p.expectKeyword("chan") + dir = types.RecvOnly + } + elem := p.parseType(parent) + return types.NewChan(dir, elem) +} + +// Type = +// BasicType | TypeName | ArrayType | SliceType | StructType | +// PointerType | FuncType | InterfaceType | MapType | ChanType | +// "(" Type ")" . +// +// BasicType = ident . +// TypeName = ExportedName . +// SliceType = "[" "]" Type . +// PointerType = "*" Type . +// FuncType = "func" Signature . +// +func (p *parser) parseType(parent *types.Package) types.Type { + switch p.tok { + case scanner.Ident: + switch p.lit { + default: + return p.parseBasicType() + case "struct": + return p.parseStructType(parent) + case "func": + // FuncType + p.next() + return p.parseSignature(nil) + case "interface": + return p.parseInterfaceType(parent) + case "map": + return p.parseMapType(parent) + case "chan": + return p.parseChanType(parent) + } + case '@': + // TypeName + pkg, name := p.parseExportedName() + return declTypeName(pkg, name).Type() + case '[': + p.next() // look ahead + if p.tok == ']' { + // SliceType + p.next() + return types.NewSlice(p.parseType(parent)) + } + return p.parseArrayType(parent) + case '*': + // PointerType + p.next() + return types.NewPointer(p.parseType(parent)) + case '<': + return p.parseChanType(parent) + case '(': + // "(" Type ")" + p.next() + typ := p.parseType(parent) + p.expect(')') + return typ + } + p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) + return nil +} + +// ---------------------------------------------------------------------------- +// Declarations + +// ImportDecl = "import" PackageName PackageId . +// +func (p *parser) parseImportDecl() { + p.expectKeyword("import") + name := p.parsePackageName() + p.getPkg(p.parsePackageId(), name) +} + +// int_lit = [ "+" | "-" ] { "0" ... "9" } . +// +func (p *parser) parseInt() string { + s := "" + switch p.tok { + case '-': + s = "-" + p.next() + case '+': + p.next() + } + return s + p.expect(scanner.Int) +} + +// number = int_lit [ "p" int_lit ] . +// +func (p *parser) parseNumber() (typ *types.Basic, val exact.Value) { + // mantissa + mant := exact.MakeFromLiteral(p.parseInt(), token.INT, 0) + if mant == nil { + panic("invalid mantissa") + } + + if p.lit == "p" { + // exponent (base 2) + p.next() + exp, err := strconv.ParseInt(p.parseInt(), 10, 0) + if err != nil { + p.error(err) + } + if exp < 0 { + denom := exact.MakeInt64(1) + denom = exact.Shift(denom, token.SHL, uint(-exp)) + typ = types.Typ[types.UntypedFloat] + val = exact.BinaryOp(mant, token.QUO, denom) + return + } + if exp > 0 { + mant = exact.Shift(mant, token.SHL, uint(exp)) + } + typ = types.Typ[types.UntypedFloat] + val = mant + return + } + + typ = types.Typ[types.UntypedInt] + val = mant + return +} + +// ConstDecl = "const" ExportedName [ Type ] "=" Literal . +// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit . +// bool_lit = "true" | "false" . +// complex_lit = "(" float_lit "+" float_lit "i" ")" . +// rune_lit = "(" int_lit "+" int_lit ")" . +// string_lit = `"` { unicode_char } `"` . 
+// +func (p *parser) parseConstDecl() { + p.expectKeyword("const") + pkg, name := p.parseExportedName() + + var typ0 types.Type + if p.tok != '=' { + // constant types are never structured - no need for parent type + typ0 = p.parseType(nil) + } + + p.expect('=') + var typ types.Type + var val exact.Value + switch p.tok { + case scanner.Ident: + // bool_lit + if p.lit != "true" && p.lit != "false" { + p.error("expected true or false") + } + typ = types.Typ[types.UntypedBool] + val = exact.MakeBool(p.lit == "true") + p.next() + + case '-', scanner.Int: + // int_lit + typ, val = p.parseNumber() + + case '(': + // complex_lit or rune_lit + p.next() + if p.tok == scanner.Char { + p.next() + p.expect('+') + typ = types.Typ[types.UntypedRune] + _, val = p.parseNumber() + p.expect(')') + break + } + _, re := p.parseNumber() + p.expect('+') + _, im := p.parseNumber() + p.expectKeyword("i") + p.expect(')') + typ = types.Typ[types.UntypedComplex] + val = exact.BinaryOp(re, token.ADD, exact.MakeImag(im)) + + case scanner.Char: + // rune_lit + typ = types.Typ[types.UntypedRune] + val = exact.MakeFromLiteral(p.lit, token.CHAR, 0) + p.next() + + case scanner.String: + // string_lit + typ = types.Typ[types.UntypedString] + val = exact.MakeFromLiteral(p.lit, token.STRING, 0) + p.next() + + default: + p.errorf("expected literal got %s", scanner.TokenString(p.tok)) + } + + if typ0 == nil { + typ0 = typ + } + + pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val)) +} + +// TypeDecl = "type" ExportedName Type . +// +func (p *parser) parseTypeDecl() { + p.expectKeyword("type") + pkg, name := p.parseExportedName() + obj := declTypeName(pkg, name) + + // The type object may have been imported before and thus already + // have a type associated with it. We still need to parse the type + // structure, but throw it away if the object already has a type. + // This ensures that all imports refer to the same type object for + // a given type declaration. + typ := p.parseType(pkg) + + if name := obj.Type().(*types.Named); name.Underlying() == nil { + name.SetUnderlying(typ) + } +} + +// VarDecl = "var" ExportedName Type . +// +func (p *parser) parseVarDecl() { + p.expectKeyword("var") + pkg, name := p.parseExportedName() + typ := p.parseType(pkg) + pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ)) +} + +// Func = Signature [ Body ] . +// Body = "{" ... "}" . +// +func (p *parser) parseFunc(recv *types.Var) *types.Signature { + sig := p.parseSignature(recv) + if p.tok == '{' { + p.next() + for i := 1; i > 0; p.next() { + switch p.tok { + case '{': + i++ + case '}': + i-- + } + } + } + return sig +} + +// MethodDecl = "func" Receiver Name Func . +// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" . +// +func (p *parser) parseMethodDecl() { + // "func" already consumed + p.expect('(') + recv, _ := p.parseParameter() // receiver + p.expect(')') + + // determine receiver base type object + base := deref(recv.Type()).(*types.Named) + + // parse method name, signature, and possibly inlined body + _, name := p.parseName(nil, false) + sig := p.parseFunc(recv) + + // methods always belong to the same package as the base type object + pkg := base.Obj().Pkg() + + // add method to type unless type was imported before + // and method exists already + // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small. + base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) +} + +// FuncDecl = "func" ExportedName Func . 
+// +func (p *parser) parseFuncDecl() { + // "func" already consumed + pkg, name := p.parseExportedName() + typ := p.parseFunc(nil) + pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ)) +} + +// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . +// +func (p *parser) parseDecl() { + if p.tok == scanner.Ident { + switch p.lit { + case "import": + p.parseImportDecl() + case "const": + p.parseConstDecl() + case "type": + p.parseTypeDecl() + case "var": + p.parseVarDecl() + case "func": + p.next() // look ahead + if p.tok == '(' { + p.parseMethodDecl() + } else { + p.parseFuncDecl() + } + } + } + p.expect('\n') +} + +// ---------------------------------------------------------------------------- +// Export + +// Export = "PackageClause { Decl } "$$" . +// PackageClause = "package" PackageName [ "safe" ] "\n" . +// +func (p *parser) parseExport() *types.Package { + p.expectKeyword("package") + name := p.parsePackageName() + if p.tok == scanner.Ident && p.lit == "safe" { + // package was compiled with -u option - ignore + p.next() + } + p.expect('\n') + + pkg := p.getPkg(p.id, name) + + for p.tok != '$' && p.tok != scanner.EOF { + p.parseDecl() + } + + if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' { + // don't call next()/expect() since reading past the + // export data may cause scanner errors (e.g. NUL chars) + p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch) + } + + if n := p.scanner.ErrorCount; n != 0 { + p.errorf("expected no scanner errors, got %d", n) + } + + // Record all locally referenced packages as imports. + var imports []*types.Package + for id, pkg2 := range p.localPkgs { + if pkg2.Name() == "" { + p.errorf("%s package has no name", id) + } + if id == p.id { + continue // avoid self-edge + } + imports = append(imports, pkg2) + } + sort.Sort(byPath(imports)) + pkg.SetImports(imports) + + // package was imported completely and without errors + pkg.MarkComplete() + + return pkg +} + +type byPath []*types.Package + +func (a byPath) Len() int { return len(a) } +func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go b/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go new file mode 100644 index 0000000..225ffee --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go @@ -0,0 +1,13 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.9 + +package gcimporter + +import "go/types" + +func isAlias(obj *types.TypeName) bool { + return false // there are no type aliases before Go 1.9 +} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go b/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go new file mode 100644 index 0000000..c2025d8 --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go @@ -0,0 +1,13 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build go1.9 + +package gcimporter + +import "go/types" + +func isAlias(obj *types.TypeName) bool { + return obj.IsAlias() +} diff --git a/vendor/golang.org/x/tools/go/loader/cgo.go b/vendor/golang.org/x/tools/go/loader/cgo.go new file mode 100644 index 0000000..72c6f50 --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/cgo.go @@ -0,0 +1,207 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +// This file handles cgo preprocessing of files containing `import "C"`. +// +// DESIGN +// +// The approach taken is to run the cgo processor on the package's +// CgoFiles and parse the output, faking the filenames of the +// resulting ASTs so that the synthetic file containing the C types is +// called "C" (e.g. "~/go/src/net/C") and the preprocessed files +// have their original names (e.g. "~/go/src/net/cgo_unix.go"), +// not the names of the actual temporary files. +// +// The advantage of this approach is its fidelity to 'go build'. The +// downside is that the token.Position.Offset for each AST node is +// incorrect, being an offset within the temporary file. Line numbers +// should still be correct because of the //line comments. +// +// The logic of this file is mostly plundered from the 'go build' +// tool, which also invokes the cgo preprocessor. +// +// +// REJECTED ALTERNATIVE +// +// An alternative approach that we explored is to extend go/types' +// Importer mechanism to provide the identity of the importing package +// so that each time `import "C"` appears it resolves to a different +// synthetic package containing just the objects needed in that case. +// The loader would invoke cgo but parse only the cgo_types.go file +// defining the package-level objects, discarding the other files +// resulting from preprocessing. +// +// The benefit of this approach would have been that source-level +// syntax information would correspond exactly to the original cgo +// file, with no preprocessing involved, making source tools like +// godoc, guru, and eg happy. However, the approach was rejected +// due to the additional complexity it would impose on go/types. (It +// made for a beautiful demo, though.) +// +// cgo files, despite their *.go extension, are not legal Go source +// files per the specification since they may refer to unexported +// members of package "C" such as C.int. Also, a function such as +// C.getpwent has in effect two types, one matching its C type and one +// which additionally returns (errno C.int). The cgo preprocessor +// uses name mangling to distinguish these two functions in the +// processed code, but go/types would need to duplicate this logic in +// its handling of function calls, analogous to the treatment of map +// lookups in which y=m[k] and y,ok=m[k] are both legal. + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" +) + +// processCgoFiles invokes the cgo preprocessor on bp.CgoFiles, parses +// the output and returns the resulting ASTs. 
+// +func processCgoFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) { + tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C") + if err != nil { + return nil, err + } + defer os.RemoveAll(tmpdir) + + pkgdir := bp.Dir + if DisplayPath != nil { + pkgdir = DisplayPath(pkgdir) + } + + cgoFiles, cgoDisplayFiles, err := runCgo(bp, pkgdir, tmpdir) + if err != nil { + return nil, err + } + var files []*ast.File + for i := range cgoFiles { + rd, err := os.Open(cgoFiles[i]) + if err != nil { + return nil, err + } + display := filepath.Join(bp.Dir, cgoDisplayFiles[i]) + f, err := parser.ParseFile(fset, display, rd, mode) + rd.Close() + if err != nil { + return nil, err + } + files = append(files, f) + } + return files, nil +} + +var cgoRe = regexp.MustCompile(`[/\\:]`) + +// runCgo invokes the cgo preprocessor on bp.CgoFiles and returns two +// lists of files: the resulting processed files (in temporary +// directory tmpdir) and the corresponding names of the unprocessed files. +// +// runCgo is adapted from (*builder).cgo in +// $GOROOT/src/cmd/go/build.go, but these features are unsupported: +// Objective C, CGOPKGPATH, CGO_FLAGS. +// +func runCgo(bp *build.Package, pkgdir, tmpdir string) (files, displayFiles []string, err error) { + cgoCPPFLAGS, _, _, _ := cflags(bp, true) + _, cgoexeCFLAGS, _, _ := cflags(bp, false) + + if len(bp.CgoPkgConfig) > 0 { + pcCFLAGS, err := pkgConfigFlags(bp) + if err != nil { + return nil, nil, err + } + cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...) + } + + // Allows including _cgo_export.h from .[ch] files in the package. + cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir) + + // _cgo_gotypes.go (displayed "C") contains the type definitions. + files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go")) + displayFiles = append(displayFiles, "C") + for _, fn := range bp.CgoFiles { + // "foo.cgo1.go" (displayed "foo.go") is the processed Go source. + f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_") + files = append(files, filepath.Join(tmpdir, f+"cgo1.go")) + displayFiles = append(displayFiles, fn) + } + + var cgoflags []string + if bp.Goroot && bp.ImportPath == "runtime/cgo" { + cgoflags = append(cgoflags, "-import_runtime_cgo=false") + } + if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" { + cgoflags = append(cgoflags, "-import_syscall=false") + } + + args := stringList( + "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--", + cgoCPPFLAGS, cgoexeCFLAGS, bp.CgoFiles, + ) + if false { + log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir) + } + cmd := exec.Command(args[0], args[1:]...) + cmd.Dir = pkgdir + cmd.Stdout = os.Stderr + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err) + } + + return files, displayFiles, nil +} + +// -- unmodified from 'go build' --------------------------------------- + +// Return the flags to use when invoking the C or C++ compilers, or cgo. 
+func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) { + var defaults string + if def { + defaults = "-g -O2" + } + + cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS) + cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS) + cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS) + ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS) + return +} + +// envList returns the value of the given environment variable broken +// into fields, using the default value when the variable is empty. +func envList(key, def string) []string { + v := os.Getenv(key) + if v == "" { + v = def + } + return strings.Fields(v) +} + +// stringList's arguments should be a sequence of string or []string values. +// stringList flattens them into a single []string. +func stringList(args ...interface{}) []string { + var x []string + for _, arg := range args { + switch arg := arg.(type) { + case []string: + x = append(x, arg...) + case string: + x = append(x, arg) + default: + panic("stringList: invalid argument") + } + } + return x +} diff --git a/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go new file mode 100644 index 0000000..de57422 --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go @@ -0,0 +1,39 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +import ( + "errors" + "fmt" + "go/build" + "os/exec" + "strings" +) + +// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints. +func pkgConfig(mode string, pkgs []string) (flags []string, err error) { + cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...) + out, err := cmd.CombinedOutput() + if err != nil { + s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err) + if len(out) > 0 { + s = fmt.Sprintf("%s: %s", s, out) + } + return nil, errors.New(s) + } + if len(out) > 0 { + flags = strings.Fields(string(out)) + } + return +} + +// pkgConfigFlags calls pkg-config if needed and returns the cflags +// needed to build the package. +func pkgConfigFlags(p *build.Package) (cflags []string, err error) { + if len(p.CgoPkgConfig) == 0 { + return nil, nil + } + return pkgConfig("--cflags", p.CgoPkgConfig) +} diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go new file mode 100644 index 0000000..9b51c9e --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/doc.go @@ -0,0 +1,205 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package loader loads a complete Go program from source code, parsing +// and type-checking the initial packages plus their transitive closure +// of dependencies. The ASTs and the derived facts are retained for +// later use. +// +// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. +// +// The package defines two primary types: Config, which specifies a +// set of initial packages to load and various other options; and +// Program, which is the result of successfully loading the packages +// specified by a configuration. +// +// The configuration can be set directly, but *Config provides various +// convenience methods to simplify the common cases, each of which can +// be called any number of times. 
Finally, these are followed by a +// call to Load() to actually load and type-check the program. +// +// var conf loader.Config +// +// // Use the command-line arguments to specify +// // a set of initial packages to load from source. +// // See FromArgsUsage for help. +// rest, err := conf.FromArgs(os.Args[1:], wantTests) +// +// // Parse the specified files and create an ad hoc package with path "foo". +// // All files must have the same 'package' declaration. +// conf.CreateFromFilenames("foo", "foo.go", "bar.go") +// +// // Create an ad hoc package with path "foo" from +// // the specified already-parsed files. +// // All ASTs must have the same 'package' declaration. +// conf.CreateFromFiles("foo", parsedFiles) +// +// // Add "runtime" to the set of packages to be loaded. +// conf.Import("runtime") +// +// // Adds "fmt" and "fmt_test" to the set of packages +// // to be loaded. "fmt" will include *_test.go files. +// conf.ImportWithTests("fmt") +// +// // Finally, load all the packages specified by the configuration. +// prog, err := conf.Load() +// +// See examples_test.go for examples of API usage. +// +// +// CONCEPTS AND TERMINOLOGY +// +// The WORKSPACE is the set of packages accessible to the loader. The +// workspace is defined by Config.Build, a *build.Context. The +// default context treats subdirectories of $GOROOT and $GOPATH as +// packages, but this behavior may be overridden. +// +// An AD HOC package is one specified as a set of source files on the +// command line. In the simplest case, it may consist of a single file +// such as $GOROOT/src/net/http/triv.go. +// +// EXTERNAL TEST packages are those comprised of a set of *_test.go +// files all with the same 'package foo_test' declaration, all in the +// same directory. (go/build.Package calls these files XTestFiles.) +// +// An IMPORTABLE package is one that can be referred to by some import +// spec. Every importable package is uniquely identified by its +// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json", +// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path +// typically denotes a subdirectory of the workspace. +// +// An import declaration uses an IMPORT PATH to refer to a package. +// Most import declarations use the package path as the import path. +// +// Due to VENDORING (https://golang.org/s/go15vendor), the +// interpretation of an import path may depend on the directory in which +// it appears. To resolve an import path to a package path, go/build +// must search the enclosing directories for a subdirectory named +// "vendor". +// +// ad hoc packages and external test packages are NON-IMPORTABLE. The +// path of an ad hoc package is inferred from the package +// declarations of its files and is therefore not a unique package key. +// For example, Config.CreatePkgs may specify two initial ad hoc +// packages, both with path "main". +// +// An AUGMENTED package is an importable package P plus all the +// *_test.go files with same 'package foo' declaration as P. +// (go/build.Package calls these files TestFiles.) +// +// The INITIAL packages are those specified in the configuration. A +// DEPENDENCY is a package loaded to satisfy an import in an initial +// package or another dependency. 
+// +package loader + +// IMPLEMENTATION NOTES +// +// 'go test', in-package test files, and import cycles +// --------------------------------------------------- +// +// An external test package may depend upon members of the augmented +// package that are not in the unaugmented package, such as functions +// that expose internals. (See bufio/export_test.go for an example.) +// So, the loader must ensure that for each external test package +// it loads, it also augments the corresponding non-test package. +// +// The import graph over n unaugmented packages must be acyclic; the +// import graph over n-1 unaugmented packages plus one augmented +// package must also be acyclic. ('go test' relies on this.) But the +// import graph over n augmented packages may contain cycles. +// +// First, all the (unaugmented) non-test packages and their +// dependencies are imported in the usual way; the loader reports an +// error if it detects an import cycle. +// +// Then, each package P for which testing is desired is augmented by +// the list P' of its in-package test files, by calling +// (*types.Checker).Files. This arrangement ensures that P' may +// reference definitions within P, but P may not reference definitions +// within P'. Furthermore, P' may import any other package, including +// ones that depend upon P, without an import cycle error. +// +// Consider two packages A and B, both of which have lists of +// in-package test files we'll call A' and B', and which have the +// following import graph edges: +// B imports A +// B' imports A +// A' imports B +// This last edge would be expected to create an error were it not +// for the special type-checking discipline above. +// Cycles of size greater than two are possible. For example: +// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil" +// io/ioutil/tempfile_test.go (package ioutil) imports "regexp" +// regexp/exec_test.go (package regexp) imports "compress/bzip2" +// +// +// Concurrency +// ----------- +// +// Let us define the import dependency graph as follows. Each node is a +// list of files passed to (Checker).Files at once. Many of these lists +// are the production code of an importable Go package, so those nodes +// are labelled by the package's path. The remaining nodes are +// ad hoc packages and lists of in-package *_test.go files that augment +// an importable package; those nodes have no label. +// +// The edges of the graph represent import statements appearing within a +// file. An edge connects a node (a list of files) to the node it +// imports, which is importable and thus always labelled. +// +// Loading is controlled by this dependency graph. +// +// To reduce I/O latency, we start loading a package's dependencies +// asynchronously as soon as we've parsed its files and enumerated its +// imports (scanImports). This performs a preorder traversal of the +// import dependency graph. +// +// To exploit hardware parallelism, we type-check unrelated packages in +// parallel, where "unrelated" means not ordered by the partial order of +// the import dependency graph. +// +// We use a concurrency-safe non-blocking cache (importer.imported) to +// record the results of type-checking, whether success or failure. An +// entry is created in this cache by startLoad the first time the +// package is imported. The first goroutine to request an entry becomes +// responsible for completing the task and broadcasting completion to +// subsequent requestors, which block until then. 
+// +// Type checking occurs in (parallel) postorder: we cannot type-check a +// set of files until we have loaded and type-checked all of their +// immediate dependencies (and thus all of their transitive +// dependencies). If the input were guaranteed free of import cycles, +// this would be trivial: we could simply wait for completion of the +// dependencies and then invoke the typechecker. +// +// But as we saw in the 'go test' section above, some cycles in the +// import graph over packages are actually legal, so long as the +// cycle-forming edge originates in the in-package test files that +// augment the package. This explains why the nodes of the import +// dependency graph are not packages, but lists of files: the unlabelled +// nodes avoid the cycles. Consider packages A and B where B imports A +// and A's in-package tests AT import B. The naively constructed import +// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but +// the graph over lists of files is AT --> B --> A, where AT is an +// unlabelled node. +// +// Awaiting completion of the dependencies in a cyclic graph would +// deadlock, so we must materialize the import dependency graph (as +// importer.graph) and check whether each import edge forms a cycle. If +// x imports y, and the graph already contains a path from y to x, then +// there is an import cycle, in which case the processing of x must not +// wait for the completion of processing of y. +// +// When the type-checker makes a callback (doImport) to the loader for a +// given import edge, there are two possible cases. In the normal case, +// the dependency has already been completely type-checked; doImport +// does a cache lookup and returns it. In the cyclic case, the entry in +// the cache is still necessarily incomplete, indicating a cycle. We +// perform the cycle check again to obtain the error message, and return +// the error. +// +// The result of using concurrency is about a 2.5x speedup for stdlib_test. + +// TODO(adonovan): overhaul the package documentation. diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go new file mode 100644 index 0000000..de756f7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/loader.go @@ -0,0 +1,1077 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +// See doc.go for package documentation and implementation notes. + +import ( + "errors" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "go/types" + "os" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "golang.org/x/tools/go/ast/astutil" +) + +var ignoreVendor build.ImportMode + +const trace = false // show timing info for type-checking + +// Config specifies the configuration for loading a whole program from +// Go source code. +// The zero value for Config is a ready-to-use default configuration. +type Config struct { + // Fset is the file set for the parser to use when loading the + // program. If nil, it may be lazily initialized by any + // method of Config. + Fset *token.FileSet + + // ParserMode specifies the mode to be used by the parser when + // loading source packages. + ParserMode parser.Mode + + // TypeChecker contains options relating to the type checker. + // + // The supplied IgnoreFuncBodies is not used; the effective + // value comes from the TypeCheckFuncBodies func below. 
+ // The supplied Import function is not used either. + TypeChecker types.Config + + // TypeCheckFuncBodies is a predicate over package paths. + // A package for which the predicate is false will + // have its package-level declarations type checked, but not + // its function bodies; this can be used to quickly load + // dependencies from source. If nil, all func bodies are type + // checked. + TypeCheckFuncBodies func(path string) bool + + // If Build is non-nil, it is used to locate source packages. + // Otherwise &build.Default is used. + // + // By default, cgo is invoked to preprocess Go files that + // import the fake package "C". This behaviour can be + // disabled by setting CGO_ENABLED=0 in the environment prior + // to startup, or by setting Build.CgoEnabled=false. + Build *build.Context + + // The current directory, used for resolving relative package + // references such as "./go/loader". If empty, os.Getwd will be + // used instead. + Cwd string + + // If DisplayPath is non-nil, it is used to transform each + // file name obtained from Build.Import(). This can be used + // to prevent a virtualized build.Config's file names from + // leaking into the user interface. + DisplayPath func(path string) string + + // If AllowErrors is true, Load will return a Program even + // if some of the its packages contained I/O, parser or type + // errors; such errors are accessible via PackageInfo.Errors. If + // false, Load will fail if any package had an error. + AllowErrors bool + + // CreatePkgs specifies a list of non-importable initial + // packages to create. The resulting packages will appear in + // the corresponding elements of the Program.Created slice. + CreatePkgs []PkgSpec + + // ImportPkgs specifies a set of initial packages to load. + // The map keys are package paths. + // + // The map value indicates whether to load tests. If true, Load + // will add and type-check two lists of files to the package: + // non-test files followed by in-package *_test.go files. In + // addition, it will append the external test package (if any) + // to Program.Created. + ImportPkgs map[string]bool + + // FindPackage is called during Load to create the build.Package + // for a given import path from a given directory. + // If FindPackage is nil, (*build.Context).Import is used. + // A client may use this hook to adapt to a proprietary build + // system that does not follow the "go build" layout + // conventions, for example. + // + // It must be safe to call concurrently from multiple goroutines. + FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error) + + // AfterTypeCheck is called immediately after a list of files + // has been type-checked and appended to info.Files. + // + // This optional hook function is the earliest opportunity for + // the client to observe the output of the type checker, + // which may be useful to reduce analysis latency when loading + // a large program. + // + // The function is permitted to modify info.Info, for instance + // to clear data structures that are no longer needed, which can + // dramatically reduce peak memory consumption. + // + // The function may be called twice for the same PackageInfo: + // once for the files of the package and again for the + // in-package test files. + // + // It must be safe to call concurrently from multiple goroutines. + AfterTypeCheck func(info *PackageInfo, files []*ast.File) +} + +// A PkgSpec specifies a non-importable package to be created by Load. 
+// Files are processed first, but typically only one of Files and +// Filenames is provided. The path needn't be globally unique. +// +// For vendoring purposes, the package's directory is the one that +// contains the first file. +type PkgSpec struct { + Path string // package path ("" => use package declaration) + Files []*ast.File // ASTs of already-parsed files + Filenames []string // names of files to be parsed +} + +// A Program is a Go program loaded from source as specified by a Config. +type Program struct { + Fset *token.FileSet // the file set for this program + + // Created[i] contains the initial package whose ASTs or + // filenames were supplied by Config.CreatePkgs[i], followed by + // the external test package, if any, of each package in + // Config.ImportPkgs ordered by ImportPath. + // + // NOTE: these files must not import "C". Cgo preprocessing is + // only performed on imported packages, not ad hoc packages. + // + // TODO(adonovan): we need to copy and adapt the logic of + // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make + // Config.Import and Config.Create methods return the same kind + // of entity, essentially a build.Package. + // Perhaps we can even reuse that type directly. + Created []*PackageInfo + + // Imported contains the initially imported packages, + // as specified by Config.ImportPkgs. + Imported map[string]*PackageInfo + + // AllPackages contains the PackageInfo of every package + // encountered by Load: all initial packages and all + // dependencies, including incomplete ones. + AllPackages map[*types.Package]*PackageInfo + + // importMap is the canonical mapping of package paths to + // packages. It contains all Imported initial packages, but not + // Created ones, and all imported dependencies. + importMap map[string]*types.Package +} + +// PackageInfo holds the ASTs and facts derived by the type-checker +// for a single package. +// +// Not mutated once exposed via the API. +// +type PackageInfo struct { + Pkg *types.Package + Importable bool // true if 'import "Pkg.Path()"' would resolve to this + TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors + Files []*ast.File // syntax trees for the package's files + Errors []error // non-nil if the package had errors + types.Info // type-checker deductions. + dir string // package directory + + checker *types.Checker // transient type-checker state + errorFunc func(error) +} + +func (info *PackageInfo) String() string { return info.Pkg.Path() } + +func (info *PackageInfo) appendError(err error) { + if info.errorFunc != nil { + info.errorFunc(err) + } else { + fmt.Fprintln(os.Stderr, err) + } + info.Errors = append(info.Errors, err) +} + +func (conf *Config) fset() *token.FileSet { + if conf.Fset == nil { + conf.Fset = token.NewFileSet() + } + return conf.Fset +} + +// ParseFile is a convenience function (intended for testing) that invokes +// the parser using the Config's FileSet, which is initialized if nil. +// +// src specifies the parser input as a string, []byte, or io.Reader, and +// filename is its apparent name. If src is nil, the contents of +// filename are read from the file system. +// +func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) { + // TODO(adonovan): use conf.build() etc like parseFiles does. + return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode) +} + +// FromArgsUsage is a partial usage message that applications calling +// FromArgs may wish to include in their -help output. 
+const FromArgsUsage = ` + is a list of arguments denoting a set of initial packages. +It may take one of two forms: + +1. A list of *.go source files. + + All of the specified files are loaded, parsed and type-checked + as a single package. All the files must belong to the same directory. + +2. A list of import paths, each denoting a package. + + The package's directory is found relative to the $GOROOT and + $GOPATH using similar logic to 'go build', and the *.go files in + that directory are loaded, parsed and type-checked as a single + package. + + In addition, all *_test.go files in the directory are then loaded + and parsed. Those files whose package declaration equals that of + the non-*_test.go files are included in the primary package. Test + files whose package declaration ends with "_test" are type-checked + as another package, the 'external' test package, so that a single + import path may denote two packages. (Whether this behaviour is + enabled is tool-specific, and may depend on additional flags.) + +A '--' argument terminates the list of packages. +` + +// FromArgs interprets args as a set of initial packages to load from +// source and updates the configuration. It returns the list of +// unconsumed arguments. +// +// It is intended for use in command-line interfaces that require a +// set of initial packages to be specified; see FromArgsUsage message +// for details. +// +// Only superficial errors are reported at this stage; errors dependent +// on I/O are detected during Load. +// +func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) { + var rest []string + for i, arg := range args { + if arg == "--" { + rest = args[i+1:] + args = args[:i] + break // consume "--" and return the remaining args + } + } + + if len(args) > 0 && strings.HasSuffix(args[0], ".go") { + // Assume args is a list of a *.go files + // denoting a single ad hoc package. + for _, arg := range args { + if !strings.HasSuffix(arg, ".go") { + return nil, fmt.Errorf("named files must be .go files: %s", arg) + } + } + conf.CreateFromFilenames("", args...) + } else { + // Assume args are directories each denoting a + // package and (perhaps) an external test, iff xtest. + for _, arg := range args { + if xtest { + conf.ImportWithTests(arg) + } else { + conf.Import(arg) + } + } + } + + return rest, nil +} + +// CreateFromFilenames is a convenience function that adds +// a conf.CreatePkgs entry to create a package of the specified *.go +// files. +// +func (conf *Config) CreateFromFilenames(path string, filenames ...string) { + conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames}) +} + +// CreateFromFiles is a convenience function that adds a conf.CreatePkgs +// entry to create package of the specified path and parsed files. +// +func (conf *Config) CreateFromFiles(path string, files ...*ast.File) { + conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files}) +} + +// ImportWithTests is a convenience function that adds path to +// ImportPkgs, the set of initial source packages located relative to +// $GOPATH. The package will be augmented by any *_test.go files in +// its directory that contain a "package x" (not "package x_test") +// declaration. +// +// In addition, if any *_test.go files contain a "package x_test" +// declaration, an additional package comprising just those files will +// be added to CreatePkgs. 
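As a rough illustration of the argument handling documented above, here is a small sketch; it is not part of the vendored file and the argument values are arbitrary.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config

	// Import paths before "--" become initial packages; the remainder is returned.
	rest, err := conf.FromArgs([]string{"fmt", "net/http", "--", "-v"}, false)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(rest) // [-v]; "fmt" and "net/http" are now in conf.ImportPkgs

	// A list of *.go files instead denotes a single ad hoc package.
	var adhoc loader.Config
	if _, err := adhoc.FromArgs([]string{"a.go", "b.go"}, false); err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(adhoc.CreatePkgs)) // 1
}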
+// +func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) } + +// Import is a convenience function that adds path to ImportPkgs, the +// set of initial packages that will be imported from source. +// +func (conf *Config) Import(path string) { conf.addImport(path, false) } + +func (conf *Config) addImport(path string, tests bool) { + if path == "C" { + return // ignore; not a real package + } + if conf.ImportPkgs == nil { + conf.ImportPkgs = make(map[string]bool) + } + conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests +} + +// PathEnclosingInterval returns the PackageInfo and ast.Node that +// contain source interval [start, end), and all the node's ancestors +// up to the AST root. It searches all ast.Files of all packages in prog. +// exact is defined as for astutil.PathEnclosingInterval. +// +// The zero value is returned if not found. +// +func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { + for _, info := range prog.AllPackages { + for _, f := range info.Files { + if f.Pos() == token.NoPos { + // This can happen if the parser saw + // too many errors and bailed out. + // (Use parser.AllErrors to prevent that.) + continue + } + if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { + continue + } + if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { + return info, path, exact + } + } + } + return nil, nil, false +} + +// InitialPackages returns a new slice containing the set of initial +// packages (Created + Imported) in unspecified order. +// +func (prog *Program) InitialPackages() []*PackageInfo { + infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported)) + infos = append(infos, prog.Created...) + for _, info := range prog.Imported { + infos = append(infos, info) + } + return infos +} + +// Package returns the ASTs and results of type checking for the +// specified package. +func (prog *Program) Package(path string) *PackageInfo { + if info, ok := prog.AllPackages[prog.importMap[path]]; ok { + return info + } + for _, info := range prog.Created { + if path == info.Pkg.Path() { + return info + } + } + return nil +} + +// ---------- Implementation ---------- + +// importer holds the working state of the algorithm. +type importer struct { + conf *Config // the client configuration + start time.Time // for logging + + progMu sync.Mutex // guards prog + prog *Program // the resulting program + + // findpkg is a memoization of FindPackage. + findpkgMu sync.Mutex // guards findpkg + findpkg map[findpkgKey]*findpkgValue + + importedMu sync.Mutex // guards imported + imported map[string]*importInfo // all imported packages (incl. failures) by import path + + // import dependency graph: graph[x][y] => x imports y + // + // Since non-importable packages cannot be cyclic, we ignore + // their imports, thus we only need the subgraph over importable + // packages. Nodes are identified by their import paths. + graphMu sync.Mutex + graph map[string]map[string]bool +} + +type findpkgKey struct { + importPath string + fromDir string + mode build.ImportMode +} + +type findpkgValue struct { + ready chan struct{} // closed to broadcast readiness + bp *build.Package + err error +} + +// importInfo tracks the success or failure of a single import. +// +// Upon completion, exactly one of info and err is non-nil: +// info on successful creation of a package, err otherwise. +// A successful package may still contain type errors. 
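To ground the Config and Program API documented above, here is a minimal usage sketch. It is not part of the vendored file, the chosen packages are arbitrary, and the TypeCheckFuncBodies policy is only an illustration.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config          // the zero value is a usable default
	conf.Import("fmt")              // initial package, loaded from source
	conf.ImportWithTests("strings") // also augmented by its in-package *_test.go files

	// Hypothetical policy: type-check function bodies only for initial packages.
	conf.TypeCheckFuncBodies = func(path string) bool {
		_, initial := conf.ImportPkgs[path]
		return initial
	}

	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	for _, info := range prog.InitialPackages() {
		fmt.Printf("%s: %d files, %d errors\n",
			info.Pkg.Path(), len(info.Files), len(info.Errors))
	}
}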
+// +type importInfo struct { + path string // import path + info *PackageInfo // results of typechecking (including errors) + complete chan struct{} // closed to broadcast that info is set. +} + +// awaitCompletion blocks until ii is complete, +// i.e. the info field is safe to inspect. +func (ii *importInfo) awaitCompletion() { + <-ii.complete // wait for close +} + +// Complete marks ii as complete. +// Its info and err fields will not be subsequently updated. +func (ii *importInfo) Complete(info *PackageInfo) { + if info == nil { + panic("info == nil") + } + ii.info = info + close(ii.complete) +} + +type importError struct { + path string // import path + err error // reason for failure to create a package +} + +// Load creates the initial packages specified by conf.{Create,Import}Pkgs, +// loading their dependencies packages as needed. +// +// On success, Load returns a Program containing a PackageInfo for +// each package. On failure, it returns an error. +// +// If AllowErrors is true, Load will return a Program even if some +// packages contained I/O, parser or type errors, or if dependencies +// were missing. (Such errors are accessible via PackageInfo.Errors. If +// false, Load will fail if any package had an error. +// +// It is an error if no packages were loaded. +// +func (conf *Config) Load() (*Program, error) { + // Create a simple default error handler for parse/type errors. + if conf.TypeChecker.Error == nil { + conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) } + } + + // Set default working directory for relative package references. + if conf.Cwd == "" { + var err error + conf.Cwd, err = os.Getwd() + if err != nil { + return nil, err + } + } + + // Install default FindPackage hook using go/build logic. + if conf.FindPackage == nil { + conf.FindPackage = (*build.Context).Import + } + + prog := &Program{ + Fset: conf.fset(), + Imported: make(map[string]*PackageInfo), + importMap: make(map[string]*types.Package), + AllPackages: make(map[*types.Package]*PackageInfo), + } + + imp := importer{ + conf: conf, + prog: prog, + findpkg: make(map[findpkgKey]*findpkgValue), + imported: make(map[string]*importInfo), + start: time.Now(), + graph: make(map[string]map[string]bool), + } + + // -- loading proper (concurrent phase) -------------------------------- + + var errpkgs []string // packages that contained errors + + // Load the initially imported packages and their dependencies, + // in parallel. + // No vendor check on packages imported from the command line. + infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor) + for _, ie := range importErrors { + conf.TypeChecker.Error(ie.err) // failed to create package + errpkgs = append(errpkgs, ie.path) + } + for _, info := range infos { + prog.Imported[info.Pkg.Path()] = info + } + + // Augment the designated initial packages by their tests. + // Dependencies are loaded in parallel. + var xtestPkgs []*build.Package + for importPath, augment := range conf.ImportPkgs { + if !augment { + continue + } + + // No vendor check on packages imported from command line. + bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor) + if err != nil { + // Package not found, or can't even parse package declaration. + // Already reported by previous loop; ignore it. + continue + } + + // Needs external test package? + if len(bp.XTestGoFiles) > 0 { + xtestPkgs = append(xtestPkgs, bp) + } + + // Consult the cache using the canonical package path. 
+ path := bp.ImportPath + imp.importedMu.Lock() // (unnecessary, we're sequential here) + ii, ok := imp.imported[path] + // Paranoid checks added due to issue #11012. + if !ok { + // Unreachable. + // The previous loop called importAll and thus + // startLoad for each path in ImportPkgs, which + // populates imp.imported[path] with a non-zero value. + panic(fmt.Sprintf("imported[%q] not found", path)) + } + if ii == nil { + // Unreachable. + // The ii values in this loop are the same as in + // the previous loop, which enforced the invariant + // that at least one of ii.err and ii.info is non-nil. + panic(fmt.Sprintf("imported[%q] == nil", path)) + } + if ii.info == nil { + // Unreachable. + // awaitCompletion has the postcondition + // ii.info != nil. + panic(fmt.Sprintf("imported[%q].info = nil", path)) + } + info := ii.info + imp.importedMu.Unlock() + + // Parse the in-package test files. + files, errs := imp.conf.parsePackageFiles(bp, 't') + for _, err := range errs { + info.appendError(err) + } + + // The test files augmenting package P cannot be imported, + // but may import packages that import P, + // so we must disable the cycle check. + imp.addFiles(info, files, false) + } + + createPkg := func(path, dir string, files []*ast.File, errs []error) { + info := imp.newPackageInfo(path, dir) + for _, err := range errs { + info.appendError(err) + } + + // Ad hoc packages are non-importable, + // so no cycle check is needed. + // addFiles loads dependencies in parallel. + imp.addFiles(info, files, false) + prog.Created = append(prog.Created, info) + } + + // Create packages specified by conf.CreatePkgs. + for _, cp := range conf.CreatePkgs { + files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode) + files = append(files, cp.Files...) + + path := cp.Path + if path == "" { + if len(files) > 0 { + path = files[0].Name.Name + } else { + path = "(unnamed)" + } + } + + dir := conf.Cwd + if len(files) > 0 && files[0].Pos().IsValid() { + dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name()) + } + createPkg(path, dir, files, errs) + } + + // Create external test packages. + sort.Sort(byImportPath(xtestPkgs)) + for _, bp := range xtestPkgs { + files, errs := imp.conf.parsePackageFiles(bp, 'x') + createPkg(bp.ImportPath+"_test", bp.Dir, files, errs) + } + + // -- finishing up (sequential) ---------------------------------------- + + if len(prog.Imported)+len(prog.Created) == 0 { + return nil, errors.New("no initial packages were loaded") + } + + // Create infos for indirectly imported packages. + // e.g. incomplete packages without syntax, loaded from export data. + for _, obj := range prog.importMap { + info := prog.AllPackages[obj] + if info == nil { + prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true} + } else { + // finished + info.checker = nil + info.errorFunc = nil + } + } + + if !conf.AllowErrors { + // Report errors in indirectly imported packages. 
+ for _, info := range prog.AllPackages { + if len(info.Errors) > 0 { + errpkgs = append(errpkgs, info.Pkg.Path()) + } + } + if errpkgs != nil { + var more string + if len(errpkgs) > 3 { + more = fmt.Sprintf(" and %d more", len(errpkgs)-3) + errpkgs = errpkgs[:3] + } + return nil, fmt.Errorf("couldn't load packages due to errors: %s%s", + strings.Join(errpkgs, ", "), more) + } + } + + markErrorFreePackages(prog.AllPackages) + + return prog, nil +} + +type byImportPath []*build.Package + +func (b byImportPath) Len() int { return len(b) } +func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath } +func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] } + +// markErrorFreePackages sets the TransitivelyErrorFree flag on all +// applicable packages. +func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) { + // Build the transpose of the import graph. + importedBy := make(map[*types.Package]map[*types.Package]bool) + for P := range allPackages { + for _, Q := range P.Imports() { + clients, ok := importedBy[Q] + if !ok { + clients = make(map[*types.Package]bool) + importedBy[Q] = clients + } + clients[P] = true + } + } + + // Find all packages reachable from some error package. + reachable := make(map[*types.Package]bool) + var visit func(*types.Package) + visit = func(p *types.Package) { + if !reachable[p] { + reachable[p] = true + for q := range importedBy[p] { + visit(q) + } + } + } + for _, info := range allPackages { + if len(info.Errors) > 0 { + visit(info.Pkg) + } + } + + // Mark the others as "transitively error-free". + for _, info := range allPackages { + if !reachable[info.Pkg] { + info.TransitivelyErrorFree = true + } + } +} + +// build returns the effective build context. +func (conf *Config) build() *build.Context { + if conf.Build != nil { + return conf.Build + } + return &build.Default +} + +// parsePackageFiles enumerates the files belonging to package path, +// then loads, parses and returns them, plus a list of I/O or parse +// errors that were encountered. +// +// 'which' indicates which files to include: +// 'g': include non-test *.go source files (GoFiles + processed CgoFiles) +// 't': include in-package *_test.go source files (TestGoFiles) +// 'x': include external *_test.go source files. (XTestGoFiles) +// +func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) { + if bp.ImportPath == "unsafe" { + return nil, nil + } + var filenames []string + switch which { + case 'g': + filenames = bp.GoFiles + case 't': + filenames = bp.TestGoFiles + case 'x': + filenames = bp.XTestGoFiles + default: + panic(which) + } + + files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode) + + // Preprocess CgoFiles and parse the outputs (sequentially). + if which == 'g' && bp.CgoFiles != nil { + cgofiles, err := processCgoFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode) + if err != nil { + errs = append(errs, err) + } else { + files = append(files, cgofiles...) + } + } + + return files, errs +} + +// doImport imports the package denoted by path. +// It implements the types.Importer signature. +// +// It returns an error if a package could not be created +// (e.g. go/build or parse error), but type errors are reported via +// the types.Config.Error callback (the first of which is also saved +// in the package's PackageInfo). +// +// Idempotent. 
+// +func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) { + if to == "C" { + // This should be unreachable, but ad hoc packages are + // not currently subject to cgo preprocessing. + // See https://github.com/golang/go/issues/11627. + return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`, + from.Pkg.Path()) + } + + bp, err := imp.findPackage(to, from.dir, 0) + if err != nil { + return nil, err + } + + // The standard unsafe package is handled specially, + // and has no PackageInfo. + if bp.ImportPath == "unsafe" { + return types.Unsafe, nil + } + + // Look for the package in the cache using its canonical path. + path := bp.ImportPath + imp.importedMu.Lock() + ii := imp.imported[path] + imp.importedMu.Unlock() + if ii == nil { + panic("internal error: unexpected import: " + path) + } + if ii.info != nil { + return ii.info.Pkg, nil + } + + // Import of incomplete package: this indicates a cycle. + fromPath := from.Pkg.Path() + if cycle := imp.findPath(path, fromPath); cycle != nil { + cycle = append([]string{fromPath}, cycle...) + return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> ")) + } + + panic("internal error: import of incomplete (yet acyclic) package: " + fromPath) +} + +// findPackage locates the package denoted by the importPath in the +// specified directory. +func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) { + // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7) + // to avoid holding the lock around FindPackage. + key := findpkgKey{importPath, fromDir, mode} + imp.findpkgMu.Lock() + v, ok := imp.findpkg[key] + if ok { + // cache hit + imp.findpkgMu.Unlock() + + <-v.ready // wait for entry to become ready + } else { + // Cache miss: this goroutine becomes responsible for + // populating the map entry and broadcasting its readiness. + v = &findpkgValue{ready: make(chan struct{})} + imp.findpkg[key] = v + imp.findpkgMu.Unlock() + + ioLimit <- true + v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode) + <-ioLimit + + if _, ok := v.err.(*build.NoGoError); ok { + v.err = nil // empty directory is not an error + } + + close(v.ready) // broadcast ready condition + } + return v.bp, v.err +} + +// importAll loads, parses, and type-checks the specified packages in +// parallel and returns their completed importInfos in unspecified order. +// +// fromPath is the package path of the importing package, if it is +// importable, "" otherwise. It is used for cycle detection. +// +// fromDir is the directory containing the import declaration that +// caused these imports. +// +func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) { + // TODO(adonovan): opt: do the loop in parallel once + // findPackage is non-blocking. + var pending []*importInfo + for importPath := range imports { + bp, err := imp.findPackage(importPath, fromDir, mode) + if err != nil { + errors = append(errors, importError{ + path: importPath, + err: err, + }) + continue + } + pending = append(pending, imp.startLoad(bp)) + } + + if fromPath != "" { + // We're loading a set of imports. + // + // We must record graph edges from the importing package + // to its dependencies, and check for cycles. 
+ imp.graphMu.Lock() + deps, ok := imp.graph[fromPath] + if !ok { + deps = make(map[string]bool) + imp.graph[fromPath] = deps + } + for _, ii := range pending { + deps[ii.path] = true + } + imp.graphMu.Unlock() + } + + for _, ii := range pending { + if fromPath != "" { + if cycle := imp.findPath(ii.path, fromPath); cycle != nil { + // Cycle-forming import: we must not await its + // completion since it would deadlock. + // + // We don't record the error in ii since + // the error is really associated with the + // cycle-forming edge, not the package itself. + // (Also it would complicate the + // invariants of importPath completion.) + if trace { + fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle) + } + continue + } + } + ii.awaitCompletion() + infos = append(infos, ii.info) + } + + return infos, errors +} + +// findPath returns an arbitrary path from 'from' to 'to' in the import +// graph, or nil if there was none. +func (imp *importer) findPath(from, to string) []string { + imp.graphMu.Lock() + defer imp.graphMu.Unlock() + + seen := make(map[string]bool) + var search func(stack []string, importPath string) []string + search = func(stack []string, importPath string) []string { + if !seen[importPath] { + seen[importPath] = true + stack = append(stack, importPath) + if importPath == to { + return stack + } + for x := range imp.graph[importPath] { + if p := search(stack, x); p != nil { + return p + } + } + } + return nil + } + return search(make([]string, 0, 20), from) +} + +// startLoad initiates the loading, parsing and type-checking of the +// specified package and its dependencies, if it has not already begun. +// +// It returns an importInfo, not necessarily in a completed state. The +// caller must call awaitCompletion() before accessing its info field. +// +// startLoad is concurrency-safe and idempotent. +// +func (imp *importer) startLoad(bp *build.Package) *importInfo { + path := bp.ImportPath + imp.importedMu.Lock() + ii, ok := imp.imported[path] + if !ok { + ii = &importInfo{path: path, complete: make(chan struct{})} + imp.imported[path] = ii + go func() { + info := imp.load(bp) + ii.Complete(info) + }() + } + imp.importedMu.Unlock() + + return ii +} + +// load implements package loading by parsing Go source files +// located by go/build. +func (imp *importer) load(bp *build.Package) *PackageInfo { + info := imp.newPackageInfo(bp.ImportPath, bp.Dir) + info.Importable = true + files, errs := imp.conf.parsePackageFiles(bp, 'g') + for _, err := range errs { + info.appendError(err) + } + + imp.addFiles(info, files, true) + + imp.progMu.Lock() + imp.prog.importMap[bp.ImportPath] = info.Pkg + imp.progMu.Unlock() + + return info +} + +// addFiles adds and type-checks the specified files to info, loading +// their dependencies if needed. The order of files determines the +// package initialization order. It may be called multiple times on the +// same package. Errors are appended to the info.Errors field. +// +// cycleCheck determines whether the imports within files create +// dependency edges that should be checked for potential cycles. +// +func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) { + // Ensure the dependencies are loaded, in parallel. + var fromPath string + if cycleCheck { + fromPath = info.Pkg.Path() + } + // TODO(adonovan): opt: make the caller do scanImports. + // Callers with a build.Package can skip it. 
+ imp.importAll(fromPath, info.dir, scanImports(files), 0) + + if trace { + fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n", + time.Since(imp.start), info.Pkg.Path(), len(files)) + } + + // Don't call checker.Files on Unsafe, even with zero files, + // because it would mutate the package, which is a global. + if info.Pkg == types.Unsafe { + if len(files) > 0 { + panic(`"unsafe" package contains unexpected files`) + } + } else { + // Ignore the returned (first) error since we + // already collect them all in the PackageInfo. + info.checker.Files(files) + info.Files = append(info.Files, files...) + } + + if imp.conf.AfterTypeCheck != nil { + imp.conf.AfterTypeCheck(info, files) + } + + if trace { + fmt.Fprintf(os.Stderr, "%s: stop %q\n", + time.Since(imp.start), info.Pkg.Path()) + } +} + +func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { + var pkg *types.Package + if path == "unsafe" { + pkg = types.Unsafe + } else { + pkg = types.NewPackage(path, "") + } + info := &PackageInfo{ + Pkg: pkg, + Info: types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + }, + errorFunc: imp.conf.TypeChecker.Error, + dir: dir, + } + + // Copy the types.Config so we can vary it across PackageInfos. + tc := imp.conf.TypeChecker + tc.IgnoreFuncBodies = false + if f := imp.conf.TypeCheckFuncBodies; f != nil { + tc.IgnoreFuncBodies = !f(path) + } + tc.Importer = closure{imp, info} + tc.Error = info.appendError // appendError wraps the user's Error function + + info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info) + imp.progMu.Lock() + imp.prog.AllPackages[pkg] = info + imp.progMu.Unlock() + return info +} + +type closure struct { + imp *importer + info *PackageInfo +} + +func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) } diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go new file mode 100644 index 0000000..7f38dd7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/util.go @@ -0,0 +1,124 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +import ( + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "os" + "strconv" + "sync" + + "golang.org/x/tools/go/buildutil" +) + +// We use a counting semaphore to limit +// the number of parallel I/O calls per process. +var ioLimit = make(chan bool, 10) + +// parseFiles parses the Go source files within directory dir and +// returns the ASTs of the ones that could be at least partially parsed, +// along with a list of I/O and parse errors encountered. +// +// I/O is done via ctxt, which may specify a virtual file system. +// displayPath is used to transform the filenames attached to the ASTs. 
+// +func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) { + if displayPath == nil { + displayPath = func(path string) string { return path } + } + var wg sync.WaitGroup + n := len(files) + parsed := make([]*ast.File, n) + errors := make([]error, n) + for i, file := range files { + if !buildutil.IsAbsPath(ctxt, file) { + file = buildutil.JoinPath(ctxt, dir, file) + } + wg.Add(1) + go func(i int, file string) { + ioLimit <- true // wait + defer func() { + wg.Done() + <-ioLimit // signal + }() + var rd io.ReadCloser + var err error + if ctxt.OpenFile != nil { + rd, err = ctxt.OpenFile(file) + } else { + rd, err = os.Open(file) + } + if err != nil { + errors[i] = err // open failed + return + } + + // ParseFile may return both an AST and an error. + parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode) + rd.Close() + }(i, file) + } + wg.Wait() + + // Eliminate nils, preserving order. + var o int + for _, f := range parsed { + if f != nil { + parsed[o] = f + o++ + } + } + parsed = parsed[:o] + + o = 0 + for _, err := range errors { + if err != nil { + errors[o] = err + o++ + } + } + errors = errors[:o] + + return parsed, errors +} + +// scanImports returns the set of all import paths from all +// import specs in the specified files. +func scanImports(files []*ast.File) map[string]bool { + imports := make(map[string]bool) + for _, f := range files { + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { + for _, spec := range decl.Specs { + spec := spec.(*ast.ImportSpec) + + // NB: do not assume the program is well-formed! + path, err := strconv.Unquote(spec.Path.Value) + if err != nil { + continue // quietly ignore the error + } + if path == "C" { + continue // skip pseudopackage + } + imports[path] = true + } + } + } + } + return imports +} + +// ---------- Internal helpers ---------- + +// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos) +func tokenFileContainsPos(f *token.File, pos token.Pos) bool { + p := int(pos) + base := f.Base() + return base <= p && p < base+f.Size() +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go new file mode 100644 index 0000000..9c441db --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/imports.go @@ -0,0 +1,31 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +import "go/types" + +// Dependencies returns all dependencies of the specified packages. +// +// Dependent packages appear in topological order: if package P imports +// package Q, Q appears earlier than P in the result. +// The algorithm follows import statements in the order they +// appear in the source code, so the result is a total order. 
+// +func Dependencies(pkgs ...*types.Package) []*types.Package { + var result []*types.Package + seen := make(map[*types.Package]bool) + var visit func(pkgs []*types.Package) + visit = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !seen[p] { + seen[p] = true + visit(p.Imports()) + result = append(result, p) + } + } + } + visit(pkgs) + return result +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go new file mode 100644 index 0000000..c7f7545 --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go @@ -0,0 +1,313 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeutil defines various utilities for types, such as Map, +// a mapping from types.Type to interface{} values. +package typeutil // import "golang.org/x/tools/go/types/typeutil" + +import ( + "bytes" + "fmt" + "go/types" + "reflect" +) + +// Map is a hash-table-based mapping from types (types.Type) to +// arbitrary interface{} values. The concrete types that implement +// the Type interface are pointers. Since they are not canonicalized, +// == cannot be used to check for equivalence, and thus we cannot +// simply use a Go map. +// +// Just as with map[K]V, a nil *Map is a valid empty map. +// +// Not thread-safe. +// +type Map struct { + hasher Hasher // shared by many Maps + table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused + length int // number of map entries +} + +// entry is an entry (key/value association) in a hash bucket. +type entry struct { + key types.Type + value interface{} +} + +// SetHasher sets the hasher used by Map. +// +// All Hashers are functionally equivalent but contain internal state +// used to cache the results of hashing previously seen types. +// +// A single Hasher created by MakeHasher() may be shared among many +// Maps. This is recommended if the instances have many keys in +// common, as it will amortize the cost of hash computation. +// +// A Hasher may grow without bound as new types are seen. Even when a +// type is deleted from the map, the Hasher never shrinks, since other +// types in the map may reference the deleted type indirectly. +// +// Hashers are not thread-safe, and read-only operations such as +// Map.Lookup require updates to the hasher, so a full Mutex lock (not a +// read-lock) is require around all Map operations if a shared +// hasher is accessed from multiple threads. +// +// If SetHasher is not called, the Map will create a private hasher at +// the first call to Insert. +// +func (m *Map) SetHasher(hasher Hasher) { + m.hasher = hasher +} + +// Delete removes the entry with the given key, if any. +// It returns true if the entry was found. +// +func (m *Map) Delete(key types.Type) bool { + if m != nil && m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + for i, e := range bucket { + if e.key != nil && types.Identical(key, e.key) { + // We can't compact the bucket as it + // would disturb iterators. + bucket[i] = entry{} + m.length-- + return true + } + } + } + return false +} + +// At returns the map entry for the given key. +// The result is nil if the entry is not present. 
+// +func (m *Map) At(key types.Type) interface{} { + if m != nil && m.table != nil { + for _, e := range m.table[m.hasher.Hash(key)] { + if e.key != nil && types.Identical(key, e.key) { + return e.value + } + } + } + return nil +} + +// Set sets the map entry for key to val, +// and returns the previous entry, if any. +func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) { + if m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + var hole *entry + for i, e := range bucket { + if e.key == nil { + hole = &bucket[i] + } else if types.Identical(key, e.key) { + prev = e.value + bucket[i].value = value + return + } + } + + if hole != nil { + *hole = entry{key, value} // overwrite deleted entry + } else { + m.table[hash] = append(bucket, entry{key, value}) + } + } else { + if m.hasher.memo == nil { + m.hasher = MakeHasher() + } + hash := m.hasher.Hash(key) + m.table = map[uint32][]entry{hash: {entry{key, value}}} + } + + m.length++ + return +} + +// Len returns the number of map entries. +func (m *Map) Len() int { + if m != nil { + return m.length + } + return 0 +} + +// Iterate calls function f on each entry in the map in unspecified order. +// +// If f should mutate the map, Iterate provides the same guarantees as +// Go maps: if f deletes a map entry that Iterate has not yet reached, +// f will not be invoked for it, but if f inserts a map entry that +// Iterate has not yet reached, whether or not f will be invoked for +// it is unspecified. +// +func (m *Map) Iterate(f func(key types.Type, value interface{})) { + if m != nil { + for _, bucket := range m.table { + for _, e := range bucket { + if e.key != nil { + f(e.key, e.value) + } + } + } + } +} + +// Keys returns a new slice containing the set of map keys. +// The order is unspecified. +func (m *Map) Keys() []types.Type { + keys := make([]types.Type, 0, m.Len()) + m.Iterate(func(key types.Type, _ interface{}) { + keys = append(keys, key) + }) + return keys +} + +func (m *Map) toString(values bool) string { + if m == nil { + return "{}" + } + var buf bytes.Buffer + fmt.Fprint(&buf, "{") + sep := "" + m.Iterate(func(key types.Type, value interface{}) { + fmt.Fprint(&buf, sep) + sep = ", " + fmt.Fprint(&buf, key) + if values { + fmt.Fprintf(&buf, ": %q", value) + } + }) + fmt.Fprint(&buf, "}") + return buf.String() +} + +// String returns a string representation of the map's entries. +// Values are printed using fmt.Sprintf("%v", v). +// Order is unspecified. +// +func (m *Map) String() string { + return m.toString(true) +} + +// KeysString returns a string representation of the map's key set. +// Order is unspecified. +// +func (m *Map) KeysString() string { + return m.toString(false) +} + +//////////////////////////////////////////////////////////////////////// +// Hasher + +// A Hasher maps each type to its hash value. +// For efficiency, a hasher uses memoization; thus its memory +// footprint grows monotonically over time. +// Hashers are not thread-safe. +// Hashers have reference semantics. +// Call MakeHasher to create a Hasher. +type Hasher struct { + memo map[types.Type]uint32 +} + +// MakeHasher returns a new Hasher instance. +func MakeHasher() Hasher { + return Hasher{make(map[types.Type]uint32)} +} + +// Hash computes a hash value for the given type t such that +// Identical(t, t') => Hash(t) == Hash(t'). 
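A short usage sketch of the Map described above (not part of the vendored file). The point of hashing by type identity is that two structurally identical but distinct types.Type values reach the same entry.

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	var m typeutil.Map // the zero value is a valid empty map
	key := types.NewSlice(types.Typ[types.Int])
	m.Set(key, "a value associated with []int")

	// A distinct but identical type reaches the same entry.
	other := types.NewSlice(types.Typ[types.Int])
	fmt.Println(m.At(other)) // a value associated with []int
	fmt.Println(m.Len())     // 1

	m.Delete(key)
	fmt.Println(m.Len()) // 0
}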
+func (h Hasher) Hash(t types.Type) uint32 { + hash, ok := h.memo[t] + if !ok { + hash = h.hashFor(t) + h.memo[t] = hash + } + return hash +} + +// hashString computes the Fowler–Noll–Vo hash of s. +func hashString(s string) uint32 { + var h uint32 + for i := 0; i < len(s); i++ { + h ^= uint32(s[i]) + h *= 16777619 + } + return h +} + +// hashFor computes the hash of t. +func (h Hasher) hashFor(t types.Type) uint32 { + // See Identical for rationale. + switch t := t.(type) { + case *types.Basic: + return uint32(t.Kind()) + + case *types.Array: + return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) + + case *types.Slice: + return 9049 + 2*h.Hash(t.Elem()) + + case *types.Struct: + var hash uint32 = 9059 + for i, n := 0, t.NumFields(); i < n; i++ { + f := t.Field(i) + if f.Anonymous() { + hash += 8861 + } + hash += hashString(t.Tag(i)) + hash += hashString(f.Name()) // (ignore f.Pkg) + hash += h.Hash(f.Type()) + } + return hash + + case *types.Pointer: + return 9067 + 2*h.Hash(t.Elem()) + + case *types.Signature: + var hash uint32 = 9091 + if t.Variadic() { + hash *= 8863 + } + return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) + + case *types.Interface: + var hash uint32 = 9103 + for i, n := 0, t.NumMethods(); i < n; i++ { + // See go/types.identicalMethods for rationale. + // Method order is not significant. + // Ignore m.Pkg(). + m := t.Method(i) + hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) + } + return hash + + case *types.Map: + return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem()) + + case *types.Chan: + return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) + + case *types.Named: + // Not safe with a copying GC; objects may move. + return uint32(reflect.ValueOf(t.Obj()).Pointer()) + + case *types.Tuple: + return h.hashTuple(t) + } + panic(t) +} + +func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { + // See go/types.identicalTypes for rationale. + n := tuple.Len() + var hash uint32 = 9137 + 2*uint32(n) + for i := 0; i < n; i++ { + hash += 3 * h.Hash(tuple.At(i).Type()) + } + return hash +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go new file mode 100644 index 0000000..3208461 --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go @@ -0,0 +1,72 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements a cache of method sets. + +package typeutil + +import ( + "go/types" + "sync" +) + +// A MethodSetCache records the method set of each type T for which +// MethodSet(T) is called so that repeat queries are fast. +// The zero value is a ready-to-use cache instance. +type MethodSetCache struct { + mu sync.Mutex + named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N + others map[types.Type]*types.MethodSet // all other types +} + +// MethodSet returns the method set of type T. It is thread-safe. +// +// If cache is nil, this function is equivalent to types.NewMethodSet(T). +// Utility functions can thus expose an optional *MethodSetCache +// parameter to clients that care about performance. 
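A hedged sketch of sharing one MethodSetCache across queries (not from the patch); the universe "error" type is used only because it is a convenient type that has a method.

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	var cache typeutil.MethodSetCache // the zero value is ready to use

	T := types.Universe.Lookup("error").Type() // the built-in error interface
	mset := cache.MethodSet(T)                 // memoized types.NewMethodSet(T)
	for i := 0; i < mset.Len(); i++ {
		fmt.Println(mset.At(i)) // the Error() string method
	}

	// IntuitiveMethodSet (defined later in this patch) accepts the same optional cache.
	fmt.Println(len(typeutil.IntuitiveMethodSet(T, &cache))) // 1
}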
+// +func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet { + if cache == nil { + return types.NewMethodSet(T) + } + cache.mu.Lock() + defer cache.mu.Unlock() + + switch T := T.(type) { + case *types.Named: + return cache.lookupNamed(T).value + + case *types.Pointer: + if N, ok := T.Elem().(*types.Named); ok { + return cache.lookupNamed(N).pointer + } + } + + // all other types + // (The map uses pointer equivalence, not type identity.) + mset := cache.others[T] + if mset == nil { + mset = types.NewMethodSet(T) + if cache.others == nil { + cache.others = make(map[types.Type]*types.MethodSet) + } + cache.others[T] = mset + } + return mset +} + +func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } { + if cache.named == nil { + cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet }) + } + // Avoid recomputing mset(*T) for each distinct Pointer + // instance whose underlying type is a named type. + msets, ok := cache.named[named] + if !ok { + msets.value = types.NewMethodSet(named) + msets.pointer = types.NewMethodSet(types.NewPointer(named)) + cache.named[named] = msets + } + return msets +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go new file mode 100644 index 0000000..9849c24 --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go @@ -0,0 +1,52 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +// This file defines utilities for user interfaces that display types. + +import "go/types" + +// IntuitiveMethodSet returns the intuitive method set of a type T, +// which is the set of methods you can call on an addressable value of +// that type. +// +// The result always contains MethodSet(T), and is exactly MethodSet(T) +// for interface types and for pointer-to-concrete types. +// For all other concrete types T, the result additionally +// contains each method belonging to *T if there is no identically +// named method on T itself. +// +// This corresponds to user intuition about method sets; +// this function is intended only for user interfaces. +// +// The order of the result is as for types.MethodSet(T). +// +func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { + isPointerToConcrete := func(T types.Type) bool { + ptr, ok := T.(*types.Pointer) + return ok && !types.IsInterface(ptr.Elem()) + } + + var result []*types.Selection + mset := msets.MethodSet(T) + if types.IsInterface(T) || isPointerToConcrete(T) { + for i, n := 0, mset.Len(); i < n; i++ { + result = append(result, mset.At(i)) + } + } else { + // T is some other concrete type. + // Report methods of T and *T, preferring those of T. + pmset := msets.MethodSet(types.NewPointer(T)) + for i, n := 0, pmset.Len(); i < n; i++ { + meth := pmset.At(i) + if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil { + meth = m + } + result = append(result, meth) + } + + } + return result +} diff --git a/vendor/golang.org/x/tools/imports/fastwalk.go b/vendor/golang.org/x/tools/imports/fastwalk.go new file mode 100644 index 0000000..31e6e27 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/fastwalk.go @@ -0,0 +1,187 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// A faster implementation of filepath.Walk. +// +// filepath.Walk's design necessarily calls os.Lstat on each file, +// even if the caller needs less info. And goimports only need to know +// the type of each file. The kernel interface provides the type in +// the Readdir call but the standard library ignored it. +// fastwalk_unix.go contains a fork of the syscall routines. +// +// See golang.org/issue/16399 + +package imports + +import ( + "errors" + "os" + "path/filepath" + "runtime" + "sync" +) + +// traverseLink is a sentinel error for fastWalk, similar to filepath.SkipDir. +var traverseLink = errors.New("traverse symlink, assuming target is a directory") + +// fastWalk walks the file tree rooted at root, calling walkFn for +// each file or directory in the tree, including root. +// +// If fastWalk returns filepath.SkipDir, the directory is skipped. +// +// Unlike filepath.Walk: +// * file stat calls must be done by the user. +// The only provided metadata is the file type, which does not include +// any permission bits. +// * multiple goroutines stat the filesystem concurrently. The provided +// walkFn must be safe for concurrent use. +// * fastWalk can follow symlinks if walkFn returns the traverseLink +// sentinel error. It is the walkFn's responsibility to prevent +// fastWalk from going into symlink cycles. +func fastWalk(root string, walkFn func(path string, typ os.FileMode) error) error { + // TODO(bradfitz): make numWorkers configurable? We used a + // minimum of 4 to give the kernel more info about multiple + // things we want, in hopes its I/O scheduling can take + // advantage of that. Hopefully most are in cache. Maybe 4 is + // even too low of a minimum. Profile more. + numWorkers := 4 + if n := runtime.NumCPU(); n > numWorkers { + numWorkers = n + } + + // Make sure to wait for all workers to finish, otherwise + // walkFn could still be called after returning. This Wait call + // runs after close(e.donec) below. + var wg sync.WaitGroup + defer wg.Wait() + + w := &walker{ + fn: walkFn, + enqueuec: make(chan walkItem, numWorkers), // buffered for performance + workc: make(chan walkItem, numWorkers), // buffered for performance + donec: make(chan struct{}), + + // buffered for correctness & not leaking goroutines: + resc: make(chan error, numWorkers), + } + defer close(w.donec) + + for i := 0; i < numWorkers; i++ { + wg.Add(1) + go w.doWork(&wg) + } + todo := []walkItem{{dir: root}} + out := 0 + for { + workc := w.workc + var workItem walkItem + if len(todo) == 0 { + workc = nil + } else { + workItem = todo[len(todo)-1] + } + select { + case workc <- workItem: + todo = todo[:len(todo)-1] + out++ + case it := <-w.enqueuec: + todo = append(todo, it) + case err := <-w.resc: + out-- + if err != nil { + return err + } + if out == 0 && len(todo) == 0 { + // It's safe to quit here, as long as the buffered + // enqueue channel isn't also readable, which might + // happen if the worker sends both another unit of + // work and its result before the other select was + // scheduled and both w.resc and w.enqueuec were + // readable. + select { + case it := <-w.enqueuec: + todo = append(todo, it) + default: + return nil + } + } + } + } +} + +// doWork reads directories as instructed (via workc) and runs the +// user's callback function. 
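Since fastWalk is unexported, it can only be called from within this package. The following hypothetical in-package helper (not part of the vendored file) shows the contract spelled out above: the callback receives only a file type, must be safe for concurrent use, and may return filepath.SkipDir for directories. It assumes the additional imports "strings" and "sync/atomic".

// countGoFiles is a hypothetical in-package helper, not vendored code.
func countGoFiles(root string) (int64, error) {
	var n int64
	err := fastWalk(root, func(path string, typ os.FileMode) error {
		if typ == os.ModeDir && filepath.Base(path) == "testdata" {
			return filepath.SkipDir // prune whole subtrees
		}
		if typ.IsRegular() && strings.HasSuffix(path, ".go") {
			atomic.AddInt64(&n, 1) // walkFn may run on several goroutines at once
		}
		return nil
	})
	return n, err
}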
+func (w *walker) doWork(wg *sync.WaitGroup) { + defer wg.Done() + for { + select { + case <-w.donec: + return + case it := <-w.workc: + select { + case <-w.donec: + return + case w.resc <- w.walk(it.dir, !it.callbackDone): + } + } + } +} + +type walker struct { + fn func(path string, typ os.FileMode) error + + donec chan struct{} // closed on fastWalk's return + workc chan walkItem // to workers + enqueuec chan walkItem // from workers + resc chan error // from workers +} + +type walkItem struct { + dir string + callbackDone bool // callback already called; don't do it again +} + +func (w *walker) enqueue(it walkItem) { + select { + case w.enqueuec <- it: + case <-w.donec: + } +} + +func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error { + joined := dirName + string(os.PathSeparator) + baseName + if typ == os.ModeDir { + w.enqueue(walkItem{dir: joined}) + return nil + } + + err := w.fn(joined, typ) + if typ == os.ModeSymlink { + if err == traverseLink { + // Set callbackDone so we don't call it twice for both the + // symlink-as-symlink and the symlink-as-directory later: + w.enqueue(walkItem{dir: joined, callbackDone: true}) + return nil + } + if err == filepath.SkipDir { + // Permit SkipDir on symlinks too. + return nil + } + } + return err +} + +func (w *walker) walk(root string, runUserCallback bool) error { + if runUserCallback { + err := w.fn(root, os.ModeDir) + if err == filepath.SkipDir { + return nil + } + if err != nil { + return err + } + } + + return readDir(root, w.onDirEnt) +} diff --git a/vendor/golang.org/x/tools/imports/fastwalk_dirent_fileno.go b/vendor/golang.org/x/tools/imports/fastwalk_dirent_fileno.go new file mode 100644 index 0000000..f1fd649 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/fastwalk_dirent_fileno.go @@ -0,0 +1,13 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build freebsd openbsd netbsd + +package imports + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Fileno) +} diff --git a/vendor/golang.org/x/tools/imports/fastwalk_dirent_ino.go b/vendor/golang.org/x/tools/imports/fastwalk_dirent_ino.go new file mode 100644 index 0000000..9fc6de0 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/fastwalk_dirent_ino.go @@ -0,0 +1,14 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux darwin +// +build !appengine + +package imports + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Ino) +} diff --git a/vendor/golang.org/x/tools/imports/fastwalk_portable.go b/vendor/golang.org/x/tools/imports/fastwalk_portable.go new file mode 100644 index 0000000..6c26583 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/fastwalk_portable.go @@ -0,0 +1,29 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd + +package imports + +import ( + "io/ioutil" + "os" +) + +// readDir calls fn for each directory entry in dirName. +// It does not descend into directories or follow symlinks. +// If fn returns a non-nil error, readDir returns with that error +// immediately. 
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fis, err := ioutil.ReadDir(dirName) + if err != nil { + return err + } + for _, fi := range fis { + if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil { + return err + } + } + return nil +} diff --git a/vendor/golang.org/x/tools/imports/fastwalk_unix.go b/vendor/golang.org/x/tools/imports/fastwalk_unix.go new file mode 100644 index 0000000..db6ee8d --- /dev/null +++ b/vendor/golang.org/x/tools/imports/fastwalk_unix.go @@ -0,0 +1,123 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux darwin freebsd openbsd netbsd +// +build !appengine + +package imports + +import ( + "bytes" + "fmt" + "os" + "syscall" + "unsafe" +) + +const blockSize = 8 << 10 + +// unknownFileMode is a sentinel (and bogus) os.FileMode +// value used to represent a syscall.DT_UNKNOWN Dirent.Type. +const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice + +func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fd, err := syscall.Open(dirName, 0, 0) + if err != nil { + return err + } + defer syscall.Close(fd) + + // The buffer must be at least a block long. + buf := make([]byte, blockSize) // stack-allocated; doesn't escape + bufp := 0 // starting read position in buf + nbuf := 0 // end valid data in buf + for { + if bufp >= nbuf { + bufp = 0 + nbuf, err = syscall.ReadDirent(fd, buf) + if err != nil { + return os.NewSyscallError("readdirent", err) + } + if nbuf <= 0 { + return nil + } + } + consumed, name, typ := parseDirEnt(buf[bufp:nbuf]) + bufp += consumed + if name == "" || name == "." || name == ".." { + continue + } + // Fallback for filesystems (like old XFS) that don't + // support Dirent.Type and have DT_UNKNOWN (0) there + // instead. + if typ == unknownFileMode { + fi, err := os.Lstat(dirName + "/" + name) + if err != nil { + // It got deleted in the meantime. + if os.IsNotExist(err) { + continue + } + return err + } + typ = fi.Mode() & os.ModeType + } + if err := fn(dirName, name, typ); err != nil { + return err + } + } +} + +func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) { + // golang.org/issue/15653 + dirent := (*syscall.Dirent)(unsafe.Pointer(&buf[0])) + if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v { + panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v)) + } + if len(buf) < int(dirent.Reclen) { + panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen)) + } + consumed = int(dirent.Reclen) + if direntInode(dirent) == 0 { // File absent in directory. + return + } + switch dirent.Type { + case syscall.DT_REG: + typ = 0 + case syscall.DT_DIR: + typ = os.ModeDir + case syscall.DT_LNK: + typ = os.ModeSymlink + case syscall.DT_BLK: + typ = os.ModeDevice + case syscall.DT_FIFO: + typ = os.ModeNamedPipe + case syscall.DT_SOCK: + typ = os.ModeSocket + case syscall.DT_UNKNOWN: + typ = unknownFileMode + default: + // Skip weird things. + // It's probably a DT_WHT (http://lwn.net/Articles/325369/) + // or something. Revisit if/when this package is moved outside + // of goimports. goimports only cares about regular files, + // symlinks, and directories. 
+ return + } + + nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) + nameLen := bytes.IndexByte(nameBuf[:], 0) + if nameLen < 0 { + panic("failed to find terminating 0 byte in dirent") + } + + // Special cases for common things: + if nameLen == 1 && nameBuf[0] == '.' { + name = "." + } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' { + name = ".." + } else { + name = string(nameBuf[:nameLen]) + } + return +} diff --git a/vendor/golang.org/x/tools/imports/fix.go b/vendor/golang.org/x/tools/imports/fix.go new file mode 100644 index 0000000..8dbbd27 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/fix.go @@ -0,0 +1,1082 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io/ioutil" + "log" + "os" + "path" + "path/filepath" + "sort" + "strings" + "sync" + + "golang.org/x/tools/go/ast/astutil" +) + +// Debug controls verbose logging. +var Debug = false + +var ( + inTests = false // set true by fix_test.go; if false, no need to use testMu + testMu sync.RWMutex // guards globals reset by tests; used only if inTests +) + +// LocalPrefix, if set, instructs Process to sort import paths with the given +// prefix into another group after 3rd-party packages. +var LocalPrefix string + +// importToGroup is a list of functions which map from an import path to +// a group number. +var importToGroup = []func(importPath string) (num int, ok bool){ + func(importPath string) (num int, ok bool) { + if LocalPrefix != "" && strings.HasPrefix(importPath, LocalPrefix) { + return 3, true + } + return + }, + func(importPath string) (num int, ok bool) { + if strings.HasPrefix(importPath, "appengine") { + return 2, true + } + return + }, + func(importPath string) (num int, ok bool) { + if strings.Contains(importPath, ".") { + return 1, true + } + return + }, +} + +func importGroup(importPath string) int { + for _, fn := range importToGroup { + if n, ok := fn(importPath); ok { + return n + } + } + return 0 +} + +// importInfo is a summary of information about one import. +type importInfo struct { + Path string // full import path (e.g. "crypto/rand") + Alias string // import alias, if present (e.g. "crand") +} + +// packageInfo is a summary of features found in a package. +type packageInfo struct { + Globals map[string]bool // symbol => true + Imports map[string]importInfo // pkg base name or alias => info + // refs are a set of package references currently satisfied by imports. + // first key: either base package (e.g. "fmt") or renamed package + // second key: referenced package symbol (e.g. "Println") + Refs map[string]map[string]bool +} + +// dirPackageInfo exposes the dirPackageInfoFile function so that it can be overridden. +var dirPackageInfo = dirPackageInfoFile + +// dirPackageInfoFile gets information from other files in the package. 
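A quick sketch of the grouping rules above, written as a hypothetical in-package test; the LocalPrefix value example.com/myrepo and the test name are assumptions used only for illustration.

package imports

import "testing"

// TestImportGroupSketch is illustrative only: it spells out the bucket
// numbers importGroup assigns (0 stdlib, 1 third-party, 2 appengine,
// 3 LocalPrefix), which is also the order the sorted output emits groups in.
func TestImportGroupSketch(t *testing.T) {
	old := LocalPrefix
	LocalPrefix = "example.com/myrepo" // hypothetical local module prefix
	defer func() { LocalPrefix = old }()

	cases := map[string]int{
		"fmt":                     0, // no rule matches: standard library
		"github.com/pkg/errors":   1, // contains a dot: third-party
		"appengine/datastore":     2, // appengine prefix
		"example.com/myrepo/util": 3, // LocalPrefix group
	}
	for path, want := range cases {
		if got := importGroup(path); got != want {
			t.Errorf("importGroup(%q) = %d, want %d", path, got, want)
		}
	}
}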
+func dirPackageInfoFile(pkgName, srcDir, filename string) (*packageInfo, error) { + considerTests := strings.HasSuffix(filename, "_test.go") + + fileBase := filepath.Base(filename) + packageFileInfos, err := ioutil.ReadDir(srcDir) + if err != nil { + return nil, err + } + + info := &packageInfo{ + Globals: make(map[string]bool), + Imports: make(map[string]importInfo), + Refs: make(map[string]map[string]bool), + } + + visitor := collectReferences(info.Refs) + for _, fi := range packageFileInfos { + if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") { + continue + } + if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") { + continue + } + + fileSet := token.NewFileSet() + root, err := parser.ParseFile(fileSet, filepath.Join(srcDir, fi.Name()), nil, 0) + if err != nil { + continue + } + + for _, decl := range root.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + info.Globals[valueSpec.Names[0].Name] = true + } + } + + for _, imp := range root.Imports { + impInfo := importInfo{Path: strings.Trim(imp.Path.Value, `"`)} + name := path.Base(impInfo.Path) + if imp.Name != nil { + name = strings.Trim(imp.Name.Name, `"`) + impInfo.Alias = name + } + info.Imports[name] = impInfo + } + + ast.Walk(visitor, root) + } + return info, nil +} + +// collectReferences returns a visitor that collects all exported package +// references +func collectReferences(refs map[string]map[string]bool) visitFn { + var visitor visitFn + visitor = func(node ast.Node) ast.Visitor { + if node == nil { + return visitor + } + switch v := node.(type) { + case *ast.SelectorExpr: + xident, ok := v.X.(*ast.Ident) + if !ok { + break + } + if xident.Obj != nil { + // if the parser can resolve it, it's not a package ref + break + } + pkgName := xident.Name + r := refs[pkgName] + if r == nil { + r = make(map[string]bool) + refs[pkgName] = r + } + if ast.IsExported(v.Sel.Name) { + r[v.Sel.Name] = true + } + } + return visitor + } + return visitor +} + +func fixImports(fset *token.FileSet, f *ast.File, filename string) (added []string, err error) { + // refs are a set of possible package references currently unsatisfied by imports. + // first key: either base package (e.g. "fmt") or renamed package + // second key: referenced package symbol (e.g. "Println") + refs := make(map[string]map[string]bool) + + // decls are the current package imports. key is base package or renamed package. + decls := make(map[string]*ast.ImportSpec) + + abs, err := filepath.Abs(filename) + if err != nil { + return nil, err + } + srcDir := filepath.Dir(abs) + if Debug { + log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) + } + + var packageInfo *packageInfo + var loadedPackageInfo bool + + // collect potential uses of packages. 
+ var visitor visitFn + visitor = visitFn(func(node ast.Node) ast.Visitor { + if node == nil { + return visitor + } + switch v := node.(type) { + case *ast.ImportSpec: + if v.Name != nil { + decls[v.Name.Name] = v + break + } + ipath := strings.Trim(v.Path.Value, `"`) + if ipath == "C" { + break + } + local := importPathToName(ipath, srcDir) + decls[local] = v + case *ast.SelectorExpr: + xident, ok := v.X.(*ast.Ident) + if !ok { + break + } + if xident.Obj != nil { + // if the parser can resolve it, it's not a package ref + break + } + pkgName := xident.Name + if refs[pkgName] == nil { + refs[pkgName] = make(map[string]bool) + } + if !loadedPackageInfo { + loadedPackageInfo = true + packageInfo, _ = dirPackageInfo(f.Name.Name, srcDir, filename) + } + if decls[pkgName] == nil && (packageInfo == nil || !packageInfo.Globals[pkgName]) { + refs[pkgName][v.Sel.Name] = true + } + } + return visitor + }) + ast.Walk(visitor, f) + + // Nil out any unused ImportSpecs, to be removed in following passes + unusedImport := map[string]string{} + for pkg, is := range decls { + if refs[pkg] == nil && pkg != "_" && pkg != "." { + name := "" + if is.Name != nil { + name = is.Name.Name + } + unusedImport[strings.Trim(is.Path.Value, `"`)] = name + } + } + for ipath, name := range unusedImport { + if ipath == "C" { + // Don't remove cgo stuff. + continue + } + astutil.DeleteNamedImport(fset, f, name, ipath) + } + + for pkgName, symbols := range refs { + if len(symbols) == 0 { + // skip over packages already imported + delete(refs, pkgName) + } + } + + // Fast path, all references already imported. + if len(refs) == 0 { + return nil, nil + } + + // Can assume this will be necessary in all cases now. + if !loadedPackageInfo { + packageInfo, _ = dirPackageInfo(f.Name.Name, srcDir, filename) + } + + // Search for imports matching potential package references. + searches := 0 + type result struct { + ipath string // import path (if err == nil) + name string // optional name to rename import as + err error + } + results := make(chan result) + for pkgName, symbols := range refs { + go func(pkgName string, symbols map[string]bool) { + if packageInfo != nil { + sibling := packageInfo.Imports[pkgName] + if sibling.Path != "" { + refs := packageInfo.Refs[pkgName] + for symbol := range symbols { + if refs[symbol] { + results <- result{ipath: sibling.Path, name: sibling.Alias} + return + } + } + } + } + ipath, rename, err := findImport(pkgName, symbols, filename) + r := result{ipath: ipath, err: err} + if rename { + r.name = pkgName + } + results <- r + }(pkgName, symbols) + searches++ + } + for i := 0; i < searches; i++ { + result := <-results + if result.err != nil { + return nil, result.err + } + if result.ipath != "" { + if result.name != "" { + astutil.AddNamedImport(fset, f, result.name, result.ipath) + } else { + astutil.AddImport(fset, f, result.ipath) + } + added = append(added, result.ipath) + } + } + + return added, nil +} + +// importPathToName returns the package name for the given import path. +var importPathToName func(importPath, srcDir string) (packageName string) = importPathToNameGoPath + +// importPathToNameBasic assumes the package name is the base of import path. +func importPathToNameBasic(importPath, srcDir string) (packageName string) { + return path.Base(importPath) +} + +// importPathToNameGoPath finds out the actual package name, as declared in its .go files. +// If there's a problem, it falls back to using importPathToNameBasic. 
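To make the fallback concrete: the basic guess is just path.Base of the import path, which is wrong whenever the last path element is not the declared package name. The sketch below only exercises the basic helper; the gopkg.in/yaml.v2 path (whose package is "yaml") is used as a familiar illustration, not something this test would fetch from disk.

package imports

import "testing"

// Illustrative only: importPathToNameBasic guesses "yaml.v2" for
// gopkg.in/yaml.v2, while the real package name is "yaml"; that gap is
// exactly what importPathToNameGoPathParse closes by parsing the package
// clause of one file in the directory.
func TestImportPathToNameBasicSketch(t *testing.T) {
	if got := importPathToNameBasic("gopkg.in/yaml.v2", ""); got != "yaml.v2" {
		t.Errorf("importPathToNameBasic = %q, want yaml.v2", got)
	}
}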
+func importPathToNameGoPath(importPath, srcDir string) (packageName string) { + // Fast path for standard library without going to disk. + if pkg, ok := stdImportPackage[importPath]; ok { + return pkg + } + + pkgName, err := importPathToNameGoPathParse(importPath, srcDir) + if Debug { + log.Printf("importPathToNameGoPathParse(%q, srcDir=%q) = %q, %v", importPath, srcDir, pkgName, err) + } + if err == nil { + return pkgName + } + return importPathToNameBasic(importPath, srcDir) +} + +// importPathToNameGoPathParse is a faster version of build.Import if +// the only thing desired is the package name. It uses build.FindOnly +// to find the directory and then only parses one file in the package, +// trusting that the files in the directory are consistent. +func importPathToNameGoPathParse(importPath, srcDir string) (packageName string, err error) { + buildPkg, err := build.Import(importPath, srcDir, build.FindOnly) + if err != nil { + return "", err + } + d, err := os.Open(buildPkg.Dir) + if err != nil { + return "", err + } + names, err := d.Readdirnames(-1) + d.Close() + if err != nil { + return "", err + } + sort.Strings(names) // to have predictable behavior + var lastErr error + var nfile int + for _, name := range names { + if !strings.HasSuffix(name, ".go") { + continue + } + if strings.HasSuffix(name, "_test.go") { + continue + } + nfile++ + fullFile := filepath.Join(buildPkg.Dir, name) + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly) + if err != nil { + lastErr = err + continue + } + pkgName := f.Name.Name + if pkgName == "documentation" { + // Special case from go/build.ImportDir, not + // handled by ctx.MatchFile. + continue + } + if pkgName == "main" { + // Also skip package main, assuming it's a +build ignore generator or example. + // Since you can't import a package main anyway, there's no harm here. + continue + } + return pkgName, nil + } + if lastErr != nil { + return "", lastErr + } + return "", fmt.Errorf("no importable package found in %d Go files", nfile) +} + +var stdImportPackage = map[string]string{} // "net/http" => "http" + +func init() { + // Nothing in the standard library has a package name not + // matching its import base name. + for _, pkg := range stdlib { + if _, ok := stdImportPackage[pkg]; !ok { + stdImportPackage[pkg] = path.Base(pkg) + } + } +} + +// Directory-scanning state. +var ( + // scanGoRootOnce guards calling scanGoRoot (for $GOROOT) + scanGoRootOnce sync.Once + // scanGoPathOnce guards calling scanGoPath (for $GOPATH) + scanGoPathOnce sync.Once + + // populateIgnoreOnce guards calling populateIgnore + populateIgnoreOnce sync.Once + ignoredDirs []os.FileInfo + + dirScanMu sync.RWMutex + dirScan map[string]*pkg // abs dir path => *pkg +) + +type pkg struct { + dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http") + importPath string // full pkg import path ("net/http", "foo/bar/vendor/a/b") + importPathShort string // vendorless import path ("net/http", "a/b") + distance int // relative distance to target +} + +// byDistanceOrImportPathShortLength sorts by relative distance breaking ties +// on the short import path length and then the import string itself. 
+type byDistanceOrImportPathShortLength []*pkg + +func (s byDistanceOrImportPathShortLength) Len() int { return len(s) } +func (s byDistanceOrImportPathShortLength) Less(i, j int) bool { + di, dj := s[i].distance, s[j].distance + if di == -1 { + return false + } + if dj == -1 { + return true + } + if di != dj { + return di < dj + } + + vi, vj := s[i].importPathShort, s[j].importPathShort + if len(vi) != len(vj) { + return len(vi) < len(vj) + } + return vi < vj +} +func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +func distance(basepath, targetpath string) int { + p, err := filepath.Rel(basepath, targetpath) + if err != nil { + return -1 + } + if p == "." { + return 0 + } + return strings.Count(p, string(filepath.Separator)) + 1 +} + +// guarded by populateIgnoreOnce; populates ignoredDirs. +func populateIgnore() { + for _, srcDir := range build.Default.SrcDirs() { + if srcDir == filepath.Join(build.Default.GOROOT, "src") { + continue + } + populateIgnoredDirs(srcDir) + } +} + +// populateIgnoredDirs reads an optional config file at /.goimportsignore +// of relative directories to ignore when scanning for go files. +// The provided path is one of the $GOPATH entries with "src" appended. +func populateIgnoredDirs(path string) { + file := filepath.Join(path, ".goimportsignore") + slurp, err := ioutil.ReadFile(file) + if Debug { + if err != nil { + log.Print(err) + } else { + log.Printf("Read %s", file) + } + } + if err != nil { + return + } + bs := bufio.NewScanner(bytes.NewReader(slurp)) + for bs.Scan() { + line := strings.TrimSpace(bs.Text()) + if line == "" || strings.HasPrefix(line, "#") { + continue + } + full := filepath.Join(path, line) + if fi, err := os.Stat(full); err == nil { + ignoredDirs = append(ignoredDirs, fi) + if Debug { + log.Printf("Directory added to ignore list: %s", full) + } + } else if Debug { + log.Printf("Error statting entry in .goimportsignore: %v", err) + } + } +} + +func skipDir(fi os.FileInfo) bool { + for _, ignoredDir := range ignoredDirs { + if os.SameFile(fi, ignoredDir) { + return true + } + } + return false +} + +// shouldTraverse reports whether the symlink fi should, found in dir, +// should be followed. It makes sure symlinks were never visited +// before to avoid symlink loops. +func shouldTraverse(dir string, fi os.FileInfo) bool { + path := filepath.Join(dir, fi.Name()) + target, err := filepath.EvalSymlinks(path) + if err != nil { + if !os.IsNotExist(err) { + fmt.Fprintln(os.Stderr, err) + } + return false + } + ts, err := os.Stat(target) + if err != nil { + fmt.Fprintln(os.Stderr, err) + return false + } + if !ts.IsDir() { + return false + } + if skipDir(ts) { + return false + } + // Check for symlink loops by statting each directory component + // and seeing if any are the same file as ts. + for { + parent := filepath.Dir(path) + if parent == path { + // Made it to the root without seeing a cycle. + // Use this symlink. + return true + } + parentInfo, err := os.Stat(parent) + if err != nil { + return false + } + if os.SameFile(ts, parentInfo) { + // Cycle. Don't traverse. 
+ return false + } + path = parent + } + +} + +var testHookScanDir = func(dir string) {} + +var scanGoRootDone = make(chan struct{}) // closed when scanGoRoot is done + +func scanGoRoot() { + go func() { + scanGoDirs(true) + close(scanGoRootDone) + }() +} + +func scanGoPath() { scanGoDirs(false) } + +func scanGoDirs(goRoot bool) { + if Debug { + which := "$GOROOT" + if !goRoot { + which = "$GOPATH" + } + log.Printf("scanning " + which) + defer log.Printf("scanned " + which) + } + dirScanMu.Lock() + if dirScan == nil { + dirScan = make(map[string]*pkg) + } + dirScanMu.Unlock() + + for _, srcDir := range build.Default.SrcDirs() { + isGoroot := srcDir == filepath.Join(build.Default.GOROOT, "src") + if isGoroot != goRoot { + continue + } + testHookScanDir(srcDir) + walkFn := func(path string, typ os.FileMode) error { + dir := filepath.Dir(path) + if typ.IsRegular() { + if dir == srcDir { + // Doesn't make sense to have regular files + // directly in your $GOPATH/src or $GOROOT/src. + return nil + } + if !strings.HasSuffix(path, ".go") { + return nil + } + dirScanMu.Lock() + if _, dup := dirScan[dir]; !dup { + importpath := filepath.ToSlash(dir[len(srcDir)+len("/"):]) + dirScan[dir] = &pkg{ + importPath: importpath, + importPathShort: vendorlessImportPath(importpath), + dir: dir, + } + } + dirScanMu.Unlock() + return nil + } + if typ == os.ModeDir { + base := filepath.Base(path) + if base == "" || base[0] == '.' || base[0] == '_' || + base == "testdata" || base == "node_modules" { + return filepath.SkipDir + } + fi, err := os.Lstat(path) + if err == nil && skipDir(fi) { + if Debug { + log.Printf("skipping directory %q under %s", fi.Name(), dir) + } + return filepath.SkipDir + } + return nil + } + if typ == os.ModeSymlink { + base := filepath.Base(path) + if strings.HasPrefix(base, ".#") { + // Emacs noise. + return nil + } + fi, err := os.Lstat(path) + if err != nil { + // Just ignore it. + return nil + } + if shouldTraverse(dir, fi) { + return traverseLink + } + } + return nil + } + if err := fastWalk(srcDir, walkFn); err != nil { + log.Printf("goimports: scanning directory %v: %v", srcDir, err) + } + } +} + +// vendorlessImportPath returns the devendorized version of the provided import path. +// e.g. "foo/bar/vendor/a/b" => "a/b" +func vendorlessImportPath(ipath string) string { + // Devendorize for use in import statement. + if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { + return ipath[i+len("/vendor/"):] + } + if strings.HasPrefix(ipath, "vendor/") { + return ipath[len("vendor/"):] + } + return ipath +} + +// loadExports returns the set of exported symbols in the package at dir. +// It returns nil on error or if the package name in dir does not match expectPackage. +var loadExports func(expectPackage, dir string) map[string]bool = loadExportsGoPath + +func loadExportsGoPath(expectPackage, dir string) map[string]bool { + if Debug { + log.Printf("loading exports in dir %s (seeking package %s)", dir, expectPackage) + } + exports := make(map[string]bool) + + ctx := build.Default + + // ReadDir is like ioutil.ReadDir, but only returns *.go files + // and filters out _test.go files since they're not relevant + // and only slow things down. 
+ ctx.ReadDir = func(dir string) (notTests []os.FileInfo, err error) { + all, err := ioutil.ReadDir(dir) + if err != nil { + return nil, err + } + notTests = all[:0] + for _, fi := range all { + name := fi.Name() + if strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go") { + notTests = append(notTests, fi) + } + } + return notTests, nil + } + + files, err := ctx.ReadDir(dir) + if err != nil { + log.Print(err) + return nil + } + + fset := token.NewFileSet() + + for _, fi := range files { + match, err := ctx.MatchFile(dir, fi.Name()) + if err != nil || !match { + continue + } + fullFile := filepath.Join(dir, fi.Name()) + f, err := parser.ParseFile(fset, fullFile, nil, 0) + if err != nil { + if Debug { + log.Printf("Parsing %s: %v", fullFile, err) + } + return nil + } + pkgName := f.Name.Name + if pkgName == "documentation" { + // Special case from go/build.ImportDir, not + // handled by ctx.MatchFile. + continue + } + if pkgName != expectPackage { + if Debug { + log.Printf("scan of dir %v is not expected package %v (actually %v)", dir, expectPackage, pkgName) + } + return nil + } + for name := range f.Scope.Objects { + if ast.IsExported(name) { + exports[name] = true + } + } + } + + if Debug { + exportList := make([]string, 0, len(exports)) + for k := range exports { + exportList = append(exportList, k) + } + sort.Strings(exportList) + log.Printf("loaded exports in dir %v (package %v): %v", dir, expectPackage, strings.Join(exportList, ", ")) + } + return exports +} + +// findImport searches for a package with the given symbols. +// If no package is found, findImport returns ("", false, nil) +// +// This is declared as a variable rather than a function so goimports +// can be easily extended by adding a file with an init function. +// +// The rename value tells goimports whether to use the package name as +// a local qualifier in an import. For example, if findImports("pkg", +// "X") returns ("foo/bar", rename=true), then goimports adds the +// import line: +// import pkg "foo/bar" +// to satisfy uses of pkg.X in the file. +var findImport func(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) = findImportGoPath + +// findImportGoPath is the normal implementation of findImport. +// (Some companies have their own internally.) +func findImportGoPath(pkgName string, symbols map[string]bool, filename string) (foundPkg string, rename bool, err error) { + if inTests { + testMu.RLock() + defer testMu.RUnlock() + } + + pkgDir, err := filepath.Abs(filename) + if err != nil { + return "", false, err + } + pkgDir = filepath.Dir(pkgDir) + + // Fast path for the standard library. + // In the common case we hopefully never have to scan the GOPATH, which can + // be slow with moving disks. + if pkg, rename, ok := findImportStdlib(pkgName, symbols); ok { + return pkg, rename, nil + } + if pkgName == "rand" && symbols["Read"] { + // Special-case rand.Read. + // + // If findImportStdlib didn't find it above, don't go + // searching for it, lest it find and pick math/rand + // in GOROOT (new as of Go 1.6) + // + // crypto/rand is the safer choice. + return "", false, nil + } + + // TODO(sameer): look at the import lines for other Go files in the + // local directory, since the user is likely to import the same packages + // in the current Go file. Return rename=true when the other Go files + // use a renamed package that's also used in the current file. + + // Read all the $GOPATH/src/.goimportsignore files before scanning directories. 
+ populateIgnoreOnce.Do(populateIgnore) + + // Start scanning the $GOROOT asynchronously, then run the + // GOPATH scan synchronously if needed, and then wait for the + // $GOROOT to finish. + // + // TODO(bradfitz): run each $GOPATH entry async. But nobody + // really has more than one anyway, so low priority. + scanGoRootOnce.Do(scanGoRoot) // async + if !fileInDir(filename, build.Default.GOROOT) { + scanGoPathOnce.Do(scanGoPath) // blocking + } + <-scanGoRootDone + + // Find candidate packages, looking only at their directory names first. + var candidates []*pkg + for _, pkg := range dirScan { + if pkgIsCandidate(filename, pkgName, pkg) { + pkg.distance = distance(pkgDir, pkg.dir) + candidates = append(candidates, pkg) + } + } + + // Sort the candidates by their import package length, + // assuming that shorter package names are better than long + // ones. Note that this sorts by the de-vendored name, so + // there's no "penalty" for vendoring. + sort.Sort(byDistanceOrImportPathShortLength(candidates)) + if Debug { + for i, pkg := range candidates { + log.Printf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), pkg.importPathShort, pkg.dir) + } + } + + // Collect exports for packages with matching names. + + done := make(chan struct{}) // closed when we find the answer + defer close(done) + + rescv := make([]chan *pkg, len(candidates)) + for i := range candidates { + rescv[i] = make(chan *pkg) + } + const maxConcurrentPackageImport = 4 + loadExportsSem := make(chan struct{}, maxConcurrentPackageImport) + + go func() { + for i, pkg := range candidates { + select { + case loadExportsSem <- struct{}{}: + select { + case <-done: + return + default: + } + case <-done: + return + } + pkg := pkg + resc := rescv[i] + go func() { + if inTests { + testMu.RLock() + defer testMu.RUnlock() + } + defer func() { <-loadExportsSem }() + exports := loadExports(pkgName, pkg.dir) + + // If it doesn't have the right + // symbols, send nil to mean no match. + for symbol := range symbols { + if !exports[symbol] { + pkg = nil + break + } + } + select { + case resc <- pkg: + case <-done: + } + }() + } + }() + for _, resc := range rescv { + pkg := <-resc + if pkg == nil { + continue + } + // If the package name in the source doesn't match the import path's base, + // return true so the rewriter adds a name (import foo "github.com/bar/go-foo") + needsRename := path.Base(pkg.importPath) != pkgName + return pkg.importPathShort, needsRename, nil + } + return "", false, nil +} + +// pkgIsCandidate reports whether pkg is a candidate for satisfying the +// finding which package pkgIdent in the file named by filename is trying +// to refer to. +// +// This check is purely lexical and is meant to be as fast as possible +// because it's run over all $GOPATH directories to filter out poor +// candidates in order to limit the CPU and I/O later parsing the +// exports in candidate packages. +// +// filename is the file being formatted. +// pkgIdent is the package being searched for, like "client" (if +// searching for "client.New") +func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool { + // Check "internal" and "vendor" visibility: + if !canUse(filename, pkg.dir) { + return false + } + + // Speed optimization to minimize disk I/O: + // the last two components on disk must contain the + // package name somewhere. 
+ // + // This permits mismatch naming like directory + // "go-foo" being package "foo", or "pkg.v3" being "pkg", + // or directory "google.golang.org/api/cloudbilling/v1" + // being package "cloudbilling", but doesn't + // permit a directory "foo" to be package + // "bar", which is strongly discouraged + // anyway. There's no reason goimports needs + // to be slow just to accomodate that. + lastTwo := lastTwoComponents(pkg.importPathShort) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) { + lastTwo = lowerASCIIAndRemoveHyphen(lastTwo) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + } + + return false +} + +func hasHyphenOrUpperASCII(s string) bool { + for i := 0; i < len(s); i++ { + b := s[i] + if b == '-' || ('A' <= b && b <= 'Z') { + return true + } + } + return false +} + +func lowerASCIIAndRemoveHyphen(s string) (ret string) { + buf := make([]byte, 0, len(s)) + for i := 0; i < len(s); i++ { + b := s[i] + switch { + case b == '-': + continue + case 'A' <= b && b <= 'Z': + buf = append(buf, b+('a'-'A')) + default: + buf = append(buf, b) + } + } + return string(buf) +} + +// canUse reports whether the package in dir is usable from filename, +// respecting the Go "internal" and "vendor" visibility rules. +func canUse(filename, dir string) bool { + // Fast path check, before any allocations. If it doesn't contain vendor + // or internal, it's not tricky: + // Note that this can false-negative on directories like "notinternal", + // but we check it correctly below. This is just a fast path. + if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") { + return true + } + + dirSlash := filepath.ToSlash(dir) + if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") { + return true + } + // Vendor or internal directory only visible from children of parent. + // That means the path from the current directory to the target directory + // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal + // or bar/vendor or bar/internal. + // After stripping all the leading ../, the only okay place to see vendor or internal + // is at the very beginning of the path. + absfile, err := filepath.Abs(filename) + if err != nil { + return false + } + absdir, err := filepath.Abs(dir) + if err != nil { + return false + } + rel, err := filepath.Rel(absfile, absdir) + if err != nil { + return false + } + relSlash := filepath.ToSlash(rel) + if i := strings.LastIndex(relSlash, "../"); i >= 0 { + relSlash = relSlash[i+len("../"):] + } + return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal") +} + +// lastTwoComponents returns at most the last two path components +// of v, using either / or \ as the path separator. +func lastTwoComponents(v string) string { + nslash := 0 + for i := len(v) - 1; i >= 0; i-- { + if v[i] == '/' || v[i] == '\\' { + nslash++ + if nslash == 2 { + return v[i:] + } + } + } + return v +} + +type visitFn func(node ast.Node) ast.Visitor + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + return fn(node) +} + +func findImportStdlib(shortPkg string, symbols map[string]bool) (importPath string, rename, ok bool) { + for symbol := range symbols { + key := shortPkg + "." 
+ symbol + path := stdlib[key] + if path == "" { + if key == "rand.Read" { + continue + } + return "", false, false + } + if importPath != "" && importPath != path { + // Ambiguous. Symbols pointed to different things. + return "", false, false + } + importPath = path + } + if importPath == "" && shortPkg == "rand" && symbols["Read"] { + return "crypto/rand", false, true + } + return importPath, false, importPath != "" +} + +// fileInDir reports whether the provided file path looks like +// it's in dir. (without hitting the filesystem) +func fileInDir(file, dir string) bool { + rest := strings.TrimPrefix(file, dir) + if len(rest) == len(file) { + // dir is not a prefix of file. + return false + } + // Check for boundary: either nothing (file == dir), or a slash. + return len(rest) == 0 || rest[0] == '/' || rest[0] == '\\' +} diff --git a/vendor/golang.org/x/tools/imports/imports.go b/vendor/golang.org/x/tools/imports/imports.go new file mode 100644 index 0000000..d789bb5 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/imports.go @@ -0,0 +1,297 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run mkstdlib.go + +// Package imports implements a Go pretty-printer (like package "go/format") +// that also adds or removes import statements as necessary. +package imports // import "golang.org/x/tools/imports" + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/printer" + "go/token" + "io" + "regexp" + "strconv" + "strings" + + "golang.org/x/tools/go/ast/astutil" +) + +// Options specifies options for processing files. +type Options struct { + Fragment bool // Accept fragment of a source file (no package statement) + AllErrors bool // Report all errors (not just the first 10 on different lines) + + Comments bool // Print comments (true if nil *Options provided) + TabIndent bool // Use tabs for indent (true if nil *Options provided) + TabWidth int // Tab width (8 if nil *Options provided) + + FormatOnly bool // Disable the insertion and deletion of imports +} + +// Process formats and adjusts imports for the provided file. +// If opt is nil the defaults are used. +// +// Note that filename's directory influences which imports can be chosen, +// so it is important that filename be accurate. +// To process data ``as if'' it were in filename, pass the data as a non-nil src. +func Process(filename string, src []byte, opt *Options) ([]byte, error) { + if opt == nil { + opt = &Options{Comments: true, TabIndent: true, TabWidth: 8} + } + + fileSet := token.NewFileSet() + file, adjust, err := parse(fileSet, filename, src, opt) + if err != nil { + return nil, err + } + + if !opt.FormatOnly { + _, err = fixImports(fileSet, file, filename) + if err != nil { + return nil, err + } + } + + sortImports(fileSet, file) + imps := astutil.Imports(fileSet, file) + + var spacesBefore []string // import paths we need spaces before + for _, impSection := range imps { + // Within each block of contiguous imports, see if any + // import lines are in different group numbers. If so, + // we'll need to put a space between them so it's + // compatible with gofmt. 
+ lastGroup := -1 + for _, importSpec := range impSection { + importPath, _ := strconv.Unquote(importSpec.Path.Value) + groupNum := importGroup(importPath) + if groupNum != lastGroup && lastGroup != -1 { + spacesBefore = append(spacesBefore, importPath) + } + lastGroup = groupNum + } + + } + + printerMode := printer.UseSpaces + if opt.TabIndent { + printerMode |= printer.TabIndent + } + printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth} + + var buf bytes.Buffer + err = printConfig.Fprint(&buf, fileSet, file) + if err != nil { + return nil, err + } + out := buf.Bytes() + if adjust != nil { + out = adjust(src, out) + } + if len(spacesBefore) > 0 { + out, err = addImportSpaces(bytes.NewReader(out), spacesBefore) + if err != nil { + return nil, err + } + } + + out, err = format.Source(out) + if err != nil { + return nil, err + } + return out, nil +} + +// parse parses src, which was read from filename, +// as a Go source file or statement list. +func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) { + parserMode := parser.Mode(0) + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + + // Try as whole source file. + file, err := parser.ParseFile(fset, filename, src, parserMode) + if err == nil { + return file, nil, nil + } + // If the error is that the source file didn't begin with a + // package line and we accept fragmented input, fall through to + // try as a source fragment. Stop and return on any other error. + if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") { + return nil, nil, err + } + + // If this is a declaration list, make it a source file + // by inserting a package clause. + // Insert using a ;, not a newline, so that the line numbers + // in psrc match the ones in src. + psrc := append([]byte("package main;"), src...) + file, err = parser.ParseFile(fset, filename, psrc, parserMode) + if err == nil { + // If a main function exists, we will assume this is a main + // package and leave the file. + if containsMainFunc(file) { + return file, nil, nil + } + + adjust := func(orig, src []byte) []byte { + // Remove the package clause. + // Gofmt has turned the ; into a \n. + src = src[len("package main\n"):] + return matchSpace(orig, src) + } + return file, adjust, nil + } + // If the error is that the source file didn't begin with a + // declaration, fall through to try as a statement list. + // Stop and return on any other error. + if !strings.Contains(err.Error(), "expected declaration") { + return nil, nil, err + } + + // If this is a statement list, make it a source file + // by inserting a package clause and turning the list + // into a function body. This handles expressions too. + // Insert using a ;, not a newline, so that the line numbers + // in fsrc match the ones in src. + fsrc := append(append([]byte("package p; func _() {"), src...), '}') + file, err = parser.ParseFile(fset, filename, fsrc, parserMode) + if err == nil { + adjust := func(orig, src []byte) []byte { + // Remove the wrapping. + // Gofmt has turned the ; into a \n\n. + src = src[len("package p\n\nfunc _() {"):] + src = src[:len(src)-len("}\n")] + // Gofmt has also indented the function body one level. + // Remove that indent. + src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1) + return matchSpace(orig, src) + } + return file, adjust, nil + } + + // Failed, and out of options. 
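Stepping back from the internals, Process is the exported entry point that ties fixImports, sortImports, and the printer together. A minimal caller sketch follows; the source fragment, filename, and the expectation that the unused "os" import is dropped while "fmt" is added are assumptions that simply restate what fixImports is documented to do.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/imports"
)

func main() {
	// A file with one unused import ("os") and one missing import ("fmt").
	src := []byte(`package demo

import "os"

func greet() { fmt.Println("hi") }
`)
	// With FormatOnly left false, Process both gofmt-formats the source and
	// adjusts the import block.
	out, err := imports.Process("demo.go", src, &imports.Options{
		Comments:  true,
		TabIndent: true,
		TabWidth:  8,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s", out)
}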
+ return nil, nil, err +} + +// containsMainFunc checks if a file contains a function declaration with the +// function signature 'func main()' +func containsMainFunc(file *ast.File) bool { + for _, decl := range file.Decls { + if f, ok := decl.(*ast.FuncDecl); ok { + if f.Name.Name != "main" { + continue + } + + if len(f.Type.Params.List) != 0 { + continue + } + + if f.Type.Results != nil && len(f.Type.Results.List) != 0 { + continue + } + + return true + } + } + + return false +} + +func cutSpace(b []byte) (before, middle, after []byte) { + i := 0 + for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') { + i++ + } + j := len(b) + for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') { + j-- + } + if i <= j { + return b[:i], b[i:j], b[j:] + } + return nil, nil, b[j:] +} + +// matchSpace reformats src to use the same space context as orig. +// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src. +// 2) matchSpace copies the indentation of the first non-blank line in orig +// to every non-blank line in src. +// 3) matchSpace copies the trailing space from orig and uses it in place +// of src's trailing space. +func matchSpace(orig []byte, src []byte) []byte { + before, _, after := cutSpace(orig) + i := bytes.LastIndex(before, []byte{'\n'}) + before, indent := before[:i+1], before[i+1:] + + _, src, _ = cutSpace(src) + + var b bytes.Buffer + b.Write(before) + for len(src) > 0 { + line := src + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line, src = line[:i+1], line[i+1:] + } else { + src = nil + } + if len(line) > 0 && line[0] != '\n' { // not blank + b.Write(indent) + } + b.Write(line) + } + b.Write(after) + return b.Bytes() +} + +var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`) + +// Used to set Scanner buffer size so that large tokens can be handled. +// see https://github.com/golang/go/issues/18201 +const maxScanTokenSize = bufio.MaxScanTokenSize * 16 + +func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) { + var out bytes.Buffer + sc := bufio.NewScanner(r) + sc.Buffer(nil, maxScanTokenSize) + inImports := false + done := false + for sc.Scan() { + s := sc.Text() + + if !inImports && !done && strings.HasPrefix(s, "import") { + inImports = true + } + if inImports && (strings.HasPrefix(s, "var") || + strings.HasPrefix(s, "func") || + strings.HasPrefix(s, "const") || + strings.HasPrefix(s, "type")) { + done = true + inImports = false + } + if inImports && len(breaks) > 0 { + if m := impLine.FindStringSubmatch(s); m != nil { + if m[1] == breaks[0] { + out.WriteByte('\n') + breaks = breaks[1:] + } + } + } + + fmt.Fprintln(&out, s) + } + return out.Bytes(), sc.Err() +} diff --git a/vendor/golang.org/x/tools/imports/mkindex.go b/vendor/golang.org/x/tools/imports/mkindex.go new file mode 100644 index 0000000..755e239 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/mkindex.go @@ -0,0 +1,173 @@ +// +build ignore + +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Command mkindex creates the file "pkgindex.go" containing an index of the Go +// standard library. The file is intended to be built as part of the imports +// package, so that the package may be used in environments where a GOROOT is +// not available (such as App Engine). 
+package main + +import ( + "bytes" + "fmt" + "go/ast" + "go/build" + "go/format" + "go/parser" + "go/token" + "io/ioutil" + "log" + "os" + "path" + "path/filepath" + "strings" +) + +var ( + pkgIndex = make(map[string][]pkg) + exports = make(map[string]map[string]bool) +) + +func main() { + // Don't use GOPATH. + ctx := build.Default + ctx.GOPATH = "" + + // Populate pkgIndex global from GOROOT. + for _, path := range ctx.SrcDirs() { + f, err := os.Open(path) + if err != nil { + log.Print(err) + continue + } + children, err := f.Readdir(-1) + f.Close() + if err != nil { + log.Print(err) + continue + } + for _, child := range children { + if child.IsDir() { + loadPkg(path, child.Name()) + } + } + } + // Populate exports global. + for _, ps := range pkgIndex { + for _, p := range ps { + e := loadExports(p.dir) + if e != nil { + exports[p.dir] = e + } + } + } + + // Construct source file. + var buf bytes.Buffer + fmt.Fprint(&buf, pkgIndexHead) + fmt.Fprintf(&buf, "var pkgIndexMaster = %#v\n", pkgIndex) + fmt.Fprintf(&buf, "var exportsMaster = %#v\n", exports) + src := buf.Bytes() + + // Replace main.pkg type name with pkg. + src = bytes.Replace(src, []byte("main.pkg"), []byte("pkg"), -1) + // Replace actual GOROOT with "/go". + src = bytes.Replace(src, []byte(ctx.GOROOT), []byte("/go"), -1) + // Add some line wrapping. + src = bytes.Replace(src, []byte("}, "), []byte("},\n"), -1) + src = bytes.Replace(src, []byte("true, "), []byte("true,\n"), -1) + + var err error + src, err = format.Source(src) + if err != nil { + log.Fatal(err) + } + + // Write out source file. + err = ioutil.WriteFile("pkgindex.go", src, 0644) + if err != nil { + log.Fatal(err) + } +} + +const pkgIndexHead = `package imports + +func init() { + pkgIndexOnce.Do(func() { + pkgIndex.m = pkgIndexMaster + }) + loadExports = func(dir string) map[string]bool { + return exportsMaster[dir] + } +} +` + +type pkg struct { + importpath string // full pkg import path, e.g. "net/http" + dir string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt" +} + +var fset = token.NewFileSet() + +func loadPkg(root, importpath string) { + shortName := path.Base(importpath) + if shortName == "testdata" { + return + } + + dir := filepath.Join(root, importpath) + pkgIndex[shortName] = append(pkgIndex[shortName], pkg{ + importpath: importpath, + dir: dir, + }) + + pkgDir, err := os.Open(dir) + if err != nil { + return + } + children, err := pkgDir.Readdir(-1) + pkgDir.Close() + if err != nil { + return + } + for _, child := range children { + name := child.Name() + if name == "" { + continue + } + if c := name[0]; c == '.' 
|| ('0' <= c && c <= '9') { + continue + } + if child.IsDir() { + loadPkg(root, filepath.Join(importpath, name)) + } + } +} + +func loadExports(dir string) map[string]bool { + exports := make(map[string]bool) + buildPkg, err := build.ImportDir(dir, 0) + if err != nil { + if strings.Contains(err.Error(), "no buildable Go source files in") { + return nil + } + log.Printf("could not import %q: %v", dir, err) + return nil + } + for _, file := range buildPkg.GoFiles { + f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0) + if err != nil { + log.Printf("could not parse %q: %v", file, err) + continue + } + for name := range f.Scope.Objects { + if ast.IsExported(name) { + exports[name] = true + } + } + } + return exports +} diff --git a/vendor/golang.org/x/tools/imports/mkstdlib.go b/vendor/golang.org/x/tools/imports/mkstdlib.go new file mode 100644 index 0000000..02e4727 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/mkstdlib.go @@ -0,0 +1,107 @@ +// +build ignore + +// mkstdlib generates the zstdlib.go file, containing the Go standard +// library API symbols. It's baked into the binary to avoid scanning +// GOPATH in the common case. +package main + +import ( + "bufio" + "bytes" + "fmt" + "go/format" + "io" + "io/ioutil" + "log" + "os" + "path" + "path/filepath" + "regexp" + "runtime" + "sort" + "strings" +) + +func mustOpen(name string) io.Reader { + f, err := os.Open(name) + if err != nil { + log.Fatal(err) + } + return f +} + +func api(base string) string { + return filepath.Join(runtime.GOROOT(), "api", base) +} + +var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`) + +func main() { + var buf bytes.Buffer + outf := func(format string, args ...interface{}) { + fmt.Fprintf(&buf, format, args...) + } + outf("// Code generated by mkstdlib.go. DO NOT EDIT.\n\n") + outf("package imports\n") + outf("var stdlib = map[string]string{\n") + f := io.MultiReader( + mustOpen(api("go1.txt")), + mustOpen(api("go1.1.txt")), + mustOpen(api("go1.2.txt")), + mustOpen(api("go1.3.txt")), + mustOpen(api("go1.4.txt")), + mustOpen(api("go1.5.txt")), + mustOpen(api("go1.6.txt")), + mustOpen(api("go1.7.txt")), + mustOpen(api("go1.8.txt")), + mustOpen(api("go1.9.txt")), + mustOpen(api("go1.10.txt")), + ) + sc := bufio.NewScanner(f) + fullImport := map[string]string{} // "zip.NewReader" => "archive/zip" + ambiguous := map[string]bool{} + var keys []string + for sc.Scan() { + l := sc.Text() + has := func(v string) bool { return strings.Contains(l, v) } + if has("struct, ") || has("interface, ") || has(", method (") { + continue + } + if m := sym.FindStringSubmatch(l); m != nil { + full := m[1] + key := path.Base(full) + "." 
+ m[2] + if exist, ok := fullImport[key]; ok { + if exist != full { + ambiguous[key] = true + } + } else { + fullImport[key] = full + keys = append(keys, key) + } + } + } + if err := sc.Err(); err != nil { + log.Fatal(err) + } + sort.Strings(keys) + for _, key := range keys { + if ambiguous[key] { + outf("\t// %q is ambiguous\n", key) + } else { + outf("\t%q: %q,\n", key, fullImport[key]) + } + } + outf("\n") + for _, sym := range [...]string{"Alignof", "ArbitraryType", "Offsetof", "Pointer", "Sizeof"} { + outf("\t%q: %q,\n", "unsafe."+sym, "unsafe") + } + outf("}\n") + fmtbuf, err := format.Source(buf.Bytes()) + if err != nil { + log.Fatal(err) + } + err = ioutil.WriteFile("zstdlib.go", fmtbuf, 0666) + if err != nil { + log.Fatal(err) + } +} diff --git a/vendor/golang.org/x/tools/imports/sortimports.go b/vendor/golang.org/x/tools/imports/sortimports.go new file mode 100644 index 0000000..653afc5 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/sortimports.go @@ -0,0 +1,212 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Hacked up copy of go/ast/import.go + +package imports + +import ( + "go/ast" + "go/token" + "sort" + "strconv" +) + +// sortImports sorts runs of consecutive import lines in import blocks in f. +// It also removes duplicate imports when it is possible to do so without data loss. +func sortImports(fset *token.FileSet, f *ast.File) { + for i, d := range f.Decls { + d, ok := d.(*ast.GenDecl) + if !ok || d.Tok != token.IMPORT { + // Not an import declaration, so we're done. + // Imports are always first. + break + } + + if len(d.Specs) == 0 { + // Empty import block, remove it. + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + } + + if !d.Lparen.IsValid() { + // Not a block: sorted by default. + continue + } + + // Identify and sort runs of specs on successive lines. + i := 0 + specs := d.Specs[:0] + for j, s := range d.Specs { + if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line { + // j begins a new run. End this one. + specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...) + i = j + } + } + specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...) + d.Specs = specs + + // Deduping can leave a blank line before the rparen; clean that up. + if len(d.Specs) > 0 { + lastSpec := d.Specs[len(d.Specs)-1] + lastLine := fset.Position(lastSpec.Pos()).Line + if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 { + fset.File(d.Rparen).MergeLine(rParenLine - 1) + } + } + } +} + +func importPath(s ast.Spec) string { + t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value) + if err == nil { + return t + } + return "" +} + +func importName(s ast.Spec) string { + n := s.(*ast.ImportSpec).Name + if n == nil { + return "" + } + return n.Name +} + +func importComment(s ast.Spec) string { + c := s.(*ast.ImportSpec).Comment + if c == nil { + return "" + } + return c.Text() +} + +// collapse indicates whether prev may be removed, leaving only next. +func collapse(prev, next ast.Spec) bool { + if importPath(next) != importPath(prev) || importName(next) != importName(prev) { + return false + } + return prev.(*ast.ImportSpec).Comment == nil +} + +type posSpan struct { + Start token.Pos + End token.Pos +} + +func sortSpecs(fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec { + // Can't short-circuit here even if specs are already sorted, + // since they might yet need deduplication. 
+ // A lone import, however, may be safely ignored. + if len(specs) <= 1 { + return specs + } + + // Record positions for specs. + pos := make([]posSpan, len(specs)) + for i, s := range specs { + pos[i] = posSpan{s.Pos(), s.End()} + } + + // Identify comments in this range. + // Any comment from pos[0].Start to the final line counts. + lastLine := fset.Position(pos[len(pos)-1].End).Line + cstart := len(f.Comments) + cend := len(f.Comments) + for i, g := range f.Comments { + if g.Pos() < pos[0].Start { + continue + } + if i < cstart { + cstart = i + } + if fset.Position(g.End()).Line > lastLine { + cend = i + break + } + } + comments := f.Comments[cstart:cend] + + // Assign each comment to the import spec preceding it. + importComment := map[*ast.ImportSpec][]*ast.CommentGroup{} + specIndex := 0 + for _, g := range comments { + for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() { + specIndex++ + } + s := specs[specIndex].(*ast.ImportSpec) + importComment[s] = append(importComment[s], g) + } + + // Sort the import specs by import path. + // Remove duplicates, when possible without data loss. + // Reassign the import paths to have the same position sequence. + // Reassign each comment to abut the end of its spec. + // Sort the comments by new position. + sort.Sort(byImportSpec(specs)) + + // Dedup. Thanks to our sorting, we can just consider + // adjacent pairs of imports. + deduped := specs[:0] + for i, s := range specs { + if i == len(specs)-1 || !collapse(s, specs[i+1]) { + deduped = append(deduped, s) + } else { + p := s.Pos() + fset.File(p).MergeLine(fset.Position(p).Line) + } + } + specs = deduped + + // Fix up comment positions + for i, s := range specs { + s := s.(*ast.ImportSpec) + if s.Name != nil { + s.Name.NamePos = pos[i].Start + } + s.Path.ValuePos = pos[i].Start + s.EndPos = pos[i].End + for _, g := range importComment[s] { + for _, c := range g.List { + c.Slash = pos[i].End + } + } + } + + sort.Sort(byCommentPos(comments)) + + return specs +} + +type byImportSpec []ast.Spec // slice of *ast.ImportSpec + +func (x byImportSpec) Len() int { return len(x) } +func (x byImportSpec) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x byImportSpec) Less(i, j int) bool { + ipath := importPath(x[i]) + jpath := importPath(x[j]) + + igroup := importGroup(ipath) + jgroup := importGroup(jpath) + if igroup != jgroup { + return igroup < jgroup + } + + if ipath != jpath { + return ipath < jpath + } + iname := importName(x[i]) + jname := importName(x[j]) + + if iname != jname { + return iname < jname + } + return importComment(x[i]) < importComment(x[j]) +} + +type byCommentPos []*ast.CommentGroup + +func (x byCommentPos) Len() int { return len(x) } +func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() } diff --git a/vendor/golang.org/x/tools/imports/zstdlib.go b/vendor/golang.org/x/tools/imports/zstdlib.go new file mode 100644 index 0000000..c590a4b --- /dev/null +++ b/vendor/golang.org/x/tools/imports/zstdlib.go @@ -0,0 +1,9734 @@ +// Code generated by mkstdlib.go. DO NOT EDIT. 
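Before the generated table itself, a sketch of its contract as consumed by findImportStdlib earlier in fix.go: keys are "shortpkg.Symbol", values are import paths, and ambiguous keys (such as rand.Read) are omitted and special-cased instead. The test name is hypothetical.

package imports

import "testing"

// Illustrative only: the generated stdlib map lets findImportStdlib resolve
// references like fmt.Println without touching the filesystem, while
// rand.Read (ambiguous between math/rand and crypto/rand) is resolved to
// crypto/rand by the special case rather than by table lookup.
func TestStdlibTableSketch(t *testing.T) {
	if got := stdlib["fmt.Println"]; got != "fmt" {
		t.Errorf(`stdlib["fmt.Println"] = %q, want "fmt"`, got)
	}
	path, rename, ok := findImportStdlib("rand", map[string]bool{"Read": true})
	if !ok || rename || path != "crypto/rand" {
		t.Errorf("findImportStdlib(rand, Read) = %q, %v, %v; want crypto/rand, false, true", path, rename, ok)
	}
}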
+ +package imports + +var stdlib = map[string]string{ + "adler32.Checksum": "hash/adler32", + "adler32.New": "hash/adler32", + "adler32.Size": "hash/adler32", + "aes.BlockSize": "crypto/aes", + "aes.KeySizeError": "crypto/aes", + "aes.NewCipher": "crypto/aes", + "ascii85.CorruptInputError": "encoding/ascii85", + "ascii85.Decode": "encoding/ascii85", + "ascii85.Encode": "encoding/ascii85", + "ascii85.MaxEncodedLen": "encoding/ascii85", + "ascii85.NewDecoder": "encoding/ascii85", + "ascii85.NewEncoder": "encoding/ascii85", + "asn1.BitString": "encoding/asn1", + "asn1.ClassApplication": "encoding/asn1", + "asn1.ClassContextSpecific": "encoding/asn1", + "asn1.ClassPrivate": "encoding/asn1", + "asn1.ClassUniversal": "encoding/asn1", + "asn1.Enumerated": "encoding/asn1", + "asn1.Flag": "encoding/asn1", + "asn1.Marshal": "encoding/asn1", + "asn1.MarshalWithParams": "encoding/asn1", + "asn1.NullBytes": "encoding/asn1", + "asn1.NullRawValue": "encoding/asn1", + "asn1.ObjectIdentifier": "encoding/asn1", + "asn1.RawContent": "encoding/asn1", + "asn1.RawValue": "encoding/asn1", + "asn1.StructuralError": "encoding/asn1", + "asn1.SyntaxError": "encoding/asn1", + "asn1.TagBitString": "encoding/asn1", + "asn1.TagBoolean": "encoding/asn1", + "asn1.TagEnum": "encoding/asn1", + "asn1.TagGeneralString": "encoding/asn1", + "asn1.TagGeneralizedTime": "encoding/asn1", + "asn1.TagIA5String": "encoding/asn1", + "asn1.TagInteger": "encoding/asn1", + "asn1.TagNull": "encoding/asn1", + "asn1.TagNumericString": "encoding/asn1", + "asn1.TagOID": "encoding/asn1", + "asn1.TagOctetString": "encoding/asn1", + "asn1.TagPrintableString": "encoding/asn1", + "asn1.TagSequence": "encoding/asn1", + "asn1.TagSet": "encoding/asn1", + "asn1.TagT61String": "encoding/asn1", + "asn1.TagUTCTime": "encoding/asn1", + "asn1.TagUTF8String": "encoding/asn1", + "asn1.Unmarshal": "encoding/asn1", + "asn1.UnmarshalWithParams": "encoding/asn1", + "ast.ArrayType": "go/ast", + "ast.AssignStmt": "go/ast", + "ast.Bad": "go/ast", + "ast.BadDecl": "go/ast", + "ast.BadExpr": "go/ast", + "ast.BadStmt": "go/ast", + "ast.BasicLit": "go/ast", + "ast.BinaryExpr": "go/ast", + "ast.BlockStmt": "go/ast", + "ast.BranchStmt": "go/ast", + "ast.CallExpr": "go/ast", + "ast.CaseClause": "go/ast", + "ast.ChanDir": "go/ast", + "ast.ChanType": "go/ast", + "ast.CommClause": "go/ast", + "ast.Comment": "go/ast", + "ast.CommentGroup": "go/ast", + "ast.CommentMap": "go/ast", + "ast.CompositeLit": "go/ast", + "ast.Con": "go/ast", + "ast.DeclStmt": "go/ast", + "ast.DeferStmt": "go/ast", + "ast.Ellipsis": "go/ast", + "ast.EmptyStmt": "go/ast", + "ast.ExprStmt": "go/ast", + "ast.Field": "go/ast", + "ast.FieldFilter": "go/ast", + "ast.FieldList": "go/ast", + "ast.File": "go/ast", + "ast.FileExports": "go/ast", + "ast.Filter": "go/ast", + "ast.FilterDecl": "go/ast", + "ast.FilterFile": "go/ast", + "ast.FilterFuncDuplicates": "go/ast", + "ast.FilterImportDuplicates": "go/ast", + "ast.FilterPackage": "go/ast", + "ast.FilterUnassociatedComments": "go/ast", + "ast.ForStmt": "go/ast", + "ast.Fprint": "go/ast", + "ast.Fun": "go/ast", + "ast.FuncDecl": "go/ast", + "ast.FuncLit": "go/ast", + "ast.FuncType": "go/ast", + "ast.GenDecl": "go/ast", + "ast.GoStmt": "go/ast", + "ast.Ident": "go/ast", + "ast.IfStmt": "go/ast", + "ast.ImportSpec": "go/ast", + "ast.Importer": "go/ast", + "ast.IncDecStmt": "go/ast", + "ast.IndexExpr": "go/ast", + "ast.Inspect": "go/ast", + "ast.InterfaceType": "go/ast", + "ast.IsExported": "go/ast", + "ast.KeyValueExpr": "go/ast", + "ast.LabeledStmt": "go/ast", + 
"ast.Lbl": "go/ast", + "ast.MapType": "go/ast", + "ast.MergeMode": "go/ast", + "ast.MergePackageFiles": "go/ast", + "ast.NewCommentMap": "go/ast", + "ast.NewIdent": "go/ast", + "ast.NewObj": "go/ast", + "ast.NewPackage": "go/ast", + "ast.NewScope": "go/ast", + "ast.Node": "go/ast", + "ast.NotNilFilter": "go/ast", + "ast.ObjKind": "go/ast", + "ast.Object": "go/ast", + "ast.Package": "go/ast", + "ast.PackageExports": "go/ast", + "ast.ParenExpr": "go/ast", + "ast.Pkg": "go/ast", + "ast.Print": "go/ast", + "ast.RECV": "go/ast", + "ast.RangeStmt": "go/ast", + "ast.ReturnStmt": "go/ast", + "ast.SEND": "go/ast", + "ast.Scope": "go/ast", + "ast.SelectStmt": "go/ast", + "ast.SelectorExpr": "go/ast", + "ast.SendStmt": "go/ast", + "ast.SliceExpr": "go/ast", + "ast.SortImports": "go/ast", + "ast.StarExpr": "go/ast", + "ast.StructType": "go/ast", + "ast.SwitchStmt": "go/ast", + "ast.Typ": "go/ast", + "ast.TypeAssertExpr": "go/ast", + "ast.TypeSpec": "go/ast", + "ast.TypeSwitchStmt": "go/ast", + "ast.UnaryExpr": "go/ast", + "ast.ValueSpec": "go/ast", + "ast.Var": "go/ast", + "ast.Visitor": "go/ast", + "ast.Walk": "go/ast", + "atomic.AddInt32": "sync/atomic", + "atomic.AddInt64": "sync/atomic", + "atomic.AddUint32": "sync/atomic", + "atomic.AddUint64": "sync/atomic", + "atomic.AddUintptr": "sync/atomic", + "atomic.CompareAndSwapInt32": "sync/atomic", + "atomic.CompareAndSwapInt64": "sync/atomic", + "atomic.CompareAndSwapPointer": "sync/atomic", + "atomic.CompareAndSwapUint32": "sync/atomic", + "atomic.CompareAndSwapUint64": "sync/atomic", + "atomic.CompareAndSwapUintptr": "sync/atomic", + "atomic.LoadInt32": "sync/atomic", + "atomic.LoadInt64": "sync/atomic", + "atomic.LoadPointer": "sync/atomic", + "atomic.LoadUint32": "sync/atomic", + "atomic.LoadUint64": "sync/atomic", + "atomic.LoadUintptr": "sync/atomic", + "atomic.StoreInt32": "sync/atomic", + "atomic.StoreInt64": "sync/atomic", + "atomic.StorePointer": "sync/atomic", + "atomic.StoreUint32": "sync/atomic", + "atomic.StoreUint64": "sync/atomic", + "atomic.StoreUintptr": "sync/atomic", + "atomic.SwapInt32": "sync/atomic", + "atomic.SwapInt64": "sync/atomic", + "atomic.SwapPointer": "sync/atomic", + "atomic.SwapUint32": "sync/atomic", + "atomic.SwapUint64": "sync/atomic", + "atomic.SwapUintptr": "sync/atomic", + "atomic.Value": "sync/atomic", + "base32.CorruptInputError": "encoding/base32", + "base32.Encoding": "encoding/base32", + "base32.HexEncoding": "encoding/base32", + "base32.NewDecoder": "encoding/base32", + "base32.NewEncoder": "encoding/base32", + "base32.NewEncoding": "encoding/base32", + "base32.NoPadding": "encoding/base32", + "base32.StdEncoding": "encoding/base32", + "base32.StdPadding": "encoding/base32", + "base64.CorruptInputError": "encoding/base64", + "base64.Encoding": "encoding/base64", + "base64.NewDecoder": "encoding/base64", + "base64.NewEncoder": "encoding/base64", + "base64.NewEncoding": "encoding/base64", + "base64.NoPadding": "encoding/base64", + "base64.RawStdEncoding": "encoding/base64", + "base64.RawURLEncoding": "encoding/base64", + "base64.StdEncoding": "encoding/base64", + "base64.StdPadding": "encoding/base64", + "base64.URLEncoding": "encoding/base64", + "big.Above": "math/big", + "big.Accuracy": "math/big", + "big.AwayFromZero": "math/big", + "big.Below": "math/big", + "big.ErrNaN": "math/big", + "big.Exact": "math/big", + "big.Float": "math/big", + "big.Int": "math/big", + "big.Jacobi": "math/big", + "big.MaxBase": "math/big", + "big.MaxExp": "math/big", + "big.MaxPrec": "math/big", + "big.MinExp": "math/big", + 
"big.NewFloat": "math/big", + "big.NewInt": "math/big", + "big.NewRat": "math/big", + "big.ParseFloat": "math/big", + "big.Rat": "math/big", + "big.RoundingMode": "math/big", + "big.ToNearestAway": "math/big", + "big.ToNearestEven": "math/big", + "big.ToNegativeInf": "math/big", + "big.ToPositiveInf": "math/big", + "big.ToZero": "math/big", + "big.Word": "math/big", + "binary.BigEndian": "encoding/binary", + "binary.ByteOrder": "encoding/binary", + "binary.LittleEndian": "encoding/binary", + "binary.MaxVarintLen16": "encoding/binary", + "binary.MaxVarintLen32": "encoding/binary", + "binary.MaxVarintLen64": "encoding/binary", + "binary.PutUvarint": "encoding/binary", + "binary.PutVarint": "encoding/binary", + "binary.Read": "encoding/binary", + "binary.ReadUvarint": "encoding/binary", + "binary.ReadVarint": "encoding/binary", + "binary.Size": "encoding/binary", + "binary.Uvarint": "encoding/binary", + "binary.Varint": "encoding/binary", + "binary.Write": "encoding/binary", + "bits.LeadingZeros": "math/bits", + "bits.LeadingZeros16": "math/bits", + "bits.LeadingZeros32": "math/bits", + "bits.LeadingZeros64": "math/bits", + "bits.LeadingZeros8": "math/bits", + "bits.Len": "math/bits", + "bits.Len16": "math/bits", + "bits.Len32": "math/bits", + "bits.Len64": "math/bits", + "bits.Len8": "math/bits", + "bits.OnesCount": "math/bits", + "bits.OnesCount16": "math/bits", + "bits.OnesCount32": "math/bits", + "bits.OnesCount64": "math/bits", + "bits.OnesCount8": "math/bits", + "bits.Reverse": "math/bits", + "bits.Reverse16": "math/bits", + "bits.Reverse32": "math/bits", + "bits.Reverse64": "math/bits", + "bits.Reverse8": "math/bits", + "bits.ReverseBytes": "math/bits", + "bits.ReverseBytes16": "math/bits", + "bits.ReverseBytes32": "math/bits", + "bits.ReverseBytes64": "math/bits", + "bits.RotateLeft": "math/bits", + "bits.RotateLeft16": "math/bits", + "bits.RotateLeft32": "math/bits", + "bits.RotateLeft64": "math/bits", + "bits.RotateLeft8": "math/bits", + "bits.TrailingZeros": "math/bits", + "bits.TrailingZeros16": "math/bits", + "bits.TrailingZeros32": "math/bits", + "bits.TrailingZeros64": "math/bits", + "bits.TrailingZeros8": "math/bits", + "bits.UintSize": "math/bits", + "bufio.ErrAdvanceTooFar": "bufio", + "bufio.ErrBufferFull": "bufio", + "bufio.ErrFinalToken": "bufio", + "bufio.ErrInvalidUnreadByte": "bufio", + "bufio.ErrInvalidUnreadRune": "bufio", + "bufio.ErrNegativeAdvance": "bufio", + "bufio.ErrNegativeCount": "bufio", + "bufio.ErrTooLong": "bufio", + "bufio.MaxScanTokenSize": "bufio", + "bufio.NewReadWriter": "bufio", + "bufio.NewReader": "bufio", + "bufio.NewReaderSize": "bufio", + "bufio.NewScanner": "bufio", + "bufio.NewWriter": "bufio", + "bufio.NewWriterSize": "bufio", + "bufio.ReadWriter": "bufio", + "bufio.Reader": "bufio", + "bufio.ScanBytes": "bufio", + "bufio.ScanLines": "bufio", + "bufio.ScanRunes": "bufio", + "bufio.ScanWords": "bufio", + "bufio.Scanner": "bufio", + "bufio.SplitFunc": "bufio", + "bufio.Writer": "bufio", + "build.AllowBinary": "go/build", + "build.ArchChar": "go/build", + "build.Context": "go/build", + "build.Default": "go/build", + "build.FindOnly": "go/build", + "build.IgnoreVendor": "go/build", + "build.Import": "go/build", + "build.ImportComment": "go/build", + "build.ImportDir": "go/build", + "build.ImportMode": "go/build", + "build.IsLocalImport": "go/build", + "build.MultiplePackageError": "go/build", + "build.NoGoError": "go/build", + "build.Package": "go/build", + "build.ToolDir": "go/build", + "bytes.Buffer": "bytes", + "bytes.Compare": "bytes", + 
"bytes.Contains": "bytes", + "bytes.ContainsAny": "bytes", + "bytes.ContainsRune": "bytes", + "bytes.Count": "bytes", + "bytes.Equal": "bytes", + "bytes.EqualFold": "bytes", + "bytes.ErrTooLarge": "bytes", + "bytes.Fields": "bytes", + "bytes.FieldsFunc": "bytes", + "bytes.HasPrefix": "bytes", + "bytes.HasSuffix": "bytes", + "bytes.Index": "bytes", + "bytes.IndexAny": "bytes", + "bytes.IndexByte": "bytes", + "bytes.IndexFunc": "bytes", + "bytes.IndexRune": "bytes", + "bytes.Join": "bytes", + "bytes.LastIndex": "bytes", + "bytes.LastIndexAny": "bytes", + "bytes.LastIndexByte": "bytes", + "bytes.LastIndexFunc": "bytes", + "bytes.Map": "bytes", + "bytes.MinRead": "bytes", + "bytes.NewBuffer": "bytes", + "bytes.NewBufferString": "bytes", + "bytes.NewReader": "bytes", + "bytes.Reader": "bytes", + "bytes.Repeat": "bytes", + "bytes.Replace": "bytes", + "bytes.Runes": "bytes", + "bytes.Split": "bytes", + "bytes.SplitAfter": "bytes", + "bytes.SplitAfterN": "bytes", + "bytes.SplitN": "bytes", + "bytes.Title": "bytes", + "bytes.ToLower": "bytes", + "bytes.ToLowerSpecial": "bytes", + "bytes.ToTitle": "bytes", + "bytes.ToTitleSpecial": "bytes", + "bytes.ToUpper": "bytes", + "bytes.ToUpperSpecial": "bytes", + "bytes.Trim": "bytes", + "bytes.TrimFunc": "bytes", + "bytes.TrimLeft": "bytes", + "bytes.TrimLeftFunc": "bytes", + "bytes.TrimPrefix": "bytes", + "bytes.TrimRight": "bytes", + "bytes.TrimRightFunc": "bytes", + "bytes.TrimSpace": "bytes", + "bytes.TrimSuffix": "bytes", + "bzip2.NewReader": "compress/bzip2", + "bzip2.StructuralError": "compress/bzip2", + "cgi.Handler": "net/http/cgi", + "cgi.Request": "net/http/cgi", + "cgi.RequestFromMap": "net/http/cgi", + "cgi.Serve": "net/http/cgi", + "cipher.AEAD": "crypto/cipher", + "cipher.Block": "crypto/cipher", + "cipher.BlockMode": "crypto/cipher", + "cipher.NewCBCDecrypter": "crypto/cipher", + "cipher.NewCBCEncrypter": "crypto/cipher", + "cipher.NewCFBDecrypter": "crypto/cipher", + "cipher.NewCFBEncrypter": "crypto/cipher", + "cipher.NewCTR": "crypto/cipher", + "cipher.NewGCM": "crypto/cipher", + "cipher.NewGCMWithNonceSize": "crypto/cipher", + "cipher.NewOFB": "crypto/cipher", + "cipher.Stream": "crypto/cipher", + "cipher.StreamReader": "crypto/cipher", + "cipher.StreamWriter": "crypto/cipher", + "cmplx.Abs": "math/cmplx", + "cmplx.Acos": "math/cmplx", + "cmplx.Acosh": "math/cmplx", + "cmplx.Asin": "math/cmplx", + "cmplx.Asinh": "math/cmplx", + "cmplx.Atan": "math/cmplx", + "cmplx.Atanh": "math/cmplx", + "cmplx.Conj": "math/cmplx", + "cmplx.Cos": "math/cmplx", + "cmplx.Cosh": "math/cmplx", + "cmplx.Cot": "math/cmplx", + "cmplx.Exp": "math/cmplx", + "cmplx.Inf": "math/cmplx", + "cmplx.IsInf": "math/cmplx", + "cmplx.IsNaN": "math/cmplx", + "cmplx.Log": "math/cmplx", + "cmplx.Log10": "math/cmplx", + "cmplx.NaN": "math/cmplx", + "cmplx.Phase": "math/cmplx", + "cmplx.Polar": "math/cmplx", + "cmplx.Pow": "math/cmplx", + "cmplx.Rect": "math/cmplx", + "cmplx.Sin": "math/cmplx", + "cmplx.Sinh": "math/cmplx", + "cmplx.Sqrt": "math/cmplx", + "cmplx.Tan": "math/cmplx", + "cmplx.Tanh": "math/cmplx", + "color.Alpha": "image/color", + "color.Alpha16": "image/color", + "color.Alpha16Model": "image/color", + "color.AlphaModel": "image/color", + "color.Black": "image/color", + "color.CMYK": "image/color", + "color.CMYKModel": "image/color", + "color.CMYKToRGB": "image/color", + "color.Color": "image/color", + "color.Gray": "image/color", + "color.Gray16": "image/color", + "color.Gray16Model": "image/color", + "color.GrayModel": "image/color", + "color.Model": 
"image/color", + "color.ModelFunc": "image/color", + "color.NRGBA": "image/color", + "color.NRGBA64": "image/color", + "color.NRGBA64Model": "image/color", + "color.NRGBAModel": "image/color", + "color.NYCbCrA": "image/color", + "color.NYCbCrAModel": "image/color", + "color.Opaque": "image/color", + "color.Palette": "image/color", + "color.RGBA": "image/color", + "color.RGBA64": "image/color", + "color.RGBA64Model": "image/color", + "color.RGBAModel": "image/color", + "color.RGBToCMYK": "image/color", + "color.RGBToYCbCr": "image/color", + "color.Transparent": "image/color", + "color.White": "image/color", + "color.YCbCr": "image/color", + "color.YCbCrModel": "image/color", + "color.YCbCrToRGB": "image/color", + "constant.BinaryOp": "go/constant", + "constant.BitLen": "go/constant", + "constant.Bool": "go/constant", + "constant.BoolVal": "go/constant", + "constant.Bytes": "go/constant", + "constant.Compare": "go/constant", + "constant.Complex": "go/constant", + "constant.Denom": "go/constant", + "constant.Float": "go/constant", + "constant.Float32Val": "go/constant", + "constant.Float64Val": "go/constant", + "constant.Imag": "go/constant", + "constant.Int": "go/constant", + "constant.Int64Val": "go/constant", + "constant.Kind": "go/constant", + "constant.MakeBool": "go/constant", + "constant.MakeFloat64": "go/constant", + "constant.MakeFromBytes": "go/constant", + "constant.MakeFromLiteral": "go/constant", + "constant.MakeImag": "go/constant", + "constant.MakeInt64": "go/constant", + "constant.MakeString": "go/constant", + "constant.MakeUint64": "go/constant", + "constant.MakeUnknown": "go/constant", + "constant.Num": "go/constant", + "constant.Real": "go/constant", + "constant.Shift": "go/constant", + "constant.Sign": "go/constant", + "constant.String": "go/constant", + "constant.StringVal": "go/constant", + "constant.ToComplex": "go/constant", + "constant.ToFloat": "go/constant", + "constant.ToInt": "go/constant", + "constant.Uint64Val": "go/constant", + "constant.UnaryOp": "go/constant", + "constant.Unknown": "go/constant", + "context.Background": "context", + "context.CancelFunc": "context", + "context.Canceled": "context", + "context.Context": "context", + "context.DeadlineExceeded": "context", + "context.TODO": "context", + "context.WithCancel": "context", + "context.WithDeadline": "context", + "context.WithTimeout": "context", + "context.WithValue": "context", + "cookiejar.Jar": "net/http/cookiejar", + "cookiejar.New": "net/http/cookiejar", + "cookiejar.Options": "net/http/cookiejar", + "cookiejar.PublicSuffixList": "net/http/cookiejar", + "crc32.Castagnoli": "hash/crc32", + "crc32.Checksum": "hash/crc32", + "crc32.ChecksumIEEE": "hash/crc32", + "crc32.IEEE": "hash/crc32", + "crc32.IEEETable": "hash/crc32", + "crc32.Koopman": "hash/crc32", + "crc32.MakeTable": "hash/crc32", + "crc32.New": "hash/crc32", + "crc32.NewIEEE": "hash/crc32", + "crc32.Size": "hash/crc32", + "crc32.Table": "hash/crc32", + "crc32.Update": "hash/crc32", + "crc64.Checksum": "hash/crc64", + "crc64.ECMA": "hash/crc64", + "crc64.ISO": "hash/crc64", + "crc64.MakeTable": "hash/crc64", + "crc64.New": "hash/crc64", + "crc64.Size": "hash/crc64", + "crc64.Table": "hash/crc64", + "crc64.Update": "hash/crc64", + "crypto.BLAKE2b_256": "crypto", + "crypto.BLAKE2b_384": "crypto", + "crypto.BLAKE2b_512": "crypto", + "crypto.BLAKE2s_256": "crypto", + "crypto.Decrypter": "crypto", + "crypto.DecrypterOpts": "crypto", + "crypto.Hash": "crypto", + "crypto.MD4": "crypto", + "crypto.MD5": "crypto", + "crypto.MD5SHA1": "crypto", + 
"crypto.PrivateKey": "crypto", + "crypto.PublicKey": "crypto", + "crypto.RIPEMD160": "crypto", + "crypto.RegisterHash": "crypto", + "crypto.SHA1": "crypto", + "crypto.SHA224": "crypto", + "crypto.SHA256": "crypto", + "crypto.SHA384": "crypto", + "crypto.SHA3_224": "crypto", + "crypto.SHA3_256": "crypto", + "crypto.SHA3_384": "crypto", + "crypto.SHA3_512": "crypto", + "crypto.SHA512": "crypto", + "crypto.SHA512_224": "crypto", + "crypto.SHA512_256": "crypto", + "crypto.Signer": "crypto", + "crypto.SignerOpts": "crypto", + "csv.ErrBareQuote": "encoding/csv", + "csv.ErrFieldCount": "encoding/csv", + "csv.ErrQuote": "encoding/csv", + "csv.ErrTrailingComma": "encoding/csv", + "csv.NewReader": "encoding/csv", + "csv.NewWriter": "encoding/csv", + "csv.ParseError": "encoding/csv", + "csv.Reader": "encoding/csv", + "csv.Writer": "encoding/csv", + "debug.FreeOSMemory": "runtime/debug", + "debug.GCStats": "runtime/debug", + "debug.PrintStack": "runtime/debug", + "debug.ReadGCStats": "runtime/debug", + "debug.SetGCPercent": "runtime/debug", + "debug.SetMaxStack": "runtime/debug", + "debug.SetMaxThreads": "runtime/debug", + "debug.SetPanicOnFault": "runtime/debug", + "debug.SetTraceback": "runtime/debug", + "debug.Stack": "runtime/debug", + "debug.WriteHeapDump": "runtime/debug", + "des.BlockSize": "crypto/des", + "des.KeySizeError": "crypto/des", + "des.NewCipher": "crypto/des", + "des.NewTripleDESCipher": "crypto/des", + "doc.AllDecls": "go/doc", + "doc.AllMethods": "go/doc", + "doc.Example": "go/doc", + "doc.Examples": "go/doc", + "doc.Filter": "go/doc", + "doc.Func": "go/doc", + "doc.IllegalPrefixes": "go/doc", + "doc.IsPredeclared": "go/doc", + "doc.Mode": "go/doc", + "doc.New": "go/doc", + "doc.Note": "go/doc", + "doc.Package": "go/doc", + "doc.Synopsis": "go/doc", + "doc.ToHTML": "go/doc", + "doc.ToText": "go/doc", + "doc.Type": "go/doc", + "doc.Value": "go/doc", + "draw.Draw": "image/draw", + "draw.DrawMask": "image/draw", + "draw.Drawer": "image/draw", + "draw.FloydSteinberg": "image/draw", + "draw.Image": "image/draw", + "draw.Op": "image/draw", + "draw.Over": "image/draw", + "draw.Quantizer": "image/draw", + "draw.Src": "image/draw", + "driver.Bool": "database/sql/driver", + "driver.ColumnConverter": "database/sql/driver", + "driver.Conn": "database/sql/driver", + "driver.ConnBeginTx": "database/sql/driver", + "driver.ConnPrepareContext": "database/sql/driver", + "driver.Connector": "database/sql/driver", + "driver.DefaultParameterConverter": "database/sql/driver", + "driver.Driver": "database/sql/driver", + "driver.DriverContext": "database/sql/driver", + "driver.ErrBadConn": "database/sql/driver", + "driver.ErrRemoveArgument": "database/sql/driver", + "driver.ErrSkip": "database/sql/driver", + "driver.Execer": "database/sql/driver", + "driver.ExecerContext": "database/sql/driver", + "driver.Int32": "database/sql/driver", + "driver.IsScanValue": "database/sql/driver", + "driver.IsValue": "database/sql/driver", + "driver.IsolationLevel": "database/sql/driver", + "driver.NamedValue": "database/sql/driver", + "driver.NamedValueChecker": "database/sql/driver", + "driver.NotNull": "database/sql/driver", + "driver.Null": "database/sql/driver", + "driver.Pinger": "database/sql/driver", + "driver.Queryer": "database/sql/driver", + "driver.QueryerContext": "database/sql/driver", + "driver.Result": "database/sql/driver", + "driver.ResultNoRows": "database/sql/driver", + "driver.Rows": "database/sql/driver", + "driver.RowsAffected": "database/sql/driver", + "driver.RowsColumnTypeDatabaseTypeName": 
"database/sql/driver", + "driver.RowsColumnTypeLength": "database/sql/driver", + "driver.RowsColumnTypeNullable": "database/sql/driver", + "driver.RowsColumnTypePrecisionScale": "database/sql/driver", + "driver.RowsColumnTypeScanType": "database/sql/driver", + "driver.RowsNextResultSet": "database/sql/driver", + "driver.SessionResetter": "database/sql/driver", + "driver.Stmt": "database/sql/driver", + "driver.StmtExecContext": "database/sql/driver", + "driver.StmtQueryContext": "database/sql/driver", + "driver.String": "database/sql/driver", + "driver.Tx": "database/sql/driver", + "driver.TxOptions": "database/sql/driver", + "driver.Value": "database/sql/driver", + "driver.ValueConverter": "database/sql/driver", + "driver.Valuer": "database/sql/driver", + "dsa.ErrInvalidPublicKey": "crypto/dsa", + "dsa.GenerateKey": "crypto/dsa", + "dsa.GenerateParameters": "crypto/dsa", + "dsa.L1024N160": "crypto/dsa", + "dsa.L2048N224": "crypto/dsa", + "dsa.L2048N256": "crypto/dsa", + "dsa.L3072N256": "crypto/dsa", + "dsa.ParameterSizes": "crypto/dsa", + "dsa.Parameters": "crypto/dsa", + "dsa.PrivateKey": "crypto/dsa", + "dsa.PublicKey": "crypto/dsa", + "dsa.Sign": "crypto/dsa", + "dsa.Verify": "crypto/dsa", + "dwarf.AddrType": "debug/dwarf", + "dwarf.ArrayType": "debug/dwarf", + "dwarf.Attr": "debug/dwarf", + "dwarf.AttrAbstractOrigin": "debug/dwarf", + "dwarf.AttrAccessibility": "debug/dwarf", + "dwarf.AttrAddrClass": "debug/dwarf", + "dwarf.AttrAllocated": "debug/dwarf", + "dwarf.AttrArtificial": "debug/dwarf", + "dwarf.AttrAssociated": "debug/dwarf", + "dwarf.AttrBaseTypes": "debug/dwarf", + "dwarf.AttrBitOffset": "debug/dwarf", + "dwarf.AttrBitSize": "debug/dwarf", + "dwarf.AttrByteSize": "debug/dwarf", + "dwarf.AttrCallColumn": "debug/dwarf", + "dwarf.AttrCallFile": "debug/dwarf", + "dwarf.AttrCallLine": "debug/dwarf", + "dwarf.AttrCalling": "debug/dwarf", + "dwarf.AttrCommonRef": "debug/dwarf", + "dwarf.AttrCompDir": "debug/dwarf", + "dwarf.AttrConstValue": "debug/dwarf", + "dwarf.AttrContainingType": "debug/dwarf", + "dwarf.AttrCount": "debug/dwarf", + "dwarf.AttrDataLocation": "debug/dwarf", + "dwarf.AttrDataMemberLoc": "debug/dwarf", + "dwarf.AttrDeclColumn": "debug/dwarf", + "dwarf.AttrDeclFile": "debug/dwarf", + "dwarf.AttrDeclLine": "debug/dwarf", + "dwarf.AttrDeclaration": "debug/dwarf", + "dwarf.AttrDefaultValue": "debug/dwarf", + "dwarf.AttrDescription": "debug/dwarf", + "dwarf.AttrDiscr": "debug/dwarf", + "dwarf.AttrDiscrList": "debug/dwarf", + "dwarf.AttrDiscrValue": "debug/dwarf", + "dwarf.AttrEncoding": "debug/dwarf", + "dwarf.AttrEntrypc": "debug/dwarf", + "dwarf.AttrExtension": "debug/dwarf", + "dwarf.AttrExternal": "debug/dwarf", + "dwarf.AttrFrameBase": "debug/dwarf", + "dwarf.AttrFriend": "debug/dwarf", + "dwarf.AttrHighpc": "debug/dwarf", + "dwarf.AttrIdentifierCase": "debug/dwarf", + "dwarf.AttrImport": "debug/dwarf", + "dwarf.AttrInline": "debug/dwarf", + "dwarf.AttrIsOptional": "debug/dwarf", + "dwarf.AttrLanguage": "debug/dwarf", + "dwarf.AttrLocation": "debug/dwarf", + "dwarf.AttrLowerBound": "debug/dwarf", + "dwarf.AttrLowpc": "debug/dwarf", + "dwarf.AttrMacroInfo": "debug/dwarf", + "dwarf.AttrName": "debug/dwarf", + "dwarf.AttrNamelistItem": "debug/dwarf", + "dwarf.AttrOrdering": "debug/dwarf", + "dwarf.AttrPriority": "debug/dwarf", + "dwarf.AttrProducer": "debug/dwarf", + "dwarf.AttrPrototyped": "debug/dwarf", + "dwarf.AttrRanges": "debug/dwarf", + "dwarf.AttrReturnAddr": "debug/dwarf", + "dwarf.AttrSegment": "debug/dwarf", + "dwarf.AttrSibling": "debug/dwarf", + 
"dwarf.AttrSpecification": "debug/dwarf", + "dwarf.AttrStartScope": "debug/dwarf", + "dwarf.AttrStaticLink": "debug/dwarf", + "dwarf.AttrStmtList": "debug/dwarf", + "dwarf.AttrStride": "debug/dwarf", + "dwarf.AttrStrideSize": "debug/dwarf", + "dwarf.AttrStringLength": "debug/dwarf", + "dwarf.AttrTrampoline": "debug/dwarf", + "dwarf.AttrType": "debug/dwarf", + "dwarf.AttrUpperBound": "debug/dwarf", + "dwarf.AttrUseLocation": "debug/dwarf", + "dwarf.AttrUseUTF8": "debug/dwarf", + "dwarf.AttrVarParam": "debug/dwarf", + "dwarf.AttrVirtuality": "debug/dwarf", + "dwarf.AttrVisibility": "debug/dwarf", + "dwarf.AttrVtableElemLoc": "debug/dwarf", + "dwarf.BasicType": "debug/dwarf", + "dwarf.BoolType": "debug/dwarf", + "dwarf.CharType": "debug/dwarf", + "dwarf.Class": "debug/dwarf", + "dwarf.ClassAddress": "debug/dwarf", + "dwarf.ClassBlock": "debug/dwarf", + "dwarf.ClassConstant": "debug/dwarf", + "dwarf.ClassExprLoc": "debug/dwarf", + "dwarf.ClassFlag": "debug/dwarf", + "dwarf.ClassLinePtr": "debug/dwarf", + "dwarf.ClassLocListPtr": "debug/dwarf", + "dwarf.ClassMacPtr": "debug/dwarf", + "dwarf.ClassRangeListPtr": "debug/dwarf", + "dwarf.ClassReference": "debug/dwarf", + "dwarf.ClassReferenceAlt": "debug/dwarf", + "dwarf.ClassReferenceSig": "debug/dwarf", + "dwarf.ClassString": "debug/dwarf", + "dwarf.ClassStringAlt": "debug/dwarf", + "dwarf.ClassUnknown": "debug/dwarf", + "dwarf.CommonType": "debug/dwarf", + "dwarf.ComplexType": "debug/dwarf", + "dwarf.Data": "debug/dwarf", + "dwarf.DecodeError": "debug/dwarf", + "dwarf.DotDotDotType": "debug/dwarf", + "dwarf.Entry": "debug/dwarf", + "dwarf.EnumType": "debug/dwarf", + "dwarf.EnumValue": "debug/dwarf", + "dwarf.ErrUnknownPC": "debug/dwarf", + "dwarf.Field": "debug/dwarf", + "dwarf.FloatType": "debug/dwarf", + "dwarf.FuncType": "debug/dwarf", + "dwarf.IntType": "debug/dwarf", + "dwarf.LineEntry": "debug/dwarf", + "dwarf.LineFile": "debug/dwarf", + "dwarf.LineReader": "debug/dwarf", + "dwarf.LineReaderPos": "debug/dwarf", + "dwarf.New": "debug/dwarf", + "dwarf.Offset": "debug/dwarf", + "dwarf.PtrType": "debug/dwarf", + "dwarf.QualType": "debug/dwarf", + "dwarf.Reader": "debug/dwarf", + "dwarf.StructField": "debug/dwarf", + "dwarf.StructType": "debug/dwarf", + "dwarf.Tag": "debug/dwarf", + "dwarf.TagAccessDeclaration": "debug/dwarf", + "dwarf.TagArrayType": "debug/dwarf", + "dwarf.TagBaseType": "debug/dwarf", + "dwarf.TagCatchDwarfBlock": "debug/dwarf", + "dwarf.TagClassType": "debug/dwarf", + "dwarf.TagCommonDwarfBlock": "debug/dwarf", + "dwarf.TagCommonInclusion": "debug/dwarf", + "dwarf.TagCompileUnit": "debug/dwarf", + "dwarf.TagCondition": "debug/dwarf", + "dwarf.TagConstType": "debug/dwarf", + "dwarf.TagConstant": "debug/dwarf", + "dwarf.TagDwarfProcedure": "debug/dwarf", + "dwarf.TagEntryPoint": "debug/dwarf", + "dwarf.TagEnumerationType": "debug/dwarf", + "dwarf.TagEnumerator": "debug/dwarf", + "dwarf.TagFileType": "debug/dwarf", + "dwarf.TagFormalParameter": "debug/dwarf", + "dwarf.TagFriend": "debug/dwarf", + "dwarf.TagImportedDeclaration": "debug/dwarf", + "dwarf.TagImportedModule": "debug/dwarf", + "dwarf.TagImportedUnit": "debug/dwarf", + "dwarf.TagInheritance": "debug/dwarf", + "dwarf.TagInlinedSubroutine": "debug/dwarf", + "dwarf.TagInterfaceType": "debug/dwarf", + "dwarf.TagLabel": "debug/dwarf", + "dwarf.TagLexDwarfBlock": "debug/dwarf", + "dwarf.TagMember": "debug/dwarf", + "dwarf.TagModule": "debug/dwarf", + "dwarf.TagMutableType": "debug/dwarf", + "dwarf.TagNamelist": "debug/dwarf", + "dwarf.TagNamelistItem": "debug/dwarf", + 
"dwarf.TagNamespace": "debug/dwarf", + "dwarf.TagPackedType": "debug/dwarf", + "dwarf.TagPartialUnit": "debug/dwarf", + "dwarf.TagPointerType": "debug/dwarf", + "dwarf.TagPtrToMemberType": "debug/dwarf", + "dwarf.TagReferenceType": "debug/dwarf", + "dwarf.TagRestrictType": "debug/dwarf", + "dwarf.TagRvalueReferenceType": "debug/dwarf", + "dwarf.TagSetType": "debug/dwarf", + "dwarf.TagSharedType": "debug/dwarf", + "dwarf.TagStringType": "debug/dwarf", + "dwarf.TagStructType": "debug/dwarf", + "dwarf.TagSubprogram": "debug/dwarf", + "dwarf.TagSubrangeType": "debug/dwarf", + "dwarf.TagSubroutineType": "debug/dwarf", + "dwarf.TagTemplateAlias": "debug/dwarf", + "dwarf.TagTemplateTypeParameter": "debug/dwarf", + "dwarf.TagTemplateValueParameter": "debug/dwarf", + "dwarf.TagThrownType": "debug/dwarf", + "dwarf.TagTryDwarfBlock": "debug/dwarf", + "dwarf.TagTypeUnit": "debug/dwarf", + "dwarf.TagTypedef": "debug/dwarf", + "dwarf.TagUnionType": "debug/dwarf", + "dwarf.TagUnspecifiedParameters": "debug/dwarf", + "dwarf.TagUnspecifiedType": "debug/dwarf", + "dwarf.TagVariable": "debug/dwarf", + "dwarf.TagVariant": "debug/dwarf", + "dwarf.TagVariantPart": "debug/dwarf", + "dwarf.TagVolatileType": "debug/dwarf", + "dwarf.TagWithStmt": "debug/dwarf", + "dwarf.Type": "debug/dwarf", + "dwarf.TypedefType": "debug/dwarf", + "dwarf.UcharType": "debug/dwarf", + "dwarf.UintType": "debug/dwarf", + "dwarf.UnspecifiedType": "debug/dwarf", + "dwarf.VoidType": "debug/dwarf", + "ecdsa.GenerateKey": "crypto/ecdsa", + "ecdsa.PrivateKey": "crypto/ecdsa", + "ecdsa.PublicKey": "crypto/ecdsa", + "ecdsa.Sign": "crypto/ecdsa", + "ecdsa.Verify": "crypto/ecdsa", + "elf.ARM_MAGIC_TRAMP_NUMBER": "debug/elf", + "elf.COMPRESS_HIOS": "debug/elf", + "elf.COMPRESS_HIPROC": "debug/elf", + "elf.COMPRESS_LOOS": "debug/elf", + "elf.COMPRESS_LOPROC": "debug/elf", + "elf.COMPRESS_ZLIB": "debug/elf", + "elf.Chdr32": "debug/elf", + "elf.Chdr64": "debug/elf", + "elf.Class": "debug/elf", + "elf.CompressionType": "debug/elf", + "elf.DF_BIND_NOW": "debug/elf", + "elf.DF_ORIGIN": "debug/elf", + "elf.DF_STATIC_TLS": "debug/elf", + "elf.DF_SYMBOLIC": "debug/elf", + "elf.DF_TEXTREL": "debug/elf", + "elf.DT_BIND_NOW": "debug/elf", + "elf.DT_DEBUG": "debug/elf", + "elf.DT_ENCODING": "debug/elf", + "elf.DT_FINI": "debug/elf", + "elf.DT_FINI_ARRAY": "debug/elf", + "elf.DT_FINI_ARRAYSZ": "debug/elf", + "elf.DT_FLAGS": "debug/elf", + "elf.DT_HASH": "debug/elf", + "elf.DT_HIOS": "debug/elf", + "elf.DT_HIPROC": "debug/elf", + "elf.DT_INIT": "debug/elf", + "elf.DT_INIT_ARRAY": "debug/elf", + "elf.DT_INIT_ARRAYSZ": "debug/elf", + "elf.DT_JMPREL": "debug/elf", + "elf.DT_LOOS": "debug/elf", + "elf.DT_LOPROC": "debug/elf", + "elf.DT_NEEDED": "debug/elf", + "elf.DT_NULL": "debug/elf", + "elf.DT_PLTGOT": "debug/elf", + "elf.DT_PLTREL": "debug/elf", + "elf.DT_PLTRELSZ": "debug/elf", + "elf.DT_PREINIT_ARRAY": "debug/elf", + "elf.DT_PREINIT_ARRAYSZ": "debug/elf", + "elf.DT_REL": "debug/elf", + "elf.DT_RELA": "debug/elf", + "elf.DT_RELAENT": "debug/elf", + "elf.DT_RELASZ": "debug/elf", + "elf.DT_RELENT": "debug/elf", + "elf.DT_RELSZ": "debug/elf", + "elf.DT_RPATH": "debug/elf", + "elf.DT_RUNPATH": "debug/elf", + "elf.DT_SONAME": "debug/elf", + "elf.DT_STRSZ": "debug/elf", + "elf.DT_STRTAB": "debug/elf", + "elf.DT_SYMBOLIC": "debug/elf", + "elf.DT_SYMENT": "debug/elf", + "elf.DT_SYMTAB": "debug/elf", + "elf.DT_TEXTREL": "debug/elf", + "elf.DT_VERNEED": "debug/elf", + "elf.DT_VERNEEDNUM": "debug/elf", + "elf.DT_VERSYM": "debug/elf", + "elf.Data": "debug/elf", + 
"elf.Dyn32": "debug/elf", + "elf.Dyn64": "debug/elf", + "elf.DynFlag": "debug/elf", + "elf.DynTag": "debug/elf", + "elf.EI_ABIVERSION": "debug/elf", + "elf.EI_CLASS": "debug/elf", + "elf.EI_DATA": "debug/elf", + "elf.EI_NIDENT": "debug/elf", + "elf.EI_OSABI": "debug/elf", + "elf.EI_PAD": "debug/elf", + "elf.EI_VERSION": "debug/elf", + "elf.ELFCLASS32": "debug/elf", + "elf.ELFCLASS64": "debug/elf", + "elf.ELFCLASSNONE": "debug/elf", + "elf.ELFDATA2LSB": "debug/elf", + "elf.ELFDATA2MSB": "debug/elf", + "elf.ELFDATANONE": "debug/elf", + "elf.ELFMAG": "debug/elf", + "elf.ELFOSABI_86OPEN": "debug/elf", + "elf.ELFOSABI_AIX": "debug/elf", + "elf.ELFOSABI_ARM": "debug/elf", + "elf.ELFOSABI_FREEBSD": "debug/elf", + "elf.ELFOSABI_HPUX": "debug/elf", + "elf.ELFOSABI_HURD": "debug/elf", + "elf.ELFOSABI_IRIX": "debug/elf", + "elf.ELFOSABI_LINUX": "debug/elf", + "elf.ELFOSABI_MODESTO": "debug/elf", + "elf.ELFOSABI_NETBSD": "debug/elf", + "elf.ELFOSABI_NONE": "debug/elf", + "elf.ELFOSABI_NSK": "debug/elf", + "elf.ELFOSABI_OPENBSD": "debug/elf", + "elf.ELFOSABI_OPENVMS": "debug/elf", + "elf.ELFOSABI_SOLARIS": "debug/elf", + "elf.ELFOSABI_STANDALONE": "debug/elf", + "elf.ELFOSABI_TRU64": "debug/elf", + "elf.EM_386": "debug/elf", + "elf.EM_486": "debug/elf", + "elf.EM_68HC12": "debug/elf", + "elf.EM_68K": "debug/elf", + "elf.EM_860": "debug/elf", + "elf.EM_88K": "debug/elf", + "elf.EM_960": "debug/elf", + "elf.EM_AARCH64": "debug/elf", + "elf.EM_ALPHA": "debug/elf", + "elf.EM_ALPHA_STD": "debug/elf", + "elf.EM_ARC": "debug/elf", + "elf.EM_ARM": "debug/elf", + "elf.EM_COLDFIRE": "debug/elf", + "elf.EM_FR20": "debug/elf", + "elf.EM_H8S": "debug/elf", + "elf.EM_H8_300": "debug/elf", + "elf.EM_H8_300H": "debug/elf", + "elf.EM_H8_500": "debug/elf", + "elf.EM_IA_64": "debug/elf", + "elf.EM_M32": "debug/elf", + "elf.EM_ME16": "debug/elf", + "elf.EM_MIPS": "debug/elf", + "elf.EM_MIPS_RS3_LE": "debug/elf", + "elf.EM_MIPS_RS4_BE": "debug/elf", + "elf.EM_MIPS_X": "debug/elf", + "elf.EM_MMA": "debug/elf", + "elf.EM_NCPU": "debug/elf", + "elf.EM_NDR1": "debug/elf", + "elf.EM_NONE": "debug/elf", + "elf.EM_PARISC": "debug/elf", + "elf.EM_PCP": "debug/elf", + "elf.EM_PPC": "debug/elf", + "elf.EM_PPC64": "debug/elf", + "elf.EM_RCE": "debug/elf", + "elf.EM_RH32": "debug/elf", + "elf.EM_S370": "debug/elf", + "elf.EM_S390": "debug/elf", + "elf.EM_SH": "debug/elf", + "elf.EM_SPARC": "debug/elf", + "elf.EM_SPARC32PLUS": "debug/elf", + "elf.EM_SPARCV9": "debug/elf", + "elf.EM_ST100": "debug/elf", + "elf.EM_STARCORE": "debug/elf", + "elf.EM_TINYJ": "debug/elf", + "elf.EM_TRICORE": "debug/elf", + "elf.EM_V800": "debug/elf", + "elf.EM_VPP500": "debug/elf", + "elf.EM_X86_64": "debug/elf", + "elf.ET_CORE": "debug/elf", + "elf.ET_DYN": "debug/elf", + "elf.ET_EXEC": "debug/elf", + "elf.ET_HIOS": "debug/elf", + "elf.ET_HIPROC": "debug/elf", + "elf.ET_LOOS": "debug/elf", + "elf.ET_LOPROC": "debug/elf", + "elf.ET_NONE": "debug/elf", + "elf.ET_REL": "debug/elf", + "elf.EV_CURRENT": "debug/elf", + "elf.EV_NONE": "debug/elf", + "elf.ErrNoSymbols": "debug/elf", + "elf.File": "debug/elf", + "elf.FileHeader": "debug/elf", + "elf.FormatError": "debug/elf", + "elf.Header32": "debug/elf", + "elf.Header64": "debug/elf", + "elf.ImportedSymbol": "debug/elf", + "elf.Machine": "debug/elf", + "elf.NT_FPREGSET": "debug/elf", + "elf.NT_PRPSINFO": "debug/elf", + "elf.NT_PRSTATUS": "debug/elf", + "elf.NType": "debug/elf", + "elf.NewFile": "debug/elf", + "elf.OSABI": "debug/elf", + "elf.Open": "debug/elf", + "elf.PF_MASKOS": "debug/elf", + "elf.PF_MASKPROC": 
"debug/elf", + "elf.PF_R": "debug/elf", + "elf.PF_W": "debug/elf", + "elf.PF_X": "debug/elf", + "elf.PT_DYNAMIC": "debug/elf", + "elf.PT_HIOS": "debug/elf", + "elf.PT_HIPROC": "debug/elf", + "elf.PT_INTERP": "debug/elf", + "elf.PT_LOAD": "debug/elf", + "elf.PT_LOOS": "debug/elf", + "elf.PT_LOPROC": "debug/elf", + "elf.PT_NOTE": "debug/elf", + "elf.PT_NULL": "debug/elf", + "elf.PT_PHDR": "debug/elf", + "elf.PT_SHLIB": "debug/elf", + "elf.PT_TLS": "debug/elf", + "elf.Prog": "debug/elf", + "elf.Prog32": "debug/elf", + "elf.Prog64": "debug/elf", + "elf.ProgFlag": "debug/elf", + "elf.ProgHeader": "debug/elf", + "elf.ProgType": "debug/elf", + "elf.R_386": "debug/elf", + "elf.R_386_16": "debug/elf", + "elf.R_386_32": "debug/elf", + "elf.R_386_32PLT": "debug/elf", + "elf.R_386_8": "debug/elf", + "elf.R_386_COPY": "debug/elf", + "elf.R_386_GLOB_DAT": "debug/elf", + "elf.R_386_GOT32": "debug/elf", + "elf.R_386_GOT32X": "debug/elf", + "elf.R_386_GOTOFF": "debug/elf", + "elf.R_386_GOTPC": "debug/elf", + "elf.R_386_IRELATIVE": "debug/elf", + "elf.R_386_JMP_SLOT": "debug/elf", + "elf.R_386_NONE": "debug/elf", + "elf.R_386_PC16": "debug/elf", + "elf.R_386_PC32": "debug/elf", + "elf.R_386_PC8": "debug/elf", + "elf.R_386_PLT32": "debug/elf", + "elf.R_386_RELATIVE": "debug/elf", + "elf.R_386_SIZE32": "debug/elf", + "elf.R_386_TLS_DESC": "debug/elf", + "elf.R_386_TLS_DESC_CALL": "debug/elf", + "elf.R_386_TLS_DTPMOD32": "debug/elf", + "elf.R_386_TLS_DTPOFF32": "debug/elf", + "elf.R_386_TLS_GD": "debug/elf", + "elf.R_386_TLS_GD_32": "debug/elf", + "elf.R_386_TLS_GD_CALL": "debug/elf", + "elf.R_386_TLS_GD_POP": "debug/elf", + "elf.R_386_TLS_GD_PUSH": "debug/elf", + "elf.R_386_TLS_GOTDESC": "debug/elf", + "elf.R_386_TLS_GOTIE": "debug/elf", + "elf.R_386_TLS_IE": "debug/elf", + "elf.R_386_TLS_IE_32": "debug/elf", + "elf.R_386_TLS_LDM": "debug/elf", + "elf.R_386_TLS_LDM_32": "debug/elf", + "elf.R_386_TLS_LDM_CALL": "debug/elf", + "elf.R_386_TLS_LDM_POP": "debug/elf", + "elf.R_386_TLS_LDM_PUSH": "debug/elf", + "elf.R_386_TLS_LDO_32": "debug/elf", + "elf.R_386_TLS_LE": "debug/elf", + "elf.R_386_TLS_LE_32": "debug/elf", + "elf.R_386_TLS_TPOFF": "debug/elf", + "elf.R_386_TLS_TPOFF32": "debug/elf", + "elf.R_390": "debug/elf", + "elf.R_390_12": "debug/elf", + "elf.R_390_16": "debug/elf", + "elf.R_390_20": "debug/elf", + "elf.R_390_32": "debug/elf", + "elf.R_390_64": "debug/elf", + "elf.R_390_8": "debug/elf", + "elf.R_390_COPY": "debug/elf", + "elf.R_390_GLOB_DAT": "debug/elf", + "elf.R_390_GOT12": "debug/elf", + "elf.R_390_GOT16": "debug/elf", + "elf.R_390_GOT20": "debug/elf", + "elf.R_390_GOT32": "debug/elf", + "elf.R_390_GOT64": "debug/elf", + "elf.R_390_GOTENT": "debug/elf", + "elf.R_390_GOTOFF": "debug/elf", + "elf.R_390_GOTOFF16": "debug/elf", + "elf.R_390_GOTOFF64": "debug/elf", + "elf.R_390_GOTPC": "debug/elf", + "elf.R_390_GOTPCDBL": "debug/elf", + "elf.R_390_GOTPLT12": "debug/elf", + "elf.R_390_GOTPLT16": "debug/elf", + "elf.R_390_GOTPLT20": "debug/elf", + "elf.R_390_GOTPLT32": "debug/elf", + "elf.R_390_GOTPLT64": "debug/elf", + "elf.R_390_GOTPLTENT": "debug/elf", + "elf.R_390_GOTPLTOFF16": "debug/elf", + "elf.R_390_GOTPLTOFF32": "debug/elf", + "elf.R_390_GOTPLTOFF64": "debug/elf", + "elf.R_390_JMP_SLOT": "debug/elf", + "elf.R_390_NONE": "debug/elf", + "elf.R_390_PC16": "debug/elf", + "elf.R_390_PC16DBL": "debug/elf", + "elf.R_390_PC32": "debug/elf", + "elf.R_390_PC32DBL": "debug/elf", + "elf.R_390_PC64": "debug/elf", + "elf.R_390_PLT16DBL": "debug/elf", + "elf.R_390_PLT32": "debug/elf", + "elf.R_390_PLT32DBL": 
"debug/elf", + "elf.R_390_PLT64": "debug/elf", + "elf.R_390_RELATIVE": "debug/elf", + "elf.R_390_TLS_DTPMOD": "debug/elf", + "elf.R_390_TLS_DTPOFF": "debug/elf", + "elf.R_390_TLS_GD32": "debug/elf", + "elf.R_390_TLS_GD64": "debug/elf", + "elf.R_390_TLS_GDCALL": "debug/elf", + "elf.R_390_TLS_GOTIE12": "debug/elf", + "elf.R_390_TLS_GOTIE20": "debug/elf", + "elf.R_390_TLS_GOTIE32": "debug/elf", + "elf.R_390_TLS_GOTIE64": "debug/elf", + "elf.R_390_TLS_IE32": "debug/elf", + "elf.R_390_TLS_IE64": "debug/elf", + "elf.R_390_TLS_IEENT": "debug/elf", + "elf.R_390_TLS_LDCALL": "debug/elf", + "elf.R_390_TLS_LDM32": "debug/elf", + "elf.R_390_TLS_LDM64": "debug/elf", + "elf.R_390_TLS_LDO32": "debug/elf", + "elf.R_390_TLS_LDO64": "debug/elf", + "elf.R_390_TLS_LE32": "debug/elf", + "elf.R_390_TLS_LE64": "debug/elf", + "elf.R_390_TLS_LOAD": "debug/elf", + "elf.R_390_TLS_TPOFF": "debug/elf", + "elf.R_AARCH64": "debug/elf", + "elf.R_AARCH64_ABS16": "debug/elf", + "elf.R_AARCH64_ABS32": "debug/elf", + "elf.R_AARCH64_ABS64": "debug/elf", + "elf.R_AARCH64_ADD_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_ADR_GOT_PAGE": "debug/elf", + "elf.R_AARCH64_ADR_PREL_LO21": "debug/elf", + "elf.R_AARCH64_ADR_PREL_PG_HI21": "debug/elf", + "elf.R_AARCH64_ADR_PREL_PG_HI21_NC": "debug/elf", + "elf.R_AARCH64_CALL26": "debug/elf", + "elf.R_AARCH64_CONDBR19": "debug/elf", + "elf.R_AARCH64_COPY": "debug/elf", + "elf.R_AARCH64_GLOB_DAT": "debug/elf", + "elf.R_AARCH64_GOT_LD_PREL19": "debug/elf", + "elf.R_AARCH64_IRELATIVE": "debug/elf", + "elf.R_AARCH64_JUMP26": "debug/elf", + "elf.R_AARCH64_JUMP_SLOT": "debug/elf", + "elf.R_AARCH64_LD64_GOTOFF_LO15": "debug/elf", + "elf.R_AARCH64_LD64_GOTPAGE_LO15": "debug/elf", + "elf.R_AARCH64_LD64_GOT_LO12_NC": "debug/elf", + "elf.R_AARCH64_LDST128_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_LDST16_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_LDST32_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_LDST64_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_LDST8_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_LD_PREL_LO19": "debug/elf", + "elf.R_AARCH64_MOVW_SABS_G0": "debug/elf", + "elf.R_AARCH64_MOVW_SABS_G1": "debug/elf", + "elf.R_AARCH64_MOVW_SABS_G2": "debug/elf", + "elf.R_AARCH64_MOVW_UABS_G0": "debug/elf", + "elf.R_AARCH64_MOVW_UABS_G0_NC": "debug/elf", + "elf.R_AARCH64_MOVW_UABS_G1": "debug/elf", + "elf.R_AARCH64_MOVW_UABS_G1_NC": "debug/elf", + "elf.R_AARCH64_MOVW_UABS_G2": "debug/elf", + "elf.R_AARCH64_MOVW_UABS_G2_NC": "debug/elf", + "elf.R_AARCH64_MOVW_UABS_G3": "debug/elf", + "elf.R_AARCH64_NONE": "debug/elf", + "elf.R_AARCH64_NULL": "debug/elf", + "elf.R_AARCH64_P32_ABS16": "debug/elf", + "elf.R_AARCH64_P32_ABS32": "debug/elf", + "elf.R_AARCH64_P32_ADD_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_ADR_GOT_PAGE": "debug/elf", + "elf.R_AARCH64_P32_ADR_PREL_LO21": "debug/elf", + "elf.R_AARCH64_P32_ADR_PREL_PG_HI21": "debug/elf", + "elf.R_AARCH64_P32_CALL26": "debug/elf", + "elf.R_AARCH64_P32_CONDBR19": "debug/elf", + "elf.R_AARCH64_P32_COPY": "debug/elf", + "elf.R_AARCH64_P32_GLOB_DAT": "debug/elf", + "elf.R_AARCH64_P32_GOT_LD_PREL19": "debug/elf", + "elf.R_AARCH64_P32_IRELATIVE": "debug/elf", + "elf.R_AARCH64_P32_JUMP26": "debug/elf", + "elf.R_AARCH64_P32_JUMP_SLOT": "debug/elf", + "elf.R_AARCH64_P32_LD32_GOT_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_LDST128_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_LDST16_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_LDST32_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_LDST64_ABS_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_LDST8_ABS_LO12_NC": 
"debug/elf", + "elf.R_AARCH64_P32_LD_PREL_LO19": "debug/elf", + "elf.R_AARCH64_P32_MOVW_SABS_G0": "debug/elf", + "elf.R_AARCH64_P32_MOVW_UABS_G0": "debug/elf", + "elf.R_AARCH64_P32_MOVW_UABS_G0_NC": "debug/elf", + "elf.R_AARCH64_P32_MOVW_UABS_G1": "debug/elf", + "elf.R_AARCH64_P32_PREL16": "debug/elf", + "elf.R_AARCH64_P32_PREL32": "debug/elf", + "elf.R_AARCH64_P32_RELATIVE": "debug/elf", + "elf.R_AARCH64_P32_TLSDESC": "debug/elf", + "elf.R_AARCH64_P32_TLSDESC_ADD_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_TLSDESC_ADR_PAGE21": "debug/elf", + "elf.R_AARCH64_P32_TLSDESC_ADR_PREL21": "debug/elf", + "elf.R_AARCH64_P32_TLSDESC_CALL": "debug/elf", + "elf.R_AARCH64_P32_TLSDESC_LD32_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_TLSDESC_LD_PREL19": "debug/elf", + "elf.R_AARCH64_P32_TLSGD_ADD_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_TLSGD_ADR_PAGE21": "debug/elf", + "elf.R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21": "debug/elf", + "elf.R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19": "debug/elf", + "elf.R_AARCH64_P32_TLSLE_ADD_TPREL_HI12": "debug/elf", + "elf.R_AARCH64_P32_TLSLE_ADD_TPREL_LO12": "debug/elf", + "elf.R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC": "debug/elf", + "elf.R_AARCH64_P32_TLSLE_MOVW_TPREL_G0": "debug/elf", + "elf.R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC": "debug/elf", + "elf.R_AARCH64_P32_TLSLE_MOVW_TPREL_G1": "debug/elf", + "elf.R_AARCH64_P32_TLS_DTPMOD": "debug/elf", + "elf.R_AARCH64_P32_TLS_DTPREL": "debug/elf", + "elf.R_AARCH64_P32_TLS_TPREL": "debug/elf", + "elf.R_AARCH64_P32_TSTBR14": "debug/elf", + "elf.R_AARCH64_PREL16": "debug/elf", + "elf.R_AARCH64_PREL32": "debug/elf", + "elf.R_AARCH64_PREL64": "debug/elf", + "elf.R_AARCH64_RELATIVE": "debug/elf", + "elf.R_AARCH64_TLSDESC": "debug/elf", + "elf.R_AARCH64_TLSDESC_ADD": "debug/elf", + "elf.R_AARCH64_TLSDESC_ADD_LO12_NC": "debug/elf", + "elf.R_AARCH64_TLSDESC_ADR_PAGE21": "debug/elf", + "elf.R_AARCH64_TLSDESC_ADR_PREL21": "debug/elf", + "elf.R_AARCH64_TLSDESC_CALL": "debug/elf", + "elf.R_AARCH64_TLSDESC_LD64_LO12_NC": "debug/elf", + "elf.R_AARCH64_TLSDESC_LDR": "debug/elf", + "elf.R_AARCH64_TLSDESC_LD_PREL19": "debug/elf", + "elf.R_AARCH64_TLSDESC_OFF_G0_NC": "debug/elf", + "elf.R_AARCH64_TLSDESC_OFF_G1": "debug/elf", + "elf.R_AARCH64_TLSGD_ADD_LO12_NC": "debug/elf", + "elf.R_AARCH64_TLSGD_ADR_PAGE21": "debug/elf", + "elf.R_AARCH64_TLSGD_ADR_PREL21": "debug/elf", + "elf.R_AARCH64_TLSGD_MOVW_G0_NC": "debug/elf", + "elf.R_AARCH64_TLSGD_MOVW_G1": "debug/elf", + "elf.R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21": "debug/elf", + "elf.R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC": "debug/elf", + "elf.R_AARCH64_TLSIE_LD_GOTTPREL_PREL19": "debug/elf", + "elf.R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC": "debug/elf", + "elf.R_AARCH64_TLSIE_MOVW_GOTTPREL_G1": "debug/elf", + "elf.R_AARCH64_TLSLD_ADR_PAGE21": "debug/elf", + "elf.R_AARCH64_TLSLD_ADR_PREL21": "debug/elf", + "elf.R_AARCH64_TLSLD_LDST128_DTPREL_LO12": "debug/elf", + "elf.R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC": "debug/elf", + "elf.R_AARCH64_TLSLE_ADD_TPREL_HI12": "debug/elf", + "elf.R_AARCH64_TLSLE_ADD_TPREL_LO12": "debug/elf", + "elf.R_AARCH64_TLSLE_ADD_TPREL_LO12_NC": "debug/elf", + "elf.R_AARCH64_TLSLE_LDST128_TPREL_LO12": "debug/elf", + "elf.R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC": "debug/elf", + "elf.R_AARCH64_TLSLE_MOVW_TPREL_G0": "debug/elf", + "elf.R_AARCH64_TLSLE_MOVW_TPREL_G0_NC": "debug/elf", + "elf.R_AARCH64_TLSLE_MOVW_TPREL_G1": "debug/elf", + "elf.R_AARCH64_TLSLE_MOVW_TPREL_G1_NC": "debug/elf", + "elf.R_AARCH64_TLSLE_MOVW_TPREL_G2": 
"debug/elf", + "elf.R_AARCH64_TLS_DTPMOD64": "debug/elf", + "elf.R_AARCH64_TLS_DTPREL64": "debug/elf", + "elf.R_AARCH64_TLS_TPREL64": "debug/elf", + "elf.R_AARCH64_TSTBR14": "debug/elf", + "elf.R_ALPHA": "debug/elf", + "elf.R_ALPHA_BRADDR": "debug/elf", + "elf.R_ALPHA_COPY": "debug/elf", + "elf.R_ALPHA_GLOB_DAT": "debug/elf", + "elf.R_ALPHA_GPDISP": "debug/elf", + "elf.R_ALPHA_GPREL32": "debug/elf", + "elf.R_ALPHA_GPRELHIGH": "debug/elf", + "elf.R_ALPHA_GPRELLOW": "debug/elf", + "elf.R_ALPHA_GPVALUE": "debug/elf", + "elf.R_ALPHA_HINT": "debug/elf", + "elf.R_ALPHA_IMMED_BR_HI32": "debug/elf", + "elf.R_ALPHA_IMMED_GP_16": "debug/elf", + "elf.R_ALPHA_IMMED_GP_HI32": "debug/elf", + "elf.R_ALPHA_IMMED_LO32": "debug/elf", + "elf.R_ALPHA_IMMED_SCN_HI32": "debug/elf", + "elf.R_ALPHA_JMP_SLOT": "debug/elf", + "elf.R_ALPHA_LITERAL": "debug/elf", + "elf.R_ALPHA_LITUSE": "debug/elf", + "elf.R_ALPHA_NONE": "debug/elf", + "elf.R_ALPHA_OP_PRSHIFT": "debug/elf", + "elf.R_ALPHA_OP_PSUB": "debug/elf", + "elf.R_ALPHA_OP_PUSH": "debug/elf", + "elf.R_ALPHA_OP_STORE": "debug/elf", + "elf.R_ALPHA_REFLONG": "debug/elf", + "elf.R_ALPHA_REFQUAD": "debug/elf", + "elf.R_ALPHA_RELATIVE": "debug/elf", + "elf.R_ALPHA_SREL16": "debug/elf", + "elf.R_ALPHA_SREL32": "debug/elf", + "elf.R_ALPHA_SREL64": "debug/elf", + "elf.R_ARM": "debug/elf", + "elf.R_ARM_ABS12": "debug/elf", + "elf.R_ARM_ABS16": "debug/elf", + "elf.R_ARM_ABS32": "debug/elf", + "elf.R_ARM_ABS32_NOI": "debug/elf", + "elf.R_ARM_ABS8": "debug/elf", + "elf.R_ARM_ALU_PCREL_15_8": "debug/elf", + "elf.R_ARM_ALU_PCREL_23_15": "debug/elf", + "elf.R_ARM_ALU_PCREL_7_0": "debug/elf", + "elf.R_ARM_ALU_PC_G0": "debug/elf", + "elf.R_ARM_ALU_PC_G0_NC": "debug/elf", + "elf.R_ARM_ALU_PC_G1": "debug/elf", + "elf.R_ARM_ALU_PC_G1_NC": "debug/elf", + "elf.R_ARM_ALU_PC_G2": "debug/elf", + "elf.R_ARM_ALU_SBREL_19_12_NC": "debug/elf", + "elf.R_ARM_ALU_SBREL_27_20_CK": "debug/elf", + "elf.R_ARM_ALU_SB_G0": "debug/elf", + "elf.R_ARM_ALU_SB_G0_NC": "debug/elf", + "elf.R_ARM_ALU_SB_G1": "debug/elf", + "elf.R_ARM_ALU_SB_G1_NC": "debug/elf", + "elf.R_ARM_ALU_SB_G2": "debug/elf", + "elf.R_ARM_AMP_VCALL9": "debug/elf", + "elf.R_ARM_BASE_ABS": "debug/elf", + "elf.R_ARM_CALL": "debug/elf", + "elf.R_ARM_COPY": "debug/elf", + "elf.R_ARM_GLOB_DAT": "debug/elf", + "elf.R_ARM_GNU_VTENTRY": "debug/elf", + "elf.R_ARM_GNU_VTINHERIT": "debug/elf", + "elf.R_ARM_GOT32": "debug/elf", + "elf.R_ARM_GOTOFF": "debug/elf", + "elf.R_ARM_GOTOFF12": "debug/elf", + "elf.R_ARM_GOTPC": "debug/elf", + "elf.R_ARM_GOTRELAX": "debug/elf", + "elf.R_ARM_GOT_ABS": "debug/elf", + "elf.R_ARM_GOT_BREL12": "debug/elf", + "elf.R_ARM_GOT_PREL": "debug/elf", + "elf.R_ARM_IRELATIVE": "debug/elf", + "elf.R_ARM_JUMP24": "debug/elf", + "elf.R_ARM_JUMP_SLOT": "debug/elf", + "elf.R_ARM_LDC_PC_G0": "debug/elf", + "elf.R_ARM_LDC_PC_G1": "debug/elf", + "elf.R_ARM_LDC_PC_G2": "debug/elf", + "elf.R_ARM_LDC_SB_G0": "debug/elf", + "elf.R_ARM_LDC_SB_G1": "debug/elf", + "elf.R_ARM_LDC_SB_G2": "debug/elf", + "elf.R_ARM_LDRS_PC_G0": "debug/elf", + "elf.R_ARM_LDRS_PC_G1": "debug/elf", + "elf.R_ARM_LDRS_PC_G2": "debug/elf", + "elf.R_ARM_LDRS_SB_G0": "debug/elf", + "elf.R_ARM_LDRS_SB_G1": "debug/elf", + "elf.R_ARM_LDRS_SB_G2": "debug/elf", + "elf.R_ARM_LDR_PC_G1": "debug/elf", + "elf.R_ARM_LDR_PC_G2": "debug/elf", + "elf.R_ARM_LDR_SBREL_11_10_NC": "debug/elf", + "elf.R_ARM_LDR_SB_G0": "debug/elf", + "elf.R_ARM_LDR_SB_G1": "debug/elf", + "elf.R_ARM_LDR_SB_G2": "debug/elf", + "elf.R_ARM_ME_TOO": "debug/elf", + "elf.R_ARM_MOVT_ABS": "debug/elf", + 
"elf.R_ARM_MOVT_BREL": "debug/elf", + "elf.R_ARM_MOVT_PREL": "debug/elf", + "elf.R_ARM_MOVW_ABS_NC": "debug/elf", + "elf.R_ARM_MOVW_BREL": "debug/elf", + "elf.R_ARM_MOVW_BREL_NC": "debug/elf", + "elf.R_ARM_MOVW_PREL_NC": "debug/elf", + "elf.R_ARM_NONE": "debug/elf", + "elf.R_ARM_PC13": "debug/elf", + "elf.R_ARM_PC24": "debug/elf", + "elf.R_ARM_PLT32": "debug/elf", + "elf.R_ARM_PLT32_ABS": "debug/elf", + "elf.R_ARM_PREL31": "debug/elf", + "elf.R_ARM_PRIVATE_0": "debug/elf", + "elf.R_ARM_PRIVATE_1": "debug/elf", + "elf.R_ARM_PRIVATE_10": "debug/elf", + "elf.R_ARM_PRIVATE_11": "debug/elf", + "elf.R_ARM_PRIVATE_12": "debug/elf", + "elf.R_ARM_PRIVATE_13": "debug/elf", + "elf.R_ARM_PRIVATE_14": "debug/elf", + "elf.R_ARM_PRIVATE_15": "debug/elf", + "elf.R_ARM_PRIVATE_2": "debug/elf", + "elf.R_ARM_PRIVATE_3": "debug/elf", + "elf.R_ARM_PRIVATE_4": "debug/elf", + "elf.R_ARM_PRIVATE_5": "debug/elf", + "elf.R_ARM_PRIVATE_6": "debug/elf", + "elf.R_ARM_PRIVATE_7": "debug/elf", + "elf.R_ARM_PRIVATE_8": "debug/elf", + "elf.R_ARM_PRIVATE_9": "debug/elf", + "elf.R_ARM_RABS32": "debug/elf", + "elf.R_ARM_RBASE": "debug/elf", + "elf.R_ARM_REL32": "debug/elf", + "elf.R_ARM_REL32_NOI": "debug/elf", + "elf.R_ARM_RELATIVE": "debug/elf", + "elf.R_ARM_RPC24": "debug/elf", + "elf.R_ARM_RREL32": "debug/elf", + "elf.R_ARM_RSBREL32": "debug/elf", + "elf.R_ARM_RXPC25": "debug/elf", + "elf.R_ARM_SBREL31": "debug/elf", + "elf.R_ARM_SBREL32": "debug/elf", + "elf.R_ARM_SWI24": "debug/elf", + "elf.R_ARM_TARGET1": "debug/elf", + "elf.R_ARM_TARGET2": "debug/elf", + "elf.R_ARM_THM_ABS5": "debug/elf", + "elf.R_ARM_THM_ALU_ABS_G0_NC": "debug/elf", + "elf.R_ARM_THM_ALU_ABS_G1_NC": "debug/elf", + "elf.R_ARM_THM_ALU_ABS_G2_NC": "debug/elf", + "elf.R_ARM_THM_ALU_ABS_G3": "debug/elf", + "elf.R_ARM_THM_ALU_PREL_11_0": "debug/elf", + "elf.R_ARM_THM_GOT_BREL12": "debug/elf", + "elf.R_ARM_THM_JUMP11": "debug/elf", + "elf.R_ARM_THM_JUMP19": "debug/elf", + "elf.R_ARM_THM_JUMP24": "debug/elf", + "elf.R_ARM_THM_JUMP6": "debug/elf", + "elf.R_ARM_THM_JUMP8": "debug/elf", + "elf.R_ARM_THM_MOVT_ABS": "debug/elf", + "elf.R_ARM_THM_MOVT_BREL": "debug/elf", + "elf.R_ARM_THM_MOVT_PREL": "debug/elf", + "elf.R_ARM_THM_MOVW_ABS_NC": "debug/elf", + "elf.R_ARM_THM_MOVW_BREL": "debug/elf", + "elf.R_ARM_THM_MOVW_BREL_NC": "debug/elf", + "elf.R_ARM_THM_MOVW_PREL_NC": "debug/elf", + "elf.R_ARM_THM_PC12": "debug/elf", + "elf.R_ARM_THM_PC22": "debug/elf", + "elf.R_ARM_THM_PC8": "debug/elf", + "elf.R_ARM_THM_RPC22": "debug/elf", + "elf.R_ARM_THM_SWI8": "debug/elf", + "elf.R_ARM_THM_TLS_CALL": "debug/elf", + "elf.R_ARM_THM_TLS_DESCSEQ16": "debug/elf", + "elf.R_ARM_THM_TLS_DESCSEQ32": "debug/elf", + "elf.R_ARM_THM_XPC22": "debug/elf", + "elf.R_ARM_TLS_CALL": "debug/elf", + "elf.R_ARM_TLS_DESCSEQ": "debug/elf", + "elf.R_ARM_TLS_DTPMOD32": "debug/elf", + "elf.R_ARM_TLS_DTPOFF32": "debug/elf", + "elf.R_ARM_TLS_GD32": "debug/elf", + "elf.R_ARM_TLS_GOTDESC": "debug/elf", + "elf.R_ARM_TLS_IE12GP": "debug/elf", + "elf.R_ARM_TLS_IE32": "debug/elf", + "elf.R_ARM_TLS_LDM32": "debug/elf", + "elf.R_ARM_TLS_LDO12": "debug/elf", + "elf.R_ARM_TLS_LDO32": "debug/elf", + "elf.R_ARM_TLS_LE12": "debug/elf", + "elf.R_ARM_TLS_LE32": "debug/elf", + "elf.R_ARM_TLS_TPOFF32": "debug/elf", + "elf.R_ARM_V4BX": "debug/elf", + "elf.R_ARM_XPC25": "debug/elf", + "elf.R_INFO": "debug/elf", + "elf.R_INFO32": "debug/elf", + "elf.R_MIPS": "debug/elf", + "elf.R_MIPS_16": "debug/elf", + "elf.R_MIPS_26": "debug/elf", + "elf.R_MIPS_32": "debug/elf", + "elf.R_MIPS_64": "debug/elf", + 
"elf.R_MIPS_ADD_IMMEDIATE": "debug/elf", + "elf.R_MIPS_CALL16": "debug/elf", + "elf.R_MIPS_CALL_HI16": "debug/elf", + "elf.R_MIPS_CALL_LO16": "debug/elf", + "elf.R_MIPS_DELETE": "debug/elf", + "elf.R_MIPS_GOT16": "debug/elf", + "elf.R_MIPS_GOT_DISP": "debug/elf", + "elf.R_MIPS_GOT_HI16": "debug/elf", + "elf.R_MIPS_GOT_LO16": "debug/elf", + "elf.R_MIPS_GOT_OFST": "debug/elf", + "elf.R_MIPS_GOT_PAGE": "debug/elf", + "elf.R_MIPS_GPREL16": "debug/elf", + "elf.R_MIPS_GPREL32": "debug/elf", + "elf.R_MIPS_HI16": "debug/elf", + "elf.R_MIPS_HIGHER": "debug/elf", + "elf.R_MIPS_HIGHEST": "debug/elf", + "elf.R_MIPS_INSERT_A": "debug/elf", + "elf.R_MIPS_INSERT_B": "debug/elf", + "elf.R_MIPS_JALR": "debug/elf", + "elf.R_MIPS_LITERAL": "debug/elf", + "elf.R_MIPS_LO16": "debug/elf", + "elf.R_MIPS_NONE": "debug/elf", + "elf.R_MIPS_PC16": "debug/elf", + "elf.R_MIPS_PJUMP": "debug/elf", + "elf.R_MIPS_REL16": "debug/elf", + "elf.R_MIPS_REL32": "debug/elf", + "elf.R_MIPS_RELGOT": "debug/elf", + "elf.R_MIPS_SCN_DISP": "debug/elf", + "elf.R_MIPS_SHIFT5": "debug/elf", + "elf.R_MIPS_SHIFT6": "debug/elf", + "elf.R_MIPS_SUB": "debug/elf", + "elf.R_MIPS_TLS_DTPMOD32": "debug/elf", + "elf.R_MIPS_TLS_DTPMOD64": "debug/elf", + "elf.R_MIPS_TLS_DTPREL32": "debug/elf", + "elf.R_MIPS_TLS_DTPREL64": "debug/elf", + "elf.R_MIPS_TLS_DTPREL_HI16": "debug/elf", + "elf.R_MIPS_TLS_DTPREL_LO16": "debug/elf", + "elf.R_MIPS_TLS_GD": "debug/elf", + "elf.R_MIPS_TLS_GOTTPREL": "debug/elf", + "elf.R_MIPS_TLS_LDM": "debug/elf", + "elf.R_MIPS_TLS_TPREL32": "debug/elf", + "elf.R_MIPS_TLS_TPREL64": "debug/elf", + "elf.R_MIPS_TLS_TPREL_HI16": "debug/elf", + "elf.R_MIPS_TLS_TPREL_LO16": "debug/elf", + "elf.R_PPC": "debug/elf", + "elf.R_PPC64": "debug/elf", + "elf.R_PPC64_ADDR14": "debug/elf", + "elf.R_PPC64_ADDR14_BRNTAKEN": "debug/elf", + "elf.R_PPC64_ADDR14_BRTAKEN": "debug/elf", + "elf.R_PPC64_ADDR16": "debug/elf", + "elf.R_PPC64_ADDR16_DS": "debug/elf", + "elf.R_PPC64_ADDR16_HA": "debug/elf", + "elf.R_PPC64_ADDR16_HI": "debug/elf", + "elf.R_PPC64_ADDR16_HIGH": "debug/elf", + "elf.R_PPC64_ADDR16_HIGHA": "debug/elf", + "elf.R_PPC64_ADDR16_HIGHER": "debug/elf", + "elf.R_PPC64_ADDR16_HIGHERA": "debug/elf", + "elf.R_PPC64_ADDR16_HIGHEST": "debug/elf", + "elf.R_PPC64_ADDR16_HIGHESTA": "debug/elf", + "elf.R_PPC64_ADDR16_LO": "debug/elf", + "elf.R_PPC64_ADDR16_LO_DS": "debug/elf", + "elf.R_PPC64_ADDR24": "debug/elf", + "elf.R_PPC64_ADDR32": "debug/elf", + "elf.R_PPC64_ADDR64": "debug/elf", + "elf.R_PPC64_ADDR64_LOCAL": "debug/elf", + "elf.R_PPC64_DTPMOD64": "debug/elf", + "elf.R_PPC64_DTPREL16": "debug/elf", + "elf.R_PPC64_DTPREL16_DS": "debug/elf", + "elf.R_PPC64_DTPREL16_HA": "debug/elf", + "elf.R_PPC64_DTPREL16_HI": "debug/elf", + "elf.R_PPC64_DTPREL16_HIGH": "debug/elf", + "elf.R_PPC64_DTPREL16_HIGHA": "debug/elf", + "elf.R_PPC64_DTPREL16_HIGHER": "debug/elf", + "elf.R_PPC64_DTPREL16_HIGHERA": "debug/elf", + "elf.R_PPC64_DTPREL16_HIGHEST": "debug/elf", + "elf.R_PPC64_DTPREL16_HIGHESTA": "debug/elf", + "elf.R_PPC64_DTPREL16_LO": "debug/elf", + "elf.R_PPC64_DTPREL16_LO_DS": "debug/elf", + "elf.R_PPC64_DTPREL64": "debug/elf", + "elf.R_PPC64_ENTRY": "debug/elf", + "elf.R_PPC64_GOT16": "debug/elf", + "elf.R_PPC64_GOT16_DS": "debug/elf", + "elf.R_PPC64_GOT16_HA": "debug/elf", + "elf.R_PPC64_GOT16_HI": "debug/elf", + "elf.R_PPC64_GOT16_LO": "debug/elf", + "elf.R_PPC64_GOT16_LO_DS": "debug/elf", + "elf.R_PPC64_GOT_DTPREL16_DS": "debug/elf", + "elf.R_PPC64_GOT_DTPREL16_HA": "debug/elf", + "elf.R_PPC64_GOT_DTPREL16_HI": "debug/elf", + 
"elf.R_PPC64_GOT_DTPREL16_LO_DS": "debug/elf", + "elf.R_PPC64_GOT_TLSGD16": "debug/elf", + "elf.R_PPC64_GOT_TLSGD16_HA": "debug/elf", + "elf.R_PPC64_GOT_TLSGD16_HI": "debug/elf", + "elf.R_PPC64_GOT_TLSGD16_LO": "debug/elf", + "elf.R_PPC64_GOT_TLSLD16": "debug/elf", + "elf.R_PPC64_GOT_TLSLD16_HA": "debug/elf", + "elf.R_PPC64_GOT_TLSLD16_HI": "debug/elf", + "elf.R_PPC64_GOT_TLSLD16_LO": "debug/elf", + "elf.R_PPC64_GOT_TPREL16_DS": "debug/elf", + "elf.R_PPC64_GOT_TPREL16_HA": "debug/elf", + "elf.R_PPC64_GOT_TPREL16_HI": "debug/elf", + "elf.R_PPC64_GOT_TPREL16_LO_DS": "debug/elf", + "elf.R_PPC64_IRELATIVE": "debug/elf", + "elf.R_PPC64_JMP_IREL": "debug/elf", + "elf.R_PPC64_JMP_SLOT": "debug/elf", + "elf.R_PPC64_NONE": "debug/elf", + "elf.R_PPC64_PLT16_LO_DS": "debug/elf", + "elf.R_PPC64_PLTGOT16": "debug/elf", + "elf.R_PPC64_PLTGOT16_DS": "debug/elf", + "elf.R_PPC64_PLTGOT16_HA": "debug/elf", + "elf.R_PPC64_PLTGOT16_HI": "debug/elf", + "elf.R_PPC64_PLTGOT16_LO": "debug/elf", + "elf.R_PPC64_PLTGOT_LO_DS": "debug/elf", + "elf.R_PPC64_REL14": "debug/elf", + "elf.R_PPC64_REL14_BRNTAKEN": "debug/elf", + "elf.R_PPC64_REL14_BRTAKEN": "debug/elf", + "elf.R_PPC64_REL16": "debug/elf", + "elf.R_PPC64_REL16DX_HA": "debug/elf", + "elf.R_PPC64_REL16_HA": "debug/elf", + "elf.R_PPC64_REL16_HI": "debug/elf", + "elf.R_PPC64_REL16_LO": "debug/elf", + "elf.R_PPC64_REL24": "debug/elf", + "elf.R_PPC64_REL24_NOTOC": "debug/elf", + "elf.R_PPC64_REL32": "debug/elf", + "elf.R_PPC64_REL64": "debug/elf", + "elf.R_PPC64_SECTOFF_DS": "debug/elf", + "elf.R_PPC64_SECTOFF_LO_DS": "debug/elf", + "elf.R_PPC64_TLS": "debug/elf", + "elf.R_PPC64_TLSGD": "debug/elf", + "elf.R_PPC64_TLSLD": "debug/elf", + "elf.R_PPC64_TOC": "debug/elf", + "elf.R_PPC64_TOC16": "debug/elf", + "elf.R_PPC64_TOC16_DS": "debug/elf", + "elf.R_PPC64_TOC16_HA": "debug/elf", + "elf.R_PPC64_TOC16_HI": "debug/elf", + "elf.R_PPC64_TOC16_LO": "debug/elf", + "elf.R_PPC64_TOC16_LO_DS": "debug/elf", + "elf.R_PPC64_TOCSAVE": "debug/elf", + "elf.R_PPC64_TPREL16": "debug/elf", + "elf.R_PPC64_TPREL16_DS": "debug/elf", + "elf.R_PPC64_TPREL16_HA": "debug/elf", + "elf.R_PPC64_TPREL16_HI": "debug/elf", + "elf.R_PPC64_TPREL16_HIGH": "debug/elf", + "elf.R_PPC64_TPREL16_HIGHA": "debug/elf", + "elf.R_PPC64_TPREL16_HIGHER": "debug/elf", + "elf.R_PPC64_TPREL16_HIGHERA": "debug/elf", + "elf.R_PPC64_TPREL16_HIGHEST": "debug/elf", + "elf.R_PPC64_TPREL16_HIGHESTA": "debug/elf", + "elf.R_PPC64_TPREL16_LO": "debug/elf", + "elf.R_PPC64_TPREL16_LO_DS": "debug/elf", + "elf.R_PPC64_TPREL64": "debug/elf", + "elf.R_PPC_ADDR14": "debug/elf", + "elf.R_PPC_ADDR14_BRNTAKEN": "debug/elf", + "elf.R_PPC_ADDR14_BRTAKEN": "debug/elf", + "elf.R_PPC_ADDR16": "debug/elf", + "elf.R_PPC_ADDR16_HA": "debug/elf", + "elf.R_PPC_ADDR16_HI": "debug/elf", + "elf.R_PPC_ADDR16_LO": "debug/elf", + "elf.R_PPC_ADDR24": "debug/elf", + "elf.R_PPC_ADDR32": "debug/elf", + "elf.R_PPC_COPY": "debug/elf", + "elf.R_PPC_DTPMOD32": "debug/elf", + "elf.R_PPC_DTPREL16": "debug/elf", + "elf.R_PPC_DTPREL16_HA": "debug/elf", + "elf.R_PPC_DTPREL16_HI": "debug/elf", + "elf.R_PPC_DTPREL16_LO": "debug/elf", + "elf.R_PPC_DTPREL32": "debug/elf", + "elf.R_PPC_EMB_BIT_FLD": "debug/elf", + "elf.R_PPC_EMB_MRKREF": "debug/elf", + "elf.R_PPC_EMB_NADDR16": "debug/elf", + "elf.R_PPC_EMB_NADDR16_HA": "debug/elf", + "elf.R_PPC_EMB_NADDR16_HI": "debug/elf", + "elf.R_PPC_EMB_NADDR16_LO": "debug/elf", + "elf.R_PPC_EMB_NADDR32": "debug/elf", + "elf.R_PPC_EMB_RELSDA": "debug/elf", + "elf.R_PPC_EMB_RELSEC16": "debug/elf", + "elf.R_PPC_EMB_RELST_HA": 
"debug/elf", + "elf.R_PPC_EMB_RELST_HI": "debug/elf", + "elf.R_PPC_EMB_RELST_LO": "debug/elf", + "elf.R_PPC_EMB_SDA21": "debug/elf", + "elf.R_PPC_EMB_SDA2I16": "debug/elf", + "elf.R_PPC_EMB_SDA2REL": "debug/elf", + "elf.R_PPC_EMB_SDAI16": "debug/elf", + "elf.R_PPC_GLOB_DAT": "debug/elf", + "elf.R_PPC_GOT16": "debug/elf", + "elf.R_PPC_GOT16_HA": "debug/elf", + "elf.R_PPC_GOT16_HI": "debug/elf", + "elf.R_PPC_GOT16_LO": "debug/elf", + "elf.R_PPC_GOT_TLSGD16": "debug/elf", + "elf.R_PPC_GOT_TLSGD16_HA": "debug/elf", + "elf.R_PPC_GOT_TLSGD16_HI": "debug/elf", + "elf.R_PPC_GOT_TLSGD16_LO": "debug/elf", + "elf.R_PPC_GOT_TLSLD16": "debug/elf", + "elf.R_PPC_GOT_TLSLD16_HA": "debug/elf", + "elf.R_PPC_GOT_TLSLD16_HI": "debug/elf", + "elf.R_PPC_GOT_TLSLD16_LO": "debug/elf", + "elf.R_PPC_GOT_TPREL16": "debug/elf", + "elf.R_PPC_GOT_TPREL16_HA": "debug/elf", + "elf.R_PPC_GOT_TPREL16_HI": "debug/elf", + "elf.R_PPC_GOT_TPREL16_LO": "debug/elf", + "elf.R_PPC_JMP_SLOT": "debug/elf", + "elf.R_PPC_LOCAL24PC": "debug/elf", + "elf.R_PPC_NONE": "debug/elf", + "elf.R_PPC_PLT16_HA": "debug/elf", + "elf.R_PPC_PLT16_HI": "debug/elf", + "elf.R_PPC_PLT16_LO": "debug/elf", + "elf.R_PPC_PLT32": "debug/elf", + "elf.R_PPC_PLTREL24": "debug/elf", + "elf.R_PPC_PLTREL32": "debug/elf", + "elf.R_PPC_REL14": "debug/elf", + "elf.R_PPC_REL14_BRNTAKEN": "debug/elf", + "elf.R_PPC_REL14_BRTAKEN": "debug/elf", + "elf.R_PPC_REL24": "debug/elf", + "elf.R_PPC_REL32": "debug/elf", + "elf.R_PPC_RELATIVE": "debug/elf", + "elf.R_PPC_SDAREL16": "debug/elf", + "elf.R_PPC_SECTOFF": "debug/elf", + "elf.R_PPC_SECTOFF_HA": "debug/elf", + "elf.R_PPC_SECTOFF_HI": "debug/elf", + "elf.R_PPC_SECTOFF_LO": "debug/elf", + "elf.R_PPC_TLS": "debug/elf", + "elf.R_PPC_TPREL16": "debug/elf", + "elf.R_PPC_TPREL16_HA": "debug/elf", + "elf.R_PPC_TPREL16_HI": "debug/elf", + "elf.R_PPC_TPREL16_LO": "debug/elf", + "elf.R_PPC_TPREL32": "debug/elf", + "elf.R_PPC_UADDR16": "debug/elf", + "elf.R_PPC_UADDR32": "debug/elf", + "elf.R_SPARC": "debug/elf", + "elf.R_SPARC_10": "debug/elf", + "elf.R_SPARC_11": "debug/elf", + "elf.R_SPARC_13": "debug/elf", + "elf.R_SPARC_16": "debug/elf", + "elf.R_SPARC_22": "debug/elf", + "elf.R_SPARC_32": "debug/elf", + "elf.R_SPARC_5": "debug/elf", + "elf.R_SPARC_6": "debug/elf", + "elf.R_SPARC_64": "debug/elf", + "elf.R_SPARC_7": "debug/elf", + "elf.R_SPARC_8": "debug/elf", + "elf.R_SPARC_COPY": "debug/elf", + "elf.R_SPARC_DISP16": "debug/elf", + "elf.R_SPARC_DISP32": "debug/elf", + "elf.R_SPARC_DISP64": "debug/elf", + "elf.R_SPARC_DISP8": "debug/elf", + "elf.R_SPARC_GLOB_DAT": "debug/elf", + "elf.R_SPARC_GLOB_JMP": "debug/elf", + "elf.R_SPARC_GOT10": "debug/elf", + "elf.R_SPARC_GOT13": "debug/elf", + "elf.R_SPARC_GOT22": "debug/elf", + "elf.R_SPARC_H44": "debug/elf", + "elf.R_SPARC_HH22": "debug/elf", + "elf.R_SPARC_HI22": "debug/elf", + "elf.R_SPARC_HIPLT22": "debug/elf", + "elf.R_SPARC_HIX22": "debug/elf", + "elf.R_SPARC_HM10": "debug/elf", + "elf.R_SPARC_JMP_SLOT": "debug/elf", + "elf.R_SPARC_L44": "debug/elf", + "elf.R_SPARC_LM22": "debug/elf", + "elf.R_SPARC_LO10": "debug/elf", + "elf.R_SPARC_LOPLT10": "debug/elf", + "elf.R_SPARC_LOX10": "debug/elf", + "elf.R_SPARC_M44": "debug/elf", + "elf.R_SPARC_NONE": "debug/elf", + "elf.R_SPARC_OLO10": "debug/elf", + "elf.R_SPARC_PC10": "debug/elf", + "elf.R_SPARC_PC22": "debug/elf", + "elf.R_SPARC_PCPLT10": "debug/elf", + "elf.R_SPARC_PCPLT22": "debug/elf", + "elf.R_SPARC_PCPLT32": "debug/elf", + "elf.R_SPARC_PC_HH22": "debug/elf", + "elf.R_SPARC_PC_HM10": "debug/elf", + "elf.R_SPARC_PC_LM22": 
"debug/elf", + "elf.R_SPARC_PLT32": "debug/elf", + "elf.R_SPARC_PLT64": "debug/elf", + "elf.R_SPARC_REGISTER": "debug/elf", + "elf.R_SPARC_RELATIVE": "debug/elf", + "elf.R_SPARC_UA16": "debug/elf", + "elf.R_SPARC_UA32": "debug/elf", + "elf.R_SPARC_UA64": "debug/elf", + "elf.R_SPARC_WDISP16": "debug/elf", + "elf.R_SPARC_WDISP19": "debug/elf", + "elf.R_SPARC_WDISP22": "debug/elf", + "elf.R_SPARC_WDISP30": "debug/elf", + "elf.R_SPARC_WPLT30": "debug/elf", + "elf.R_SYM32": "debug/elf", + "elf.R_SYM64": "debug/elf", + "elf.R_TYPE32": "debug/elf", + "elf.R_TYPE64": "debug/elf", + "elf.R_X86_64": "debug/elf", + "elf.R_X86_64_16": "debug/elf", + "elf.R_X86_64_32": "debug/elf", + "elf.R_X86_64_32S": "debug/elf", + "elf.R_X86_64_64": "debug/elf", + "elf.R_X86_64_8": "debug/elf", + "elf.R_X86_64_COPY": "debug/elf", + "elf.R_X86_64_DTPMOD64": "debug/elf", + "elf.R_X86_64_DTPOFF32": "debug/elf", + "elf.R_X86_64_DTPOFF64": "debug/elf", + "elf.R_X86_64_GLOB_DAT": "debug/elf", + "elf.R_X86_64_GOT32": "debug/elf", + "elf.R_X86_64_GOT64": "debug/elf", + "elf.R_X86_64_GOTOFF64": "debug/elf", + "elf.R_X86_64_GOTPC32": "debug/elf", + "elf.R_X86_64_GOTPC32_TLSDESC": "debug/elf", + "elf.R_X86_64_GOTPC64": "debug/elf", + "elf.R_X86_64_GOTPCREL": "debug/elf", + "elf.R_X86_64_GOTPCREL64": "debug/elf", + "elf.R_X86_64_GOTPCRELX": "debug/elf", + "elf.R_X86_64_GOTPLT64": "debug/elf", + "elf.R_X86_64_GOTTPOFF": "debug/elf", + "elf.R_X86_64_IRELATIVE": "debug/elf", + "elf.R_X86_64_JMP_SLOT": "debug/elf", + "elf.R_X86_64_NONE": "debug/elf", + "elf.R_X86_64_PC16": "debug/elf", + "elf.R_X86_64_PC32": "debug/elf", + "elf.R_X86_64_PC32_BND": "debug/elf", + "elf.R_X86_64_PC64": "debug/elf", + "elf.R_X86_64_PC8": "debug/elf", + "elf.R_X86_64_PLT32": "debug/elf", + "elf.R_X86_64_PLT32_BND": "debug/elf", + "elf.R_X86_64_PLTOFF64": "debug/elf", + "elf.R_X86_64_RELATIVE": "debug/elf", + "elf.R_X86_64_RELATIVE64": "debug/elf", + "elf.R_X86_64_REX_GOTPCRELX": "debug/elf", + "elf.R_X86_64_SIZE32": "debug/elf", + "elf.R_X86_64_SIZE64": "debug/elf", + "elf.R_X86_64_TLSDESC": "debug/elf", + "elf.R_X86_64_TLSDESC_CALL": "debug/elf", + "elf.R_X86_64_TLSGD": "debug/elf", + "elf.R_X86_64_TLSLD": "debug/elf", + "elf.R_X86_64_TPOFF32": "debug/elf", + "elf.R_X86_64_TPOFF64": "debug/elf", + "elf.Rel32": "debug/elf", + "elf.Rel64": "debug/elf", + "elf.Rela32": "debug/elf", + "elf.Rela64": "debug/elf", + "elf.SHF_ALLOC": "debug/elf", + "elf.SHF_COMPRESSED": "debug/elf", + "elf.SHF_EXECINSTR": "debug/elf", + "elf.SHF_GROUP": "debug/elf", + "elf.SHF_INFO_LINK": "debug/elf", + "elf.SHF_LINK_ORDER": "debug/elf", + "elf.SHF_MASKOS": "debug/elf", + "elf.SHF_MASKPROC": "debug/elf", + "elf.SHF_MERGE": "debug/elf", + "elf.SHF_OS_NONCONFORMING": "debug/elf", + "elf.SHF_STRINGS": "debug/elf", + "elf.SHF_TLS": "debug/elf", + "elf.SHF_WRITE": "debug/elf", + "elf.SHN_ABS": "debug/elf", + "elf.SHN_COMMON": "debug/elf", + "elf.SHN_HIOS": "debug/elf", + "elf.SHN_HIPROC": "debug/elf", + "elf.SHN_HIRESERVE": "debug/elf", + "elf.SHN_LOOS": "debug/elf", + "elf.SHN_LOPROC": "debug/elf", + "elf.SHN_LORESERVE": "debug/elf", + "elf.SHN_UNDEF": "debug/elf", + "elf.SHN_XINDEX": "debug/elf", + "elf.SHT_DYNAMIC": "debug/elf", + "elf.SHT_DYNSYM": "debug/elf", + "elf.SHT_FINI_ARRAY": "debug/elf", + "elf.SHT_GNU_ATTRIBUTES": "debug/elf", + "elf.SHT_GNU_HASH": "debug/elf", + "elf.SHT_GNU_LIBLIST": "debug/elf", + "elf.SHT_GNU_VERDEF": "debug/elf", + "elf.SHT_GNU_VERNEED": "debug/elf", + "elf.SHT_GNU_VERSYM": "debug/elf", + "elf.SHT_GROUP": "debug/elf", + "elf.SHT_HASH": 
"debug/elf", + "elf.SHT_HIOS": "debug/elf", + "elf.SHT_HIPROC": "debug/elf", + "elf.SHT_HIUSER": "debug/elf", + "elf.SHT_INIT_ARRAY": "debug/elf", + "elf.SHT_LOOS": "debug/elf", + "elf.SHT_LOPROC": "debug/elf", + "elf.SHT_LOUSER": "debug/elf", + "elf.SHT_NOBITS": "debug/elf", + "elf.SHT_NOTE": "debug/elf", + "elf.SHT_NULL": "debug/elf", + "elf.SHT_PREINIT_ARRAY": "debug/elf", + "elf.SHT_PROGBITS": "debug/elf", + "elf.SHT_REL": "debug/elf", + "elf.SHT_RELA": "debug/elf", + "elf.SHT_SHLIB": "debug/elf", + "elf.SHT_STRTAB": "debug/elf", + "elf.SHT_SYMTAB": "debug/elf", + "elf.SHT_SYMTAB_SHNDX": "debug/elf", + "elf.STB_GLOBAL": "debug/elf", + "elf.STB_HIOS": "debug/elf", + "elf.STB_HIPROC": "debug/elf", + "elf.STB_LOCAL": "debug/elf", + "elf.STB_LOOS": "debug/elf", + "elf.STB_LOPROC": "debug/elf", + "elf.STB_WEAK": "debug/elf", + "elf.STT_COMMON": "debug/elf", + "elf.STT_FILE": "debug/elf", + "elf.STT_FUNC": "debug/elf", + "elf.STT_HIOS": "debug/elf", + "elf.STT_HIPROC": "debug/elf", + "elf.STT_LOOS": "debug/elf", + "elf.STT_LOPROC": "debug/elf", + "elf.STT_NOTYPE": "debug/elf", + "elf.STT_OBJECT": "debug/elf", + "elf.STT_SECTION": "debug/elf", + "elf.STT_TLS": "debug/elf", + "elf.STV_DEFAULT": "debug/elf", + "elf.STV_HIDDEN": "debug/elf", + "elf.STV_INTERNAL": "debug/elf", + "elf.STV_PROTECTED": "debug/elf", + "elf.ST_BIND": "debug/elf", + "elf.ST_INFO": "debug/elf", + "elf.ST_TYPE": "debug/elf", + "elf.ST_VISIBILITY": "debug/elf", + "elf.Section": "debug/elf", + "elf.Section32": "debug/elf", + "elf.Section64": "debug/elf", + "elf.SectionFlag": "debug/elf", + "elf.SectionHeader": "debug/elf", + "elf.SectionIndex": "debug/elf", + "elf.SectionType": "debug/elf", + "elf.Sym32": "debug/elf", + "elf.Sym32Size": "debug/elf", + "elf.Sym64": "debug/elf", + "elf.Sym64Size": "debug/elf", + "elf.SymBind": "debug/elf", + "elf.SymType": "debug/elf", + "elf.SymVis": "debug/elf", + "elf.Symbol": "debug/elf", + "elf.Type": "debug/elf", + "elf.Version": "debug/elf", + "elliptic.Curve": "crypto/elliptic", + "elliptic.CurveParams": "crypto/elliptic", + "elliptic.GenerateKey": "crypto/elliptic", + "elliptic.Marshal": "crypto/elliptic", + "elliptic.P224": "crypto/elliptic", + "elliptic.P256": "crypto/elliptic", + "elliptic.P384": "crypto/elliptic", + "elliptic.P521": "crypto/elliptic", + "elliptic.Unmarshal": "crypto/elliptic", + "encoding.BinaryMarshaler": "encoding", + "encoding.BinaryUnmarshaler": "encoding", + "encoding.TextMarshaler": "encoding", + "encoding.TextUnmarshaler": "encoding", + "errors.New": "errors", + "exec.Cmd": "os/exec", + "exec.Command": "os/exec", + "exec.CommandContext": "os/exec", + "exec.ErrNotFound": "os/exec", + "exec.Error": "os/exec", + "exec.ExitError": "os/exec", + "exec.LookPath": "os/exec", + "expvar.Do": "expvar", + "expvar.Float": "expvar", + "expvar.Func": "expvar", + "expvar.Get": "expvar", + "expvar.Handler": "expvar", + "expvar.Int": "expvar", + "expvar.KeyValue": "expvar", + "expvar.Map": "expvar", + "expvar.NewFloat": "expvar", + "expvar.NewInt": "expvar", + "expvar.NewMap": "expvar", + "expvar.NewString": "expvar", + "expvar.Publish": "expvar", + "expvar.String": "expvar", + "expvar.Var": "expvar", + "fcgi.ErrConnClosed": "net/http/fcgi", + "fcgi.ErrRequestAborted": "net/http/fcgi", + "fcgi.ProcessEnv": "net/http/fcgi", + "fcgi.Serve": "net/http/fcgi", + "filepath.Abs": "path/filepath", + "filepath.Base": "path/filepath", + "filepath.Clean": "path/filepath", + "filepath.Dir": "path/filepath", + "filepath.ErrBadPattern": "path/filepath", + "filepath.EvalSymlinks": 
"path/filepath", + "filepath.Ext": "path/filepath", + "filepath.FromSlash": "path/filepath", + "filepath.Glob": "path/filepath", + "filepath.HasPrefix": "path/filepath", + "filepath.IsAbs": "path/filepath", + "filepath.Join": "path/filepath", + "filepath.ListSeparator": "path/filepath", + "filepath.Match": "path/filepath", + "filepath.Rel": "path/filepath", + "filepath.Separator": "path/filepath", + "filepath.SkipDir": "path/filepath", + "filepath.Split": "path/filepath", + "filepath.SplitList": "path/filepath", + "filepath.ToSlash": "path/filepath", + "filepath.VolumeName": "path/filepath", + "filepath.Walk": "path/filepath", + "filepath.WalkFunc": "path/filepath", + "flag.Arg": "flag", + "flag.Args": "flag", + "flag.Bool": "flag", + "flag.BoolVar": "flag", + "flag.CommandLine": "flag", + "flag.ContinueOnError": "flag", + "flag.Duration": "flag", + "flag.DurationVar": "flag", + "flag.ErrHelp": "flag", + "flag.ErrorHandling": "flag", + "flag.ExitOnError": "flag", + "flag.Flag": "flag", + "flag.FlagSet": "flag", + "flag.Float64": "flag", + "flag.Float64Var": "flag", + "flag.Getter": "flag", + "flag.Int": "flag", + "flag.Int64": "flag", + "flag.Int64Var": "flag", + "flag.IntVar": "flag", + "flag.Lookup": "flag", + "flag.NArg": "flag", + "flag.NFlag": "flag", + "flag.NewFlagSet": "flag", + "flag.PanicOnError": "flag", + "flag.Parse": "flag", + "flag.Parsed": "flag", + "flag.PrintDefaults": "flag", + "flag.Set": "flag", + "flag.String": "flag", + "flag.StringVar": "flag", + "flag.Uint": "flag", + "flag.Uint64": "flag", + "flag.Uint64Var": "flag", + "flag.UintVar": "flag", + "flag.UnquoteUsage": "flag", + "flag.Usage": "flag", + "flag.Value": "flag", + "flag.Var": "flag", + "flag.Visit": "flag", + "flag.VisitAll": "flag", + "flate.BestCompression": "compress/flate", + "flate.BestSpeed": "compress/flate", + "flate.CorruptInputError": "compress/flate", + "flate.DefaultCompression": "compress/flate", + "flate.HuffmanOnly": "compress/flate", + "flate.InternalError": "compress/flate", + "flate.NewReader": "compress/flate", + "flate.NewReaderDict": "compress/flate", + "flate.NewWriter": "compress/flate", + "flate.NewWriterDict": "compress/flate", + "flate.NoCompression": "compress/flate", + "flate.ReadError": "compress/flate", + "flate.Reader": "compress/flate", + "flate.Resetter": "compress/flate", + "flate.WriteError": "compress/flate", + "flate.Writer": "compress/flate", + "fmt.Errorf": "fmt", + "fmt.Formatter": "fmt", + "fmt.Fprint": "fmt", + "fmt.Fprintf": "fmt", + "fmt.Fprintln": "fmt", + "fmt.Fscan": "fmt", + "fmt.Fscanf": "fmt", + "fmt.Fscanln": "fmt", + "fmt.GoStringer": "fmt", + "fmt.Print": "fmt", + "fmt.Printf": "fmt", + "fmt.Println": "fmt", + "fmt.Scan": "fmt", + "fmt.ScanState": "fmt", + "fmt.Scanf": "fmt", + "fmt.Scanln": "fmt", + "fmt.Scanner": "fmt", + "fmt.Sprint": "fmt", + "fmt.Sprintf": "fmt", + "fmt.Sprintln": "fmt", + "fmt.Sscan": "fmt", + "fmt.Sscanf": "fmt", + "fmt.Sscanln": "fmt", + "fmt.State": "fmt", + "fmt.Stringer": "fmt", + "fnv.New128": "hash/fnv", + "fnv.New128a": "hash/fnv", + "fnv.New32": "hash/fnv", + "fnv.New32a": "hash/fnv", + "fnv.New64": "hash/fnv", + "fnv.New64a": "hash/fnv", + "format.Node": "go/format", + "format.Source": "go/format", + "gif.Decode": "image/gif", + "gif.DecodeAll": "image/gif", + "gif.DecodeConfig": "image/gif", + "gif.DisposalBackground": "image/gif", + "gif.DisposalNone": "image/gif", + "gif.DisposalPrevious": "image/gif", + "gif.Encode": "image/gif", + "gif.EncodeAll": "image/gif", + "gif.GIF": "image/gif", + "gif.Options": "image/gif", + 
"gob.CommonType": "encoding/gob", + "gob.Decoder": "encoding/gob", + "gob.Encoder": "encoding/gob", + "gob.GobDecoder": "encoding/gob", + "gob.GobEncoder": "encoding/gob", + "gob.NewDecoder": "encoding/gob", + "gob.NewEncoder": "encoding/gob", + "gob.Register": "encoding/gob", + "gob.RegisterName": "encoding/gob", + "gosym.DecodingError": "debug/gosym", + "gosym.Func": "debug/gosym", + "gosym.LineTable": "debug/gosym", + "gosym.NewLineTable": "debug/gosym", + "gosym.NewTable": "debug/gosym", + "gosym.Obj": "debug/gosym", + "gosym.Sym": "debug/gosym", + "gosym.Table": "debug/gosym", + "gosym.UnknownFileError": "debug/gosym", + "gosym.UnknownLineError": "debug/gosym", + "gzip.BestCompression": "compress/gzip", + "gzip.BestSpeed": "compress/gzip", + "gzip.DefaultCompression": "compress/gzip", + "gzip.ErrChecksum": "compress/gzip", + "gzip.ErrHeader": "compress/gzip", + "gzip.Header": "compress/gzip", + "gzip.HuffmanOnly": "compress/gzip", + "gzip.NewReader": "compress/gzip", + "gzip.NewWriter": "compress/gzip", + "gzip.NewWriterLevel": "compress/gzip", + "gzip.NoCompression": "compress/gzip", + "gzip.Reader": "compress/gzip", + "gzip.Writer": "compress/gzip", + "hash.Hash": "hash", + "hash.Hash32": "hash", + "hash.Hash64": "hash", + "heap.Fix": "container/heap", + "heap.Init": "container/heap", + "heap.Interface": "container/heap", + "heap.Pop": "container/heap", + "heap.Push": "container/heap", + "heap.Remove": "container/heap", + "hex.Decode": "encoding/hex", + "hex.DecodeString": "encoding/hex", + "hex.DecodedLen": "encoding/hex", + "hex.Dump": "encoding/hex", + "hex.Dumper": "encoding/hex", + "hex.Encode": "encoding/hex", + "hex.EncodeToString": "encoding/hex", + "hex.EncodedLen": "encoding/hex", + "hex.ErrLength": "encoding/hex", + "hex.InvalidByteError": "encoding/hex", + "hex.NewDecoder": "encoding/hex", + "hex.NewEncoder": "encoding/hex", + "hmac.Equal": "crypto/hmac", + "hmac.New": "crypto/hmac", + "html.EscapeString": "html", + "html.UnescapeString": "html", + "http.CanonicalHeaderKey": "net/http", + "http.Client": "net/http", + "http.CloseNotifier": "net/http", + "http.ConnState": "net/http", + "http.Cookie": "net/http", + "http.CookieJar": "net/http", + "http.DefaultClient": "net/http", + "http.DefaultMaxHeaderBytes": "net/http", + "http.DefaultMaxIdleConnsPerHost": "net/http", + "http.DefaultServeMux": "net/http", + "http.DefaultTransport": "net/http", + "http.DetectContentType": "net/http", + "http.Dir": "net/http", + "http.ErrAbortHandler": "net/http", + "http.ErrBodyNotAllowed": "net/http", + "http.ErrBodyReadAfterClose": "net/http", + "http.ErrContentLength": "net/http", + "http.ErrHandlerTimeout": "net/http", + "http.ErrHeaderTooLong": "net/http", + "http.ErrHijacked": "net/http", + "http.ErrLineTooLong": "net/http", + "http.ErrMissingBoundary": "net/http", + "http.ErrMissingContentLength": "net/http", + "http.ErrMissingFile": "net/http", + "http.ErrNoCookie": "net/http", + "http.ErrNoLocation": "net/http", + "http.ErrNotMultipart": "net/http", + "http.ErrNotSupported": "net/http", + "http.ErrServerClosed": "net/http", + "http.ErrShortBody": "net/http", + "http.ErrSkipAltProtocol": "net/http", + "http.ErrUnexpectedTrailer": "net/http", + "http.ErrUseLastResponse": "net/http", + "http.ErrWriteAfterFlush": "net/http", + "http.Error": "net/http", + "http.File": "net/http", + "http.FileServer": "net/http", + "http.FileSystem": "net/http", + "http.Flusher": "net/http", + "http.Get": "net/http", + "http.Handle": "net/http", + "http.HandleFunc": "net/http", + "http.Handler": 
"net/http", + "http.HandlerFunc": "net/http", + "http.Head": "net/http", + "http.Header": "net/http", + "http.Hijacker": "net/http", + "http.ListenAndServe": "net/http", + "http.ListenAndServeTLS": "net/http", + "http.LocalAddrContextKey": "net/http", + "http.MaxBytesReader": "net/http", + "http.MethodConnect": "net/http", + "http.MethodDelete": "net/http", + "http.MethodGet": "net/http", + "http.MethodHead": "net/http", + "http.MethodOptions": "net/http", + "http.MethodPatch": "net/http", + "http.MethodPost": "net/http", + "http.MethodPut": "net/http", + "http.MethodTrace": "net/http", + "http.NewFileTransport": "net/http", + "http.NewRequest": "net/http", + "http.NewServeMux": "net/http", + "http.NoBody": "net/http", + "http.NotFound": "net/http", + "http.NotFoundHandler": "net/http", + "http.ParseHTTPVersion": "net/http", + "http.ParseTime": "net/http", + "http.Post": "net/http", + "http.PostForm": "net/http", + "http.ProtocolError": "net/http", + "http.ProxyFromEnvironment": "net/http", + "http.ProxyURL": "net/http", + "http.PushOptions": "net/http", + "http.Pusher": "net/http", + "http.ReadRequest": "net/http", + "http.ReadResponse": "net/http", + "http.Redirect": "net/http", + "http.RedirectHandler": "net/http", + "http.Request": "net/http", + "http.Response": "net/http", + "http.ResponseWriter": "net/http", + "http.RoundTripper": "net/http", + "http.Serve": "net/http", + "http.ServeContent": "net/http", + "http.ServeFile": "net/http", + "http.ServeMux": "net/http", + "http.ServeTLS": "net/http", + "http.Server": "net/http", + "http.ServerContextKey": "net/http", + "http.SetCookie": "net/http", + "http.StateActive": "net/http", + "http.StateClosed": "net/http", + "http.StateHijacked": "net/http", + "http.StateIdle": "net/http", + "http.StateNew": "net/http", + "http.StatusAccepted": "net/http", + "http.StatusAlreadyReported": "net/http", + "http.StatusBadGateway": "net/http", + "http.StatusBadRequest": "net/http", + "http.StatusConflict": "net/http", + "http.StatusContinue": "net/http", + "http.StatusCreated": "net/http", + "http.StatusExpectationFailed": "net/http", + "http.StatusFailedDependency": "net/http", + "http.StatusForbidden": "net/http", + "http.StatusFound": "net/http", + "http.StatusGatewayTimeout": "net/http", + "http.StatusGone": "net/http", + "http.StatusHTTPVersionNotSupported": "net/http", + "http.StatusIMUsed": "net/http", + "http.StatusInsufficientStorage": "net/http", + "http.StatusInternalServerError": "net/http", + "http.StatusLengthRequired": "net/http", + "http.StatusLocked": "net/http", + "http.StatusLoopDetected": "net/http", + "http.StatusMethodNotAllowed": "net/http", + "http.StatusMovedPermanently": "net/http", + "http.StatusMultiStatus": "net/http", + "http.StatusMultipleChoices": "net/http", + "http.StatusNetworkAuthenticationRequired": "net/http", + "http.StatusNoContent": "net/http", + "http.StatusNonAuthoritativeInfo": "net/http", + "http.StatusNotAcceptable": "net/http", + "http.StatusNotExtended": "net/http", + "http.StatusNotFound": "net/http", + "http.StatusNotImplemented": "net/http", + "http.StatusNotModified": "net/http", + "http.StatusOK": "net/http", + "http.StatusPartialContent": "net/http", + "http.StatusPaymentRequired": "net/http", + "http.StatusPermanentRedirect": "net/http", + "http.StatusPreconditionFailed": "net/http", + "http.StatusPreconditionRequired": "net/http", + "http.StatusProcessing": "net/http", + "http.StatusProxyAuthRequired": "net/http", + "http.StatusRequestEntityTooLarge": "net/http", + 
"http.StatusRequestHeaderFieldsTooLarge": "net/http", + "http.StatusRequestTimeout": "net/http", + "http.StatusRequestURITooLong": "net/http", + "http.StatusRequestedRangeNotSatisfiable": "net/http", + "http.StatusResetContent": "net/http", + "http.StatusSeeOther": "net/http", + "http.StatusServiceUnavailable": "net/http", + "http.StatusSwitchingProtocols": "net/http", + "http.StatusTeapot": "net/http", + "http.StatusTemporaryRedirect": "net/http", + "http.StatusText": "net/http", + "http.StatusTooManyRequests": "net/http", + "http.StatusUnauthorized": "net/http", + "http.StatusUnavailableForLegalReasons": "net/http", + "http.StatusUnprocessableEntity": "net/http", + "http.StatusUnsupportedMediaType": "net/http", + "http.StatusUpgradeRequired": "net/http", + "http.StatusUseProxy": "net/http", + "http.StatusVariantAlsoNegotiates": "net/http", + "http.StripPrefix": "net/http", + "http.TimeFormat": "net/http", + "http.TimeoutHandler": "net/http", + "http.TrailerPrefix": "net/http", + "http.Transport": "net/http", + "httptest.DefaultRemoteAddr": "net/http/httptest", + "httptest.NewRecorder": "net/http/httptest", + "httptest.NewRequest": "net/http/httptest", + "httptest.NewServer": "net/http/httptest", + "httptest.NewTLSServer": "net/http/httptest", + "httptest.NewUnstartedServer": "net/http/httptest", + "httptest.ResponseRecorder": "net/http/httptest", + "httptest.Server": "net/http/httptest", + "httptrace.ClientTrace": "net/http/httptrace", + "httptrace.ContextClientTrace": "net/http/httptrace", + "httptrace.DNSDoneInfo": "net/http/httptrace", + "httptrace.DNSStartInfo": "net/http/httptrace", + "httptrace.GotConnInfo": "net/http/httptrace", + "httptrace.WithClientTrace": "net/http/httptrace", + "httptrace.WroteRequestInfo": "net/http/httptrace", + "httputil.BufferPool": "net/http/httputil", + "httputil.ClientConn": "net/http/httputil", + "httputil.DumpRequest": "net/http/httputil", + "httputil.DumpRequestOut": "net/http/httputil", + "httputil.DumpResponse": "net/http/httputil", + "httputil.ErrClosed": "net/http/httputil", + "httputil.ErrLineTooLong": "net/http/httputil", + "httputil.ErrPersistEOF": "net/http/httputil", + "httputil.ErrPipeline": "net/http/httputil", + "httputil.NewChunkedReader": "net/http/httputil", + "httputil.NewChunkedWriter": "net/http/httputil", + "httputil.NewClientConn": "net/http/httputil", + "httputil.NewProxyClientConn": "net/http/httputil", + "httputil.NewServerConn": "net/http/httputil", + "httputil.NewSingleHostReverseProxy": "net/http/httputil", + "httputil.ReverseProxy": "net/http/httputil", + "httputil.ServerConn": "net/http/httputil", + "image.Alpha": "image", + "image.Alpha16": "image", + "image.Black": "image", + "image.CMYK": "image", + "image.Config": "image", + "image.Decode": "image", + "image.DecodeConfig": "image", + "image.ErrFormat": "image", + "image.Gray": "image", + "image.Gray16": "image", + "image.Image": "image", + "image.NRGBA": "image", + "image.NRGBA64": "image", + "image.NYCbCrA": "image", + "image.NewAlpha": "image", + "image.NewAlpha16": "image", + "image.NewCMYK": "image", + "image.NewGray": "image", + "image.NewGray16": "image", + "image.NewNRGBA": "image", + "image.NewNRGBA64": "image", + "image.NewNYCbCrA": "image", + "image.NewPaletted": "image", + "image.NewRGBA": "image", + "image.NewRGBA64": "image", + "image.NewUniform": "image", + "image.NewYCbCr": "image", + "image.Opaque": "image", + "image.Paletted": "image", + "image.PalettedImage": "image", + "image.Point": "image", + "image.Pt": "image", + "image.RGBA": "image", + 
"image.RGBA64": "image", + "image.Rect": "image", + "image.Rectangle": "image", + "image.RegisterFormat": "image", + "image.Transparent": "image", + "image.Uniform": "image", + "image.White": "image", + "image.YCbCr": "image", + "image.YCbCrSubsampleRatio": "image", + "image.YCbCrSubsampleRatio410": "image", + "image.YCbCrSubsampleRatio411": "image", + "image.YCbCrSubsampleRatio420": "image", + "image.YCbCrSubsampleRatio422": "image", + "image.YCbCrSubsampleRatio440": "image", + "image.YCbCrSubsampleRatio444": "image", + "image.ZP": "image", + "image.ZR": "image", + "importer.Default": "go/importer", + "importer.For": "go/importer", + "importer.Lookup": "go/importer", + "io.ByteReader": "io", + "io.ByteScanner": "io", + "io.ByteWriter": "io", + "io.Closer": "io", + "io.Copy": "io", + "io.CopyBuffer": "io", + "io.CopyN": "io", + "io.EOF": "io", + "io.ErrClosedPipe": "io", + "io.ErrNoProgress": "io", + "io.ErrShortBuffer": "io", + "io.ErrShortWrite": "io", + "io.ErrUnexpectedEOF": "io", + "io.LimitReader": "io", + "io.LimitedReader": "io", + "io.MultiReader": "io", + "io.MultiWriter": "io", + "io.NewSectionReader": "io", + "io.Pipe": "io", + "io.PipeReader": "io", + "io.PipeWriter": "io", + "io.ReadAtLeast": "io", + "io.ReadCloser": "io", + "io.ReadFull": "io", + "io.ReadSeeker": "io", + "io.ReadWriteCloser": "io", + "io.ReadWriteSeeker": "io", + "io.ReadWriter": "io", + "io.Reader": "io", + "io.ReaderAt": "io", + "io.ReaderFrom": "io", + "io.RuneReader": "io", + "io.RuneScanner": "io", + "io.SectionReader": "io", + "io.SeekCurrent": "io", + "io.SeekEnd": "io", + "io.SeekStart": "io", + "io.Seeker": "io", + "io.TeeReader": "io", + "io.WriteCloser": "io", + "io.WriteSeeker": "io", + "io.WriteString": "io", + "io.Writer": "io", + "io.WriterAt": "io", + "io.WriterTo": "io", + "iotest.DataErrReader": "testing/iotest", + "iotest.ErrTimeout": "testing/iotest", + "iotest.HalfReader": "testing/iotest", + "iotest.NewReadLogger": "testing/iotest", + "iotest.NewWriteLogger": "testing/iotest", + "iotest.OneByteReader": "testing/iotest", + "iotest.TimeoutReader": "testing/iotest", + "iotest.TruncateWriter": "testing/iotest", + "ioutil.Discard": "io/ioutil", + "ioutil.NopCloser": "io/ioutil", + "ioutil.ReadAll": "io/ioutil", + "ioutil.ReadDir": "io/ioutil", + "ioutil.ReadFile": "io/ioutil", + "ioutil.TempDir": "io/ioutil", + "ioutil.TempFile": "io/ioutil", + "ioutil.WriteFile": "io/ioutil", + "jpeg.Decode": "image/jpeg", + "jpeg.DecodeConfig": "image/jpeg", + "jpeg.DefaultQuality": "image/jpeg", + "jpeg.Encode": "image/jpeg", + "jpeg.FormatError": "image/jpeg", + "jpeg.Options": "image/jpeg", + "jpeg.Reader": "image/jpeg", + "jpeg.UnsupportedError": "image/jpeg", + "json.Compact": "encoding/json", + "json.Decoder": "encoding/json", + "json.Delim": "encoding/json", + "json.Encoder": "encoding/json", + "json.HTMLEscape": "encoding/json", + "json.Indent": "encoding/json", + "json.InvalidUTF8Error": "encoding/json", + "json.InvalidUnmarshalError": "encoding/json", + "json.Marshal": "encoding/json", + "json.MarshalIndent": "encoding/json", + "json.Marshaler": "encoding/json", + "json.MarshalerError": "encoding/json", + "json.NewDecoder": "encoding/json", + "json.NewEncoder": "encoding/json", + "json.Number": "encoding/json", + "json.RawMessage": "encoding/json", + "json.SyntaxError": "encoding/json", + "json.Token": "encoding/json", + "json.Unmarshal": "encoding/json", + "json.UnmarshalFieldError": "encoding/json", + "json.UnmarshalTypeError": "encoding/json", + "json.Unmarshaler": "encoding/json", + 
"json.UnsupportedTypeError": "encoding/json", + "json.UnsupportedValueError": "encoding/json", + "json.Valid": "encoding/json", + "jsonrpc.Dial": "net/rpc/jsonrpc", + "jsonrpc.NewClient": "net/rpc/jsonrpc", + "jsonrpc.NewClientCodec": "net/rpc/jsonrpc", + "jsonrpc.NewServerCodec": "net/rpc/jsonrpc", + "jsonrpc.ServeConn": "net/rpc/jsonrpc", + "list.Element": "container/list", + "list.List": "container/list", + "list.New": "container/list", + "log.Fatal": "log", + "log.Fatalf": "log", + "log.Fatalln": "log", + "log.Flags": "log", + "log.LUTC": "log", + "log.Ldate": "log", + "log.Llongfile": "log", + "log.Lmicroseconds": "log", + "log.Logger": "log", + "log.Lshortfile": "log", + "log.LstdFlags": "log", + "log.Ltime": "log", + "log.New": "log", + "log.Output": "log", + "log.Panic": "log", + "log.Panicf": "log", + "log.Panicln": "log", + "log.Prefix": "log", + "log.Print": "log", + "log.Printf": "log", + "log.Println": "log", + "log.SetFlags": "log", + "log.SetOutput": "log", + "log.SetPrefix": "log", + "lzw.LSB": "compress/lzw", + "lzw.MSB": "compress/lzw", + "lzw.NewReader": "compress/lzw", + "lzw.NewWriter": "compress/lzw", + "lzw.Order": "compress/lzw", + "macho.ARM64_RELOC_ADDEND": "debug/macho", + "macho.ARM64_RELOC_BRANCH26": "debug/macho", + "macho.ARM64_RELOC_GOT_LOAD_PAGE21": "debug/macho", + "macho.ARM64_RELOC_GOT_LOAD_PAGEOFF12": "debug/macho", + "macho.ARM64_RELOC_PAGE21": "debug/macho", + "macho.ARM64_RELOC_PAGEOFF12": "debug/macho", + "macho.ARM64_RELOC_POINTER_TO_GOT": "debug/macho", + "macho.ARM64_RELOC_SUBTRACTOR": "debug/macho", + "macho.ARM64_RELOC_TLVP_LOAD_PAGE21": "debug/macho", + "macho.ARM64_RELOC_TLVP_LOAD_PAGEOFF12": "debug/macho", + "macho.ARM64_RELOC_UNSIGNED": "debug/macho", + "macho.ARM_RELOC_BR24": "debug/macho", + "macho.ARM_RELOC_HALF": "debug/macho", + "macho.ARM_RELOC_HALF_SECTDIFF": "debug/macho", + "macho.ARM_RELOC_LOCAL_SECTDIFF": "debug/macho", + "macho.ARM_RELOC_PAIR": "debug/macho", + "macho.ARM_RELOC_PB_LA_PTR": "debug/macho", + "macho.ARM_RELOC_SECTDIFF": "debug/macho", + "macho.ARM_RELOC_VANILLA": "debug/macho", + "macho.ARM_THUMB_32BIT_BRANCH": "debug/macho", + "macho.ARM_THUMB_RELOC_BR22": "debug/macho", + "macho.Cpu": "debug/macho", + "macho.Cpu386": "debug/macho", + "macho.CpuAmd64": "debug/macho", + "macho.CpuArm": "debug/macho", + "macho.CpuPpc": "debug/macho", + "macho.CpuPpc64": "debug/macho", + "macho.Dylib": "debug/macho", + "macho.DylibCmd": "debug/macho", + "macho.Dysymtab": "debug/macho", + "macho.DysymtabCmd": "debug/macho", + "macho.ErrNotFat": "debug/macho", + "macho.FatArch": "debug/macho", + "macho.FatArchHeader": "debug/macho", + "macho.FatFile": "debug/macho", + "macho.File": "debug/macho", + "macho.FileHeader": "debug/macho", + "macho.FlagAllModsBound": "debug/macho", + "macho.FlagAllowStackExecution": "debug/macho", + "macho.FlagAppExtensionSafe": "debug/macho", + "macho.FlagBindAtLoad": "debug/macho", + "macho.FlagBindsToWeak": "debug/macho", + "macho.FlagCanonical": "debug/macho", + "macho.FlagDeadStrippableDylib": "debug/macho", + "macho.FlagDyldLink": "debug/macho", + "macho.FlagForceFlat": "debug/macho", + "macho.FlagHasTLVDescriptors": "debug/macho", + "macho.FlagIncrLink": "debug/macho", + "macho.FlagLazyInit": "debug/macho", + "macho.FlagNoFixPrebinding": "debug/macho", + "macho.FlagNoHeapExecution": "debug/macho", + "macho.FlagNoMultiDefs": "debug/macho", + "macho.FlagNoReexportedDylibs": "debug/macho", + "macho.FlagNoUndefs": "debug/macho", + "macho.FlagPIE": "debug/macho", + "macho.FlagPrebindable": "debug/macho", + 
"macho.FlagPrebound": "debug/macho", + "macho.FlagRootSafe": "debug/macho", + "macho.FlagSetuidSafe": "debug/macho", + "macho.FlagSplitSegs": "debug/macho", + "macho.FlagSubsectionsViaSymbols": "debug/macho", + "macho.FlagTwoLevel": "debug/macho", + "macho.FlagWeakDefines": "debug/macho", + "macho.FormatError": "debug/macho", + "macho.GENERIC_RELOC_LOCAL_SECTDIFF": "debug/macho", + "macho.GENERIC_RELOC_PAIR": "debug/macho", + "macho.GENERIC_RELOC_PB_LA_PTR": "debug/macho", + "macho.GENERIC_RELOC_SECTDIFF": "debug/macho", + "macho.GENERIC_RELOC_TLV": "debug/macho", + "macho.GENERIC_RELOC_VANILLA": "debug/macho", + "macho.Load": "debug/macho", + "macho.LoadBytes": "debug/macho", + "macho.LoadCmd": "debug/macho", + "macho.LoadCmdDylib": "debug/macho", + "macho.LoadCmdDylinker": "debug/macho", + "macho.LoadCmdDysymtab": "debug/macho", + "macho.LoadCmdRpath": "debug/macho", + "macho.LoadCmdSegment": "debug/macho", + "macho.LoadCmdSegment64": "debug/macho", + "macho.LoadCmdSymtab": "debug/macho", + "macho.LoadCmdThread": "debug/macho", + "macho.LoadCmdUnixThread": "debug/macho", + "macho.Magic32": "debug/macho", + "macho.Magic64": "debug/macho", + "macho.MagicFat": "debug/macho", + "macho.NewFatFile": "debug/macho", + "macho.NewFile": "debug/macho", + "macho.Nlist32": "debug/macho", + "macho.Nlist64": "debug/macho", + "macho.Open": "debug/macho", + "macho.OpenFat": "debug/macho", + "macho.Regs386": "debug/macho", + "macho.RegsAMD64": "debug/macho", + "macho.Reloc": "debug/macho", + "macho.RelocTypeARM": "debug/macho", + "macho.RelocTypeARM64": "debug/macho", + "macho.RelocTypeGeneric": "debug/macho", + "macho.RelocTypeX86_64": "debug/macho", + "macho.Rpath": "debug/macho", + "macho.RpathCmd": "debug/macho", + "macho.Section": "debug/macho", + "macho.Section32": "debug/macho", + "macho.Section64": "debug/macho", + "macho.SectionHeader": "debug/macho", + "macho.Segment": "debug/macho", + "macho.Segment32": "debug/macho", + "macho.Segment64": "debug/macho", + "macho.SegmentHeader": "debug/macho", + "macho.Symbol": "debug/macho", + "macho.Symtab": "debug/macho", + "macho.SymtabCmd": "debug/macho", + "macho.Thread": "debug/macho", + "macho.Type": "debug/macho", + "macho.TypeBundle": "debug/macho", + "macho.TypeDylib": "debug/macho", + "macho.TypeExec": "debug/macho", + "macho.TypeObj": "debug/macho", + "macho.X86_64_RELOC_BRANCH": "debug/macho", + "macho.X86_64_RELOC_GOT": "debug/macho", + "macho.X86_64_RELOC_GOT_LOAD": "debug/macho", + "macho.X86_64_RELOC_SIGNED": "debug/macho", + "macho.X86_64_RELOC_SIGNED_1": "debug/macho", + "macho.X86_64_RELOC_SIGNED_2": "debug/macho", + "macho.X86_64_RELOC_SIGNED_4": "debug/macho", + "macho.X86_64_RELOC_SUBTRACTOR": "debug/macho", + "macho.X86_64_RELOC_TLV": "debug/macho", + "macho.X86_64_RELOC_UNSIGNED": "debug/macho", + "mail.Address": "net/mail", + "mail.AddressParser": "net/mail", + "mail.ErrHeaderNotPresent": "net/mail", + "mail.Header": "net/mail", + "mail.Message": "net/mail", + "mail.ParseAddress": "net/mail", + "mail.ParseAddressList": "net/mail", + "mail.ParseDate": "net/mail", + "mail.ReadMessage": "net/mail", + "math.Abs": "math", + "math.Acos": "math", + "math.Acosh": "math", + "math.Asin": "math", + "math.Asinh": "math", + "math.Atan": "math", + "math.Atan2": "math", + "math.Atanh": "math", + "math.Cbrt": "math", + "math.Ceil": "math", + "math.Copysign": "math", + "math.Cos": "math", + "math.Cosh": "math", + "math.Dim": "math", + "math.E": "math", + "math.Erf": "math", + "math.Erfc": "math", + "math.Erfcinv": "math", + "math.Erfinv": "math", + 
"math.Exp": "math", + "math.Exp2": "math", + "math.Expm1": "math", + "math.Float32bits": "math", + "math.Float32frombits": "math", + "math.Float64bits": "math", + "math.Float64frombits": "math", + "math.Floor": "math", + "math.Frexp": "math", + "math.Gamma": "math", + "math.Hypot": "math", + "math.Ilogb": "math", + "math.Inf": "math", + "math.IsInf": "math", + "math.IsNaN": "math", + "math.J0": "math", + "math.J1": "math", + "math.Jn": "math", + "math.Ldexp": "math", + "math.Lgamma": "math", + "math.Ln10": "math", + "math.Ln2": "math", + "math.Log": "math", + "math.Log10": "math", + "math.Log10E": "math", + "math.Log1p": "math", + "math.Log2": "math", + "math.Log2E": "math", + "math.Logb": "math", + "math.Max": "math", + "math.MaxFloat32": "math", + "math.MaxFloat64": "math", + "math.MaxInt16": "math", + "math.MaxInt32": "math", + "math.MaxInt64": "math", + "math.MaxInt8": "math", + "math.MaxUint16": "math", + "math.MaxUint32": "math", + "math.MaxUint64": "math", + "math.MaxUint8": "math", + "math.Min": "math", + "math.MinInt16": "math", + "math.MinInt32": "math", + "math.MinInt64": "math", + "math.MinInt8": "math", + "math.Mod": "math", + "math.Modf": "math", + "math.NaN": "math", + "math.Nextafter": "math", + "math.Nextafter32": "math", + "math.Phi": "math", + "math.Pi": "math", + "math.Pow": "math", + "math.Pow10": "math", + "math.Remainder": "math", + "math.Round": "math", + "math.RoundToEven": "math", + "math.Signbit": "math", + "math.Sin": "math", + "math.Sincos": "math", + "math.Sinh": "math", + "math.SmallestNonzeroFloat32": "math", + "math.SmallestNonzeroFloat64": "math", + "math.Sqrt": "math", + "math.Sqrt2": "math", + "math.SqrtE": "math", + "math.SqrtPhi": "math", + "math.SqrtPi": "math", + "math.Tan": "math", + "math.Tanh": "math", + "math.Trunc": "math", + "math.Y0": "math", + "math.Y1": "math", + "math.Yn": "math", + "md5.BlockSize": "crypto/md5", + "md5.New": "crypto/md5", + "md5.Size": "crypto/md5", + "md5.Sum": "crypto/md5", + "mime.AddExtensionType": "mime", + "mime.BEncoding": "mime", + "mime.ErrInvalidMediaParameter": "mime", + "mime.ExtensionsByType": "mime", + "mime.FormatMediaType": "mime", + "mime.ParseMediaType": "mime", + "mime.QEncoding": "mime", + "mime.TypeByExtension": "mime", + "mime.WordDecoder": "mime", + "mime.WordEncoder": "mime", + "multipart.ErrMessageTooLarge": "mime/multipart", + "multipart.File": "mime/multipart", + "multipart.FileHeader": "mime/multipart", + "multipart.Form": "mime/multipart", + "multipart.NewReader": "mime/multipart", + "multipart.NewWriter": "mime/multipart", + "multipart.Part": "mime/multipart", + "multipart.Reader": "mime/multipart", + "multipart.Writer": "mime/multipart", + "net.Addr": "net", + "net.AddrError": "net", + "net.Buffers": "net", + "net.CIDRMask": "net", + "net.Conn": "net", + "net.DNSConfigError": "net", + "net.DNSError": "net", + "net.DefaultResolver": "net", + "net.Dial": "net", + "net.DialIP": "net", + "net.DialTCP": "net", + "net.DialTimeout": "net", + "net.DialUDP": "net", + "net.DialUnix": "net", + "net.Dialer": "net", + "net.ErrWriteToConnected": "net", + "net.Error": "net", + "net.FileConn": "net", + "net.FileListener": "net", + "net.FilePacketConn": "net", + "net.FlagBroadcast": "net", + "net.FlagLoopback": "net", + "net.FlagMulticast": "net", + "net.FlagPointToPoint": "net", + "net.FlagUp": "net", + "net.Flags": "net", + "net.HardwareAddr": "net", + "net.IP": "net", + "net.IPAddr": "net", + "net.IPConn": "net", + "net.IPMask": "net", + "net.IPNet": "net", + "net.IPv4": "net", + "net.IPv4Mask": "net", + 
"net.IPv4allrouter": "net", + "net.IPv4allsys": "net", + "net.IPv4bcast": "net", + "net.IPv4len": "net", + "net.IPv4zero": "net", + "net.IPv6interfacelocalallnodes": "net", + "net.IPv6len": "net", + "net.IPv6linklocalallnodes": "net", + "net.IPv6linklocalallrouters": "net", + "net.IPv6loopback": "net", + "net.IPv6unspecified": "net", + "net.IPv6zero": "net", + "net.Interface": "net", + "net.InterfaceAddrs": "net", + "net.InterfaceByIndex": "net", + "net.InterfaceByName": "net", + "net.Interfaces": "net", + "net.InvalidAddrError": "net", + "net.JoinHostPort": "net", + "net.Listen": "net", + "net.ListenIP": "net", + "net.ListenMulticastUDP": "net", + "net.ListenPacket": "net", + "net.ListenTCP": "net", + "net.ListenUDP": "net", + "net.ListenUnix": "net", + "net.ListenUnixgram": "net", + "net.Listener": "net", + "net.LookupAddr": "net", + "net.LookupCNAME": "net", + "net.LookupHost": "net", + "net.LookupIP": "net", + "net.LookupMX": "net", + "net.LookupNS": "net", + "net.LookupPort": "net", + "net.LookupSRV": "net", + "net.LookupTXT": "net", + "net.MX": "net", + "net.NS": "net", + "net.OpError": "net", + "net.PacketConn": "net", + "net.ParseCIDR": "net", + "net.ParseError": "net", + "net.ParseIP": "net", + "net.ParseMAC": "net", + "net.Pipe": "net", + "net.ResolveIPAddr": "net", + "net.ResolveTCPAddr": "net", + "net.ResolveUDPAddr": "net", + "net.ResolveUnixAddr": "net", + "net.Resolver": "net", + "net.SRV": "net", + "net.SplitHostPort": "net", + "net.TCPAddr": "net", + "net.TCPConn": "net", + "net.TCPListener": "net", + "net.UDPAddr": "net", + "net.UDPConn": "net", + "net.UnixAddr": "net", + "net.UnixConn": "net", + "net.UnixListener": "net", + "net.UnknownNetworkError": "net", + "os.Args": "os", + "os.Chdir": "os", + "os.Chmod": "os", + "os.Chown": "os", + "os.Chtimes": "os", + "os.Clearenv": "os", + "os.Create": "os", + "os.DevNull": "os", + "os.Environ": "os", + "os.ErrClosed": "os", + "os.ErrExist": "os", + "os.ErrInvalid": "os", + "os.ErrNoDeadline": "os", + "os.ErrNotExist": "os", + "os.ErrPermission": "os", + "os.Executable": "os", + "os.Exit": "os", + "os.Expand": "os", + "os.ExpandEnv": "os", + "os.File": "os", + "os.FileInfo": "os", + "os.FileMode": "os", + "os.FindProcess": "os", + "os.Getegid": "os", + "os.Getenv": "os", + "os.Geteuid": "os", + "os.Getgid": "os", + "os.Getgroups": "os", + "os.Getpagesize": "os", + "os.Getpid": "os", + "os.Getppid": "os", + "os.Getuid": "os", + "os.Getwd": "os", + "os.Hostname": "os", + "os.Interrupt": "os", + "os.IsExist": "os", + "os.IsNotExist": "os", + "os.IsPathSeparator": "os", + "os.IsPermission": "os", + "os.IsTimeout": "os", + "os.Kill": "os", + "os.Lchown": "os", + "os.Link": "os", + "os.LinkError": "os", + "os.LookupEnv": "os", + "os.Lstat": "os", + "os.Mkdir": "os", + "os.MkdirAll": "os", + "os.ModeAppend": "os", + "os.ModeCharDevice": "os", + "os.ModeDevice": "os", + "os.ModeDir": "os", + "os.ModeExclusive": "os", + "os.ModeNamedPipe": "os", + "os.ModePerm": "os", + "os.ModeSetgid": "os", + "os.ModeSetuid": "os", + "os.ModeSocket": "os", + "os.ModeSticky": "os", + "os.ModeSymlink": "os", + "os.ModeTemporary": "os", + "os.ModeType": "os", + "os.NewFile": "os", + "os.NewSyscallError": "os", + "os.O_APPEND": "os", + "os.O_CREATE": "os", + "os.O_EXCL": "os", + "os.O_RDONLY": "os", + "os.O_RDWR": "os", + "os.O_SYNC": "os", + "os.O_TRUNC": "os", + "os.O_WRONLY": "os", + "os.Open": "os", + "os.OpenFile": "os", + "os.PathError": "os", + "os.PathListSeparator": "os", + "os.PathSeparator": "os", + "os.Pipe": "os", + "os.ProcAttr": "os", + 
"os.Process": "os", + "os.ProcessState": "os", + "os.Readlink": "os", + "os.Remove": "os", + "os.RemoveAll": "os", + "os.Rename": "os", + "os.SEEK_CUR": "os", + "os.SEEK_END": "os", + "os.SEEK_SET": "os", + "os.SameFile": "os", + "os.Setenv": "os", + "os.Signal": "os", + "os.StartProcess": "os", + "os.Stat": "os", + "os.Stderr": "os", + "os.Stdin": "os", + "os.Stdout": "os", + "os.Symlink": "os", + "os.SyscallError": "os", + "os.TempDir": "os", + "os.Truncate": "os", + "os.Unsetenv": "os", + "palette.Plan9": "image/color/palette", + "palette.WebSafe": "image/color/palette", + "parse.ActionNode": "text/template/parse", + "parse.BoolNode": "text/template/parse", + "parse.BranchNode": "text/template/parse", + "parse.ChainNode": "text/template/parse", + "parse.CommandNode": "text/template/parse", + "parse.DotNode": "text/template/parse", + "parse.FieldNode": "text/template/parse", + "parse.IdentifierNode": "text/template/parse", + "parse.IfNode": "text/template/parse", + "parse.IsEmptyTree": "text/template/parse", + "parse.ListNode": "text/template/parse", + "parse.New": "text/template/parse", + "parse.NewIdentifier": "text/template/parse", + "parse.NilNode": "text/template/parse", + "parse.Node": "text/template/parse", + "parse.NodeAction": "text/template/parse", + "parse.NodeBool": "text/template/parse", + "parse.NodeChain": "text/template/parse", + "parse.NodeCommand": "text/template/parse", + "parse.NodeDot": "text/template/parse", + "parse.NodeField": "text/template/parse", + "parse.NodeIdentifier": "text/template/parse", + "parse.NodeIf": "text/template/parse", + "parse.NodeList": "text/template/parse", + "parse.NodeNil": "text/template/parse", + "parse.NodeNumber": "text/template/parse", + "parse.NodePipe": "text/template/parse", + "parse.NodeRange": "text/template/parse", + "parse.NodeString": "text/template/parse", + "parse.NodeTemplate": "text/template/parse", + "parse.NodeText": "text/template/parse", + "parse.NodeType": "text/template/parse", + "parse.NodeVariable": "text/template/parse", + "parse.NodeWith": "text/template/parse", + "parse.NumberNode": "text/template/parse", + "parse.Parse": "text/template/parse", + "parse.PipeNode": "text/template/parse", + "parse.Pos": "text/template/parse", + "parse.RangeNode": "text/template/parse", + "parse.StringNode": "text/template/parse", + "parse.TemplateNode": "text/template/parse", + "parse.TextNode": "text/template/parse", + "parse.Tree": "text/template/parse", + "parse.VariableNode": "text/template/parse", + "parse.WithNode": "text/template/parse", + "parser.AllErrors": "go/parser", + "parser.DeclarationErrors": "go/parser", + "parser.ImportsOnly": "go/parser", + "parser.Mode": "go/parser", + "parser.PackageClauseOnly": "go/parser", + "parser.ParseComments": "go/parser", + "parser.ParseDir": "go/parser", + "parser.ParseExpr": "go/parser", + "parser.ParseExprFrom": "go/parser", + "parser.ParseFile": "go/parser", + "parser.SpuriousErrors": "go/parser", + "parser.Trace": "go/parser", + "path.Base": "path", + "path.Clean": "path", + "path.Dir": "path", + "path.ErrBadPattern": "path", + "path.Ext": "path", + "path.IsAbs": "path", + "path.Join": "path", + "path.Match": "path", + "path.Split": "path", + "pe.COFFSymbol": "debug/pe", + "pe.COFFSymbolSize": "debug/pe", + "pe.DataDirectory": "debug/pe", + "pe.File": "debug/pe", + "pe.FileHeader": "debug/pe", + "pe.FormatError": "debug/pe", + "pe.IMAGE_FILE_MACHINE_AM33": "debug/pe", + "pe.IMAGE_FILE_MACHINE_AMD64": "debug/pe", + "pe.IMAGE_FILE_MACHINE_ARM": "debug/pe", + 
"pe.IMAGE_FILE_MACHINE_EBC": "debug/pe", + "pe.IMAGE_FILE_MACHINE_I386": "debug/pe", + "pe.IMAGE_FILE_MACHINE_IA64": "debug/pe", + "pe.IMAGE_FILE_MACHINE_M32R": "debug/pe", + "pe.IMAGE_FILE_MACHINE_MIPS16": "debug/pe", + "pe.IMAGE_FILE_MACHINE_MIPSFPU": "debug/pe", + "pe.IMAGE_FILE_MACHINE_MIPSFPU16": "debug/pe", + "pe.IMAGE_FILE_MACHINE_POWERPC": "debug/pe", + "pe.IMAGE_FILE_MACHINE_POWERPCFP": "debug/pe", + "pe.IMAGE_FILE_MACHINE_R4000": "debug/pe", + "pe.IMAGE_FILE_MACHINE_SH3": "debug/pe", + "pe.IMAGE_FILE_MACHINE_SH3DSP": "debug/pe", + "pe.IMAGE_FILE_MACHINE_SH4": "debug/pe", + "pe.IMAGE_FILE_MACHINE_SH5": "debug/pe", + "pe.IMAGE_FILE_MACHINE_THUMB": "debug/pe", + "pe.IMAGE_FILE_MACHINE_UNKNOWN": "debug/pe", + "pe.IMAGE_FILE_MACHINE_WCEMIPSV2": "debug/pe", + "pe.ImportDirectory": "debug/pe", + "pe.NewFile": "debug/pe", + "pe.Open": "debug/pe", + "pe.OptionalHeader32": "debug/pe", + "pe.OptionalHeader64": "debug/pe", + "pe.Reloc": "debug/pe", + "pe.Section": "debug/pe", + "pe.SectionHeader": "debug/pe", + "pe.SectionHeader32": "debug/pe", + "pe.StringTable": "debug/pe", + "pe.Symbol": "debug/pe", + "pem.Block": "encoding/pem", + "pem.Decode": "encoding/pem", + "pem.Encode": "encoding/pem", + "pem.EncodeToMemory": "encoding/pem", + "pkix.AlgorithmIdentifier": "crypto/x509/pkix", + "pkix.AttributeTypeAndValue": "crypto/x509/pkix", + "pkix.AttributeTypeAndValueSET": "crypto/x509/pkix", + "pkix.CertificateList": "crypto/x509/pkix", + "pkix.Extension": "crypto/x509/pkix", + "pkix.Name": "crypto/x509/pkix", + "pkix.RDNSequence": "crypto/x509/pkix", + "pkix.RelativeDistinguishedNameSET": "crypto/x509/pkix", + "pkix.RevokedCertificate": "crypto/x509/pkix", + "pkix.TBSCertificateList": "crypto/x509/pkix", + "plan9obj.File": "debug/plan9obj", + "plan9obj.FileHeader": "debug/plan9obj", + "plan9obj.Magic386": "debug/plan9obj", + "plan9obj.Magic64": "debug/plan9obj", + "plan9obj.MagicAMD64": "debug/plan9obj", + "plan9obj.MagicARM": "debug/plan9obj", + "plan9obj.NewFile": "debug/plan9obj", + "plan9obj.Open": "debug/plan9obj", + "plan9obj.Section": "debug/plan9obj", + "plan9obj.SectionHeader": "debug/plan9obj", + "plan9obj.Sym": "debug/plan9obj", + "plugin.Open": "plugin", + "plugin.Plugin": "plugin", + "plugin.Symbol": "plugin", + "png.BestCompression": "image/png", + "png.BestSpeed": "image/png", + "png.CompressionLevel": "image/png", + "png.Decode": "image/png", + "png.DecodeConfig": "image/png", + "png.DefaultCompression": "image/png", + "png.Encode": "image/png", + "png.Encoder": "image/png", + "png.EncoderBuffer": "image/png", + "png.EncoderBufferPool": "image/png", + "png.FormatError": "image/png", + "png.NoCompression": "image/png", + "png.UnsupportedError": "image/png", + "pprof.Cmdline": "net/http/pprof", + "pprof.Do": "runtime/pprof", + "pprof.ForLabels": "runtime/pprof", + "pprof.Handler": "net/http/pprof", + "pprof.Index": "net/http/pprof", + "pprof.Label": "runtime/pprof", + "pprof.LabelSet": "runtime/pprof", + "pprof.Labels": "runtime/pprof", + "pprof.Lookup": "runtime/pprof", + "pprof.NewProfile": "runtime/pprof", + // "pprof.Profile" is ambiguous + "pprof.Profiles": "runtime/pprof", + "pprof.SetGoroutineLabels": "runtime/pprof", + "pprof.StartCPUProfile": "runtime/pprof", + "pprof.StopCPUProfile": "runtime/pprof", + "pprof.Symbol": "net/http/pprof", + "pprof.Trace": "net/http/pprof", + "pprof.WithLabels": "runtime/pprof", + "pprof.WriteHeapProfile": "runtime/pprof", + "printer.CommentedNode": "go/printer", + "printer.Config": "go/printer", + "printer.Fprint": "go/printer", + 
"printer.Mode": "go/printer", + "printer.RawFormat": "go/printer", + "printer.SourcePos": "go/printer", + "printer.TabIndent": "go/printer", + "printer.UseSpaces": "go/printer", + "quick.Check": "testing/quick", + "quick.CheckEqual": "testing/quick", + "quick.CheckEqualError": "testing/quick", + "quick.CheckError": "testing/quick", + "quick.Config": "testing/quick", + "quick.Generator": "testing/quick", + "quick.SetupError": "testing/quick", + "quick.Value": "testing/quick", + "quotedprintable.NewReader": "mime/quotedprintable", + "quotedprintable.NewWriter": "mime/quotedprintable", + "quotedprintable.Reader": "mime/quotedprintable", + "quotedprintable.Writer": "mime/quotedprintable", + "rand.ExpFloat64": "math/rand", + "rand.Float32": "math/rand", + "rand.Float64": "math/rand", + // "rand.Int" is ambiguous + "rand.Int31": "math/rand", + "rand.Int31n": "math/rand", + "rand.Int63": "math/rand", + "rand.Int63n": "math/rand", + "rand.Intn": "math/rand", + "rand.New": "math/rand", + "rand.NewSource": "math/rand", + "rand.NewZipf": "math/rand", + "rand.NormFloat64": "math/rand", + "rand.Perm": "math/rand", + "rand.Prime": "crypto/rand", + "rand.Rand": "math/rand", + // "rand.Read" is ambiguous + "rand.Reader": "crypto/rand", + "rand.Seed": "math/rand", + "rand.Shuffle": "math/rand", + "rand.Source": "math/rand", + "rand.Source64": "math/rand", + "rand.Uint32": "math/rand", + "rand.Uint64": "math/rand", + "rand.Zipf": "math/rand", + "rc4.Cipher": "crypto/rc4", + "rc4.KeySizeError": "crypto/rc4", + "rc4.NewCipher": "crypto/rc4", + "reflect.Append": "reflect", + "reflect.AppendSlice": "reflect", + "reflect.Array": "reflect", + "reflect.ArrayOf": "reflect", + "reflect.Bool": "reflect", + "reflect.BothDir": "reflect", + "reflect.Chan": "reflect", + "reflect.ChanDir": "reflect", + "reflect.ChanOf": "reflect", + "reflect.Complex128": "reflect", + "reflect.Complex64": "reflect", + "reflect.Copy": "reflect", + "reflect.DeepEqual": "reflect", + "reflect.Float32": "reflect", + "reflect.Float64": "reflect", + "reflect.Func": "reflect", + "reflect.FuncOf": "reflect", + "reflect.Indirect": "reflect", + "reflect.Int": "reflect", + "reflect.Int16": "reflect", + "reflect.Int32": "reflect", + "reflect.Int64": "reflect", + "reflect.Int8": "reflect", + "reflect.Interface": "reflect", + "reflect.Invalid": "reflect", + "reflect.Kind": "reflect", + "reflect.MakeChan": "reflect", + "reflect.MakeFunc": "reflect", + "reflect.MakeMap": "reflect", + "reflect.MakeMapWithSize": "reflect", + "reflect.MakeSlice": "reflect", + "reflect.Map": "reflect", + "reflect.MapOf": "reflect", + "reflect.Method": "reflect", + "reflect.New": "reflect", + "reflect.NewAt": "reflect", + "reflect.Ptr": "reflect", + "reflect.PtrTo": "reflect", + "reflect.RecvDir": "reflect", + "reflect.Select": "reflect", + "reflect.SelectCase": "reflect", + "reflect.SelectDefault": "reflect", + "reflect.SelectDir": "reflect", + "reflect.SelectRecv": "reflect", + "reflect.SelectSend": "reflect", + "reflect.SendDir": "reflect", + "reflect.Slice": "reflect", + "reflect.SliceHeader": "reflect", + "reflect.SliceOf": "reflect", + "reflect.String": "reflect", + "reflect.StringHeader": "reflect", + "reflect.Struct": "reflect", + "reflect.StructField": "reflect", + "reflect.StructOf": "reflect", + "reflect.StructTag": "reflect", + "reflect.Swapper": "reflect", + "reflect.TypeOf": "reflect", + "reflect.Uint": "reflect", + "reflect.Uint16": "reflect", + "reflect.Uint32": "reflect", + "reflect.Uint64": "reflect", + "reflect.Uint8": "reflect", + "reflect.Uintptr": 
"reflect", + "reflect.UnsafePointer": "reflect", + "reflect.Value": "reflect", + "reflect.ValueError": "reflect", + "reflect.ValueOf": "reflect", + "reflect.Zero": "reflect", + "regexp.Compile": "regexp", + "regexp.CompilePOSIX": "regexp", + "regexp.Match": "regexp", + "regexp.MatchReader": "regexp", + "regexp.MatchString": "regexp", + "regexp.MustCompile": "regexp", + "regexp.MustCompilePOSIX": "regexp", + "regexp.QuoteMeta": "regexp", + "regexp.Regexp": "regexp", + "ring.New": "container/ring", + "ring.Ring": "container/ring", + "rpc.Accept": "net/rpc", + "rpc.Call": "net/rpc", + "rpc.Client": "net/rpc", + "rpc.ClientCodec": "net/rpc", + "rpc.DefaultDebugPath": "net/rpc", + "rpc.DefaultRPCPath": "net/rpc", + "rpc.DefaultServer": "net/rpc", + "rpc.Dial": "net/rpc", + "rpc.DialHTTP": "net/rpc", + "rpc.DialHTTPPath": "net/rpc", + "rpc.ErrShutdown": "net/rpc", + "rpc.HandleHTTP": "net/rpc", + "rpc.NewClient": "net/rpc", + "rpc.NewClientWithCodec": "net/rpc", + "rpc.NewServer": "net/rpc", + "rpc.Register": "net/rpc", + "rpc.RegisterName": "net/rpc", + "rpc.Request": "net/rpc", + "rpc.Response": "net/rpc", + "rpc.ServeCodec": "net/rpc", + "rpc.ServeConn": "net/rpc", + "rpc.ServeRequest": "net/rpc", + "rpc.Server": "net/rpc", + "rpc.ServerCodec": "net/rpc", + "rpc.ServerError": "net/rpc", + "rsa.CRTValue": "crypto/rsa", + "rsa.DecryptOAEP": "crypto/rsa", + "rsa.DecryptPKCS1v15": "crypto/rsa", + "rsa.DecryptPKCS1v15SessionKey": "crypto/rsa", + "rsa.EncryptOAEP": "crypto/rsa", + "rsa.EncryptPKCS1v15": "crypto/rsa", + "rsa.ErrDecryption": "crypto/rsa", + "rsa.ErrMessageTooLong": "crypto/rsa", + "rsa.ErrVerification": "crypto/rsa", + "rsa.GenerateKey": "crypto/rsa", + "rsa.GenerateMultiPrimeKey": "crypto/rsa", + "rsa.OAEPOptions": "crypto/rsa", + "rsa.PKCS1v15DecryptOptions": "crypto/rsa", + "rsa.PSSOptions": "crypto/rsa", + "rsa.PSSSaltLengthAuto": "crypto/rsa", + "rsa.PSSSaltLengthEqualsHash": "crypto/rsa", + "rsa.PrecomputedValues": "crypto/rsa", + "rsa.PrivateKey": "crypto/rsa", + "rsa.PublicKey": "crypto/rsa", + "rsa.SignPKCS1v15": "crypto/rsa", + "rsa.SignPSS": "crypto/rsa", + "rsa.VerifyPKCS1v15": "crypto/rsa", + "rsa.VerifyPSS": "crypto/rsa", + "runtime.BlockProfile": "runtime", + "runtime.BlockProfileRecord": "runtime", + "runtime.Breakpoint": "runtime", + "runtime.CPUProfile": "runtime", + "runtime.Caller": "runtime", + "runtime.Callers": "runtime", + "runtime.CallersFrames": "runtime", + "runtime.Compiler": "runtime", + "runtime.Error": "runtime", + "runtime.Frame": "runtime", + "runtime.Frames": "runtime", + "runtime.Func": "runtime", + "runtime.FuncForPC": "runtime", + "runtime.GC": "runtime", + "runtime.GOARCH": "runtime", + "runtime.GOMAXPROCS": "runtime", + "runtime.GOOS": "runtime", + "runtime.GOROOT": "runtime", + "runtime.Goexit": "runtime", + "runtime.GoroutineProfile": "runtime", + "runtime.Gosched": "runtime", + "runtime.KeepAlive": "runtime", + "runtime.LockOSThread": "runtime", + "runtime.MemProfile": "runtime", + "runtime.MemProfileRate": "runtime", + "runtime.MemProfileRecord": "runtime", + "runtime.MemStats": "runtime", + "runtime.MutexProfile": "runtime", + "runtime.NumCPU": "runtime", + "runtime.NumCgoCall": "runtime", + "runtime.NumGoroutine": "runtime", + "runtime.ReadMemStats": "runtime", + "runtime.ReadTrace": "runtime", + "runtime.SetBlockProfileRate": "runtime", + "runtime.SetCPUProfileRate": "runtime", + "runtime.SetCgoTraceback": "runtime", + "runtime.SetFinalizer": "runtime", + "runtime.SetMutexProfileFraction": "runtime", + "runtime.Stack": "runtime", + 
"runtime.StackRecord": "runtime", + "runtime.StartTrace": "runtime", + "runtime.StopTrace": "runtime", + "runtime.ThreadCreateProfile": "runtime", + "runtime.TypeAssertionError": "runtime", + "runtime.UnlockOSThread": "runtime", + "runtime.Version": "runtime", + "scanner.Char": "text/scanner", + "scanner.Comment": "text/scanner", + "scanner.EOF": "text/scanner", + "scanner.Error": "go/scanner", + "scanner.ErrorHandler": "go/scanner", + "scanner.ErrorList": "go/scanner", + "scanner.Float": "text/scanner", + "scanner.GoTokens": "text/scanner", + "scanner.GoWhitespace": "text/scanner", + "scanner.Ident": "text/scanner", + "scanner.Int": "text/scanner", + "scanner.Mode": "go/scanner", + "scanner.Position": "text/scanner", + "scanner.PrintError": "go/scanner", + "scanner.RawString": "text/scanner", + "scanner.ScanChars": "text/scanner", + // "scanner.ScanComments" is ambiguous + "scanner.ScanFloats": "text/scanner", + "scanner.ScanIdents": "text/scanner", + "scanner.ScanInts": "text/scanner", + "scanner.ScanRawStrings": "text/scanner", + "scanner.ScanStrings": "text/scanner", + // "scanner.Scanner" is ambiguous + "scanner.SkipComments": "text/scanner", + "scanner.String": "text/scanner", + "scanner.TokenString": "text/scanner", + "sha1.BlockSize": "crypto/sha1", + "sha1.New": "crypto/sha1", + "sha1.Size": "crypto/sha1", + "sha1.Sum": "crypto/sha1", + "sha256.BlockSize": "crypto/sha256", + "sha256.New": "crypto/sha256", + "sha256.New224": "crypto/sha256", + "sha256.Size": "crypto/sha256", + "sha256.Size224": "crypto/sha256", + "sha256.Sum224": "crypto/sha256", + "sha256.Sum256": "crypto/sha256", + "sha512.BlockSize": "crypto/sha512", + "sha512.New": "crypto/sha512", + "sha512.New384": "crypto/sha512", + "sha512.New512_224": "crypto/sha512", + "sha512.New512_256": "crypto/sha512", + "sha512.Size": "crypto/sha512", + "sha512.Size224": "crypto/sha512", + "sha512.Size256": "crypto/sha512", + "sha512.Size384": "crypto/sha512", + "sha512.Sum384": "crypto/sha512", + "sha512.Sum512": "crypto/sha512", + "sha512.Sum512_224": "crypto/sha512", + "sha512.Sum512_256": "crypto/sha512", + "signal.Ignore": "os/signal", + "signal.Notify": "os/signal", + "signal.Reset": "os/signal", + "signal.Stop": "os/signal", + "smtp.Auth": "net/smtp", + "smtp.CRAMMD5Auth": "net/smtp", + "smtp.Client": "net/smtp", + "smtp.Dial": "net/smtp", + "smtp.NewClient": "net/smtp", + "smtp.PlainAuth": "net/smtp", + "smtp.SendMail": "net/smtp", + "smtp.ServerInfo": "net/smtp", + "sort.Float64Slice": "sort", + "sort.Float64s": "sort", + "sort.Float64sAreSorted": "sort", + "sort.IntSlice": "sort", + "sort.Interface": "sort", + "sort.Ints": "sort", + "sort.IntsAreSorted": "sort", + "sort.IsSorted": "sort", + "sort.Reverse": "sort", + "sort.Search": "sort", + "sort.SearchFloat64s": "sort", + "sort.SearchInts": "sort", + "sort.SearchStrings": "sort", + "sort.Slice": "sort", + "sort.SliceIsSorted": "sort", + "sort.SliceStable": "sort", + "sort.Sort": "sort", + "sort.Stable": "sort", + "sort.StringSlice": "sort", + "sort.Strings": "sort", + "sort.StringsAreSorted": "sort", + "sql.ColumnType": "database/sql", + "sql.Conn": "database/sql", + "sql.DB": "database/sql", + "sql.DBStats": "database/sql", + "sql.Drivers": "database/sql", + "sql.ErrConnDone": "database/sql", + "sql.ErrNoRows": "database/sql", + "sql.ErrTxDone": "database/sql", + "sql.IsolationLevel": "database/sql", + "sql.LevelDefault": "database/sql", + "sql.LevelLinearizable": "database/sql", + "sql.LevelReadCommitted": "database/sql", + "sql.LevelReadUncommitted": "database/sql", + 
"sql.LevelRepeatableRead": "database/sql", + "sql.LevelSerializable": "database/sql", + "sql.LevelSnapshot": "database/sql", + "sql.LevelWriteCommitted": "database/sql", + "sql.Named": "database/sql", + "sql.NamedArg": "database/sql", + "sql.NullBool": "database/sql", + "sql.NullFloat64": "database/sql", + "sql.NullInt64": "database/sql", + "sql.NullString": "database/sql", + "sql.Open": "database/sql", + "sql.OpenDB": "database/sql", + "sql.Out": "database/sql", + "sql.RawBytes": "database/sql", + "sql.Register": "database/sql", + "sql.Result": "database/sql", + "sql.Row": "database/sql", + "sql.Rows": "database/sql", + "sql.Scanner": "database/sql", + "sql.Stmt": "database/sql", + "sql.Tx": "database/sql", + "sql.TxOptions": "database/sql", + "strconv.AppendBool": "strconv", + "strconv.AppendFloat": "strconv", + "strconv.AppendInt": "strconv", + "strconv.AppendQuote": "strconv", + "strconv.AppendQuoteRune": "strconv", + "strconv.AppendQuoteRuneToASCII": "strconv", + "strconv.AppendQuoteRuneToGraphic": "strconv", + "strconv.AppendQuoteToASCII": "strconv", + "strconv.AppendQuoteToGraphic": "strconv", + "strconv.AppendUint": "strconv", + "strconv.Atoi": "strconv", + "strconv.CanBackquote": "strconv", + "strconv.ErrRange": "strconv", + "strconv.ErrSyntax": "strconv", + "strconv.FormatBool": "strconv", + "strconv.FormatFloat": "strconv", + "strconv.FormatInt": "strconv", + "strconv.FormatUint": "strconv", + "strconv.IntSize": "strconv", + "strconv.IsGraphic": "strconv", + "strconv.IsPrint": "strconv", + "strconv.Itoa": "strconv", + "strconv.NumError": "strconv", + "strconv.ParseBool": "strconv", + "strconv.ParseFloat": "strconv", + "strconv.ParseInt": "strconv", + "strconv.ParseUint": "strconv", + "strconv.Quote": "strconv", + "strconv.QuoteRune": "strconv", + "strconv.QuoteRuneToASCII": "strconv", + "strconv.QuoteRuneToGraphic": "strconv", + "strconv.QuoteToASCII": "strconv", + "strconv.QuoteToGraphic": "strconv", + "strconv.Unquote": "strconv", + "strconv.UnquoteChar": "strconv", + "strings.Builder": "strings", + "strings.Compare": "strings", + "strings.Contains": "strings", + "strings.ContainsAny": "strings", + "strings.ContainsRune": "strings", + "strings.Count": "strings", + "strings.EqualFold": "strings", + "strings.Fields": "strings", + "strings.FieldsFunc": "strings", + "strings.HasPrefix": "strings", + "strings.HasSuffix": "strings", + "strings.Index": "strings", + "strings.IndexAny": "strings", + "strings.IndexByte": "strings", + "strings.IndexFunc": "strings", + "strings.IndexRune": "strings", + "strings.Join": "strings", + "strings.LastIndex": "strings", + "strings.LastIndexAny": "strings", + "strings.LastIndexByte": "strings", + "strings.LastIndexFunc": "strings", + "strings.Map": "strings", + "strings.NewReader": "strings", + "strings.NewReplacer": "strings", + "strings.Reader": "strings", + "strings.Repeat": "strings", + "strings.Replace": "strings", + "strings.Replacer": "strings", + "strings.Split": "strings", + "strings.SplitAfter": "strings", + "strings.SplitAfterN": "strings", + "strings.SplitN": "strings", + "strings.Title": "strings", + "strings.ToLower": "strings", + "strings.ToLowerSpecial": "strings", + "strings.ToTitle": "strings", + "strings.ToTitleSpecial": "strings", + "strings.ToUpper": "strings", + "strings.ToUpperSpecial": "strings", + "strings.Trim": "strings", + "strings.TrimFunc": "strings", + "strings.TrimLeft": "strings", + "strings.TrimLeftFunc": "strings", + "strings.TrimPrefix": "strings", + "strings.TrimRight": "strings", + "strings.TrimRightFunc": 
"strings", + "strings.TrimSpace": "strings", + "strings.TrimSuffix": "strings", + "subtle.ConstantTimeByteEq": "crypto/subtle", + "subtle.ConstantTimeCompare": "crypto/subtle", + "subtle.ConstantTimeCopy": "crypto/subtle", + "subtle.ConstantTimeEq": "crypto/subtle", + "subtle.ConstantTimeLessOrEq": "crypto/subtle", + "subtle.ConstantTimeSelect": "crypto/subtle", + "suffixarray.Index": "index/suffixarray", + "suffixarray.New": "index/suffixarray", + "sync.Cond": "sync", + "sync.Locker": "sync", + "sync.Map": "sync", + "sync.Mutex": "sync", + "sync.NewCond": "sync", + "sync.Once": "sync", + "sync.Pool": "sync", + "sync.RWMutex": "sync", + "sync.WaitGroup": "sync", + "syntax.ClassNL": "regexp/syntax", + "syntax.Compile": "regexp/syntax", + "syntax.DotNL": "regexp/syntax", + "syntax.EmptyBeginLine": "regexp/syntax", + "syntax.EmptyBeginText": "regexp/syntax", + "syntax.EmptyEndLine": "regexp/syntax", + "syntax.EmptyEndText": "regexp/syntax", + "syntax.EmptyNoWordBoundary": "regexp/syntax", + "syntax.EmptyOp": "regexp/syntax", + "syntax.EmptyOpContext": "regexp/syntax", + "syntax.EmptyWordBoundary": "regexp/syntax", + "syntax.ErrInternalError": "regexp/syntax", + "syntax.ErrInvalidCharClass": "regexp/syntax", + "syntax.ErrInvalidCharRange": "regexp/syntax", + "syntax.ErrInvalidEscape": "regexp/syntax", + "syntax.ErrInvalidNamedCapture": "regexp/syntax", + "syntax.ErrInvalidPerlOp": "regexp/syntax", + "syntax.ErrInvalidRepeatOp": "regexp/syntax", + "syntax.ErrInvalidRepeatSize": "regexp/syntax", + "syntax.ErrInvalidUTF8": "regexp/syntax", + "syntax.ErrMissingBracket": "regexp/syntax", + "syntax.ErrMissingParen": "regexp/syntax", + "syntax.ErrMissingRepeatArgument": "regexp/syntax", + "syntax.ErrTrailingBackslash": "regexp/syntax", + "syntax.ErrUnexpectedParen": "regexp/syntax", + "syntax.Error": "regexp/syntax", + "syntax.ErrorCode": "regexp/syntax", + "syntax.Flags": "regexp/syntax", + "syntax.FoldCase": "regexp/syntax", + "syntax.Inst": "regexp/syntax", + "syntax.InstAlt": "regexp/syntax", + "syntax.InstAltMatch": "regexp/syntax", + "syntax.InstCapture": "regexp/syntax", + "syntax.InstEmptyWidth": "regexp/syntax", + "syntax.InstFail": "regexp/syntax", + "syntax.InstMatch": "regexp/syntax", + "syntax.InstNop": "regexp/syntax", + "syntax.InstOp": "regexp/syntax", + "syntax.InstRune": "regexp/syntax", + "syntax.InstRune1": "regexp/syntax", + "syntax.InstRuneAny": "regexp/syntax", + "syntax.InstRuneAnyNotNL": "regexp/syntax", + "syntax.IsWordChar": "regexp/syntax", + "syntax.Literal": "regexp/syntax", + "syntax.MatchNL": "regexp/syntax", + "syntax.NonGreedy": "regexp/syntax", + "syntax.OneLine": "regexp/syntax", + "syntax.Op": "regexp/syntax", + "syntax.OpAlternate": "regexp/syntax", + "syntax.OpAnyChar": "regexp/syntax", + "syntax.OpAnyCharNotNL": "regexp/syntax", + "syntax.OpBeginLine": "regexp/syntax", + "syntax.OpBeginText": "regexp/syntax", + "syntax.OpCapture": "regexp/syntax", + "syntax.OpCharClass": "regexp/syntax", + "syntax.OpConcat": "regexp/syntax", + "syntax.OpEmptyMatch": "regexp/syntax", + "syntax.OpEndLine": "regexp/syntax", + "syntax.OpEndText": "regexp/syntax", + "syntax.OpLiteral": "regexp/syntax", + "syntax.OpNoMatch": "regexp/syntax", + "syntax.OpNoWordBoundary": "regexp/syntax", + "syntax.OpPlus": "regexp/syntax", + "syntax.OpQuest": "regexp/syntax", + "syntax.OpRepeat": "regexp/syntax", + "syntax.OpStar": "regexp/syntax", + "syntax.OpWordBoundary": "regexp/syntax", + "syntax.POSIX": "regexp/syntax", + "syntax.Parse": "regexp/syntax", + "syntax.Perl": "regexp/syntax", + 
"syntax.PerlX": "regexp/syntax", + "syntax.Prog": "regexp/syntax", + "syntax.Regexp": "regexp/syntax", + "syntax.Simple": "regexp/syntax", + "syntax.UnicodeGroups": "regexp/syntax", + "syntax.WasDollar": "regexp/syntax", + "syscall.AF_ALG": "syscall", + "syscall.AF_APPLETALK": "syscall", + "syscall.AF_ARP": "syscall", + "syscall.AF_ASH": "syscall", + "syscall.AF_ATM": "syscall", + "syscall.AF_ATMPVC": "syscall", + "syscall.AF_ATMSVC": "syscall", + "syscall.AF_AX25": "syscall", + "syscall.AF_BLUETOOTH": "syscall", + "syscall.AF_BRIDGE": "syscall", + "syscall.AF_CAIF": "syscall", + "syscall.AF_CAN": "syscall", + "syscall.AF_CCITT": "syscall", + "syscall.AF_CHAOS": "syscall", + "syscall.AF_CNT": "syscall", + "syscall.AF_COIP": "syscall", + "syscall.AF_DATAKIT": "syscall", + "syscall.AF_DECnet": "syscall", + "syscall.AF_DLI": "syscall", + "syscall.AF_E164": "syscall", + "syscall.AF_ECMA": "syscall", + "syscall.AF_ECONET": "syscall", + "syscall.AF_ENCAP": "syscall", + "syscall.AF_FILE": "syscall", + "syscall.AF_HYLINK": "syscall", + "syscall.AF_IEEE80211": "syscall", + "syscall.AF_IEEE802154": "syscall", + "syscall.AF_IMPLINK": "syscall", + "syscall.AF_INET": "syscall", + "syscall.AF_INET6": "syscall", + "syscall.AF_INET6_SDP": "syscall", + "syscall.AF_INET_SDP": "syscall", + "syscall.AF_IPX": "syscall", + "syscall.AF_IRDA": "syscall", + "syscall.AF_ISDN": "syscall", + "syscall.AF_ISO": "syscall", + "syscall.AF_IUCV": "syscall", + "syscall.AF_KEY": "syscall", + "syscall.AF_LAT": "syscall", + "syscall.AF_LINK": "syscall", + "syscall.AF_LLC": "syscall", + "syscall.AF_LOCAL": "syscall", + "syscall.AF_MAX": "syscall", + "syscall.AF_MPLS": "syscall", + "syscall.AF_NATM": "syscall", + "syscall.AF_NDRV": "syscall", + "syscall.AF_NETBEUI": "syscall", + "syscall.AF_NETBIOS": "syscall", + "syscall.AF_NETGRAPH": "syscall", + "syscall.AF_NETLINK": "syscall", + "syscall.AF_NETROM": "syscall", + "syscall.AF_NS": "syscall", + "syscall.AF_OROUTE": "syscall", + "syscall.AF_OSI": "syscall", + "syscall.AF_PACKET": "syscall", + "syscall.AF_PHONET": "syscall", + "syscall.AF_PPP": "syscall", + "syscall.AF_PPPOX": "syscall", + "syscall.AF_PUP": "syscall", + "syscall.AF_RDS": "syscall", + "syscall.AF_RESERVED_36": "syscall", + "syscall.AF_ROSE": "syscall", + "syscall.AF_ROUTE": "syscall", + "syscall.AF_RXRPC": "syscall", + "syscall.AF_SCLUSTER": "syscall", + "syscall.AF_SECURITY": "syscall", + "syscall.AF_SIP": "syscall", + "syscall.AF_SLOW": "syscall", + "syscall.AF_SNA": "syscall", + "syscall.AF_SYSTEM": "syscall", + "syscall.AF_TIPC": "syscall", + "syscall.AF_UNIX": "syscall", + "syscall.AF_UNSPEC": "syscall", + "syscall.AF_VENDOR00": "syscall", + "syscall.AF_VENDOR01": "syscall", + "syscall.AF_VENDOR02": "syscall", + "syscall.AF_VENDOR03": "syscall", + "syscall.AF_VENDOR04": "syscall", + "syscall.AF_VENDOR05": "syscall", + "syscall.AF_VENDOR06": "syscall", + "syscall.AF_VENDOR07": "syscall", + "syscall.AF_VENDOR08": "syscall", + "syscall.AF_VENDOR09": "syscall", + "syscall.AF_VENDOR10": "syscall", + "syscall.AF_VENDOR11": "syscall", + "syscall.AF_VENDOR12": "syscall", + "syscall.AF_VENDOR13": "syscall", + "syscall.AF_VENDOR14": "syscall", + "syscall.AF_VENDOR15": "syscall", + "syscall.AF_VENDOR16": "syscall", + "syscall.AF_VENDOR17": "syscall", + "syscall.AF_VENDOR18": "syscall", + "syscall.AF_VENDOR19": "syscall", + "syscall.AF_VENDOR20": "syscall", + "syscall.AF_VENDOR21": "syscall", + "syscall.AF_VENDOR22": "syscall", + "syscall.AF_VENDOR23": "syscall", + "syscall.AF_VENDOR24": "syscall", + 
"syscall.AF_VENDOR25": "syscall", + "syscall.AF_VENDOR26": "syscall", + "syscall.AF_VENDOR27": "syscall", + "syscall.AF_VENDOR28": "syscall", + "syscall.AF_VENDOR29": "syscall", + "syscall.AF_VENDOR30": "syscall", + "syscall.AF_VENDOR31": "syscall", + "syscall.AF_VENDOR32": "syscall", + "syscall.AF_VENDOR33": "syscall", + "syscall.AF_VENDOR34": "syscall", + "syscall.AF_VENDOR35": "syscall", + "syscall.AF_VENDOR36": "syscall", + "syscall.AF_VENDOR37": "syscall", + "syscall.AF_VENDOR38": "syscall", + "syscall.AF_VENDOR39": "syscall", + "syscall.AF_VENDOR40": "syscall", + "syscall.AF_VENDOR41": "syscall", + "syscall.AF_VENDOR42": "syscall", + "syscall.AF_VENDOR43": "syscall", + "syscall.AF_VENDOR44": "syscall", + "syscall.AF_VENDOR45": "syscall", + "syscall.AF_VENDOR46": "syscall", + "syscall.AF_VENDOR47": "syscall", + "syscall.AF_WANPIPE": "syscall", + "syscall.AF_X25": "syscall", + "syscall.AI_CANONNAME": "syscall", + "syscall.AI_NUMERICHOST": "syscall", + "syscall.AI_PASSIVE": "syscall", + "syscall.APPLICATION_ERROR": "syscall", + "syscall.ARPHRD_ADAPT": "syscall", + "syscall.ARPHRD_APPLETLK": "syscall", + "syscall.ARPHRD_ARCNET": "syscall", + "syscall.ARPHRD_ASH": "syscall", + "syscall.ARPHRD_ATM": "syscall", + "syscall.ARPHRD_AX25": "syscall", + "syscall.ARPHRD_BIF": "syscall", + "syscall.ARPHRD_CHAOS": "syscall", + "syscall.ARPHRD_CISCO": "syscall", + "syscall.ARPHRD_CSLIP": "syscall", + "syscall.ARPHRD_CSLIP6": "syscall", + "syscall.ARPHRD_DDCMP": "syscall", + "syscall.ARPHRD_DLCI": "syscall", + "syscall.ARPHRD_ECONET": "syscall", + "syscall.ARPHRD_EETHER": "syscall", + "syscall.ARPHRD_ETHER": "syscall", + "syscall.ARPHRD_EUI64": "syscall", + "syscall.ARPHRD_FCAL": "syscall", + "syscall.ARPHRD_FCFABRIC": "syscall", + "syscall.ARPHRD_FCPL": "syscall", + "syscall.ARPHRD_FCPP": "syscall", + "syscall.ARPHRD_FDDI": "syscall", + "syscall.ARPHRD_FRAD": "syscall", + "syscall.ARPHRD_FRELAY": "syscall", + "syscall.ARPHRD_HDLC": "syscall", + "syscall.ARPHRD_HIPPI": "syscall", + "syscall.ARPHRD_HWX25": "syscall", + "syscall.ARPHRD_IEEE1394": "syscall", + "syscall.ARPHRD_IEEE802": "syscall", + "syscall.ARPHRD_IEEE80211": "syscall", + "syscall.ARPHRD_IEEE80211_PRISM": "syscall", + "syscall.ARPHRD_IEEE80211_RADIOTAP": "syscall", + "syscall.ARPHRD_IEEE802154": "syscall", + "syscall.ARPHRD_IEEE802154_PHY": "syscall", + "syscall.ARPHRD_IEEE802_TR": "syscall", + "syscall.ARPHRD_INFINIBAND": "syscall", + "syscall.ARPHRD_IPDDP": "syscall", + "syscall.ARPHRD_IPGRE": "syscall", + "syscall.ARPHRD_IRDA": "syscall", + "syscall.ARPHRD_LAPB": "syscall", + "syscall.ARPHRD_LOCALTLK": "syscall", + "syscall.ARPHRD_LOOPBACK": "syscall", + "syscall.ARPHRD_METRICOM": "syscall", + "syscall.ARPHRD_NETROM": "syscall", + "syscall.ARPHRD_NONE": "syscall", + "syscall.ARPHRD_PIMREG": "syscall", + "syscall.ARPHRD_PPP": "syscall", + "syscall.ARPHRD_PRONET": "syscall", + "syscall.ARPHRD_RAWHDLC": "syscall", + "syscall.ARPHRD_ROSE": "syscall", + "syscall.ARPHRD_RSRVD": "syscall", + "syscall.ARPHRD_SIT": "syscall", + "syscall.ARPHRD_SKIP": "syscall", + "syscall.ARPHRD_SLIP": "syscall", + "syscall.ARPHRD_SLIP6": "syscall", + "syscall.ARPHRD_STRIP": "syscall", + "syscall.ARPHRD_TUNNEL": "syscall", + "syscall.ARPHRD_TUNNEL6": "syscall", + "syscall.ARPHRD_VOID": "syscall", + "syscall.ARPHRD_X25": "syscall", + "syscall.AUTHTYPE_CLIENT": "syscall", + "syscall.AUTHTYPE_SERVER": "syscall", + "syscall.Accept": "syscall", + "syscall.Accept4": "syscall", + "syscall.AcceptEx": "syscall", + "syscall.Access": "syscall", + "syscall.Acct": 
"syscall", + "syscall.AddrinfoW": "syscall", + "syscall.Adjtime": "syscall", + "syscall.Adjtimex": "syscall", + "syscall.AttachLsf": "syscall", + "syscall.B0": "syscall", + "syscall.B1000000": "syscall", + "syscall.B110": "syscall", + "syscall.B115200": "syscall", + "syscall.B1152000": "syscall", + "syscall.B1200": "syscall", + "syscall.B134": "syscall", + "syscall.B14400": "syscall", + "syscall.B150": "syscall", + "syscall.B1500000": "syscall", + "syscall.B1800": "syscall", + "syscall.B19200": "syscall", + "syscall.B200": "syscall", + "syscall.B2000000": "syscall", + "syscall.B230400": "syscall", + "syscall.B2400": "syscall", + "syscall.B2500000": "syscall", + "syscall.B28800": "syscall", + "syscall.B300": "syscall", + "syscall.B3000000": "syscall", + "syscall.B3500000": "syscall", + "syscall.B38400": "syscall", + "syscall.B4000000": "syscall", + "syscall.B460800": "syscall", + "syscall.B4800": "syscall", + "syscall.B50": "syscall", + "syscall.B500000": "syscall", + "syscall.B57600": "syscall", + "syscall.B576000": "syscall", + "syscall.B600": "syscall", + "syscall.B7200": "syscall", + "syscall.B75": "syscall", + "syscall.B76800": "syscall", + "syscall.B921600": "syscall", + "syscall.B9600": "syscall", + "syscall.BASE_PROTOCOL": "syscall", + "syscall.BIOCFEEDBACK": "syscall", + "syscall.BIOCFLUSH": "syscall", + "syscall.BIOCGBLEN": "syscall", + "syscall.BIOCGDIRECTION": "syscall", + "syscall.BIOCGDIRFILT": "syscall", + "syscall.BIOCGDLT": "syscall", + "syscall.BIOCGDLTLIST": "syscall", + "syscall.BIOCGETBUFMODE": "syscall", + "syscall.BIOCGETIF": "syscall", + "syscall.BIOCGETZMAX": "syscall", + "syscall.BIOCGFEEDBACK": "syscall", + "syscall.BIOCGFILDROP": "syscall", + "syscall.BIOCGHDRCMPLT": "syscall", + "syscall.BIOCGRSIG": "syscall", + "syscall.BIOCGRTIMEOUT": "syscall", + "syscall.BIOCGSEESENT": "syscall", + "syscall.BIOCGSTATS": "syscall", + "syscall.BIOCGSTATSOLD": "syscall", + "syscall.BIOCGTSTAMP": "syscall", + "syscall.BIOCIMMEDIATE": "syscall", + "syscall.BIOCLOCK": "syscall", + "syscall.BIOCPROMISC": "syscall", + "syscall.BIOCROTZBUF": "syscall", + "syscall.BIOCSBLEN": "syscall", + "syscall.BIOCSDIRECTION": "syscall", + "syscall.BIOCSDIRFILT": "syscall", + "syscall.BIOCSDLT": "syscall", + "syscall.BIOCSETBUFMODE": "syscall", + "syscall.BIOCSETF": "syscall", + "syscall.BIOCSETFNR": "syscall", + "syscall.BIOCSETIF": "syscall", + "syscall.BIOCSETWF": "syscall", + "syscall.BIOCSETZBUF": "syscall", + "syscall.BIOCSFEEDBACK": "syscall", + "syscall.BIOCSFILDROP": "syscall", + "syscall.BIOCSHDRCMPLT": "syscall", + "syscall.BIOCSRSIG": "syscall", + "syscall.BIOCSRTIMEOUT": "syscall", + "syscall.BIOCSSEESENT": "syscall", + "syscall.BIOCSTCPF": "syscall", + "syscall.BIOCSTSTAMP": "syscall", + "syscall.BIOCSUDPF": "syscall", + "syscall.BIOCVERSION": "syscall", + "syscall.BPF_A": "syscall", + "syscall.BPF_ABS": "syscall", + "syscall.BPF_ADD": "syscall", + "syscall.BPF_ALIGNMENT": "syscall", + "syscall.BPF_ALIGNMENT32": "syscall", + "syscall.BPF_ALU": "syscall", + "syscall.BPF_AND": "syscall", + "syscall.BPF_B": "syscall", + "syscall.BPF_BUFMODE_BUFFER": "syscall", + "syscall.BPF_BUFMODE_ZBUF": "syscall", + "syscall.BPF_DFLTBUFSIZE": "syscall", + "syscall.BPF_DIRECTION_IN": "syscall", + "syscall.BPF_DIRECTION_OUT": "syscall", + "syscall.BPF_DIV": "syscall", + "syscall.BPF_H": "syscall", + "syscall.BPF_IMM": "syscall", + "syscall.BPF_IND": "syscall", + "syscall.BPF_JA": "syscall", + "syscall.BPF_JEQ": "syscall", + "syscall.BPF_JGE": "syscall", + "syscall.BPF_JGT": "syscall", + 
"syscall.BPF_JMP": "syscall", + "syscall.BPF_JSET": "syscall", + "syscall.BPF_K": "syscall", + "syscall.BPF_LD": "syscall", + "syscall.BPF_LDX": "syscall", + "syscall.BPF_LEN": "syscall", + "syscall.BPF_LSH": "syscall", + "syscall.BPF_MAJOR_VERSION": "syscall", + "syscall.BPF_MAXBUFSIZE": "syscall", + "syscall.BPF_MAXINSNS": "syscall", + "syscall.BPF_MEM": "syscall", + "syscall.BPF_MEMWORDS": "syscall", + "syscall.BPF_MINBUFSIZE": "syscall", + "syscall.BPF_MINOR_VERSION": "syscall", + "syscall.BPF_MISC": "syscall", + "syscall.BPF_MSH": "syscall", + "syscall.BPF_MUL": "syscall", + "syscall.BPF_NEG": "syscall", + "syscall.BPF_OR": "syscall", + "syscall.BPF_RELEASE": "syscall", + "syscall.BPF_RET": "syscall", + "syscall.BPF_RSH": "syscall", + "syscall.BPF_ST": "syscall", + "syscall.BPF_STX": "syscall", + "syscall.BPF_SUB": "syscall", + "syscall.BPF_TAX": "syscall", + "syscall.BPF_TXA": "syscall", + "syscall.BPF_T_BINTIME": "syscall", + "syscall.BPF_T_BINTIME_FAST": "syscall", + "syscall.BPF_T_BINTIME_MONOTONIC": "syscall", + "syscall.BPF_T_BINTIME_MONOTONIC_FAST": "syscall", + "syscall.BPF_T_FAST": "syscall", + "syscall.BPF_T_FLAG_MASK": "syscall", + "syscall.BPF_T_FORMAT_MASK": "syscall", + "syscall.BPF_T_MICROTIME": "syscall", + "syscall.BPF_T_MICROTIME_FAST": "syscall", + "syscall.BPF_T_MICROTIME_MONOTONIC": "syscall", + "syscall.BPF_T_MICROTIME_MONOTONIC_FAST": "syscall", + "syscall.BPF_T_MONOTONIC": "syscall", + "syscall.BPF_T_MONOTONIC_FAST": "syscall", + "syscall.BPF_T_NANOTIME": "syscall", + "syscall.BPF_T_NANOTIME_FAST": "syscall", + "syscall.BPF_T_NANOTIME_MONOTONIC": "syscall", + "syscall.BPF_T_NANOTIME_MONOTONIC_FAST": "syscall", + "syscall.BPF_T_NONE": "syscall", + "syscall.BPF_T_NORMAL": "syscall", + "syscall.BPF_W": "syscall", + "syscall.BPF_X": "syscall", + "syscall.BRKINT": "syscall", + "syscall.Bind": "syscall", + "syscall.BindToDevice": "syscall", + "syscall.BpfBuflen": "syscall", + "syscall.BpfDatalink": "syscall", + "syscall.BpfHdr": "syscall", + "syscall.BpfHeadercmpl": "syscall", + "syscall.BpfInsn": "syscall", + "syscall.BpfInterface": "syscall", + "syscall.BpfJump": "syscall", + "syscall.BpfProgram": "syscall", + "syscall.BpfStat": "syscall", + "syscall.BpfStats": "syscall", + "syscall.BpfStmt": "syscall", + "syscall.BpfTimeout": "syscall", + "syscall.BpfTimeval": "syscall", + "syscall.BpfVersion": "syscall", + "syscall.BpfZbuf": "syscall", + "syscall.BpfZbufHeader": "syscall", + "syscall.ByHandleFileInformation": "syscall", + "syscall.BytePtrFromString": "syscall", + "syscall.ByteSliceFromString": "syscall", + "syscall.CCR0_FLUSH": "syscall", + "syscall.CERT_CHAIN_POLICY_AUTHENTICODE": "syscall", + "syscall.CERT_CHAIN_POLICY_AUTHENTICODE_TS": "syscall", + "syscall.CERT_CHAIN_POLICY_BASE": "syscall", + "syscall.CERT_CHAIN_POLICY_BASIC_CONSTRAINTS": "syscall", + "syscall.CERT_CHAIN_POLICY_EV": "syscall", + "syscall.CERT_CHAIN_POLICY_MICROSOFT_ROOT": "syscall", + "syscall.CERT_CHAIN_POLICY_NT_AUTH": "syscall", + "syscall.CERT_CHAIN_POLICY_SSL": "syscall", + "syscall.CERT_E_CN_NO_MATCH": "syscall", + "syscall.CERT_E_EXPIRED": "syscall", + "syscall.CERT_E_PURPOSE": "syscall", + "syscall.CERT_E_ROLE": "syscall", + "syscall.CERT_E_UNTRUSTEDROOT": "syscall", + "syscall.CERT_STORE_ADD_ALWAYS": "syscall", + "syscall.CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG": "syscall", + "syscall.CERT_STORE_PROV_MEMORY": "syscall", + "syscall.CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT": "syscall", + "syscall.CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT": "syscall", + 
"syscall.CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT": "syscall", + "syscall.CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT": "syscall", + "syscall.CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT": "syscall", + "syscall.CERT_TRUST_INVALID_BASIC_CONSTRAINTS": "syscall", + "syscall.CERT_TRUST_INVALID_EXTENSION": "syscall", + "syscall.CERT_TRUST_INVALID_NAME_CONSTRAINTS": "syscall", + "syscall.CERT_TRUST_INVALID_POLICY_CONSTRAINTS": "syscall", + "syscall.CERT_TRUST_IS_CYCLIC": "syscall", + "syscall.CERT_TRUST_IS_EXPLICIT_DISTRUST": "syscall", + "syscall.CERT_TRUST_IS_NOT_SIGNATURE_VALID": "syscall", + "syscall.CERT_TRUST_IS_NOT_TIME_VALID": "syscall", + "syscall.CERT_TRUST_IS_NOT_VALID_FOR_USAGE": "syscall", + "syscall.CERT_TRUST_IS_OFFLINE_REVOCATION": "syscall", + "syscall.CERT_TRUST_IS_REVOKED": "syscall", + "syscall.CERT_TRUST_IS_UNTRUSTED_ROOT": "syscall", + "syscall.CERT_TRUST_NO_ERROR": "syscall", + "syscall.CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY": "syscall", + "syscall.CERT_TRUST_REVOCATION_STATUS_UNKNOWN": "syscall", + "syscall.CFLUSH": "syscall", + "syscall.CLOCAL": "syscall", + "syscall.CLONE_CHILD_CLEARTID": "syscall", + "syscall.CLONE_CHILD_SETTID": "syscall", + "syscall.CLONE_CSIGNAL": "syscall", + "syscall.CLONE_DETACHED": "syscall", + "syscall.CLONE_FILES": "syscall", + "syscall.CLONE_FS": "syscall", + "syscall.CLONE_IO": "syscall", + "syscall.CLONE_NEWIPC": "syscall", + "syscall.CLONE_NEWNET": "syscall", + "syscall.CLONE_NEWNS": "syscall", + "syscall.CLONE_NEWPID": "syscall", + "syscall.CLONE_NEWUSER": "syscall", + "syscall.CLONE_NEWUTS": "syscall", + "syscall.CLONE_PARENT": "syscall", + "syscall.CLONE_PARENT_SETTID": "syscall", + "syscall.CLONE_PID": "syscall", + "syscall.CLONE_PTRACE": "syscall", + "syscall.CLONE_SETTLS": "syscall", + "syscall.CLONE_SIGHAND": "syscall", + "syscall.CLONE_SYSVSEM": "syscall", + "syscall.CLONE_THREAD": "syscall", + "syscall.CLONE_UNTRACED": "syscall", + "syscall.CLONE_VFORK": "syscall", + "syscall.CLONE_VM": "syscall", + "syscall.CPUID_CFLUSH": "syscall", + "syscall.CREAD": "syscall", + "syscall.CREATE_ALWAYS": "syscall", + "syscall.CREATE_NEW": "syscall", + "syscall.CREATE_NEW_PROCESS_GROUP": "syscall", + "syscall.CREATE_UNICODE_ENVIRONMENT": "syscall", + "syscall.CRYPT_DEFAULT_CONTAINER_OPTIONAL": "syscall", + "syscall.CRYPT_DELETEKEYSET": "syscall", + "syscall.CRYPT_MACHINE_KEYSET": "syscall", + "syscall.CRYPT_NEWKEYSET": "syscall", + "syscall.CRYPT_SILENT": "syscall", + "syscall.CRYPT_VERIFYCONTEXT": "syscall", + "syscall.CS5": "syscall", + "syscall.CS6": "syscall", + "syscall.CS7": "syscall", + "syscall.CS8": "syscall", + "syscall.CSIZE": "syscall", + "syscall.CSTART": "syscall", + "syscall.CSTATUS": "syscall", + "syscall.CSTOP": "syscall", + "syscall.CSTOPB": "syscall", + "syscall.CSUSP": "syscall", + "syscall.CTL_MAXNAME": "syscall", + "syscall.CTL_NET": "syscall", + "syscall.CTL_QUERY": "syscall", + "syscall.CTRL_BREAK_EVENT": "syscall", + "syscall.CTRL_C_EVENT": "syscall", + "syscall.CancelIo": "syscall", + "syscall.CancelIoEx": "syscall", + "syscall.CertAddCertificateContextToStore": "syscall", + "syscall.CertChainContext": "syscall", + "syscall.CertChainElement": "syscall", + "syscall.CertChainPara": "syscall", + "syscall.CertChainPolicyPara": "syscall", + "syscall.CertChainPolicyStatus": "syscall", + "syscall.CertCloseStore": "syscall", + "syscall.CertContext": "syscall", + "syscall.CertCreateCertificateContext": "syscall", + "syscall.CertEnhKeyUsage": "syscall", + "syscall.CertEnumCertificatesInStore": "syscall", + 
"syscall.CertFreeCertificateChain": "syscall", + "syscall.CertFreeCertificateContext": "syscall", + "syscall.CertGetCertificateChain": "syscall", + "syscall.CertOpenStore": "syscall", + "syscall.CertOpenSystemStore": "syscall", + "syscall.CertRevocationInfo": "syscall", + "syscall.CertSimpleChain": "syscall", + "syscall.CertTrustStatus": "syscall", + "syscall.CertUsageMatch": "syscall", + "syscall.CertVerifyCertificateChainPolicy": "syscall", + "syscall.Chdir": "syscall", + "syscall.CheckBpfVersion": "syscall", + "syscall.Chflags": "syscall", + "syscall.Chmod": "syscall", + "syscall.Chown": "syscall", + "syscall.Chroot": "syscall", + "syscall.Clearenv": "syscall", + "syscall.Close": "syscall", + "syscall.CloseHandle": "syscall", + "syscall.CloseOnExec": "syscall", + "syscall.Closesocket": "syscall", + "syscall.CmsgLen": "syscall", + "syscall.CmsgSpace": "syscall", + "syscall.Cmsghdr": "syscall", + "syscall.CommandLineToArgv": "syscall", + "syscall.ComputerName": "syscall", + "syscall.Conn": "syscall", + "syscall.Connect": "syscall", + "syscall.ConnectEx": "syscall", + "syscall.ConvertSidToStringSid": "syscall", + "syscall.ConvertStringSidToSid": "syscall", + "syscall.CopySid": "syscall", + "syscall.Creat": "syscall", + "syscall.CreateDirectory": "syscall", + "syscall.CreateFile": "syscall", + "syscall.CreateFileMapping": "syscall", + "syscall.CreateHardLink": "syscall", + "syscall.CreateIoCompletionPort": "syscall", + "syscall.CreatePipe": "syscall", + "syscall.CreateProcess": "syscall", + "syscall.CreateProcessAsUser": "syscall", + "syscall.CreateSymbolicLink": "syscall", + "syscall.CreateToolhelp32Snapshot": "syscall", + "syscall.Credential": "syscall", + "syscall.CryptAcquireContext": "syscall", + "syscall.CryptGenRandom": "syscall", + "syscall.CryptReleaseContext": "syscall", + "syscall.DIOCBSFLUSH": "syscall", + "syscall.DIOCOSFPFLUSH": "syscall", + "syscall.DLL": "syscall", + "syscall.DLLError": "syscall", + "syscall.DLT_A429": "syscall", + "syscall.DLT_A653_ICM": "syscall", + "syscall.DLT_AIRONET_HEADER": "syscall", + "syscall.DLT_AOS": "syscall", + "syscall.DLT_APPLE_IP_OVER_IEEE1394": "syscall", + "syscall.DLT_ARCNET": "syscall", + "syscall.DLT_ARCNET_LINUX": "syscall", + "syscall.DLT_ATM_CLIP": "syscall", + "syscall.DLT_ATM_RFC1483": "syscall", + "syscall.DLT_AURORA": "syscall", + "syscall.DLT_AX25": "syscall", + "syscall.DLT_AX25_KISS": "syscall", + "syscall.DLT_BACNET_MS_TP": "syscall", + "syscall.DLT_BLUETOOTH_HCI_H4": "syscall", + "syscall.DLT_BLUETOOTH_HCI_H4_WITH_PHDR": "syscall", + "syscall.DLT_CAN20B": "syscall", + "syscall.DLT_CAN_SOCKETCAN": "syscall", + "syscall.DLT_CHAOS": "syscall", + "syscall.DLT_CHDLC": "syscall", + "syscall.DLT_CISCO_IOS": "syscall", + "syscall.DLT_C_HDLC": "syscall", + "syscall.DLT_C_HDLC_WITH_DIR": "syscall", + "syscall.DLT_DBUS": "syscall", + "syscall.DLT_DECT": "syscall", + "syscall.DLT_DOCSIS": "syscall", + "syscall.DLT_DVB_CI": "syscall", + "syscall.DLT_ECONET": "syscall", + "syscall.DLT_EN10MB": "syscall", + "syscall.DLT_EN3MB": "syscall", + "syscall.DLT_ENC": "syscall", + "syscall.DLT_ERF": "syscall", + "syscall.DLT_ERF_ETH": "syscall", + "syscall.DLT_ERF_POS": "syscall", + "syscall.DLT_FC_2": "syscall", + "syscall.DLT_FC_2_WITH_FRAME_DELIMS": "syscall", + "syscall.DLT_FDDI": "syscall", + "syscall.DLT_FLEXRAY": "syscall", + "syscall.DLT_FRELAY": "syscall", + "syscall.DLT_FRELAY_WITH_DIR": "syscall", + "syscall.DLT_GCOM_SERIAL": "syscall", + "syscall.DLT_GCOM_T1E1": "syscall", + "syscall.DLT_GPF_F": "syscall", + "syscall.DLT_GPF_T": 
"syscall", + "syscall.DLT_GPRS_LLC": "syscall", + "syscall.DLT_GSMTAP_ABIS": "syscall", + "syscall.DLT_GSMTAP_UM": "syscall", + "syscall.DLT_HDLC": "syscall", + "syscall.DLT_HHDLC": "syscall", + "syscall.DLT_HIPPI": "syscall", + "syscall.DLT_IBM_SN": "syscall", + "syscall.DLT_IBM_SP": "syscall", + "syscall.DLT_IEEE802": "syscall", + "syscall.DLT_IEEE802_11": "syscall", + "syscall.DLT_IEEE802_11_RADIO": "syscall", + "syscall.DLT_IEEE802_11_RADIO_AVS": "syscall", + "syscall.DLT_IEEE802_15_4": "syscall", + "syscall.DLT_IEEE802_15_4_LINUX": "syscall", + "syscall.DLT_IEEE802_15_4_NOFCS": "syscall", + "syscall.DLT_IEEE802_15_4_NONASK_PHY": "syscall", + "syscall.DLT_IEEE802_16_MAC_CPS": "syscall", + "syscall.DLT_IEEE802_16_MAC_CPS_RADIO": "syscall", + "syscall.DLT_IPFILTER": "syscall", + "syscall.DLT_IPMB": "syscall", + "syscall.DLT_IPMB_LINUX": "syscall", + "syscall.DLT_IPNET": "syscall", + "syscall.DLT_IPOIB": "syscall", + "syscall.DLT_IPV4": "syscall", + "syscall.DLT_IPV6": "syscall", + "syscall.DLT_IP_OVER_FC": "syscall", + "syscall.DLT_JUNIPER_ATM1": "syscall", + "syscall.DLT_JUNIPER_ATM2": "syscall", + "syscall.DLT_JUNIPER_ATM_CEMIC": "syscall", + "syscall.DLT_JUNIPER_CHDLC": "syscall", + "syscall.DLT_JUNIPER_ES": "syscall", + "syscall.DLT_JUNIPER_ETHER": "syscall", + "syscall.DLT_JUNIPER_FIBRECHANNEL": "syscall", + "syscall.DLT_JUNIPER_FRELAY": "syscall", + "syscall.DLT_JUNIPER_GGSN": "syscall", + "syscall.DLT_JUNIPER_ISM": "syscall", + "syscall.DLT_JUNIPER_MFR": "syscall", + "syscall.DLT_JUNIPER_MLFR": "syscall", + "syscall.DLT_JUNIPER_MLPPP": "syscall", + "syscall.DLT_JUNIPER_MONITOR": "syscall", + "syscall.DLT_JUNIPER_PIC_PEER": "syscall", + "syscall.DLT_JUNIPER_PPP": "syscall", + "syscall.DLT_JUNIPER_PPPOE": "syscall", + "syscall.DLT_JUNIPER_PPPOE_ATM": "syscall", + "syscall.DLT_JUNIPER_SERVICES": "syscall", + "syscall.DLT_JUNIPER_SRX_E2E": "syscall", + "syscall.DLT_JUNIPER_ST": "syscall", + "syscall.DLT_JUNIPER_VP": "syscall", + "syscall.DLT_JUNIPER_VS": "syscall", + "syscall.DLT_LAPB_WITH_DIR": "syscall", + "syscall.DLT_LAPD": "syscall", + "syscall.DLT_LIN": "syscall", + "syscall.DLT_LINUX_EVDEV": "syscall", + "syscall.DLT_LINUX_IRDA": "syscall", + "syscall.DLT_LINUX_LAPD": "syscall", + "syscall.DLT_LINUX_PPP_WITHDIRECTION": "syscall", + "syscall.DLT_LINUX_SLL": "syscall", + "syscall.DLT_LOOP": "syscall", + "syscall.DLT_LTALK": "syscall", + "syscall.DLT_MATCHING_MAX": "syscall", + "syscall.DLT_MATCHING_MIN": "syscall", + "syscall.DLT_MFR": "syscall", + "syscall.DLT_MOST": "syscall", + "syscall.DLT_MPEG_2_TS": "syscall", + "syscall.DLT_MPLS": "syscall", + "syscall.DLT_MTP2": "syscall", + "syscall.DLT_MTP2_WITH_PHDR": "syscall", + "syscall.DLT_MTP3": "syscall", + "syscall.DLT_MUX27010": "syscall", + "syscall.DLT_NETANALYZER": "syscall", + "syscall.DLT_NETANALYZER_TRANSPARENT": "syscall", + "syscall.DLT_NFC_LLCP": "syscall", + "syscall.DLT_NFLOG": "syscall", + "syscall.DLT_NG40": "syscall", + "syscall.DLT_NULL": "syscall", + "syscall.DLT_PCI_EXP": "syscall", + "syscall.DLT_PFLOG": "syscall", + "syscall.DLT_PFSYNC": "syscall", + "syscall.DLT_PPI": "syscall", + "syscall.DLT_PPP": "syscall", + "syscall.DLT_PPP_BSDOS": "syscall", + "syscall.DLT_PPP_ETHER": "syscall", + "syscall.DLT_PPP_PPPD": "syscall", + "syscall.DLT_PPP_SERIAL": "syscall", + "syscall.DLT_PPP_WITH_DIR": "syscall", + "syscall.DLT_PPP_WITH_DIRECTION": "syscall", + "syscall.DLT_PRISM_HEADER": "syscall", + "syscall.DLT_PRONET": "syscall", + "syscall.DLT_RAIF1": "syscall", + "syscall.DLT_RAW": "syscall", + 
"syscall.DLT_RAWAF_MASK": "syscall", + "syscall.DLT_RIO": "syscall", + "syscall.DLT_SCCP": "syscall", + "syscall.DLT_SITA": "syscall", + "syscall.DLT_SLIP": "syscall", + "syscall.DLT_SLIP_BSDOS": "syscall", + "syscall.DLT_STANAG_5066_D_PDU": "syscall", + "syscall.DLT_SUNATM": "syscall", + "syscall.DLT_SYMANTEC_FIREWALL": "syscall", + "syscall.DLT_TZSP": "syscall", + "syscall.DLT_USB": "syscall", + "syscall.DLT_USB_LINUX": "syscall", + "syscall.DLT_USB_LINUX_MMAPPED": "syscall", + "syscall.DLT_USER0": "syscall", + "syscall.DLT_USER1": "syscall", + "syscall.DLT_USER10": "syscall", + "syscall.DLT_USER11": "syscall", + "syscall.DLT_USER12": "syscall", + "syscall.DLT_USER13": "syscall", + "syscall.DLT_USER14": "syscall", + "syscall.DLT_USER15": "syscall", + "syscall.DLT_USER2": "syscall", + "syscall.DLT_USER3": "syscall", + "syscall.DLT_USER4": "syscall", + "syscall.DLT_USER5": "syscall", + "syscall.DLT_USER6": "syscall", + "syscall.DLT_USER7": "syscall", + "syscall.DLT_USER8": "syscall", + "syscall.DLT_USER9": "syscall", + "syscall.DLT_WIHART": "syscall", + "syscall.DLT_X2E_SERIAL": "syscall", + "syscall.DLT_X2E_XORAYA": "syscall", + "syscall.DNSMXData": "syscall", + "syscall.DNSPTRData": "syscall", + "syscall.DNSRecord": "syscall", + "syscall.DNSSRVData": "syscall", + "syscall.DNSTXTData": "syscall", + "syscall.DNS_INFO_NO_RECORDS": "syscall", + "syscall.DNS_TYPE_A": "syscall", + "syscall.DNS_TYPE_A6": "syscall", + "syscall.DNS_TYPE_AAAA": "syscall", + "syscall.DNS_TYPE_ADDRS": "syscall", + "syscall.DNS_TYPE_AFSDB": "syscall", + "syscall.DNS_TYPE_ALL": "syscall", + "syscall.DNS_TYPE_ANY": "syscall", + "syscall.DNS_TYPE_ATMA": "syscall", + "syscall.DNS_TYPE_AXFR": "syscall", + "syscall.DNS_TYPE_CERT": "syscall", + "syscall.DNS_TYPE_CNAME": "syscall", + "syscall.DNS_TYPE_DHCID": "syscall", + "syscall.DNS_TYPE_DNAME": "syscall", + "syscall.DNS_TYPE_DNSKEY": "syscall", + "syscall.DNS_TYPE_DS": "syscall", + "syscall.DNS_TYPE_EID": "syscall", + "syscall.DNS_TYPE_GID": "syscall", + "syscall.DNS_TYPE_GPOS": "syscall", + "syscall.DNS_TYPE_HINFO": "syscall", + "syscall.DNS_TYPE_ISDN": "syscall", + "syscall.DNS_TYPE_IXFR": "syscall", + "syscall.DNS_TYPE_KEY": "syscall", + "syscall.DNS_TYPE_KX": "syscall", + "syscall.DNS_TYPE_LOC": "syscall", + "syscall.DNS_TYPE_MAILA": "syscall", + "syscall.DNS_TYPE_MAILB": "syscall", + "syscall.DNS_TYPE_MB": "syscall", + "syscall.DNS_TYPE_MD": "syscall", + "syscall.DNS_TYPE_MF": "syscall", + "syscall.DNS_TYPE_MG": "syscall", + "syscall.DNS_TYPE_MINFO": "syscall", + "syscall.DNS_TYPE_MR": "syscall", + "syscall.DNS_TYPE_MX": "syscall", + "syscall.DNS_TYPE_NAPTR": "syscall", + "syscall.DNS_TYPE_NBSTAT": "syscall", + "syscall.DNS_TYPE_NIMLOC": "syscall", + "syscall.DNS_TYPE_NS": "syscall", + "syscall.DNS_TYPE_NSAP": "syscall", + "syscall.DNS_TYPE_NSAPPTR": "syscall", + "syscall.DNS_TYPE_NSEC": "syscall", + "syscall.DNS_TYPE_NULL": "syscall", + "syscall.DNS_TYPE_NXT": "syscall", + "syscall.DNS_TYPE_OPT": "syscall", + "syscall.DNS_TYPE_PTR": "syscall", + "syscall.DNS_TYPE_PX": "syscall", + "syscall.DNS_TYPE_RP": "syscall", + "syscall.DNS_TYPE_RRSIG": "syscall", + "syscall.DNS_TYPE_RT": "syscall", + "syscall.DNS_TYPE_SIG": "syscall", + "syscall.DNS_TYPE_SINK": "syscall", + "syscall.DNS_TYPE_SOA": "syscall", + "syscall.DNS_TYPE_SRV": "syscall", + "syscall.DNS_TYPE_TEXT": "syscall", + "syscall.DNS_TYPE_TKEY": "syscall", + "syscall.DNS_TYPE_TSIG": "syscall", + "syscall.DNS_TYPE_UID": "syscall", + "syscall.DNS_TYPE_UINFO": "syscall", + "syscall.DNS_TYPE_UNSPEC": "syscall", + 
"syscall.DNS_TYPE_WINS": "syscall", + "syscall.DNS_TYPE_WINSR": "syscall", + "syscall.DNS_TYPE_WKS": "syscall", + "syscall.DNS_TYPE_X25": "syscall", + "syscall.DT_BLK": "syscall", + "syscall.DT_CHR": "syscall", + "syscall.DT_DIR": "syscall", + "syscall.DT_FIFO": "syscall", + "syscall.DT_LNK": "syscall", + "syscall.DT_REG": "syscall", + "syscall.DT_SOCK": "syscall", + "syscall.DT_UNKNOWN": "syscall", + "syscall.DT_WHT": "syscall", + "syscall.DUPLICATE_CLOSE_SOURCE": "syscall", + "syscall.DUPLICATE_SAME_ACCESS": "syscall", + "syscall.DeleteFile": "syscall", + "syscall.DetachLsf": "syscall", + "syscall.DeviceIoControl": "syscall", + "syscall.Dirent": "syscall", + "syscall.DnsNameCompare": "syscall", + "syscall.DnsQuery": "syscall", + "syscall.DnsRecordListFree": "syscall", + "syscall.DnsSectionAdditional": "syscall", + "syscall.DnsSectionAnswer": "syscall", + "syscall.DnsSectionAuthority": "syscall", + "syscall.DnsSectionQuestion": "syscall", + "syscall.Dup": "syscall", + "syscall.Dup2": "syscall", + "syscall.Dup3": "syscall", + "syscall.DuplicateHandle": "syscall", + "syscall.E2BIG": "syscall", + "syscall.EACCES": "syscall", + "syscall.EADDRINUSE": "syscall", + "syscall.EADDRNOTAVAIL": "syscall", + "syscall.EADV": "syscall", + "syscall.EAFNOSUPPORT": "syscall", + "syscall.EAGAIN": "syscall", + "syscall.EALREADY": "syscall", + "syscall.EAUTH": "syscall", + "syscall.EBADARCH": "syscall", + "syscall.EBADE": "syscall", + "syscall.EBADEXEC": "syscall", + "syscall.EBADF": "syscall", + "syscall.EBADFD": "syscall", + "syscall.EBADMACHO": "syscall", + "syscall.EBADMSG": "syscall", + "syscall.EBADR": "syscall", + "syscall.EBADRPC": "syscall", + "syscall.EBADRQC": "syscall", + "syscall.EBADSLT": "syscall", + "syscall.EBFONT": "syscall", + "syscall.EBUSY": "syscall", + "syscall.ECANCELED": "syscall", + "syscall.ECAPMODE": "syscall", + "syscall.ECHILD": "syscall", + "syscall.ECHO": "syscall", + "syscall.ECHOCTL": "syscall", + "syscall.ECHOE": "syscall", + "syscall.ECHOK": "syscall", + "syscall.ECHOKE": "syscall", + "syscall.ECHONL": "syscall", + "syscall.ECHOPRT": "syscall", + "syscall.ECHRNG": "syscall", + "syscall.ECOMM": "syscall", + "syscall.ECONNABORTED": "syscall", + "syscall.ECONNREFUSED": "syscall", + "syscall.ECONNRESET": "syscall", + "syscall.EDEADLK": "syscall", + "syscall.EDEADLOCK": "syscall", + "syscall.EDESTADDRREQ": "syscall", + "syscall.EDEVERR": "syscall", + "syscall.EDOM": "syscall", + "syscall.EDOOFUS": "syscall", + "syscall.EDOTDOT": "syscall", + "syscall.EDQUOT": "syscall", + "syscall.EEXIST": "syscall", + "syscall.EFAULT": "syscall", + "syscall.EFBIG": "syscall", + "syscall.EFER_LMA": "syscall", + "syscall.EFER_LME": "syscall", + "syscall.EFER_NXE": "syscall", + "syscall.EFER_SCE": "syscall", + "syscall.EFTYPE": "syscall", + "syscall.EHOSTDOWN": "syscall", + "syscall.EHOSTUNREACH": "syscall", + "syscall.EHWPOISON": "syscall", + "syscall.EIDRM": "syscall", + "syscall.EILSEQ": "syscall", + "syscall.EINPROGRESS": "syscall", + "syscall.EINTR": "syscall", + "syscall.EINVAL": "syscall", + "syscall.EIO": "syscall", + "syscall.EIPSEC": "syscall", + "syscall.EISCONN": "syscall", + "syscall.EISDIR": "syscall", + "syscall.EISNAM": "syscall", + "syscall.EKEYEXPIRED": "syscall", + "syscall.EKEYREJECTED": "syscall", + "syscall.EKEYREVOKED": "syscall", + "syscall.EL2HLT": "syscall", + "syscall.EL2NSYNC": "syscall", + "syscall.EL3HLT": "syscall", + "syscall.EL3RST": "syscall", + "syscall.ELAST": "syscall", + "syscall.ELF_NGREG": "syscall", + "syscall.ELF_PRARGSZ": "syscall", + "syscall.ELIBACC": 
"syscall", + "syscall.ELIBBAD": "syscall", + "syscall.ELIBEXEC": "syscall", + "syscall.ELIBMAX": "syscall", + "syscall.ELIBSCN": "syscall", + "syscall.ELNRNG": "syscall", + "syscall.ELOOP": "syscall", + "syscall.EMEDIUMTYPE": "syscall", + "syscall.EMFILE": "syscall", + "syscall.EMLINK": "syscall", + "syscall.EMSGSIZE": "syscall", + "syscall.EMT_TAGOVF": "syscall", + "syscall.EMULTIHOP": "syscall", + "syscall.EMUL_ENABLED": "syscall", + "syscall.EMUL_LINUX": "syscall", + "syscall.EMUL_LINUX32": "syscall", + "syscall.EMUL_MAXID": "syscall", + "syscall.EMUL_NATIVE": "syscall", + "syscall.ENAMETOOLONG": "syscall", + "syscall.ENAVAIL": "syscall", + "syscall.ENDRUNDISC": "syscall", + "syscall.ENEEDAUTH": "syscall", + "syscall.ENETDOWN": "syscall", + "syscall.ENETRESET": "syscall", + "syscall.ENETUNREACH": "syscall", + "syscall.ENFILE": "syscall", + "syscall.ENOANO": "syscall", + "syscall.ENOATTR": "syscall", + "syscall.ENOBUFS": "syscall", + "syscall.ENOCSI": "syscall", + "syscall.ENODATA": "syscall", + "syscall.ENODEV": "syscall", + "syscall.ENOENT": "syscall", + "syscall.ENOEXEC": "syscall", + "syscall.ENOKEY": "syscall", + "syscall.ENOLCK": "syscall", + "syscall.ENOLINK": "syscall", + "syscall.ENOMEDIUM": "syscall", + "syscall.ENOMEM": "syscall", + "syscall.ENOMSG": "syscall", + "syscall.ENONET": "syscall", + "syscall.ENOPKG": "syscall", + "syscall.ENOPOLICY": "syscall", + "syscall.ENOPROTOOPT": "syscall", + "syscall.ENOSPC": "syscall", + "syscall.ENOSR": "syscall", + "syscall.ENOSTR": "syscall", + "syscall.ENOSYS": "syscall", + "syscall.ENOTBLK": "syscall", + "syscall.ENOTCAPABLE": "syscall", + "syscall.ENOTCONN": "syscall", + "syscall.ENOTDIR": "syscall", + "syscall.ENOTEMPTY": "syscall", + "syscall.ENOTNAM": "syscall", + "syscall.ENOTRECOVERABLE": "syscall", + "syscall.ENOTSOCK": "syscall", + "syscall.ENOTSUP": "syscall", + "syscall.ENOTTY": "syscall", + "syscall.ENOTUNIQ": "syscall", + "syscall.ENXIO": "syscall", + "syscall.EN_SW_CTL_INF": "syscall", + "syscall.EN_SW_CTL_PREC": "syscall", + "syscall.EN_SW_CTL_ROUND": "syscall", + "syscall.EN_SW_DATACHAIN": "syscall", + "syscall.EN_SW_DENORM": "syscall", + "syscall.EN_SW_INVOP": "syscall", + "syscall.EN_SW_OVERFLOW": "syscall", + "syscall.EN_SW_PRECLOSS": "syscall", + "syscall.EN_SW_UNDERFLOW": "syscall", + "syscall.EN_SW_ZERODIV": "syscall", + "syscall.EOPNOTSUPP": "syscall", + "syscall.EOVERFLOW": "syscall", + "syscall.EOWNERDEAD": "syscall", + "syscall.EPERM": "syscall", + "syscall.EPFNOSUPPORT": "syscall", + "syscall.EPIPE": "syscall", + "syscall.EPOLLERR": "syscall", + "syscall.EPOLLET": "syscall", + "syscall.EPOLLHUP": "syscall", + "syscall.EPOLLIN": "syscall", + "syscall.EPOLLMSG": "syscall", + "syscall.EPOLLONESHOT": "syscall", + "syscall.EPOLLOUT": "syscall", + "syscall.EPOLLPRI": "syscall", + "syscall.EPOLLRDBAND": "syscall", + "syscall.EPOLLRDHUP": "syscall", + "syscall.EPOLLRDNORM": "syscall", + "syscall.EPOLLWRBAND": "syscall", + "syscall.EPOLLWRNORM": "syscall", + "syscall.EPOLL_CLOEXEC": "syscall", + "syscall.EPOLL_CTL_ADD": "syscall", + "syscall.EPOLL_CTL_DEL": "syscall", + "syscall.EPOLL_CTL_MOD": "syscall", + "syscall.EPOLL_NONBLOCK": "syscall", + "syscall.EPROCLIM": "syscall", + "syscall.EPROCUNAVAIL": "syscall", + "syscall.EPROGMISMATCH": "syscall", + "syscall.EPROGUNAVAIL": "syscall", + "syscall.EPROTO": "syscall", + "syscall.EPROTONOSUPPORT": "syscall", + "syscall.EPROTOTYPE": "syscall", + "syscall.EPWROFF": "syscall", + "syscall.ERANGE": "syscall", + "syscall.EREMCHG": "syscall", + "syscall.EREMOTE": "syscall", + 
"syscall.EREMOTEIO": "syscall", + "syscall.ERESTART": "syscall", + "syscall.ERFKILL": "syscall", + "syscall.EROFS": "syscall", + "syscall.ERPCMISMATCH": "syscall", + "syscall.ERROR_ACCESS_DENIED": "syscall", + "syscall.ERROR_ALREADY_EXISTS": "syscall", + "syscall.ERROR_BROKEN_PIPE": "syscall", + "syscall.ERROR_BUFFER_OVERFLOW": "syscall", + "syscall.ERROR_DIR_NOT_EMPTY": "syscall", + "syscall.ERROR_ENVVAR_NOT_FOUND": "syscall", + "syscall.ERROR_FILE_EXISTS": "syscall", + "syscall.ERROR_FILE_NOT_FOUND": "syscall", + "syscall.ERROR_HANDLE_EOF": "syscall", + "syscall.ERROR_INSUFFICIENT_BUFFER": "syscall", + "syscall.ERROR_IO_PENDING": "syscall", + "syscall.ERROR_MOD_NOT_FOUND": "syscall", + "syscall.ERROR_MORE_DATA": "syscall", + "syscall.ERROR_NETNAME_DELETED": "syscall", + "syscall.ERROR_NOT_FOUND": "syscall", + "syscall.ERROR_NO_MORE_FILES": "syscall", + "syscall.ERROR_OPERATION_ABORTED": "syscall", + "syscall.ERROR_PATH_NOT_FOUND": "syscall", + "syscall.ERROR_PRIVILEGE_NOT_HELD": "syscall", + "syscall.ERROR_PROC_NOT_FOUND": "syscall", + "syscall.ESHLIBVERS": "syscall", + "syscall.ESHUTDOWN": "syscall", + "syscall.ESOCKTNOSUPPORT": "syscall", + "syscall.ESPIPE": "syscall", + "syscall.ESRCH": "syscall", + "syscall.ESRMNT": "syscall", + "syscall.ESTALE": "syscall", + "syscall.ESTRPIPE": "syscall", + "syscall.ETHERCAP_JUMBO_MTU": "syscall", + "syscall.ETHERCAP_VLAN_HWTAGGING": "syscall", + "syscall.ETHERCAP_VLAN_MTU": "syscall", + "syscall.ETHERMIN": "syscall", + "syscall.ETHERMTU": "syscall", + "syscall.ETHERMTU_JUMBO": "syscall", + "syscall.ETHERTYPE_8023": "syscall", + "syscall.ETHERTYPE_AARP": "syscall", + "syscall.ETHERTYPE_ACCTON": "syscall", + "syscall.ETHERTYPE_AEONIC": "syscall", + "syscall.ETHERTYPE_ALPHA": "syscall", + "syscall.ETHERTYPE_AMBER": "syscall", + "syscall.ETHERTYPE_AMOEBA": "syscall", + "syscall.ETHERTYPE_AOE": "syscall", + "syscall.ETHERTYPE_APOLLO": "syscall", + "syscall.ETHERTYPE_APOLLODOMAIN": "syscall", + "syscall.ETHERTYPE_APPLETALK": "syscall", + "syscall.ETHERTYPE_APPLITEK": "syscall", + "syscall.ETHERTYPE_ARGONAUT": "syscall", + "syscall.ETHERTYPE_ARP": "syscall", + "syscall.ETHERTYPE_AT": "syscall", + "syscall.ETHERTYPE_ATALK": "syscall", + "syscall.ETHERTYPE_ATOMIC": "syscall", + "syscall.ETHERTYPE_ATT": "syscall", + "syscall.ETHERTYPE_ATTSTANFORD": "syscall", + "syscall.ETHERTYPE_AUTOPHON": "syscall", + "syscall.ETHERTYPE_AXIS": "syscall", + "syscall.ETHERTYPE_BCLOOP": "syscall", + "syscall.ETHERTYPE_BOFL": "syscall", + "syscall.ETHERTYPE_CABLETRON": "syscall", + "syscall.ETHERTYPE_CHAOS": "syscall", + "syscall.ETHERTYPE_COMDESIGN": "syscall", + "syscall.ETHERTYPE_COMPUGRAPHIC": "syscall", + "syscall.ETHERTYPE_COUNTERPOINT": "syscall", + "syscall.ETHERTYPE_CRONUS": "syscall", + "syscall.ETHERTYPE_CRONUSVLN": "syscall", + "syscall.ETHERTYPE_DCA": "syscall", + "syscall.ETHERTYPE_DDE": "syscall", + "syscall.ETHERTYPE_DEBNI": "syscall", + "syscall.ETHERTYPE_DECAM": "syscall", + "syscall.ETHERTYPE_DECCUST": "syscall", + "syscall.ETHERTYPE_DECDIAG": "syscall", + "syscall.ETHERTYPE_DECDNS": "syscall", + "syscall.ETHERTYPE_DECDTS": "syscall", + "syscall.ETHERTYPE_DECEXPER": "syscall", + "syscall.ETHERTYPE_DECLAST": "syscall", + "syscall.ETHERTYPE_DECLTM": "syscall", + "syscall.ETHERTYPE_DECMUMPS": "syscall", + "syscall.ETHERTYPE_DECNETBIOS": "syscall", + "syscall.ETHERTYPE_DELTACON": "syscall", + "syscall.ETHERTYPE_DIDDLE": "syscall", + "syscall.ETHERTYPE_DLOG1": "syscall", + "syscall.ETHERTYPE_DLOG2": "syscall", + "syscall.ETHERTYPE_DN": "syscall", + 
"syscall.ETHERTYPE_DOGFIGHT": "syscall", + "syscall.ETHERTYPE_DSMD": "syscall", + "syscall.ETHERTYPE_ECMA": "syscall", + "syscall.ETHERTYPE_ENCRYPT": "syscall", + "syscall.ETHERTYPE_ES": "syscall", + "syscall.ETHERTYPE_EXCELAN": "syscall", + "syscall.ETHERTYPE_EXPERDATA": "syscall", + "syscall.ETHERTYPE_FLIP": "syscall", + "syscall.ETHERTYPE_FLOWCONTROL": "syscall", + "syscall.ETHERTYPE_FRARP": "syscall", + "syscall.ETHERTYPE_GENDYN": "syscall", + "syscall.ETHERTYPE_HAYES": "syscall", + "syscall.ETHERTYPE_HIPPI_FP": "syscall", + "syscall.ETHERTYPE_HITACHI": "syscall", + "syscall.ETHERTYPE_HP": "syscall", + "syscall.ETHERTYPE_IEEEPUP": "syscall", + "syscall.ETHERTYPE_IEEEPUPAT": "syscall", + "syscall.ETHERTYPE_IMLBL": "syscall", + "syscall.ETHERTYPE_IMLBLDIAG": "syscall", + "syscall.ETHERTYPE_IP": "syscall", + "syscall.ETHERTYPE_IPAS": "syscall", + "syscall.ETHERTYPE_IPV6": "syscall", + "syscall.ETHERTYPE_IPX": "syscall", + "syscall.ETHERTYPE_IPXNEW": "syscall", + "syscall.ETHERTYPE_KALPANA": "syscall", + "syscall.ETHERTYPE_LANBRIDGE": "syscall", + "syscall.ETHERTYPE_LANPROBE": "syscall", + "syscall.ETHERTYPE_LAT": "syscall", + "syscall.ETHERTYPE_LBACK": "syscall", + "syscall.ETHERTYPE_LITTLE": "syscall", + "syscall.ETHERTYPE_LLDP": "syscall", + "syscall.ETHERTYPE_LOGICRAFT": "syscall", + "syscall.ETHERTYPE_LOOPBACK": "syscall", + "syscall.ETHERTYPE_MATRA": "syscall", + "syscall.ETHERTYPE_MAX": "syscall", + "syscall.ETHERTYPE_MERIT": "syscall", + "syscall.ETHERTYPE_MICP": "syscall", + "syscall.ETHERTYPE_MOPDL": "syscall", + "syscall.ETHERTYPE_MOPRC": "syscall", + "syscall.ETHERTYPE_MOTOROLA": "syscall", + "syscall.ETHERTYPE_MPLS": "syscall", + "syscall.ETHERTYPE_MPLS_MCAST": "syscall", + "syscall.ETHERTYPE_MUMPS": "syscall", + "syscall.ETHERTYPE_NBPCC": "syscall", + "syscall.ETHERTYPE_NBPCLAIM": "syscall", + "syscall.ETHERTYPE_NBPCLREQ": "syscall", + "syscall.ETHERTYPE_NBPCLRSP": "syscall", + "syscall.ETHERTYPE_NBPCREQ": "syscall", + "syscall.ETHERTYPE_NBPCRSP": "syscall", + "syscall.ETHERTYPE_NBPDG": "syscall", + "syscall.ETHERTYPE_NBPDGB": "syscall", + "syscall.ETHERTYPE_NBPDLTE": "syscall", + "syscall.ETHERTYPE_NBPRAR": "syscall", + "syscall.ETHERTYPE_NBPRAS": "syscall", + "syscall.ETHERTYPE_NBPRST": "syscall", + "syscall.ETHERTYPE_NBPSCD": "syscall", + "syscall.ETHERTYPE_NBPVCD": "syscall", + "syscall.ETHERTYPE_NBS": "syscall", + "syscall.ETHERTYPE_NCD": "syscall", + "syscall.ETHERTYPE_NESTAR": "syscall", + "syscall.ETHERTYPE_NETBEUI": "syscall", + "syscall.ETHERTYPE_NOVELL": "syscall", + "syscall.ETHERTYPE_NS": "syscall", + "syscall.ETHERTYPE_NSAT": "syscall", + "syscall.ETHERTYPE_NSCOMPAT": "syscall", + "syscall.ETHERTYPE_NTRAILER": "syscall", + "syscall.ETHERTYPE_OS9": "syscall", + "syscall.ETHERTYPE_OS9NET": "syscall", + "syscall.ETHERTYPE_PACER": "syscall", + "syscall.ETHERTYPE_PAE": "syscall", + "syscall.ETHERTYPE_PCS": "syscall", + "syscall.ETHERTYPE_PLANNING": "syscall", + "syscall.ETHERTYPE_PPP": "syscall", + "syscall.ETHERTYPE_PPPOE": "syscall", + "syscall.ETHERTYPE_PPPOEDISC": "syscall", + "syscall.ETHERTYPE_PRIMENTS": "syscall", + "syscall.ETHERTYPE_PUP": "syscall", + "syscall.ETHERTYPE_PUPAT": "syscall", + "syscall.ETHERTYPE_QINQ": "syscall", + "syscall.ETHERTYPE_RACAL": "syscall", + "syscall.ETHERTYPE_RATIONAL": "syscall", + "syscall.ETHERTYPE_RAWFR": "syscall", + "syscall.ETHERTYPE_RCL": "syscall", + "syscall.ETHERTYPE_RDP": "syscall", + "syscall.ETHERTYPE_RETIX": "syscall", + "syscall.ETHERTYPE_REVARP": "syscall", + "syscall.ETHERTYPE_SCA": "syscall", + 
"syscall.ETHERTYPE_SECTRA": "syscall", + "syscall.ETHERTYPE_SECUREDATA": "syscall", + "syscall.ETHERTYPE_SGITW": "syscall", + "syscall.ETHERTYPE_SG_BOUNCE": "syscall", + "syscall.ETHERTYPE_SG_DIAG": "syscall", + "syscall.ETHERTYPE_SG_NETGAMES": "syscall", + "syscall.ETHERTYPE_SG_RESV": "syscall", + "syscall.ETHERTYPE_SIMNET": "syscall", + "syscall.ETHERTYPE_SLOW": "syscall", + "syscall.ETHERTYPE_SLOWPROTOCOLS": "syscall", + "syscall.ETHERTYPE_SNA": "syscall", + "syscall.ETHERTYPE_SNMP": "syscall", + "syscall.ETHERTYPE_SONIX": "syscall", + "syscall.ETHERTYPE_SPIDER": "syscall", + "syscall.ETHERTYPE_SPRITE": "syscall", + "syscall.ETHERTYPE_STP": "syscall", + "syscall.ETHERTYPE_TALARIS": "syscall", + "syscall.ETHERTYPE_TALARISMC": "syscall", + "syscall.ETHERTYPE_TCPCOMP": "syscall", + "syscall.ETHERTYPE_TCPSM": "syscall", + "syscall.ETHERTYPE_TEC": "syscall", + "syscall.ETHERTYPE_TIGAN": "syscall", + "syscall.ETHERTYPE_TRAIL": "syscall", + "syscall.ETHERTYPE_TRANSETHER": "syscall", + "syscall.ETHERTYPE_TYMSHARE": "syscall", + "syscall.ETHERTYPE_UBBST": "syscall", + "syscall.ETHERTYPE_UBDEBUG": "syscall", + "syscall.ETHERTYPE_UBDIAGLOOP": "syscall", + "syscall.ETHERTYPE_UBDL": "syscall", + "syscall.ETHERTYPE_UBNIU": "syscall", + "syscall.ETHERTYPE_UBNMC": "syscall", + "syscall.ETHERTYPE_VALID": "syscall", + "syscall.ETHERTYPE_VARIAN": "syscall", + "syscall.ETHERTYPE_VAXELN": "syscall", + "syscall.ETHERTYPE_VEECO": "syscall", + "syscall.ETHERTYPE_VEXP": "syscall", + "syscall.ETHERTYPE_VGLAB": "syscall", + "syscall.ETHERTYPE_VINES": "syscall", + "syscall.ETHERTYPE_VINESECHO": "syscall", + "syscall.ETHERTYPE_VINESLOOP": "syscall", + "syscall.ETHERTYPE_VITAL": "syscall", + "syscall.ETHERTYPE_VLAN": "syscall", + "syscall.ETHERTYPE_VLTLMAN": "syscall", + "syscall.ETHERTYPE_VPROD": "syscall", + "syscall.ETHERTYPE_VURESERVED": "syscall", + "syscall.ETHERTYPE_WATERLOO": "syscall", + "syscall.ETHERTYPE_WELLFLEET": "syscall", + "syscall.ETHERTYPE_X25": "syscall", + "syscall.ETHERTYPE_X75": "syscall", + "syscall.ETHERTYPE_XNSSM": "syscall", + "syscall.ETHERTYPE_XTP": "syscall", + "syscall.ETHER_ADDR_LEN": "syscall", + "syscall.ETHER_ALIGN": "syscall", + "syscall.ETHER_CRC_LEN": "syscall", + "syscall.ETHER_CRC_POLY_BE": "syscall", + "syscall.ETHER_CRC_POLY_LE": "syscall", + "syscall.ETHER_HDR_LEN": "syscall", + "syscall.ETHER_MAX_DIX_LEN": "syscall", + "syscall.ETHER_MAX_LEN": "syscall", + "syscall.ETHER_MAX_LEN_JUMBO": "syscall", + "syscall.ETHER_MIN_LEN": "syscall", + "syscall.ETHER_PPPOE_ENCAP_LEN": "syscall", + "syscall.ETHER_TYPE_LEN": "syscall", + "syscall.ETHER_VLAN_ENCAP_LEN": "syscall", + "syscall.ETH_P_1588": "syscall", + "syscall.ETH_P_8021Q": "syscall", + "syscall.ETH_P_802_2": "syscall", + "syscall.ETH_P_802_3": "syscall", + "syscall.ETH_P_AARP": "syscall", + "syscall.ETH_P_ALL": "syscall", + "syscall.ETH_P_AOE": "syscall", + "syscall.ETH_P_ARCNET": "syscall", + "syscall.ETH_P_ARP": "syscall", + "syscall.ETH_P_ATALK": "syscall", + "syscall.ETH_P_ATMFATE": "syscall", + "syscall.ETH_P_ATMMPOA": "syscall", + "syscall.ETH_P_AX25": "syscall", + "syscall.ETH_P_BPQ": "syscall", + "syscall.ETH_P_CAIF": "syscall", + "syscall.ETH_P_CAN": "syscall", + "syscall.ETH_P_CONTROL": "syscall", + "syscall.ETH_P_CUST": "syscall", + "syscall.ETH_P_DDCMP": "syscall", + "syscall.ETH_P_DEC": "syscall", + "syscall.ETH_P_DIAG": "syscall", + "syscall.ETH_P_DNA_DL": "syscall", + "syscall.ETH_P_DNA_RC": "syscall", + "syscall.ETH_P_DNA_RT": "syscall", + "syscall.ETH_P_DSA": "syscall", + "syscall.ETH_P_ECONET": "syscall", + 
"syscall.ETH_P_EDSA": "syscall", + "syscall.ETH_P_FCOE": "syscall", + "syscall.ETH_P_FIP": "syscall", + "syscall.ETH_P_HDLC": "syscall", + "syscall.ETH_P_IEEE802154": "syscall", + "syscall.ETH_P_IEEEPUP": "syscall", + "syscall.ETH_P_IEEEPUPAT": "syscall", + "syscall.ETH_P_IP": "syscall", + "syscall.ETH_P_IPV6": "syscall", + "syscall.ETH_P_IPX": "syscall", + "syscall.ETH_P_IRDA": "syscall", + "syscall.ETH_P_LAT": "syscall", + "syscall.ETH_P_LINK_CTL": "syscall", + "syscall.ETH_P_LOCALTALK": "syscall", + "syscall.ETH_P_LOOP": "syscall", + "syscall.ETH_P_MOBITEX": "syscall", + "syscall.ETH_P_MPLS_MC": "syscall", + "syscall.ETH_P_MPLS_UC": "syscall", + "syscall.ETH_P_PAE": "syscall", + "syscall.ETH_P_PAUSE": "syscall", + "syscall.ETH_P_PHONET": "syscall", + "syscall.ETH_P_PPPTALK": "syscall", + "syscall.ETH_P_PPP_DISC": "syscall", + "syscall.ETH_P_PPP_MP": "syscall", + "syscall.ETH_P_PPP_SES": "syscall", + "syscall.ETH_P_PUP": "syscall", + "syscall.ETH_P_PUPAT": "syscall", + "syscall.ETH_P_RARP": "syscall", + "syscall.ETH_P_SCA": "syscall", + "syscall.ETH_P_SLOW": "syscall", + "syscall.ETH_P_SNAP": "syscall", + "syscall.ETH_P_TEB": "syscall", + "syscall.ETH_P_TIPC": "syscall", + "syscall.ETH_P_TRAILER": "syscall", + "syscall.ETH_P_TR_802_2": "syscall", + "syscall.ETH_P_WAN_PPP": "syscall", + "syscall.ETH_P_WCCP": "syscall", + "syscall.ETH_P_X25": "syscall", + "syscall.ETIME": "syscall", + "syscall.ETIMEDOUT": "syscall", + "syscall.ETOOMANYREFS": "syscall", + "syscall.ETXTBSY": "syscall", + "syscall.EUCLEAN": "syscall", + "syscall.EUNATCH": "syscall", + "syscall.EUSERS": "syscall", + "syscall.EVFILT_AIO": "syscall", + "syscall.EVFILT_FS": "syscall", + "syscall.EVFILT_LIO": "syscall", + "syscall.EVFILT_MACHPORT": "syscall", + "syscall.EVFILT_PROC": "syscall", + "syscall.EVFILT_READ": "syscall", + "syscall.EVFILT_SIGNAL": "syscall", + "syscall.EVFILT_SYSCOUNT": "syscall", + "syscall.EVFILT_THREADMARKER": "syscall", + "syscall.EVFILT_TIMER": "syscall", + "syscall.EVFILT_USER": "syscall", + "syscall.EVFILT_VM": "syscall", + "syscall.EVFILT_VNODE": "syscall", + "syscall.EVFILT_WRITE": "syscall", + "syscall.EV_ADD": "syscall", + "syscall.EV_CLEAR": "syscall", + "syscall.EV_DELETE": "syscall", + "syscall.EV_DISABLE": "syscall", + "syscall.EV_DISPATCH": "syscall", + "syscall.EV_DROP": "syscall", + "syscall.EV_ENABLE": "syscall", + "syscall.EV_EOF": "syscall", + "syscall.EV_ERROR": "syscall", + "syscall.EV_FLAG0": "syscall", + "syscall.EV_FLAG1": "syscall", + "syscall.EV_ONESHOT": "syscall", + "syscall.EV_OOBAND": "syscall", + "syscall.EV_POLL": "syscall", + "syscall.EV_RECEIPT": "syscall", + "syscall.EV_SYSFLAGS": "syscall", + "syscall.EWINDOWS": "syscall", + "syscall.EWOULDBLOCK": "syscall", + "syscall.EXDEV": "syscall", + "syscall.EXFULL": "syscall", + "syscall.EXTA": "syscall", + "syscall.EXTB": "syscall", + "syscall.EXTPROC": "syscall", + "syscall.Environ": "syscall", + "syscall.EpollCreate": "syscall", + "syscall.EpollCreate1": "syscall", + "syscall.EpollCtl": "syscall", + "syscall.EpollEvent": "syscall", + "syscall.EpollWait": "syscall", + "syscall.Errno": "syscall", + "syscall.EscapeArg": "syscall", + "syscall.Exchangedata": "syscall", + "syscall.Exec": "syscall", + "syscall.Exit": "syscall", + "syscall.ExitProcess": "syscall", + "syscall.FD_CLOEXEC": "syscall", + "syscall.FD_SETSIZE": "syscall", + "syscall.FILE_ACTION_ADDED": "syscall", + "syscall.FILE_ACTION_MODIFIED": "syscall", + "syscall.FILE_ACTION_REMOVED": "syscall", + "syscall.FILE_ACTION_RENAMED_NEW_NAME": "syscall", + 
"syscall.FILE_ACTION_RENAMED_OLD_NAME": "syscall", + "syscall.FILE_APPEND_DATA": "syscall", + "syscall.FILE_ATTRIBUTE_ARCHIVE": "syscall", + "syscall.FILE_ATTRIBUTE_DIRECTORY": "syscall", + "syscall.FILE_ATTRIBUTE_HIDDEN": "syscall", + "syscall.FILE_ATTRIBUTE_NORMAL": "syscall", + "syscall.FILE_ATTRIBUTE_READONLY": "syscall", + "syscall.FILE_ATTRIBUTE_REPARSE_POINT": "syscall", + "syscall.FILE_ATTRIBUTE_SYSTEM": "syscall", + "syscall.FILE_BEGIN": "syscall", + "syscall.FILE_CURRENT": "syscall", + "syscall.FILE_END": "syscall", + "syscall.FILE_FLAG_BACKUP_SEMANTICS": "syscall", + "syscall.FILE_FLAG_OPEN_REPARSE_POINT": "syscall", + "syscall.FILE_FLAG_OVERLAPPED": "syscall", + "syscall.FILE_LIST_DIRECTORY": "syscall", + "syscall.FILE_MAP_COPY": "syscall", + "syscall.FILE_MAP_EXECUTE": "syscall", + "syscall.FILE_MAP_READ": "syscall", + "syscall.FILE_MAP_WRITE": "syscall", + "syscall.FILE_NOTIFY_CHANGE_ATTRIBUTES": "syscall", + "syscall.FILE_NOTIFY_CHANGE_CREATION": "syscall", + "syscall.FILE_NOTIFY_CHANGE_DIR_NAME": "syscall", + "syscall.FILE_NOTIFY_CHANGE_FILE_NAME": "syscall", + "syscall.FILE_NOTIFY_CHANGE_LAST_ACCESS": "syscall", + "syscall.FILE_NOTIFY_CHANGE_LAST_WRITE": "syscall", + "syscall.FILE_NOTIFY_CHANGE_SIZE": "syscall", + "syscall.FILE_SHARE_DELETE": "syscall", + "syscall.FILE_SHARE_READ": "syscall", + "syscall.FILE_SHARE_WRITE": "syscall", + "syscall.FILE_SKIP_COMPLETION_PORT_ON_SUCCESS": "syscall", + "syscall.FILE_SKIP_SET_EVENT_ON_HANDLE": "syscall", + "syscall.FILE_TYPE_CHAR": "syscall", + "syscall.FILE_TYPE_DISK": "syscall", + "syscall.FILE_TYPE_PIPE": "syscall", + "syscall.FILE_TYPE_REMOTE": "syscall", + "syscall.FILE_TYPE_UNKNOWN": "syscall", + "syscall.FILE_WRITE_ATTRIBUTES": "syscall", + "syscall.FLUSHO": "syscall", + "syscall.FORMAT_MESSAGE_ALLOCATE_BUFFER": "syscall", + "syscall.FORMAT_MESSAGE_ARGUMENT_ARRAY": "syscall", + "syscall.FORMAT_MESSAGE_FROM_HMODULE": "syscall", + "syscall.FORMAT_MESSAGE_FROM_STRING": "syscall", + "syscall.FORMAT_MESSAGE_FROM_SYSTEM": "syscall", + "syscall.FORMAT_MESSAGE_IGNORE_INSERTS": "syscall", + "syscall.FORMAT_MESSAGE_MAX_WIDTH_MASK": "syscall", + "syscall.FSCTL_GET_REPARSE_POINT": "syscall", + "syscall.F_ADDFILESIGS": "syscall", + "syscall.F_ADDSIGS": "syscall", + "syscall.F_ALLOCATEALL": "syscall", + "syscall.F_ALLOCATECONTIG": "syscall", + "syscall.F_CANCEL": "syscall", + "syscall.F_CHKCLEAN": "syscall", + "syscall.F_CLOSEM": "syscall", + "syscall.F_DUP2FD": "syscall", + "syscall.F_DUP2FD_CLOEXEC": "syscall", + "syscall.F_DUPFD": "syscall", + "syscall.F_DUPFD_CLOEXEC": "syscall", + "syscall.F_EXLCK": "syscall", + "syscall.F_FLUSH_DATA": "syscall", + "syscall.F_FREEZE_FS": "syscall", + "syscall.F_FSCTL": "syscall", + "syscall.F_FSDIRMASK": "syscall", + "syscall.F_FSIN": "syscall", + "syscall.F_FSINOUT": "syscall", + "syscall.F_FSOUT": "syscall", + "syscall.F_FSPRIV": "syscall", + "syscall.F_FSVOID": "syscall", + "syscall.F_FULLFSYNC": "syscall", + "syscall.F_GETFD": "syscall", + "syscall.F_GETFL": "syscall", + "syscall.F_GETLEASE": "syscall", + "syscall.F_GETLK": "syscall", + "syscall.F_GETLK64": "syscall", + "syscall.F_GETLKPID": "syscall", + "syscall.F_GETNOSIGPIPE": "syscall", + "syscall.F_GETOWN": "syscall", + "syscall.F_GETOWN_EX": "syscall", + "syscall.F_GETPATH": "syscall", + "syscall.F_GETPATH_MTMINFO": "syscall", + "syscall.F_GETPIPE_SZ": "syscall", + "syscall.F_GETPROTECTIONCLASS": "syscall", + "syscall.F_GETSIG": "syscall", + "syscall.F_GLOBAL_NOCACHE": "syscall", + "syscall.F_LOCK": "syscall", + "syscall.F_LOG2PHYS": 
"syscall", + "syscall.F_LOG2PHYS_EXT": "syscall", + "syscall.F_MARKDEPENDENCY": "syscall", + "syscall.F_MAXFD": "syscall", + "syscall.F_NOCACHE": "syscall", + "syscall.F_NODIRECT": "syscall", + "syscall.F_NOTIFY": "syscall", + "syscall.F_OGETLK": "syscall", + "syscall.F_OK": "syscall", + "syscall.F_OSETLK": "syscall", + "syscall.F_OSETLKW": "syscall", + "syscall.F_PARAM_MASK": "syscall", + "syscall.F_PARAM_MAX": "syscall", + "syscall.F_PATHPKG_CHECK": "syscall", + "syscall.F_PEOFPOSMODE": "syscall", + "syscall.F_PREALLOCATE": "syscall", + "syscall.F_RDADVISE": "syscall", + "syscall.F_RDAHEAD": "syscall", + "syscall.F_RDLCK": "syscall", + "syscall.F_READAHEAD": "syscall", + "syscall.F_READBOOTSTRAP": "syscall", + "syscall.F_SETBACKINGSTORE": "syscall", + "syscall.F_SETFD": "syscall", + "syscall.F_SETFL": "syscall", + "syscall.F_SETLEASE": "syscall", + "syscall.F_SETLK": "syscall", + "syscall.F_SETLK64": "syscall", + "syscall.F_SETLKW": "syscall", + "syscall.F_SETLKW64": "syscall", + "syscall.F_SETLK_REMOTE": "syscall", + "syscall.F_SETNOSIGPIPE": "syscall", + "syscall.F_SETOWN": "syscall", + "syscall.F_SETOWN_EX": "syscall", + "syscall.F_SETPIPE_SZ": "syscall", + "syscall.F_SETPROTECTIONCLASS": "syscall", + "syscall.F_SETSIG": "syscall", + "syscall.F_SETSIZE": "syscall", + "syscall.F_SHLCK": "syscall", + "syscall.F_TEST": "syscall", + "syscall.F_THAW_FS": "syscall", + "syscall.F_TLOCK": "syscall", + "syscall.F_ULOCK": "syscall", + "syscall.F_UNLCK": "syscall", + "syscall.F_UNLCKSYS": "syscall", + "syscall.F_VOLPOSMODE": "syscall", + "syscall.F_WRITEBOOTSTRAP": "syscall", + "syscall.F_WRLCK": "syscall", + "syscall.Faccessat": "syscall", + "syscall.Fallocate": "syscall", + "syscall.Fbootstraptransfer_t": "syscall", + "syscall.Fchdir": "syscall", + "syscall.Fchflags": "syscall", + "syscall.Fchmod": "syscall", + "syscall.Fchmodat": "syscall", + "syscall.Fchown": "syscall", + "syscall.Fchownat": "syscall", + "syscall.FcntlFlock": "syscall", + "syscall.FdSet": "syscall", + "syscall.Fdatasync": "syscall", + "syscall.FileNotifyInformation": "syscall", + "syscall.Filetime": "syscall", + "syscall.FindClose": "syscall", + "syscall.FindFirstFile": "syscall", + "syscall.FindNextFile": "syscall", + "syscall.Flock": "syscall", + "syscall.Flock_t": "syscall", + "syscall.FlushBpf": "syscall", + "syscall.FlushFileBuffers": "syscall", + "syscall.FlushViewOfFile": "syscall", + "syscall.ForkExec": "syscall", + "syscall.ForkLock": "syscall", + "syscall.FormatMessage": "syscall", + "syscall.Fpathconf": "syscall", + "syscall.FreeAddrInfoW": "syscall", + "syscall.FreeEnvironmentStrings": "syscall", + "syscall.FreeLibrary": "syscall", + "syscall.Fsid": "syscall", + "syscall.Fstat": "syscall", + "syscall.Fstatfs": "syscall", + "syscall.Fstore_t": "syscall", + "syscall.Fsync": "syscall", + "syscall.Ftruncate": "syscall", + "syscall.FullPath": "syscall", + "syscall.Futimes": "syscall", + "syscall.Futimesat": "syscall", + "syscall.GENERIC_ALL": "syscall", + "syscall.GENERIC_EXECUTE": "syscall", + "syscall.GENERIC_READ": "syscall", + "syscall.GENERIC_WRITE": "syscall", + "syscall.GUID": "syscall", + "syscall.GetAcceptExSockaddrs": "syscall", + "syscall.GetAdaptersInfo": "syscall", + "syscall.GetAddrInfoW": "syscall", + "syscall.GetCommandLine": "syscall", + "syscall.GetComputerName": "syscall", + "syscall.GetConsoleMode": "syscall", + "syscall.GetCurrentDirectory": "syscall", + "syscall.GetCurrentProcess": "syscall", + "syscall.GetEnvironmentStrings": "syscall", + "syscall.GetEnvironmentVariable": "syscall", + 
"syscall.GetExitCodeProcess": "syscall", + "syscall.GetFileAttributes": "syscall", + "syscall.GetFileAttributesEx": "syscall", + "syscall.GetFileExInfoStandard": "syscall", + "syscall.GetFileExMaxInfoLevel": "syscall", + "syscall.GetFileInformationByHandle": "syscall", + "syscall.GetFileType": "syscall", + "syscall.GetFullPathName": "syscall", + "syscall.GetHostByName": "syscall", + "syscall.GetIfEntry": "syscall", + "syscall.GetLastError": "syscall", + "syscall.GetLengthSid": "syscall", + "syscall.GetLongPathName": "syscall", + "syscall.GetProcAddress": "syscall", + "syscall.GetProcessTimes": "syscall", + "syscall.GetProtoByName": "syscall", + "syscall.GetQueuedCompletionStatus": "syscall", + "syscall.GetServByName": "syscall", + "syscall.GetShortPathName": "syscall", + "syscall.GetStartupInfo": "syscall", + "syscall.GetStdHandle": "syscall", + "syscall.GetSystemTimeAsFileTime": "syscall", + "syscall.GetTempPath": "syscall", + "syscall.GetTimeZoneInformation": "syscall", + "syscall.GetTokenInformation": "syscall", + "syscall.GetUserNameEx": "syscall", + "syscall.GetUserProfileDirectory": "syscall", + "syscall.GetVersion": "syscall", + "syscall.Getcwd": "syscall", + "syscall.Getdents": "syscall", + "syscall.Getdirentries": "syscall", + "syscall.Getdtablesize": "syscall", + "syscall.Getegid": "syscall", + "syscall.Getenv": "syscall", + "syscall.Geteuid": "syscall", + "syscall.Getfsstat": "syscall", + "syscall.Getgid": "syscall", + "syscall.Getgroups": "syscall", + "syscall.Getpagesize": "syscall", + "syscall.Getpeername": "syscall", + "syscall.Getpgid": "syscall", + "syscall.Getpgrp": "syscall", + "syscall.Getpid": "syscall", + "syscall.Getppid": "syscall", + "syscall.Getpriority": "syscall", + "syscall.Getrlimit": "syscall", + "syscall.Getrusage": "syscall", + "syscall.Getsid": "syscall", + "syscall.Getsockname": "syscall", + "syscall.Getsockopt": "syscall", + "syscall.GetsockoptByte": "syscall", + "syscall.GetsockoptICMPv6Filter": "syscall", + "syscall.GetsockoptIPMreq": "syscall", + "syscall.GetsockoptIPMreqn": "syscall", + "syscall.GetsockoptIPv6MTUInfo": "syscall", + "syscall.GetsockoptIPv6Mreq": "syscall", + "syscall.GetsockoptInet4Addr": "syscall", + "syscall.GetsockoptInt": "syscall", + "syscall.GetsockoptUcred": "syscall", + "syscall.Gettid": "syscall", + "syscall.Gettimeofday": "syscall", + "syscall.Getuid": "syscall", + "syscall.Getwd": "syscall", + "syscall.Getxattr": "syscall", + "syscall.HANDLE_FLAG_INHERIT": "syscall", + "syscall.HKEY_CLASSES_ROOT": "syscall", + "syscall.HKEY_CURRENT_CONFIG": "syscall", + "syscall.HKEY_CURRENT_USER": "syscall", + "syscall.HKEY_DYN_DATA": "syscall", + "syscall.HKEY_LOCAL_MACHINE": "syscall", + "syscall.HKEY_PERFORMANCE_DATA": "syscall", + "syscall.HKEY_USERS": "syscall", + "syscall.HUPCL": "syscall", + "syscall.Handle": "syscall", + "syscall.Hostent": "syscall", + "syscall.ICANON": "syscall", + "syscall.ICMP6_FILTER": "syscall", + "syscall.ICMPV6_FILTER": "syscall", + "syscall.ICMPv6Filter": "syscall", + "syscall.ICRNL": "syscall", + "syscall.IEXTEN": "syscall", + "syscall.IFAN_ARRIVAL": "syscall", + "syscall.IFAN_DEPARTURE": "syscall", + "syscall.IFA_ADDRESS": "syscall", + "syscall.IFA_ANYCAST": "syscall", + "syscall.IFA_BROADCAST": "syscall", + "syscall.IFA_CACHEINFO": "syscall", + "syscall.IFA_F_DADFAILED": "syscall", + "syscall.IFA_F_DEPRECATED": "syscall", + "syscall.IFA_F_HOMEADDRESS": "syscall", + "syscall.IFA_F_NODAD": "syscall", + "syscall.IFA_F_OPTIMISTIC": "syscall", + "syscall.IFA_F_PERMANENT": "syscall", + 
"syscall.IFA_F_SECONDARY": "syscall", + "syscall.IFA_F_TEMPORARY": "syscall", + "syscall.IFA_F_TENTATIVE": "syscall", + "syscall.IFA_LABEL": "syscall", + "syscall.IFA_LOCAL": "syscall", + "syscall.IFA_MAX": "syscall", + "syscall.IFA_MULTICAST": "syscall", + "syscall.IFA_ROUTE": "syscall", + "syscall.IFA_UNSPEC": "syscall", + "syscall.IFF_ALLMULTI": "syscall", + "syscall.IFF_ALTPHYS": "syscall", + "syscall.IFF_AUTOMEDIA": "syscall", + "syscall.IFF_BROADCAST": "syscall", + "syscall.IFF_CANTCHANGE": "syscall", + "syscall.IFF_CANTCONFIG": "syscall", + "syscall.IFF_DEBUG": "syscall", + "syscall.IFF_DRV_OACTIVE": "syscall", + "syscall.IFF_DRV_RUNNING": "syscall", + "syscall.IFF_DYING": "syscall", + "syscall.IFF_DYNAMIC": "syscall", + "syscall.IFF_LINK0": "syscall", + "syscall.IFF_LINK1": "syscall", + "syscall.IFF_LINK2": "syscall", + "syscall.IFF_LOOPBACK": "syscall", + "syscall.IFF_MASTER": "syscall", + "syscall.IFF_MONITOR": "syscall", + "syscall.IFF_MULTICAST": "syscall", + "syscall.IFF_NOARP": "syscall", + "syscall.IFF_NOTRAILERS": "syscall", + "syscall.IFF_NO_PI": "syscall", + "syscall.IFF_OACTIVE": "syscall", + "syscall.IFF_ONE_QUEUE": "syscall", + "syscall.IFF_POINTOPOINT": "syscall", + "syscall.IFF_POINTTOPOINT": "syscall", + "syscall.IFF_PORTSEL": "syscall", + "syscall.IFF_PPROMISC": "syscall", + "syscall.IFF_PROMISC": "syscall", + "syscall.IFF_RENAMING": "syscall", + "syscall.IFF_RUNNING": "syscall", + "syscall.IFF_SIMPLEX": "syscall", + "syscall.IFF_SLAVE": "syscall", + "syscall.IFF_SMART": "syscall", + "syscall.IFF_STATICARP": "syscall", + "syscall.IFF_TAP": "syscall", + "syscall.IFF_TUN": "syscall", + "syscall.IFF_TUN_EXCL": "syscall", + "syscall.IFF_UP": "syscall", + "syscall.IFF_VNET_HDR": "syscall", + "syscall.IFLA_ADDRESS": "syscall", + "syscall.IFLA_BROADCAST": "syscall", + "syscall.IFLA_COST": "syscall", + "syscall.IFLA_IFALIAS": "syscall", + "syscall.IFLA_IFNAME": "syscall", + "syscall.IFLA_LINK": "syscall", + "syscall.IFLA_LINKINFO": "syscall", + "syscall.IFLA_LINKMODE": "syscall", + "syscall.IFLA_MAP": "syscall", + "syscall.IFLA_MASTER": "syscall", + "syscall.IFLA_MAX": "syscall", + "syscall.IFLA_MTU": "syscall", + "syscall.IFLA_NET_NS_PID": "syscall", + "syscall.IFLA_OPERSTATE": "syscall", + "syscall.IFLA_PRIORITY": "syscall", + "syscall.IFLA_PROTINFO": "syscall", + "syscall.IFLA_QDISC": "syscall", + "syscall.IFLA_STATS": "syscall", + "syscall.IFLA_TXQLEN": "syscall", + "syscall.IFLA_UNSPEC": "syscall", + "syscall.IFLA_WEIGHT": "syscall", + "syscall.IFLA_WIRELESS": "syscall", + "syscall.IFNAMSIZ": "syscall", + "syscall.IFT_1822": "syscall", + "syscall.IFT_A12MPPSWITCH": "syscall", + "syscall.IFT_AAL2": "syscall", + "syscall.IFT_AAL5": "syscall", + "syscall.IFT_ADSL": "syscall", + "syscall.IFT_AFLANE8023": "syscall", + "syscall.IFT_AFLANE8025": "syscall", + "syscall.IFT_ARAP": "syscall", + "syscall.IFT_ARCNET": "syscall", + "syscall.IFT_ARCNETPLUS": "syscall", + "syscall.IFT_ASYNC": "syscall", + "syscall.IFT_ATM": "syscall", + "syscall.IFT_ATMDXI": "syscall", + "syscall.IFT_ATMFUNI": "syscall", + "syscall.IFT_ATMIMA": "syscall", + "syscall.IFT_ATMLOGICAL": "syscall", + "syscall.IFT_ATMRADIO": "syscall", + "syscall.IFT_ATMSUBINTERFACE": "syscall", + "syscall.IFT_ATMVCIENDPT": "syscall", + "syscall.IFT_ATMVIRTUAL": "syscall", + "syscall.IFT_BGPPOLICYACCOUNTING": "syscall", + "syscall.IFT_BLUETOOTH": "syscall", + "syscall.IFT_BRIDGE": "syscall", + "syscall.IFT_BSC": "syscall", + "syscall.IFT_CARP": "syscall", + "syscall.IFT_CCTEMUL": "syscall", + "syscall.IFT_CELLULAR": 
"syscall", + "syscall.IFT_CEPT": "syscall", + "syscall.IFT_CES": "syscall", + "syscall.IFT_CHANNEL": "syscall", + "syscall.IFT_CNR": "syscall", + "syscall.IFT_COFFEE": "syscall", + "syscall.IFT_COMPOSITELINK": "syscall", + "syscall.IFT_DCN": "syscall", + "syscall.IFT_DIGITALPOWERLINE": "syscall", + "syscall.IFT_DIGITALWRAPPEROVERHEADCHANNEL": "syscall", + "syscall.IFT_DLSW": "syscall", + "syscall.IFT_DOCSCABLEDOWNSTREAM": "syscall", + "syscall.IFT_DOCSCABLEMACLAYER": "syscall", + "syscall.IFT_DOCSCABLEUPSTREAM": "syscall", + "syscall.IFT_DOCSCABLEUPSTREAMCHANNEL": "syscall", + "syscall.IFT_DS0": "syscall", + "syscall.IFT_DS0BUNDLE": "syscall", + "syscall.IFT_DS1FDL": "syscall", + "syscall.IFT_DS3": "syscall", + "syscall.IFT_DTM": "syscall", + "syscall.IFT_DUMMY": "syscall", + "syscall.IFT_DVBASILN": "syscall", + "syscall.IFT_DVBASIOUT": "syscall", + "syscall.IFT_DVBRCCDOWNSTREAM": "syscall", + "syscall.IFT_DVBRCCMACLAYER": "syscall", + "syscall.IFT_DVBRCCUPSTREAM": "syscall", + "syscall.IFT_ECONET": "syscall", + "syscall.IFT_ENC": "syscall", + "syscall.IFT_EON": "syscall", + "syscall.IFT_EPLRS": "syscall", + "syscall.IFT_ESCON": "syscall", + "syscall.IFT_ETHER": "syscall", + "syscall.IFT_FAITH": "syscall", + "syscall.IFT_FAST": "syscall", + "syscall.IFT_FASTETHER": "syscall", + "syscall.IFT_FASTETHERFX": "syscall", + "syscall.IFT_FDDI": "syscall", + "syscall.IFT_FIBRECHANNEL": "syscall", + "syscall.IFT_FRAMERELAYINTERCONNECT": "syscall", + "syscall.IFT_FRAMERELAYMPI": "syscall", + "syscall.IFT_FRDLCIENDPT": "syscall", + "syscall.IFT_FRELAY": "syscall", + "syscall.IFT_FRELAYDCE": "syscall", + "syscall.IFT_FRF16MFRBUNDLE": "syscall", + "syscall.IFT_FRFORWARD": "syscall", + "syscall.IFT_G703AT2MB": "syscall", + "syscall.IFT_G703AT64K": "syscall", + "syscall.IFT_GIF": "syscall", + "syscall.IFT_GIGABITETHERNET": "syscall", + "syscall.IFT_GR303IDT": "syscall", + "syscall.IFT_GR303RDT": "syscall", + "syscall.IFT_H323GATEKEEPER": "syscall", + "syscall.IFT_H323PROXY": "syscall", + "syscall.IFT_HDH1822": "syscall", + "syscall.IFT_HDLC": "syscall", + "syscall.IFT_HDSL2": "syscall", + "syscall.IFT_HIPERLAN2": "syscall", + "syscall.IFT_HIPPI": "syscall", + "syscall.IFT_HIPPIINTERFACE": "syscall", + "syscall.IFT_HOSTPAD": "syscall", + "syscall.IFT_HSSI": "syscall", + "syscall.IFT_HY": "syscall", + "syscall.IFT_IBM370PARCHAN": "syscall", + "syscall.IFT_IDSL": "syscall", + "syscall.IFT_IEEE1394": "syscall", + "syscall.IFT_IEEE80211": "syscall", + "syscall.IFT_IEEE80212": "syscall", + "syscall.IFT_IEEE8023ADLAG": "syscall", + "syscall.IFT_IFGSN": "syscall", + "syscall.IFT_IMT": "syscall", + "syscall.IFT_INFINIBAND": "syscall", + "syscall.IFT_INTERLEAVE": "syscall", + "syscall.IFT_IP": "syscall", + "syscall.IFT_IPFORWARD": "syscall", + "syscall.IFT_IPOVERATM": "syscall", + "syscall.IFT_IPOVERCDLC": "syscall", + "syscall.IFT_IPOVERCLAW": "syscall", + "syscall.IFT_IPSWITCH": "syscall", + "syscall.IFT_IPXIP": "syscall", + "syscall.IFT_ISDN": "syscall", + "syscall.IFT_ISDNBASIC": "syscall", + "syscall.IFT_ISDNPRIMARY": "syscall", + "syscall.IFT_ISDNS": "syscall", + "syscall.IFT_ISDNU": "syscall", + "syscall.IFT_ISO88022LLC": "syscall", + "syscall.IFT_ISO88023": "syscall", + "syscall.IFT_ISO88024": "syscall", + "syscall.IFT_ISO88025": "syscall", + "syscall.IFT_ISO88025CRFPINT": "syscall", + "syscall.IFT_ISO88025DTR": "syscall", + "syscall.IFT_ISO88025FIBER": "syscall", + "syscall.IFT_ISO88026": "syscall", + "syscall.IFT_ISUP": "syscall", + "syscall.IFT_L2VLAN": "syscall", + "syscall.IFT_L3IPVLAN": "syscall", + 
"syscall.IFT_L3IPXVLAN": "syscall", + "syscall.IFT_LAPB": "syscall", + "syscall.IFT_LAPD": "syscall", + "syscall.IFT_LAPF": "syscall", + "syscall.IFT_LINEGROUP": "syscall", + "syscall.IFT_LOCALTALK": "syscall", + "syscall.IFT_LOOP": "syscall", + "syscall.IFT_MEDIAMAILOVERIP": "syscall", + "syscall.IFT_MFSIGLINK": "syscall", + "syscall.IFT_MIOX25": "syscall", + "syscall.IFT_MODEM": "syscall", + "syscall.IFT_MPC": "syscall", + "syscall.IFT_MPLS": "syscall", + "syscall.IFT_MPLSTUNNEL": "syscall", + "syscall.IFT_MSDSL": "syscall", + "syscall.IFT_MVL": "syscall", + "syscall.IFT_MYRINET": "syscall", + "syscall.IFT_NFAS": "syscall", + "syscall.IFT_NSIP": "syscall", + "syscall.IFT_OPTICALCHANNEL": "syscall", + "syscall.IFT_OPTICALTRANSPORT": "syscall", + "syscall.IFT_OTHER": "syscall", + "syscall.IFT_P10": "syscall", + "syscall.IFT_P80": "syscall", + "syscall.IFT_PARA": "syscall", + "syscall.IFT_PDP": "syscall", + "syscall.IFT_PFLOG": "syscall", + "syscall.IFT_PFLOW": "syscall", + "syscall.IFT_PFSYNC": "syscall", + "syscall.IFT_PLC": "syscall", + "syscall.IFT_PON155": "syscall", + "syscall.IFT_PON622": "syscall", + "syscall.IFT_POS": "syscall", + "syscall.IFT_PPP": "syscall", + "syscall.IFT_PPPMULTILINKBUNDLE": "syscall", + "syscall.IFT_PROPATM": "syscall", + "syscall.IFT_PROPBWAP2MP": "syscall", + "syscall.IFT_PROPCNLS": "syscall", + "syscall.IFT_PROPDOCSWIRELESSDOWNSTREAM": "syscall", + "syscall.IFT_PROPDOCSWIRELESSMACLAYER": "syscall", + "syscall.IFT_PROPDOCSWIRELESSUPSTREAM": "syscall", + "syscall.IFT_PROPMUX": "syscall", + "syscall.IFT_PROPVIRTUAL": "syscall", + "syscall.IFT_PROPWIRELESSP2P": "syscall", + "syscall.IFT_PTPSERIAL": "syscall", + "syscall.IFT_PVC": "syscall", + "syscall.IFT_Q2931": "syscall", + "syscall.IFT_QLLC": "syscall", + "syscall.IFT_RADIOMAC": "syscall", + "syscall.IFT_RADSL": "syscall", + "syscall.IFT_REACHDSL": "syscall", + "syscall.IFT_RFC1483": "syscall", + "syscall.IFT_RS232": "syscall", + "syscall.IFT_RSRB": "syscall", + "syscall.IFT_SDLC": "syscall", + "syscall.IFT_SDSL": "syscall", + "syscall.IFT_SHDSL": "syscall", + "syscall.IFT_SIP": "syscall", + "syscall.IFT_SIPSIG": "syscall", + "syscall.IFT_SIPTG": "syscall", + "syscall.IFT_SLIP": "syscall", + "syscall.IFT_SMDSDXI": "syscall", + "syscall.IFT_SMDSICIP": "syscall", + "syscall.IFT_SONET": "syscall", + "syscall.IFT_SONETOVERHEADCHANNEL": "syscall", + "syscall.IFT_SONETPATH": "syscall", + "syscall.IFT_SONETVT": "syscall", + "syscall.IFT_SRP": "syscall", + "syscall.IFT_SS7SIGLINK": "syscall", + "syscall.IFT_STACKTOSTACK": "syscall", + "syscall.IFT_STARLAN": "syscall", + "syscall.IFT_STF": "syscall", + "syscall.IFT_T1": "syscall", + "syscall.IFT_TDLC": "syscall", + "syscall.IFT_TELINK": "syscall", + "syscall.IFT_TERMPAD": "syscall", + "syscall.IFT_TR008": "syscall", + "syscall.IFT_TRANSPHDLC": "syscall", + "syscall.IFT_TUNNEL": "syscall", + "syscall.IFT_ULTRA": "syscall", + "syscall.IFT_USB": "syscall", + "syscall.IFT_V11": "syscall", + "syscall.IFT_V35": "syscall", + "syscall.IFT_V36": "syscall", + "syscall.IFT_V37": "syscall", + "syscall.IFT_VDSL": "syscall", + "syscall.IFT_VIRTUALIPADDRESS": "syscall", + "syscall.IFT_VIRTUALTG": "syscall", + "syscall.IFT_VOICEDID": "syscall", + "syscall.IFT_VOICEEM": "syscall", + "syscall.IFT_VOICEEMFGD": "syscall", + "syscall.IFT_VOICEENCAP": "syscall", + "syscall.IFT_VOICEFGDEANA": "syscall", + "syscall.IFT_VOICEFXO": "syscall", + "syscall.IFT_VOICEFXS": "syscall", + "syscall.IFT_VOICEOVERATM": "syscall", + "syscall.IFT_VOICEOVERCABLE": "syscall", + 
"syscall.IFT_VOICEOVERFRAMERELAY": "syscall", + "syscall.IFT_VOICEOVERIP": "syscall", + "syscall.IFT_X213": "syscall", + "syscall.IFT_X25": "syscall", + "syscall.IFT_X25DDN": "syscall", + "syscall.IFT_X25HUNTGROUP": "syscall", + "syscall.IFT_X25MLP": "syscall", + "syscall.IFT_X25PLE": "syscall", + "syscall.IFT_XETHER": "syscall", + "syscall.IGNBRK": "syscall", + "syscall.IGNCR": "syscall", + "syscall.IGNORE": "syscall", + "syscall.IGNPAR": "syscall", + "syscall.IMAXBEL": "syscall", + "syscall.INFINITE": "syscall", + "syscall.INLCR": "syscall", + "syscall.INPCK": "syscall", + "syscall.INVALID_FILE_ATTRIBUTES": "syscall", + "syscall.IN_ACCESS": "syscall", + "syscall.IN_ALL_EVENTS": "syscall", + "syscall.IN_ATTRIB": "syscall", + "syscall.IN_CLASSA_HOST": "syscall", + "syscall.IN_CLASSA_MAX": "syscall", + "syscall.IN_CLASSA_NET": "syscall", + "syscall.IN_CLASSA_NSHIFT": "syscall", + "syscall.IN_CLASSB_HOST": "syscall", + "syscall.IN_CLASSB_MAX": "syscall", + "syscall.IN_CLASSB_NET": "syscall", + "syscall.IN_CLASSB_NSHIFT": "syscall", + "syscall.IN_CLASSC_HOST": "syscall", + "syscall.IN_CLASSC_NET": "syscall", + "syscall.IN_CLASSC_NSHIFT": "syscall", + "syscall.IN_CLASSD_HOST": "syscall", + "syscall.IN_CLASSD_NET": "syscall", + "syscall.IN_CLASSD_NSHIFT": "syscall", + "syscall.IN_CLOEXEC": "syscall", + "syscall.IN_CLOSE": "syscall", + "syscall.IN_CLOSE_NOWRITE": "syscall", + "syscall.IN_CLOSE_WRITE": "syscall", + "syscall.IN_CREATE": "syscall", + "syscall.IN_DELETE": "syscall", + "syscall.IN_DELETE_SELF": "syscall", + "syscall.IN_DONT_FOLLOW": "syscall", + "syscall.IN_EXCL_UNLINK": "syscall", + "syscall.IN_IGNORED": "syscall", + "syscall.IN_ISDIR": "syscall", + "syscall.IN_LINKLOCALNETNUM": "syscall", + "syscall.IN_LOOPBACKNET": "syscall", + "syscall.IN_MASK_ADD": "syscall", + "syscall.IN_MODIFY": "syscall", + "syscall.IN_MOVE": "syscall", + "syscall.IN_MOVED_FROM": "syscall", + "syscall.IN_MOVED_TO": "syscall", + "syscall.IN_MOVE_SELF": "syscall", + "syscall.IN_NONBLOCK": "syscall", + "syscall.IN_ONESHOT": "syscall", + "syscall.IN_ONLYDIR": "syscall", + "syscall.IN_OPEN": "syscall", + "syscall.IN_Q_OVERFLOW": "syscall", + "syscall.IN_RFC3021_HOST": "syscall", + "syscall.IN_RFC3021_MASK": "syscall", + "syscall.IN_RFC3021_NET": "syscall", + "syscall.IN_RFC3021_NSHIFT": "syscall", + "syscall.IN_UNMOUNT": "syscall", + "syscall.IOC_IN": "syscall", + "syscall.IOC_INOUT": "syscall", + "syscall.IOC_OUT": "syscall", + "syscall.IOC_VENDOR": "syscall", + "syscall.IOC_WS2": "syscall", + "syscall.IO_REPARSE_TAG_SYMLINK": "syscall", + "syscall.IPMreq": "syscall", + "syscall.IPMreqn": "syscall", + "syscall.IPPROTO_3PC": "syscall", + "syscall.IPPROTO_ADFS": "syscall", + "syscall.IPPROTO_AH": "syscall", + "syscall.IPPROTO_AHIP": "syscall", + "syscall.IPPROTO_APES": "syscall", + "syscall.IPPROTO_ARGUS": "syscall", + "syscall.IPPROTO_AX25": "syscall", + "syscall.IPPROTO_BHA": "syscall", + "syscall.IPPROTO_BLT": "syscall", + "syscall.IPPROTO_BRSATMON": "syscall", + "syscall.IPPROTO_CARP": "syscall", + "syscall.IPPROTO_CFTP": "syscall", + "syscall.IPPROTO_CHAOS": "syscall", + "syscall.IPPROTO_CMTP": "syscall", + "syscall.IPPROTO_COMP": "syscall", + "syscall.IPPROTO_CPHB": "syscall", + "syscall.IPPROTO_CPNX": "syscall", + "syscall.IPPROTO_DCCP": "syscall", + "syscall.IPPROTO_DDP": "syscall", + "syscall.IPPROTO_DGP": "syscall", + "syscall.IPPROTO_DIVERT": "syscall", + "syscall.IPPROTO_DIVERT_INIT": "syscall", + "syscall.IPPROTO_DIVERT_RESP": "syscall", + "syscall.IPPROTO_DONE": "syscall", + 
"syscall.IPPROTO_DSTOPTS": "syscall", + "syscall.IPPROTO_EGP": "syscall", + "syscall.IPPROTO_EMCON": "syscall", + "syscall.IPPROTO_ENCAP": "syscall", + "syscall.IPPROTO_EON": "syscall", + "syscall.IPPROTO_ESP": "syscall", + "syscall.IPPROTO_ETHERIP": "syscall", + "syscall.IPPROTO_FRAGMENT": "syscall", + "syscall.IPPROTO_GGP": "syscall", + "syscall.IPPROTO_GMTP": "syscall", + "syscall.IPPROTO_GRE": "syscall", + "syscall.IPPROTO_HELLO": "syscall", + "syscall.IPPROTO_HMP": "syscall", + "syscall.IPPROTO_HOPOPTS": "syscall", + "syscall.IPPROTO_ICMP": "syscall", + "syscall.IPPROTO_ICMPV6": "syscall", + "syscall.IPPROTO_IDP": "syscall", + "syscall.IPPROTO_IDPR": "syscall", + "syscall.IPPROTO_IDRP": "syscall", + "syscall.IPPROTO_IGMP": "syscall", + "syscall.IPPROTO_IGP": "syscall", + "syscall.IPPROTO_IGRP": "syscall", + "syscall.IPPROTO_IL": "syscall", + "syscall.IPPROTO_INLSP": "syscall", + "syscall.IPPROTO_INP": "syscall", + "syscall.IPPROTO_IP": "syscall", + "syscall.IPPROTO_IPCOMP": "syscall", + "syscall.IPPROTO_IPCV": "syscall", + "syscall.IPPROTO_IPEIP": "syscall", + "syscall.IPPROTO_IPIP": "syscall", + "syscall.IPPROTO_IPPC": "syscall", + "syscall.IPPROTO_IPV4": "syscall", + "syscall.IPPROTO_IPV6": "syscall", + "syscall.IPPROTO_IPV6_ICMP": "syscall", + "syscall.IPPROTO_IRTP": "syscall", + "syscall.IPPROTO_KRYPTOLAN": "syscall", + "syscall.IPPROTO_LARP": "syscall", + "syscall.IPPROTO_LEAF1": "syscall", + "syscall.IPPROTO_LEAF2": "syscall", + "syscall.IPPROTO_MAX": "syscall", + "syscall.IPPROTO_MAXID": "syscall", + "syscall.IPPROTO_MEAS": "syscall", + "syscall.IPPROTO_MH": "syscall", + "syscall.IPPROTO_MHRP": "syscall", + "syscall.IPPROTO_MICP": "syscall", + "syscall.IPPROTO_MOBILE": "syscall", + "syscall.IPPROTO_MPLS": "syscall", + "syscall.IPPROTO_MTP": "syscall", + "syscall.IPPROTO_MUX": "syscall", + "syscall.IPPROTO_ND": "syscall", + "syscall.IPPROTO_NHRP": "syscall", + "syscall.IPPROTO_NONE": "syscall", + "syscall.IPPROTO_NSP": "syscall", + "syscall.IPPROTO_NVPII": "syscall", + "syscall.IPPROTO_OLD_DIVERT": "syscall", + "syscall.IPPROTO_OSPFIGP": "syscall", + "syscall.IPPROTO_PFSYNC": "syscall", + "syscall.IPPROTO_PGM": "syscall", + "syscall.IPPROTO_PIGP": "syscall", + "syscall.IPPROTO_PIM": "syscall", + "syscall.IPPROTO_PRM": "syscall", + "syscall.IPPROTO_PUP": "syscall", + "syscall.IPPROTO_PVP": "syscall", + "syscall.IPPROTO_RAW": "syscall", + "syscall.IPPROTO_RCCMON": "syscall", + "syscall.IPPROTO_RDP": "syscall", + "syscall.IPPROTO_ROUTING": "syscall", + "syscall.IPPROTO_RSVP": "syscall", + "syscall.IPPROTO_RVD": "syscall", + "syscall.IPPROTO_SATEXPAK": "syscall", + "syscall.IPPROTO_SATMON": "syscall", + "syscall.IPPROTO_SCCSP": "syscall", + "syscall.IPPROTO_SCTP": "syscall", + "syscall.IPPROTO_SDRP": "syscall", + "syscall.IPPROTO_SEND": "syscall", + "syscall.IPPROTO_SEP": "syscall", + "syscall.IPPROTO_SKIP": "syscall", + "syscall.IPPROTO_SPACER": "syscall", + "syscall.IPPROTO_SRPC": "syscall", + "syscall.IPPROTO_ST": "syscall", + "syscall.IPPROTO_SVMTP": "syscall", + "syscall.IPPROTO_SWIPE": "syscall", + "syscall.IPPROTO_TCF": "syscall", + "syscall.IPPROTO_TCP": "syscall", + "syscall.IPPROTO_TLSP": "syscall", + "syscall.IPPROTO_TP": "syscall", + "syscall.IPPROTO_TPXX": "syscall", + "syscall.IPPROTO_TRUNK1": "syscall", + "syscall.IPPROTO_TRUNK2": "syscall", + "syscall.IPPROTO_TTP": "syscall", + "syscall.IPPROTO_UDP": "syscall", + "syscall.IPPROTO_UDPLITE": "syscall", + "syscall.IPPROTO_VINES": "syscall", + "syscall.IPPROTO_VISA": "syscall", + "syscall.IPPROTO_VMTP": "syscall", + 
"syscall.IPPROTO_VRRP": "syscall", + "syscall.IPPROTO_WBEXPAK": "syscall", + "syscall.IPPROTO_WBMON": "syscall", + "syscall.IPPROTO_WSN": "syscall", + "syscall.IPPROTO_XNET": "syscall", + "syscall.IPPROTO_XTP": "syscall", + "syscall.IPV6_2292DSTOPTS": "syscall", + "syscall.IPV6_2292HOPLIMIT": "syscall", + "syscall.IPV6_2292HOPOPTS": "syscall", + "syscall.IPV6_2292NEXTHOP": "syscall", + "syscall.IPV6_2292PKTINFO": "syscall", + "syscall.IPV6_2292PKTOPTIONS": "syscall", + "syscall.IPV6_2292RTHDR": "syscall", + "syscall.IPV6_ADDRFORM": "syscall", + "syscall.IPV6_ADD_MEMBERSHIP": "syscall", + "syscall.IPV6_AUTHHDR": "syscall", + "syscall.IPV6_AUTH_LEVEL": "syscall", + "syscall.IPV6_AUTOFLOWLABEL": "syscall", + "syscall.IPV6_BINDANY": "syscall", + "syscall.IPV6_BINDV6ONLY": "syscall", + "syscall.IPV6_BOUND_IF": "syscall", + "syscall.IPV6_CHECKSUM": "syscall", + "syscall.IPV6_DEFAULT_MULTICAST_HOPS": "syscall", + "syscall.IPV6_DEFAULT_MULTICAST_LOOP": "syscall", + "syscall.IPV6_DEFHLIM": "syscall", + "syscall.IPV6_DONTFRAG": "syscall", + "syscall.IPV6_DROP_MEMBERSHIP": "syscall", + "syscall.IPV6_DSTOPTS": "syscall", + "syscall.IPV6_ESP_NETWORK_LEVEL": "syscall", + "syscall.IPV6_ESP_TRANS_LEVEL": "syscall", + "syscall.IPV6_FAITH": "syscall", + "syscall.IPV6_FLOWINFO_MASK": "syscall", + "syscall.IPV6_FLOWLABEL_MASK": "syscall", + "syscall.IPV6_FRAGTTL": "syscall", + "syscall.IPV6_FW_ADD": "syscall", + "syscall.IPV6_FW_DEL": "syscall", + "syscall.IPV6_FW_FLUSH": "syscall", + "syscall.IPV6_FW_GET": "syscall", + "syscall.IPV6_FW_ZERO": "syscall", + "syscall.IPV6_HLIMDEC": "syscall", + "syscall.IPV6_HOPLIMIT": "syscall", + "syscall.IPV6_HOPOPTS": "syscall", + "syscall.IPV6_IPCOMP_LEVEL": "syscall", + "syscall.IPV6_IPSEC_POLICY": "syscall", + "syscall.IPV6_JOIN_ANYCAST": "syscall", + "syscall.IPV6_JOIN_GROUP": "syscall", + "syscall.IPV6_LEAVE_ANYCAST": "syscall", + "syscall.IPV6_LEAVE_GROUP": "syscall", + "syscall.IPV6_MAXHLIM": "syscall", + "syscall.IPV6_MAXOPTHDR": "syscall", + "syscall.IPV6_MAXPACKET": "syscall", + "syscall.IPV6_MAX_GROUP_SRC_FILTER": "syscall", + "syscall.IPV6_MAX_MEMBERSHIPS": "syscall", + "syscall.IPV6_MAX_SOCK_SRC_FILTER": "syscall", + "syscall.IPV6_MIN_MEMBERSHIPS": "syscall", + "syscall.IPV6_MMTU": "syscall", + "syscall.IPV6_MSFILTER": "syscall", + "syscall.IPV6_MTU": "syscall", + "syscall.IPV6_MTU_DISCOVER": "syscall", + "syscall.IPV6_MULTICAST_HOPS": "syscall", + "syscall.IPV6_MULTICAST_IF": "syscall", + "syscall.IPV6_MULTICAST_LOOP": "syscall", + "syscall.IPV6_NEXTHOP": "syscall", + "syscall.IPV6_OPTIONS": "syscall", + "syscall.IPV6_PATHMTU": "syscall", + "syscall.IPV6_PIPEX": "syscall", + "syscall.IPV6_PKTINFO": "syscall", + "syscall.IPV6_PMTUDISC_DO": "syscall", + "syscall.IPV6_PMTUDISC_DONT": "syscall", + "syscall.IPV6_PMTUDISC_PROBE": "syscall", + "syscall.IPV6_PMTUDISC_WANT": "syscall", + "syscall.IPV6_PORTRANGE": "syscall", + "syscall.IPV6_PORTRANGE_DEFAULT": "syscall", + "syscall.IPV6_PORTRANGE_HIGH": "syscall", + "syscall.IPV6_PORTRANGE_LOW": "syscall", + "syscall.IPV6_PREFER_TEMPADDR": "syscall", + "syscall.IPV6_RECVDSTOPTS": "syscall", + "syscall.IPV6_RECVDSTPORT": "syscall", + "syscall.IPV6_RECVERR": "syscall", + "syscall.IPV6_RECVHOPLIMIT": "syscall", + "syscall.IPV6_RECVHOPOPTS": "syscall", + "syscall.IPV6_RECVPATHMTU": "syscall", + "syscall.IPV6_RECVPKTINFO": "syscall", + "syscall.IPV6_RECVRTHDR": "syscall", + "syscall.IPV6_RECVTCLASS": "syscall", + "syscall.IPV6_ROUTER_ALERT": "syscall", + "syscall.IPV6_RTABLE": "syscall", + "syscall.IPV6_RTHDR": "syscall", + 
"syscall.IPV6_RTHDRDSTOPTS": "syscall", + "syscall.IPV6_RTHDR_LOOSE": "syscall", + "syscall.IPV6_RTHDR_STRICT": "syscall", + "syscall.IPV6_RTHDR_TYPE_0": "syscall", + "syscall.IPV6_RXDSTOPTS": "syscall", + "syscall.IPV6_RXHOPOPTS": "syscall", + "syscall.IPV6_SOCKOPT_RESERVED1": "syscall", + "syscall.IPV6_TCLASS": "syscall", + "syscall.IPV6_UNICAST_HOPS": "syscall", + "syscall.IPV6_USE_MIN_MTU": "syscall", + "syscall.IPV6_V6ONLY": "syscall", + "syscall.IPV6_VERSION": "syscall", + "syscall.IPV6_VERSION_MASK": "syscall", + "syscall.IPV6_XFRM_POLICY": "syscall", + "syscall.IP_ADD_MEMBERSHIP": "syscall", + "syscall.IP_ADD_SOURCE_MEMBERSHIP": "syscall", + "syscall.IP_AUTH_LEVEL": "syscall", + "syscall.IP_BINDANY": "syscall", + "syscall.IP_BLOCK_SOURCE": "syscall", + "syscall.IP_BOUND_IF": "syscall", + "syscall.IP_DEFAULT_MULTICAST_LOOP": "syscall", + "syscall.IP_DEFAULT_MULTICAST_TTL": "syscall", + "syscall.IP_DF": "syscall", + "syscall.IP_DIVERTFL": "syscall", + "syscall.IP_DONTFRAG": "syscall", + "syscall.IP_DROP_MEMBERSHIP": "syscall", + "syscall.IP_DROP_SOURCE_MEMBERSHIP": "syscall", + "syscall.IP_DUMMYNET3": "syscall", + "syscall.IP_DUMMYNET_CONFIGURE": "syscall", + "syscall.IP_DUMMYNET_DEL": "syscall", + "syscall.IP_DUMMYNET_FLUSH": "syscall", + "syscall.IP_DUMMYNET_GET": "syscall", + "syscall.IP_EF": "syscall", + "syscall.IP_ERRORMTU": "syscall", + "syscall.IP_ESP_NETWORK_LEVEL": "syscall", + "syscall.IP_ESP_TRANS_LEVEL": "syscall", + "syscall.IP_FAITH": "syscall", + "syscall.IP_FREEBIND": "syscall", + "syscall.IP_FW3": "syscall", + "syscall.IP_FW_ADD": "syscall", + "syscall.IP_FW_DEL": "syscall", + "syscall.IP_FW_FLUSH": "syscall", + "syscall.IP_FW_GET": "syscall", + "syscall.IP_FW_NAT_CFG": "syscall", + "syscall.IP_FW_NAT_DEL": "syscall", + "syscall.IP_FW_NAT_GET_CONFIG": "syscall", + "syscall.IP_FW_NAT_GET_LOG": "syscall", + "syscall.IP_FW_RESETLOG": "syscall", + "syscall.IP_FW_TABLE_ADD": "syscall", + "syscall.IP_FW_TABLE_DEL": "syscall", + "syscall.IP_FW_TABLE_FLUSH": "syscall", + "syscall.IP_FW_TABLE_GETSIZE": "syscall", + "syscall.IP_FW_TABLE_LIST": "syscall", + "syscall.IP_FW_ZERO": "syscall", + "syscall.IP_HDRINCL": "syscall", + "syscall.IP_IPCOMP_LEVEL": "syscall", + "syscall.IP_IPSECFLOWINFO": "syscall", + "syscall.IP_IPSEC_LOCAL_AUTH": "syscall", + "syscall.IP_IPSEC_LOCAL_CRED": "syscall", + "syscall.IP_IPSEC_LOCAL_ID": "syscall", + "syscall.IP_IPSEC_POLICY": "syscall", + "syscall.IP_IPSEC_REMOTE_AUTH": "syscall", + "syscall.IP_IPSEC_REMOTE_CRED": "syscall", + "syscall.IP_IPSEC_REMOTE_ID": "syscall", + "syscall.IP_MAXPACKET": "syscall", + "syscall.IP_MAX_GROUP_SRC_FILTER": "syscall", + "syscall.IP_MAX_MEMBERSHIPS": "syscall", + "syscall.IP_MAX_SOCK_MUTE_FILTER": "syscall", + "syscall.IP_MAX_SOCK_SRC_FILTER": "syscall", + "syscall.IP_MAX_SOURCE_FILTER": "syscall", + "syscall.IP_MF": "syscall", + "syscall.IP_MINFRAGSIZE": "syscall", + "syscall.IP_MINTTL": "syscall", + "syscall.IP_MIN_MEMBERSHIPS": "syscall", + "syscall.IP_MSFILTER": "syscall", + "syscall.IP_MSS": "syscall", + "syscall.IP_MTU": "syscall", + "syscall.IP_MTU_DISCOVER": "syscall", + "syscall.IP_MULTICAST_IF": "syscall", + "syscall.IP_MULTICAST_IFINDEX": "syscall", + "syscall.IP_MULTICAST_LOOP": "syscall", + "syscall.IP_MULTICAST_TTL": "syscall", + "syscall.IP_MULTICAST_VIF": "syscall", + "syscall.IP_NAT__XXX": "syscall", + "syscall.IP_OFFMASK": "syscall", + "syscall.IP_OLD_FW_ADD": "syscall", + "syscall.IP_OLD_FW_DEL": "syscall", + "syscall.IP_OLD_FW_FLUSH": "syscall", + "syscall.IP_OLD_FW_GET": "syscall", + 
"syscall.IP_OLD_FW_RESETLOG": "syscall", + "syscall.IP_OLD_FW_ZERO": "syscall", + "syscall.IP_ONESBCAST": "syscall", + "syscall.IP_OPTIONS": "syscall", + "syscall.IP_ORIGDSTADDR": "syscall", + "syscall.IP_PASSSEC": "syscall", + "syscall.IP_PIPEX": "syscall", + "syscall.IP_PKTINFO": "syscall", + "syscall.IP_PKTOPTIONS": "syscall", + "syscall.IP_PMTUDISC": "syscall", + "syscall.IP_PMTUDISC_DO": "syscall", + "syscall.IP_PMTUDISC_DONT": "syscall", + "syscall.IP_PMTUDISC_PROBE": "syscall", + "syscall.IP_PMTUDISC_WANT": "syscall", + "syscall.IP_PORTRANGE": "syscall", + "syscall.IP_PORTRANGE_DEFAULT": "syscall", + "syscall.IP_PORTRANGE_HIGH": "syscall", + "syscall.IP_PORTRANGE_LOW": "syscall", + "syscall.IP_RECVDSTADDR": "syscall", + "syscall.IP_RECVDSTPORT": "syscall", + "syscall.IP_RECVERR": "syscall", + "syscall.IP_RECVIF": "syscall", + "syscall.IP_RECVOPTS": "syscall", + "syscall.IP_RECVORIGDSTADDR": "syscall", + "syscall.IP_RECVPKTINFO": "syscall", + "syscall.IP_RECVRETOPTS": "syscall", + "syscall.IP_RECVRTABLE": "syscall", + "syscall.IP_RECVTOS": "syscall", + "syscall.IP_RECVTTL": "syscall", + "syscall.IP_RETOPTS": "syscall", + "syscall.IP_RF": "syscall", + "syscall.IP_ROUTER_ALERT": "syscall", + "syscall.IP_RSVP_OFF": "syscall", + "syscall.IP_RSVP_ON": "syscall", + "syscall.IP_RSVP_VIF_OFF": "syscall", + "syscall.IP_RSVP_VIF_ON": "syscall", + "syscall.IP_RTABLE": "syscall", + "syscall.IP_SENDSRCADDR": "syscall", + "syscall.IP_STRIPHDR": "syscall", + "syscall.IP_TOS": "syscall", + "syscall.IP_TRAFFIC_MGT_BACKGROUND": "syscall", + "syscall.IP_TRANSPARENT": "syscall", + "syscall.IP_TTL": "syscall", + "syscall.IP_UNBLOCK_SOURCE": "syscall", + "syscall.IP_XFRM_POLICY": "syscall", + "syscall.IPv6MTUInfo": "syscall", + "syscall.IPv6Mreq": "syscall", + "syscall.ISIG": "syscall", + "syscall.ISTRIP": "syscall", + "syscall.IUCLC": "syscall", + "syscall.IUTF8": "syscall", + "syscall.IXANY": "syscall", + "syscall.IXOFF": "syscall", + "syscall.IXON": "syscall", + "syscall.IfAddrmsg": "syscall", + "syscall.IfAnnounceMsghdr": "syscall", + "syscall.IfData": "syscall", + "syscall.IfInfomsg": "syscall", + "syscall.IfMsghdr": "syscall", + "syscall.IfaMsghdr": "syscall", + "syscall.IfmaMsghdr": "syscall", + "syscall.IfmaMsghdr2": "syscall", + "syscall.ImplementsGetwd": "syscall", + "syscall.Inet4Pktinfo": "syscall", + "syscall.Inet6Pktinfo": "syscall", + "syscall.InotifyAddWatch": "syscall", + "syscall.InotifyEvent": "syscall", + "syscall.InotifyInit": "syscall", + "syscall.InotifyInit1": "syscall", + "syscall.InotifyRmWatch": "syscall", + "syscall.InterfaceAddrMessage": "syscall", + "syscall.InterfaceAnnounceMessage": "syscall", + "syscall.InterfaceInfo": "syscall", + "syscall.InterfaceMessage": "syscall", + "syscall.InterfaceMulticastAddrMessage": "syscall", + "syscall.InvalidHandle": "syscall", + "syscall.Ioperm": "syscall", + "syscall.Iopl": "syscall", + "syscall.Iovec": "syscall", + "syscall.IpAdapterInfo": "syscall", + "syscall.IpAddrString": "syscall", + "syscall.IpAddressString": "syscall", + "syscall.IpMaskString": "syscall", + "syscall.Issetugid": "syscall", + "syscall.KEY_ALL_ACCESS": "syscall", + "syscall.KEY_CREATE_LINK": "syscall", + "syscall.KEY_CREATE_SUB_KEY": "syscall", + "syscall.KEY_ENUMERATE_SUB_KEYS": "syscall", + "syscall.KEY_EXECUTE": "syscall", + "syscall.KEY_NOTIFY": "syscall", + "syscall.KEY_QUERY_VALUE": "syscall", + "syscall.KEY_READ": "syscall", + "syscall.KEY_SET_VALUE": "syscall", + "syscall.KEY_WOW64_32KEY": "syscall", + "syscall.KEY_WOW64_64KEY": "syscall", + 
"syscall.KEY_WRITE": "syscall", + "syscall.Kevent": "syscall", + "syscall.Kevent_t": "syscall", + "syscall.Kill": "syscall", + "syscall.Klogctl": "syscall", + "syscall.Kqueue": "syscall", + "syscall.LANG_ENGLISH": "syscall", + "syscall.LAYERED_PROTOCOL": "syscall", + "syscall.LCNT_OVERLOAD_FLUSH": "syscall", + "syscall.LINUX_REBOOT_CMD_CAD_OFF": "syscall", + "syscall.LINUX_REBOOT_CMD_CAD_ON": "syscall", + "syscall.LINUX_REBOOT_CMD_HALT": "syscall", + "syscall.LINUX_REBOOT_CMD_KEXEC": "syscall", + "syscall.LINUX_REBOOT_CMD_POWER_OFF": "syscall", + "syscall.LINUX_REBOOT_CMD_RESTART": "syscall", + "syscall.LINUX_REBOOT_CMD_RESTART2": "syscall", + "syscall.LINUX_REBOOT_CMD_SW_SUSPEND": "syscall", + "syscall.LINUX_REBOOT_MAGIC1": "syscall", + "syscall.LINUX_REBOOT_MAGIC2": "syscall", + "syscall.LOCK_EX": "syscall", + "syscall.LOCK_NB": "syscall", + "syscall.LOCK_SH": "syscall", + "syscall.LOCK_UN": "syscall", + "syscall.LazyDLL": "syscall", + "syscall.LazyProc": "syscall", + "syscall.Lchown": "syscall", + "syscall.Linger": "syscall", + "syscall.Link": "syscall", + "syscall.Listen": "syscall", + "syscall.Listxattr": "syscall", + "syscall.LoadCancelIoEx": "syscall", + "syscall.LoadConnectEx": "syscall", + "syscall.LoadCreateSymbolicLink": "syscall", + "syscall.LoadDLL": "syscall", + "syscall.LoadGetAddrInfo": "syscall", + "syscall.LoadLibrary": "syscall", + "syscall.LoadSetFileCompletionNotificationModes": "syscall", + "syscall.LocalFree": "syscall", + "syscall.Log2phys_t": "syscall", + "syscall.LookupAccountName": "syscall", + "syscall.LookupAccountSid": "syscall", + "syscall.LookupSID": "syscall", + "syscall.LsfJump": "syscall", + "syscall.LsfSocket": "syscall", + "syscall.LsfStmt": "syscall", + "syscall.Lstat": "syscall", + "syscall.MADV_AUTOSYNC": "syscall", + "syscall.MADV_CAN_REUSE": "syscall", + "syscall.MADV_CORE": "syscall", + "syscall.MADV_DOFORK": "syscall", + "syscall.MADV_DONTFORK": "syscall", + "syscall.MADV_DONTNEED": "syscall", + "syscall.MADV_FREE": "syscall", + "syscall.MADV_FREE_REUSABLE": "syscall", + "syscall.MADV_FREE_REUSE": "syscall", + "syscall.MADV_HUGEPAGE": "syscall", + "syscall.MADV_HWPOISON": "syscall", + "syscall.MADV_MERGEABLE": "syscall", + "syscall.MADV_NOCORE": "syscall", + "syscall.MADV_NOHUGEPAGE": "syscall", + "syscall.MADV_NORMAL": "syscall", + "syscall.MADV_NOSYNC": "syscall", + "syscall.MADV_PROTECT": "syscall", + "syscall.MADV_RANDOM": "syscall", + "syscall.MADV_REMOVE": "syscall", + "syscall.MADV_SEQUENTIAL": "syscall", + "syscall.MADV_SPACEAVAIL": "syscall", + "syscall.MADV_UNMERGEABLE": "syscall", + "syscall.MADV_WILLNEED": "syscall", + "syscall.MADV_ZERO_WIRED_PAGES": "syscall", + "syscall.MAP_32BIT": "syscall", + "syscall.MAP_ALIGNED_SUPER": "syscall", + "syscall.MAP_ALIGNMENT_16MB": "syscall", + "syscall.MAP_ALIGNMENT_1TB": "syscall", + "syscall.MAP_ALIGNMENT_256TB": "syscall", + "syscall.MAP_ALIGNMENT_4GB": "syscall", + "syscall.MAP_ALIGNMENT_64KB": "syscall", + "syscall.MAP_ALIGNMENT_64PB": "syscall", + "syscall.MAP_ALIGNMENT_MASK": "syscall", + "syscall.MAP_ALIGNMENT_SHIFT": "syscall", + "syscall.MAP_ANON": "syscall", + "syscall.MAP_ANONYMOUS": "syscall", + "syscall.MAP_COPY": "syscall", + "syscall.MAP_DENYWRITE": "syscall", + "syscall.MAP_EXECUTABLE": "syscall", + "syscall.MAP_FILE": "syscall", + "syscall.MAP_FIXED": "syscall", + "syscall.MAP_FLAGMASK": "syscall", + "syscall.MAP_GROWSDOWN": "syscall", + "syscall.MAP_HASSEMAPHORE": "syscall", + "syscall.MAP_HUGETLB": "syscall", + "syscall.MAP_INHERIT": "syscall", + "syscall.MAP_INHERIT_COPY": 
"syscall", + "syscall.MAP_INHERIT_DEFAULT": "syscall", + "syscall.MAP_INHERIT_DONATE_COPY": "syscall", + "syscall.MAP_INHERIT_NONE": "syscall", + "syscall.MAP_INHERIT_SHARE": "syscall", + "syscall.MAP_JIT": "syscall", + "syscall.MAP_LOCKED": "syscall", + "syscall.MAP_NOCACHE": "syscall", + "syscall.MAP_NOCORE": "syscall", + "syscall.MAP_NOEXTEND": "syscall", + "syscall.MAP_NONBLOCK": "syscall", + "syscall.MAP_NORESERVE": "syscall", + "syscall.MAP_NOSYNC": "syscall", + "syscall.MAP_POPULATE": "syscall", + "syscall.MAP_PREFAULT_READ": "syscall", + "syscall.MAP_PRIVATE": "syscall", + "syscall.MAP_RENAME": "syscall", + "syscall.MAP_RESERVED0080": "syscall", + "syscall.MAP_RESERVED0100": "syscall", + "syscall.MAP_SHARED": "syscall", + "syscall.MAP_STACK": "syscall", + "syscall.MAP_TRYFIXED": "syscall", + "syscall.MAP_TYPE": "syscall", + "syscall.MAP_WIRED": "syscall", + "syscall.MAXIMUM_REPARSE_DATA_BUFFER_SIZE": "syscall", + "syscall.MAXLEN_IFDESCR": "syscall", + "syscall.MAXLEN_PHYSADDR": "syscall", + "syscall.MAX_ADAPTER_ADDRESS_LENGTH": "syscall", + "syscall.MAX_ADAPTER_DESCRIPTION_LENGTH": "syscall", + "syscall.MAX_ADAPTER_NAME_LENGTH": "syscall", + "syscall.MAX_COMPUTERNAME_LENGTH": "syscall", + "syscall.MAX_INTERFACE_NAME_LEN": "syscall", + "syscall.MAX_LONG_PATH": "syscall", + "syscall.MAX_PATH": "syscall", + "syscall.MAX_PROTOCOL_CHAIN": "syscall", + "syscall.MCL_CURRENT": "syscall", + "syscall.MCL_FUTURE": "syscall", + "syscall.MNT_DETACH": "syscall", + "syscall.MNT_EXPIRE": "syscall", + "syscall.MNT_FORCE": "syscall", + "syscall.MSG_BCAST": "syscall", + "syscall.MSG_CMSG_CLOEXEC": "syscall", + "syscall.MSG_COMPAT": "syscall", + "syscall.MSG_CONFIRM": "syscall", + "syscall.MSG_CONTROLMBUF": "syscall", + "syscall.MSG_CTRUNC": "syscall", + "syscall.MSG_DONTROUTE": "syscall", + "syscall.MSG_DONTWAIT": "syscall", + "syscall.MSG_EOF": "syscall", + "syscall.MSG_EOR": "syscall", + "syscall.MSG_ERRQUEUE": "syscall", + "syscall.MSG_FASTOPEN": "syscall", + "syscall.MSG_FIN": "syscall", + "syscall.MSG_FLUSH": "syscall", + "syscall.MSG_HAVEMORE": "syscall", + "syscall.MSG_HOLD": "syscall", + "syscall.MSG_IOVUSRSPACE": "syscall", + "syscall.MSG_LENUSRSPACE": "syscall", + "syscall.MSG_MCAST": "syscall", + "syscall.MSG_MORE": "syscall", + "syscall.MSG_NAMEMBUF": "syscall", + "syscall.MSG_NBIO": "syscall", + "syscall.MSG_NEEDSA": "syscall", + "syscall.MSG_NOSIGNAL": "syscall", + "syscall.MSG_NOTIFICATION": "syscall", + "syscall.MSG_OOB": "syscall", + "syscall.MSG_PEEK": "syscall", + "syscall.MSG_PROXY": "syscall", + "syscall.MSG_RCVMORE": "syscall", + "syscall.MSG_RST": "syscall", + "syscall.MSG_SEND": "syscall", + "syscall.MSG_SYN": "syscall", + "syscall.MSG_TRUNC": "syscall", + "syscall.MSG_TRYHARD": "syscall", + "syscall.MSG_USERFLAGS": "syscall", + "syscall.MSG_WAITALL": "syscall", + "syscall.MSG_WAITFORONE": "syscall", + "syscall.MSG_WAITSTREAM": "syscall", + "syscall.MS_ACTIVE": "syscall", + "syscall.MS_ASYNC": "syscall", + "syscall.MS_BIND": "syscall", + "syscall.MS_DEACTIVATE": "syscall", + "syscall.MS_DIRSYNC": "syscall", + "syscall.MS_INVALIDATE": "syscall", + "syscall.MS_I_VERSION": "syscall", + "syscall.MS_KERNMOUNT": "syscall", + "syscall.MS_KILLPAGES": "syscall", + "syscall.MS_MANDLOCK": "syscall", + "syscall.MS_MGC_MSK": "syscall", + "syscall.MS_MGC_VAL": "syscall", + "syscall.MS_MOVE": "syscall", + "syscall.MS_NOATIME": "syscall", + "syscall.MS_NODEV": "syscall", + "syscall.MS_NODIRATIME": "syscall", + "syscall.MS_NOEXEC": "syscall", + "syscall.MS_NOSUID": "syscall", + 
"syscall.MS_NOUSER": "syscall", + "syscall.MS_POSIXACL": "syscall", + "syscall.MS_PRIVATE": "syscall", + "syscall.MS_RDONLY": "syscall", + "syscall.MS_REC": "syscall", + "syscall.MS_RELATIME": "syscall", + "syscall.MS_REMOUNT": "syscall", + "syscall.MS_RMT_MASK": "syscall", + "syscall.MS_SHARED": "syscall", + "syscall.MS_SILENT": "syscall", + "syscall.MS_SLAVE": "syscall", + "syscall.MS_STRICTATIME": "syscall", + "syscall.MS_SYNC": "syscall", + "syscall.MS_SYNCHRONOUS": "syscall", + "syscall.MS_UNBINDABLE": "syscall", + "syscall.Madvise": "syscall", + "syscall.MapViewOfFile": "syscall", + "syscall.MaxTokenInfoClass": "syscall", + "syscall.Mclpool": "syscall", + "syscall.MibIfRow": "syscall", + "syscall.Mkdir": "syscall", + "syscall.Mkdirat": "syscall", + "syscall.Mkfifo": "syscall", + "syscall.Mknod": "syscall", + "syscall.Mknodat": "syscall", + "syscall.Mlock": "syscall", + "syscall.Mlockall": "syscall", + "syscall.Mmap": "syscall", + "syscall.Mount": "syscall", + "syscall.MoveFile": "syscall", + "syscall.Mprotect": "syscall", + "syscall.Msghdr": "syscall", + "syscall.Munlock": "syscall", + "syscall.Munlockall": "syscall", + "syscall.Munmap": "syscall", + "syscall.MustLoadDLL": "syscall", + "syscall.NAME_MAX": "syscall", + "syscall.NETLINK_ADD_MEMBERSHIP": "syscall", + "syscall.NETLINK_AUDIT": "syscall", + "syscall.NETLINK_BROADCAST_ERROR": "syscall", + "syscall.NETLINK_CONNECTOR": "syscall", + "syscall.NETLINK_DNRTMSG": "syscall", + "syscall.NETLINK_DROP_MEMBERSHIP": "syscall", + "syscall.NETLINK_ECRYPTFS": "syscall", + "syscall.NETLINK_FIB_LOOKUP": "syscall", + "syscall.NETLINK_FIREWALL": "syscall", + "syscall.NETLINK_GENERIC": "syscall", + "syscall.NETLINK_INET_DIAG": "syscall", + "syscall.NETLINK_IP6_FW": "syscall", + "syscall.NETLINK_ISCSI": "syscall", + "syscall.NETLINK_KOBJECT_UEVENT": "syscall", + "syscall.NETLINK_NETFILTER": "syscall", + "syscall.NETLINK_NFLOG": "syscall", + "syscall.NETLINK_NO_ENOBUFS": "syscall", + "syscall.NETLINK_PKTINFO": "syscall", + "syscall.NETLINK_RDMA": "syscall", + "syscall.NETLINK_ROUTE": "syscall", + "syscall.NETLINK_SCSITRANSPORT": "syscall", + "syscall.NETLINK_SELINUX": "syscall", + "syscall.NETLINK_UNUSED": "syscall", + "syscall.NETLINK_USERSOCK": "syscall", + "syscall.NETLINK_XFRM": "syscall", + "syscall.NET_RT_DUMP": "syscall", + "syscall.NET_RT_DUMP2": "syscall", + "syscall.NET_RT_FLAGS": "syscall", + "syscall.NET_RT_IFLIST": "syscall", + "syscall.NET_RT_IFLIST2": "syscall", + "syscall.NET_RT_IFLISTL": "syscall", + "syscall.NET_RT_IFMALIST": "syscall", + "syscall.NET_RT_MAXID": "syscall", + "syscall.NET_RT_OIFLIST": "syscall", + "syscall.NET_RT_OOIFLIST": "syscall", + "syscall.NET_RT_STAT": "syscall", + "syscall.NET_RT_STATS": "syscall", + "syscall.NET_RT_TABLE": "syscall", + "syscall.NET_RT_TRASH": "syscall", + "syscall.NLA_ALIGNTO": "syscall", + "syscall.NLA_F_NESTED": "syscall", + "syscall.NLA_F_NET_BYTEORDER": "syscall", + "syscall.NLA_HDRLEN": "syscall", + "syscall.NLMSG_ALIGNTO": "syscall", + "syscall.NLMSG_DONE": "syscall", + "syscall.NLMSG_ERROR": "syscall", + "syscall.NLMSG_HDRLEN": "syscall", + "syscall.NLMSG_MIN_TYPE": "syscall", + "syscall.NLMSG_NOOP": "syscall", + "syscall.NLMSG_OVERRUN": "syscall", + "syscall.NLM_F_ACK": "syscall", + "syscall.NLM_F_APPEND": "syscall", + "syscall.NLM_F_ATOMIC": "syscall", + "syscall.NLM_F_CREATE": "syscall", + "syscall.NLM_F_DUMP": "syscall", + "syscall.NLM_F_ECHO": "syscall", + "syscall.NLM_F_EXCL": "syscall", + "syscall.NLM_F_MATCH": "syscall", + "syscall.NLM_F_MULTI": "syscall", + 
"syscall.NLM_F_REPLACE": "syscall", + "syscall.NLM_F_REQUEST": "syscall", + "syscall.NLM_F_ROOT": "syscall", + "syscall.NOFLSH": "syscall", + "syscall.NOTE_ABSOLUTE": "syscall", + "syscall.NOTE_ATTRIB": "syscall", + "syscall.NOTE_CHILD": "syscall", + "syscall.NOTE_DELETE": "syscall", + "syscall.NOTE_EOF": "syscall", + "syscall.NOTE_EXEC": "syscall", + "syscall.NOTE_EXIT": "syscall", + "syscall.NOTE_EXITSTATUS": "syscall", + "syscall.NOTE_EXTEND": "syscall", + "syscall.NOTE_FFAND": "syscall", + "syscall.NOTE_FFCOPY": "syscall", + "syscall.NOTE_FFCTRLMASK": "syscall", + "syscall.NOTE_FFLAGSMASK": "syscall", + "syscall.NOTE_FFNOP": "syscall", + "syscall.NOTE_FFOR": "syscall", + "syscall.NOTE_FORK": "syscall", + "syscall.NOTE_LINK": "syscall", + "syscall.NOTE_LOWAT": "syscall", + "syscall.NOTE_NONE": "syscall", + "syscall.NOTE_NSECONDS": "syscall", + "syscall.NOTE_PCTRLMASK": "syscall", + "syscall.NOTE_PDATAMASK": "syscall", + "syscall.NOTE_REAP": "syscall", + "syscall.NOTE_RENAME": "syscall", + "syscall.NOTE_RESOURCEEND": "syscall", + "syscall.NOTE_REVOKE": "syscall", + "syscall.NOTE_SECONDS": "syscall", + "syscall.NOTE_SIGNAL": "syscall", + "syscall.NOTE_TRACK": "syscall", + "syscall.NOTE_TRACKERR": "syscall", + "syscall.NOTE_TRIGGER": "syscall", + "syscall.NOTE_TRUNCATE": "syscall", + "syscall.NOTE_USECONDS": "syscall", + "syscall.NOTE_VM_ERROR": "syscall", + "syscall.NOTE_VM_PRESSURE": "syscall", + "syscall.NOTE_VM_PRESSURE_SUDDEN_TERMINATE": "syscall", + "syscall.NOTE_VM_PRESSURE_TERMINATE": "syscall", + "syscall.NOTE_WRITE": "syscall", + "syscall.NameCanonical": "syscall", + "syscall.NameCanonicalEx": "syscall", + "syscall.NameDisplay": "syscall", + "syscall.NameDnsDomain": "syscall", + "syscall.NameFullyQualifiedDN": "syscall", + "syscall.NameSamCompatible": "syscall", + "syscall.NameServicePrincipal": "syscall", + "syscall.NameUniqueId": "syscall", + "syscall.NameUnknown": "syscall", + "syscall.NameUserPrincipal": "syscall", + "syscall.Nanosleep": "syscall", + "syscall.NetApiBufferFree": "syscall", + "syscall.NetGetJoinInformation": "syscall", + "syscall.NetSetupDomainName": "syscall", + "syscall.NetSetupUnjoined": "syscall", + "syscall.NetSetupUnknownStatus": "syscall", + "syscall.NetSetupWorkgroupName": "syscall", + "syscall.NetUserGetInfo": "syscall", + "syscall.NetlinkMessage": "syscall", + "syscall.NetlinkRIB": "syscall", + "syscall.NetlinkRouteAttr": "syscall", + "syscall.NetlinkRouteRequest": "syscall", + "syscall.NewCallback": "syscall", + "syscall.NewCallbackCDecl": "syscall", + "syscall.NewLazyDLL": "syscall", + "syscall.NlAttr": "syscall", + "syscall.NlMsgerr": "syscall", + "syscall.NlMsghdr": "syscall", + "syscall.NsecToFiletime": "syscall", + "syscall.NsecToTimespec": "syscall", + "syscall.NsecToTimeval": "syscall", + "syscall.Ntohs": "syscall", + "syscall.OCRNL": "syscall", + "syscall.OFDEL": "syscall", + "syscall.OFILL": "syscall", + "syscall.OFIOGETBMAP": "syscall", + "syscall.OID_PKIX_KP_SERVER_AUTH": "syscall", + "syscall.OID_SERVER_GATED_CRYPTO": "syscall", + "syscall.OID_SGC_NETSCAPE": "syscall", + "syscall.OLCUC": "syscall", + "syscall.ONLCR": "syscall", + "syscall.ONLRET": "syscall", + "syscall.ONOCR": "syscall", + "syscall.ONOEOT": "syscall", + "syscall.OPEN_ALWAYS": "syscall", + "syscall.OPEN_EXISTING": "syscall", + "syscall.OPOST": "syscall", + "syscall.O_ACCMODE": "syscall", + "syscall.O_ALERT": "syscall", + "syscall.O_ALT_IO": "syscall", + "syscall.O_APPEND": "syscall", + "syscall.O_ASYNC": "syscall", + "syscall.O_CLOEXEC": "syscall", + "syscall.O_CREAT": 
"syscall", + "syscall.O_DIRECT": "syscall", + "syscall.O_DIRECTORY": "syscall", + "syscall.O_DSYNC": "syscall", + "syscall.O_EVTONLY": "syscall", + "syscall.O_EXCL": "syscall", + "syscall.O_EXEC": "syscall", + "syscall.O_EXLOCK": "syscall", + "syscall.O_FSYNC": "syscall", + "syscall.O_LARGEFILE": "syscall", + "syscall.O_NDELAY": "syscall", + "syscall.O_NOATIME": "syscall", + "syscall.O_NOCTTY": "syscall", + "syscall.O_NOFOLLOW": "syscall", + "syscall.O_NONBLOCK": "syscall", + "syscall.O_NOSIGPIPE": "syscall", + "syscall.O_POPUP": "syscall", + "syscall.O_RDONLY": "syscall", + "syscall.O_RDWR": "syscall", + "syscall.O_RSYNC": "syscall", + "syscall.O_SHLOCK": "syscall", + "syscall.O_SYMLINK": "syscall", + "syscall.O_SYNC": "syscall", + "syscall.O_TRUNC": "syscall", + "syscall.O_TTY_INIT": "syscall", + "syscall.O_WRONLY": "syscall", + "syscall.Open": "syscall", + "syscall.OpenCurrentProcessToken": "syscall", + "syscall.OpenProcess": "syscall", + "syscall.OpenProcessToken": "syscall", + "syscall.Openat": "syscall", + "syscall.Overlapped": "syscall", + "syscall.PACKET_ADD_MEMBERSHIP": "syscall", + "syscall.PACKET_BROADCAST": "syscall", + "syscall.PACKET_DROP_MEMBERSHIP": "syscall", + "syscall.PACKET_FASTROUTE": "syscall", + "syscall.PACKET_HOST": "syscall", + "syscall.PACKET_LOOPBACK": "syscall", + "syscall.PACKET_MR_ALLMULTI": "syscall", + "syscall.PACKET_MR_MULTICAST": "syscall", + "syscall.PACKET_MR_PROMISC": "syscall", + "syscall.PACKET_MULTICAST": "syscall", + "syscall.PACKET_OTHERHOST": "syscall", + "syscall.PACKET_OUTGOING": "syscall", + "syscall.PACKET_RECV_OUTPUT": "syscall", + "syscall.PACKET_RX_RING": "syscall", + "syscall.PACKET_STATISTICS": "syscall", + "syscall.PAGE_EXECUTE_READ": "syscall", + "syscall.PAGE_EXECUTE_READWRITE": "syscall", + "syscall.PAGE_EXECUTE_WRITECOPY": "syscall", + "syscall.PAGE_READONLY": "syscall", + "syscall.PAGE_READWRITE": "syscall", + "syscall.PAGE_WRITECOPY": "syscall", + "syscall.PARENB": "syscall", + "syscall.PARMRK": "syscall", + "syscall.PARODD": "syscall", + "syscall.PENDIN": "syscall", + "syscall.PFL_HIDDEN": "syscall", + "syscall.PFL_MATCHES_PROTOCOL_ZERO": "syscall", + "syscall.PFL_MULTIPLE_PROTO_ENTRIES": "syscall", + "syscall.PFL_NETWORKDIRECT_PROVIDER": "syscall", + "syscall.PFL_RECOMMENDED_PROTO_ENTRY": "syscall", + "syscall.PF_FLUSH": "syscall", + "syscall.PKCS_7_ASN_ENCODING": "syscall", + "syscall.PMC5_PIPELINE_FLUSH": "syscall", + "syscall.PRIO_PGRP": "syscall", + "syscall.PRIO_PROCESS": "syscall", + "syscall.PRIO_USER": "syscall", + "syscall.PRI_IOFLUSH": "syscall", + "syscall.PROCESS_QUERY_INFORMATION": "syscall", + "syscall.PROCESS_TERMINATE": "syscall", + "syscall.PROT_EXEC": "syscall", + "syscall.PROT_GROWSDOWN": "syscall", + "syscall.PROT_GROWSUP": "syscall", + "syscall.PROT_NONE": "syscall", + "syscall.PROT_READ": "syscall", + "syscall.PROT_WRITE": "syscall", + "syscall.PROV_DH_SCHANNEL": "syscall", + "syscall.PROV_DSS": "syscall", + "syscall.PROV_DSS_DH": "syscall", + "syscall.PROV_EC_ECDSA_FULL": "syscall", + "syscall.PROV_EC_ECDSA_SIG": "syscall", + "syscall.PROV_EC_ECNRA_FULL": "syscall", + "syscall.PROV_EC_ECNRA_SIG": "syscall", + "syscall.PROV_FORTEZZA": "syscall", + "syscall.PROV_INTEL_SEC": "syscall", + "syscall.PROV_MS_EXCHANGE": "syscall", + "syscall.PROV_REPLACE_OWF": "syscall", + "syscall.PROV_RNG": "syscall", + "syscall.PROV_RSA_AES": "syscall", + "syscall.PROV_RSA_FULL": "syscall", + "syscall.PROV_RSA_SCHANNEL": "syscall", + "syscall.PROV_RSA_SIG": "syscall", + "syscall.PROV_SPYRUS_LYNKS": "syscall", + 
"syscall.PROV_SSL": "syscall", + "syscall.PR_CAPBSET_DROP": "syscall", + "syscall.PR_CAPBSET_READ": "syscall", + "syscall.PR_CLEAR_SECCOMP_FILTER": "syscall", + "syscall.PR_ENDIAN_BIG": "syscall", + "syscall.PR_ENDIAN_LITTLE": "syscall", + "syscall.PR_ENDIAN_PPC_LITTLE": "syscall", + "syscall.PR_FPEMU_NOPRINT": "syscall", + "syscall.PR_FPEMU_SIGFPE": "syscall", + "syscall.PR_FP_EXC_ASYNC": "syscall", + "syscall.PR_FP_EXC_DISABLED": "syscall", + "syscall.PR_FP_EXC_DIV": "syscall", + "syscall.PR_FP_EXC_INV": "syscall", + "syscall.PR_FP_EXC_NONRECOV": "syscall", + "syscall.PR_FP_EXC_OVF": "syscall", + "syscall.PR_FP_EXC_PRECISE": "syscall", + "syscall.PR_FP_EXC_RES": "syscall", + "syscall.PR_FP_EXC_SW_ENABLE": "syscall", + "syscall.PR_FP_EXC_UND": "syscall", + "syscall.PR_GET_DUMPABLE": "syscall", + "syscall.PR_GET_ENDIAN": "syscall", + "syscall.PR_GET_FPEMU": "syscall", + "syscall.PR_GET_FPEXC": "syscall", + "syscall.PR_GET_KEEPCAPS": "syscall", + "syscall.PR_GET_NAME": "syscall", + "syscall.PR_GET_PDEATHSIG": "syscall", + "syscall.PR_GET_SECCOMP": "syscall", + "syscall.PR_GET_SECCOMP_FILTER": "syscall", + "syscall.PR_GET_SECUREBITS": "syscall", + "syscall.PR_GET_TIMERSLACK": "syscall", + "syscall.PR_GET_TIMING": "syscall", + "syscall.PR_GET_TSC": "syscall", + "syscall.PR_GET_UNALIGN": "syscall", + "syscall.PR_MCE_KILL": "syscall", + "syscall.PR_MCE_KILL_CLEAR": "syscall", + "syscall.PR_MCE_KILL_DEFAULT": "syscall", + "syscall.PR_MCE_KILL_EARLY": "syscall", + "syscall.PR_MCE_KILL_GET": "syscall", + "syscall.PR_MCE_KILL_LATE": "syscall", + "syscall.PR_MCE_KILL_SET": "syscall", + "syscall.PR_SECCOMP_FILTER_EVENT": "syscall", + "syscall.PR_SECCOMP_FILTER_SYSCALL": "syscall", + "syscall.PR_SET_DUMPABLE": "syscall", + "syscall.PR_SET_ENDIAN": "syscall", + "syscall.PR_SET_FPEMU": "syscall", + "syscall.PR_SET_FPEXC": "syscall", + "syscall.PR_SET_KEEPCAPS": "syscall", + "syscall.PR_SET_NAME": "syscall", + "syscall.PR_SET_PDEATHSIG": "syscall", + "syscall.PR_SET_PTRACER": "syscall", + "syscall.PR_SET_SECCOMP": "syscall", + "syscall.PR_SET_SECCOMP_FILTER": "syscall", + "syscall.PR_SET_SECUREBITS": "syscall", + "syscall.PR_SET_TIMERSLACK": "syscall", + "syscall.PR_SET_TIMING": "syscall", + "syscall.PR_SET_TSC": "syscall", + "syscall.PR_SET_UNALIGN": "syscall", + "syscall.PR_TASK_PERF_EVENTS_DISABLE": "syscall", + "syscall.PR_TASK_PERF_EVENTS_ENABLE": "syscall", + "syscall.PR_TIMING_STATISTICAL": "syscall", + "syscall.PR_TIMING_TIMESTAMP": "syscall", + "syscall.PR_TSC_ENABLE": "syscall", + "syscall.PR_TSC_SIGSEGV": "syscall", + "syscall.PR_UNALIGN_NOPRINT": "syscall", + "syscall.PR_UNALIGN_SIGBUS": "syscall", + "syscall.PTRACE_ARCH_PRCTL": "syscall", + "syscall.PTRACE_ATTACH": "syscall", + "syscall.PTRACE_CONT": "syscall", + "syscall.PTRACE_DETACH": "syscall", + "syscall.PTRACE_EVENT_CLONE": "syscall", + "syscall.PTRACE_EVENT_EXEC": "syscall", + "syscall.PTRACE_EVENT_EXIT": "syscall", + "syscall.PTRACE_EVENT_FORK": "syscall", + "syscall.PTRACE_EVENT_VFORK": "syscall", + "syscall.PTRACE_EVENT_VFORK_DONE": "syscall", + "syscall.PTRACE_GETCRUNCHREGS": "syscall", + "syscall.PTRACE_GETEVENTMSG": "syscall", + "syscall.PTRACE_GETFPREGS": "syscall", + "syscall.PTRACE_GETFPXREGS": "syscall", + "syscall.PTRACE_GETHBPREGS": "syscall", + "syscall.PTRACE_GETREGS": "syscall", + "syscall.PTRACE_GETREGSET": "syscall", + "syscall.PTRACE_GETSIGINFO": "syscall", + "syscall.PTRACE_GETVFPREGS": "syscall", + "syscall.PTRACE_GETWMMXREGS": "syscall", + "syscall.PTRACE_GET_THREAD_AREA": "syscall", + "syscall.PTRACE_KILL": 
"syscall", + "syscall.PTRACE_OLDSETOPTIONS": "syscall", + "syscall.PTRACE_O_MASK": "syscall", + "syscall.PTRACE_O_TRACECLONE": "syscall", + "syscall.PTRACE_O_TRACEEXEC": "syscall", + "syscall.PTRACE_O_TRACEEXIT": "syscall", + "syscall.PTRACE_O_TRACEFORK": "syscall", + "syscall.PTRACE_O_TRACESYSGOOD": "syscall", + "syscall.PTRACE_O_TRACEVFORK": "syscall", + "syscall.PTRACE_O_TRACEVFORKDONE": "syscall", + "syscall.PTRACE_PEEKDATA": "syscall", + "syscall.PTRACE_PEEKTEXT": "syscall", + "syscall.PTRACE_PEEKUSR": "syscall", + "syscall.PTRACE_POKEDATA": "syscall", + "syscall.PTRACE_POKETEXT": "syscall", + "syscall.PTRACE_POKEUSR": "syscall", + "syscall.PTRACE_SETCRUNCHREGS": "syscall", + "syscall.PTRACE_SETFPREGS": "syscall", + "syscall.PTRACE_SETFPXREGS": "syscall", + "syscall.PTRACE_SETHBPREGS": "syscall", + "syscall.PTRACE_SETOPTIONS": "syscall", + "syscall.PTRACE_SETREGS": "syscall", + "syscall.PTRACE_SETREGSET": "syscall", + "syscall.PTRACE_SETSIGINFO": "syscall", + "syscall.PTRACE_SETVFPREGS": "syscall", + "syscall.PTRACE_SETWMMXREGS": "syscall", + "syscall.PTRACE_SET_SYSCALL": "syscall", + "syscall.PTRACE_SET_THREAD_AREA": "syscall", + "syscall.PTRACE_SINGLEBLOCK": "syscall", + "syscall.PTRACE_SINGLESTEP": "syscall", + "syscall.PTRACE_SYSCALL": "syscall", + "syscall.PTRACE_SYSEMU": "syscall", + "syscall.PTRACE_SYSEMU_SINGLESTEP": "syscall", + "syscall.PTRACE_TRACEME": "syscall", + "syscall.PT_ATTACH": "syscall", + "syscall.PT_ATTACHEXC": "syscall", + "syscall.PT_CONTINUE": "syscall", + "syscall.PT_DATA_ADDR": "syscall", + "syscall.PT_DENY_ATTACH": "syscall", + "syscall.PT_DETACH": "syscall", + "syscall.PT_FIRSTMACH": "syscall", + "syscall.PT_FORCEQUOTA": "syscall", + "syscall.PT_KILL": "syscall", + "syscall.PT_MASK": "syscall", + "syscall.PT_READ_D": "syscall", + "syscall.PT_READ_I": "syscall", + "syscall.PT_READ_U": "syscall", + "syscall.PT_SIGEXC": "syscall", + "syscall.PT_STEP": "syscall", + "syscall.PT_TEXT_ADDR": "syscall", + "syscall.PT_TEXT_END_ADDR": "syscall", + "syscall.PT_THUPDATE": "syscall", + "syscall.PT_TRACE_ME": "syscall", + "syscall.PT_WRITE_D": "syscall", + "syscall.PT_WRITE_I": "syscall", + "syscall.PT_WRITE_U": "syscall", + "syscall.ParseDirent": "syscall", + "syscall.ParseNetlinkMessage": "syscall", + "syscall.ParseNetlinkRouteAttr": "syscall", + "syscall.ParseRoutingMessage": "syscall", + "syscall.ParseRoutingSockaddr": "syscall", + "syscall.ParseSocketControlMessage": "syscall", + "syscall.ParseUnixCredentials": "syscall", + "syscall.ParseUnixRights": "syscall", + "syscall.PathMax": "syscall", + "syscall.Pathconf": "syscall", + "syscall.Pause": "syscall", + "syscall.Pipe": "syscall", + "syscall.Pipe2": "syscall", + "syscall.PivotRoot": "syscall", + "syscall.PostQueuedCompletionStatus": "syscall", + "syscall.Pread": "syscall", + "syscall.Proc": "syscall", + "syscall.ProcAttr": "syscall", + "syscall.Process32First": "syscall", + "syscall.Process32Next": "syscall", + "syscall.ProcessEntry32": "syscall", + "syscall.ProcessInformation": "syscall", + "syscall.Protoent": "syscall", + "syscall.PtraceAttach": "syscall", + "syscall.PtraceCont": "syscall", + "syscall.PtraceDetach": "syscall", + "syscall.PtraceGetEventMsg": "syscall", + "syscall.PtraceGetRegs": "syscall", + "syscall.PtracePeekData": "syscall", + "syscall.PtracePeekText": "syscall", + "syscall.PtracePokeData": "syscall", + "syscall.PtracePokeText": "syscall", + "syscall.PtraceRegs": "syscall", + "syscall.PtraceSetOptions": "syscall", + "syscall.PtraceSetRegs": "syscall", + "syscall.PtraceSingleStep": "syscall", 
+ "syscall.PtraceSyscall": "syscall", + "syscall.Pwrite": "syscall", + "syscall.REG_BINARY": "syscall", + "syscall.REG_DWORD": "syscall", + "syscall.REG_DWORD_BIG_ENDIAN": "syscall", + "syscall.REG_DWORD_LITTLE_ENDIAN": "syscall", + "syscall.REG_EXPAND_SZ": "syscall", + "syscall.REG_FULL_RESOURCE_DESCRIPTOR": "syscall", + "syscall.REG_LINK": "syscall", + "syscall.REG_MULTI_SZ": "syscall", + "syscall.REG_NONE": "syscall", + "syscall.REG_QWORD": "syscall", + "syscall.REG_QWORD_LITTLE_ENDIAN": "syscall", + "syscall.REG_RESOURCE_LIST": "syscall", + "syscall.REG_RESOURCE_REQUIREMENTS_LIST": "syscall", + "syscall.REG_SZ": "syscall", + "syscall.RLIMIT_AS": "syscall", + "syscall.RLIMIT_CORE": "syscall", + "syscall.RLIMIT_CPU": "syscall", + "syscall.RLIMIT_DATA": "syscall", + "syscall.RLIMIT_FSIZE": "syscall", + "syscall.RLIMIT_NOFILE": "syscall", + "syscall.RLIMIT_STACK": "syscall", + "syscall.RLIM_INFINITY": "syscall", + "syscall.RTAX_ADVMSS": "syscall", + "syscall.RTAX_AUTHOR": "syscall", + "syscall.RTAX_BRD": "syscall", + "syscall.RTAX_CWND": "syscall", + "syscall.RTAX_DST": "syscall", + "syscall.RTAX_FEATURES": "syscall", + "syscall.RTAX_FEATURE_ALLFRAG": "syscall", + "syscall.RTAX_FEATURE_ECN": "syscall", + "syscall.RTAX_FEATURE_SACK": "syscall", + "syscall.RTAX_FEATURE_TIMESTAMP": "syscall", + "syscall.RTAX_GATEWAY": "syscall", + "syscall.RTAX_GENMASK": "syscall", + "syscall.RTAX_HOPLIMIT": "syscall", + "syscall.RTAX_IFA": "syscall", + "syscall.RTAX_IFP": "syscall", + "syscall.RTAX_INITCWND": "syscall", + "syscall.RTAX_INITRWND": "syscall", + "syscall.RTAX_LABEL": "syscall", + "syscall.RTAX_LOCK": "syscall", + "syscall.RTAX_MAX": "syscall", + "syscall.RTAX_MTU": "syscall", + "syscall.RTAX_NETMASK": "syscall", + "syscall.RTAX_REORDERING": "syscall", + "syscall.RTAX_RTO_MIN": "syscall", + "syscall.RTAX_RTT": "syscall", + "syscall.RTAX_RTTVAR": "syscall", + "syscall.RTAX_SRC": "syscall", + "syscall.RTAX_SRCMASK": "syscall", + "syscall.RTAX_SSTHRESH": "syscall", + "syscall.RTAX_TAG": "syscall", + "syscall.RTAX_UNSPEC": "syscall", + "syscall.RTAX_WINDOW": "syscall", + "syscall.RTA_ALIGNTO": "syscall", + "syscall.RTA_AUTHOR": "syscall", + "syscall.RTA_BRD": "syscall", + "syscall.RTA_CACHEINFO": "syscall", + "syscall.RTA_DST": "syscall", + "syscall.RTA_FLOW": "syscall", + "syscall.RTA_GATEWAY": "syscall", + "syscall.RTA_GENMASK": "syscall", + "syscall.RTA_IFA": "syscall", + "syscall.RTA_IFP": "syscall", + "syscall.RTA_IIF": "syscall", + "syscall.RTA_LABEL": "syscall", + "syscall.RTA_MAX": "syscall", + "syscall.RTA_METRICS": "syscall", + "syscall.RTA_MULTIPATH": "syscall", + "syscall.RTA_NETMASK": "syscall", + "syscall.RTA_OIF": "syscall", + "syscall.RTA_PREFSRC": "syscall", + "syscall.RTA_PRIORITY": "syscall", + "syscall.RTA_SRC": "syscall", + "syscall.RTA_SRCMASK": "syscall", + "syscall.RTA_TABLE": "syscall", + "syscall.RTA_TAG": "syscall", + "syscall.RTA_UNSPEC": "syscall", + "syscall.RTCF_DIRECTSRC": "syscall", + "syscall.RTCF_DOREDIRECT": "syscall", + "syscall.RTCF_LOG": "syscall", + "syscall.RTCF_MASQ": "syscall", + "syscall.RTCF_NAT": "syscall", + "syscall.RTCF_VALVE": "syscall", + "syscall.RTF_ADDRCLASSMASK": "syscall", + "syscall.RTF_ADDRCONF": "syscall", + "syscall.RTF_ALLONLINK": "syscall", + "syscall.RTF_ANNOUNCE": "syscall", + "syscall.RTF_BLACKHOLE": "syscall", + "syscall.RTF_BROADCAST": "syscall", + "syscall.RTF_CACHE": "syscall", + "syscall.RTF_CLONED": "syscall", + "syscall.RTF_CLONING": "syscall", + "syscall.RTF_CONDEMNED": "syscall", + "syscall.RTF_DEFAULT": "syscall", + 
"syscall.RTF_DELCLONE": "syscall", + "syscall.RTF_DONE": "syscall", + "syscall.RTF_DYNAMIC": "syscall", + "syscall.RTF_FLOW": "syscall", + "syscall.RTF_FMASK": "syscall", + "syscall.RTF_GATEWAY": "syscall", + "syscall.RTF_GWFLAG_COMPAT": "syscall", + "syscall.RTF_HOST": "syscall", + "syscall.RTF_IFREF": "syscall", + "syscall.RTF_IFSCOPE": "syscall", + "syscall.RTF_INTERFACE": "syscall", + "syscall.RTF_IRTT": "syscall", + "syscall.RTF_LINKRT": "syscall", + "syscall.RTF_LLDATA": "syscall", + "syscall.RTF_LLINFO": "syscall", + "syscall.RTF_LOCAL": "syscall", + "syscall.RTF_MASK": "syscall", + "syscall.RTF_MODIFIED": "syscall", + "syscall.RTF_MPATH": "syscall", + "syscall.RTF_MPLS": "syscall", + "syscall.RTF_MSS": "syscall", + "syscall.RTF_MTU": "syscall", + "syscall.RTF_MULTICAST": "syscall", + "syscall.RTF_NAT": "syscall", + "syscall.RTF_NOFORWARD": "syscall", + "syscall.RTF_NONEXTHOP": "syscall", + "syscall.RTF_NOPMTUDISC": "syscall", + "syscall.RTF_PERMANENT_ARP": "syscall", + "syscall.RTF_PINNED": "syscall", + "syscall.RTF_POLICY": "syscall", + "syscall.RTF_PRCLONING": "syscall", + "syscall.RTF_PROTO1": "syscall", + "syscall.RTF_PROTO2": "syscall", + "syscall.RTF_PROTO3": "syscall", + "syscall.RTF_REINSTATE": "syscall", + "syscall.RTF_REJECT": "syscall", + "syscall.RTF_RNH_LOCKED": "syscall", + "syscall.RTF_SOURCE": "syscall", + "syscall.RTF_SRC": "syscall", + "syscall.RTF_STATIC": "syscall", + "syscall.RTF_STICKY": "syscall", + "syscall.RTF_THROW": "syscall", + "syscall.RTF_TUNNEL": "syscall", + "syscall.RTF_UP": "syscall", + "syscall.RTF_USETRAILERS": "syscall", + "syscall.RTF_WASCLONED": "syscall", + "syscall.RTF_WINDOW": "syscall", + "syscall.RTF_XRESOLVE": "syscall", + "syscall.RTM_ADD": "syscall", + "syscall.RTM_BASE": "syscall", + "syscall.RTM_CHANGE": "syscall", + "syscall.RTM_CHGADDR": "syscall", + "syscall.RTM_DELACTION": "syscall", + "syscall.RTM_DELADDR": "syscall", + "syscall.RTM_DELADDRLABEL": "syscall", + "syscall.RTM_DELETE": "syscall", + "syscall.RTM_DELLINK": "syscall", + "syscall.RTM_DELMADDR": "syscall", + "syscall.RTM_DELNEIGH": "syscall", + "syscall.RTM_DELQDISC": "syscall", + "syscall.RTM_DELROUTE": "syscall", + "syscall.RTM_DELRULE": "syscall", + "syscall.RTM_DELTCLASS": "syscall", + "syscall.RTM_DELTFILTER": "syscall", + "syscall.RTM_DESYNC": "syscall", + "syscall.RTM_F_CLONED": "syscall", + "syscall.RTM_F_EQUALIZE": "syscall", + "syscall.RTM_F_NOTIFY": "syscall", + "syscall.RTM_F_PREFIX": "syscall", + "syscall.RTM_GET": "syscall", + "syscall.RTM_GET2": "syscall", + "syscall.RTM_GETACTION": "syscall", + "syscall.RTM_GETADDR": "syscall", + "syscall.RTM_GETADDRLABEL": "syscall", + "syscall.RTM_GETANYCAST": "syscall", + "syscall.RTM_GETDCB": "syscall", + "syscall.RTM_GETLINK": "syscall", + "syscall.RTM_GETMULTICAST": "syscall", + "syscall.RTM_GETNEIGH": "syscall", + "syscall.RTM_GETNEIGHTBL": "syscall", + "syscall.RTM_GETQDISC": "syscall", + "syscall.RTM_GETROUTE": "syscall", + "syscall.RTM_GETRULE": "syscall", + "syscall.RTM_GETTCLASS": "syscall", + "syscall.RTM_GETTFILTER": "syscall", + "syscall.RTM_IEEE80211": "syscall", + "syscall.RTM_IFANNOUNCE": "syscall", + "syscall.RTM_IFINFO": "syscall", + "syscall.RTM_IFINFO2": "syscall", + "syscall.RTM_LLINFO_UPD": "syscall", + "syscall.RTM_LOCK": "syscall", + "syscall.RTM_LOSING": "syscall", + "syscall.RTM_MAX": "syscall", + "syscall.RTM_MAXSIZE": "syscall", + "syscall.RTM_MISS": "syscall", + "syscall.RTM_NEWACTION": "syscall", + "syscall.RTM_NEWADDR": "syscall", + "syscall.RTM_NEWADDRLABEL": "syscall", + 
"syscall.RTM_NEWLINK": "syscall", + "syscall.RTM_NEWMADDR": "syscall", + "syscall.RTM_NEWMADDR2": "syscall", + "syscall.RTM_NEWNDUSEROPT": "syscall", + "syscall.RTM_NEWNEIGH": "syscall", + "syscall.RTM_NEWNEIGHTBL": "syscall", + "syscall.RTM_NEWPREFIX": "syscall", + "syscall.RTM_NEWQDISC": "syscall", + "syscall.RTM_NEWROUTE": "syscall", + "syscall.RTM_NEWRULE": "syscall", + "syscall.RTM_NEWTCLASS": "syscall", + "syscall.RTM_NEWTFILTER": "syscall", + "syscall.RTM_NR_FAMILIES": "syscall", + "syscall.RTM_NR_MSGTYPES": "syscall", + "syscall.RTM_OIFINFO": "syscall", + "syscall.RTM_OLDADD": "syscall", + "syscall.RTM_OLDDEL": "syscall", + "syscall.RTM_OOIFINFO": "syscall", + "syscall.RTM_REDIRECT": "syscall", + "syscall.RTM_RESOLVE": "syscall", + "syscall.RTM_RTTUNIT": "syscall", + "syscall.RTM_SETDCB": "syscall", + "syscall.RTM_SETGATE": "syscall", + "syscall.RTM_SETLINK": "syscall", + "syscall.RTM_SETNEIGHTBL": "syscall", + "syscall.RTM_VERSION": "syscall", + "syscall.RTNH_ALIGNTO": "syscall", + "syscall.RTNH_F_DEAD": "syscall", + "syscall.RTNH_F_ONLINK": "syscall", + "syscall.RTNH_F_PERVASIVE": "syscall", + "syscall.RTNLGRP_IPV4_IFADDR": "syscall", + "syscall.RTNLGRP_IPV4_MROUTE": "syscall", + "syscall.RTNLGRP_IPV4_ROUTE": "syscall", + "syscall.RTNLGRP_IPV4_RULE": "syscall", + "syscall.RTNLGRP_IPV6_IFADDR": "syscall", + "syscall.RTNLGRP_IPV6_IFINFO": "syscall", + "syscall.RTNLGRP_IPV6_MROUTE": "syscall", + "syscall.RTNLGRP_IPV6_PREFIX": "syscall", + "syscall.RTNLGRP_IPV6_ROUTE": "syscall", + "syscall.RTNLGRP_IPV6_RULE": "syscall", + "syscall.RTNLGRP_LINK": "syscall", + "syscall.RTNLGRP_ND_USEROPT": "syscall", + "syscall.RTNLGRP_NEIGH": "syscall", + "syscall.RTNLGRP_NONE": "syscall", + "syscall.RTNLGRP_NOTIFY": "syscall", + "syscall.RTNLGRP_TC": "syscall", + "syscall.RTN_ANYCAST": "syscall", + "syscall.RTN_BLACKHOLE": "syscall", + "syscall.RTN_BROADCAST": "syscall", + "syscall.RTN_LOCAL": "syscall", + "syscall.RTN_MAX": "syscall", + "syscall.RTN_MULTICAST": "syscall", + "syscall.RTN_NAT": "syscall", + "syscall.RTN_PROHIBIT": "syscall", + "syscall.RTN_THROW": "syscall", + "syscall.RTN_UNICAST": "syscall", + "syscall.RTN_UNREACHABLE": "syscall", + "syscall.RTN_UNSPEC": "syscall", + "syscall.RTN_XRESOLVE": "syscall", + "syscall.RTPROT_BIRD": "syscall", + "syscall.RTPROT_BOOT": "syscall", + "syscall.RTPROT_DHCP": "syscall", + "syscall.RTPROT_DNROUTED": "syscall", + "syscall.RTPROT_GATED": "syscall", + "syscall.RTPROT_KERNEL": "syscall", + "syscall.RTPROT_MRT": "syscall", + "syscall.RTPROT_NTK": "syscall", + "syscall.RTPROT_RA": "syscall", + "syscall.RTPROT_REDIRECT": "syscall", + "syscall.RTPROT_STATIC": "syscall", + "syscall.RTPROT_UNSPEC": "syscall", + "syscall.RTPROT_XORP": "syscall", + "syscall.RTPROT_ZEBRA": "syscall", + "syscall.RTV_EXPIRE": "syscall", + "syscall.RTV_HOPCOUNT": "syscall", + "syscall.RTV_MTU": "syscall", + "syscall.RTV_RPIPE": "syscall", + "syscall.RTV_RTT": "syscall", + "syscall.RTV_RTTVAR": "syscall", + "syscall.RTV_SPIPE": "syscall", + "syscall.RTV_SSTHRESH": "syscall", + "syscall.RTV_WEIGHT": "syscall", + "syscall.RT_CACHING_CONTEXT": "syscall", + "syscall.RT_CLASS_DEFAULT": "syscall", + "syscall.RT_CLASS_LOCAL": "syscall", + "syscall.RT_CLASS_MAIN": "syscall", + "syscall.RT_CLASS_MAX": "syscall", + "syscall.RT_CLASS_UNSPEC": "syscall", + "syscall.RT_DEFAULT_FIB": "syscall", + "syscall.RT_NORTREF": "syscall", + "syscall.RT_SCOPE_HOST": "syscall", + "syscall.RT_SCOPE_LINK": "syscall", + "syscall.RT_SCOPE_NOWHERE": "syscall", + "syscall.RT_SCOPE_SITE": "syscall", + 
"syscall.RT_SCOPE_UNIVERSE": "syscall", + "syscall.RT_TABLEID_MAX": "syscall", + "syscall.RT_TABLE_COMPAT": "syscall", + "syscall.RT_TABLE_DEFAULT": "syscall", + "syscall.RT_TABLE_LOCAL": "syscall", + "syscall.RT_TABLE_MAIN": "syscall", + "syscall.RT_TABLE_MAX": "syscall", + "syscall.RT_TABLE_UNSPEC": "syscall", + "syscall.RUSAGE_CHILDREN": "syscall", + "syscall.RUSAGE_SELF": "syscall", + "syscall.RUSAGE_THREAD": "syscall", + "syscall.Radvisory_t": "syscall", + "syscall.RawConn": "syscall", + "syscall.RawSockaddr": "syscall", + "syscall.RawSockaddrAny": "syscall", + "syscall.RawSockaddrDatalink": "syscall", + "syscall.RawSockaddrInet4": "syscall", + "syscall.RawSockaddrInet6": "syscall", + "syscall.RawSockaddrLinklayer": "syscall", + "syscall.RawSockaddrNetlink": "syscall", + "syscall.RawSockaddrUnix": "syscall", + "syscall.RawSyscall": "syscall", + "syscall.RawSyscall6": "syscall", + "syscall.Read": "syscall", + "syscall.ReadConsole": "syscall", + "syscall.ReadDirectoryChanges": "syscall", + "syscall.ReadDirent": "syscall", + "syscall.ReadFile": "syscall", + "syscall.Readlink": "syscall", + "syscall.Reboot": "syscall", + "syscall.Recvfrom": "syscall", + "syscall.Recvmsg": "syscall", + "syscall.RegCloseKey": "syscall", + "syscall.RegEnumKeyEx": "syscall", + "syscall.RegOpenKeyEx": "syscall", + "syscall.RegQueryInfoKey": "syscall", + "syscall.RegQueryValueEx": "syscall", + "syscall.RemoveDirectory": "syscall", + "syscall.Removexattr": "syscall", + "syscall.Rename": "syscall", + "syscall.Renameat": "syscall", + "syscall.Revoke": "syscall", + "syscall.Rlimit": "syscall", + "syscall.Rmdir": "syscall", + "syscall.RouteMessage": "syscall", + "syscall.RouteRIB": "syscall", + "syscall.RtAttr": "syscall", + "syscall.RtGenmsg": "syscall", + "syscall.RtMetrics": "syscall", + "syscall.RtMsg": "syscall", + "syscall.RtMsghdr": "syscall", + "syscall.RtNexthop": "syscall", + "syscall.Rusage": "syscall", + "syscall.SCM_BINTIME": "syscall", + "syscall.SCM_CREDENTIALS": "syscall", + "syscall.SCM_CREDS": "syscall", + "syscall.SCM_RIGHTS": "syscall", + "syscall.SCM_TIMESTAMP": "syscall", + "syscall.SCM_TIMESTAMPING": "syscall", + "syscall.SCM_TIMESTAMPNS": "syscall", + "syscall.SCM_TIMESTAMP_MONOTONIC": "syscall", + "syscall.SHUT_RD": "syscall", + "syscall.SHUT_RDWR": "syscall", + "syscall.SHUT_WR": "syscall", + "syscall.SID": "syscall", + "syscall.SIDAndAttributes": "syscall", + "syscall.SIGABRT": "syscall", + "syscall.SIGALRM": "syscall", + "syscall.SIGBUS": "syscall", + "syscall.SIGCHLD": "syscall", + "syscall.SIGCLD": "syscall", + "syscall.SIGCONT": "syscall", + "syscall.SIGEMT": "syscall", + "syscall.SIGFPE": "syscall", + "syscall.SIGHUP": "syscall", + "syscall.SIGILL": "syscall", + "syscall.SIGINFO": "syscall", + "syscall.SIGINT": "syscall", + "syscall.SIGIO": "syscall", + "syscall.SIGIOT": "syscall", + "syscall.SIGKILL": "syscall", + "syscall.SIGLIBRT": "syscall", + "syscall.SIGLWP": "syscall", + "syscall.SIGPIPE": "syscall", + "syscall.SIGPOLL": "syscall", + "syscall.SIGPROF": "syscall", + "syscall.SIGPWR": "syscall", + "syscall.SIGQUIT": "syscall", + "syscall.SIGSEGV": "syscall", + "syscall.SIGSTKFLT": "syscall", + "syscall.SIGSTOP": "syscall", + "syscall.SIGSYS": "syscall", + "syscall.SIGTERM": "syscall", + "syscall.SIGTHR": "syscall", + "syscall.SIGTRAP": "syscall", + "syscall.SIGTSTP": "syscall", + "syscall.SIGTTIN": "syscall", + "syscall.SIGTTOU": "syscall", + "syscall.SIGUNUSED": "syscall", + "syscall.SIGURG": "syscall", + "syscall.SIGUSR1": "syscall", + "syscall.SIGUSR2": "syscall", + 
"syscall.SIGVTALRM": "syscall", + "syscall.SIGWINCH": "syscall", + "syscall.SIGXCPU": "syscall", + "syscall.SIGXFSZ": "syscall", + "syscall.SIOCADDDLCI": "syscall", + "syscall.SIOCADDMULTI": "syscall", + "syscall.SIOCADDRT": "syscall", + "syscall.SIOCAIFADDR": "syscall", + "syscall.SIOCAIFGROUP": "syscall", + "syscall.SIOCALIFADDR": "syscall", + "syscall.SIOCARPIPLL": "syscall", + "syscall.SIOCATMARK": "syscall", + "syscall.SIOCAUTOADDR": "syscall", + "syscall.SIOCAUTONETMASK": "syscall", + "syscall.SIOCBRDGADD": "syscall", + "syscall.SIOCBRDGADDS": "syscall", + "syscall.SIOCBRDGARL": "syscall", + "syscall.SIOCBRDGDADDR": "syscall", + "syscall.SIOCBRDGDEL": "syscall", + "syscall.SIOCBRDGDELS": "syscall", + "syscall.SIOCBRDGFLUSH": "syscall", + "syscall.SIOCBRDGFRL": "syscall", + "syscall.SIOCBRDGGCACHE": "syscall", + "syscall.SIOCBRDGGFD": "syscall", + "syscall.SIOCBRDGGHT": "syscall", + "syscall.SIOCBRDGGIFFLGS": "syscall", + "syscall.SIOCBRDGGMA": "syscall", + "syscall.SIOCBRDGGPARAM": "syscall", + "syscall.SIOCBRDGGPRI": "syscall", + "syscall.SIOCBRDGGRL": "syscall", + "syscall.SIOCBRDGGSIFS": "syscall", + "syscall.SIOCBRDGGTO": "syscall", + "syscall.SIOCBRDGIFS": "syscall", + "syscall.SIOCBRDGRTS": "syscall", + "syscall.SIOCBRDGSADDR": "syscall", + "syscall.SIOCBRDGSCACHE": "syscall", + "syscall.SIOCBRDGSFD": "syscall", + "syscall.SIOCBRDGSHT": "syscall", + "syscall.SIOCBRDGSIFCOST": "syscall", + "syscall.SIOCBRDGSIFFLGS": "syscall", + "syscall.SIOCBRDGSIFPRIO": "syscall", + "syscall.SIOCBRDGSMA": "syscall", + "syscall.SIOCBRDGSPRI": "syscall", + "syscall.SIOCBRDGSPROTO": "syscall", + "syscall.SIOCBRDGSTO": "syscall", + "syscall.SIOCBRDGSTXHC": "syscall", + "syscall.SIOCDARP": "syscall", + "syscall.SIOCDELDLCI": "syscall", + "syscall.SIOCDELMULTI": "syscall", + "syscall.SIOCDELRT": "syscall", + "syscall.SIOCDEVPRIVATE": "syscall", + "syscall.SIOCDIFADDR": "syscall", + "syscall.SIOCDIFGROUP": "syscall", + "syscall.SIOCDIFPHYADDR": "syscall", + "syscall.SIOCDLIFADDR": "syscall", + "syscall.SIOCDRARP": "syscall", + "syscall.SIOCGARP": "syscall", + "syscall.SIOCGDRVSPEC": "syscall", + "syscall.SIOCGETKALIVE": "syscall", + "syscall.SIOCGETLABEL": "syscall", + "syscall.SIOCGETPFLOW": "syscall", + "syscall.SIOCGETPFSYNC": "syscall", + "syscall.SIOCGETSGCNT": "syscall", + "syscall.SIOCGETVIFCNT": "syscall", + "syscall.SIOCGETVLAN": "syscall", + "syscall.SIOCGHIWAT": "syscall", + "syscall.SIOCGIFADDR": "syscall", + "syscall.SIOCGIFADDRPREF": "syscall", + "syscall.SIOCGIFALIAS": "syscall", + "syscall.SIOCGIFALTMTU": "syscall", + "syscall.SIOCGIFASYNCMAP": "syscall", + "syscall.SIOCGIFBOND": "syscall", + "syscall.SIOCGIFBR": "syscall", + "syscall.SIOCGIFBRDADDR": "syscall", + "syscall.SIOCGIFCAP": "syscall", + "syscall.SIOCGIFCONF": "syscall", + "syscall.SIOCGIFCOUNT": "syscall", + "syscall.SIOCGIFDATA": "syscall", + "syscall.SIOCGIFDESCR": "syscall", + "syscall.SIOCGIFDEVMTU": "syscall", + "syscall.SIOCGIFDLT": "syscall", + "syscall.SIOCGIFDSTADDR": "syscall", + "syscall.SIOCGIFENCAP": "syscall", + "syscall.SIOCGIFFIB": "syscall", + "syscall.SIOCGIFFLAGS": "syscall", + "syscall.SIOCGIFGATTR": "syscall", + "syscall.SIOCGIFGENERIC": "syscall", + "syscall.SIOCGIFGMEMB": "syscall", + "syscall.SIOCGIFGROUP": "syscall", + "syscall.SIOCGIFHARDMTU": "syscall", + "syscall.SIOCGIFHWADDR": "syscall", + "syscall.SIOCGIFINDEX": "syscall", + "syscall.SIOCGIFKPI": "syscall", + "syscall.SIOCGIFMAC": "syscall", + "syscall.SIOCGIFMAP": "syscall", + "syscall.SIOCGIFMEDIA": "syscall", + "syscall.SIOCGIFMEM": 
"syscall", + "syscall.SIOCGIFMETRIC": "syscall", + "syscall.SIOCGIFMTU": "syscall", + "syscall.SIOCGIFNAME": "syscall", + "syscall.SIOCGIFNETMASK": "syscall", + "syscall.SIOCGIFPDSTADDR": "syscall", + "syscall.SIOCGIFPFLAGS": "syscall", + "syscall.SIOCGIFPHYS": "syscall", + "syscall.SIOCGIFPRIORITY": "syscall", + "syscall.SIOCGIFPSRCADDR": "syscall", + "syscall.SIOCGIFRDOMAIN": "syscall", + "syscall.SIOCGIFRTLABEL": "syscall", + "syscall.SIOCGIFSLAVE": "syscall", + "syscall.SIOCGIFSTATUS": "syscall", + "syscall.SIOCGIFTIMESLOT": "syscall", + "syscall.SIOCGIFTXQLEN": "syscall", + "syscall.SIOCGIFVLAN": "syscall", + "syscall.SIOCGIFWAKEFLAGS": "syscall", + "syscall.SIOCGIFXFLAGS": "syscall", + "syscall.SIOCGLIFADDR": "syscall", + "syscall.SIOCGLIFPHYADDR": "syscall", + "syscall.SIOCGLIFPHYRTABLE": "syscall", + "syscall.SIOCGLIFPHYTTL": "syscall", + "syscall.SIOCGLINKSTR": "syscall", + "syscall.SIOCGLOWAT": "syscall", + "syscall.SIOCGPGRP": "syscall", + "syscall.SIOCGPRIVATE_0": "syscall", + "syscall.SIOCGPRIVATE_1": "syscall", + "syscall.SIOCGRARP": "syscall", + "syscall.SIOCGSPPPPARAMS": "syscall", + "syscall.SIOCGSTAMP": "syscall", + "syscall.SIOCGSTAMPNS": "syscall", + "syscall.SIOCGVH": "syscall", + "syscall.SIOCGVNETID": "syscall", + "syscall.SIOCIFCREATE": "syscall", + "syscall.SIOCIFCREATE2": "syscall", + "syscall.SIOCIFDESTROY": "syscall", + "syscall.SIOCIFGCLONERS": "syscall", + "syscall.SIOCINITIFADDR": "syscall", + "syscall.SIOCPROTOPRIVATE": "syscall", + "syscall.SIOCRSLVMULTI": "syscall", + "syscall.SIOCRTMSG": "syscall", + "syscall.SIOCSARP": "syscall", + "syscall.SIOCSDRVSPEC": "syscall", + "syscall.SIOCSETKALIVE": "syscall", + "syscall.SIOCSETLABEL": "syscall", + "syscall.SIOCSETPFLOW": "syscall", + "syscall.SIOCSETPFSYNC": "syscall", + "syscall.SIOCSETVLAN": "syscall", + "syscall.SIOCSHIWAT": "syscall", + "syscall.SIOCSIFADDR": "syscall", + "syscall.SIOCSIFADDRPREF": "syscall", + "syscall.SIOCSIFALTMTU": "syscall", + "syscall.SIOCSIFASYNCMAP": "syscall", + "syscall.SIOCSIFBOND": "syscall", + "syscall.SIOCSIFBR": "syscall", + "syscall.SIOCSIFBRDADDR": "syscall", + "syscall.SIOCSIFCAP": "syscall", + "syscall.SIOCSIFDESCR": "syscall", + "syscall.SIOCSIFDSTADDR": "syscall", + "syscall.SIOCSIFENCAP": "syscall", + "syscall.SIOCSIFFIB": "syscall", + "syscall.SIOCSIFFLAGS": "syscall", + "syscall.SIOCSIFGATTR": "syscall", + "syscall.SIOCSIFGENERIC": "syscall", + "syscall.SIOCSIFHWADDR": "syscall", + "syscall.SIOCSIFHWBROADCAST": "syscall", + "syscall.SIOCSIFKPI": "syscall", + "syscall.SIOCSIFLINK": "syscall", + "syscall.SIOCSIFLLADDR": "syscall", + "syscall.SIOCSIFMAC": "syscall", + "syscall.SIOCSIFMAP": "syscall", + "syscall.SIOCSIFMEDIA": "syscall", + "syscall.SIOCSIFMEM": "syscall", + "syscall.SIOCSIFMETRIC": "syscall", + "syscall.SIOCSIFMTU": "syscall", + "syscall.SIOCSIFNAME": "syscall", + "syscall.SIOCSIFNETMASK": "syscall", + "syscall.SIOCSIFPFLAGS": "syscall", + "syscall.SIOCSIFPHYADDR": "syscall", + "syscall.SIOCSIFPHYS": "syscall", + "syscall.SIOCSIFPRIORITY": "syscall", + "syscall.SIOCSIFRDOMAIN": "syscall", + "syscall.SIOCSIFRTLABEL": "syscall", + "syscall.SIOCSIFRVNET": "syscall", + "syscall.SIOCSIFSLAVE": "syscall", + "syscall.SIOCSIFTIMESLOT": "syscall", + "syscall.SIOCSIFTXQLEN": "syscall", + "syscall.SIOCSIFVLAN": "syscall", + "syscall.SIOCSIFVNET": "syscall", + "syscall.SIOCSIFXFLAGS": "syscall", + "syscall.SIOCSLIFPHYADDR": "syscall", + "syscall.SIOCSLIFPHYRTABLE": "syscall", + "syscall.SIOCSLIFPHYTTL": "syscall", + "syscall.SIOCSLINKSTR": "syscall", + 
"syscall.SIOCSLOWAT": "syscall", + "syscall.SIOCSPGRP": "syscall", + "syscall.SIOCSRARP": "syscall", + "syscall.SIOCSSPPPPARAMS": "syscall", + "syscall.SIOCSVH": "syscall", + "syscall.SIOCSVNETID": "syscall", + "syscall.SIOCZIFDATA": "syscall", + "syscall.SIO_GET_EXTENSION_FUNCTION_POINTER": "syscall", + "syscall.SIO_GET_INTERFACE_LIST": "syscall", + "syscall.SIO_KEEPALIVE_VALS": "syscall", + "syscall.SIO_UDP_CONNRESET": "syscall", + "syscall.SOCK_CLOEXEC": "syscall", + "syscall.SOCK_DCCP": "syscall", + "syscall.SOCK_DGRAM": "syscall", + "syscall.SOCK_FLAGS_MASK": "syscall", + "syscall.SOCK_MAXADDRLEN": "syscall", + "syscall.SOCK_NONBLOCK": "syscall", + "syscall.SOCK_NOSIGPIPE": "syscall", + "syscall.SOCK_PACKET": "syscall", + "syscall.SOCK_RAW": "syscall", + "syscall.SOCK_RDM": "syscall", + "syscall.SOCK_SEQPACKET": "syscall", + "syscall.SOCK_STREAM": "syscall", + "syscall.SOL_AAL": "syscall", + "syscall.SOL_ATM": "syscall", + "syscall.SOL_DECNET": "syscall", + "syscall.SOL_ICMPV6": "syscall", + "syscall.SOL_IP": "syscall", + "syscall.SOL_IPV6": "syscall", + "syscall.SOL_IRDA": "syscall", + "syscall.SOL_PACKET": "syscall", + "syscall.SOL_RAW": "syscall", + "syscall.SOL_SOCKET": "syscall", + "syscall.SOL_TCP": "syscall", + "syscall.SOL_X25": "syscall", + "syscall.SOMAXCONN": "syscall", + "syscall.SO_ACCEPTCONN": "syscall", + "syscall.SO_ACCEPTFILTER": "syscall", + "syscall.SO_ATTACH_FILTER": "syscall", + "syscall.SO_BINDANY": "syscall", + "syscall.SO_BINDTODEVICE": "syscall", + "syscall.SO_BINTIME": "syscall", + "syscall.SO_BROADCAST": "syscall", + "syscall.SO_BSDCOMPAT": "syscall", + "syscall.SO_DEBUG": "syscall", + "syscall.SO_DETACH_FILTER": "syscall", + "syscall.SO_DOMAIN": "syscall", + "syscall.SO_DONTROUTE": "syscall", + "syscall.SO_DONTTRUNC": "syscall", + "syscall.SO_ERROR": "syscall", + "syscall.SO_KEEPALIVE": "syscall", + "syscall.SO_LABEL": "syscall", + "syscall.SO_LINGER": "syscall", + "syscall.SO_LINGER_SEC": "syscall", + "syscall.SO_LISTENINCQLEN": "syscall", + "syscall.SO_LISTENQLEN": "syscall", + "syscall.SO_LISTENQLIMIT": "syscall", + "syscall.SO_MARK": "syscall", + "syscall.SO_NETPROC": "syscall", + "syscall.SO_NKE": "syscall", + "syscall.SO_NOADDRERR": "syscall", + "syscall.SO_NOHEADER": "syscall", + "syscall.SO_NOSIGPIPE": "syscall", + "syscall.SO_NOTIFYCONFLICT": "syscall", + "syscall.SO_NO_CHECK": "syscall", + "syscall.SO_NO_DDP": "syscall", + "syscall.SO_NO_OFFLOAD": "syscall", + "syscall.SO_NP_EXTENSIONS": "syscall", + "syscall.SO_NREAD": "syscall", + "syscall.SO_NWRITE": "syscall", + "syscall.SO_OOBINLINE": "syscall", + "syscall.SO_OVERFLOWED": "syscall", + "syscall.SO_PASSCRED": "syscall", + "syscall.SO_PASSSEC": "syscall", + "syscall.SO_PEERCRED": "syscall", + "syscall.SO_PEERLABEL": "syscall", + "syscall.SO_PEERNAME": "syscall", + "syscall.SO_PEERSEC": "syscall", + "syscall.SO_PRIORITY": "syscall", + "syscall.SO_PROTOCOL": "syscall", + "syscall.SO_PROTOTYPE": "syscall", + "syscall.SO_RANDOMPORT": "syscall", + "syscall.SO_RCVBUF": "syscall", + "syscall.SO_RCVBUFFORCE": "syscall", + "syscall.SO_RCVLOWAT": "syscall", + "syscall.SO_RCVTIMEO": "syscall", + "syscall.SO_RESTRICTIONS": "syscall", + "syscall.SO_RESTRICT_DENYIN": "syscall", + "syscall.SO_RESTRICT_DENYOUT": "syscall", + "syscall.SO_RESTRICT_DENYSET": "syscall", + "syscall.SO_REUSEADDR": "syscall", + "syscall.SO_REUSEPORT": "syscall", + "syscall.SO_REUSESHAREUID": "syscall", + "syscall.SO_RTABLE": "syscall", + "syscall.SO_RXQ_OVFL": "syscall", + "syscall.SO_SECURITY_AUTHENTICATION": "syscall", + 
"syscall.SO_SECURITY_ENCRYPTION_NETWORK": "syscall", + "syscall.SO_SECURITY_ENCRYPTION_TRANSPORT": "syscall", + "syscall.SO_SETFIB": "syscall", + "syscall.SO_SNDBUF": "syscall", + "syscall.SO_SNDBUFFORCE": "syscall", + "syscall.SO_SNDLOWAT": "syscall", + "syscall.SO_SNDTIMEO": "syscall", + "syscall.SO_SPLICE": "syscall", + "syscall.SO_TIMESTAMP": "syscall", + "syscall.SO_TIMESTAMPING": "syscall", + "syscall.SO_TIMESTAMPNS": "syscall", + "syscall.SO_TIMESTAMP_MONOTONIC": "syscall", + "syscall.SO_TYPE": "syscall", + "syscall.SO_UPCALLCLOSEWAIT": "syscall", + "syscall.SO_UPDATE_ACCEPT_CONTEXT": "syscall", + "syscall.SO_UPDATE_CONNECT_CONTEXT": "syscall", + "syscall.SO_USELOOPBACK": "syscall", + "syscall.SO_USER_COOKIE": "syscall", + "syscall.SO_VENDOR": "syscall", + "syscall.SO_WANTMORE": "syscall", + "syscall.SO_WANTOOBFLAG": "syscall", + "syscall.SSLExtraCertChainPolicyPara": "syscall", + "syscall.STANDARD_RIGHTS_ALL": "syscall", + "syscall.STANDARD_RIGHTS_EXECUTE": "syscall", + "syscall.STANDARD_RIGHTS_READ": "syscall", + "syscall.STANDARD_RIGHTS_REQUIRED": "syscall", + "syscall.STANDARD_RIGHTS_WRITE": "syscall", + "syscall.STARTF_USESHOWWINDOW": "syscall", + "syscall.STARTF_USESTDHANDLES": "syscall", + "syscall.STD_ERROR_HANDLE": "syscall", + "syscall.STD_INPUT_HANDLE": "syscall", + "syscall.STD_OUTPUT_HANDLE": "syscall", + "syscall.SUBLANG_ENGLISH_US": "syscall", + "syscall.SW_FORCEMINIMIZE": "syscall", + "syscall.SW_HIDE": "syscall", + "syscall.SW_MAXIMIZE": "syscall", + "syscall.SW_MINIMIZE": "syscall", + "syscall.SW_NORMAL": "syscall", + "syscall.SW_RESTORE": "syscall", + "syscall.SW_SHOW": "syscall", + "syscall.SW_SHOWDEFAULT": "syscall", + "syscall.SW_SHOWMAXIMIZED": "syscall", + "syscall.SW_SHOWMINIMIZED": "syscall", + "syscall.SW_SHOWMINNOACTIVE": "syscall", + "syscall.SW_SHOWNA": "syscall", + "syscall.SW_SHOWNOACTIVATE": "syscall", + "syscall.SW_SHOWNORMAL": "syscall", + "syscall.SYMBOLIC_LINK_FLAG_DIRECTORY": "syscall", + "syscall.SYNCHRONIZE": "syscall", + "syscall.SYSCTL_VERSION": "syscall", + "syscall.SYSCTL_VERS_0": "syscall", + "syscall.SYSCTL_VERS_1": "syscall", + "syscall.SYSCTL_VERS_MASK": "syscall", + "syscall.SYS_ABORT2": "syscall", + "syscall.SYS_ACCEPT": "syscall", + "syscall.SYS_ACCEPT4": "syscall", + "syscall.SYS_ACCEPT_NOCANCEL": "syscall", + "syscall.SYS_ACCESS": "syscall", + "syscall.SYS_ACCESS_EXTENDED": "syscall", + "syscall.SYS_ACCT": "syscall", + "syscall.SYS_ADD_KEY": "syscall", + "syscall.SYS_ADD_PROFIL": "syscall", + "syscall.SYS_ADJFREQ": "syscall", + "syscall.SYS_ADJTIME": "syscall", + "syscall.SYS_ADJTIMEX": "syscall", + "syscall.SYS_AFS_SYSCALL": "syscall", + "syscall.SYS_AIO_CANCEL": "syscall", + "syscall.SYS_AIO_ERROR": "syscall", + "syscall.SYS_AIO_FSYNC": "syscall", + "syscall.SYS_AIO_READ": "syscall", + "syscall.SYS_AIO_RETURN": "syscall", + "syscall.SYS_AIO_SUSPEND": "syscall", + "syscall.SYS_AIO_SUSPEND_NOCANCEL": "syscall", + "syscall.SYS_AIO_WRITE": "syscall", + "syscall.SYS_ALARM": "syscall", + "syscall.SYS_ARCH_PRCTL": "syscall", + "syscall.SYS_ARM_FADVISE64_64": "syscall", + "syscall.SYS_ARM_SYNC_FILE_RANGE": "syscall", + "syscall.SYS_ATGETMSG": "syscall", + "syscall.SYS_ATPGETREQ": "syscall", + "syscall.SYS_ATPGETRSP": "syscall", + "syscall.SYS_ATPSNDREQ": "syscall", + "syscall.SYS_ATPSNDRSP": "syscall", + "syscall.SYS_ATPUTMSG": "syscall", + "syscall.SYS_ATSOCKET": "syscall", + "syscall.SYS_AUDIT": "syscall", + "syscall.SYS_AUDITCTL": "syscall", + "syscall.SYS_AUDITON": "syscall", + "syscall.SYS_AUDIT_SESSION_JOIN": "syscall", + 
"syscall.SYS_AUDIT_SESSION_PORT": "syscall", + "syscall.SYS_AUDIT_SESSION_SELF": "syscall", + "syscall.SYS_BDFLUSH": "syscall", + "syscall.SYS_BIND": "syscall", + "syscall.SYS_BINDAT": "syscall", + "syscall.SYS_BREAK": "syscall", + "syscall.SYS_BRK": "syscall", + "syscall.SYS_BSDTHREAD_CREATE": "syscall", + "syscall.SYS_BSDTHREAD_REGISTER": "syscall", + "syscall.SYS_BSDTHREAD_TERMINATE": "syscall", + "syscall.SYS_CAPGET": "syscall", + "syscall.SYS_CAPSET": "syscall", + "syscall.SYS_CAP_ENTER": "syscall", + "syscall.SYS_CAP_FCNTLS_GET": "syscall", + "syscall.SYS_CAP_FCNTLS_LIMIT": "syscall", + "syscall.SYS_CAP_GETMODE": "syscall", + "syscall.SYS_CAP_GETRIGHTS": "syscall", + "syscall.SYS_CAP_IOCTLS_GET": "syscall", + "syscall.SYS_CAP_IOCTLS_LIMIT": "syscall", + "syscall.SYS_CAP_NEW": "syscall", + "syscall.SYS_CAP_RIGHTS_GET": "syscall", + "syscall.SYS_CAP_RIGHTS_LIMIT": "syscall", + "syscall.SYS_CHDIR": "syscall", + "syscall.SYS_CHFLAGS": "syscall", + "syscall.SYS_CHFLAGSAT": "syscall", + "syscall.SYS_CHMOD": "syscall", + "syscall.SYS_CHMOD_EXTENDED": "syscall", + "syscall.SYS_CHOWN": "syscall", + "syscall.SYS_CHOWN32": "syscall", + "syscall.SYS_CHROOT": "syscall", + "syscall.SYS_CHUD": "syscall", + "syscall.SYS_CLOCK_ADJTIME": "syscall", + "syscall.SYS_CLOCK_GETCPUCLOCKID2": "syscall", + "syscall.SYS_CLOCK_GETRES": "syscall", + "syscall.SYS_CLOCK_GETTIME": "syscall", + "syscall.SYS_CLOCK_NANOSLEEP": "syscall", + "syscall.SYS_CLOCK_SETTIME": "syscall", + "syscall.SYS_CLONE": "syscall", + "syscall.SYS_CLOSE": "syscall", + "syscall.SYS_CLOSEFROM": "syscall", + "syscall.SYS_CLOSE_NOCANCEL": "syscall", + "syscall.SYS_CONNECT": "syscall", + "syscall.SYS_CONNECTAT": "syscall", + "syscall.SYS_CONNECT_NOCANCEL": "syscall", + "syscall.SYS_COPYFILE": "syscall", + "syscall.SYS_CPUSET": "syscall", + "syscall.SYS_CPUSET_GETAFFINITY": "syscall", + "syscall.SYS_CPUSET_GETID": "syscall", + "syscall.SYS_CPUSET_SETAFFINITY": "syscall", + "syscall.SYS_CPUSET_SETID": "syscall", + "syscall.SYS_CREAT": "syscall", + "syscall.SYS_CREATE_MODULE": "syscall", + "syscall.SYS_CSOPS": "syscall", + "syscall.SYS_DELETE": "syscall", + "syscall.SYS_DELETE_MODULE": "syscall", + "syscall.SYS_DUP": "syscall", + "syscall.SYS_DUP2": "syscall", + "syscall.SYS_DUP3": "syscall", + "syscall.SYS_EACCESS": "syscall", + "syscall.SYS_EPOLL_CREATE": "syscall", + "syscall.SYS_EPOLL_CREATE1": "syscall", + "syscall.SYS_EPOLL_CTL": "syscall", + "syscall.SYS_EPOLL_CTL_OLD": "syscall", + "syscall.SYS_EPOLL_PWAIT": "syscall", + "syscall.SYS_EPOLL_WAIT": "syscall", + "syscall.SYS_EPOLL_WAIT_OLD": "syscall", + "syscall.SYS_EVENTFD": "syscall", + "syscall.SYS_EVENTFD2": "syscall", + "syscall.SYS_EXCHANGEDATA": "syscall", + "syscall.SYS_EXECVE": "syscall", + "syscall.SYS_EXIT": "syscall", + "syscall.SYS_EXIT_GROUP": "syscall", + "syscall.SYS_EXTATTRCTL": "syscall", + "syscall.SYS_EXTATTR_DELETE_FD": "syscall", + "syscall.SYS_EXTATTR_DELETE_FILE": "syscall", + "syscall.SYS_EXTATTR_DELETE_LINK": "syscall", + "syscall.SYS_EXTATTR_GET_FD": "syscall", + "syscall.SYS_EXTATTR_GET_FILE": "syscall", + "syscall.SYS_EXTATTR_GET_LINK": "syscall", + "syscall.SYS_EXTATTR_LIST_FD": "syscall", + "syscall.SYS_EXTATTR_LIST_FILE": "syscall", + "syscall.SYS_EXTATTR_LIST_LINK": "syscall", + "syscall.SYS_EXTATTR_SET_FD": "syscall", + "syscall.SYS_EXTATTR_SET_FILE": "syscall", + "syscall.SYS_EXTATTR_SET_LINK": "syscall", + "syscall.SYS_FACCESSAT": "syscall", + "syscall.SYS_FADVISE64": "syscall", + "syscall.SYS_FADVISE64_64": "syscall", + "syscall.SYS_FALLOCATE": 
"syscall", + "syscall.SYS_FANOTIFY_INIT": "syscall", + "syscall.SYS_FANOTIFY_MARK": "syscall", + "syscall.SYS_FCHDIR": "syscall", + "syscall.SYS_FCHFLAGS": "syscall", + "syscall.SYS_FCHMOD": "syscall", + "syscall.SYS_FCHMODAT": "syscall", + "syscall.SYS_FCHMOD_EXTENDED": "syscall", + "syscall.SYS_FCHOWN": "syscall", + "syscall.SYS_FCHOWN32": "syscall", + "syscall.SYS_FCHOWNAT": "syscall", + "syscall.SYS_FCHROOT": "syscall", + "syscall.SYS_FCNTL": "syscall", + "syscall.SYS_FCNTL64": "syscall", + "syscall.SYS_FCNTL_NOCANCEL": "syscall", + "syscall.SYS_FDATASYNC": "syscall", + "syscall.SYS_FEXECVE": "syscall", + "syscall.SYS_FFCLOCK_GETCOUNTER": "syscall", + "syscall.SYS_FFCLOCK_GETESTIMATE": "syscall", + "syscall.SYS_FFCLOCK_SETESTIMATE": "syscall", + "syscall.SYS_FFSCTL": "syscall", + "syscall.SYS_FGETATTRLIST": "syscall", + "syscall.SYS_FGETXATTR": "syscall", + "syscall.SYS_FHOPEN": "syscall", + "syscall.SYS_FHSTAT": "syscall", + "syscall.SYS_FHSTATFS": "syscall", + "syscall.SYS_FILEPORT_MAKEFD": "syscall", + "syscall.SYS_FILEPORT_MAKEPORT": "syscall", + "syscall.SYS_FKTRACE": "syscall", + "syscall.SYS_FLISTXATTR": "syscall", + "syscall.SYS_FLOCK": "syscall", + "syscall.SYS_FORK": "syscall", + "syscall.SYS_FPATHCONF": "syscall", + "syscall.SYS_FREEBSD6_FTRUNCATE": "syscall", + "syscall.SYS_FREEBSD6_LSEEK": "syscall", + "syscall.SYS_FREEBSD6_MMAP": "syscall", + "syscall.SYS_FREEBSD6_PREAD": "syscall", + "syscall.SYS_FREEBSD6_PWRITE": "syscall", + "syscall.SYS_FREEBSD6_TRUNCATE": "syscall", + "syscall.SYS_FREMOVEXATTR": "syscall", + "syscall.SYS_FSCTL": "syscall", + "syscall.SYS_FSETATTRLIST": "syscall", + "syscall.SYS_FSETXATTR": "syscall", + "syscall.SYS_FSGETPATH": "syscall", + "syscall.SYS_FSTAT": "syscall", + "syscall.SYS_FSTAT64": "syscall", + "syscall.SYS_FSTAT64_EXTENDED": "syscall", + "syscall.SYS_FSTATAT": "syscall", + "syscall.SYS_FSTATAT64": "syscall", + "syscall.SYS_FSTATFS": "syscall", + "syscall.SYS_FSTATFS64": "syscall", + "syscall.SYS_FSTATV": "syscall", + "syscall.SYS_FSTATVFS1": "syscall", + "syscall.SYS_FSTAT_EXTENDED": "syscall", + "syscall.SYS_FSYNC": "syscall", + "syscall.SYS_FSYNC_NOCANCEL": "syscall", + "syscall.SYS_FSYNC_RANGE": "syscall", + "syscall.SYS_FTIME": "syscall", + "syscall.SYS_FTRUNCATE": "syscall", + "syscall.SYS_FTRUNCATE64": "syscall", + "syscall.SYS_FUTEX": "syscall", + "syscall.SYS_FUTIMENS": "syscall", + "syscall.SYS_FUTIMES": "syscall", + "syscall.SYS_FUTIMESAT": "syscall", + "syscall.SYS_GETATTRLIST": "syscall", + "syscall.SYS_GETAUDIT": "syscall", + "syscall.SYS_GETAUDIT_ADDR": "syscall", + "syscall.SYS_GETAUID": "syscall", + "syscall.SYS_GETCONTEXT": "syscall", + "syscall.SYS_GETCPU": "syscall", + "syscall.SYS_GETCWD": "syscall", + "syscall.SYS_GETDENTS": "syscall", + "syscall.SYS_GETDENTS64": "syscall", + "syscall.SYS_GETDIRENTRIES": "syscall", + "syscall.SYS_GETDIRENTRIES64": "syscall", + "syscall.SYS_GETDIRENTRIESATTR": "syscall", + "syscall.SYS_GETDTABLECOUNT": "syscall", + "syscall.SYS_GETDTABLESIZE": "syscall", + "syscall.SYS_GETEGID": "syscall", + "syscall.SYS_GETEGID32": "syscall", + "syscall.SYS_GETEUID": "syscall", + "syscall.SYS_GETEUID32": "syscall", + "syscall.SYS_GETFH": "syscall", + "syscall.SYS_GETFSSTAT": "syscall", + "syscall.SYS_GETFSSTAT64": "syscall", + "syscall.SYS_GETGID": "syscall", + "syscall.SYS_GETGID32": "syscall", + "syscall.SYS_GETGROUPS": "syscall", + "syscall.SYS_GETGROUPS32": "syscall", + "syscall.SYS_GETHOSTUUID": "syscall", + "syscall.SYS_GETITIMER": "syscall", + "syscall.SYS_GETLCID": "syscall", + 
"syscall.SYS_GETLOGIN": "syscall", + "syscall.SYS_GETLOGINCLASS": "syscall", + "syscall.SYS_GETPEERNAME": "syscall", + "syscall.SYS_GETPGID": "syscall", + "syscall.SYS_GETPGRP": "syscall", + "syscall.SYS_GETPID": "syscall", + "syscall.SYS_GETPMSG": "syscall", + "syscall.SYS_GETPPID": "syscall", + "syscall.SYS_GETPRIORITY": "syscall", + "syscall.SYS_GETRESGID": "syscall", + "syscall.SYS_GETRESGID32": "syscall", + "syscall.SYS_GETRESUID": "syscall", + "syscall.SYS_GETRESUID32": "syscall", + "syscall.SYS_GETRLIMIT": "syscall", + "syscall.SYS_GETRTABLE": "syscall", + "syscall.SYS_GETRUSAGE": "syscall", + "syscall.SYS_GETSGROUPS": "syscall", + "syscall.SYS_GETSID": "syscall", + "syscall.SYS_GETSOCKNAME": "syscall", + "syscall.SYS_GETSOCKOPT": "syscall", + "syscall.SYS_GETTHRID": "syscall", + "syscall.SYS_GETTID": "syscall", + "syscall.SYS_GETTIMEOFDAY": "syscall", + "syscall.SYS_GETUID": "syscall", + "syscall.SYS_GETUID32": "syscall", + "syscall.SYS_GETVFSSTAT": "syscall", + "syscall.SYS_GETWGROUPS": "syscall", + "syscall.SYS_GETXATTR": "syscall", + "syscall.SYS_GET_KERNEL_SYMS": "syscall", + "syscall.SYS_GET_MEMPOLICY": "syscall", + "syscall.SYS_GET_ROBUST_LIST": "syscall", + "syscall.SYS_GET_THREAD_AREA": "syscall", + "syscall.SYS_GTTY": "syscall", + "syscall.SYS_IDENTITYSVC": "syscall", + "syscall.SYS_IDLE": "syscall", + "syscall.SYS_INITGROUPS": "syscall", + "syscall.SYS_INIT_MODULE": "syscall", + "syscall.SYS_INOTIFY_ADD_WATCH": "syscall", + "syscall.SYS_INOTIFY_INIT": "syscall", + "syscall.SYS_INOTIFY_INIT1": "syscall", + "syscall.SYS_INOTIFY_RM_WATCH": "syscall", + "syscall.SYS_IOCTL": "syscall", + "syscall.SYS_IOPERM": "syscall", + "syscall.SYS_IOPL": "syscall", + "syscall.SYS_IOPOLICYSYS": "syscall", + "syscall.SYS_IOPRIO_GET": "syscall", + "syscall.SYS_IOPRIO_SET": "syscall", + "syscall.SYS_IO_CANCEL": "syscall", + "syscall.SYS_IO_DESTROY": "syscall", + "syscall.SYS_IO_GETEVENTS": "syscall", + "syscall.SYS_IO_SETUP": "syscall", + "syscall.SYS_IO_SUBMIT": "syscall", + "syscall.SYS_IPC": "syscall", + "syscall.SYS_ISSETUGID": "syscall", + "syscall.SYS_JAIL": "syscall", + "syscall.SYS_JAIL_ATTACH": "syscall", + "syscall.SYS_JAIL_GET": "syscall", + "syscall.SYS_JAIL_REMOVE": "syscall", + "syscall.SYS_JAIL_SET": "syscall", + "syscall.SYS_KDEBUG_TRACE": "syscall", + "syscall.SYS_KENV": "syscall", + "syscall.SYS_KEVENT": "syscall", + "syscall.SYS_KEVENT64": "syscall", + "syscall.SYS_KEXEC_LOAD": "syscall", + "syscall.SYS_KEYCTL": "syscall", + "syscall.SYS_KILL": "syscall", + "syscall.SYS_KLDFIND": "syscall", + "syscall.SYS_KLDFIRSTMOD": "syscall", + "syscall.SYS_KLDLOAD": "syscall", + "syscall.SYS_KLDNEXT": "syscall", + "syscall.SYS_KLDSTAT": "syscall", + "syscall.SYS_KLDSYM": "syscall", + "syscall.SYS_KLDUNLOAD": "syscall", + "syscall.SYS_KLDUNLOADF": "syscall", + "syscall.SYS_KQUEUE": "syscall", + "syscall.SYS_KQUEUE1": "syscall", + "syscall.SYS_KTIMER_CREATE": "syscall", + "syscall.SYS_KTIMER_DELETE": "syscall", + "syscall.SYS_KTIMER_GETOVERRUN": "syscall", + "syscall.SYS_KTIMER_GETTIME": "syscall", + "syscall.SYS_KTIMER_SETTIME": "syscall", + "syscall.SYS_KTRACE": "syscall", + "syscall.SYS_LCHFLAGS": "syscall", + "syscall.SYS_LCHMOD": "syscall", + "syscall.SYS_LCHOWN": "syscall", + "syscall.SYS_LCHOWN32": "syscall", + "syscall.SYS_LGETFH": "syscall", + "syscall.SYS_LGETXATTR": "syscall", + "syscall.SYS_LINK": "syscall", + "syscall.SYS_LINKAT": "syscall", + "syscall.SYS_LIO_LISTIO": "syscall", + "syscall.SYS_LISTEN": "syscall", + "syscall.SYS_LISTXATTR": "syscall", + 
"syscall.SYS_LLISTXATTR": "syscall", + "syscall.SYS_LOCK": "syscall", + "syscall.SYS_LOOKUP_DCOOKIE": "syscall", + "syscall.SYS_LPATHCONF": "syscall", + "syscall.SYS_LREMOVEXATTR": "syscall", + "syscall.SYS_LSEEK": "syscall", + "syscall.SYS_LSETXATTR": "syscall", + "syscall.SYS_LSTAT": "syscall", + "syscall.SYS_LSTAT64": "syscall", + "syscall.SYS_LSTAT64_EXTENDED": "syscall", + "syscall.SYS_LSTATV": "syscall", + "syscall.SYS_LSTAT_EXTENDED": "syscall", + "syscall.SYS_LUTIMES": "syscall", + "syscall.SYS_MAC_SYSCALL": "syscall", + "syscall.SYS_MADVISE": "syscall", + "syscall.SYS_MADVISE1": "syscall", + "syscall.SYS_MAXSYSCALL": "syscall", + "syscall.SYS_MBIND": "syscall", + "syscall.SYS_MIGRATE_PAGES": "syscall", + "syscall.SYS_MINCORE": "syscall", + "syscall.SYS_MINHERIT": "syscall", + "syscall.SYS_MKCOMPLEX": "syscall", + "syscall.SYS_MKDIR": "syscall", + "syscall.SYS_MKDIRAT": "syscall", + "syscall.SYS_MKDIR_EXTENDED": "syscall", + "syscall.SYS_MKFIFO": "syscall", + "syscall.SYS_MKFIFOAT": "syscall", + "syscall.SYS_MKFIFO_EXTENDED": "syscall", + "syscall.SYS_MKNOD": "syscall", + "syscall.SYS_MKNODAT": "syscall", + "syscall.SYS_MLOCK": "syscall", + "syscall.SYS_MLOCKALL": "syscall", + "syscall.SYS_MMAP": "syscall", + "syscall.SYS_MMAP2": "syscall", + "syscall.SYS_MODCTL": "syscall", + "syscall.SYS_MODFIND": "syscall", + "syscall.SYS_MODFNEXT": "syscall", + "syscall.SYS_MODIFY_LDT": "syscall", + "syscall.SYS_MODNEXT": "syscall", + "syscall.SYS_MODSTAT": "syscall", + "syscall.SYS_MODWATCH": "syscall", + "syscall.SYS_MOUNT": "syscall", + "syscall.SYS_MOVE_PAGES": "syscall", + "syscall.SYS_MPROTECT": "syscall", + "syscall.SYS_MPX": "syscall", + "syscall.SYS_MQUERY": "syscall", + "syscall.SYS_MQ_GETSETATTR": "syscall", + "syscall.SYS_MQ_NOTIFY": "syscall", + "syscall.SYS_MQ_OPEN": "syscall", + "syscall.SYS_MQ_TIMEDRECEIVE": "syscall", + "syscall.SYS_MQ_TIMEDSEND": "syscall", + "syscall.SYS_MQ_UNLINK": "syscall", + "syscall.SYS_MREMAP": "syscall", + "syscall.SYS_MSGCTL": "syscall", + "syscall.SYS_MSGGET": "syscall", + "syscall.SYS_MSGRCV": "syscall", + "syscall.SYS_MSGRCV_NOCANCEL": "syscall", + "syscall.SYS_MSGSND": "syscall", + "syscall.SYS_MSGSND_NOCANCEL": "syscall", + "syscall.SYS_MSGSYS": "syscall", + "syscall.SYS_MSYNC": "syscall", + "syscall.SYS_MSYNC_NOCANCEL": "syscall", + "syscall.SYS_MUNLOCK": "syscall", + "syscall.SYS_MUNLOCKALL": "syscall", + "syscall.SYS_MUNMAP": "syscall", + "syscall.SYS_NAME_TO_HANDLE_AT": "syscall", + "syscall.SYS_NANOSLEEP": "syscall", + "syscall.SYS_NEWFSTATAT": "syscall", + "syscall.SYS_NFSCLNT": "syscall", + "syscall.SYS_NFSSERVCTL": "syscall", + "syscall.SYS_NFSSVC": "syscall", + "syscall.SYS_NFSTAT": "syscall", + "syscall.SYS_NICE": "syscall", + "syscall.SYS_NLSTAT": "syscall", + "syscall.SYS_NMOUNT": "syscall", + "syscall.SYS_NSTAT": "syscall", + "syscall.SYS_NTP_ADJTIME": "syscall", + "syscall.SYS_NTP_GETTIME": "syscall", + "syscall.SYS_OABI_SYSCALL_BASE": "syscall", + "syscall.SYS_OBREAK": "syscall", + "syscall.SYS_OLDFSTAT": "syscall", + "syscall.SYS_OLDLSTAT": "syscall", + "syscall.SYS_OLDOLDUNAME": "syscall", + "syscall.SYS_OLDSTAT": "syscall", + "syscall.SYS_OLDUNAME": "syscall", + "syscall.SYS_OPEN": "syscall", + "syscall.SYS_OPENAT": "syscall", + "syscall.SYS_OPENBSD_POLL": "syscall", + "syscall.SYS_OPEN_BY_HANDLE_AT": "syscall", + "syscall.SYS_OPEN_EXTENDED": "syscall", + "syscall.SYS_OPEN_NOCANCEL": "syscall", + "syscall.SYS_OVADVISE": "syscall", + "syscall.SYS_PACCEPT": "syscall", + "syscall.SYS_PATHCONF": "syscall", + "syscall.SYS_PAUSE": 
"syscall", + "syscall.SYS_PCICONFIG_IOBASE": "syscall", + "syscall.SYS_PCICONFIG_READ": "syscall", + "syscall.SYS_PCICONFIG_WRITE": "syscall", + "syscall.SYS_PDFORK": "syscall", + "syscall.SYS_PDGETPID": "syscall", + "syscall.SYS_PDKILL": "syscall", + "syscall.SYS_PERF_EVENT_OPEN": "syscall", + "syscall.SYS_PERSONALITY": "syscall", + "syscall.SYS_PID_HIBERNATE": "syscall", + "syscall.SYS_PID_RESUME": "syscall", + "syscall.SYS_PID_SHUTDOWN_SOCKETS": "syscall", + "syscall.SYS_PID_SUSPEND": "syscall", + "syscall.SYS_PIPE": "syscall", + "syscall.SYS_PIPE2": "syscall", + "syscall.SYS_PIVOT_ROOT": "syscall", + "syscall.SYS_PMC_CONTROL": "syscall", + "syscall.SYS_PMC_GET_INFO": "syscall", + "syscall.SYS_POLL": "syscall", + "syscall.SYS_POLLTS": "syscall", + "syscall.SYS_POLL_NOCANCEL": "syscall", + "syscall.SYS_POSIX_FADVISE": "syscall", + "syscall.SYS_POSIX_FALLOCATE": "syscall", + "syscall.SYS_POSIX_OPENPT": "syscall", + "syscall.SYS_POSIX_SPAWN": "syscall", + "syscall.SYS_PPOLL": "syscall", + "syscall.SYS_PRCTL": "syscall", + "syscall.SYS_PREAD": "syscall", + "syscall.SYS_PREAD64": "syscall", + "syscall.SYS_PREADV": "syscall", + "syscall.SYS_PREAD_NOCANCEL": "syscall", + "syscall.SYS_PRLIMIT64": "syscall", + "syscall.SYS_PROCCTL": "syscall", + "syscall.SYS_PROCESS_POLICY": "syscall", + "syscall.SYS_PROCESS_VM_READV": "syscall", + "syscall.SYS_PROCESS_VM_WRITEV": "syscall", + "syscall.SYS_PROC_INFO": "syscall", + "syscall.SYS_PROF": "syscall", + "syscall.SYS_PROFIL": "syscall", + "syscall.SYS_PSELECT": "syscall", + "syscall.SYS_PSELECT6": "syscall", + "syscall.SYS_PSET_ASSIGN": "syscall", + "syscall.SYS_PSET_CREATE": "syscall", + "syscall.SYS_PSET_DESTROY": "syscall", + "syscall.SYS_PSYNCH_CVBROAD": "syscall", + "syscall.SYS_PSYNCH_CVCLRPREPOST": "syscall", + "syscall.SYS_PSYNCH_CVSIGNAL": "syscall", + "syscall.SYS_PSYNCH_CVWAIT": "syscall", + "syscall.SYS_PSYNCH_MUTEXDROP": "syscall", + "syscall.SYS_PSYNCH_MUTEXWAIT": "syscall", + "syscall.SYS_PSYNCH_RW_DOWNGRADE": "syscall", + "syscall.SYS_PSYNCH_RW_LONGRDLOCK": "syscall", + "syscall.SYS_PSYNCH_RW_RDLOCK": "syscall", + "syscall.SYS_PSYNCH_RW_UNLOCK": "syscall", + "syscall.SYS_PSYNCH_RW_UNLOCK2": "syscall", + "syscall.SYS_PSYNCH_RW_UPGRADE": "syscall", + "syscall.SYS_PSYNCH_RW_WRLOCK": "syscall", + "syscall.SYS_PSYNCH_RW_YIELDWRLOCK": "syscall", + "syscall.SYS_PTRACE": "syscall", + "syscall.SYS_PUTPMSG": "syscall", + "syscall.SYS_PWRITE": "syscall", + "syscall.SYS_PWRITE64": "syscall", + "syscall.SYS_PWRITEV": "syscall", + "syscall.SYS_PWRITE_NOCANCEL": "syscall", + "syscall.SYS_QUERY_MODULE": "syscall", + "syscall.SYS_QUOTACTL": "syscall", + "syscall.SYS_RASCTL": "syscall", + "syscall.SYS_RCTL_ADD_RULE": "syscall", + "syscall.SYS_RCTL_GET_LIMITS": "syscall", + "syscall.SYS_RCTL_GET_RACCT": "syscall", + "syscall.SYS_RCTL_GET_RULES": "syscall", + "syscall.SYS_RCTL_REMOVE_RULE": "syscall", + "syscall.SYS_READ": "syscall", + "syscall.SYS_READAHEAD": "syscall", + "syscall.SYS_READDIR": "syscall", + "syscall.SYS_READLINK": "syscall", + "syscall.SYS_READLINKAT": "syscall", + "syscall.SYS_READV": "syscall", + "syscall.SYS_READV_NOCANCEL": "syscall", + "syscall.SYS_READ_NOCANCEL": "syscall", + "syscall.SYS_REBOOT": "syscall", + "syscall.SYS_RECV": "syscall", + "syscall.SYS_RECVFROM": "syscall", + "syscall.SYS_RECVFROM_NOCANCEL": "syscall", + "syscall.SYS_RECVMMSG": "syscall", + "syscall.SYS_RECVMSG": "syscall", + "syscall.SYS_RECVMSG_NOCANCEL": "syscall", + "syscall.SYS_REMAP_FILE_PAGES": "syscall", + "syscall.SYS_REMOVEXATTR": "syscall", + 
"syscall.SYS_RENAME": "syscall", + "syscall.SYS_RENAMEAT": "syscall", + "syscall.SYS_REQUEST_KEY": "syscall", + "syscall.SYS_RESTART_SYSCALL": "syscall", + "syscall.SYS_REVOKE": "syscall", + "syscall.SYS_RFORK": "syscall", + "syscall.SYS_RMDIR": "syscall", + "syscall.SYS_RTPRIO": "syscall", + "syscall.SYS_RTPRIO_THREAD": "syscall", + "syscall.SYS_RT_SIGACTION": "syscall", + "syscall.SYS_RT_SIGPENDING": "syscall", + "syscall.SYS_RT_SIGPROCMASK": "syscall", + "syscall.SYS_RT_SIGQUEUEINFO": "syscall", + "syscall.SYS_RT_SIGRETURN": "syscall", + "syscall.SYS_RT_SIGSUSPEND": "syscall", + "syscall.SYS_RT_SIGTIMEDWAIT": "syscall", + "syscall.SYS_RT_TGSIGQUEUEINFO": "syscall", + "syscall.SYS_SBRK": "syscall", + "syscall.SYS_SCHED_GETAFFINITY": "syscall", + "syscall.SYS_SCHED_GETPARAM": "syscall", + "syscall.SYS_SCHED_GETSCHEDULER": "syscall", + "syscall.SYS_SCHED_GET_PRIORITY_MAX": "syscall", + "syscall.SYS_SCHED_GET_PRIORITY_MIN": "syscall", + "syscall.SYS_SCHED_RR_GET_INTERVAL": "syscall", + "syscall.SYS_SCHED_SETAFFINITY": "syscall", + "syscall.SYS_SCHED_SETPARAM": "syscall", + "syscall.SYS_SCHED_SETSCHEDULER": "syscall", + "syscall.SYS_SCHED_YIELD": "syscall", + "syscall.SYS_SCTP_GENERIC_RECVMSG": "syscall", + "syscall.SYS_SCTP_GENERIC_SENDMSG": "syscall", + "syscall.SYS_SCTP_GENERIC_SENDMSG_IOV": "syscall", + "syscall.SYS_SCTP_PEELOFF": "syscall", + "syscall.SYS_SEARCHFS": "syscall", + "syscall.SYS_SECURITY": "syscall", + "syscall.SYS_SELECT": "syscall", + "syscall.SYS_SELECT_NOCANCEL": "syscall", + "syscall.SYS_SEMCONFIG": "syscall", + "syscall.SYS_SEMCTL": "syscall", + "syscall.SYS_SEMGET": "syscall", + "syscall.SYS_SEMOP": "syscall", + "syscall.SYS_SEMSYS": "syscall", + "syscall.SYS_SEMTIMEDOP": "syscall", + "syscall.SYS_SEM_CLOSE": "syscall", + "syscall.SYS_SEM_DESTROY": "syscall", + "syscall.SYS_SEM_GETVALUE": "syscall", + "syscall.SYS_SEM_INIT": "syscall", + "syscall.SYS_SEM_OPEN": "syscall", + "syscall.SYS_SEM_POST": "syscall", + "syscall.SYS_SEM_TRYWAIT": "syscall", + "syscall.SYS_SEM_UNLINK": "syscall", + "syscall.SYS_SEM_WAIT": "syscall", + "syscall.SYS_SEM_WAIT_NOCANCEL": "syscall", + "syscall.SYS_SEND": "syscall", + "syscall.SYS_SENDFILE": "syscall", + "syscall.SYS_SENDFILE64": "syscall", + "syscall.SYS_SENDMMSG": "syscall", + "syscall.SYS_SENDMSG": "syscall", + "syscall.SYS_SENDMSG_NOCANCEL": "syscall", + "syscall.SYS_SENDTO": "syscall", + "syscall.SYS_SENDTO_NOCANCEL": "syscall", + "syscall.SYS_SETATTRLIST": "syscall", + "syscall.SYS_SETAUDIT": "syscall", + "syscall.SYS_SETAUDIT_ADDR": "syscall", + "syscall.SYS_SETAUID": "syscall", + "syscall.SYS_SETCONTEXT": "syscall", + "syscall.SYS_SETDOMAINNAME": "syscall", + "syscall.SYS_SETEGID": "syscall", + "syscall.SYS_SETEUID": "syscall", + "syscall.SYS_SETFIB": "syscall", + "syscall.SYS_SETFSGID": "syscall", + "syscall.SYS_SETFSGID32": "syscall", + "syscall.SYS_SETFSUID": "syscall", + "syscall.SYS_SETFSUID32": "syscall", + "syscall.SYS_SETGID": "syscall", + "syscall.SYS_SETGID32": "syscall", + "syscall.SYS_SETGROUPS": "syscall", + "syscall.SYS_SETGROUPS32": "syscall", + "syscall.SYS_SETHOSTNAME": "syscall", + "syscall.SYS_SETITIMER": "syscall", + "syscall.SYS_SETLCID": "syscall", + "syscall.SYS_SETLOGIN": "syscall", + "syscall.SYS_SETLOGINCLASS": "syscall", + "syscall.SYS_SETNS": "syscall", + "syscall.SYS_SETPGID": "syscall", + "syscall.SYS_SETPRIORITY": "syscall", + "syscall.SYS_SETPRIVEXEC": "syscall", + "syscall.SYS_SETREGID": "syscall", + "syscall.SYS_SETREGID32": "syscall", + "syscall.SYS_SETRESGID": "syscall", + 
"syscall.SYS_SETRESGID32": "syscall", + "syscall.SYS_SETRESUID": "syscall", + "syscall.SYS_SETRESUID32": "syscall", + "syscall.SYS_SETREUID": "syscall", + "syscall.SYS_SETREUID32": "syscall", + "syscall.SYS_SETRLIMIT": "syscall", + "syscall.SYS_SETRTABLE": "syscall", + "syscall.SYS_SETSGROUPS": "syscall", + "syscall.SYS_SETSID": "syscall", + "syscall.SYS_SETSOCKOPT": "syscall", + "syscall.SYS_SETTID": "syscall", + "syscall.SYS_SETTID_WITH_PID": "syscall", + "syscall.SYS_SETTIMEOFDAY": "syscall", + "syscall.SYS_SETUID": "syscall", + "syscall.SYS_SETUID32": "syscall", + "syscall.SYS_SETWGROUPS": "syscall", + "syscall.SYS_SETXATTR": "syscall", + "syscall.SYS_SET_MEMPOLICY": "syscall", + "syscall.SYS_SET_ROBUST_LIST": "syscall", + "syscall.SYS_SET_THREAD_AREA": "syscall", + "syscall.SYS_SET_TID_ADDRESS": "syscall", + "syscall.SYS_SGETMASK": "syscall", + "syscall.SYS_SHARED_REGION_CHECK_NP": "syscall", + "syscall.SYS_SHARED_REGION_MAP_AND_SLIDE_NP": "syscall", + "syscall.SYS_SHMAT": "syscall", + "syscall.SYS_SHMCTL": "syscall", + "syscall.SYS_SHMDT": "syscall", + "syscall.SYS_SHMGET": "syscall", + "syscall.SYS_SHMSYS": "syscall", + "syscall.SYS_SHM_OPEN": "syscall", + "syscall.SYS_SHM_UNLINK": "syscall", + "syscall.SYS_SHUTDOWN": "syscall", + "syscall.SYS_SIGACTION": "syscall", + "syscall.SYS_SIGALTSTACK": "syscall", + "syscall.SYS_SIGNAL": "syscall", + "syscall.SYS_SIGNALFD": "syscall", + "syscall.SYS_SIGNALFD4": "syscall", + "syscall.SYS_SIGPENDING": "syscall", + "syscall.SYS_SIGPROCMASK": "syscall", + "syscall.SYS_SIGQUEUE": "syscall", + "syscall.SYS_SIGQUEUEINFO": "syscall", + "syscall.SYS_SIGRETURN": "syscall", + "syscall.SYS_SIGSUSPEND": "syscall", + "syscall.SYS_SIGSUSPEND_NOCANCEL": "syscall", + "syscall.SYS_SIGTIMEDWAIT": "syscall", + "syscall.SYS_SIGWAIT": "syscall", + "syscall.SYS_SIGWAITINFO": "syscall", + "syscall.SYS_SOCKET": "syscall", + "syscall.SYS_SOCKETCALL": "syscall", + "syscall.SYS_SOCKETPAIR": "syscall", + "syscall.SYS_SPLICE": "syscall", + "syscall.SYS_SSETMASK": "syscall", + "syscall.SYS_SSTK": "syscall", + "syscall.SYS_STACK_SNAPSHOT": "syscall", + "syscall.SYS_STAT": "syscall", + "syscall.SYS_STAT64": "syscall", + "syscall.SYS_STAT64_EXTENDED": "syscall", + "syscall.SYS_STATFS": "syscall", + "syscall.SYS_STATFS64": "syscall", + "syscall.SYS_STATV": "syscall", + "syscall.SYS_STATVFS1": "syscall", + "syscall.SYS_STAT_EXTENDED": "syscall", + "syscall.SYS_STIME": "syscall", + "syscall.SYS_STTY": "syscall", + "syscall.SYS_SWAPCONTEXT": "syscall", + "syscall.SYS_SWAPCTL": "syscall", + "syscall.SYS_SWAPOFF": "syscall", + "syscall.SYS_SWAPON": "syscall", + "syscall.SYS_SYMLINK": "syscall", + "syscall.SYS_SYMLINKAT": "syscall", + "syscall.SYS_SYNC": "syscall", + "syscall.SYS_SYNCFS": "syscall", + "syscall.SYS_SYNC_FILE_RANGE": "syscall", + "syscall.SYS_SYSARCH": "syscall", + "syscall.SYS_SYSCALL": "syscall", + "syscall.SYS_SYSCALL_BASE": "syscall", + "syscall.SYS_SYSFS": "syscall", + "syscall.SYS_SYSINFO": "syscall", + "syscall.SYS_SYSLOG": "syscall", + "syscall.SYS_TEE": "syscall", + "syscall.SYS_TGKILL": "syscall", + "syscall.SYS_THREAD_SELFID": "syscall", + "syscall.SYS_THR_CREATE": "syscall", + "syscall.SYS_THR_EXIT": "syscall", + "syscall.SYS_THR_KILL": "syscall", + "syscall.SYS_THR_KILL2": "syscall", + "syscall.SYS_THR_NEW": "syscall", + "syscall.SYS_THR_SELF": "syscall", + "syscall.SYS_THR_SET_NAME": "syscall", + "syscall.SYS_THR_SUSPEND": "syscall", + "syscall.SYS_THR_WAKE": "syscall", + "syscall.SYS_TIME": "syscall", + "syscall.SYS_TIMERFD_CREATE": "syscall", + 
"syscall.SYS_TIMERFD_GETTIME": "syscall", + "syscall.SYS_TIMERFD_SETTIME": "syscall", + "syscall.SYS_TIMER_CREATE": "syscall", + "syscall.SYS_TIMER_DELETE": "syscall", + "syscall.SYS_TIMER_GETOVERRUN": "syscall", + "syscall.SYS_TIMER_GETTIME": "syscall", + "syscall.SYS_TIMER_SETTIME": "syscall", + "syscall.SYS_TIMES": "syscall", + "syscall.SYS_TKILL": "syscall", + "syscall.SYS_TRUNCATE": "syscall", + "syscall.SYS_TRUNCATE64": "syscall", + "syscall.SYS_TUXCALL": "syscall", + "syscall.SYS_UGETRLIMIT": "syscall", + "syscall.SYS_ULIMIT": "syscall", + "syscall.SYS_UMASK": "syscall", + "syscall.SYS_UMASK_EXTENDED": "syscall", + "syscall.SYS_UMOUNT": "syscall", + "syscall.SYS_UMOUNT2": "syscall", + "syscall.SYS_UNAME": "syscall", + "syscall.SYS_UNDELETE": "syscall", + "syscall.SYS_UNLINK": "syscall", + "syscall.SYS_UNLINKAT": "syscall", + "syscall.SYS_UNMOUNT": "syscall", + "syscall.SYS_UNSHARE": "syscall", + "syscall.SYS_USELIB": "syscall", + "syscall.SYS_USTAT": "syscall", + "syscall.SYS_UTIME": "syscall", + "syscall.SYS_UTIMENSAT": "syscall", + "syscall.SYS_UTIMES": "syscall", + "syscall.SYS_UTRACE": "syscall", + "syscall.SYS_UUIDGEN": "syscall", + "syscall.SYS_VADVISE": "syscall", + "syscall.SYS_VFORK": "syscall", + "syscall.SYS_VHANGUP": "syscall", + "syscall.SYS_VM86": "syscall", + "syscall.SYS_VM86OLD": "syscall", + "syscall.SYS_VMSPLICE": "syscall", + "syscall.SYS_VM_PRESSURE_MONITOR": "syscall", + "syscall.SYS_VSERVER": "syscall", + "syscall.SYS_WAIT4": "syscall", + "syscall.SYS_WAIT4_NOCANCEL": "syscall", + "syscall.SYS_WAIT6": "syscall", + "syscall.SYS_WAITEVENT": "syscall", + "syscall.SYS_WAITID": "syscall", + "syscall.SYS_WAITID_NOCANCEL": "syscall", + "syscall.SYS_WAITPID": "syscall", + "syscall.SYS_WATCHEVENT": "syscall", + "syscall.SYS_WORKQ_KERNRETURN": "syscall", + "syscall.SYS_WORKQ_OPEN": "syscall", + "syscall.SYS_WRITE": "syscall", + "syscall.SYS_WRITEV": "syscall", + "syscall.SYS_WRITEV_NOCANCEL": "syscall", + "syscall.SYS_WRITE_NOCANCEL": "syscall", + "syscall.SYS_YIELD": "syscall", + "syscall.SYS__LLSEEK": "syscall", + "syscall.SYS__LWP_CONTINUE": "syscall", + "syscall.SYS__LWP_CREATE": "syscall", + "syscall.SYS__LWP_CTL": "syscall", + "syscall.SYS__LWP_DETACH": "syscall", + "syscall.SYS__LWP_EXIT": "syscall", + "syscall.SYS__LWP_GETNAME": "syscall", + "syscall.SYS__LWP_GETPRIVATE": "syscall", + "syscall.SYS__LWP_KILL": "syscall", + "syscall.SYS__LWP_PARK": "syscall", + "syscall.SYS__LWP_SELF": "syscall", + "syscall.SYS__LWP_SETNAME": "syscall", + "syscall.SYS__LWP_SETPRIVATE": "syscall", + "syscall.SYS__LWP_SUSPEND": "syscall", + "syscall.SYS__LWP_UNPARK": "syscall", + "syscall.SYS__LWP_UNPARK_ALL": "syscall", + "syscall.SYS__LWP_WAIT": "syscall", + "syscall.SYS__LWP_WAKEUP": "syscall", + "syscall.SYS__NEWSELECT": "syscall", + "syscall.SYS__PSET_BIND": "syscall", + "syscall.SYS__SCHED_GETAFFINITY": "syscall", + "syscall.SYS__SCHED_GETPARAM": "syscall", + "syscall.SYS__SCHED_SETAFFINITY": "syscall", + "syscall.SYS__SCHED_SETPARAM": "syscall", + "syscall.SYS__SYSCTL": "syscall", + "syscall.SYS__UMTX_LOCK": "syscall", + "syscall.SYS__UMTX_OP": "syscall", + "syscall.SYS__UMTX_UNLOCK": "syscall", + "syscall.SYS___ACL_ACLCHECK_FD": "syscall", + "syscall.SYS___ACL_ACLCHECK_FILE": "syscall", + "syscall.SYS___ACL_ACLCHECK_LINK": "syscall", + "syscall.SYS___ACL_DELETE_FD": "syscall", + "syscall.SYS___ACL_DELETE_FILE": "syscall", + "syscall.SYS___ACL_DELETE_LINK": "syscall", + "syscall.SYS___ACL_GET_FD": "syscall", + "syscall.SYS___ACL_GET_FILE": "syscall", + 
"syscall.SYS___ACL_GET_LINK": "syscall", + "syscall.SYS___ACL_SET_FD": "syscall", + "syscall.SYS___ACL_SET_FILE": "syscall", + "syscall.SYS___ACL_SET_LINK": "syscall", + "syscall.SYS___CLONE": "syscall", + "syscall.SYS___DISABLE_THREADSIGNAL": "syscall", + "syscall.SYS___GETCWD": "syscall", + "syscall.SYS___GETLOGIN": "syscall", + "syscall.SYS___GET_TCB": "syscall", + "syscall.SYS___MAC_EXECVE": "syscall", + "syscall.SYS___MAC_GETFSSTAT": "syscall", + "syscall.SYS___MAC_GET_FD": "syscall", + "syscall.SYS___MAC_GET_FILE": "syscall", + "syscall.SYS___MAC_GET_LCID": "syscall", + "syscall.SYS___MAC_GET_LCTX": "syscall", + "syscall.SYS___MAC_GET_LINK": "syscall", + "syscall.SYS___MAC_GET_MOUNT": "syscall", + "syscall.SYS___MAC_GET_PID": "syscall", + "syscall.SYS___MAC_GET_PROC": "syscall", + "syscall.SYS___MAC_MOUNT": "syscall", + "syscall.SYS___MAC_SET_FD": "syscall", + "syscall.SYS___MAC_SET_FILE": "syscall", + "syscall.SYS___MAC_SET_LCTX": "syscall", + "syscall.SYS___MAC_SET_LINK": "syscall", + "syscall.SYS___MAC_SET_PROC": "syscall", + "syscall.SYS___MAC_SYSCALL": "syscall", + "syscall.SYS___OLD_SEMWAIT_SIGNAL": "syscall", + "syscall.SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL": "syscall", + "syscall.SYS___POSIX_CHOWN": "syscall", + "syscall.SYS___POSIX_FCHOWN": "syscall", + "syscall.SYS___POSIX_LCHOWN": "syscall", + "syscall.SYS___POSIX_RENAME": "syscall", + "syscall.SYS___PTHREAD_CANCELED": "syscall", + "syscall.SYS___PTHREAD_CHDIR": "syscall", + "syscall.SYS___PTHREAD_FCHDIR": "syscall", + "syscall.SYS___PTHREAD_KILL": "syscall", + "syscall.SYS___PTHREAD_MARKCANCEL": "syscall", + "syscall.SYS___PTHREAD_SIGMASK": "syscall", + "syscall.SYS___QUOTACTL": "syscall", + "syscall.SYS___SEMCTL": "syscall", + "syscall.SYS___SEMWAIT_SIGNAL": "syscall", + "syscall.SYS___SEMWAIT_SIGNAL_NOCANCEL": "syscall", + "syscall.SYS___SETLOGIN": "syscall", + "syscall.SYS___SETUGID": "syscall", + "syscall.SYS___SET_TCB": "syscall", + "syscall.SYS___SIGACTION_SIGTRAMP": "syscall", + "syscall.SYS___SIGTIMEDWAIT": "syscall", + "syscall.SYS___SIGWAIT": "syscall", + "syscall.SYS___SIGWAIT_NOCANCEL": "syscall", + "syscall.SYS___SYSCTL": "syscall", + "syscall.SYS___TFORK": "syscall", + "syscall.SYS___THREXIT": "syscall", + "syscall.SYS___THRSIGDIVERT": "syscall", + "syscall.SYS___THRSLEEP": "syscall", + "syscall.SYS___THRWAKEUP": "syscall", + "syscall.S_ARCH1": "syscall", + "syscall.S_ARCH2": "syscall", + "syscall.S_BLKSIZE": "syscall", + "syscall.S_IEXEC": "syscall", + "syscall.S_IFBLK": "syscall", + "syscall.S_IFCHR": "syscall", + "syscall.S_IFDIR": "syscall", + "syscall.S_IFIFO": "syscall", + "syscall.S_IFLNK": "syscall", + "syscall.S_IFMT": "syscall", + "syscall.S_IFREG": "syscall", + "syscall.S_IFSOCK": "syscall", + "syscall.S_IFWHT": "syscall", + "syscall.S_IREAD": "syscall", + "syscall.S_IRGRP": "syscall", + "syscall.S_IROTH": "syscall", + "syscall.S_IRUSR": "syscall", + "syscall.S_IRWXG": "syscall", + "syscall.S_IRWXO": "syscall", + "syscall.S_IRWXU": "syscall", + "syscall.S_ISGID": "syscall", + "syscall.S_ISTXT": "syscall", + "syscall.S_ISUID": "syscall", + "syscall.S_ISVTX": "syscall", + "syscall.S_IWGRP": "syscall", + "syscall.S_IWOTH": "syscall", + "syscall.S_IWRITE": "syscall", + "syscall.S_IWUSR": "syscall", + "syscall.S_IXGRP": "syscall", + "syscall.S_IXOTH": "syscall", + "syscall.S_IXUSR": "syscall", + "syscall.S_LOGIN_SET": "syscall", + "syscall.SecurityAttributes": "syscall", + "syscall.Seek": "syscall", + "syscall.Select": "syscall", + "syscall.Sendfile": "syscall", + "syscall.Sendmsg": "syscall", + 
"syscall.SendmsgN": "syscall", + "syscall.Sendto": "syscall", + "syscall.Servent": "syscall", + "syscall.SetBpf": "syscall", + "syscall.SetBpfBuflen": "syscall", + "syscall.SetBpfDatalink": "syscall", + "syscall.SetBpfHeadercmpl": "syscall", + "syscall.SetBpfImmediate": "syscall", + "syscall.SetBpfInterface": "syscall", + "syscall.SetBpfPromisc": "syscall", + "syscall.SetBpfTimeout": "syscall", + "syscall.SetCurrentDirectory": "syscall", + "syscall.SetEndOfFile": "syscall", + "syscall.SetEnvironmentVariable": "syscall", + "syscall.SetFileAttributes": "syscall", + "syscall.SetFileCompletionNotificationModes": "syscall", + "syscall.SetFilePointer": "syscall", + "syscall.SetFileTime": "syscall", + "syscall.SetHandleInformation": "syscall", + "syscall.SetKevent": "syscall", + "syscall.SetLsfPromisc": "syscall", + "syscall.SetNonblock": "syscall", + "syscall.Setdomainname": "syscall", + "syscall.Setegid": "syscall", + "syscall.Setenv": "syscall", + "syscall.Seteuid": "syscall", + "syscall.Setfsgid": "syscall", + "syscall.Setfsuid": "syscall", + "syscall.Setgid": "syscall", + "syscall.Setgroups": "syscall", + "syscall.Sethostname": "syscall", + "syscall.Setlogin": "syscall", + "syscall.Setpgid": "syscall", + "syscall.Setpriority": "syscall", + "syscall.Setprivexec": "syscall", + "syscall.Setregid": "syscall", + "syscall.Setresgid": "syscall", + "syscall.Setresuid": "syscall", + "syscall.Setreuid": "syscall", + "syscall.Setrlimit": "syscall", + "syscall.Setsid": "syscall", + "syscall.Setsockopt": "syscall", + "syscall.SetsockoptByte": "syscall", + "syscall.SetsockoptICMPv6Filter": "syscall", + "syscall.SetsockoptIPMreq": "syscall", + "syscall.SetsockoptIPMreqn": "syscall", + "syscall.SetsockoptIPv6Mreq": "syscall", + "syscall.SetsockoptInet4Addr": "syscall", + "syscall.SetsockoptInt": "syscall", + "syscall.SetsockoptLinger": "syscall", + "syscall.SetsockoptString": "syscall", + "syscall.SetsockoptTimeval": "syscall", + "syscall.Settimeofday": "syscall", + "syscall.Setuid": "syscall", + "syscall.Setxattr": "syscall", + "syscall.Shutdown": "syscall", + "syscall.SidTypeAlias": "syscall", + "syscall.SidTypeComputer": "syscall", + "syscall.SidTypeDeletedAccount": "syscall", + "syscall.SidTypeDomain": "syscall", + "syscall.SidTypeGroup": "syscall", + "syscall.SidTypeInvalid": "syscall", + "syscall.SidTypeLabel": "syscall", + "syscall.SidTypeUnknown": "syscall", + "syscall.SidTypeUser": "syscall", + "syscall.SidTypeWellKnownGroup": "syscall", + "syscall.Signal": "syscall", + "syscall.SizeofBpfHdr": "syscall", + "syscall.SizeofBpfInsn": "syscall", + "syscall.SizeofBpfProgram": "syscall", + "syscall.SizeofBpfStat": "syscall", + "syscall.SizeofBpfVersion": "syscall", + "syscall.SizeofBpfZbuf": "syscall", + "syscall.SizeofBpfZbufHeader": "syscall", + "syscall.SizeofCmsghdr": "syscall", + "syscall.SizeofICMPv6Filter": "syscall", + "syscall.SizeofIPMreq": "syscall", + "syscall.SizeofIPMreqn": "syscall", + "syscall.SizeofIPv6MTUInfo": "syscall", + "syscall.SizeofIPv6Mreq": "syscall", + "syscall.SizeofIfAddrmsg": "syscall", + "syscall.SizeofIfAnnounceMsghdr": "syscall", + "syscall.SizeofIfData": "syscall", + "syscall.SizeofIfInfomsg": "syscall", + "syscall.SizeofIfMsghdr": "syscall", + "syscall.SizeofIfaMsghdr": "syscall", + "syscall.SizeofIfmaMsghdr": "syscall", + "syscall.SizeofIfmaMsghdr2": "syscall", + "syscall.SizeofInet4Pktinfo": "syscall", + "syscall.SizeofInet6Pktinfo": "syscall", + "syscall.SizeofInotifyEvent": "syscall", + "syscall.SizeofLinger": "syscall", + "syscall.SizeofMsghdr": "syscall", + 
"syscall.SizeofNlAttr": "syscall", + "syscall.SizeofNlMsgerr": "syscall", + "syscall.SizeofNlMsghdr": "syscall", + "syscall.SizeofRtAttr": "syscall", + "syscall.SizeofRtGenmsg": "syscall", + "syscall.SizeofRtMetrics": "syscall", + "syscall.SizeofRtMsg": "syscall", + "syscall.SizeofRtMsghdr": "syscall", + "syscall.SizeofRtNexthop": "syscall", + "syscall.SizeofSockFilter": "syscall", + "syscall.SizeofSockFprog": "syscall", + "syscall.SizeofSockaddrAny": "syscall", + "syscall.SizeofSockaddrDatalink": "syscall", + "syscall.SizeofSockaddrInet4": "syscall", + "syscall.SizeofSockaddrInet6": "syscall", + "syscall.SizeofSockaddrLinklayer": "syscall", + "syscall.SizeofSockaddrNetlink": "syscall", + "syscall.SizeofSockaddrUnix": "syscall", + "syscall.SizeofTCPInfo": "syscall", + "syscall.SizeofUcred": "syscall", + "syscall.SlicePtrFromStrings": "syscall", + "syscall.SockFilter": "syscall", + "syscall.SockFprog": "syscall", + "syscall.SockaddrDatalink": "syscall", + "syscall.SockaddrGen": "syscall", + "syscall.SockaddrInet4": "syscall", + "syscall.SockaddrInet6": "syscall", + "syscall.SockaddrLinklayer": "syscall", + "syscall.SockaddrNetlink": "syscall", + "syscall.SockaddrUnix": "syscall", + "syscall.Socket": "syscall", + "syscall.SocketControlMessage": "syscall", + "syscall.SocketDisableIPv6": "syscall", + "syscall.Socketpair": "syscall", + "syscall.Splice": "syscall", + "syscall.StartProcess": "syscall", + "syscall.StartupInfo": "syscall", + "syscall.Stat": "syscall", + "syscall.Stat_t": "syscall", + "syscall.Statfs": "syscall", + "syscall.Statfs_t": "syscall", + "syscall.Stderr": "syscall", + "syscall.Stdin": "syscall", + "syscall.Stdout": "syscall", + "syscall.StringBytePtr": "syscall", + "syscall.StringByteSlice": "syscall", + "syscall.StringSlicePtr": "syscall", + "syscall.StringToSid": "syscall", + "syscall.StringToUTF16": "syscall", + "syscall.StringToUTF16Ptr": "syscall", + "syscall.Symlink": "syscall", + "syscall.Sync": "syscall", + "syscall.SyncFileRange": "syscall", + "syscall.SysProcAttr": "syscall", + "syscall.SysProcIDMap": "syscall", + "syscall.Syscall": "syscall", + "syscall.Syscall12": "syscall", + "syscall.Syscall15": "syscall", + "syscall.Syscall6": "syscall", + "syscall.Syscall9": "syscall", + "syscall.Sysctl": "syscall", + "syscall.SysctlUint32": "syscall", + "syscall.Sysctlnode": "syscall", + "syscall.Sysinfo": "syscall", + "syscall.Sysinfo_t": "syscall", + "syscall.Systemtime": "syscall", + "syscall.TCGETS": "syscall", + "syscall.TCIFLUSH": "syscall", + "syscall.TCIOFLUSH": "syscall", + "syscall.TCOFLUSH": "syscall", + "syscall.TCPInfo": "syscall", + "syscall.TCPKeepalive": "syscall", + "syscall.TCP_CA_NAME_MAX": "syscall", + "syscall.TCP_CONGCTL": "syscall", + "syscall.TCP_CONGESTION": "syscall", + "syscall.TCP_CONNECTIONTIMEOUT": "syscall", + "syscall.TCP_CORK": "syscall", + "syscall.TCP_DEFER_ACCEPT": "syscall", + "syscall.TCP_INFO": "syscall", + "syscall.TCP_KEEPALIVE": "syscall", + "syscall.TCP_KEEPCNT": "syscall", + "syscall.TCP_KEEPIDLE": "syscall", + "syscall.TCP_KEEPINIT": "syscall", + "syscall.TCP_KEEPINTVL": "syscall", + "syscall.TCP_LINGER2": "syscall", + "syscall.TCP_MAXBURST": "syscall", + "syscall.TCP_MAXHLEN": "syscall", + "syscall.TCP_MAXOLEN": "syscall", + "syscall.TCP_MAXSEG": "syscall", + "syscall.TCP_MAXWIN": "syscall", + "syscall.TCP_MAX_SACK": "syscall", + "syscall.TCP_MAX_WINSHIFT": "syscall", + "syscall.TCP_MD5SIG": "syscall", + "syscall.TCP_MD5SIG_MAXKEYLEN": "syscall", + "syscall.TCP_MINMSS": "syscall", + "syscall.TCP_MINMSSOVERLOAD": "syscall", + 
"syscall.TCP_MSS": "syscall", + "syscall.TCP_NODELAY": "syscall", + "syscall.TCP_NOOPT": "syscall", + "syscall.TCP_NOPUSH": "syscall", + "syscall.TCP_NSTATES": "syscall", + "syscall.TCP_QUICKACK": "syscall", + "syscall.TCP_RXT_CONNDROPTIME": "syscall", + "syscall.TCP_RXT_FINDROP": "syscall", + "syscall.TCP_SACK_ENABLE": "syscall", + "syscall.TCP_SYNCNT": "syscall", + "syscall.TCP_VENDOR": "syscall", + "syscall.TCP_WINDOW_CLAMP": "syscall", + "syscall.TCSAFLUSH": "syscall", + "syscall.TCSETS": "syscall", + "syscall.TF_DISCONNECT": "syscall", + "syscall.TF_REUSE_SOCKET": "syscall", + "syscall.TF_USE_DEFAULT_WORKER": "syscall", + "syscall.TF_USE_KERNEL_APC": "syscall", + "syscall.TF_USE_SYSTEM_THREAD": "syscall", + "syscall.TF_WRITE_BEHIND": "syscall", + "syscall.TH32CS_INHERIT": "syscall", + "syscall.TH32CS_SNAPALL": "syscall", + "syscall.TH32CS_SNAPHEAPLIST": "syscall", + "syscall.TH32CS_SNAPMODULE": "syscall", + "syscall.TH32CS_SNAPMODULE32": "syscall", + "syscall.TH32CS_SNAPPROCESS": "syscall", + "syscall.TH32CS_SNAPTHREAD": "syscall", + "syscall.TIME_ZONE_ID_DAYLIGHT": "syscall", + "syscall.TIME_ZONE_ID_STANDARD": "syscall", + "syscall.TIME_ZONE_ID_UNKNOWN": "syscall", + "syscall.TIOCCBRK": "syscall", + "syscall.TIOCCDTR": "syscall", + "syscall.TIOCCONS": "syscall", + "syscall.TIOCDCDTIMESTAMP": "syscall", + "syscall.TIOCDRAIN": "syscall", + "syscall.TIOCDSIMICROCODE": "syscall", + "syscall.TIOCEXCL": "syscall", + "syscall.TIOCEXT": "syscall", + "syscall.TIOCFLAG_CDTRCTS": "syscall", + "syscall.TIOCFLAG_CLOCAL": "syscall", + "syscall.TIOCFLAG_CRTSCTS": "syscall", + "syscall.TIOCFLAG_MDMBUF": "syscall", + "syscall.TIOCFLAG_PPS": "syscall", + "syscall.TIOCFLAG_SOFTCAR": "syscall", + "syscall.TIOCFLUSH": "syscall", + "syscall.TIOCGDEV": "syscall", + "syscall.TIOCGDRAINWAIT": "syscall", + "syscall.TIOCGETA": "syscall", + "syscall.TIOCGETD": "syscall", + "syscall.TIOCGFLAGS": "syscall", + "syscall.TIOCGICOUNT": "syscall", + "syscall.TIOCGLCKTRMIOS": "syscall", + "syscall.TIOCGLINED": "syscall", + "syscall.TIOCGPGRP": "syscall", + "syscall.TIOCGPTN": "syscall", + "syscall.TIOCGQSIZE": "syscall", + "syscall.TIOCGRANTPT": "syscall", + "syscall.TIOCGRS485": "syscall", + "syscall.TIOCGSERIAL": "syscall", + "syscall.TIOCGSID": "syscall", + "syscall.TIOCGSIZE": "syscall", + "syscall.TIOCGSOFTCAR": "syscall", + "syscall.TIOCGTSTAMP": "syscall", + "syscall.TIOCGWINSZ": "syscall", + "syscall.TIOCINQ": "syscall", + "syscall.TIOCIXOFF": "syscall", + "syscall.TIOCIXON": "syscall", + "syscall.TIOCLINUX": "syscall", + "syscall.TIOCMBIC": "syscall", + "syscall.TIOCMBIS": "syscall", + "syscall.TIOCMGDTRWAIT": "syscall", + "syscall.TIOCMGET": "syscall", + "syscall.TIOCMIWAIT": "syscall", + "syscall.TIOCMODG": "syscall", + "syscall.TIOCMODS": "syscall", + "syscall.TIOCMSDTRWAIT": "syscall", + "syscall.TIOCMSET": "syscall", + "syscall.TIOCM_CAR": "syscall", + "syscall.TIOCM_CD": "syscall", + "syscall.TIOCM_CTS": "syscall", + "syscall.TIOCM_DCD": "syscall", + "syscall.TIOCM_DSR": "syscall", + "syscall.TIOCM_DTR": "syscall", + "syscall.TIOCM_LE": "syscall", + "syscall.TIOCM_RI": "syscall", + "syscall.TIOCM_RNG": "syscall", + "syscall.TIOCM_RTS": "syscall", + "syscall.TIOCM_SR": "syscall", + "syscall.TIOCM_ST": "syscall", + "syscall.TIOCNOTTY": "syscall", + "syscall.TIOCNXCL": "syscall", + "syscall.TIOCOUTQ": "syscall", + "syscall.TIOCPKT": "syscall", + "syscall.TIOCPKT_DATA": "syscall", + "syscall.TIOCPKT_DOSTOP": "syscall", + "syscall.TIOCPKT_FLUSHREAD": "syscall", + "syscall.TIOCPKT_FLUSHWRITE": "syscall", + 
"syscall.TIOCPKT_IOCTL": "syscall", + "syscall.TIOCPKT_NOSTOP": "syscall", + "syscall.TIOCPKT_START": "syscall", + "syscall.TIOCPKT_STOP": "syscall", + "syscall.TIOCPTMASTER": "syscall", + "syscall.TIOCPTMGET": "syscall", + "syscall.TIOCPTSNAME": "syscall", + "syscall.TIOCPTYGNAME": "syscall", + "syscall.TIOCPTYGRANT": "syscall", + "syscall.TIOCPTYUNLK": "syscall", + "syscall.TIOCRCVFRAME": "syscall", + "syscall.TIOCREMOTE": "syscall", + "syscall.TIOCSBRK": "syscall", + "syscall.TIOCSCONS": "syscall", + "syscall.TIOCSCTTY": "syscall", + "syscall.TIOCSDRAINWAIT": "syscall", + "syscall.TIOCSDTR": "syscall", + "syscall.TIOCSERCONFIG": "syscall", + "syscall.TIOCSERGETLSR": "syscall", + "syscall.TIOCSERGETMULTI": "syscall", + "syscall.TIOCSERGSTRUCT": "syscall", + "syscall.TIOCSERGWILD": "syscall", + "syscall.TIOCSERSETMULTI": "syscall", + "syscall.TIOCSERSWILD": "syscall", + "syscall.TIOCSER_TEMT": "syscall", + "syscall.TIOCSETA": "syscall", + "syscall.TIOCSETAF": "syscall", + "syscall.TIOCSETAW": "syscall", + "syscall.TIOCSETD": "syscall", + "syscall.TIOCSFLAGS": "syscall", + "syscall.TIOCSIG": "syscall", + "syscall.TIOCSLCKTRMIOS": "syscall", + "syscall.TIOCSLINED": "syscall", + "syscall.TIOCSPGRP": "syscall", + "syscall.TIOCSPTLCK": "syscall", + "syscall.TIOCSQSIZE": "syscall", + "syscall.TIOCSRS485": "syscall", + "syscall.TIOCSSERIAL": "syscall", + "syscall.TIOCSSIZE": "syscall", + "syscall.TIOCSSOFTCAR": "syscall", + "syscall.TIOCSTART": "syscall", + "syscall.TIOCSTAT": "syscall", + "syscall.TIOCSTI": "syscall", + "syscall.TIOCSTOP": "syscall", + "syscall.TIOCSTSTAMP": "syscall", + "syscall.TIOCSWINSZ": "syscall", + "syscall.TIOCTIMESTAMP": "syscall", + "syscall.TIOCUCNTL": "syscall", + "syscall.TIOCVHANGUP": "syscall", + "syscall.TIOCXMTFRAME": "syscall", + "syscall.TOKEN_ADJUST_DEFAULT": "syscall", + "syscall.TOKEN_ADJUST_GROUPS": "syscall", + "syscall.TOKEN_ADJUST_PRIVILEGES": "syscall", + "syscall.TOKEN_ALL_ACCESS": "syscall", + "syscall.TOKEN_ASSIGN_PRIMARY": "syscall", + "syscall.TOKEN_DUPLICATE": "syscall", + "syscall.TOKEN_EXECUTE": "syscall", + "syscall.TOKEN_IMPERSONATE": "syscall", + "syscall.TOKEN_QUERY": "syscall", + "syscall.TOKEN_QUERY_SOURCE": "syscall", + "syscall.TOKEN_READ": "syscall", + "syscall.TOKEN_WRITE": "syscall", + "syscall.TOSTOP": "syscall", + "syscall.TRUNCATE_EXISTING": "syscall", + "syscall.TUNATTACHFILTER": "syscall", + "syscall.TUNDETACHFILTER": "syscall", + "syscall.TUNGETFEATURES": "syscall", + "syscall.TUNGETIFF": "syscall", + "syscall.TUNGETSNDBUF": "syscall", + "syscall.TUNGETVNETHDRSZ": "syscall", + "syscall.TUNSETDEBUG": "syscall", + "syscall.TUNSETGROUP": "syscall", + "syscall.TUNSETIFF": "syscall", + "syscall.TUNSETLINK": "syscall", + "syscall.TUNSETNOCSUM": "syscall", + "syscall.TUNSETOFFLOAD": "syscall", + "syscall.TUNSETOWNER": "syscall", + "syscall.TUNSETPERSIST": "syscall", + "syscall.TUNSETSNDBUF": "syscall", + "syscall.TUNSETTXFILTER": "syscall", + "syscall.TUNSETVNETHDRSZ": "syscall", + "syscall.Tee": "syscall", + "syscall.TerminateProcess": "syscall", + "syscall.Termios": "syscall", + "syscall.Tgkill": "syscall", + "syscall.Time": "syscall", + "syscall.Time_t": "syscall", + "syscall.Times": "syscall", + "syscall.Timespec": "syscall", + "syscall.TimespecToNsec": "syscall", + "syscall.Timeval": "syscall", + "syscall.Timeval32": "syscall", + "syscall.TimevalToNsec": "syscall", + "syscall.Timex": "syscall", + "syscall.Timezoneinformation": "syscall", + "syscall.Tms": "syscall", + "syscall.Token": "syscall", + 
"syscall.TokenAccessInformation": "syscall", + "syscall.TokenAuditPolicy": "syscall", + "syscall.TokenDefaultDacl": "syscall", + "syscall.TokenElevation": "syscall", + "syscall.TokenElevationType": "syscall", + "syscall.TokenGroups": "syscall", + "syscall.TokenGroupsAndPrivileges": "syscall", + "syscall.TokenHasRestrictions": "syscall", + "syscall.TokenImpersonationLevel": "syscall", + "syscall.TokenIntegrityLevel": "syscall", + "syscall.TokenLinkedToken": "syscall", + "syscall.TokenLogonSid": "syscall", + "syscall.TokenMandatoryPolicy": "syscall", + "syscall.TokenOrigin": "syscall", + "syscall.TokenOwner": "syscall", + "syscall.TokenPrimaryGroup": "syscall", + "syscall.TokenPrivileges": "syscall", + "syscall.TokenRestrictedSids": "syscall", + "syscall.TokenSandBoxInert": "syscall", + "syscall.TokenSessionId": "syscall", + "syscall.TokenSessionReference": "syscall", + "syscall.TokenSource": "syscall", + "syscall.TokenStatistics": "syscall", + "syscall.TokenType": "syscall", + "syscall.TokenUIAccess": "syscall", + "syscall.TokenUser": "syscall", + "syscall.TokenVirtualizationAllowed": "syscall", + "syscall.TokenVirtualizationEnabled": "syscall", + "syscall.Tokenprimarygroup": "syscall", + "syscall.Tokenuser": "syscall", + "syscall.TranslateAccountName": "syscall", + "syscall.TranslateName": "syscall", + "syscall.TransmitFile": "syscall", + "syscall.TransmitFileBuffers": "syscall", + "syscall.Truncate": "syscall", + "syscall.USAGE_MATCH_TYPE_AND": "syscall", + "syscall.USAGE_MATCH_TYPE_OR": "syscall", + "syscall.UTF16FromString": "syscall", + "syscall.UTF16PtrFromString": "syscall", + "syscall.UTF16ToString": "syscall", + "syscall.Ucred": "syscall", + "syscall.Umask": "syscall", + "syscall.Uname": "syscall", + "syscall.Undelete": "syscall", + "syscall.UnixCredentials": "syscall", + "syscall.UnixRights": "syscall", + "syscall.Unlink": "syscall", + "syscall.Unlinkat": "syscall", + "syscall.UnmapViewOfFile": "syscall", + "syscall.Unmount": "syscall", + "syscall.Unsetenv": "syscall", + "syscall.Unshare": "syscall", + "syscall.UserInfo10": "syscall", + "syscall.Ustat": "syscall", + "syscall.Ustat_t": "syscall", + "syscall.Utimbuf": "syscall", + "syscall.Utime": "syscall", + "syscall.Utimes": "syscall", + "syscall.UtimesNano": "syscall", + "syscall.Utsname": "syscall", + "syscall.VDISCARD": "syscall", + "syscall.VDSUSP": "syscall", + "syscall.VEOF": "syscall", + "syscall.VEOL": "syscall", + "syscall.VEOL2": "syscall", + "syscall.VERASE": "syscall", + "syscall.VERASE2": "syscall", + "syscall.VINTR": "syscall", + "syscall.VKILL": "syscall", + "syscall.VLNEXT": "syscall", + "syscall.VMIN": "syscall", + "syscall.VQUIT": "syscall", + "syscall.VREPRINT": "syscall", + "syscall.VSTART": "syscall", + "syscall.VSTATUS": "syscall", + "syscall.VSTOP": "syscall", + "syscall.VSUSP": "syscall", + "syscall.VSWTC": "syscall", + "syscall.VT0": "syscall", + "syscall.VT1": "syscall", + "syscall.VTDLY": "syscall", + "syscall.VTIME": "syscall", + "syscall.VWERASE": "syscall", + "syscall.VirtualLock": "syscall", + "syscall.VirtualUnlock": "syscall", + "syscall.WAIT_ABANDONED": "syscall", + "syscall.WAIT_FAILED": "syscall", + "syscall.WAIT_OBJECT_0": "syscall", + "syscall.WAIT_TIMEOUT": "syscall", + "syscall.WALL": "syscall", + "syscall.WALLSIG": "syscall", + "syscall.WALTSIG": "syscall", + "syscall.WCLONE": "syscall", + "syscall.WCONTINUED": "syscall", + "syscall.WCOREFLAG": "syscall", + "syscall.WEXITED": "syscall", + "syscall.WLINUXCLONE": "syscall", + "syscall.WNOHANG": "syscall", + "syscall.WNOTHREAD": "syscall", + 
"syscall.WNOWAIT": "syscall", + "syscall.WNOZOMBIE": "syscall", + "syscall.WOPTSCHECKED": "syscall", + "syscall.WORDSIZE": "syscall", + "syscall.WSABuf": "syscall", + "syscall.WSACleanup": "syscall", + "syscall.WSADESCRIPTION_LEN": "syscall", + "syscall.WSAData": "syscall", + "syscall.WSAEACCES": "syscall", + "syscall.WSAECONNABORTED": "syscall", + "syscall.WSAECONNRESET": "syscall", + "syscall.WSAEnumProtocols": "syscall", + "syscall.WSAID_CONNECTEX": "syscall", + "syscall.WSAIoctl": "syscall", + "syscall.WSAPROTOCOL_LEN": "syscall", + "syscall.WSAProtocolChain": "syscall", + "syscall.WSAProtocolInfo": "syscall", + "syscall.WSARecv": "syscall", + "syscall.WSARecvFrom": "syscall", + "syscall.WSASYS_STATUS_LEN": "syscall", + "syscall.WSASend": "syscall", + "syscall.WSASendTo": "syscall", + "syscall.WSASendto": "syscall", + "syscall.WSAStartup": "syscall", + "syscall.WSTOPPED": "syscall", + "syscall.WTRAPPED": "syscall", + "syscall.WUNTRACED": "syscall", + "syscall.Wait4": "syscall", + "syscall.WaitForSingleObject": "syscall", + "syscall.WaitStatus": "syscall", + "syscall.Win32FileAttributeData": "syscall", + "syscall.Win32finddata": "syscall", + "syscall.Write": "syscall", + "syscall.WriteConsole": "syscall", + "syscall.WriteFile": "syscall", + "syscall.X509_ASN_ENCODING": "syscall", + "syscall.XCASE": "syscall", + "syscall.XP1_CONNECTIONLESS": "syscall", + "syscall.XP1_CONNECT_DATA": "syscall", + "syscall.XP1_DISCONNECT_DATA": "syscall", + "syscall.XP1_EXPEDITED_DATA": "syscall", + "syscall.XP1_GRACEFUL_CLOSE": "syscall", + "syscall.XP1_GUARANTEED_DELIVERY": "syscall", + "syscall.XP1_GUARANTEED_ORDER": "syscall", + "syscall.XP1_IFS_HANDLES": "syscall", + "syscall.XP1_MESSAGE_ORIENTED": "syscall", + "syscall.XP1_MULTIPOINT_CONTROL_PLANE": "syscall", + "syscall.XP1_MULTIPOINT_DATA_PLANE": "syscall", + "syscall.XP1_PARTIAL_MESSAGE": "syscall", + "syscall.XP1_PSEUDO_STREAM": "syscall", + "syscall.XP1_QOS_SUPPORTED": "syscall", + "syscall.XP1_SAN_SUPPORT_SDP": "syscall", + "syscall.XP1_SUPPORT_BROADCAST": "syscall", + "syscall.XP1_SUPPORT_MULTIPOINT": "syscall", + "syscall.XP1_UNI_RECV": "syscall", + "syscall.XP1_UNI_SEND": "syscall", + "syslog.Dial": "log/syslog", + "syslog.LOG_ALERT": "log/syslog", + "syslog.LOG_AUTH": "log/syslog", + "syslog.LOG_AUTHPRIV": "log/syslog", + "syslog.LOG_CRIT": "log/syslog", + "syslog.LOG_CRON": "log/syslog", + "syslog.LOG_DAEMON": "log/syslog", + "syslog.LOG_DEBUG": "log/syslog", + "syslog.LOG_EMERG": "log/syslog", + "syslog.LOG_ERR": "log/syslog", + "syslog.LOG_FTP": "log/syslog", + "syslog.LOG_INFO": "log/syslog", + "syslog.LOG_KERN": "log/syslog", + "syslog.LOG_LOCAL0": "log/syslog", + "syslog.LOG_LOCAL1": "log/syslog", + "syslog.LOG_LOCAL2": "log/syslog", + "syslog.LOG_LOCAL3": "log/syslog", + "syslog.LOG_LOCAL4": "log/syslog", + "syslog.LOG_LOCAL5": "log/syslog", + "syslog.LOG_LOCAL6": "log/syslog", + "syslog.LOG_LOCAL7": "log/syslog", + "syslog.LOG_LPR": "log/syslog", + "syslog.LOG_MAIL": "log/syslog", + "syslog.LOG_NEWS": "log/syslog", + "syslog.LOG_NOTICE": "log/syslog", + "syslog.LOG_SYSLOG": "log/syslog", + "syslog.LOG_USER": "log/syslog", + "syslog.LOG_UUCP": "log/syslog", + "syslog.LOG_WARNING": "log/syslog", + "syslog.New": "log/syslog", + "syslog.NewLogger": "log/syslog", + "syslog.Priority": "log/syslog", + "syslog.Writer": "log/syslog", + "tabwriter.AlignRight": "text/tabwriter", + "tabwriter.Debug": "text/tabwriter", + "tabwriter.DiscardEmptyColumns": "text/tabwriter", + "tabwriter.Escape": "text/tabwriter", + "tabwriter.FilterHTML": 
"text/tabwriter", + "tabwriter.NewWriter": "text/tabwriter", + "tabwriter.StripEscape": "text/tabwriter", + "tabwriter.TabIndent": "text/tabwriter", + "tabwriter.Writer": "text/tabwriter", + "tar.ErrFieldTooLong": "archive/tar", + "tar.ErrHeader": "archive/tar", + "tar.ErrWriteAfterClose": "archive/tar", + "tar.ErrWriteTooLong": "archive/tar", + "tar.FileInfoHeader": "archive/tar", + "tar.Format": "archive/tar", + "tar.FormatGNU": "archive/tar", + "tar.FormatPAX": "archive/tar", + "tar.FormatUSTAR": "archive/tar", + "tar.FormatUnknown": "archive/tar", + "tar.Header": "archive/tar", + "tar.NewReader": "archive/tar", + "tar.NewWriter": "archive/tar", + "tar.Reader": "archive/tar", + "tar.TypeBlock": "archive/tar", + "tar.TypeChar": "archive/tar", + "tar.TypeCont": "archive/tar", + "tar.TypeDir": "archive/tar", + "tar.TypeFifo": "archive/tar", + "tar.TypeGNULongLink": "archive/tar", + "tar.TypeGNULongName": "archive/tar", + "tar.TypeGNUSparse": "archive/tar", + "tar.TypeLink": "archive/tar", + "tar.TypeReg": "archive/tar", + "tar.TypeRegA": "archive/tar", + "tar.TypeSymlink": "archive/tar", + "tar.TypeXGlobalHeader": "archive/tar", + "tar.TypeXHeader": "archive/tar", + "tar.Writer": "archive/tar", + "template.CSS": "html/template", + "template.ErrAmbigContext": "html/template", + "template.ErrBadHTML": "html/template", + "template.ErrBranchEnd": "html/template", + "template.ErrEndContext": "html/template", + "template.ErrNoSuchTemplate": "html/template", + "template.ErrOutputContext": "html/template", + "template.ErrPartialCharset": "html/template", + "template.ErrPartialEscape": "html/template", + "template.ErrPredefinedEscaper": "html/template", + "template.ErrRangeLoopReentry": "html/template", + "template.ErrSlashAmbig": "html/template", + "template.Error": "html/template", + "template.ErrorCode": "html/template", + "template.ExecError": "text/template", + // "template.FuncMap" is ambiguous + "template.HTML": "html/template", + "template.HTMLAttr": "html/template", + // "template.HTMLEscape" is ambiguous + // "template.HTMLEscapeString" is ambiguous + // "template.HTMLEscaper" is ambiguous + // "template.IsTrue" is ambiguous + "template.JS": "html/template", + // "template.JSEscape" is ambiguous + // "template.JSEscapeString" is ambiguous + // "template.JSEscaper" is ambiguous + "template.JSStr": "html/template", + // "template.Must" is ambiguous + // "template.New" is ambiguous + "template.OK": "html/template", + // "template.ParseFiles" is ambiguous + // "template.ParseGlob" is ambiguous + "template.Srcset": "html/template", + // "template.Template" is ambiguous + "template.URL": "html/template", + // "template.URLQueryEscaper" is ambiguous + "testing.AllocsPerRun": "testing", + "testing.B": "testing", + "testing.Benchmark": "testing", + "testing.BenchmarkResult": "testing", + "testing.Cover": "testing", + "testing.CoverBlock": "testing", + "testing.CoverMode": "testing", + "testing.Coverage": "testing", + "testing.InternalBenchmark": "testing", + "testing.InternalExample": "testing", + "testing.InternalTest": "testing", + "testing.M": "testing", + "testing.Main": "testing", + "testing.MainStart": "testing", + "testing.PB": "testing", + "testing.RegisterCover": "testing", + "testing.RunBenchmarks": "testing", + "testing.RunExamples": "testing", + "testing.RunTests": "testing", + "testing.Short": "testing", + "testing.T": "testing", + "testing.Verbose": "testing", + "textproto.CanonicalMIMEHeaderKey": "net/textproto", + "textproto.Conn": "net/textproto", + "textproto.Dial": 
"net/textproto", + "textproto.Error": "net/textproto", + "textproto.MIMEHeader": "net/textproto", + "textproto.NewConn": "net/textproto", + "textproto.NewReader": "net/textproto", + "textproto.NewWriter": "net/textproto", + "textproto.Pipeline": "net/textproto", + "textproto.ProtocolError": "net/textproto", + "textproto.Reader": "net/textproto", + "textproto.TrimBytes": "net/textproto", + "textproto.TrimString": "net/textproto", + "textproto.Writer": "net/textproto", + "time.ANSIC": "time", + "time.After": "time", + "time.AfterFunc": "time", + "time.April": "time", + "time.August": "time", + "time.Date": "time", + "time.December": "time", + "time.Duration": "time", + "time.February": "time", + "time.FixedZone": "time", + "time.Friday": "time", + "time.Hour": "time", + "time.January": "time", + "time.July": "time", + "time.June": "time", + "time.Kitchen": "time", + "time.LoadLocation": "time", + "time.LoadLocationFromTZData": "time", + "time.Local": "time", + "time.Location": "time", + "time.March": "time", + "time.May": "time", + "time.Microsecond": "time", + "time.Millisecond": "time", + "time.Minute": "time", + "time.Monday": "time", + "time.Month": "time", + "time.Nanosecond": "time", + "time.NewTicker": "time", + "time.NewTimer": "time", + "time.November": "time", + "time.Now": "time", + "time.October": "time", + "time.Parse": "time", + "time.ParseDuration": "time", + "time.ParseError": "time", + "time.ParseInLocation": "time", + "time.RFC1123": "time", + "time.RFC1123Z": "time", + "time.RFC3339": "time", + "time.RFC3339Nano": "time", + "time.RFC822": "time", + "time.RFC822Z": "time", + "time.RFC850": "time", + "time.RubyDate": "time", + "time.Saturday": "time", + "time.Second": "time", + "time.September": "time", + "time.Since": "time", + "time.Sleep": "time", + "time.Stamp": "time", + "time.StampMicro": "time", + "time.StampMilli": "time", + "time.StampNano": "time", + "time.Sunday": "time", + "time.Thursday": "time", + "time.Tick": "time", + "time.Ticker": "time", + "time.Time": "time", + "time.Timer": "time", + "time.Tuesday": "time", + "time.UTC": "time", + "time.Unix": "time", + "time.UnixDate": "time", + "time.Until": "time", + "time.Wednesday": "time", + "time.Weekday": "time", + "tls.Certificate": "crypto/tls", + "tls.CertificateRequestInfo": "crypto/tls", + "tls.Client": "crypto/tls", + "tls.ClientAuthType": "crypto/tls", + "tls.ClientHelloInfo": "crypto/tls", + "tls.ClientSessionCache": "crypto/tls", + "tls.ClientSessionState": "crypto/tls", + "tls.Config": "crypto/tls", + "tls.Conn": "crypto/tls", + "tls.ConnectionState": "crypto/tls", + "tls.CurveID": "crypto/tls", + "tls.CurveP256": "crypto/tls", + "tls.CurveP384": "crypto/tls", + "tls.CurveP521": "crypto/tls", + "tls.Dial": "crypto/tls", + "tls.DialWithDialer": "crypto/tls", + "tls.ECDSAWithP256AndSHA256": "crypto/tls", + "tls.ECDSAWithP384AndSHA384": "crypto/tls", + "tls.ECDSAWithP521AndSHA512": "crypto/tls", + "tls.ECDSAWithSHA1": "crypto/tls", + "tls.Listen": "crypto/tls", + "tls.LoadX509KeyPair": "crypto/tls", + "tls.NewLRUClientSessionCache": "crypto/tls", + "tls.NewListener": "crypto/tls", + "tls.NoClientCert": "crypto/tls", + "tls.PKCS1WithSHA1": "crypto/tls", + "tls.PKCS1WithSHA256": "crypto/tls", + "tls.PKCS1WithSHA384": "crypto/tls", + "tls.PKCS1WithSHA512": "crypto/tls", + "tls.PSSWithSHA256": "crypto/tls", + "tls.PSSWithSHA384": "crypto/tls", + "tls.PSSWithSHA512": "crypto/tls", + "tls.RecordHeaderError": "crypto/tls", + "tls.RenegotiateFreelyAsClient": "crypto/tls", + "tls.RenegotiateNever": "crypto/tls", + 
"tls.RenegotiateOnceAsClient": "crypto/tls", + "tls.RenegotiationSupport": "crypto/tls", + "tls.RequestClientCert": "crypto/tls", + "tls.RequireAndVerifyClientCert": "crypto/tls", + "tls.RequireAnyClientCert": "crypto/tls", + "tls.Server": "crypto/tls", + "tls.SignatureScheme": "crypto/tls", + "tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA": "crypto/tls", + "tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": "crypto/tls", + "tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": "crypto/tls", + "tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA": "crypto/tls", + "tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": "crypto/tls", + "tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305": "crypto/tls", + "tls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305": "crypto/tls", + "tls.TLS_ECDHE_RSA_WITH_RC4_128_SHA": "crypto/tls", + "tls.TLS_FALLBACK_SCSV": "crypto/tls", + "tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA": "crypto/tls", + "tls.TLS_RSA_WITH_AES_128_CBC_SHA": "crypto/tls", + "tls.TLS_RSA_WITH_AES_128_CBC_SHA256": "crypto/tls", + "tls.TLS_RSA_WITH_AES_128_GCM_SHA256": "crypto/tls", + "tls.TLS_RSA_WITH_AES_256_CBC_SHA": "crypto/tls", + "tls.TLS_RSA_WITH_AES_256_GCM_SHA384": "crypto/tls", + "tls.TLS_RSA_WITH_RC4_128_SHA": "crypto/tls", + "tls.VerifyClientCertIfGiven": "crypto/tls", + "tls.VersionSSL30": "crypto/tls", + "tls.VersionTLS10": "crypto/tls", + "tls.VersionTLS11": "crypto/tls", + "tls.VersionTLS12": "crypto/tls", + "tls.X25519": "crypto/tls", + "tls.X509KeyPair": "crypto/tls", + "token.ADD": "go/token", + "token.ADD_ASSIGN": "go/token", + "token.AND": "go/token", + "token.AND_ASSIGN": "go/token", + "token.AND_NOT": "go/token", + "token.AND_NOT_ASSIGN": "go/token", + "token.ARROW": "go/token", + "token.ASSIGN": "go/token", + "token.BREAK": "go/token", + "token.CASE": "go/token", + "token.CHAN": "go/token", + "token.CHAR": "go/token", + "token.COLON": "go/token", + "token.COMMA": "go/token", + "token.COMMENT": "go/token", + "token.CONST": "go/token", + "token.CONTINUE": "go/token", + "token.DEC": "go/token", + "token.DEFAULT": "go/token", + "token.DEFER": "go/token", + "token.DEFINE": "go/token", + "token.ELLIPSIS": "go/token", + "token.ELSE": "go/token", + "token.EOF": "go/token", + "token.EQL": "go/token", + "token.FALLTHROUGH": "go/token", + "token.FLOAT": "go/token", + "token.FOR": "go/token", + "token.FUNC": "go/token", + "token.File": "go/token", + "token.FileSet": "go/token", + "token.GEQ": "go/token", + "token.GO": "go/token", + "token.GOTO": "go/token", + "token.GTR": "go/token", + "token.HighestPrec": "go/token", + "token.IDENT": "go/token", + "token.IF": "go/token", + "token.ILLEGAL": "go/token", + "token.IMAG": "go/token", + "token.IMPORT": "go/token", + "token.INC": "go/token", + "token.INT": "go/token", + "token.INTERFACE": "go/token", + "token.LAND": "go/token", + "token.LBRACE": "go/token", + "token.LBRACK": "go/token", + "token.LEQ": "go/token", + "token.LOR": "go/token", + "token.LPAREN": "go/token", + "token.LSS": "go/token", + "token.Lookup": "go/token", + "token.LowestPrec": "go/token", + "token.MAP": "go/token", + "token.MUL": "go/token", + "token.MUL_ASSIGN": "go/token", + "token.NEQ": "go/token", + "token.NOT": 
"go/token", + "token.NewFileSet": "go/token", + "token.NoPos": "go/token", + "token.OR": "go/token", + "token.OR_ASSIGN": "go/token", + "token.PACKAGE": "go/token", + "token.PERIOD": "go/token", + "token.Pos": "go/token", + "token.Position": "go/token", + "token.QUO": "go/token", + "token.QUO_ASSIGN": "go/token", + "token.RANGE": "go/token", + "token.RBRACE": "go/token", + "token.RBRACK": "go/token", + "token.REM": "go/token", + "token.REM_ASSIGN": "go/token", + "token.RETURN": "go/token", + "token.RPAREN": "go/token", + "token.SELECT": "go/token", + "token.SEMICOLON": "go/token", + "token.SHL": "go/token", + "token.SHL_ASSIGN": "go/token", + "token.SHR": "go/token", + "token.SHR_ASSIGN": "go/token", + "token.STRING": "go/token", + "token.STRUCT": "go/token", + "token.SUB": "go/token", + "token.SUB_ASSIGN": "go/token", + "token.SWITCH": "go/token", + "token.TYPE": "go/token", + "token.Token": "go/token", + "token.UnaryPrec": "go/token", + "token.VAR": "go/token", + "token.XOR": "go/token", + "token.XOR_ASSIGN": "go/token", + "trace.Start": "runtime/trace", + "trace.Stop": "runtime/trace", + "types.Array": "go/types", + "types.AssertableTo": "go/types", + "types.AssignableTo": "go/types", + "types.Basic": "go/types", + "types.BasicInfo": "go/types", + "types.BasicKind": "go/types", + "types.Bool": "go/types", + "types.Builtin": "go/types", + "types.Byte": "go/types", + "types.Chan": "go/types", + "types.ChanDir": "go/types", + "types.Checker": "go/types", + "types.Comparable": "go/types", + "types.Complex128": "go/types", + "types.Complex64": "go/types", + "types.Config": "go/types", + "types.Const": "go/types", + "types.ConvertibleTo": "go/types", + "types.DefPredeclaredTestFuncs": "go/types", + "types.Default": "go/types", + "types.Error": "go/types", + "types.Eval": "go/types", + "types.ExprString": "go/types", + "types.FieldVal": "go/types", + "types.Float32": "go/types", + "types.Float64": "go/types", + "types.Func": "go/types", + "types.Id": "go/types", + "types.Identical": "go/types", + "types.IdenticalIgnoreTags": "go/types", + "types.Implements": "go/types", + "types.ImportMode": "go/types", + "types.Importer": "go/types", + "types.ImporterFrom": "go/types", + "types.Info": "go/types", + "types.Initializer": "go/types", + "types.Int": "go/types", + "types.Int16": "go/types", + "types.Int32": "go/types", + "types.Int64": "go/types", + "types.Int8": "go/types", + "types.Interface": "go/types", + "types.Invalid": "go/types", + "types.IsBoolean": "go/types", + "types.IsComplex": "go/types", + "types.IsConstType": "go/types", + "types.IsFloat": "go/types", + "types.IsInteger": "go/types", + "types.IsInterface": "go/types", + "types.IsNumeric": "go/types", + "types.IsOrdered": "go/types", + "types.IsString": "go/types", + "types.IsUnsigned": "go/types", + "types.IsUntyped": "go/types", + "types.Label": "go/types", + "types.LookupFieldOrMethod": "go/types", + "types.Map": "go/types", + "types.MethodExpr": "go/types", + "types.MethodSet": "go/types", + "types.MethodVal": "go/types", + "types.MissingMethod": "go/types", + "types.Named": "go/types", + "types.NewArray": "go/types", + "types.NewChan": "go/types", + "types.NewChecker": "go/types", + "types.NewConst": "go/types", + "types.NewField": "go/types", + "types.NewFunc": "go/types", + "types.NewInterface": "go/types", + "types.NewLabel": "go/types", + "types.NewMap": "go/types", + "types.NewMethodSet": "go/types", + "types.NewNamed": "go/types", + "types.NewPackage": "go/types", + "types.NewParam": "go/types", + "types.NewPkgName": 
"go/types", + "types.NewPointer": "go/types", + "types.NewScope": "go/types", + "types.NewSignature": "go/types", + "types.NewSlice": "go/types", + "types.NewStruct": "go/types", + "types.NewTuple": "go/types", + "types.NewTypeName": "go/types", + "types.NewVar": "go/types", + "types.Nil": "go/types", + "types.ObjectString": "go/types", + "types.Package": "go/types", + "types.PkgName": "go/types", + "types.Pointer": "go/types", + "types.Qualifier": "go/types", + "types.RecvOnly": "go/types", + "types.RelativeTo": "go/types", + "types.Rune": "go/types", + "types.Scope": "go/types", + "types.Selection": "go/types", + "types.SelectionKind": "go/types", + "types.SelectionString": "go/types", + "types.SendOnly": "go/types", + "types.SendRecv": "go/types", + "types.Signature": "go/types", + "types.Sizes": "go/types", + "types.SizesFor": "go/types", + "types.Slice": "go/types", + "types.StdSizes": "go/types", + "types.String": "go/types", + "types.Struct": "go/types", + "types.Tuple": "go/types", + "types.Typ": "go/types", + "types.Type": "go/types", + "types.TypeAndValue": "go/types", + "types.TypeName": "go/types", + "types.TypeString": "go/types", + "types.Uint": "go/types", + "types.Uint16": "go/types", + "types.Uint32": "go/types", + "types.Uint64": "go/types", + "types.Uint8": "go/types", + "types.Uintptr": "go/types", + "types.Universe": "go/types", + "types.Unsafe": "go/types", + "types.UnsafePointer": "go/types", + "types.UntypedBool": "go/types", + "types.UntypedComplex": "go/types", + "types.UntypedFloat": "go/types", + "types.UntypedInt": "go/types", + "types.UntypedNil": "go/types", + "types.UntypedRune": "go/types", + "types.UntypedString": "go/types", + "types.Var": "go/types", + "types.WriteExpr": "go/types", + "types.WriteSignature": "go/types", + "types.WriteType": "go/types", + "unicode.ASCII_Hex_Digit": "unicode", + "unicode.Adlam": "unicode", + "unicode.Ahom": "unicode", + "unicode.Anatolian_Hieroglyphs": "unicode", + "unicode.Arabic": "unicode", + "unicode.Armenian": "unicode", + "unicode.Avestan": "unicode", + "unicode.AzeriCase": "unicode", + "unicode.Balinese": "unicode", + "unicode.Bamum": "unicode", + "unicode.Bassa_Vah": "unicode", + "unicode.Batak": "unicode", + "unicode.Bengali": "unicode", + "unicode.Bhaiksuki": "unicode", + "unicode.Bidi_Control": "unicode", + "unicode.Bopomofo": "unicode", + "unicode.Brahmi": "unicode", + "unicode.Braille": "unicode", + "unicode.Buginese": "unicode", + "unicode.Buhid": "unicode", + "unicode.C": "unicode", + "unicode.Canadian_Aboriginal": "unicode", + "unicode.Carian": "unicode", + "unicode.CaseRange": "unicode", + "unicode.CaseRanges": "unicode", + "unicode.Categories": "unicode", + "unicode.Caucasian_Albanian": "unicode", + "unicode.Cc": "unicode", + "unicode.Cf": "unicode", + "unicode.Chakma": "unicode", + "unicode.Cham": "unicode", + "unicode.Cherokee": "unicode", + "unicode.Co": "unicode", + "unicode.Common": "unicode", + "unicode.Coptic": "unicode", + "unicode.Cs": "unicode", + "unicode.Cuneiform": "unicode", + "unicode.Cypriot": "unicode", + "unicode.Cyrillic": "unicode", + "unicode.Dash": "unicode", + "unicode.Deprecated": "unicode", + "unicode.Deseret": "unicode", + "unicode.Devanagari": "unicode", + "unicode.Diacritic": "unicode", + "unicode.Digit": "unicode", + "unicode.Duployan": "unicode", + "unicode.Egyptian_Hieroglyphs": "unicode", + "unicode.Elbasan": "unicode", + "unicode.Ethiopic": "unicode", + "unicode.Extender": "unicode", + "unicode.FoldCategory": "unicode", + "unicode.FoldScript": "unicode", + 
"unicode.Georgian": "unicode", + "unicode.Glagolitic": "unicode", + "unicode.Gothic": "unicode", + "unicode.Grantha": "unicode", + "unicode.GraphicRanges": "unicode", + "unicode.Greek": "unicode", + "unicode.Gujarati": "unicode", + "unicode.Gurmukhi": "unicode", + "unicode.Han": "unicode", + "unicode.Hangul": "unicode", + "unicode.Hanunoo": "unicode", + "unicode.Hatran": "unicode", + "unicode.Hebrew": "unicode", + "unicode.Hex_Digit": "unicode", + "unicode.Hiragana": "unicode", + "unicode.Hyphen": "unicode", + "unicode.IDS_Binary_Operator": "unicode", + "unicode.IDS_Trinary_Operator": "unicode", + "unicode.Ideographic": "unicode", + "unicode.Imperial_Aramaic": "unicode", + "unicode.In": "unicode", + "unicode.Inherited": "unicode", + "unicode.Inscriptional_Pahlavi": "unicode", + "unicode.Inscriptional_Parthian": "unicode", + "unicode.Is": "unicode", + "unicode.IsControl": "unicode", + "unicode.IsDigit": "unicode", + "unicode.IsGraphic": "unicode", + "unicode.IsLetter": "unicode", + "unicode.IsLower": "unicode", + "unicode.IsMark": "unicode", + "unicode.IsNumber": "unicode", + "unicode.IsOneOf": "unicode", + "unicode.IsPrint": "unicode", + "unicode.IsPunct": "unicode", + "unicode.IsSpace": "unicode", + "unicode.IsSymbol": "unicode", + "unicode.IsTitle": "unicode", + "unicode.IsUpper": "unicode", + "unicode.Javanese": "unicode", + "unicode.Join_Control": "unicode", + "unicode.Kaithi": "unicode", + "unicode.Kannada": "unicode", + "unicode.Katakana": "unicode", + "unicode.Kayah_Li": "unicode", + "unicode.Kharoshthi": "unicode", + "unicode.Khmer": "unicode", + "unicode.Khojki": "unicode", + "unicode.Khudawadi": "unicode", + "unicode.L": "unicode", + "unicode.Lao": "unicode", + "unicode.Latin": "unicode", + "unicode.Lepcha": "unicode", + "unicode.Letter": "unicode", + "unicode.Limbu": "unicode", + "unicode.Linear_A": "unicode", + "unicode.Linear_B": "unicode", + "unicode.Lisu": "unicode", + "unicode.Ll": "unicode", + "unicode.Lm": "unicode", + "unicode.Lo": "unicode", + "unicode.Logical_Order_Exception": "unicode", + "unicode.Lower": "unicode", + "unicode.LowerCase": "unicode", + "unicode.Lt": "unicode", + "unicode.Lu": "unicode", + "unicode.Lycian": "unicode", + "unicode.Lydian": "unicode", + "unicode.M": "unicode", + "unicode.Mahajani": "unicode", + "unicode.Malayalam": "unicode", + "unicode.Mandaic": "unicode", + "unicode.Manichaean": "unicode", + "unicode.Marchen": "unicode", + "unicode.Mark": "unicode", + "unicode.Masaram_Gondi": "unicode", + "unicode.MaxASCII": "unicode", + "unicode.MaxCase": "unicode", + "unicode.MaxLatin1": "unicode", + "unicode.MaxRune": "unicode", + "unicode.Mc": "unicode", + "unicode.Me": "unicode", + "unicode.Meetei_Mayek": "unicode", + "unicode.Mende_Kikakui": "unicode", + "unicode.Meroitic_Cursive": "unicode", + "unicode.Meroitic_Hieroglyphs": "unicode", + "unicode.Miao": "unicode", + "unicode.Mn": "unicode", + "unicode.Modi": "unicode", + "unicode.Mongolian": "unicode", + "unicode.Mro": "unicode", + "unicode.Multani": "unicode", + "unicode.Myanmar": "unicode", + "unicode.N": "unicode", + "unicode.Nabataean": "unicode", + "unicode.Nd": "unicode", + "unicode.New_Tai_Lue": "unicode", + "unicode.Newa": "unicode", + "unicode.Nko": "unicode", + "unicode.Nl": "unicode", + "unicode.No": "unicode", + "unicode.Noncharacter_Code_Point": "unicode", + "unicode.Number": "unicode", + "unicode.Nushu": "unicode", + "unicode.Ogham": "unicode", + "unicode.Ol_Chiki": "unicode", + "unicode.Old_Hungarian": "unicode", + "unicode.Old_Italic": "unicode", + "unicode.Old_North_Arabian": 
"unicode", + "unicode.Old_Permic": "unicode", + "unicode.Old_Persian": "unicode", + "unicode.Old_South_Arabian": "unicode", + "unicode.Old_Turkic": "unicode", + "unicode.Oriya": "unicode", + "unicode.Osage": "unicode", + "unicode.Osmanya": "unicode", + "unicode.Other": "unicode", + "unicode.Other_Alphabetic": "unicode", + "unicode.Other_Default_Ignorable_Code_Point": "unicode", + "unicode.Other_Grapheme_Extend": "unicode", + "unicode.Other_ID_Continue": "unicode", + "unicode.Other_ID_Start": "unicode", + "unicode.Other_Lowercase": "unicode", + "unicode.Other_Math": "unicode", + "unicode.Other_Uppercase": "unicode", + "unicode.P": "unicode", + "unicode.Pahawh_Hmong": "unicode", + "unicode.Palmyrene": "unicode", + "unicode.Pattern_Syntax": "unicode", + "unicode.Pattern_White_Space": "unicode", + "unicode.Pau_Cin_Hau": "unicode", + "unicode.Pc": "unicode", + "unicode.Pd": "unicode", + "unicode.Pe": "unicode", + "unicode.Pf": "unicode", + "unicode.Phags_Pa": "unicode", + "unicode.Phoenician": "unicode", + "unicode.Pi": "unicode", + "unicode.Po": "unicode", + "unicode.Prepended_Concatenation_Mark": "unicode", + "unicode.PrintRanges": "unicode", + "unicode.Properties": "unicode", + "unicode.Ps": "unicode", + "unicode.Psalter_Pahlavi": "unicode", + "unicode.Punct": "unicode", + "unicode.Quotation_Mark": "unicode", + "unicode.Radical": "unicode", + "unicode.Range16": "unicode", + "unicode.Range32": "unicode", + "unicode.RangeTable": "unicode", + "unicode.Regional_Indicator": "unicode", + "unicode.Rejang": "unicode", + "unicode.ReplacementChar": "unicode", + "unicode.Runic": "unicode", + "unicode.S": "unicode", + "unicode.STerm": "unicode", + "unicode.Samaritan": "unicode", + "unicode.Saurashtra": "unicode", + "unicode.Sc": "unicode", + "unicode.Scripts": "unicode", + "unicode.Sentence_Terminal": "unicode", + "unicode.Sharada": "unicode", + "unicode.Shavian": "unicode", + "unicode.Siddham": "unicode", + "unicode.SignWriting": "unicode", + "unicode.SimpleFold": "unicode", + "unicode.Sinhala": "unicode", + "unicode.Sk": "unicode", + "unicode.Sm": "unicode", + "unicode.So": "unicode", + "unicode.Soft_Dotted": "unicode", + "unicode.Sora_Sompeng": "unicode", + "unicode.Soyombo": "unicode", + "unicode.Space": "unicode", + "unicode.SpecialCase": "unicode", + "unicode.Sundanese": "unicode", + "unicode.Syloti_Nagri": "unicode", + "unicode.Symbol": "unicode", + "unicode.Syriac": "unicode", + "unicode.Tagalog": "unicode", + "unicode.Tagbanwa": "unicode", + "unicode.Tai_Le": "unicode", + "unicode.Tai_Tham": "unicode", + "unicode.Tai_Viet": "unicode", + "unicode.Takri": "unicode", + "unicode.Tamil": "unicode", + "unicode.Tangut": "unicode", + "unicode.Telugu": "unicode", + "unicode.Terminal_Punctuation": "unicode", + "unicode.Thaana": "unicode", + "unicode.Thai": "unicode", + "unicode.Tibetan": "unicode", + "unicode.Tifinagh": "unicode", + "unicode.Tirhuta": "unicode", + "unicode.Title": "unicode", + "unicode.TitleCase": "unicode", + "unicode.To": "unicode", + "unicode.ToLower": "unicode", + "unicode.ToTitle": "unicode", + "unicode.ToUpper": "unicode", + "unicode.TurkishCase": "unicode", + "unicode.Ugaritic": "unicode", + "unicode.Unified_Ideograph": "unicode", + "unicode.Upper": "unicode", + "unicode.UpperCase": "unicode", + "unicode.UpperLower": "unicode", + "unicode.Vai": "unicode", + "unicode.Variation_Selector": "unicode", + "unicode.Version": "unicode", + "unicode.Warang_Citi": "unicode", + "unicode.White_Space": "unicode", + "unicode.Yi": "unicode", + "unicode.Z": "unicode", + "unicode.Zanabazar_Square": 
"unicode", + "unicode.Zl": "unicode", + "unicode.Zp": "unicode", + "unicode.Zs": "unicode", + "url.Error": "net/url", + "url.EscapeError": "net/url", + "url.InvalidHostError": "net/url", + "url.Parse": "net/url", + "url.ParseQuery": "net/url", + "url.ParseRequestURI": "net/url", + "url.PathEscape": "net/url", + "url.PathUnescape": "net/url", + "url.QueryEscape": "net/url", + "url.QueryUnescape": "net/url", + "url.URL": "net/url", + "url.User": "net/url", + "url.UserPassword": "net/url", + "url.Userinfo": "net/url", + "url.Values": "net/url", + "user.Current": "os/user", + "user.Group": "os/user", + "user.Lookup": "os/user", + "user.LookupGroup": "os/user", + "user.LookupGroupId": "os/user", + "user.LookupId": "os/user", + "user.UnknownGroupError": "os/user", + "user.UnknownGroupIdError": "os/user", + "user.UnknownUserError": "os/user", + "user.UnknownUserIdError": "os/user", + "user.User": "os/user", + "utf16.Decode": "unicode/utf16", + "utf16.DecodeRune": "unicode/utf16", + "utf16.Encode": "unicode/utf16", + "utf16.EncodeRune": "unicode/utf16", + "utf16.IsSurrogate": "unicode/utf16", + "utf8.DecodeLastRune": "unicode/utf8", + "utf8.DecodeLastRuneInString": "unicode/utf8", + "utf8.DecodeRune": "unicode/utf8", + "utf8.DecodeRuneInString": "unicode/utf8", + "utf8.EncodeRune": "unicode/utf8", + "utf8.FullRune": "unicode/utf8", + "utf8.FullRuneInString": "unicode/utf8", + "utf8.MaxRune": "unicode/utf8", + "utf8.RuneCount": "unicode/utf8", + "utf8.RuneCountInString": "unicode/utf8", + "utf8.RuneError": "unicode/utf8", + "utf8.RuneLen": "unicode/utf8", + "utf8.RuneSelf": "unicode/utf8", + "utf8.RuneStart": "unicode/utf8", + "utf8.UTFMax": "unicode/utf8", + "utf8.Valid": "unicode/utf8", + "utf8.ValidRune": "unicode/utf8", + "utf8.ValidString": "unicode/utf8", + "x509.CANotAuthorizedForExtKeyUsage": "crypto/x509", + "x509.CANotAuthorizedForThisName": "crypto/x509", + "x509.CertPool": "crypto/x509", + "x509.Certificate": "crypto/x509", + "x509.CertificateInvalidError": "crypto/x509", + "x509.CertificateRequest": "crypto/x509", + "x509.ConstraintViolationError": "crypto/x509", + "x509.CreateCertificate": "crypto/x509", + "x509.CreateCertificateRequest": "crypto/x509", + "x509.DSA": "crypto/x509", + "x509.DSAWithSHA1": "crypto/x509", + "x509.DSAWithSHA256": "crypto/x509", + "x509.DecryptPEMBlock": "crypto/x509", + "x509.ECDSA": "crypto/x509", + "x509.ECDSAWithSHA1": "crypto/x509", + "x509.ECDSAWithSHA256": "crypto/x509", + "x509.ECDSAWithSHA384": "crypto/x509", + "x509.ECDSAWithSHA512": "crypto/x509", + "x509.EncryptPEMBlock": "crypto/x509", + "x509.ErrUnsupportedAlgorithm": "crypto/x509", + "x509.Expired": "crypto/x509", + "x509.ExtKeyUsage": "crypto/x509", + "x509.ExtKeyUsageAny": "crypto/x509", + "x509.ExtKeyUsageClientAuth": "crypto/x509", + "x509.ExtKeyUsageCodeSigning": "crypto/x509", + "x509.ExtKeyUsageEmailProtection": "crypto/x509", + "x509.ExtKeyUsageIPSECEndSystem": "crypto/x509", + "x509.ExtKeyUsageIPSECTunnel": "crypto/x509", + "x509.ExtKeyUsageIPSECUser": "crypto/x509", + "x509.ExtKeyUsageMicrosoftCommercialCodeSigning": "crypto/x509", + "x509.ExtKeyUsageMicrosoftKernelCodeSigning": "crypto/x509", + "x509.ExtKeyUsageMicrosoftServerGatedCrypto": "crypto/x509", + "x509.ExtKeyUsageNetscapeServerGatedCrypto": "crypto/x509", + "x509.ExtKeyUsageOCSPSigning": "crypto/x509", + "x509.ExtKeyUsageServerAuth": "crypto/x509", + "x509.ExtKeyUsageTimeStamping": "crypto/x509", + "x509.HostnameError": "crypto/x509", + "x509.IncompatibleUsage": "crypto/x509", + "x509.IncorrectPasswordError": 
"crypto/x509", + "x509.InsecureAlgorithmError": "crypto/x509", + "x509.InvalidReason": "crypto/x509", + "x509.IsEncryptedPEMBlock": "crypto/x509", + "x509.KeyUsage": "crypto/x509", + "x509.KeyUsageCRLSign": "crypto/x509", + "x509.KeyUsageCertSign": "crypto/x509", + "x509.KeyUsageContentCommitment": "crypto/x509", + "x509.KeyUsageDataEncipherment": "crypto/x509", + "x509.KeyUsageDecipherOnly": "crypto/x509", + "x509.KeyUsageDigitalSignature": "crypto/x509", + "x509.KeyUsageEncipherOnly": "crypto/x509", + "x509.KeyUsageKeyAgreement": "crypto/x509", + "x509.KeyUsageKeyEncipherment": "crypto/x509", + "x509.MD2WithRSA": "crypto/x509", + "x509.MD5WithRSA": "crypto/x509", + "x509.MarshalECPrivateKey": "crypto/x509", + "x509.MarshalPKCS1PrivateKey": "crypto/x509", + "x509.MarshalPKCS1PublicKey": "crypto/x509", + "x509.MarshalPKCS8PrivateKey": "crypto/x509", + "x509.MarshalPKIXPublicKey": "crypto/x509", + "x509.NameConstraintsWithoutSANs": "crypto/x509", + "x509.NameMismatch": "crypto/x509", + "x509.NewCertPool": "crypto/x509", + "x509.NotAuthorizedToSign": "crypto/x509", + "x509.PEMCipher": "crypto/x509", + "x509.PEMCipher3DES": "crypto/x509", + "x509.PEMCipherAES128": "crypto/x509", + "x509.PEMCipherAES192": "crypto/x509", + "x509.PEMCipherAES256": "crypto/x509", + "x509.PEMCipherDES": "crypto/x509", + "x509.ParseCRL": "crypto/x509", + "x509.ParseCertificate": "crypto/x509", + "x509.ParseCertificateRequest": "crypto/x509", + "x509.ParseCertificates": "crypto/x509", + "x509.ParseDERCRL": "crypto/x509", + "x509.ParseECPrivateKey": "crypto/x509", + "x509.ParsePKCS1PrivateKey": "crypto/x509", + "x509.ParsePKCS1PublicKey": "crypto/x509", + "x509.ParsePKCS8PrivateKey": "crypto/x509", + "x509.ParsePKIXPublicKey": "crypto/x509", + "x509.PublicKeyAlgorithm": "crypto/x509", + "x509.RSA": "crypto/x509", + "x509.SHA1WithRSA": "crypto/x509", + "x509.SHA256WithRSA": "crypto/x509", + "x509.SHA256WithRSAPSS": "crypto/x509", + "x509.SHA384WithRSA": "crypto/x509", + "x509.SHA384WithRSAPSS": "crypto/x509", + "x509.SHA512WithRSA": "crypto/x509", + "x509.SHA512WithRSAPSS": "crypto/x509", + "x509.SignatureAlgorithm": "crypto/x509", + "x509.SystemCertPool": "crypto/x509", + "x509.SystemRootsError": "crypto/x509", + "x509.TooManyConstraints": "crypto/x509", + "x509.TooManyIntermediates": "crypto/x509", + "x509.UnconstrainedName": "crypto/x509", + "x509.UnhandledCriticalExtension": "crypto/x509", + "x509.UnknownAuthorityError": "crypto/x509", + "x509.UnknownPublicKeyAlgorithm": "crypto/x509", + "x509.UnknownSignatureAlgorithm": "crypto/x509", + "x509.VerifyOptions": "crypto/x509", + "xml.Attr": "encoding/xml", + "xml.CharData": "encoding/xml", + "xml.Comment": "encoding/xml", + "xml.CopyToken": "encoding/xml", + "xml.Decoder": "encoding/xml", + "xml.Directive": "encoding/xml", + "xml.Encoder": "encoding/xml", + "xml.EndElement": "encoding/xml", + "xml.Escape": "encoding/xml", + "xml.EscapeText": "encoding/xml", + "xml.HTMLAutoClose": "encoding/xml", + "xml.HTMLEntity": "encoding/xml", + "xml.Header": "encoding/xml", + "xml.Marshal": "encoding/xml", + "xml.MarshalIndent": "encoding/xml", + "xml.Marshaler": "encoding/xml", + "xml.MarshalerAttr": "encoding/xml", + "xml.Name": "encoding/xml", + "xml.NewDecoder": "encoding/xml", + "xml.NewEncoder": "encoding/xml", + "xml.NewTokenDecoder": "encoding/xml", + "xml.ProcInst": "encoding/xml", + "xml.StartElement": "encoding/xml", + "xml.SyntaxError": "encoding/xml", + "xml.TagPathError": "encoding/xml", + "xml.Token": "encoding/xml", + "xml.TokenReader": "encoding/xml", + 
"xml.Unmarshal": "encoding/xml", + "xml.UnmarshalError": "encoding/xml", + "xml.Unmarshaler": "encoding/xml", + "xml.UnmarshalerAttr": "encoding/xml", + "xml.UnsupportedTypeError": "encoding/xml", + "zip.Compressor": "archive/zip", + "zip.Decompressor": "archive/zip", + "zip.Deflate": "archive/zip", + "zip.ErrAlgorithm": "archive/zip", + "zip.ErrChecksum": "archive/zip", + "zip.ErrFormat": "archive/zip", + "zip.File": "archive/zip", + "zip.FileHeader": "archive/zip", + "zip.FileInfoHeader": "archive/zip", + "zip.NewReader": "archive/zip", + "zip.NewWriter": "archive/zip", + "zip.OpenReader": "archive/zip", + "zip.ReadCloser": "archive/zip", + "zip.Reader": "archive/zip", + "zip.RegisterCompressor": "archive/zip", + "zip.RegisterDecompressor": "archive/zip", + "zip.Store": "archive/zip", + "zip.Writer": "archive/zip", + "zlib.BestCompression": "compress/zlib", + "zlib.BestSpeed": "compress/zlib", + "zlib.DefaultCompression": "compress/zlib", + "zlib.ErrChecksum": "compress/zlib", + "zlib.ErrDictionary": "compress/zlib", + "zlib.ErrHeader": "compress/zlib", + "zlib.HuffmanOnly": "compress/zlib", + "zlib.NewReader": "compress/zlib", + "zlib.NewReaderDict": "compress/zlib", + "zlib.NewWriter": "compress/zlib", + "zlib.NewWriterLevel": "compress/zlib", + "zlib.NewWriterLevelDict": "compress/zlib", + "zlib.NoCompression": "compress/zlib", + "zlib.Resetter": "compress/zlib", + "zlib.Writer": "compress/zlib", + + "unsafe.Alignof": "unsafe", + "unsafe.ArbitraryType": "unsafe", + "unsafe.Offsetof": "unsafe", + "unsafe.Pointer": "unsafe", + "unsafe.Sizeof": "unsafe", +} diff --git a/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE b/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE new file mode 100644 index 0000000..1723a22 --- /dev/null +++ b/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2013-2016 Guy Bedford, Luke Hoban, Addy Osmani + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/golang.org/x/tools/third_party/typescript/LICENSE b/vendor/golang.org/x/tools/third_party/typescript/LICENSE new file mode 100644 index 0000000..e7259f8 --- /dev/null +++ b/vendor/golang.org/x/tools/third_party/typescript/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE b/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE new file mode 100644 index 0000000..e648283 --- /dev/null +++ b/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015 The Polymer Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/vendor/honnef.co/go/tools/LICENSE b/vendor/honnef.co/go/tools/LICENSE new file mode 100644 index 0000000..dfd0314 --- /dev/null +++ b/vendor/honnef.co/go/tools/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2016 Dominik Honnef + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/honnef.co/go/tools/callgraph/callgraph.go b/vendor/honnef.co/go/tools/callgraph/callgraph.go new file mode 100644 index 0000000..d93a20a --- /dev/null +++ b/vendor/honnef.co/go/tools/callgraph/callgraph.go @@ -0,0 +1,129 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* + +Package callgraph defines the call graph and various algorithms +and utilities to operate on it. + +A call graph is a labelled directed graph whose nodes represent +functions and whose edge labels represent syntactic function call +sites. The presence of a labelled edge (caller, site, callee) +indicates that caller may call callee at the specified call site. + +A call graph is a multigraph: it may contain multiple edges (caller, +*, callee) connecting the same pair of nodes, so long as the edges +differ by label; this occurs when one function calls another function +from multiple call sites. Also, it may contain multiple edges +(caller, site, *) that differ only by callee; this indicates a +polymorphic call. + +A SOUND call graph is one that overapproximates the dynamic calling +behaviors of the program in all possible executions. One call graph +is more PRECISE than another if it is a smaller overapproximation of +the dynamic behavior. + +All call graphs have a synthetic root node which is responsible for +calling main() and init(). + +Calls to built-in functions (e.g. panic, println) are not represented +in the call graph; they are treated like built-in operators of the +language. 
+ +*/ +package callgraph // import "honnef.co/go/tools/callgraph" + +// TODO(adonovan): add a function to eliminate wrappers from the +// callgraph, preserving topology. +// More generally, we could eliminate "uninteresting" nodes such as +// nodes from packages we don't care about. + +import ( + "fmt" + "go/token" + + "honnef.co/go/tools/ssa" +) + +// A Graph represents a call graph. +// +// A graph may contain nodes that are not reachable from the root. +// If the call graph is sound, such nodes indicate unreachable +// functions. +// +type Graph struct { + Root *Node // the distinguished root node + Nodes map[*ssa.Function]*Node // all nodes by function +} + +// New returns a new Graph with the specified root node. +func New(root *ssa.Function) *Graph { + g := &Graph{Nodes: make(map[*ssa.Function]*Node)} + g.Root = g.CreateNode(root) + return g +} + +// CreateNode returns the Node for fn, creating it if not present. +func (g *Graph) CreateNode(fn *ssa.Function) *Node { + n, ok := g.Nodes[fn] + if !ok { + n = &Node{Func: fn, ID: len(g.Nodes)} + g.Nodes[fn] = n + } + return n +} + +// A Node represents a node in a call graph. +type Node struct { + Func *ssa.Function // the function this node represents + ID int // 0-based sequence number + In []*Edge // unordered set of incoming call edges (n.In[*].Callee == n) + Out []*Edge // unordered set of outgoing call edges (n.Out[*].Caller == n) +} + +func (n *Node) String() string { + return fmt.Sprintf("n%d:%s", n.ID, n.Func) +} + +// A Edge represents an edge in the call graph. +// +// Site is nil for edges originating in synthetic or intrinsic +// functions, e.g. reflect.Call or the root of the call graph. +type Edge struct { + Caller *Node + Site ssa.CallInstruction + Callee *Node +} + +func (e Edge) String() string { + return fmt.Sprintf("%s --> %s", e.Caller, e.Callee) +} + +func (e Edge) Description() string { + var prefix string + switch e.Site.(type) { + case nil: + return "synthetic call" + case *ssa.Go: + prefix = "concurrent " + case *ssa.Defer: + prefix = "deferred " + } + return prefix + e.Site.Common().Description() +} + +func (e Edge) Pos() token.Pos { + if e.Site == nil { + return token.NoPos + } + return e.Site.Pos() +} + +// AddEdge adds the edge (caller, site, callee) to the call graph. +// Elimination of duplicate edges is the caller's responsibility. +func AddEdge(caller *Node, site ssa.CallInstruction, callee *Node) { + e := &Edge{caller, site, callee} + callee.In = append(callee.In, e) + caller.Out = append(caller.Out, e) +} diff --git a/vendor/honnef.co/go/tools/callgraph/static/static.go b/vendor/honnef.co/go/tools/callgraph/static/static.go new file mode 100644 index 0000000..5444e84 --- /dev/null +++ b/vendor/honnef.co/go/tools/callgraph/static/static.go @@ -0,0 +1,35 @@ +// Package static computes the call graph of a Go program containing +// only static call edges. +package static // import "honnef.co/go/tools/callgraph/static" + +import ( + "honnef.co/go/tools/callgraph" + "honnef.co/go/tools/ssa" + "honnef.co/go/tools/ssa/ssautil" +) + +// CallGraph computes the call graph of the specified program +// considering only static calls. +// +func CallGraph(prog *ssa.Program) *callgraph.Graph { + cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph + + // TODO(adonovan): opt: use only a single pass over the ssa.Program. + // TODO(adonovan): opt: this is slower than RTA (perhaps because + // the lower precision means so many edges are allocated)! 
+ for f := range ssautil.AllFunctions(prog) { + fnode := cg.CreateNode(f) + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if site, ok := instr.(ssa.CallInstruction); ok { + if g := site.Common().StaticCallee(); g != nil { + gnode := cg.CreateNode(g) + callgraph.AddEdge(fnode, site, gnode) + } + } + } + } + } + + return cg +} diff --git a/vendor/honnef.co/go/tools/callgraph/util.go b/vendor/honnef.co/go/tools/callgraph/util.go new file mode 100644 index 0000000..7aeda96 --- /dev/null +++ b/vendor/honnef.co/go/tools/callgraph/util.go @@ -0,0 +1,181 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package callgraph + +import "honnef.co/go/tools/ssa" + +// This file provides various utilities over call graphs, such as +// visitation and path search. + +// CalleesOf returns a new set containing all direct callees of the +// caller node. +// +func CalleesOf(caller *Node) map[*Node]bool { + callees := make(map[*Node]bool) + for _, e := range caller.Out { + callees[e.Callee] = true + } + return callees +} + +// GraphVisitEdges visits all the edges in graph g in depth-first order. +// The edge function is called for each edge in postorder. If it +// returns non-nil, visitation stops and GraphVisitEdges returns that +// value. +// +func GraphVisitEdges(g *Graph, edge func(*Edge) error) error { + seen := make(map[*Node]bool) + var visit func(n *Node) error + visit = func(n *Node) error { + if !seen[n] { + seen[n] = true + for _, e := range n.Out { + if err := visit(e.Callee); err != nil { + return err + } + if err := edge(e); err != nil { + return err + } + } + } + return nil + } + for _, n := range g.Nodes { + if err := visit(n); err != nil { + return err + } + } + return nil +} + +// PathSearch finds an arbitrary path starting at node start and +// ending at some node for which isEnd() returns true. On success, +// PathSearch returns the path as an ordered list of edges; on +// failure, it returns nil. +// +func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge { + stack := make([]*Edge, 0, 32) + seen := make(map[*Node]bool) + var search func(n *Node) []*Edge + search = func(n *Node) []*Edge { + if !seen[n] { + seen[n] = true + if isEnd(n) { + return stack + } + for _, e := range n.Out { + stack = append(stack, e) // push + if found := search(e.Callee); found != nil { + return found + } + stack = stack[:len(stack)-1] // pop + } + } + return nil + } + return search(start) +} + +// DeleteSyntheticNodes removes from call graph g all nodes for +// synthetic functions (except g.Root and package initializers), +// preserving the topology. In effect, calls to synthetic wrappers +// are "inlined". +// +func (g *Graph) DeleteSyntheticNodes() { + // Measurements on the standard library and go.tools show that + // resulting graph has ~15% fewer nodes and 4-8% fewer edges + // than the input. + // + // Inlining a wrapper of in-degree m, out-degree n adds m*n + // and removes m+n edges. Since most wrappers are monomorphic + // (n=1) this results in a slight reduction. Polymorphic + // wrappers (n>1), e.g. from embedding an interface value + // inside a struct to satisfy some interface, cause an + // increase in the graph, but they seem to be uncommon. + + // Hash all existing edges to avoid creating duplicates. 
+ edges := make(map[Edge]bool) + for _, cgn := range g.Nodes { + for _, e := range cgn.Out { + edges[*e] = true + } + } + for fn, cgn := range g.Nodes { + if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) { + continue // keep + } + for _, eIn := range cgn.In { + for _, eOut := range cgn.Out { + newEdge := Edge{eIn.Caller, eIn.Site, eOut.Callee} + if edges[newEdge] { + continue // don't add duplicate + } + AddEdge(eIn.Caller, eIn.Site, eOut.Callee) + edges[newEdge] = true + } + } + g.DeleteNode(cgn) + } +} + +func isInit(fn *ssa.Function) bool { + return fn.Pkg != nil && fn.Pkg.Func("init") == fn +} + +// DeleteNode removes node n and its edges from the graph g. +// (NB: not efficient for batch deletion.) +func (g *Graph) DeleteNode(n *Node) { + n.deleteIns() + n.deleteOuts() + delete(g.Nodes, n.Func) +} + +// deleteIns deletes all incoming edges to n. +func (n *Node) deleteIns() { + for _, e := range n.In { + removeOutEdge(e) + } + n.In = nil +} + +// deleteOuts deletes all outgoing edges from n. +func (n *Node) deleteOuts() { + for _, e := range n.Out { + removeInEdge(e) + } + n.Out = nil +} + +// removeOutEdge removes edge.Caller's outgoing edge 'edge'. +func removeOutEdge(edge *Edge) { + caller := edge.Caller + n := len(caller.Out) + for i, e := range caller.Out { + if e == edge { + // Replace it with the final element and shrink the slice. + caller.Out[i] = caller.Out[n-1] + caller.Out[n-1] = nil // aid GC + caller.Out = caller.Out[:n-1] + return + } + } + panic("edge not found: " + edge.String()) +} + +// removeInEdge removes edge.Callee's incoming edge 'edge'. +func removeInEdge(edge *Edge) { + caller := edge.Callee + n := len(caller.In) + for i, e := range caller.In { + if e == edge { + // Replace it with the final element and shrink the slice. + caller.In[i] = caller.In[n-1] + caller.In[n-1] = nil // aid GC + caller.In = caller.In[:n-1] + return + } + } + panic("edge not found: " + edge.String()) +} diff --git a/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go b/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go new file mode 100644 index 0000000..4c0b97c --- /dev/null +++ b/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go @@ -0,0 +1,122 @@ +// megacheck runs staticcheck, gosimple and unused. 
+package main // import "honnef.co/go/tools/cmd/megacheck" + +import ( + "os" + + "honnef.co/go/tools/lint/lintutil" + "honnef.co/go/tools/simple" + "honnef.co/go/tools/staticcheck" + "honnef.co/go/tools/unused" +) + +func main() { + var flags struct { + staticcheck struct { + enabled bool + generated bool + exitNonZero bool + } + gosimple struct { + enabled bool + generated bool + exitNonZero bool + } + unused struct { + enabled bool + constants bool + fields bool + functions bool + types bool + variables bool + debug string + wholeProgram bool + reflection bool + exitNonZero bool + } + } + fs := lintutil.FlagSet("megacheck") + fs.BoolVar(&flags.gosimple.enabled, + "simple.enabled", true, "Run gosimple") + fs.BoolVar(&flags.gosimple.generated, + "simple.generated", false, "Check generated code") + fs.BoolVar(&flags.gosimple.exitNonZero, + "simple.exit-non-zero", false, "Exit non-zero if any problems were found") + + fs.BoolVar(&flags.staticcheck.enabled, + "staticcheck.enabled", true, "Run staticcheck") + fs.BoolVar(&flags.staticcheck.generated, + "staticcheck.generated", false, "Check generated code (only applies to a subset of checks)") + fs.BoolVar(&flags.staticcheck.exitNonZero, + "staticcheck.exit-non-zero", true, "Exit non-zero if any problems were found") + + fs.BoolVar(&flags.unused.enabled, + "unused.enabled", true, "Run unused") + fs.BoolVar(&flags.unused.constants, + "unused.consts", true, "Report unused constants") + fs.BoolVar(&flags.unused.fields, + "unused.fields", true, "Report unused fields") + fs.BoolVar(&flags.unused.functions, + "unused.funcs", true, "Report unused functions and methods") + fs.BoolVar(&flags.unused.types, + "unused.types", true, "Report unused types") + fs.BoolVar(&flags.unused.variables, + "unused.vars", true, "Report unused variables") + fs.BoolVar(&flags.unused.wholeProgram, + "unused.exported", false, "Treat arguments as a program and report unused exported identifiers") + fs.BoolVar(&flags.unused.reflection, + "unused.reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") + fs.BoolVar(&flags.unused.exitNonZero, + "unused.exit-non-zero", true, "Exit non-zero if any problems were found") + + fs.Parse(os.Args[1:]) + + var checkers []lintutil.CheckerConfig + + if flags.staticcheck.enabled { + sac := staticcheck.NewChecker() + sac.CheckGenerated = flags.staticcheck.generated + checkers = append(checkers, lintutil.CheckerConfig{ + Checker: sac, + ExitNonZero: flags.staticcheck.exitNonZero, + }) + } + + if flags.gosimple.enabled { + sc := simple.NewChecker() + sc.CheckGenerated = flags.gosimple.generated + checkers = append(checkers, lintutil.CheckerConfig{ + Checker: sc, + ExitNonZero: flags.gosimple.exitNonZero, + }) + } + + if flags.unused.enabled { + var mode unused.CheckMode + if flags.unused.constants { + mode |= unused.CheckConstants + } + if flags.unused.fields { + mode |= unused.CheckFields + } + if flags.unused.functions { + mode |= unused.CheckFunctions + } + if flags.unused.types { + mode |= unused.CheckTypes + } + if flags.unused.variables { + mode |= unused.CheckVariables + } + uc := unused.NewChecker(mode) + uc.WholeProgram = flags.unused.wholeProgram + uc.ConsiderReflection = flags.unused.reflection + checkers = append(checkers, lintutil.CheckerConfig{ + Checker: unused.NewLintChecker(uc), + ExitNonZero: flags.unused.exitNonZero, + }) + + } + + lintutil.ProcessFlagSet(checkers, fs) +} diff --git a/vendor/honnef.co/go/tools/deprecated/stdlib.go b/vendor/honnef.co/go/tools/deprecated/stdlib.go 
new file mode 100644 index 0000000..b6b217c --- /dev/null +++ b/vendor/honnef.co/go/tools/deprecated/stdlib.go @@ -0,0 +1,54 @@ +package deprecated + +type Deprecation struct { + DeprecatedSince int + AlternativeAvailableSince int +} + +var Stdlib = map[string]Deprecation{ + "image/jpeg.Reader": {4, 0}, + // FIXME(dh): AllowBinary isn't being detected as deprecated + // because the comment has a newline right after "Deprecated:" + "go/build.AllowBinary": {7, 7}, + "(archive/zip.FileHeader).CompressedSize": {1, 1}, + "(archive/zip.FileHeader).UncompressedSize": {1, 1}, + "(go/doc.Package).Bugs": {1, 1}, + "os.SEEK_SET": {7, 7}, + "os.SEEK_CUR": {7, 7}, + "os.SEEK_END": {7, 7}, + "(net.Dialer).Cancel": {7, 7}, + "runtime.CPUProfile": {9, 0}, + "compress/flate.ReadError": {6, 6}, + "compress/flate.WriteError": {6, 6}, + "path/filepath.HasPrefix": {0, 0}, + "(net/http.Transport).Dial": {7, 7}, + "(*net/http.Transport).CancelRequest": {6, 5}, + "net/http.ErrWriteAfterFlush": {7, 0}, + "net/http.ErrHeaderTooLong": {8, 0}, + "net/http.ErrShortBody": {8, 0}, + "net/http.ErrMissingContentLength": {8, 0}, + "net/http/httputil.ErrPersistEOF": {0, 0}, + "net/http/httputil.ErrClosed": {0, 0}, + "net/http/httputil.ErrPipeline": {0, 0}, + "net/http/httputil.ServerConn": {0, 0}, + "net/http/httputil.NewServerConn": {0, 0}, + "net/http/httputil.ClientConn": {0, 0}, + "net/http/httputil.NewClientConn": {0, 0}, + "net/http/httputil.NewProxyClientConn": {0, 0}, + "(net/http.Request).Cancel": {7, 7}, + "(text/template/parse.PipeNode).Line": {1, 1}, + "(text/template/parse.ActionNode).Line": {1, 1}, + "(text/template/parse.BranchNode).Line": {1, 1}, + "(text/template/parse.TemplateNode).Line": {1, 1}, + "database/sql/driver.ColumnConverter": {9, 9}, + "database/sql/driver.Execer": {8, 8}, + "database/sql/driver.Queryer": {8, 8}, + "(database/sql/driver.Conn).Begin": {8, 8}, + "(database/sql/driver.Stmt).Exec": {8, 8}, + "(database/sql/driver.Stmt).Query": {8, 8}, + "syscall.StringByteSlice": {1, 1}, + "syscall.StringBytePtr": {1, 1}, + "syscall.StringSlicePtr": {1, 1}, + "syscall.StringToUTF16": {1, 1}, + "syscall.StringToUTF16Ptr": {1, 1}, +} diff --git a/vendor/honnef.co/go/tools/functions/concrete.go b/vendor/honnef.co/go/tools/functions/concrete.go new file mode 100644 index 0000000..932acd0 --- /dev/null +++ b/vendor/honnef.co/go/tools/functions/concrete.go @@ -0,0 +1,56 @@ +package functions + +import ( + "go/token" + "go/types" + + "honnef.co/go/tools/ssa" +) + +func concreteReturnTypes(fn *ssa.Function) []*types.Tuple { + res := fn.Signature.Results() + if res == nil { + return nil + } + ifaces := make([]bool, res.Len()) + any := false + for i := 0; i < res.Len(); i++ { + _, ifaces[i] = res.At(i).Type().Underlying().(*types.Interface) + any = any || ifaces[i] + } + if !any { + return []*types.Tuple{res} + } + var out []*types.Tuple + for _, block := range fn.Blocks { + if len(block.Instrs) == 0 { + continue + } + ret, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return) + if !ok { + continue + } + vars := make([]*types.Var, res.Len()) + for i, v := range ret.Results { + var typ types.Type + if !ifaces[i] { + typ = res.At(i).Type() + } else if mi, ok := v.(*ssa.MakeInterface); ok { + // TODO(dh): if mi.X is a function call that returns + // an interface, call concreteReturnTypes on that + // function (or, really, go through Descriptions, + // avoid infinite recursion etc, just like nil error + // detection) + + // TODO(dh): support Phi nodes + typ = mi.X.Type() + } else { + typ = res.At(i).Type() + } + 
vars[i] = types.NewParam(token.NoPos, nil, "", typ) + } + out = append(out, types.NewTuple(vars...)) + } + // TODO(dh): deduplicate out + return out +} diff --git a/vendor/honnef.co/go/tools/functions/functions.go b/vendor/honnef.co/go/tools/functions/functions.go new file mode 100644 index 0000000..c5fe2d7 --- /dev/null +++ b/vendor/honnef.co/go/tools/functions/functions.go @@ -0,0 +1,150 @@ +package functions + +import ( + "go/types" + "sync" + + "honnef.co/go/tools/callgraph" + "honnef.co/go/tools/callgraph/static" + "honnef.co/go/tools/ssa" + "honnef.co/go/tools/staticcheck/vrp" +) + +var stdlibDescs = map[string]Description{ + "errors.New": Description{Pure: true}, + + "fmt.Errorf": Description{Pure: true}, + "fmt.Sprintf": Description{Pure: true}, + "fmt.Sprint": Description{Pure: true}, + + "sort.Reverse": Description{Pure: true}, + + "strings.Map": Description{Pure: true}, + "strings.Repeat": Description{Pure: true}, + "strings.Replace": Description{Pure: true}, + "strings.Title": Description{Pure: true}, + "strings.ToLower": Description{Pure: true}, + "strings.ToLowerSpecial": Description{Pure: true}, + "strings.ToTitle": Description{Pure: true}, + "strings.ToTitleSpecial": Description{Pure: true}, + "strings.ToUpper": Description{Pure: true}, + "strings.ToUpperSpecial": Description{Pure: true}, + "strings.Trim": Description{Pure: true}, + "strings.TrimFunc": Description{Pure: true}, + "strings.TrimLeft": Description{Pure: true}, + "strings.TrimLeftFunc": Description{Pure: true}, + "strings.TrimPrefix": Description{Pure: true}, + "strings.TrimRight": Description{Pure: true}, + "strings.TrimRightFunc": Description{Pure: true}, + "strings.TrimSpace": Description{Pure: true}, + "strings.TrimSuffix": Description{Pure: true}, + + "(*net/http.Request).WithContext": Description{Pure: true}, + + "math/rand.Read": Description{NilError: true}, + "(*math/rand.Rand).Read": Description{NilError: true}, +} + +type Description struct { + // The function is known to be pure + Pure bool + // The function is known to be a stub + Stub bool + // The function is known to never return (panics notwithstanding) + Infinite bool + // Variable ranges + Ranges vrp.Ranges + Loops []Loop + // Function returns an error as its last argument, but it is + // always nil + NilError bool + ConcreteReturnTypes []*types.Tuple +} + +type descriptionEntry struct { + ready chan struct{} + result Description +} + +type Descriptions struct { + CallGraph *callgraph.Graph + mu sync.Mutex + cache map[*ssa.Function]*descriptionEntry +} + +func NewDescriptions(prog *ssa.Program) *Descriptions { + return &Descriptions{ + CallGraph: static.CallGraph(prog), + cache: map[*ssa.Function]*descriptionEntry{}, + } +} + +func (d *Descriptions) Get(fn *ssa.Function) Description { + d.mu.Lock() + fd := d.cache[fn] + if fd == nil { + fd = &descriptionEntry{ + ready: make(chan struct{}), + } + d.cache[fn] = fd + d.mu.Unlock() + + { + fd.result = stdlibDescs[fn.RelString(nil)] + fd.result.Pure = fd.result.Pure || d.IsPure(fn) + fd.result.Stub = fd.result.Stub || d.IsStub(fn) + fd.result.Infinite = fd.result.Infinite || !terminates(fn) + fd.result.Ranges = vrp.BuildGraph(fn).Solve() + fd.result.Loops = findLoops(fn) + fd.result.NilError = fd.result.NilError || IsNilError(fn) + fd.result.ConcreteReturnTypes = concreteReturnTypes(fn) + } + + close(fd.ready) + } else { + d.mu.Unlock() + <-fd.ready + } + return fd.result +} + +func IsNilError(fn *ssa.Function) bool { + // TODO(dh): This is very simplistic, as we only look for constant + // nil 
returns. A more advanced approach would work transitively. + // An even more advanced approach would be context-aware and + // determine nil errors based on inputs (e.g. io.WriteString to a + // bytes.Buffer will always return nil, but an io.WriteString to + // an os.File might not). Similarly, an os.File opened for reading + // won't error on Close, but other files will. + res := fn.Signature.Results() + if res.Len() == 0 { + return false + } + last := res.At(res.Len() - 1) + if types.TypeString(last.Type(), nil) != "error" { + return false + } + + if fn.Blocks == nil { + return false + } + for _, block := range fn.Blocks { + if len(block.Instrs) == 0 { + continue + } + ins := block.Instrs[len(block.Instrs)-1] + ret, ok := ins.(*ssa.Return) + if !ok { + continue + } + v := ret.Results[len(ret.Results)-1] + c, ok := v.(*ssa.Const) + if !ok { + return false + } + if !c.IsNil() { + return false + } + } + return true +} diff --git a/vendor/honnef.co/go/tools/functions/loops.go b/vendor/honnef.co/go/tools/functions/loops.go new file mode 100644 index 0000000..63011cf --- /dev/null +++ b/vendor/honnef.co/go/tools/functions/loops.go @@ -0,0 +1,50 @@ +package functions + +import "honnef.co/go/tools/ssa" + +type Loop map[*ssa.BasicBlock]bool + +func findLoops(fn *ssa.Function) []Loop { + if fn.Blocks == nil { + return nil + } + tree := fn.DomPreorder() + var sets []Loop + for _, h := range tree { + for _, n := range h.Preds { + if !h.Dominates(n) { + continue + } + // n is a back-edge to h + // h is the loop header + if n == h { + sets = append(sets, Loop{n: true}) + continue + } + set := Loop{h: true, n: true} + for _, b := range allPredsBut(n, h, nil) { + set[b] = true + } + sets = append(sets, set) + } + } + return sets +} + +func allPredsBut(b, but *ssa.BasicBlock, list []*ssa.BasicBlock) []*ssa.BasicBlock { +outer: + for _, pred := range b.Preds { + if pred == but { + continue + } + for _, p := range list { + // TODO improve big-o complexity of this function + if pred == p { + continue outer + } + } + list = append(list, pred) + list = allPredsBut(pred, but, list) + } + return list +} diff --git a/vendor/honnef.co/go/tools/functions/pure.go b/vendor/honnef.co/go/tools/functions/pure.go new file mode 100644 index 0000000..d1c4d03 --- /dev/null +++ b/vendor/honnef.co/go/tools/functions/pure.go @@ -0,0 +1,123 @@ +package functions + +import ( + "go/token" + "go/types" + + "honnef.co/go/tools/callgraph" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/ssa" +) + +// IsStub reports whether a function is a stub. A function is +// considered a stub if it has no instructions or exactly one +// instruction, which must be either returning only constant values or +// a panic. +func (d *Descriptions) IsStub(fn *ssa.Function) bool { + if len(fn.Blocks) == 0 { + return true + } + if len(fn.Blocks) > 1 { + return false + } + instrs := lint.FilterDebug(fn.Blocks[0].Instrs) + if len(instrs) != 1 { + return false + } + + switch instrs[0].(type) { + case *ssa.Return: + // Since this is the only instruction, the return value must + // be a constant. We consider all constants as stubs, not just + // the zero value. This does not, unfortunately, cover zero + // initialised structs, as these cause additional + // instructions. 
+ return true + case *ssa.Panic: + return true + default: + return false + } +} + +func (d *Descriptions) IsPure(fn *ssa.Function) bool { + if fn.Signature.Results().Len() == 0 { + // A function with no return values is empty or is doing some + // work we cannot see (for example because of build tags); + // don't consider it pure. + return false + } + + for _, param := range fn.Params { + if _, ok := param.Type().Underlying().(*types.Basic); !ok { + return false + } + } + + if fn.Blocks == nil { + return false + } + checkCall := func(common *ssa.CallCommon) bool { + if common.IsInvoke() { + return false + } + builtin, ok := common.Value.(*ssa.Builtin) + if !ok { + if common.StaticCallee() != fn { + if common.StaticCallee() == nil { + return false + } + // TODO(dh): ideally, IsPure wouldn't be responsible + // for avoiding infinite recursion, but + // FunctionDescriptions would be. + node := d.CallGraph.CreateNode(common.StaticCallee()) + if callgraph.PathSearch(node, func(other *callgraph.Node) bool { + return other.Func == fn + }) != nil { + return false + } + if !d.Get(common.StaticCallee()).Pure { + return false + } + } + } else { + switch builtin.Name() { + case "len", "cap", "make", "new": + default: + return false + } + } + return true + } + for _, b := range fn.Blocks { + for _, ins := range b.Instrs { + switch ins := ins.(type) { + case *ssa.Call: + if !checkCall(ins.Common()) { + return false + } + case *ssa.Defer: + if !checkCall(&ins.Call) { + return false + } + case *ssa.Select: + return false + case *ssa.Send: + return false + case *ssa.Go: + return false + case *ssa.Panic: + return false + case *ssa.Store: + return false + case *ssa.FieldAddr: + return false + case *ssa.UnOp: + if ins.Op == token.MUL || ins.Op == token.AND { + return false + } + } + } + } + return true +} diff --git a/vendor/honnef.co/go/tools/functions/terminates.go b/vendor/honnef.co/go/tools/functions/terminates.go new file mode 100644 index 0000000..65f9e16 --- /dev/null +++ b/vendor/honnef.co/go/tools/functions/terminates.go @@ -0,0 +1,24 @@ +package functions + +import "honnef.co/go/tools/ssa" + +// terminates reports whether fn is supposed to return, that is if it +// has at least one theoretic path that returns from the function. +// Explicit panics do not count as terminating. +func terminates(fn *ssa.Function) bool { + if fn.Blocks == nil { + // assuming that a function terminates is the conservative + // choice + return true + } + + for _, block := range fn.Blocks { + if len(block.Instrs) == 0 { + continue + } + if _, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return); ok { + return true + } + } + return false +} diff --git a/vendor/honnef.co/go/tools/gcsizes/LICENSE b/vendor/honnef.co/go/tools/gcsizes/LICENSE new file mode 100644 index 0000000..6a66aea --- /dev/null +++ b/vendor/honnef.co/go/tools/gcsizes/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. 
nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go new file mode 100644 index 0000000..5f62fc2 --- /dev/null +++ b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go @@ -0,0 +1,70 @@ +package sharedcheck + +import ( + "go/ast" + "go/types" + + "honnef.co/go/tools/lint" + "honnef.co/go/tools/ssa" +) + +func CheckRangeStringRunes(nodeFns map[ast.Node]*ssa.Function, j *lint.Job) { + fn := func(node ast.Node) bool { + rng, ok := node.(*ast.RangeStmt) + if !ok || !lint.IsBlank(rng.Key) { + return true + } + ssafn := nodeFns[rng] + if ssafn == nil { + return true + } + v, _ := ssafn.ValueForExpr(rng.X) + + // Check that we're converting from string to []rune + val, _ := v.(*ssa.Convert) + if val == nil { + return true + } + Tsrc, ok := val.X.Type().(*types.Basic) + if !ok || Tsrc.Kind() != types.String { + return true + } + Tdst, ok := val.Type().(*types.Slice) + if !ok { + return true + } + TdstElem, ok := Tdst.Elem().(*types.Basic) + if !ok || TdstElem.Kind() != types.Int32 { + return true + } + + // Check that the result of the conversion is only used to + // range over + refs := val.Referrers() + if refs == nil { + return true + } + + // Expect two refs: one for obtaining the length of the slice, + // one for accessing the elements + if len(lint.FilterDebug(*refs)) != 2 { + // TODO(dh): right now, we check that only one place + // refers to our slice. This will miss cases such as + // ranging over the slice twice. Ideally, we'd ensure that + // the slice is only used for ranging over (without + // accessing the key), but that is harder to do because in + // SSA form, ranging over a slice looks like an ordinary + // loop with index increments and slice accesses. We'd + // have to look at the associated AST node to check that + // it's a range statement. + return true + } + + j.Errorf(rng, "should range over string, not []rune(string)") + + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/vendor/honnef.co/go/tools/lint/LICENSE b/vendor/honnef.co/go/tools/lint/LICENSE new file mode 100644 index 0000000..796130a --- /dev/null +++ b/vendor/honnef.co/go/tools/lint/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2013 The Go Authors. All rights reserved. +Copyright (c) 2016 Dominik Honnef. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/lint/lint.go b/vendor/honnef.co/go/tools/lint/lint.go new file mode 100644 index 0000000..75a5198 --- /dev/null +++ b/vendor/honnef.co/go/tools/lint/lint.go @@ -0,0 +1,844 @@ +// Copyright (c) 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file or at +// https://developers.google.com/open-source/licenses/bsd. + +// Package lint provides the foundation for tools like gosimple. +package lint // import "honnef.co/go/tools/lint" + +import ( + "bytes" + "fmt" + "go/ast" + "go/build" + "go/constant" + "go/printer" + "go/token" + "go/types" + "path/filepath" + "runtime" + "sort" + "strings" + "sync" + "unicode" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/loader" + "honnef.co/go/tools/ssa" + "honnef.co/go/tools/ssa/ssautil" +) + +type Job struct { + Program *Program + + checker string + check string + problems []Problem +} + +type Ignore interface { + Match(p Problem) bool +} + +type LineIgnore struct { + File string + Line int + Checks []string + matched bool + pos token.Pos +} + +func (li *LineIgnore) Match(p Problem) bool { + if p.Position.Filename != li.File || p.Position.Line != li.Line { + return false + } + for _, c := range li.Checks { + if m, _ := filepath.Match(c, p.Check); m { + li.matched = true + return true + } + } + return false +} + +func (li *LineIgnore) String() string { + matched := "not matched" + if li.matched { + matched = "matched" + } + return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched) +} + +type FileIgnore struct { + File string + Checks []string +} + +func (fi *FileIgnore) Match(p Problem) bool { + if p.Position.Filename != fi.File { + return false + } + for _, c := range fi.Checks { + if m, _ := filepath.Match(c, p.Check); m { + return true + } + } + return false +} + +type GlobIgnore struct { + Pattern string + Checks []string +} + +func (gi *GlobIgnore) Match(p Problem) bool { + if gi.Pattern != "*" { + pkgpath := p.Package.Path() + if strings.HasSuffix(pkgpath, "_test") { + pkgpath = pkgpath[:len(pkgpath)-len("_test")] + } + name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename)) + if m, _ := filepath.Match(gi.Pattern, name); !m { + return false + } + } + for _, c := range gi.Checks { + if m, _ := filepath.Match(c, 
p.Check); m { + return true + } + } + return false +} + +type Program struct { + SSA *ssa.Program + Prog *loader.Program + // TODO(dh): Rename to InitialPackages? + Packages []*Pkg + InitialFunctions []*ssa.Function + AllFunctions []*ssa.Function + Files []*ast.File + Info *types.Info + GoVersion int + + tokenFileMap map[*token.File]*ast.File + astFileMap map[*ast.File]*Pkg +} + +type Func func(*Job) + +// Problem represents a problem in some source code. +type Problem struct { + pos token.Pos + Position token.Position // position in source file + Text string // the prose that describes the problem + Check string + Checker string + Package *types.Package + Ignored bool +} + +func (p *Problem) String() string { + if p.Check == "" { + return p.Text + } + return fmt.Sprintf("%s (%s)", p.Text, p.Check) +} + +type Checker interface { + Name() string + Prefix() string + Init(*Program) + Funcs() map[string]Func +} + +// A Linter lints Go source code. +type Linter struct { + Checker Checker + Ignores []Ignore + GoVersion int + ReturnIgnored bool + + automaticIgnores []Ignore +} + +func (l *Linter) ignore(p Problem) bool { + ignored := false + for _, ig := range l.automaticIgnores { + // We cannot short-circuit these, as we want to record, for + // each ignore, whether it matched or not. + if ig.Match(p) { + ignored = true + } + } + if ignored { + // no need to execute other ignores if we've already had a + // match. + return true + } + for _, ig := range l.Ignores { + // We can short-circuit here, as we aren't tracking any + // information. + if ig.Match(p) { + return true + } + } + + return false +} + +func (prog *Program) File(node Positioner) *ast.File { + return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())] +} + +func (j *Job) File(node Positioner) *ast.File { + return j.Program.File(node) +} + +// TODO(dh): switch to sort.Slice when Go 1.9 lands. 
+type byPosition struct { + fset *token.FileSet + ps []Problem +} + +func (ps byPosition) Len() int { + return len(ps.ps) +} + +func (ps byPosition) Less(i int, j int) bool { + pi, pj := ps.ps[i].Position, ps.ps[j].Position + + if pi.Filename != pj.Filename { + return pi.Filename < pj.Filename + } + if pi.Line != pj.Line { + return pi.Line < pj.Line + } + if pi.Column != pj.Column { + return pi.Column < pj.Column + } + + return ps.ps[i].Text < ps.ps[j].Text +} + +func (ps byPosition) Swap(i int, j int) { + ps.ps[i], ps.ps[j] = ps.ps[j], ps.ps[i] +} + +func parseDirective(s string) (cmd string, args []string) { + if !strings.HasPrefix(s, "//lint:") { + return "", nil + } + s = strings.TrimPrefix(s, "//lint:") + fields := strings.Split(s, " ") + return fields[0], fields[1:] +} + +func (l *Linter) Lint(lprog *loader.Program, conf *loader.Config) []Problem { + ssaprog := ssautil.CreateProgram(lprog, ssa.GlobalDebug) + ssaprog.Build() + pkgMap := map[*ssa.Package]*Pkg{} + var pkgs []*Pkg + for _, pkginfo := range lprog.InitialPackages() { + ssapkg := ssaprog.Package(pkginfo.Pkg) + var bp *build.Package + if len(pkginfo.Files) != 0 { + path := lprog.Fset.Position(pkginfo.Files[0].Pos()).Filename + dir := filepath.Dir(path) + var err error + ctx := conf.Build + if ctx == nil { + ctx = &build.Default + } + bp, err = ctx.ImportDir(dir, 0) + if err != nil { + // shouldn't happen + } + } + pkg := &Pkg{ + Package: ssapkg, + Info: pkginfo, + BuildPkg: bp, + } + pkgMap[ssapkg] = pkg + pkgs = append(pkgs, pkg) + } + prog := &Program{ + SSA: ssaprog, + Prog: lprog, + Packages: pkgs, + Info: &types.Info{}, + GoVersion: l.GoVersion, + tokenFileMap: map[*token.File]*ast.File{}, + astFileMap: map[*ast.File]*Pkg{}, + } + + initial := map[*types.Package]struct{}{} + for _, pkg := range pkgs { + initial[pkg.Info.Pkg] = struct{}{} + } + for fn := range ssautil.AllFunctions(ssaprog) { + if fn.Pkg == nil { + continue + } + prog.AllFunctions = append(prog.AllFunctions, fn) + if _, ok := initial[fn.Pkg.Pkg]; ok { + prog.InitialFunctions = append(prog.InitialFunctions, fn) + } + } + for _, pkg := range pkgs { + prog.Files = append(prog.Files, pkg.Info.Files...) 
+ + ssapkg := ssaprog.Package(pkg.Info.Pkg) + for _, f := range pkg.Info.Files { + prog.astFileMap[f] = pkgMap[ssapkg] + } + } + + for _, pkginfo := range lprog.AllPackages { + for _, f := range pkginfo.Files { + tf := lprog.Fset.File(f.Pos()) + prog.tokenFileMap[tf] = f + } + } + + var out []Problem + l.automaticIgnores = nil + for _, pkginfo := range lprog.InitialPackages() { + for _, f := range pkginfo.Files { + cm := ast.NewCommentMap(lprog.Fset, f, f.Comments) + for node, cgs := range cm { + for _, cg := range cgs { + for _, c := range cg.List { + if !strings.HasPrefix(c.Text, "//lint:") { + continue + } + cmd, args := parseDirective(c.Text) + switch cmd { + case "ignore", "file-ignore": + if len(args) < 2 { + // FIXME(dh): this causes duplicated warnings when using megacheck + p := Problem{ + pos: c.Pos(), + Position: prog.DisplayPosition(c.Pos()), + Text: "malformed linter directive; missing the required reason field?", + Check: "", + Checker: l.Checker.Name(), + Package: nil, + } + out = append(out, p) + continue + } + default: + // unknown directive, ignore + continue + } + checks := strings.Split(args[0], ",") + pos := prog.DisplayPosition(node.Pos()) + var ig Ignore + switch cmd { + case "ignore": + ig = &LineIgnore{ + File: pos.Filename, + Line: pos.Line, + Checks: checks, + pos: c.Pos(), + } + case "file-ignore": + ig = &FileIgnore{ + File: pos.Filename, + Checks: checks, + } + } + l.automaticIgnores = append(l.automaticIgnores, ig) + } + } + } + } + } + + sizes := struct { + types int + defs int + uses int + implicits int + selections int + scopes int + }{} + for _, pkg := range pkgs { + sizes.types += len(pkg.Info.Info.Types) + sizes.defs += len(pkg.Info.Info.Defs) + sizes.uses += len(pkg.Info.Info.Uses) + sizes.implicits += len(pkg.Info.Info.Implicits) + sizes.selections += len(pkg.Info.Info.Selections) + sizes.scopes += len(pkg.Info.Info.Scopes) + } + prog.Info.Types = make(map[ast.Expr]types.TypeAndValue, sizes.types) + prog.Info.Defs = make(map[*ast.Ident]types.Object, sizes.defs) + prog.Info.Uses = make(map[*ast.Ident]types.Object, sizes.uses) + prog.Info.Implicits = make(map[ast.Node]types.Object, sizes.implicits) + prog.Info.Selections = make(map[*ast.SelectorExpr]*types.Selection, sizes.selections) + prog.Info.Scopes = make(map[ast.Node]*types.Scope, sizes.scopes) + for _, pkg := range pkgs { + for k, v := range pkg.Info.Info.Types { + prog.Info.Types[k] = v + } + for k, v := range pkg.Info.Info.Defs { + prog.Info.Defs[k] = v + } + for k, v := range pkg.Info.Info.Uses { + prog.Info.Uses[k] = v + } + for k, v := range pkg.Info.Info.Implicits { + prog.Info.Implicits[k] = v + } + for k, v := range pkg.Info.Info.Selections { + prog.Info.Selections[k] = v + } + for k, v := range pkg.Info.Info.Scopes { + prog.Info.Scopes[k] = v + } + } + l.Checker.Init(prog) + + funcs := l.Checker.Funcs() + var keys []string + for k := range funcs { + keys = append(keys, k) + } + sort.Strings(keys) + + var jobs []*Job + for _, k := range keys { + j := &Job{ + Program: prog, + checker: l.Checker.Name(), + check: k, + } + jobs = append(jobs, j) + } + wg := &sync.WaitGroup{} + for _, j := range jobs { + wg.Add(1) + go func(j *Job) { + defer wg.Done() + fn := funcs[j.check] + if fn == nil { + return + } + fn(j) + }(j) + } + wg.Wait() + + for _, j := range jobs { + for _, p := range j.problems { + p.Ignored = l.ignore(p) + if l.ReturnIgnored || !p.Ignored { + out = append(out, p) + } + } + } + + for _, ig := range l.automaticIgnores { + ig, ok := ig.(*LineIgnore) + if !ok { + continue + } + if 
ig.matched { + continue + } + for _, c := range ig.Checks { + idx := strings.IndexFunc(c, func(r rune) bool { + return unicode.IsNumber(r) + }) + if idx == -1 { + // malformed check name, backing out + continue + } + if c[:idx] != l.Checker.Prefix() { + // not for this checker + continue + } + p := Problem{ + pos: ig.pos, + Position: prog.DisplayPosition(ig.pos), + Text: "this linter directive didn't match anything; should it be removed?", + Check: "", + Checker: l.Checker.Name(), + Package: nil, + } + out = append(out, p) + } + } + + sort.Sort(byPosition{lprog.Fset, out}) + return out +} + +// Pkg represents a package being linted. +type Pkg struct { + *ssa.Package + Info *loader.PackageInfo + BuildPkg *build.Package +} + +type packager interface { + Package() *ssa.Package +} + +func IsExample(fn *ssa.Function) bool { + if !strings.HasPrefix(fn.Name(), "Example") { + return false + } + f := fn.Prog.Fset.File(fn.Pos()) + if f == nil { + return false + } + return strings.HasSuffix(f.Name(), "_test.go") +} + +func (j *Job) IsInTest(node Positioner) bool { + f := j.Program.SSA.Fset.File(node.Pos()) + return f != nil && strings.HasSuffix(f.Name(), "_test.go") +} + +func (j *Job) IsInMain(node Positioner) bool { + if node, ok := node.(packager); ok { + return node.Package().Pkg.Name() == "main" + } + pkg := j.NodePackage(node) + if pkg == nil { + return false + } + return pkg.Pkg.Name() == "main" +} + +type Positioner interface { + Pos() token.Pos +} + +func (prog *Program) DisplayPosition(p token.Pos) token.Position { + // The //line compiler directive can be used to change the file + // name and line numbers associated with code. This can, for + // example, be used by code generation tools. The most prominent + // example is 'go tool cgo', which uses //line directives to refer + // back to the original source code. + // + // In the context of our linters, we need to treat these + // directives differently depending on context. For cgo files, we + // want to honour the directives, so that line numbers are + // adjusted correctly. For all other files, we want to ignore the + // directives, so that problems are reported at their actual + // position and not, for example, a yacc grammar file. This also + // affects the ignore mechanism, since it operates on the position + // information stored within problems. With this implementation, a + // user will ignore foo.go, not foo.y + + pkg := prog.astFileMap[prog.tokenFileMap[prog.Prog.Fset.File(p)]] + bp := pkg.BuildPkg + adjPos := prog.Prog.Fset.Position(p) + if bp == nil { + // couldn't find the package for some reason (deleted? faulty + // file system?) 
+ return adjPos + } + base := filepath.Base(adjPos.Filename) + for _, f := range bp.CgoFiles { + if f == base { + // this is a cgo file, use the adjusted position + return adjPos + } + } + // not a cgo file, ignore //line directives + return prog.Prog.Fset.PositionFor(p, false) +} + +func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { + tf := j.Program.SSA.Fset.File(n.Pos()) + f := j.Program.tokenFileMap[tf] + pkg := j.Program.astFileMap[f].Pkg + + pos := j.Program.DisplayPosition(n.Pos()) + problem := Problem{ + pos: n.Pos(), + Position: pos, + Text: fmt.Sprintf(format, args...), + Check: j.check, + Checker: j.checker, + Package: pkg, + } + j.problems = append(j.problems, problem) + return &j.problems[len(j.problems)-1] +} + +func (j *Job) Render(x interface{}) string { + fset := j.Program.SSA.Fset + var buf bytes.Buffer + if err := printer.Fprint(&buf, fset, x); err != nil { + panic(err) + } + return buf.String() +} + +func (j *Job) RenderArgs(args []ast.Expr) string { + var ss []string + for _, arg := range args { + ss = append(ss, j.Render(arg)) + } + return strings.Join(ss, ", ") +} + +func IsIdent(expr ast.Expr, ident string) bool { + id, ok := expr.(*ast.Ident) + return ok && id.Name == ident +} + +// isBlank returns whether id is the blank identifier "_". +// If id == nil, the answer is false. +func IsBlank(id ast.Expr) bool { + ident, ok := id.(*ast.Ident) + return ok && ident.Name == "_" +} + +func IsZero(expr ast.Expr) bool { + lit, ok := expr.(*ast.BasicLit) + return ok && lit.Kind == token.INT && lit.Value == "0" +} + +func (j *Job) IsNil(expr ast.Expr) bool { + return j.Program.Info.Types[expr].IsNil() +} + +func (j *Job) BoolConst(expr ast.Expr) bool { + val := j.Program.Info.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() + return constant.BoolVal(val) +} + +func (j *Job) IsBoolConst(expr ast.Expr) bool { + // We explicitly don't support typed bools because more often than + // not, custom bool types are used as binary enums and the + // explicit comparison is desired. 
+ + ident, ok := expr.(*ast.Ident) + if !ok { + return false + } + obj := j.Program.Info.ObjectOf(ident) + c, ok := obj.(*types.Const) + if !ok { + return false + } + basic, ok := c.Type().(*types.Basic) + if !ok { + return false + } + if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool { + return false + } + return true +} + +func (j *Job) ExprToInt(expr ast.Expr) (int64, bool) { + tv := j.Program.Info.Types[expr] + if tv.Value == nil { + return 0, false + } + if tv.Value.Kind() != constant.Int { + return 0, false + } + return constant.Int64Val(tv.Value) +} + +func (j *Job) ExprToString(expr ast.Expr) (string, bool) { + val := j.Program.Info.Types[expr].Value + if val == nil { + return "", false + } + if val.Kind() != constant.String { + return "", false + } + return constant.StringVal(val), true +} + +func (j *Job) NodePackage(node Positioner) *Pkg { + f := j.File(node) + return j.Program.astFileMap[f] +} + +func IsGenerated(f *ast.File) bool { + comments := f.Comments + if len(comments) > 0 { + comment := comments[0].Text() + return strings.Contains(comment, "Code generated by") || + strings.Contains(comment, "DO NOT EDIT") + } + return false +} + +func Preamble(f *ast.File) string { + cutoff := f.Package + if f.Doc != nil { + cutoff = f.Doc.Pos() + } + var out []string + for _, cmt := range f.Comments { + if cmt.Pos() >= cutoff { + break + } + out = append(out, cmt.Text()) + } + return strings.Join(out, "\n") +} + +func IsPointerLike(T types.Type) bool { + switch T := T.Underlying().(type) { + case *types.Interface, *types.Chan, *types.Map, *types.Pointer: + return true + case *types.Basic: + return T.Kind() == types.UnsafePointer + } + return false +} + +func (j *Job) IsGoVersion(minor int) bool { + return j.Program.GoVersion >= minor +} + +func (j *Job) IsCallToAST(node ast.Node, name string) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return false + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return false + } + fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) + return ok && fn.FullName() == name +} + +func (j *Job) IsCallToAnyAST(node ast.Node, names ...string) bool { + for _, name := range names { + if j.IsCallToAST(node, name) { + return true + } + } + return false +} + +func CallName(call *ssa.CallCommon) string { + if call.IsInvoke() { + return "" + } + switch v := call.Value.(type) { + case *ssa.Function: + fn, ok := v.Object().(*types.Func) + if !ok { + return "" + } + return fn.FullName() + case *ssa.Builtin: + return v.Name() + } + return "" +} + +func IsCallTo(call *ssa.CallCommon, name string) bool { + return CallName(call) == name +} + +func FilterDebug(instr []ssa.Instruction) []ssa.Instruction { + var out []ssa.Instruction + for _, ins := range instr { + if _, ok := ins.(*ssa.DebugRef); !ok { + out = append(out, ins) + } + } + return out +} + +func NodeFns(pkgs []*Pkg) map[ast.Node]*ssa.Function { + out := map[ast.Node]*ssa.Function{} + + wg := &sync.WaitGroup{} + chNodeFns := make(chan map[ast.Node]*ssa.Function, runtime.NumCPU()*2) + for _, pkg := range pkgs { + pkg := pkg + wg.Add(1) + go func() { + m := map[ast.Node]*ssa.Function{} + for _, f := range pkg.Info.Files { + ast.Walk(&globalVisitor{m, pkg, f}, f) + } + chNodeFns <- m + wg.Done() + }() + } + go func() { + wg.Wait() + close(chNodeFns) + }() + + for nodeFns := range chNodeFns { + for k, v := range nodeFns { + out[k] = v + } + } + + return out +} + +type globalVisitor struct { + m map[ast.Node]*ssa.Function + pkg *Pkg + f *ast.File +} + +func (v *globalVisitor) 
Visit(node ast.Node) ast.Visitor { + switch node := node.(type) { + case *ast.CallExpr: + v.m[node] = v.pkg.Func("init") + return v + case *ast.FuncDecl, *ast.FuncLit: + nv := &fnVisitor{v.m, v.f, v.pkg, nil} + return nv.Visit(node) + default: + return v + } +} + +type fnVisitor struct { + m map[ast.Node]*ssa.Function + f *ast.File + pkg *Pkg + ssafn *ssa.Function +} + +func (v *fnVisitor) Visit(node ast.Node) ast.Visitor { + switch node := node.(type) { + case *ast.FuncDecl: + var ssafn *ssa.Function + ssafn = v.pkg.Prog.FuncValue(v.pkg.Info.ObjectOf(node.Name).(*types.Func)) + v.m[node] = ssafn + if ssafn == nil { + return nil + } + return &fnVisitor{v.m, v.f, v.pkg, ssafn} + case *ast.FuncLit: + var ssafn *ssa.Function + path, _ := astutil.PathEnclosingInterval(v.f, node.Pos(), node.Pos()) + ssafn = ssa.EnclosingFunction(v.pkg.Package, path) + v.m[node] = ssafn + if ssafn == nil { + return nil + } + return &fnVisitor{v.m, v.f, v.pkg, ssafn} + case nil: + return nil + default: + v.m[node] = v.ssafn + return v + } +} diff --git a/vendor/honnef.co/go/tools/lint/lintutil/util.go b/vendor/honnef.co/go/tools/lint/lintutil/util.go new file mode 100644 index 0000000..0bb1426 --- /dev/null +++ b/vendor/honnef.co/go/tools/lint/lintutil/util.go @@ -0,0 +1,349 @@ +// Copyright (c) 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file or at +// https://developers.google.com/open-source/licenses/bsd. + +// Package lintutil provides helpers for writing linter command lines. +package lintutil // import "honnef.co/go/tools/lint/lintutil" + +import ( + "encoding/json" + "errors" + "flag" + "fmt" + "go/build" + "go/parser" + "go/token" + "go/types" + "io" + "os" + "path/filepath" + "strconv" + "strings" + + "honnef.co/go/tools/lint" + "honnef.co/go/tools/version" + + "github.com/kisielk/gotool" + "golang.org/x/tools/go/loader" +) + +type OutputFormatter interface { + Format(p lint.Problem) +} + +type TextOutput struct { + w io.Writer +} + +func (o TextOutput) Format(p lint.Problem) { + fmt.Fprintf(o.w, "%v: %s\n", relativePositionString(p.Position), p.String()) +} + +type JSONOutput struct { + w io.Writer +} + +func (o JSONOutput) Format(p lint.Problem) { + type location struct { + File string `json:"file"` + Line int `json:"line"` + Column int `json:"column"` + } + jp := struct { + Checker string `json:"checker"` + Code string `json:"code"` + Severity string `json:"severity,omitempty"` + Location location `json:"location"` + Message string `json:"message"` + Ignored bool `json:"ignored"` + }{ + p.Checker, + p.Check, + "", // TODO(dh): support severity + location{ + p.Position.Filename, + p.Position.Line, + p.Position.Column, + }, + p.Text, + p.Ignored, + } + _ = json.NewEncoder(o.w).Encode(jp) +} +func usage(name string, flags *flag.FlagSet) func() { + return func() { + fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] files... 
# must be a single package\n", name) + fmt.Fprintf(os.Stderr, "Flags:\n") + flags.PrintDefaults() + } +} + +type runner struct { + checker lint.Checker + tags []string + ignores []lint.Ignore + version int + returnIgnored bool +} + +func resolveRelative(importPaths []string, tags []string) (goFiles bool, err error) { + if len(importPaths) == 0 { + return false, nil + } + if strings.HasSuffix(importPaths[0], ".go") { + // User is specifying a package in terms of .go files, don't resolve + return true, nil + } + wd, err := os.Getwd() + if err != nil { + return false, err + } + ctx := build.Default + ctx.BuildTags = tags + for i, path := range importPaths { + bpkg, err := ctx.Import(path, wd, build.FindOnly) + if err != nil { + return false, fmt.Errorf("can't load package %q: %v", path, err) + } + importPaths[i] = bpkg.ImportPath + } + return false, nil +} + +func parseIgnore(s string) ([]lint.Ignore, error) { + var out []lint.Ignore + if len(s) == 0 { + return nil, nil + } + for _, part := range strings.Fields(s) { + p := strings.Split(part, ":") + if len(p) != 2 { + return nil, errors.New("malformed ignore string") + } + path := p[0] + checks := strings.Split(p[1], ",") + out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks}) + } + return out, nil +} + +type versionFlag int + +func (v *versionFlag) String() string { + return fmt.Sprintf("1.%d", *v) +} + +func (v *versionFlag) Set(s string) error { + if len(s) < 3 { + return errors.New("invalid Go version") + } + if s[0] != '1' { + return errors.New("invalid Go version") + } + if s[1] != '.' { + return errors.New("invalid Go version") + } + i, err := strconv.Atoi(s[2:]) + *v = versionFlag(i) + return err +} + +func (v *versionFlag) Get() interface{} { + return int(*v) +} + +func FlagSet(name string) *flag.FlagSet { + flags := flag.NewFlagSet("", flag.ExitOnError) + flags.Usage = usage(name, flags) + flags.Float64("min_confidence", 0, "Deprecated; use -ignore instead") + flags.String("tags", "", "List of `build tags`") + flags.String("ignore", "", "Space separated list of checks to ignore, in the following format: 'import/path/file.go:Check1,Check2,...' Both the import path and file name sections support globbing, e.g. 
'os/exec/*_test.go'") + flags.Bool("tests", true, "Include tests") + flags.Bool("version", false, "Print version and exit") + flags.Bool("show-ignored", false, "Don't filter ignored problems") + flags.String("f", "text", "Output `format` (valid choices are 'text' and 'json')") + + tags := build.Default.ReleaseTags + v := tags[len(tags)-1][2:] + version := new(versionFlag) + if err := version.Set(v); err != nil { + panic(fmt.Sprintf("internal error: %s", err)) + } + + flags.Var(version, "go", "Target Go `version` in the format '1.x'") + return flags +} + +type CheckerConfig struct { + Checker lint.Checker + ExitNonZero bool +} + +func ProcessFlagSet(confs []CheckerConfig, fs *flag.FlagSet) { + tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) + ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) + tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) + goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int) + format := fs.Lookup("f").Value.(flag.Getter).Get().(string) + printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool) + showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) + + if printVersion { + version.Print() + os.Exit(0) + } + + var cs []lint.Checker + for _, conf := range confs { + cs = append(cs, conf.Checker) + } + pss, err := Lint(cs, fs.Args(), &Options{ + Tags: strings.Fields(tags), + LintTests: tests, + Ignores: ignore, + GoVersion: goVersion, + ReturnIgnored: showIgnored, + }) + if err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + + var ps []lint.Problem + for _, p := range pss { + ps = append(ps, p...) + } + + var f OutputFormatter + switch format { + case "text": + f = TextOutput{os.Stdout} + case "json": + f = JSONOutput{os.Stdout} + default: + fmt.Fprintf(os.Stderr, "unsupported output format %q\n", format) + os.Exit(2) + } + + for _, p := range ps { + f.Format(p) + } + for i, p := range pss { + if len(p) != 0 && confs[i].ExitNonZero { + os.Exit(1) + } + } +} + +type Options struct { + Tags []string + LintTests bool + Ignores string + GoVersion int + ReturnIgnored bool +} + +func Lint(cs []lint.Checker, pkgs []string, opt *Options) ([][]lint.Problem, error) { + if opt == nil { + opt = &Options{} + } + ignores, err := parseIgnore(opt.Ignores) + if err != nil { + return nil, err + } + paths := gotool.ImportPaths(pkgs) + goFiles, err := resolveRelative(paths, opt.Tags) + if err != nil { + return nil, err + } + ctx := build.Default + ctx.BuildTags = opt.Tags + hadError := false + conf := &loader.Config{ + Build: &ctx, + ParserMode: parser.ParseComments, + ImportPkgs: map[string]bool{}, + TypeChecker: types.Config{ + Error: func(err error) { + // Only print the first error found + if hadError { + return + } + hadError = true + fmt.Fprintln(os.Stderr, err) + }, + }, + } + if goFiles { + conf.CreateFromFilenames("adhoc", paths...) 
+ } else { + for _, path := range paths { + conf.ImportPkgs[path] = opt.LintTests + } + } + lprog, err := conf.Load() + if err != nil { + return nil, err + } + + var problems [][]lint.Problem + for _, c := range cs { + runner := &runner{ + checker: c, + tags: opt.Tags, + ignores: ignores, + version: opt.GoVersion, + returnIgnored: opt.ReturnIgnored, + } + problems = append(problems, runner.lint(lprog, conf)) + } + return problems, nil +} + +func shortPath(path string) string { + cwd, err := os.Getwd() + if err != nil { + return path + } + if rel, err := filepath.Rel(cwd, path); err == nil && len(rel) < len(path) { + return rel + } + return path +} + +func relativePositionString(pos token.Position) string { + s := shortPath(pos.Filename) + if pos.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", pos.Line, pos.Column) + } + if s == "" { + s = "-" + } + return s +} + +func ProcessArgs(name string, cs []CheckerConfig, args []string) { + flags := FlagSet(name) + flags.Parse(args) + + ProcessFlagSet(cs, flags) +} + +func (runner *runner) lint(lprog *loader.Program, conf *loader.Config) []lint.Problem { + l := &lint.Linter{ + Checker: runner.checker, + Ignores: runner.ignores, + GoVersion: runner.version, + ReturnIgnored: runner.returnIgnored, + } + return l.Lint(lprog, conf) +} diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go new file mode 100644 index 0000000..47ee690 --- /dev/null +++ b/vendor/honnef.co/go/tools/simple/lint.go @@ -0,0 +1,1771 @@ +// Package simple contains a linter for Go source code. +package simple // import "honnef.co/go/tools/simple" + +import ( + "go/ast" + "go/constant" + "go/token" + "go/types" + "reflect" + "strings" + + "honnef.co/go/tools/internal/sharedcheck" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/ssa" + + "golang.org/x/tools/go/types/typeutil" +) + +type Checker struct { + CheckGenerated bool + MS *typeutil.MethodSetCache + + nodeFns map[ast.Node]*ssa.Function +} + +func NewChecker() *Checker { + return &Checker{ + MS: &typeutil.MethodSetCache{}, + } +} + +func (*Checker) Name() string { return "gosimple" } +func (*Checker) Prefix() string { return "S" } + +func (c *Checker) Init(prog *lint.Program) { + c.nodeFns = lint.NodeFns(prog.Packages) +} + +func (c *Checker) Funcs() map[string]lint.Func { + return map[string]lint.Func{ + "S1000": c.LintSingleCaseSelect, + "S1001": c.LintLoopCopy, + "S1002": c.LintIfBoolCmp, + "S1003": c.LintStringsContains, + "S1004": c.LintBytesCompare, + "S1005": c.LintUnnecessaryBlank, + "S1006": c.LintForTrue, + "S1007": c.LintRegexpRaw, + "S1008": c.LintIfReturn, + "S1009": c.LintRedundantNilCheckWithLen, + "S1010": c.LintSlicing, + "S1011": c.LintLoopAppend, + "S1012": c.LintTimeSince, + "S1013": c.LintSimplerReturn, + "S1014": nil, + "S1015": nil, + "S1016": c.LintSimplerStructConversion, + "S1017": c.LintTrim, + "S1018": c.LintLoopSlide, + "S1019": c.LintMakeLenCap, + "S1020": c.LintAssertNotNil, + "S1021": c.LintDeclareAssign, + "S1022": nil, + "S1023": c.LintRedundantBreak, + "S1024": c.LintTimeUntil, + "S1025": c.LintRedundantSprintf, + "S1026": nil, + "S1027": nil, + "S1028": c.LintErrorsNewSprintf, + "S1029": c.LintRangeStringRunes, + "S1030": c.LintBytesBufferConversions, + "S1031": c.LintNilCheckAroundRange, + } +} + +func (c *Checker) filterGenerated(files []*ast.File) []*ast.File { + if c.CheckGenerated { + return files + } + var out []*ast.File + for _, f := range files { + if !lint.IsGenerated(f) { + out = append(out, f) + } + } + return out +} 
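
For orientation: every check registered in the Funcs map above follows the same pattern — walk each non-generated file with ast.Inspect and report a suggested simplification through j.Errorf. The following is a minimal, hypothetical sketch (not part of the vendored file) of the kind of code the first check, S1000 (LintSingleCaseSelect, defined next), reacts to:

    package example

    // drain loops over a single-case select. gosimple's S1000 reports
    // "should use for range instead of for { select {} }" here.
    func drain(ch chan int) {
        for {
            select {
            case v := <-ch:
                _ = v
            }
        }
    }

    // drainRange is the range form that the diagnostic points toward.
    func drainRange(ch chan int) {
        for v := range ch {
            _ = v
        }
    }
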
+ +func (c *Checker) LintSingleCaseSelect(j *lint.Job) { + isSingleSelect := func(node ast.Node) bool { + v, ok := node.(*ast.SelectStmt) + if !ok { + return false + } + return len(v.Body.List) == 1 + } + + seen := map[ast.Node]struct{}{} + fn := func(node ast.Node) bool { + switch v := node.(type) { + case *ast.ForStmt: + if len(v.Body.List) != 1 { + return true + } + if !isSingleSelect(v.Body.List[0]) { + return true + } + if _, ok := v.Body.List[0].(*ast.SelectStmt).Body.List[0].(*ast.CommClause).Comm.(*ast.SendStmt); ok { + // Don't suggest using range for channel sends + return true + } + seen[v.Body.List[0]] = struct{}{} + j.Errorf(node, "should use for range instead of for { select {} }") + case *ast.SelectStmt: + if _, ok := seen[v]; ok { + return true + } + if !isSingleSelect(v) { + return true + } + j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") + return true + } + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintLoopCopy(j *lint.Job) { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.RangeStmt) + if !ok { + return true + } + + if loop.Key == nil { + return true + } + if len(loop.Body.List) != 1 { + return true + } + stmt, ok := loop.Body.List[0].(*ast.AssignStmt) + if !ok { + return true + } + if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { + return true + } + lhs, ok := stmt.Lhs[0].(*ast.IndexExpr) + if !ok { + return true + } + if _, ok := j.Program.Info.TypeOf(lhs.X).(*types.Slice); !ok { + return true + } + lidx, ok := lhs.Index.(*ast.Ident) + if !ok { + return true + } + key, ok := loop.Key.(*ast.Ident) + if !ok { + return true + } + if j.Program.Info.TypeOf(lhs) == nil || j.Program.Info.TypeOf(stmt.Rhs[0]) == nil { + return true + } + if j.Program.Info.ObjectOf(lidx) != j.Program.Info.ObjectOf(key) { + return true + } + if !types.Identical(j.Program.Info.TypeOf(lhs), j.Program.Info.TypeOf(stmt.Rhs[0])) { + return true + } + if _, ok := j.Program.Info.TypeOf(loop.X).(*types.Slice); !ok { + return true + } + + if rhs, ok := stmt.Rhs[0].(*ast.IndexExpr); ok { + rx, ok := rhs.X.(*ast.Ident) + _ = rx + if !ok { + return true + } + ridx, ok := rhs.Index.(*ast.Ident) + if !ok { + return true + } + if j.Program.Info.ObjectOf(ridx) != j.Program.Info.ObjectOf(key) { + return true + } + } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { + value, ok := loop.Value.(*ast.Ident) + if !ok { + return true + } + if j.Program.Info.ObjectOf(rhs) != j.Program.Info.ObjectOf(value) { + return true + } + } else { + return true + } + j.Errorf(loop, "should use copy() instead of a loop") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintIfBoolCmp(j *lint.Job) { + fn := func(node ast.Node) bool { + expr, ok := node.(*ast.BinaryExpr) + if !ok || (expr.Op != token.EQL && expr.Op != token.NEQ) { + return true + } + x := j.IsBoolConst(expr.X) + y := j.IsBoolConst(expr.Y) + if !x && !y { + return true + } + var other ast.Expr + var val bool + if x { + val = j.BoolConst(expr.X) + other = expr.Y + } else { + val = j.BoolConst(expr.Y) + other = expr.X + } + basic, ok := j.Program.Info.TypeOf(other).Underlying().(*types.Basic) + if !ok || basic.Kind() != types.Bool { + return true + } + op := "" + if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) { + op = "!" 
+ } + r := op + j.Render(other) + l1 := len(r) + r = strings.TrimLeft(r, "!") + if (l1-len(r))%2 == 1 { + r = "!" + r + } + j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintBytesBufferConversions(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok || len(call.Args) != 1 { + return true + } + + argCall, ok := call.Args[0].(*ast.CallExpr) + if !ok { + return true + } + sel, ok := argCall.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + + typ := j.Program.Info.TypeOf(call.Fun) + if typ == types.Universe.Lookup("string").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).Bytes") { + j.Errorf(call, "should use %v.String() instead of %v", j.Render(sel.X), j.Render(call)) + } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).String") { + j.Errorf(call, "should use %v.Bytes() instead of %v", j.Render(sel.X), j.Render(call)) + } + + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintStringsContains(j *lint.Job) { + // map of value to token to bool value + allowed := map[int64]map[token.Token]bool{ + -1: {token.GTR: true, token.NEQ: true, token.EQL: false}, + 0: {token.GEQ: true, token.LSS: false}, + } + fn := func(node ast.Node) bool { + expr, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + switch expr.Op { + case token.GEQ, token.GTR, token.NEQ, token.LSS, token.EQL: + default: + return true + } + + value, ok := j.ExprToInt(expr.Y) + if !ok { + return true + } + + allowedOps, ok := allowed[value] + if !ok { + return true + } + b, ok := allowedOps[expr.Op] + if !ok { + return true + } + + call, ok := expr.X.(*ast.CallExpr) + if !ok { + return true + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + pkgIdent, ok := sel.X.(*ast.Ident) + if !ok { + return true + } + funIdent := sel.Sel + if pkgIdent.Name != "strings" && pkgIdent.Name != "bytes" { + return true + } + newFunc := "" + switch funIdent.Name { + case "IndexRune": + newFunc = "ContainsRune" + case "IndexAny": + newFunc = "ContainsAny" + case "Index": + newFunc = "Contains" + default: + return true + } + + prefix := "" + if !b { + prefix = "!" + } + j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, j.RenderArgs(call.Args)) + + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintBytesCompare(j *lint.Job) { + fn := func(node ast.Node) bool { + expr, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + if expr.Op != token.NEQ && expr.Op != token.EQL { + return true + } + call, ok := expr.X.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAST(call, "bytes.Compare") { + return true + } + value, ok := j.ExprToInt(expr.Y) + if !ok || value != 0 { + return true + } + args := j.RenderArgs(call.Args) + prefix := "" + if expr.Op == token.NEQ { + prefix = "!" 
+ } + j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintForTrue(j *lint.Job) { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.ForStmt) + if !ok { + return true + } + if loop.Init != nil || loop.Post != nil { + return true + } + if !j.IsBoolConst(loop.Cond) || !j.BoolConst(loop.Cond) { + return true + } + j.Errorf(loop, "should use for {} instead of for true {}") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintRegexpRaw(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAST(call, "regexp.MustCompile") && + !j.IsCallToAST(call, "regexp.Compile") { + return true + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + if len(call.Args) != 1 { + // invalid function call + return true + } + lit, ok := call.Args[0].(*ast.BasicLit) + if !ok { + // TODO(dominikh): support string concat, maybe support constants + return true + } + if lit.Kind != token.STRING { + // invalid function call + return true + } + if lit.Value[0] != '"' { + // already a raw string + return true + } + val := lit.Value + if !strings.Contains(val, `\\`) { + return true + } + + bs := false + for _, c := range val { + if !bs && c == '\\' { + bs = true + continue + } + if bs && c == '\\' { + bs = false + continue + } + if bs { + // backslash followed by non-backslash -> escape sequence + return true + } + } + + j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintIfReturn(j *lint.Job) { + fn := func(node ast.Node) bool { + block, ok := node.(*ast.BlockStmt) + if !ok { + return true + } + l := len(block.List) + if l < 2 { + return true + } + n1, n2 := block.List[l-2], block.List[l-1] + + if len(block.List) >= 3 { + if _, ok := block.List[l-3].(*ast.IfStmt); ok { + // Do not flag a series of if statements + return true + } + } + // if statement with no init, no else, a single condition + // checking an identifier or function call and just a return + // statement in the body, that returns a boolean constant + ifs, ok := n1.(*ast.IfStmt) + if !ok { + return true + } + if ifs.Else != nil || ifs.Init != nil { + return true + } + if len(ifs.Body.List) != 1 { + return true + } + if op, ok := ifs.Cond.(*ast.BinaryExpr); ok { + switch op.Op { + case token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: + default: + return true + } + } + ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt) + if !ok { + return true + } + if len(ret1.Results) != 1 { + return true + } + if !j.IsBoolConst(ret1.Results[0]) { + return true + } + + ret2, ok := n2.(*ast.ReturnStmt) + if !ok { + return true + } + if len(ret2.Results) != 1 { + return true + } + if !j.IsBoolConst(ret2.Results[0]) { + return true + } + j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +// LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: +// +// if x == nil || len(x) == 0 {} +// if x != nil && len(x) != 0 {} +// if x != nil && len(x) == N {} (where N != 0) +// if x != nil && len(x) > N {} +// if x != nil && len(x) 
>= N {} (where N != 0) +// +func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { + isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { + _, ok := expr.(*ast.BasicLit) + if ok { + return true, lint.IsZero(expr) + } + id, ok := expr.(*ast.Ident) + if !ok { + return false, false + } + c, ok := j.Program.Info.ObjectOf(id).(*types.Const) + if !ok { + return false, false + } + return true, c.Val().Kind() == constant.Int && c.Val().String() == "0" + } + + fn := func(node ast.Node) bool { + // check that expr is "x || y" or "x && y" + expr, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + if expr.Op != token.LOR && expr.Op != token.LAND { + return true + } + eqNil := expr.Op == token.LOR + + // check that x is "xx == nil" or "xx != nil" + x, ok := expr.X.(*ast.BinaryExpr) + if !ok { + return true + } + if eqNil && x.Op != token.EQL { + return true + } + if !eqNil && x.Op != token.NEQ { + return true + } + xx, ok := x.X.(*ast.Ident) + if !ok { + return true + } + if !j.IsNil(x.Y) { + return true + } + + // check that y is "len(xx) == 0" or "len(xx) ... " + y, ok := expr.Y.(*ast.BinaryExpr) + if !ok { + return true + } + if eqNil && y.Op != token.EQL { // must be len(xx) *==* 0 + return false + } + yx, ok := y.X.(*ast.CallExpr) + if !ok { + return true + } + yxFun, ok := yx.Fun.(*ast.Ident) + if !ok || yxFun.Name != "len" || len(yx.Args) != 1 { + return true + } + yxArg, ok := yx.Args[0].(*ast.Ident) + if !ok { + return true + } + if yxArg.Name != xx.Name { + return true + } + + if eqNil && !lint.IsZero(y.Y) { // must be len(x) == *0* + return true + } + + if !eqNil { + isConst, isZero := isConstZero(y.Y) + if !isConst { + return true + } + switch y.Op { + case token.EQL: + // avoid false positive for "xx != nil && len(xx) == 0" + if isZero { + return true + } + case token.GEQ: + // avoid false positive for "xx != nil && len(xx) >= 0" + if isZero { + return true + } + case token.NEQ: + // avoid false positive for "xx != nil && len(xx) != " + if !isZero { + return true + } + case token.GTR: + // ok + default: + return true + } + } + + // finally check that xx type is one of array, slice, map or chan + // this is to prevent false positive in case if xx is a pointer to an array + var nilType string + switch j.Program.Info.TypeOf(xx).(type) { + case *types.Slice: + nilType = "nil slices" + case *types.Map: + nilType = "nil maps" + case *types.Chan: + nilType = "nil channels" + default: + return true + } + j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintSlicing(j *lint.Job) { + fn := func(node ast.Node) bool { + n, ok := node.(*ast.SliceExpr) + if !ok { + return true + } + if n.Max != nil { + return true + } + s, ok := n.X.(*ast.Ident) + if !ok || s.Obj == nil { + return true + } + call, ok := n.High.(*ast.CallExpr) + if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() { + return true + } + fun, ok := call.Fun.(*ast.Ident) + if !ok || fun.Name != "len" { + return true + } + if _, ok := j.Program.Info.ObjectOf(fun).(*types.Builtin); !ok { + return true + } + arg, ok := call.Args[0].(*ast.Ident) + if !ok || arg.Obj != s.Obj { + return true + } + j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func refersTo(info *types.Info, expr ast.Expr, ident *ast.Ident) bool { + found 
:= false + fn := func(node ast.Node) bool { + ident2, ok := node.(*ast.Ident) + if !ok { + return true + } + if info.ObjectOf(ident) == info.ObjectOf(ident2) { + found = true + return false + } + return true + } + ast.Inspect(expr, fn) + return found +} + +func (c *Checker) LintLoopAppend(j *lint.Job) { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.RangeStmt) + if !ok { + return true + } + if !lint.IsBlank(loop.Key) { + return true + } + val, ok := loop.Value.(*ast.Ident) + if !ok { + return true + } + if len(loop.Body.List) != 1 { + return true + } + stmt, ok := loop.Body.List[0].(*ast.AssignStmt) + if !ok { + return true + } + if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { + return true + } + if refersTo(j.Program.Info, stmt.Lhs[0], val) { + return true + } + call, ok := stmt.Rhs[0].(*ast.CallExpr) + if !ok { + return true + } + if len(call.Args) != 2 || call.Ellipsis.IsValid() { + return true + } + fun, ok := call.Fun.(*ast.Ident) + if !ok { + return true + } + obj := j.Program.Info.ObjectOf(fun) + fn, ok := obj.(*types.Builtin) + if !ok || fn.Name() != "append" { + return true + } + + src := j.Program.Info.TypeOf(loop.X) + dst := j.Program.Info.TypeOf(call.Args[0]) + // TODO(dominikh) remove nil check once Go issue #15173 has + // been fixed + if src == nil { + return true + } + if !types.Identical(src, dst) { + return true + } + + if j.Render(stmt.Lhs[0]) != j.Render(call.Args[0]) { + return true + } + + el, ok := call.Args[1].(*ast.Ident) + if !ok { + return true + } + if j.Program.Info.ObjectOf(val) != j.Program.Info.ObjectOf(el) { + return true + } + j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", + j.Render(stmt.Lhs[0]), j.Render(call.Args[0]), j.Render(loop.X)) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintTimeSince(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + if !j.IsCallToAST(sel.X, "time.Now") { + return true + } + if sel.Sel.Name != "Sub" { + return true + } + j.Errorf(call, "should use time.Since instead of time.Now().Sub") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintTimeUntil(j *lint.Job) { + if !j.IsGoVersion(8) { + return + } + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAST(call, "(time.Time).Sub") { + return true + } + if !j.IsCallToAST(call.Args[0], "time.Now") { + return true + } + j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintSimplerReturn(j *lint.Job) { + fn1 := func(node ast.Node) bool { + var ret *ast.FieldList + switch x := node.(type) { + case *ast.FuncDecl: + ret = x.Type.Results + case *ast.FuncLit: + ret = x.Type.Results + default: + return true + } + if ret == nil { + return true + } + + fn2 := func(node ast.Node) bool { + block, ok := node.(*ast.BlockStmt) + if !ok { + return true + } + if len(block.List) < 2 { + return true + } + + outer: + for i, stmt := range block.List { + if i == len(block.List)-1 { + break + } + if i > 0 { + // don't flag an if in a series of ifs + if _, ok := block.List[i-1].(*ast.IfStmt); ok { + continue + } + } + + // if != nil + ifs, ok := 
stmt.(*ast.IfStmt) + if !ok || len(ifs.Body.List) != 1 || ifs.Else != nil { + continue + } + expr, ok := ifs.Cond.(*ast.BinaryExpr) + if !ok || expr.Op != token.NEQ || !j.IsNil(expr.Y) { + continue + } + id1, ok := expr.X.(*ast.Ident) + if !ok { + continue + } + + // return ..., + ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt) + if !ok || len(ret1.Results) == 0 { + continue + } + var results1 []types.Object + for _, res := range ret1.Results { + ident, ok := res.(*ast.Ident) + if !ok { + continue outer + } + results1 = append(results1, j.Program.Info.ObjectOf(ident)) + } + if results1[len(results1)-1] != j.Program.Info.ObjectOf(id1) { + continue + } + + // return ..., [ | nil] + ret2, ok := block.List[i+1].(*ast.ReturnStmt) + if !ok || len(ret2.Results) == 0 { + continue + } + var results2 []types.Object + for _, res := range ret2.Results { + ident, ok := res.(*ast.Ident) + if !ok { + continue outer + } + results2 = append(results2, j.Program.Info.ObjectOf(ident)) + } + _, isNil := results2[len(results2)-1].(*types.Nil) + if results2[len(results2)-1] != j.Program.Info.ObjectOf(id1) && + !isNil { + continue + } + for i, v := range results1[:len(results1)-1] { + if v != results2[i] { + continue outer + } + } + + id1Obj := j.Program.Info.ObjectOf(id1) + if id1Obj == nil { + continue + } + _, idIface := id1Obj.Type().Underlying().(*types.Interface) + _, retIface := j.Program.Info.TypeOf(ret.List[len(ret.List)-1].Type).Underlying().(*types.Interface) + + if retIface && !idIface { + // When the return value is an interface, but the + // identifier is not, an explicit check for nil is + // required to return an untyped nil. + continue + } + + j.Errorf(ifs, "'if %s != nil { return %s }; return %s' can be simplified to 'return %s'", + j.Render(expr.X), j.RenderArgs(ret1.Results), + j.RenderArgs(ret2.Results), j.RenderArgs(ret1.Results)) + } + return true + } + ast.Inspect(node, fn2) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn1) + } +} + +func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { + fn1 := func(node ast.Node) { + assign, ok := node.(*ast.AssignStmt) + if !ok { + return + } + if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { + return + } + if !lint.IsBlank(assign.Lhs[1]) { + return + } + switch rhs := assign.Rhs[0].(type) { + case *ast.IndexExpr: + // The type-checker should make sure that it's a map, but + // let's be safe. 
+ if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok { + return + } + case *ast.UnaryExpr: + if rhs.Op != token.ARROW { + return + } + default: + return + } + cp := *assign + cp.Lhs = cp.Lhs[0:1] + j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign)) + } + + fn2 := func(node ast.Node) { + stmt, ok := node.(*ast.AssignStmt) + if !ok { + return + } + if len(stmt.Lhs) != len(stmt.Rhs) { + return + } + for i, lh := range stmt.Lhs { + rh := stmt.Rhs[i] + if !lint.IsBlank(lh) { + continue + } + expr, ok := rh.(*ast.UnaryExpr) + if !ok { + continue + } + if expr.Op != token.ARROW { + continue + } + j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'") + } + } + + fn3 := func(node ast.Node) { + rs, ok := node.(*ast.RangeStmt) + if !ok { + return + } + if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) { + j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`") + } + } + + fn := func(node ast.Node) bool { + fn1(node) + fn2(node) + if j.IsGoVersion(4) { + fn3(node) + } + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintSimplerStructConversion(j *lint.Job) { + var skip ast.Node + fn := func(node ast.Node) bool { + // Do not suggest type conversion between pointers + if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND { + if lit, ok := unary.X.(*ast.CompositeLit); ok { + skip = lit + } + return true + } + + if node == skip { + return true + } + + lit, ok := node.(*ast.CompositeLit) + if !ok { + return true + } + typ1, _ := j.Program.Info.TypeOf(lit.Type).(*types.Named) + if typ1 == nil { + return true + } + s1, ok := typ1.Underlying().(*types.Struct) + if !ok { + return true + } + + var typ2 *types.Named + var ident *ast.Ident + getSelType := func(expr ast.Expr) (types.Type, *ast.Ident, bool) { + sel, ok := expr.(*ast.SelectorExpr) + if !ok { + return nil, nil, false + } + ident, ok := sel.X.(*ast.Ident) + if !ok { + return nil, nil, false + } + typ := j.Program.Info.TypeOf(sel.X) + return typ, ident, typ != nil + } + if len(lit.Elts) == 0 { + return true + } + if s1.NumFields() != len(lit.Elts) { + return true + } + for i, elt := range lit.Elts { + var t types.Type + var id *ast.Ident + var ok bool + switch elt := elt.(type) { + case *ast.SelectorExpr: + t, id, ok = getSelType(elt) + if !ok { + return true + } + if i >= s1.NumFields() || s1.Field(i).Name() != elt.Sel.Name { + return true + } + case *ast.KeyValueExpr: + var sel *ast.SelectorExpr + sel, ok = elt.Value.(*ast.SelectorExpr) + if !ok { + return true + } + + if elt.Key.(*ast.Ident).Name != sel.Sel.Name { + return true + } + t, id, ok = getSelType(elt.Value) + } + if !ok { + return true + } + // All fields must be initialized from the same object + if ident != nil && ident.Obj != id.Obj { + return true + } + typ2, _ = t.(*types.Named) + if typ2 == nil { + return true + } + ident = id + } + + if typ2 == nil { + return true + } + + if typ1.Obj().Pkg() != typ2.Obj().Pkg() { + // Do not suggest type conversions between different + // packages. Types in different packages might only match + // by coincidence. Furthermore, if the dependency ever + // adds more fields to its type, it could break the code + // that relies on the type conversion to work. 
+ return true + } + + s2, ok := typ2.Underlying().(*types.Struct) + if !ok { + return true + } + if typ1 == typ2 { + return true + } + if !structsIdentical(s1, s2) { + return true + } + j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", + ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintTrim(j *lint.Job) { + sameNonDynamic := func(node1, node2 ast.Node) bool { + if reflect.TypeOf(node1) != reflect.TypeOf(node2) { + return false + } + + switch node1 := node1.(type) { + case *ast.Ident: + return node1.Obj == node2.(*ast.Ident).Obj + case *ast.SelectorExpr: + return j.Render(node1) == j.Render(node2) + case *ast.IndexExpr: + return j.Render(node1) == j.Render(node2) + } + return false + } + + isLenOnIdent := func(fn ast.Expr, ident ast.Expr) bool { + call, ok := fn.(*ast.CallExpr) + if !ok { + return false + } + if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "len" { + return false + } + if len(call.Args) != 1 { + return false + } + return sameNonDynamic(call.Args[0], ident) + } + + fn := func(node ast.Node) bool { + var pkg string + var fun string + + ifstmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + if ifstmt.Init != nil { + return true + } + if ifstmt.Else != nil { + return true + } + if len(ifstmt.Body.List) != 1 { + return true + } + condCall, ok := ifstmt.Cond.(*ast.CallExpr) + if !ok { + return true + } + call, ok := condCall.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + if lint.IsIdent(call.X, "strings") { + pkg = "strings" + } else if lint.IsIdent(call.X, "bytes") { + pkg = "bytes" + } else { + return true + } + if lint.IsIdent(call.Sel, "HasPrefix") { + fun = "HasPrefix" + } else if lint.IsIdent(call.Sel, "HasSuffix") { + fun = "HasSuffix" + } else { + return true + } + + assign, ok := ifstmt.Body.List[0].(*ast.AssignStmt) + if !ok { + return true + } + if assign.Tok != token.ASSIGN { + return true + } + if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { + return true + } + if !sameNonDynamic(condCall.Args[0], assign.Lhs[0]) { + return true + } + slice, ok := assign.Rhs[0].(*ast.SliceExpr) + if !ok { + return true + } + if slice.Slice3 { + return true + } + if !sameNonDynamic(slice.X, condCall.Args[0]) { + return true + } + var index ast.Expr + switch fun { + case "HasPrefix": + // TODO(dh) We could detect a High that is len(s), but another + // rule will already flag that, anyway. 
+ if slice.High != nil { + return true + } + index = slice.Low + case "HasSuffix": + if slice.Low != nil { + n, ok := j.ExprToInt(slice.Low) + if !ok || n != 0 { + return true + } + } + index = slice.High + } + + switch index := index.(type) { + case *ast.CallExpr: + if fun != "HasPrefix" { + return true + } + if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { + return true + } + if len(index.Args) != 1 { + return true + } + id3 := index.Args[0] + switch oid3 := condCall.Args[1].(type) { + case *ast.BasicLit: + if pkg != "strings" { + return false + } + lit, ok := id3.(*ast.BasicLit) + if !ok { + return true + } + s1, ok1 := j.ExprToString(lit) + s2, ok2 := j.ExprToString(condCall.Args[1]) + if !ok1 || !ok2 || s1 != s2 { + return true + } + default: + if !sameNonDynamic(id3, oid3) { + return true + } + } + case *ast.BasicLit, *ast.Ident: + if fun != "HasPrefix" { + return true + } + if pkg != "strings" { + return true + } + string, ok1 := j.ExprToString(condCall.Args[1]) + int, ok2 := j.ExprToInt(slice.Low) + if !ok1 || !ok2 || int != int64(len(string)) { + return true + } + case *ast.BinaryExpr: + if fun != "HasSuffix" { + return true + } + if index.Op != token.SUB { + return true + } + if !isLenOnIdent(index.X, condCall.Args[0]) || + !isLenOnIdent(index.Y, condCall.Args[1]) { + return true + } + default: + return true + } + + var replacement string + switch fun { + case "HasPrefix": + replacement = "TrimPrefix" + case "HasSuffix": + replacement = "TrimSuffix" + } + j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintLoopSlide(j *lint.Job) { + // TODO(dh): detect bs[i+offset] in addition to bs[offset+i] + // TODO(dh): consider merging this function with LintLoopCopy + // TODO(dh): detect length that is an expression, not a variable name + // TODO(dh): support sliding to a different offset than the beginning of the slice + + fn := func(node ast.Node) bool { + /* + for i := 0; i < n; i++ { + bs[i] = bs[offset+i] + } + + ↓ + + copy(bs[:n], bs[offset:offset+n]) + */ + + loop, ok := node.(*ast.ForStmt) + if !ok || len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil { + return true + } + assign, ok := loop.Init.(*ast.AssignStmt) + if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !lint.IsZero(assign.Rhs[0]) { + return true + } + initvar, ok := assign.Lhs[0].(*ast.Ident) + if !ok { + return true + } + post, ok := loop.Post.(*ast.IncDecStmt) + if !ok || post.Tok != token.INC { + return true + } + postvar, ok := post.X.(*ast.Ident) + if !ok || j.Program.Info.ObjectOf(postvar) != j.Program.Info.ObjectOf(initvar) { + return true + } + bin, ok := loop.Cond.(*ast.BinaryExpr) + if !ok || bin.Op != token.LSS { + return true + } + binx, ok := bin.X.(*ast.Ident) + if !ok || j.Program.Info.ObjectOf(binx) != j.Program.Info.ObjectOf(initvar) { + return true + } + biny, ok := bin.Y.(*ast.Ident) + if !ok { + return true + } + + assign, ok = loop.Body.List[0].(*ast.AssignStmt) + if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || assign.Tok != token.ASSIGN { + return true + } + lhs, ok := assign.Lhs[0].(*ast.IndexExpr) + if !ok { + return true + } + rhs, ok := assign.Rhs[0].(*ast.IndexExpr) + if !ok { + return true + } + + bs1, ok := lhs.X.(*ast.Ident) + if !ok { + return true + } + bs2, ok := rhs.X.(*ast.Ident) + if !ok { + return true + } + obj1 := j.Program.Info.ObjectOf(bs1) + 
obj2 := j.Program.Info.ObjectOf(bs2) + if obj1 != obj2 { + return true + } + if _, ok := obj1.Type().Underlying().(*types.Slice); !ok { + return true + } + + index1, ok := lhs.Index.(*ast.Ident) + if !ok || j.Program.Info.ObjectOf(index1) != j.Program.Info.ObjectOf(initvar) { + return true + } + index2, ok := rhs.Index.(*ast.BinaryExpr) + if !ok || index2.Op != token.ADD { + return true + } + add1, ok := index2.X.(*ast.Ident) + if !ok { + return true + } + add2, ok := index2.Y.(*ast.Ident) + if !ok || j.Program.Info.ObjectOf(add2) != j.Program.Info.ObjectOf(initvar) { + return true + } + + j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", j.Render(bs1), j.Render(biny), j.Render(bs1), j.Render(add1)) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintMakeLenCap(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" { + // FIXME check whether make is indeed the built-in function + return true + } + switch len(call.Args) { + case 2: + // make(T, len) + if _, ok := j.Program.Info.TypeOf(call.Args[0]).Underlying().(*types.Slice); ok { + break + } + if lint.IsZero(call.Args[1]) { + j.Errorf(call.Args[1], "should use make(%s) instead", j.Render(call.Args[0])) + } + case 3: + // make(T, len, cap) + if j.Render(call.Args[1]) == j.Render(call.Args[2]) { + j.Errorf(call.Args[1], "should use make(%s, %s) instead", j.Render(call.Args[0]), j.Render(call.Args[1])) + } + } + return false + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintAssertNotNil(j *lint.Job) { + isNilCheck := func(ident *ast.Ident, expr ast.Expr) bool { + xbinop, ok := expr.(*ast.BinaryExpr) + if !ok || xbinop.Op != token.NEQ { + return false + } + xident, ok := xbinop.X.(*ast.Ident) + if !ok || xident.Obj != ident.Obj { + return false + } + if !j.IsNil(xbinop.Y) { + return false + } + return true + } + isOKCheck := func(ident *ast.Ident, expr ast.Expr) bool { + yident, ok := expr.(*ast.Ident) + if !ok || yident.Obj != ident.Obj { + return false + } + return true + } + fn := func(node ast.Node) bool { + ifstmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + assign, ok := ifstmt.Init.(*ast.AssignStmt) + if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !lint.IsBlank(assign.Lhs[0]) { + return true + } + assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) + if !ok { + return true + } + binop, ok := ifstmt.Cond.(*ast.BinaryExpr) + if !ok || binop.Op != token.LAND { + return true + } + assertIdent, ok := assert.X.(*ast.Ident) + if !ok { + return true + } + assignIdent, ok := assign.Lhs[1].(*ast.Ident) + if !ok { + return true + } + if !(isNilCheck(assertIdent, binop.X) && isOKCheck(assignIdent, binop.Y)) && + !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { + return true + } + j.Errorf(ifstmt, "when %s is true, %s can't be nil", j.Render(assignIdent), j.Render(assertIdent)) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintDeclareAssign(j *lint.Job) { + fn := func(node ast.Node) bool { + block, ok := node.(*ast.BlockStmt) + if !ok { + return true + } + if len(block.List) < 2 { + return true + } + for i, stmt := range block.List[:len(block.List)-1] { + _ = i + decl, ok := stmt.(*ast.DeclStmt) + if !ok { + continue + } + gdecl, ok := decl.Decl.(*ast.GenDecl) 
+ if !ok || gdecl.Tok != token.VAR || len(gdecl.Specs) != 1 { + continue + } + vspec, ok := gdecl.Specs[0].(*ast.ValueSpec) + if !ok || len(vspec.Names) != 1 || len(vspec.Values) != 0 { + continue + } + + assign, ok := block.List[i+1].(*ast.AssignStmt) + if !ok || assign.Tok != token.ASSIGN { + continue + } + if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { + continue + } + ident, ok := assign.Lhs[0].(*ast.Ident) + if !ok { + continue + } + if vspec.Names[0].Obj != ident.Obj { + continue + } + + if refersTo(j.Program.Info, assign.Rhs[0], ident) { + continue + } + j.Errorf(decl, "should merge variable declaration with assignment on next line") + } + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintRedundantBreak(j *lint.Job) { + fn1 := func(node ast.Node) { + clause, ok := node.(*ast.CaseClause) + if !ok { + return + } + if len(clause.Body) < 2 { + return + } + branch, ok := clause.Body[len(clause.Body)-1].(*ast.BranchStmt) + if !ok || branch.Tok != token.BREAK || branch.Label != nil { + return + } + j.Errorf(branch, "redundant break statement") + return + } + fn2 := func(node ast.Node) { + var ret *ast.FieldList + var body *ast.BlockStmt + switch x := node.(type) { + case *ast.FuncDecl: + ret = x.Type.Results + body = x.Body + case *ast.FuncLit: + ret = x.Type.Results + body = x.Body + default: + return + } + // if the func has results, a return can't be redundant. + // similarly, if there are no statements, there can be + // no return. + if ret != nil || body == nil || len(body.List) < 1 { + return + } + rst, ok := body.List[len(body.List)-1].(*ast.ReturnStmt) + if !ok { + return + } + // we don't need to check rst.Results as we already + // checked x.Type.Results to be nil. 
+ j.Errorf(rst, "redundant return statement") + } + fn := func(node ast.Node) bool { + fn1(node) + fn2(node) + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { + // OPT(dh): we can cache the type lookup + idx := strings.IndexRune(iface, '.') + var scope *types.Scope + var ifaceName string + if idx == -1 { + scope = types.Universe + ifaceName = iface + } else { + pkgName := iface[:idx] + pkg := j.Program.Prog.Package(pkgName) + if pkg == nil { + return false + } + scope = pkg.Pkg.Scope() + ifaceName = iface[idx+1:] + } + + obj := scope.Lookup(ifaceName) + if obj == nil { + return false + } + i, ok := obj.Type().Underlying().(*types.Interface) + if !ok { + return false + } + return types.Implements(typ, i) +} + +func (c *Checker) LintRedundantSprintf(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAST(call, "fmt.Sprintf") { + return true + } + if len(call.Args) != 2 { + return true + } + if s, ok := j.ExprToString(call.Args[0]); !ok || s != "%s" { + return true + } + pkg := j.NodePackage(call) + arg := call.Args[1] + typ := pkg.Info.TypeOf(arg) + + if c.Implements(j, typ, "fmt.Stringer") { + j.Errorf(call, "should use String() instead of fmt.Sprintf") + return true + } + + if typ.Underlying() == types.Universe.Lookup("string").Type() { + if typ == types.Universe.Lookup("string").Type() { + j.Errorf(call, "the argument is already a string, there's no need to use fmt.Sprintf") + } else { + j.Errorf(call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") + } + } + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { + fn := func(node ast.Node) bool { + if !j.IsCallToAST(node, "errors.New") { + return true + } + call := node.(*ast.CallExpr) + if !j.IsCallToAST(call.Args[0], "fmt.Sprintf") { + return true + } + j.Errorf(node, "should use fmt.Errorf(...) 
instead of errors.New(fmt.Sprintf(...))") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) LintRangeStringRunes(j *lint.Job) { + sharedcheck.CheckRangeStringRunes(c.nodeFns, j) +} + +func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { + fn := func(node ast.Node) bool { + ifstmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + + cond, ok := ifstmt.Cond.(*ast.BinaryExpr) + if !ok { + return true + } + + if cond.Op != token.NEQ || !j.IsNil(cond.Y) || len(ifstmt.Body.List) != 1 { + return true + } + + loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt) + if !ok { + return true + } + ifXIdent, ok := cond.X.(*ast.Ident) + if !ok { + return true + } + rangeXIdent, ok := loop.X.(*ast.Ident) + if !ok { + return true + } + if ifXIdent.Obj != rangeXIdent.Obj { + return true + } + switch j.Program.Info.TypeOf(rangeXIdent).(type) { + case *types.Slice, *types.Map: + j.Errorf(node, "unnecessary nil check around range") + } + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} diff --git a/vendor/honnef.co/go/tools/simple/lint17.go b/vendor/honnef.co/go/tools/simple/lint17.go new file mode 100644 index 0000000..53f529c --- /dev/null +++ b/vendor/honnef.co/go/tools/simple/lint17.go @@ -0,0 +1,7 @@ +// +build !go1.8 + +package simple + +import "go/types" + +var structsIdentical = types.Identical diff --git a/vendor/honnef.co/go/tools/simple/lint18.go b/vendor/honnef.co/go/tools/simple/lint18.go new file mode 100644 index 0000000..ab9ea72 --- /dev/null +++ b/vendor/honnef.co/go/tools/simple/lint18.go @@ -0,0 +1,7 @@ +// +build go1.8 + +package simple + +import "go/types" + +var structsIdentical = types.IdenticalIgnoreTags diff --git a/vendor/honnef.co/go/tools/ssa/LICENSE b/vendor/honnef.co/go/tools/ssa/LICENSE new file mode 100644 index 0000000..aee4804 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. +Copyright (c) 2016 Dominik Honnef. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
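
The lint17.go/lint18.go pair above selects the struct-identity predicate by build tag: on Go 1.8 and newer, structsIdentical is types.IdenticalIgnoreTags, because Go 1.8 made conversions legal between struct types that differ only in field tags. A minimal sketch, using hypothetical types in one package, of the direct conversion that S1016 (LintSimplerStructConversion) can then suggest instead of a field-by-field literal:

    package example

    // wire and model differ only in struct tags. Under Go 1.8+,
    // tags are ignored for conversion purposes, so a direct
    // conversion is preferred over copying fields one by one.
    type wire struct {
        ID   int    `json:"id"`
        Name string `json:"name"`
    }

    type model struct {
        ID   int
        Name string
    }

    func toModel(w wire) model {
        return model(w) // legal since Go 1.8: field tags are ignored in conversions
    }
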
diff --git a/vendor/honnef.co/go/tools/ssa/blockopt.go b/vendor/honnef.co/go/tools/ssa/blockopt.go new file mode 100644 index 0000000..22c9a4c --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/blockopt.go @@ -0,0 +1,195 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// Simple block optimizations to simplify the control flow graph. + +// TODO(adonovan): opt: instead of creating several "unreachable" blocks +// per function in the Builder, reuse a single one (e.g. at Blocks[1]) +// to reduce garbage. + +import ( + "fmt" + "os" +) + +// If true, perform sanity checking and show progress at each +// successive iteration of optimizeBlocks. Very verbose. +const debugBlockOpt = false + +// markReachable sets Index=-1 for all blocks reachable from b. +func markReachable(b *BasicBlock) { + b.Index = -1 + for _, succ := range b.Succs { + if succ.Index == 0 { + markReachable(succ) + } + } +} + +func DeleteUnreachableBlocks(f *Function) { + deleteUnreachableBlocks(f) +} + +// deleteUnreachableBlocks marks all reachable blocks of f and +// eliminates (nils) all others, including possibly cyclic subgraphs. +// +func deleteUnreachableBlocks(f *Function) { + const white, black = 0, -1 + // We borrow b.Index temporarily as the mark bit. + for _, b := range f.Blocks { + b.Index = white + } + markReachable(f.Blocks[0]) + if f.Recover != nil { + markReachable(f.Recover) + } + for i, b := range f.Blocks { + if b.Index == white { + for _, c := range b.Succs { + if c.Index == black { + c.removePred(b) // delete white->black edge + } + } + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "unreachable", b) + } + f.Blocks[i] = nil // delete b + } + } + f.removeNilBlocks() +} + +// jumpThreading attempts to apply simple jump-threading to block b, +// in which a->b->c become a->c if b is just a Jump. +// The result is true if the optimization was applied. +// +func jumpThreading(f *Function, b *BasicBlock) bool { + if b.Index == 0 { + return false // don't apply to entry block + } + if b.Instrs == nil { + return false + } + if _, ok := b.Instrs[0].(*Jump); !ok { + return false // not just a jump + } + c := b.Succs[0] + if c == b { + return false // don't apply to degenerate jump-to-self. + } + if c.hasPhi() { + return false // not sound without more effort + } + for j, a := range b.Preds { + a.replaceSucc(b, c) + + // If a now has two edges to c, replace its degenerate If by Jump. + if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { + jump := new(Jump) + jump.setBlock(a) + a.Instrs[len(a.Instrs)-1] = jump + a.Succs = a.Succs[:1] + c.removePred(b) + } else { + if j == 0 { + c.replacePred(b, a) + } else { + c.Preds = append(c.Preds, a) + } + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) + } + } + f.Blocks[b.Index] = nil // delete b + return true +} + +// fuseBlocks attempts to apply the block fusion optimization to block +// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. +// The result is true if the optimization was applied. +// +func fuseBlocks(f *Function, a *BasicBlock) bool { + if len(a.Succs) != 1 { + return false + } + b := a.Succs[0] + if len(b.Preds) != 1 { + return false + } + + // Degenerate &&/|| ops may result in a straight-line CFG + // containing φ-nodes. (Ideally we'd replace such them with + // their sole operand but that requires Referrers, built later.) 
+ if b.hasPhi() { + return false // not sound without further effort + } + + // Eliminate jump at end of A, then copy all of B across. + a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) + for _, instr := range b.Instrs { + instr.setBlock(a) + } + + // A inherits B's successors + a.Succs = append(a.succs2[:0], b.Succs...) + + // Fix up Preds links of all successors of B. + for _, c := range b.Succs { + c.replacePred(b, a) + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) + } + + f.Blocks[b.Index] = nil // delete b + return true +} + +func OptimizeBlocks(f *Function) { + optimizeBlocks(f) +} + +// optimizeBlocks() performs some simple block optimizations on a +// completed function: dead block elimination, block fusion, jump +// threading. +// +func optimizeBlocks(f *Function) { + deleteUnreachableBlocks(f) + + // Loop until no further progress. + changed := true + for changed { + changed = false + + if debugBlockOpt { + f.WriteTo(os.Stderr) + mustSanityCheck(f, nil) + } + + for _, b := range f.Blocks { + // f.Blocks will temporarily contain nils to indicate + // deleted blocks; we remove them at the end. + if b == nil { + continue + } + + // Fuse blocks. b->c becomes bc. + if fuseBlocks(f, b) { + changed = true + } + + // a->b->c becomes a->c if b contains only a Jump. + if jumpThreading(f, b) { + changed = true + continue // (b was disconnected) + } + } + } + f.removeNilBlocks() +} diff --git a/vendor/honnef.co/go/tools/ssa/builder.go b/vendor/honnef.co/go/tools/ssa/builder.go new file mode 100644 index 0000000..bfb7a2b --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/builder.go @@ -0,0 +1,2383 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the BUILD phase of SSA construction. +// +// SSA construction has two phases, CREATE and BUILD. In the CREATE phase +// (create.go), all packages are constructed and type-checked and +// definitions of all package members are created, method-sets are +// computed, and wrapper methods are synthesized. +// ssa.Packages are created in arbitrary order. +// +// In the BUILD phase (builder.go), the builder traverses the AST of +// each Go source function and generates SSA instructions for the +// function body. Initializer expressions for package-level variables +// are emitted to the package's init() function in the order specified +// by go/types.Info.InitOrder, then code for each function in the +// package is generated in lexical order. +// The BUILD phases for distinct packages are independent and are +// executed in parallel. +// +// TODO(adonovan): indeed, building functions is now embarrassingly parallel. +// Audit for concurrency then benchmark using more goroutines. +// +// The builder's and Program's indices (maps) are populated and +// mutated during the CREATE phase, but during the BUILD phase they +// remain constant. The sole exception is Prog.methodSets and its +// related maps, which are protected by a dedicated mutex. + +import ( + "fmt" + "go/ast" + exact "go/constant" + "go/token" + "go/types" + "os" + "sync" +) + +type opaqueType struct { + types.Type + name string +} + +func (t *opaqueType) String() string { return t.name } + +var ( + varOk = newVar("ok", tBool) + varIndex = newVar("index", tInt) + + // Type constants. 
+ tBool = types.Typ[types.Bool] + tByte = types.Typ[types.Byte] + tInt = types.Typ[types.Int] + tInvalid = types.Typ[types.Invalid] + tString = types.Typ[types.String] + tUntypedNil = types.Typ[types.UntypedNil] + tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators + tEface = new(types.Interface) + + // SSA Value constants. + vZero = intConst(0) + vOne = intConst(1) + vTrue = NewConst(exact.MakeBool(true), tBool) +) + +// builder holds state associated with the package currently being built. +// Its methods contain all the logic for AST-to-SSA conversion. +type builder struct{} + +// cond emits to fn code to evaluate boolean condition e and jump +// to t or f depending on its value, performing various simplifications. +// +// Postcondition: fn.currentBlock is nil. +// +func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { + switch e := e.(type) { + case *ast.ParenExpr: + b.cond(fn, e.X, t, f) + return + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND: + ltrue := fn.newBasicBlock("cond.true") + b.cond(fn, e.X, ltrue, f) + fn.currentBlock = ltrue + b.cond(fn, e.Y, t, f) + return + + case token.LOR: + lfalse := fn.newBasicBlock("cond.false") + b.cond(fn, e.X, t, lfalse) + fn.currentBlock = lfalse + b.cond(fn, e.Y, t, f) + return + } + + case *ast.UnaryExpr: + if e.Op == token.NOT { + b.cond(fn, e.X, f, t) + return + } + } + + // A traditional compiler would simplify "if false" (etc) here + // but we do not, for better fidelity to the source code. + // + // The value of a constant condition may be platform-specific, + // and may cause blocks that are reachable in some configuration + // to be hidden from subsequent analyses such as bug-finding tools. + emitIf(fn, b.expr(fn, e), t, f) +} + +// logicalBinop emits code to fn to evaluate e, a &&- or +// ||-expression whose reified boolean value is wanted. +// The value is returned. +// +func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { + rhs := fn.newBasicBlock("binop.rhs") + done := fn.newBasicBlock("binop.done") + + // T(e) = T(e.X) = T(e.Y) after untyped constants have been + // eliminated. + // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. + t := fn.Pkg.typeOf(e) + + var short Value // value of the short-circuit path + switch e.Op { + case token.LAND: + b.cond(fn, e.X, rhs, done) + short = NewConst(exact.MakeBool(false), t) + + case token.LOR: + b.cond(fn, e.X, done, rhs) + short = NewConst(exact.MakeBool(true), t) + } + + // Is rhs unreachable? + if rhs.Preds == nil { + // Simplify false&&y to false, true||y to true. + fn.currentBlock = done + return short + } + + // Is done unreachable? + if done.Preds == nil { + // Simplify true&&y (or false||y) to y. + fn.currentBlock = rhs + return b.expr(fn, e.Y) + } + + // All edges from e.X to done carry the short-circuit value. + var edges []Value + for _ = range done.Preds { + edges = append(edges, short) + } + + // The edge from e.Y to done carries the value of e.Y. + fn.currentBlock = rhs + edges = append(edges, b.expr(fn, e.Y)) + emitJump(fn, done) + fn.currentBlock = done + + phi := &Phi{Edges: edges, Comment: e.Op.String()} + phi.pos = e.OpPos + phi.typ = t + return done.emit(phi) +} + +// exprN lowers a multi-result expression e to SSA form, emitting code +// to fn and returning a single Value whose type is a *types.Tuple. +// The caller must access the components via Extract. 
+// +// Multi-result expressions include CallExprs in a multi-value +// assignment or return statement, and "value,ok" uses of +// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op +// is token.ARROW). +// +func (b *builder) exprN(fn *Function, e ast.Expr) Value { + typ := fn.Pkg.typeOf(e).(*types.Tuple) + switch e := e.(type) { + case *ast.ParenExpr: + return b.exprN(fn, e.X) + + case *ast.CallExpr: + // Currently, no built-in function nor type conversion + // has multiple results, so we can avoid some of the + // cases for single-valued CallExpr. + var c Call + b.setCall(fn, e, &c.Call) + c.typ = typ + return fn.emit(&c) + + case *ast.IndexExpr: + mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + lookup := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), + CommaOk: true, + } + lookup.setType(typ) + lookup.setPos(e.Lbrack) + return fn.emit(lookup) + + case *ast.TypeAssertExpr: + return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) + + case *ast.UnaryExpr: // must be receive <- + unop := &UnOp{ + Op: token.ARROW, + X: b.expr(fn, e.X), + CommaOk: true, + } + unop.setType(typ) + unop.setPos(e.OpPos) + return fn.emit(unop) + } + panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) +} + +// builtin emits to fn SSA instructions to implement a call to the +// built-in function obj with the specified arguments +// and return type. It returns the value defined by the result. +// +// The result is nil if no special handling was required; in this case +// the caller should treat this like an ordinary library function +// call. +// +func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { + switch obj.Name() { + case "make": + switch typ.Underlying().(type) { + case *types.Slice: + n := b.expr(fn, args[1]) + m := n + if len(args) == 3 { + m = b.expr(fn, args[2]) + } + if m, ok := m.(*Const); ok { + // treat make([]T, n, m) as new([m]T)[:n] + cap := m.Int64() + at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) + alloc := emitNew(fn, at, pos) + alloc.Comment = "makeslice" + v := &Slice{ + X: alloc, + High: n, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + v := &MakeSlice{ + Len: n, + Cap: m, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Map: + var res Value + if len(args) == 2 { + res = b.expr(fn, args[1]) + } + v := &MakeMap{Reserve: res} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Chan: + var sz Value = vZero + if len(args) == 2 { + sz = b.expr(fn, args[1]) + } + v := &MakeChan{Size: sz} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + + case "new": + alloc := emitNew(fn, deref(typ), pos) + alloc.Comment = "new" + return alloc + + case "len", "cap": + // Special case: len or cap of an array or *array is + // based on the type, not the value which may be nil. + // We must still evaluate the value, though. (If it + // was side-effect free, the whole call would have + // been constant-folded.) + t := deref(fn.Pkg.typeOf(args[0])).Underlying() + if at, ok := t.(*types.Array); ok { + b.expr(fn, args[0]) // for effects only + return intConst(at.Len()) + } + // Otherwise treat as normal. 
+ + case "panic": + fn.emit(&Panic{ + X: emitConv(fn, b.expr(fn, args[0]), tEface), + pos: pos, + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + return vTrue // any non-nil Value will do + } + return nil // treat all others as a regular function call +} + +// addr lowers a single-result addressable expression e to SSA form, +// emitting code to fn and returning the location (an lvalue) defined +// by the expression. +// +// If escaping is true, addr marks the base variable of the +// addressable expression e as being a potentially escaping pointer +// value. For example, in this code: +// +// a := A{ +// b: [1]B{B{c: 1}} +// } +// return &a.b[0].c +// +// the application of & causes a.b[0].c to have its address taken, +// which means that ultimately the local variable a must be +// heap-allocated. This is a simple but very conservative escape +// analysis. +// +// Operations forming potentially escaping pointers include: +// - &x, including when implicit in method call or composite literals. +// - a[:] iff a is an array (not *array) +// - references to variables in lexically enclosing functions. +// +func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { + switch e := e.(type) { + case *ast.Ident: + if isBlankIdent(e) { + return blank{} + } + obj := fn.Pkg.objectOf(e) + v := fn.Prog.packageLevelValue(obj) // var (address) + if v == nil { + v = fn.lookup(obj, escaping) + } + return &address{addr: v, pos: e.Pos(), expr: e} + + case *ast.CompositeLit: + t := deref(fn.Pkg.typeOf(e)) + var v *Alloc + if escaping { + v = emitNew(fn, t, e.Lbrace) + } else { + v = fn.addLocal(t, e.Lbrace) + } + v.Comment = "complit" + var sb storebuf + b.compLit(fn, v, e, true, &sb) + sb.emit(fn) + return &address{addr: v, pos: e.Lbrace, expr: e} + + case *ast.ParenExpr: + return b.addr(fn, e.X, escaping) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // qualified identifier + return b.addr(fn, e.Sel, escaping) + } + if sel.Kind() != types.FieldVal { + panic(sel) + } + wantAddr := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + last := len(sel.Index()) - 1 + return &address{ + addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), + pos: e.Sel.Pos(), + expr: e.Sel, + } + + case *ast.IndexExpr: + var x Value + var et types.Type + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + x = b.addr(fn, e.X, escaping).address(fn) + et = types.NewPointer(t.Elem()) + case *types.Pointer: // *array + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) + case *types.Slice: + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem()) + case *types.Map: + return &element{ + m: b.expr(fn, e.X), + k: emitConv(fn, b.expr(fn, e.Index), t.Key()), + t: t.Elem(), + pos: e.Lbrack, + } + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + v := &IndexAddr{ + X: x, + Index: emitConv(fn, b.expr(fn, e.Index), tInt), + } + v.setPos(e.Lbrack) + v.setType(et) + return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e} + + case *ast.StarExpr: + return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} + } + + panic(fmt.Sprintf("unexpected address expression: %T", e)) +} + +type store struct { + lhs lvalue + rhs Value +} + +type storebuf struct{ stores []store } + +func (sb *storebuf) store(lhs lvalue, rhs Value) { + sb.stores = append(sb.stores, store{lhs, rhs}) +} + +func (sb *storebuf) emit(fn *Function) { + for _, s := range sb.stores { + s.lhs.store(fn, s.rhs) + } +} + +// 
assign emits to fn code to initialize the lvalue loc with the value +// of expression e. If isZero is true, assign assumes that loc holds +// the zero value for its type. +// +// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate +// better code in some cases, e.g., for composite literals in an +// addressable location. +// +// If sb is not nil, assign generates code to evaluate expression e, but +// not to update loc. Instead, the necessary stores are appended to the +// storebuf sb so that they can be executed later. This allows correct +// in-place update of existing variables when the RHS is a composite +// literal that may reference parts of the LHS. +// +func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { + // Can we initialize it in place? + if e, ok := unparen(e).(*ast.CompositeLit); ok { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + if _, ok := loc.(blank); !ok { // avoid calling blank.typ() + if isPointer(loc.typ()) { + ptr := b.addr(fn, e, true).address(fn) + // copy address + if sb != nil { + sb.store(loc, ptr) + } else { + loc.store(fn, ptr) + } + return + } + } + + if _, ok := loc.(*address); ok { + if isInterface(loc.typ()) { + // e.g. var x interface{} = T{...} + // Can't in-place initialize an interface value. + // Fall back to copying. + } else { + // x = T{...} or x := T{...} + addr := loc.address(fn) + if sb != nil { + b.compLit(fn, addr, e, isZero, sb) + } else { + var sb storebuf + b.compLit(fn, addr, e, isZero, &sb) + sb.emit(fn) + } + + // Subtle: emit debug ref for aggregate types only; + // slice and map are handled by store ops in compLit. + switch loc.typ().Underlying().(type) { + case *types.Struct, *types.Array: + emitDebugRef(fn, e, addr, true) + } + + return + } + } + } + + // simple case: just copy + rhs := b.expr(fn, e) + if sb != nil { + sb.store(loc, rhs) + } else { + loc.store(fn, rhs) + } +} + +// expr lowers a single-result expression e to SSA form, emitting code +// to fn and returning the Value defined by the expression. +// +func (b *builder) expr(fn *Function, e ast.Expr) Value { + e = unparen(e) + + tv := fn.Pkg.info.Types[e] + + // Is expression a constant? + if tv.Value != nil { + return NewConst(tv.Value, tv.Type) + } + + var v Value + if tv.Addressable() { + // Prefer pointer arithmetic ({Index,Field}Addr) followed + // by Load over subelement extraction (e.g. Index, Field), + // to avoid large copies. 
+ v = b.addr(fn, e, false).load(fn) + } else { + v = b.expr0(fn, e, tv) + } + if fn.debugInfo() { + emitDebugRef(fn, e, v, false) + } + return v +} + +func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { + switch e := e.(type) { + case *ast.BasicLit: + panic("non-constant BasicLit") // unreachable + + case *ast.FuncLit: + fn2 := &Function{ + name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), + Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), + pos: e.Type.Func, + parent: fn, + Pkg: fn.Pkg, + Prog: fn.Prog, + syntax: e, + } + fn.AnonFuncs = append(fn.AnonFuncs, fn2) + b.buildFunction(fn2) + if fn2.FreeVars == nil { + return fn2 + } + v := &MakeClosure{Fn: fn2} + v.setType(tv.Type) + for _, fv := range fn2.FreeVars { + v.Bindings = append(v.Bindings, fv.outer) + fv.outer = nil + } + return fn.emit(v) + + case *ast.TypeAssertExpr: // single-result form only + return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen) + + case *ast.CallExpr: + if fn.Pkg.info.Types[e.Fun].IsType() { + // Explicit type conversion, e.g. string(x) or big.Int(x) + x := b.expr(fn, e.Args[0]) + y := emitConv(fn, x, tv.Type) + if y != x { + switch y := y.(type) { + case *Convert: + y.pos = e.Lparen + case *ChangeType: + y.pos = e.Lparen + case *MakeInterface: + y.pos = e.Lparen + } + } + return y + } + // Call to "intrinsic" built-ins, e.g. new, make, panic. + if id, ok := unparen(e.Fun).(*ast.Ident); ok { + if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { + if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil { + return v + } + } + } + // Regular function call. + var v Call + b.setCall(fn, e, &v.Call) + v.setType(tv.Type) + return fn.emit(&v) + + case *ast.UnaryExpr: + switch e.Op { + case token.AND: // &X --- potentially escaping. + addr := b.addr(fn, e.X, true) + if _, ok := unparen(e.X).(*ast.StarExpr); ok { + // &*p must panic if p is nil (http://golang.org/s/go12nil). + // For simplicity, we'll just (suboptimally) rely + // on the side effects of a load. + // TODO(adonovan): emit dedicated nilcheck. + addr.load(fn) + } + return addr.address(fn) + case token.ADD: + return b.expr(fn, e.X) + case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ + v := &UnOp{ + Op: e.Op, + X: b.expr(fn, e.X), + } + v.setPos(e.OpPos) + v.setType(tv.Type) + return fn.emit(v) + default: + panic(e.Op) + } + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND, token.LOR: + return b.logicalBinop(fn, e) + case token.SHL, token.SHR: + fallthrough + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos) + + case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: + cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) + // The type of x==y may be UntypedBool. + return emitConv(fn, cmp, DefaultType(tv.Type)) + default: + panic("illegal op in BinaryExpr: " + e.Op.String()) + } + + case *ast.SliceExpr: + var low, high, max Value + var x Value + switch fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Potentially escaping. 
+ x = b.addr(fn, e.X, true).address(fn) + case *types.Basic, *types.Slice, *types.Pointer: // *array + x = b.expr(fn, e.X) + default: + panic("unreachable") + } + if e.High != nil { + high = b.expr(fn, e.High) + } + if e.Low != nil { + low = b.expr(fn, e.Low) + } + if e.Slice3 { + max = b.expr(fn, e.Max) + } + v := &Slice{ + X: x, + Low: low, + High: high, + Max: max, + } + v.setPos(e.Lbrack) + v.setType(tv.Type) + return fn.emit(v) + + case *ast.Ident: + obj := fn.Pkg.info.Uses[e] + // Universal built-in or nil? + switch obj := obj.(type) { + case *types.Builtin: + return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} + case *types.Nil: + return nilConst(tv.Type) + } + // Package-level func or var? + if v := fn.Prog.packageLevelValue(obj); v != nil { + if _, ok := obj.(*types.Var); ok { + return emitLoad(fn, v) // var (address) + } + return v // (func) + } + // Local var. + return emitLoad(fn, fn.lookup(obj, false)) // var (address) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // qualified identifier + return b.expr(fn, e.Sel) + } + switch sel.Kind() { + case types.MethodExpr: + // (*T).f or T.f, the method f from the method-set of type T. + // The result is a "thunk". + return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type) + + case types.MethodVal: + // e.f where e is an expression and f is a method. + // The result is a "bound". + obj := sel.Obj().(*types.Func) + rt := recvType(obj) + wantAddr := isPointer(rt) + escaping := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + if isInterface(rt) { + // If v has interface type I, + // we must emit a check that v is non-nil. + // We use: typeassert v.(I). + emitTypeAssert(fn, v, rt, token.NoPos) + } + c := &MakeClosure{ + Fn: makeBound(fn.Prog, obj), + Bindings: []Value{v}, + } + c.setPos(e.Sel.Pos()) + c.setType(tv.Type) + return fn.emit(c) + + case types.FieldVal: + indices := sel.Index() + last := len(indices) - 1 + v := b.expr(fn, e.X) + v = emitImplicitSelections(fn, v, indices[:last]) + v = emitFieldSelection(fn, v, indices[last], false, e.Sel) + return v + } + + panic("unexpected expression-relative selector") + + case *ast.IndexExpr: + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Non-addressable array (in a register). + v := &Index{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), tInt), + } + v.setPos(e.Lbrack) + v.setType(t.Elem()) + return fn.emit(v) + + case *types.Map: + // Maps are not addressable. + mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + v := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), + } + v.setPos(e.Lbrack) + v.setType(mapt.Elem()) + return fn.emit(v) + + case *types.Basic: // => string + // Strings are not addressable. + v := &Lookup{ + X: b.expr(fn, e.X), + Index: b.expr(fn, e.Index), + } + v.setPos(e.Lbrack) + v.setType(tByte) + return fn.emit(v) + + case *types.Slice, *types.Pointer: // *array + // Addressable slice/array; use IndexAddr and Load. + return b.addr(fn, e, false).load(fn) + + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + + case *ast.CompositeLit, *ast.StarExpr: + // Addressable types (lvalues) + return b.addr(fn, e, false).load(fn) + } + + panic(fmt.Sprintf("unexpected expr: %T", e)) +} + +// stmtList emits to fn code for all statements in list. 
+func (b *builder) stmtList(fn *Function, list []ast.Stmt) { + for _, s := range list { + b.stmt(fn, s) + } +} + +// receiver emits to fn code for expression e in the "receiver" +// position of selection e.f (where f may be a field or a method) and +// returns the effective receiver after applying the implicit field +// selections of sel. +// +// wantAddr requests that the result is an an address. If +// !sel.Indirect(), this may require that e be built in addr() mode; it +// must thus be addressable. +// +// escaping is defined as per builder.addr(). +// +func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value { + var v Value + if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { + v = b.addr(fn, e, escaping).address(fn) + } else { + v = b.expr(fn, e) + } + + last := len(sel.Index()) - 1 + v = emitImplicitSelections(fn, v, sel.Index()[:last]) + if !wantAddr && isPointer(v.Type()) { + v = emitLoad(fn, v) + } + return v +} + +// setCallFunc populates the function parts of a CallCommon structure +// (Func, Method, Recv, Args[0]) based on the kind of invocation +// occurring in e. +// +func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { + c.pos = e.Lparen + + // Is this a method call? + if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { + sel, ok := fn.Pkg.info.Selections[selector] + if ok && sel.Kind() == types.MethodVal { + obj := sel.Obj().(*types.Func) + recv := recvType(obj) + wantAddr := isPointer(recv) + escaping := true + v := b.receiver(fn, selector.X, wantAddr, escaping, sel) + if isInterface(recv) { + // Invoke-mode call. + c.Value = v + c.Method = obj + } else { + // "Call"-mode call. + c.Value = fn.Prog.declaredFunc(obj) + c.Args = append(c.Args, v) + } + return + } + + // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): + // a statically dispatched call to the method f in the + // method-set of T or *T. T may be an interface. + // + // e.Fun would evaluate to a concrete method, interface + // wrapper function, or promotion wrapper. + // + // For now, we evaluate it in the usual way. + // + // TODO(adonovan): opt: inline expr() here, to make the + // call static and to avoid generation of wrappers. + // It's somewhat tricky as it may consume the first + // actual parameter if the call is "invoke" mode. + // + // Examples: + // type T struct{}; func (T) f() {} // "call" mode + // type T interface { f() } // "invoke" mode + // + // type S struct{ T } + // + // var s S + // S.f(s) + // (*S).f(&s) + // + // Suggested approach: + // - consume the first actual parameter expression + // and build it with b.expr(). + // - apply implicit field selections. + // - use MethodVal logic to populate fields of c. + } + + // Evaluate the function operand in the usual way. + c.Value = b.expr(fn, e.Fun) +} + +// emitCallArgs emits to f code for the actual parameters of call e to +// a (possibly built-in) function of effective type sig. +// The argument values are appended to args, which is then returned. +// +func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { + // f(x, y, z...): pass slice z straight through. + if e.Ellipsis != 0 { + for i, arg := range e.Args { + v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) + args = append(args, v) + } + return args + } + + offset := len(args) // 1 if call has receiver, 0 otherwise + + // Evaluate actual parameter expressions. 
+ // + // If this is a chained call of the form f(g()) where g has + // multiple return values (MRV), they are flattened out into + // args; a suffix of them may end up in a varargs slice. + for _, arg := range e.Args { + v := b.expr(fn, arg) + if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain + for i, n := 0, ttuple.Len(); i < n; i++ { + args = append(args, emitExtract(fn, v, i)) + } + } else { + args = append(args, v) + } + } + + // Actual->formal assignability conversions for normal parameters. + np := sig.Params().Len() // number of normal parameters + if sig.Variadic() { + np-- + } + for i := 0; i < np; i++ { + args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) + } + + // Actual->formal assignability conversions for variadic parameter, + // and construction of slice. + if sig.Variadic() { + varargs := args[offset+np:] + st := sig.Params().At(np).Type().(*types.Slice) + vt := st.Elem() + if len(varargs) == 0 { + args = append(args, nilConst(st)) + } else { + // Replace a suffix of args with a slice containing it. + at := types.NewArray(vt, int64(len(varargs))) + a := emitNew(fn, at, token.NoPos) + a.setPos(e.Rparen) + a.Comment = "varargs" + for i, arg := range varargs { + iaddr := &IndexAddr{ + X: a, + Index: intConst(int64(i)), + } + iaddr.setType(types.NewPointer(vt)) + fn.emit(iaddr) + emitStore(fn, iaddr, arg, arg.Pos()) + } + s := &Slice{X: a} + s.setType(st) + args[offset+np] = fn.emit(s) + args = args[:offset+np+1] + } + } + return args +} + +// setCall emits to fn code to evaluate all the parameters of a function +// call e, and populates *c with those values. +// +func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { + // First deal with the f(...) part and optional receiver. + b.setCallFunc(fn, e, c) + + // Then append the other actual parameters. + sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) + if sig == nil { + panic(fmt.Sprintf("no signature for call of %s", e.Fun)) + } + c.Args = b.emitCallArgs(fn, sig, e, c.Args) +} + +// assignOp emits to fn code to perform loc += incr or loc -= incr. +func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token, pos token.Pos) { + oldv := loc.load(fn) + loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, incr, oldv.Type()), loc.typ(), pos)) +} + +// localValueSpec emits to fn code to define all of the vars in the +// function-local ValueSpec, spec. +// +func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { + switch { + case len(spec.Values) == len(spec.Names): + // e.g. var x, y = 0, 1 + // 1:1 assignment + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + } + lval := b.addr(fn, id, false) // non-escaping + b.assign(fn, lval, spec.Values[i], true, nil) + } + + case len(spec.Values) == 0: + // e.g. var x, y int + // Locals are implicitly zero-initialized. + for _, id := range spec.Names { + if !isBlankIdent(id) { + lhs := fn.addLocalForIdent(id) + if fn.debugInfo() { + emitDebugRef(fn, id, lhs, true) + } + } + } + + default: + // e.g. var x, y = pos() + tuple := b.exprN(fn, spec.Values[0]) + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + lhs := b.addr(fn, id, false) // non-escaping + lhs.store(fn, emitExtract(fn, tuple, i)) + } + } + } +} + +// assignStmt emits code to fn for a parallel assignment of rhss to lhss. +// isDef is true if this is a short variable declaration (:=). +// +// Note the similarity with localValueSpec. 
+// +func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { + // Side effects of all LHSs and RHSs must occur in left-to-right order. + lvals := make([]lvalue, len(lhss)) + isZero := make([]bool, len(lhss)) + for i, lhs := range lhss { + var lval lvalue = blank{} + if !isBlankIdent(lhs) { + if isDef { + if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { + fn.addNamedLocal(obj) + isZero[i] = true + } + } + lval = b.addr(fn, lhs, false) // non-escaping + } + lvals[i] = lval + } + if len(lhss) == len(rhss) { + // Simple assignment: x = f() (!isDef) + // Parallel assignment: x, y = f(), g() (!isDef) + // or short var decl: x, y := f(), g() (isDef) + // + // In all cases, the RHSs may refer to the LHSs, + // so we need a storebuf. + var sb storebuf + for i := range rhss { + b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) + } + sb.emit(fn) + } else { + // e.g. x, y = pos() + tuple := b.exprN(fn, rhss[0]) + emitDebugRef(fn, rhss[0], tuple, false) + for i, lval := range lvals { + lval.store(fn, emitExtract(fn, tuple, i)) + } + } +} + +// arrayLen returns the length of the array whose composite literal elements are elts. +func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { + var max int64 = -1 + var i int64 = -1 + for _, e := range elts { + if kv, ok := e.(*ast.KeyValueExpr); ok { + i = b.expr(fn, kv.Key).(*Const).Int64() + } else { + i++ + } + if i > max { + max = i + } + } + return max + 1 +} + +// compLit emits to fn code to initialize a composite literal e at +// address addr with type typ. +// +// Nested composite literals are recursively initialized in place +// where possible. If isZero is true, compLit assumes that addr +// holds the zero value for typ. +// +// Because the elements of a composite literal may refer to the +// variables being updated, as in the second line below, +// x := T{a: 1} +// x = T{a: x.a} +// all the reads must occur before all the writes. Thus all stores to +// loc are emitted to the storebuf sb for later execution. +// +// A CompositeLit may have pointer type only in the recursive (nested) +// case when the type name is implicit. e.g. in []*T{{}}, the inner +// literal has type *T behaves like &T{}. +// In that case, addr must hold a T, not a *T. 
+// +func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { + typ := deref(fn.Pkg.typeOf(e)) + switch t := typ.Underlying().(type) { + case *types.Struct: + if !isZero && len(e.Elts) != t.NumFields() { + // memclear + sb.store(&address{addr, e.Lbrace, nil}, + zeroValue(fn, deref(addr.Type()))) + isZero = true + } + for i, e := range e.Elts { + fieldIndex := i + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + fname := kv.Key.(*ast.Ident).Name + for i, n := 0, t.NumFields(); i < n; i++ { + sf := t.Field(i) + if sf.Name() == fname { + fieldIndex = i + pos = kv.Colon + e = kv.Value + break + } + } + } + sf := t.Field(fieldIndex) + faddr := &FieldAddr{ + X: addr, + Field: fieldIndex, + } + faddr.setType(types.NewPointer(sf.Type())) + fn.emit(faddr) + b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) + } + + case *types.Array, *types.Slice: + var at *types.Array + var array Value + switch t := t.(type) { + case *types.Slice: + at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) + alloc := emitNew(fn, at, e.Lbrace) + alloc.Comment = "slicelit" + array = alloc + case *types.Array: + at = t + array = addr + + if !isZero && int64(len(e.Elts)) != at.Len() { + // memclear + sb.store(&address{array, e.Lbrace, nil}, + zeroValue(fn, deref(array.Type()))) + } + } + + var idx *Const + for _, e := range e.Elts { + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + idx = b.expr(fn, kv.Key).(*Const) + pos = kv.Colon + e = kv.Value + } else { + var idxval int64 + if idx != nil { + idxval = idx.Int64() + 1 + } + idx = intConst(idxval) + } + iaddr := &IndexAddr{ + X: array, + Index: idx, + } + iaddr.setType(types.NewPointer(at.Elem())) + fn.emit(iaddr) + if t != at { // slice + // backing array is unaliased => storebuf not needed. + b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) + } else { + b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) + } + } + + if t != at { // slice + s := &Slice{X: array} + s.setPos(e.Lbrace) + s.setType(typ) + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) + } + + case *types.Map: + m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} + m.setPos(e.Lbrace) + m.setType(typ) + fn.emit(m) + for _, e := range e.Elts { + e := e.(*ast.KeyValueExpr) + + // If a key expression in a map literal is itself a + // composite literal, the type may be omitted. + // For example: + // map[*struct{}]bool{{}: true} + // An &-operation may be implied: + // map[*struct{}]bool{&struct{}{}: true} + var key Value + if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + key = b.addr(fn, e.Key, true).address(fn) + } else { + key = b.expr(fn, e.Key) + } + + loc := element{ + m: m, + k: emitConv(fn, key, t.Key()), + t: t.Elem(), + pos: e.Colon, + } + + // We call assign() only because it takes care + // of any &-operation required in the recursive + // case, e.g., + // map[int]*struct{}{0: {}} implies &struct{}{}. + // In-place update is of course impossible, + // and no storebuf is needed. + b.assign(fn, &loc, e.Value, true, nil) + } + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) + + default: + panic("unexpected CompositeLit type: " + t.String()) + } +} + +// switchStmt emits to fn code for the switch statement s, optionally +// labelled by label. 
+// +func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { + // We treat SwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches() + // to those cases that are free of side effects. + if s.Init != nil { + b.stmt(fn, s.Init) + } + var tag Value = vTrue + if s.Tag != nil { + tag = b.expr(fn, s.Tag) + } + done := fn.newBasicBlock("switch.done") + if label != nil { + label._break = done + } + // We pull the default case (if present) down to the end. + // But each fallthrough label must point to the next + // body block in source order, so we preallocate a + // body block (fallthru) for the next case. + // Unfortunately this makes for a confusing block order. + var dfltBody *[]ast.Stmt + var dfltFallthrough *BasicBlock + var fallthru, dfltBlock *BasicBlock + ncases := len(s.Body.List) + for i, clause := range s.Body.List { + body := fallthru + if body == nil { + body = fn.newBasicBlock("switch.body") // first case only + } + + // Preallocate body block for the next case. + fallthru = done + if i+1 < ncases { + fallthru = fn.newBasicBlock("switch.body") + } + + cc := clause.(*ast.CaseClause) + if cc.List == nil { + // Default case. + dfltBody = &cc.Body + dfltFallthrough = fallthru + dfltBlock = body + continue + } + + var nextCond *BasicBlock + for _, cond := range cc.List { + nextCond = fn.newBasicBlock("switch.next") + // TODO(adonovan): opt: when tag==vTrue, we'd + // get better code if we use b.cond(cond) + // instead of BinOp(EQL, tag, b.expr(cond)) + // followed by If. Don't forget conversions + // though. + cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) + emitIf(fn, cond, body, nextCond) + fn.currentBlock = nextCond + } + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: fallthru, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = nextCond + } + if dfltBlock != nil { + emitJump(fn, dfltBlock) + fn.currentBlock = dfltBlock + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: dfltFallthrough, + } + b.stmtList(fn, *dfltBody) + fn.targets = fn.targets.tail + } + emitJump(fn, done) + fn.currentBlock = done +} + +// typeSwitchStmt emits to fn code for the type switch statement s, optionally +// labelled by label. +// +func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { + // We treat TypeSwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches(). + + // Typeswitch lowering: + // + // var x X + // switch y := x.(type) { + // case T1, T2: S1 // >1 (y := x) + // case nil: SN // nil (y := x) + // default: SD // 0 types (y := x) + // case T3: S3 // 1 type (y := x.(T3)) + // } + // + // ...s.Init... + // x := eval x + // .caseT1: + // t1, ok1 := typeswitch,ok x + // if ok1 then goto S1 else goto .caseT2 + // .caseT2: + // t2, ok2 := typeswitch,ok x + // if ok2 then goto S1 else goto .caseNil + // .S1: + // y := x + // ...S1... + // goto done + // .caseNil: + // if t2, ok2 := typeswitch,ok x + // if x == nil then goto SN else goto .caseT3 + // .SN: + // y := x + // ...SN... + // goto done + // .caseT3: + // t3, ok3 := typeswitch,ok x + // if ok3 then goto S3 else goto default + // .S3: + // y := t3 + // ...S3... + // goto done + // .default: + // y := x + // ...SD... 
+ // goto done + // .done: + + if s.Init != nil { + b.stmt(fn, s.Init) + } + + var x Value + switch ass := s.Assign.(type) { + case *ast.ExprStmt: // x.(type) + x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) + case *ast.AssignStmt: // y := x.(type) + x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) + } + + done := fn.newBasicBlock("typeswitch.done") + if label != nil { + label._break = done + } + var default_ *ast.CaseClause + for _, clause := range s.Body.List { + cc := clause.(*ast.CaseClause) + if cc.List == nil { + default_ = cc + continue + } + body := fn.newBasicBlock("typeswitch.body") + var next *BasicBlock + var casetype types.Type + var ti Value // ti, ok := typeassert,ok x + for _, cond := range cc.List { + next = fn.newBasicBlock("typeswitch.next") + casetype = fn.Pkg.typeOf(cond) + var condv Value + if casetype == tUntypedNil { + condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos) + ti = x + } else { + yok := emitTypeTest(fn, x, casetype, cc.Case) + ti = emitExtract(fn, yok, 0) + condv = emitExtract(fn, yok, 1) + } + emitIf(fn, condv, body, next) + fn.currentBlock = next + } + if len(cc.List) != 1 { + ti = x + } + fn.currentBlock = body + b.typeCaseBody(fn, cc, ti, done) + fn.currentBlock = next + } + if default_ != nil { + b.typeCaseBody(fn, default_, x, done) + } else { + emitJump(fn, done) + } + fn.currentBlock = done +} + +func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { + if obj := fn.Pkg.info.Implicits[cc]; obj != nil { + // In a switch y := x.(type), each case clause + // implicitly declares a distinct object y. + // In a single-type case, y has that type. + // In multi-type cases, 'case nil' and default, + // y has the same type as the interface operand. + emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos()) + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) +} + +// selectStmt emits to fn code for the select statement s, optionally +// labelled by label. +// +func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { + // A blocking select of a single case degenerates to a + // simple send or receive. + // TODO(adonovan): opt: is this optimization worth its weight? + if len(s.Body.List) == 1 { + clause := s.Body.List[0].(*ast.CommClause) + if clause.Comm != nil { + b.stmt(fn, clause.Comm) + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = done + return + } + } + + // First evaluate all channels in all cases, and find + // the directions of each state. 
+ var states []*SelectState + blocking := true + debugInfo := fn.debugInfo() + for _, clause := range s.Body.List { + var st *SelectState + switch comm := clause.(*ast.CommClause).Comm.(type) { + case nil: // default case + blocking = false + continue + + case *ast.SendStmt: // ch<- i + ch := b.expr(fn, comm.Chan) + st = &SelectState{ + Dir: types.SendOnly, + Chan: ch, + Send: emitConv(fn, b.expr(fn, comm.Value), + ch.Type().Underlying().(*types.Chan).Elem()), + Pos: comm.Arrow, + } + if debugInfo { + st.DebugNode = comm + } + + case *ast.AssignStmt: // x := <-ch + recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + + case *ast.ExprStmt: // <-ch + recv := unparen(comm.X).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + } + states = append(states, st) + } + + // We dispatch on the (fair) result of Select using a + // sequential if-else chain, in effect: + // + // idx, recvOk, r0...r_n-1 := select(...) + // if idx == 0 { // receive on channel 0 (first receive => r0) + // x, ok := r0, recvOk + // ...state0... + // } else if v == 1 { // send on channel 1 + // ...state1... + // } else { + // ...default... + // } + sel := &Select{ + States: states, + Blocking: blocking, + } + sel.setPos(s.Select) + var vars []*types.Var + vars = append(vars, varIndex, varOk) + for _, st := range states { + if st.Dir == types.RecvOnly { + tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() + vars = append(vars, anonVar(tElem)) + } + } + sel.setType(types.NewTuple(vars...)) + + fn.emit(sel) + idx := emitExtract(fn, sel, 0) + + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + + var defaultBody *[]ast.Stmt + state := 0 + r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV + for _, cc := range s.Body.List { + clause := cc.(*ast.CommClause) + if clause.Comm == nil { + defaultBody = &clause.Body + continue + } + body := fn.newBasicBlock("select.body") + next := fn.newBasicBlock("select.next") + emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + switch comm := clause.Comm.(type) { + case *ast.ExprStmt: // <-ch + if debugInfo { + v := emitExtract(fn, sel, r) + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + r++ + + case *ast.AssignStmt: // x := <-states[state].Chan + if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) + } + x := b.addr(fn, comm.Lhs[0], false) // non-escaping + v := emitExtract(fn, sel, r) + if debugInfo { + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + x.store(fn, v) + + if len(comm.Lhs) == 2 { // x, ok := ... + if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) + } + ok := b.addr(fn, comm.Lhs[1], false) // non-escaping + ok.store(fn, emitExtract(fn, sel, 1)) + } + r++ + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = next + state++ + } + if defaultBody != nil { + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, *defaultBody) + fn.targets = fn.targets.tail + } else { + // A blocking select must match some case. + // (This should really be a runtime.errorString, not a string.) 
+ fn.emit(&Panic{ + X: emitConv(fn, stringConst("blocking select matched no case"), tEface), + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + } + emitJump(fn, done) + fn.currentBlock = done +} + +// forStmt emits to fn code for the for statement s, optionally +// labelled by label. +// +func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { + // ...init... + // jump loop + // loop: + // if cond goto body else done + // body: + // ...body... + // jump post + // post: (target of continue) + // ...post... + // jump loop + // done: (target of break) + if s.Init != nil { + b.stmt(fn, s.Init) + } + body := fn.newBasicBlock("for.body") + done := fn.newBasicBlock("for.done") // target of 'break' + loop := body // target of back-edge + if s.Cond != nil { + loop = fn.newBasicBlock("for.loop") + } + cont := loop // target of 'continue' + if s.Post != nil { + cont = fn.newBasicBlock("for.post") + } + if label != nil { + label._break = done + label._continue = cont + } + emitJump(fn, loop) + fn.currentBlock = loop + if loop != body { + b.cond(fn, s.Cond, body, done) + fn.currentBlock = body + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: cont, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, cont) + + if s.Post != nil { + fn.currentBlock = cont + b.stmt(fn, s.Post) + emitJump(fn, loop) // back-edge + } + fn.currentBlock = done +} + +// rangeIndexed emits to fn the header for an integer-indexed loop +// over array, *array or slice value x. +// The v result is defined only if tv is non-nil. +// forPos is the position of the "for" token. +// +func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // length = len(x) + // index = -1 + // loop: (target of continue) + // index++ + // if index < length goto body else done + // body: + // k = index + // v = x[index] + // ...body... + // jump loop + // done: (target of break) + + // Determine number of iterations. + var length Value + if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { + // For array or *array, the number of iterations is + // known statically thanks to the type. We avoid a + // data dependence upon x, permitting later dead-code + // elimination if x is pure, static unrolling, etc. + // Ranging over a nil *array may have >0 iterations. + // We still generate code for x, in case it has effects. + length = intConst(arr.Len()) + } else { + // length = len(x). 
+ var c Call + c.Call.Value = makeLen(x.Type()) + c.Call.Args = []Value{x} + c.setType(tInt) + length = fn.emit(&c) + } + + index := fn.addLocal(tInt, token.NoPos) + emitStore(fn, index, intConst(-1), pos) + + loop = fn.newBasicBlock("rangeindex.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + incr := &BinOp{ + Op: token.ADD, + X: emitLoad(fn, index), + Y: vOne, + } + incr.setType(tInt) + emitStore(fn, index, fn.emit(incr), pos) + + body := fn.newBasicBlock("rangeindex.body") + done = fn.newBasicBlock("rangeindex.done") + emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) + fn.currentBlock = body + + k = emitLoad(fn, index) + if tv != nil { + switch t := x.Type().Underlying().(type) { + case *types.Array: + instr := &Index{ + X: x, + Index: k, + } + instr.setType(t.Elem()) + v = fn.emit(instr) + + case *types.Pointer: // *array + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) + v = emitLoad(fn, fn.emit(instr)) + + case *types.Slice: + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem())) + v = emitLoad(fn, fn.emit(instr)) + + default: + panic("rangeIndexed x:" + t.String()) + } + } + return +} + +// rangeIter emits to fn the header for a loop using +// Range/Next/Extract to iterate over map or string value x. +// tk and tv are the types of the key/value results k and v, or nil +// if the respective component is not wanted. +// +func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // it = range x + // loop: (target of continue) + // okv = next it (ok, key, value) + // ok = extract okv #0 + // if ok goto body else done + // body: + // k = extract okv #1 + // v = extract okv #2 + // ...body... + // jump loop + // done: (target of break) + // + + if tk == nil { + tk = tInvalid + } + if tv == nil { + tv = tInvalid + } + + rng := &Range{X: x} + rng.setPos(pos) + rng.setType(tRangeIter) + it := fn.emit(rng) + + loop = fn.newBasicBlock("rangeiter.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + _, isString := x.Type().Underlying().(*types.Basic) + + okv := &Next{ + Iter: it, + IsString: isString, + } + okv.setType(types.NewTuple( + varOk, + newVar("k", tk), + newVar("v", tv), + )) + fn.emit(okv) + + body := fn.newBasicBlock("rangeiter.body") + done = fn.newBasicBlock("rangeiter.done") + emitIf(fn, emitExtract(fn, okv, 0), body, done) + fn.currentBlock = body + + if tk != tInvalid { + k = emitExtract(fn, okv, 1) + } + if tv != tInvalid { + v = emitExtract(fn, okv, 2) + } + return +} + +// rangeChan emits to fn the header for a loop that receives from +// channel x until it fails. +// tk is the channel's element type, or nil if the k result is +// not wanted +// pos is the position of the '=' or ':=' token. +// +func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { + // + // loop: (target of continue) + // ko = <-x (key, ok) + // ok = extract ko #1 + // if ok goto body else done + // body: + // k = extract ko #0 + // ... 
+ // goto loop + // done: (target of break) + + loop = fn.newBasicBlock("rangechan.loop") + emitJump(fn, loop) + fn.currentBlock = loop + recv := &UnOp{ + Op: token.ARROW, + X: x, + CommaOk: true, + } + recv.setPos(pos) + recv.setType(types.NewTuple( + newVar("k", x.Type().Underlying().(*types.Chan).Elem()), + varOk, + )) + ko := fn.emit(recv) + body := fn.newBasicBlock("rangechan.body") + done = fn.newBasicBlock("rangechan.done") + emitIf(fn, emitExtract(fn, ko, 1), body, done) + fn.currentBlock = body + if tk != nil { + k = emitExtract(fn, ko, 0) + } + return +} + +// rangeStmt emits to fn code for the range statement s, optionally +// labelled by label. +// +func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { + var tk, tv types.Type + if s.Key != nil && !isBlankIdent(s.Key) { + tk = fn.Pkg.typeOf(s.Key) + } + if s.Value != nil && !isBlankIdent(s.Value) { + tv = fn.Pkg.typeOf(s.Value) + } + + // If iteration variables are defined (:=), this + // occurs once outside the loop. + // + // Unlike a short variable declaration, a RangeStmt + // using := never redeclares an existing variable; it + // always creates a new one. + if s.Tok == token.DEFINE { + if tk != nil { + fn.addLocalForIdent(s.Key.(*ast.Ident)) + } + if tv != nil { + fn.addLocalForIdent(s.Value.(*ast.Ident)) + } + } + + x := b.expr(fn, s.X) + + var k, v Value + var loop, done *BasicBlock + switch rt := x.Type().Underlying().(type) { + case *types.Slice, *types.Array, *types.Pointer: // *array + k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) + + case *types.Chan: + k, loop, done = b.rangeChan(fn, x, tk, s.For) + + case *types.Map, *types.Basic: // string + k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) + + default: + panic("Cannot range over: " + rt.String()) + } + + // Evaluate both LHS expressions before we update either. + var kl, vl lvalue + if tk != nil { + kl = b.addr(fn, s.Key, false) // non-escaping + } + if tv != nil { + vl = b.addr(fn, s.Value, false) // non-escaping + } + if tk != nil { + kl.store(fn, k) + } + if tv != nil { + vl.store(fn, v) + } + + if label != nil { + label._break = done + label._continue = loop + } + + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: loop, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, loop) // back-edge + fn.currentBlock = done +} + +// stmt lowers statement s to SSA form, emitting code to fn. +func (b *builder) stmt(fn *Function, _s ast.Stmt) { + // The label of the current statement. If non-nil, its _goto + // target is always set; its _break and _continue are set only + // within the body of switch/typeswitch/select/for/range. + // It is effectively an additional default-nil parameter of stmt(). + var label *lblock +start: + switch s := _s.(type) { + case *ast.EmptyStmt: + // ignore. (Usually removed by gofmt.) 
+ + case *ast.DeclStmt: // Con, Var or Typ + d := s.Decl.(*ast.GenDecl) + if d.Tok == token.VAR { + for _, spec := range d.Specs { + if vs, ok := spec.(*ast.ValueSpec); ok { + b.localValueSpec(fn, vs) + } + } + } + + case *ast.LabeledStmt: + label = fn.labelledBlock(s.Label) + emitJump(fn, label._goto) + fn.currentBlock = label._goto + _s = s.Stmt + goto start // effectively: tailcall stmt(fn, s.Stmt, label) + + case *ast.ExprStmt: + b.expr(fn, s.X) + + case *ast.SendStmt: + fn.emit(&Send{ + Chan: b.expr(fn, s.Chan), + X: emitConv(fn, b.expr(fn, s.Value), + fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()), + pos: s.Arrow, + }) + + case *ast.IncDecStmt: + op := token.ADD + if s.Tok == token.DEC { + op = token.SUB + } + loc := b.addr(fn, s.X, false) + b.assignOp(fn, loc, NewConst(exact.MakeInt64(1), loc.typ()), op, s.Pos()) + + case *ast.AssignStmt: + switch s.Tok { + case token.ASSIGN, token.DEFINE: + b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) + + default: // +=, etc. + op := s.Tok + token.ADD - token.ADD_ASSIGN + b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) + } + + case *ast.GoStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Go{pos: s.Go} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + case *ast.DeferStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Defer{pos: s.Defer} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + // A deferred call can cause recovery from panic, + // and control resumes at the Recover block. + createRecoverBlock(fn) + + case *ast.ReturnStmt: + var results []Value + if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { + // Return of one expression in a multi-valued function. + tuple := b.exprN(fn, s.Results[0]) + ttuple := tuple.Type().(*types.Tuple) + for i, n := 0, ttuple.Len(); i < n; i++ { + results = append(results, + emitConv(fn, emitExtract(fn, tuple, i), + fn.Signature.Results().At(i).Type())) + } + } else { + // 1:1 return, or no-arg return in non-void function. + for i, r := range s.Results { + v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type()) + results = append(results, v) + } + } + if fn.namedResults != nil { + // Function has named result parameters (NRPs). + // Perform parallel assignment of return operands to NRPs. + for i, r := range results { + emitStore(fn, fn.namedResults[i], r, s.Return) + } + } + // Run function calls deferred in this + // function when explicitly returning from it. + fn.emit(new(RunDefers)) + if fn.namedResults != nil { + // Reload NRPs to form the result tuple. 
+ results = results[:0] + for _, r := range fn.namedResults { + results = append(results, emitLoad(fn, r)) + } + } + fn.emit(&Return{Results: results, pos: s.Return}) + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BranchStmt: + var block *BasicBlock + switch s.Tok { + case token.BREAK: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._break + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._break + } + } + + case token.CONTINUE: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._continue + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._continue + } + } + + case token.FALLTHROUGH: + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._fallthrough + } + + case token.GOTO: + block = fn.labelledBlock(s.Label)._goto + } + emitJump(fn, block) + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BlockStmt: + b.stmtList(fn, s.List) + + case *ast.IfStmt: + if s.Init != nil { + b.stmt(fn, s.Init) + } + then := fn.newBasicBlock("if.then") + done := fn.newBasicBlock("if.done") + els := done + if s.Else != nil { + els = fn.newBasicBlock("if.else") + } + b.cond(fn, s.Cond, then, els) + fn.currentBlock = then + b.stmt(fn, s.Body) + emitJump(fn, done) + + if s.Else != nil { + fn.currentBlock = els + b.stmt(fn, s.Else) + emitJump(fn, done) + } + + fn.currentBlock = done + + case *ast.SwitchStmt: + b.switchStmt(fn, s, label) + + case *ast.TypeSwitchStmt: + b.typeSwitchStmt(fn, s, label) + + case *ast.SelectStmt: + b.selectStmt(fn, s, label) + + case *ast.ForStmt: + b.forStmt(fn, s, label) + + case *ast.RangeStmt: + b.rangeStmt(fn, s, label) + + default: + panic(fmt.Sprintf("unexpected statement kind: %T", s)) + } +} + +// buildFunction builds SSA code for the body of function fn. Idempotent. +func (b *builder) buildFunction(fn *Function) { + if fn.Blocks != nil { + return // building already started + } + + var recvField *ast.FieldList + var body *ast.BlockStmt + var functype *ast.FuncType + switch n := fn.syntax.(type) { + case nil: + return // not a Go source function. (Synthetic, or from object file.) + case *ast.FuncDecl: + functype = n.Type + recvField = n.Recv + body = n.Body + case *ast.FuncLit: + functype = n.Type + body = n.Body + default: + panic(n) + } + + if body == nil { + // External function. + if fn.Params == nil { + // This condition ensures we add a non-empty + // params list once only, but we may attempt + // the degenerate empty case repeatedly. + // TODO(adonovan): opt: don't do that. + + // We set Function.Params even though there is no body + // code to reference them. This simplifies clients. + if recv := fn.Signature.Recv(); recv != nil { + fn.addParamObj(recv) + } + params := fn.Signature.Params() + for i, n := 0, params.Len(); i < n; i++ { + fn.addParamObj(params.At(i)) + } + } + return + } + if fn.Prog.mode&LogSource != 0 { + defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() + } + fn.startBody() + fn.createSyntacticParams(recvField, functype) + b.stmt(fn, body) + if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { + // Control fell off the end of the function's body block. + // + // Block optimizations eliminate the current block, if + // unreachable. It is a builder invariant that + // if this no-arg return is ill-typed for + // fn.Signature.Results, this block must be + // unreachable. The sanity checker checks this. 
+ fn.emit(new(RunDefers)) + fn.emit(new(Return)) + } + fn.finishBody() +} + +// buildFuncDecl builds SSA code for the function or method declared +// by decl in package pkg. +// +func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { + id := decl.Name + if isBlankIdent(id) { + return // discard + } + fn := pkg.values[pkg.info.Defs[id]].(*Function) + if decl.Recv == nil && id.Name == "init" { + var v Call + v.Call.Value = fn + v.setType(types.NewTuple()) + pkg.init.emit(&v) + } + b.buildFunction(fn) +} + +// Build calls Package.Build for each package in prog. +// Building occurs in parallel unless the BuildSerially mode flag was set. +// +// Build is intended for whole-program analysis; a typical compiler +// need only build a single package. +// +// Build is idempotent and thread-safe. +// +func (prog *Program) Build() { + var wg sync.WaitGroup + for _, p := range prog.packages { + if prog.mode&BuildSerially != 0 { + p.Build() + } else { + wg.Add(1) + go func(p *Package) { + p.Build() + wg.Done() + }(p) + } + } + wg.Wait() +} + +// Build builds SSA code for all functions and vars in package p. +// +// Precondition: CreatePackage must have been called for all of p's +// direct imports (and hence its direct imports must have been +// error-free). +// +// Build is idempotent and thread-safe. +// +func (p *Package) Build() { p.buildOnce.Do(p.build) } + +func (p *Package) build() { + if p.info == nil { + return // synthetic package, e.g. "testmain" + } + if p.files == nil { + p.info = nil + return // package loaded from export data + } + + // Ensure we have runtime type info for all exported members. + // TODO(adonovan): ideally belongs in memberFromObject, but + // that would require package creation in topological order. + for name, mem := range p.Members { + if ast.IsExported(name) { + p.Prog.needMethodsOf(mem.Type()) + } + } + if p.Prog.mode&LogSource != 0 { + defer logStack("build %s", p)() + } + init := p.init + init.startBody() + + var done *BasicBlock + + if p.Prog.mode&BareInits == 0 { + // Make init() skip if package is already initialized. + initguard := p.Var("init$guard") + doinit := init.newBasicBlock("init.start") + done = init.newBasicBlock("init.done") + emitIf(init, emitLoad(init, initguard), done, doinit) + init.currentBlock = doinit + emitStore(init, initguard, vTrue, token.NoPos) + + // Call the init() function of each package we import. + for _, pkg := range p.Pkg.Imports() { + prereq := p.Prog.packages[pkg] + if prereq == nil { + panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) + } + var v Call + v.Call.Value = prereq.init + v.Call.pos = init.pos + v.setType(types.NewTuple()) + init.emit(&v) + } + } + + var b builder + + // Initialize package-level vars in correct order. 
+ for _, varinit := range p.info.InitOrder { + if init.Prog.mode&LogSource != 0 { + fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", + varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) + } + if len(varinit.Lhs) == 1 { + // 1:1 initialization: var x, y = a(), b() + var lval lvalue + if v := varinit.Lhs[0]; v.Name() != "_" { + lval = &address{addr: p.values[v].(*Global), pos: v.Pos()} + } else { + lval = blank{} + } + b.assign(init, lval, varinit.Rhs, true, nil) + } else { + // n:1 initialization: var x, y := f() + tuple := b.exprN(init, varinit.Rhs) + for i, v := range varinit.Lhs { + if v.Name() == "_" { + continue + } + emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos()) + } + } + } + + // Build all package-level functions, init functions + // and methods, including unreachable/blank ones. + // We build them in source order, but it's not significant. + for _, file := range p.files { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + b.buildFuncDecl(p, decl) + } + } + } + + // Finish up init(). + if p.Prog.mode&BareInits == 0 { + emitJump(init, done) + init.currentBlock = done + } + init.emit(new(Return)) + init.finishBody() + + p.info = nil // We no longer need ASTs or go/types deductions. + + if p.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckPackage(p) + } +} + +// Like ObjectOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) objectOf(id *ast.Ident) types.Object { + if o := p.info.ObjectOf(id); o != nil { + return o + } + panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", + id.Name, p.Prog.Fset.Position(id.Pos()))) +} + +// Like TypeOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) typeOf(e ast.Expr) types.Type { + if T := p.info.TypeOf(e); T != nil { + return T + } + panic(fmt.Sprintf("no type for %T @ %s", + e, p.Prog.Fset.Position(e.Pos()))) +} diff --git a/vendor/honnef.co/go/tools/ssa/const.go b/vendor/honnef.co/go/tools/ssa/const.go new file mode 100644 index 0000000..3606e0f --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/const.go @@ -0,0 +1,171 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.6 + +package ssa + +// This file defines the Const SSA value type. + +import ( + "fmt" + exact "go/constant" + "go/token" + "go/types" + "strconv" +) + +// NewConst returns a new constant of the specified value and type. +// val must be valid according to the specification of Const.Value. +// +func NewConst(val exact.Value, typ types.Type) *Const { + return &Const{typ, val} +} + +// intConst returns an 'int' constant that evaluates to i. +// (i is an int64 in case the host is narrower than the target.) +func intConst(i int64) *Const { + return NewConst(exact.MakeInt64(i), tInt) +} + +// nilConst returns a nil constant of the specified type, which may +// be any reference type, including interfaces. +// +func nilConst(typ types.Type) *Const { + return NewConst(nil, typ) +} + +// stringConst returns a 'string' constant that evaluates to s. +func stringConst(s string) *Const { + return NewConst(exact.MakeString(s), tString) +} + +// zeroConst returns a new "zero" constant of the specified type, +// which must not be an array or struct type: the zero values of +// aggregates are well-defined but cannot be represented by Const. 
+// +func zeroConst(t types.Type) *Const { + switch t := t.(type) { + case *types.Basic: + switch { + case t.Info()&types.IsBoolean != 0: + return NewConst(exact.MakeBool(false), t) + case t.Info()&types.IsNumeric != 0: + return NewConst(exact.MakeInt64(0), t) + case t.Info()&types.IsString != 0: + return NewConst(exact.MakeString(""), t) + case t.Kind() == types.UnsafePointer: + fallthrough + case t.Kind() == types.UntypedNil: + return nilConst(t) + default: + panic(fmt.Sprint("zeroConst for unexpected type:", t)) + } + case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: + return nilConst(t) + case *types.Named: + return NewConst(zeroConst(t.Underlying()).Value, t) + case *types.Array, *types.Struct, *types.Tuple: + panic(fmt.Sprint("zeroConst applied to aggregate:", t)) + } + panic(fmt.Sprint("zeroConst: unexpected ", t)) +} + +func (c *Const) RelString(from *types.Package) string { + var s string + if c.Value == nil { + s = "nil" + } else if c.Value.Kind() == exact.String { + s = exact.StringVal(c.Value) + const max = 20 + // TODO(adonovan): don't cut a rune in half. + if len(s) > max { + s = s[:max-3] + "..." // abbreviate + } + s = strconv.Quote(s) + } else { + s = c.Value.String() + } + return s + ":" + relType(c.Type(), from) +} + +func (c *Const) Name() string { + return c.RelString(nil) +} + +func (c *Const) String() string { + return c.Name() +} + +func (c *Const) Type() types.Type { + return c.typ +} + +func (c *Const) Referrers() *[]Instruction { + return nil +} + +func (c *Const) Parent() *Function { return nil } + +func (c *Const) Pos() token.Pos { + return token.NoPos +} + +// IsNil returns true if this constant represents a typed or untyped nil value. +func (c *Const) IsNil() bool { + return c.Value == nil +} + +// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp. + +// Int64 returns the numeric value of this constant truncated to fit +// a signed 64-bit integer. +// +func (c *Const) Int64() int64 { + switch x := exact.ToInt(c.Value); x.Kind() { + case exact.Int: + if i, ok := exact.Int64Val(x); ok { + return i + } + return 0 + case exact.Float: + f, _ := exact.Float64Val(x) + return int64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Uint64 returns the numeric value of this constant truncated to fit +// an unsigned 64-bit integer. +// +func (c *Const) Uint64() uint64 { + switch x := exact.ToInt(c.Value); x.Kind() { + case exact.Int: + if u, ok := exact.Uint64Val(x); ok { + return u + } + return 0 + case exact.Float: + f, _ := exact.Float64Val(x) + return uint64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Float64 returns the numeric value of this constant truncated to fit +// a float64. +// +func (c *Const) Float64() float64 { + f, _ := exact.Float64Val(c.Value) + return f +} + +// Complex128 returns the complex value of this constant truncated to +// fit a complex128. +// +func (c *Const) Complex128() complex128 { + re, _ := exact.Float64Val(exact.Real(c.Value)) + im, _ := exact.Float64Val(exact.Imag(c.Value)) + return complex(re, im) +} diff --git a/vendor/honnef.co/go/tools/ssa/const15.go b/vendor/honnef.co/go/tools/ssa/const15.go new file mode 100644 index 0000000..49b5333 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/const15.go @@ -0,0 +1,171 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build go1.5,!go1.6 + +package ssa + +// This file defines the Const SSA value type. + +import ( + "fmt" + exact "go/constant" + "go/token" + "go/types" + "strconv" +) + +// NewConst returns a new constant of the specified value and type. +// val must be valid according to the specification of Const.Value. +// +func NewConst(val exact.Value, typ types.Type) *Const { + return &Const{typ, val} +} + +// intConst returns an 'int' constant that evaluates to i. +// (i is an int64 in case the host is narrower than the target.) +func intConst(i int64) *Const { + return NewConst(exact.MakeInt64(i), tInt) +} + +// nilConst returns a nil constant of the specified type, which may +// be any reference type, including interfaces. +// +func nilConst(typ types.Type) *Const { + return NewConst(nil, typ) +} + +// stringConst returns a 'string' constant that evaluates to s. +func stringConst(s string) *Const { + return NewConst(exact.MakeString(s), tString) +} + +// zeroConst returns a new "zero" constant of the specified type, +// which must not be an array or struct type: the zero values of +// aggregates are well-defined but cannot be represented by Const. +// +func zeroConst(t types.Type) *Const { + switch t := t.(type) { + case *types.Basic: + switch { + case t.Info()&types.IsBoolean != 0: + return NewConst(exact.MakeBool(false), t) + case t.Info()&types.IsNumeric != 0: + return NewConst(exact.MakeInt64(0), t) + case t.Info()&types.IsString != 0: + return NewConst(exact.MakeString(""), t) + case t.Kind() == types.UnsafePointer: + fallthrough + case t.Kind() == types.UntypedNil: + return nilConst(t) + default: + panic(fmt.Sprint("zeroConst for unexpected type:", t)) + } + case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: + return nilConst(t) + case *types.Named: + return NewConst(zeroConst(t.Underlying()).Value, t) + case *types.Array, *types.Struct, *types.Tuple: + panic(fmt.Sprint("zeroConst applied to aggregate:", t)) + } + panic(fmt.Sprint("zeroConst: unexpected ", t)) +} + +func (c *Const) RelString(from *types.Package) string { + var s string + if c.Value == nil { + s = "nil" + } else if c.Value.Kind() == exact.String { + s = exact.StringVal(c.Value) + const max = 20 + // TODO(adonovan): don't cut a rune in half. + if len(s) > max { + s = s[:max-3] + "..." // abbreviate + } + s = strconv.Quote(s) + } else { + s = c.Value.String() + } + return s + ":" + relType(c.Type(), from) +} + +func (c *Const) Name() string { + return c.RelString(nil) +} + +func (c *Const) String() string { + return c.Name() +} + +func (c *Const) Type() types.Type { + return c.typ +} + +func (c *Const) Referrers() *[]Instruction { + return nil +} + +func (c *Const) Parent() *Function { return nil } + +func (c *Const) Pos() token.Pos { + return token.NoPos +} + +// IsNil returns true if this constant represents a typed or untyped nil value. +func (c *Const) IsNil() bool { + return c.Value == nil +} + +// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp. + +// Int64 returns the numeric value of this constant truncated to fit +// a signed 64-bit integer. +// +func (c *Const) Int64() int64 { + switch x := c.Value; x.Kind() { + case exact.Int: + if i, ok := exact.Int64Val(x); ok { + return i + } + return 0 + case exact.Float: + f, _ := exact.Float64Val(x) + return int64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Uint64 returns the numeric value of this constant truncated to fit +// an unsigned 64-bit integer. 
+// +func (c *Const) Uint64() uint64 { + switch x := c.Value; x.Kind() { + case exact.Int: + if u, ok := exact.Uint64Val(x); ok { + return u + } + return 0 + case exact.Float: + f, _ := exact.Float64Val(x) + return uint64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Float64 returns the numeric value of this constant truncated to fit +// a float64. +// +func (c *Const) Float64() float64 { + f, _ := exact.Float64Val(c.Value) + return f +} + +// Complex128 returns the complex value of this constant truncated to +// fit a complex128. +// +func (c *Const) Complex128() complex128 { + re, _ := exact.Float64Val(exact.Real(c.Value)) + im, _ := exact.Float64Val(exact.Imag(c.Value)) + return complex(re, im) +} diff --git a/vendor/honnef.co/go/tools/ssa/create.go b/vendor/honnef.co/go/tools/ssa/create.go new file mode 100644 index 0000000..69ac12b --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/create.go @@ -0,0 +1,263 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the CREATE phase of SSA construction. +// See builder.go for explanation. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "sync" + + "golang.org/x/tools/go/types/typeutil" +) + +// NewProgram returns a new SSA Program. +// +// mode controls diagnostics and checking during SSA construction. +// +func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { + prog := &Program{ + Fset: fset, + imported: make(map[string]*Package), + packages: make(map[*types.Package]*Package), + thunks: make(map[selectionKey]*Function), + bounds: make(map[*types.Func]*Function), + mode: mode, + } + + h := typeutil.MakeHasher() // protected by methodsMu, in effect + prog.methodSets.SetHasher(h) + prog.canon.SetHasher(h) + + return prog +} + +// memberFromObject populates package pkg with a member for the +// typechecker object obj. +// +// For objects from Go source code, syntax is the associated syntax +// tree (for funcs and vars only); it will be used during the build +// phase. +// +func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { + name := obj.Name() + switch obj := obj.(type) { + case *types.Builtin: + if pkg.Pkg != types.Unsafe { + panic("unexpected builtin object: " + obj.String()) + } + + case *types.TypeName: + pkg.Members[name] = &Type{ + object: obj, + pkg: pkg, + } + + case *types.Const: + c := &NamedConst{ + object: obj, + Value: NewConst(obj.Val(), obj.Type()), + pkg: pkg, + } + pkg.values[obj] = c.Value + pkg.Members[name] = c + + case *types.Var: + g := &Global{ + Pkg: pkg, + name: name, + object: obj, + typ: types.NewPointer(obj.Type()), // address + pos: obj.Pos(), + } + pkg.values[obj] = g + pkg.Members[name] = g + + case *types.Func: + sig := obj.Type().(*types.Signature) + if sig.Recv() == nil && name == "init" { + pkg.ninit++ + name = fmt.Sprintf("init#%d", pkg.ninit) + } + fn := &Function{ + name: name, + object: obj, + Signature: sig, + syntax: syntax, + pos: obj.Pos(), + Pkg: pkg, + Prog: pkg.Prog, + } + if syntax == nil { + fn.Synthetic = "loaded from gc object file" + } + + pkg.values[obj] = fn + if sig.Recv() == nil { + pkg.Members[name] = fn // package-level function + } + + default: // (incl. 
*types.Package) + panic("unexpected Object type: " + obj.String()) + } +} + +// membersFromDecl populates package pkg with members for each +// typechecker object (var, func, const or type) associated with the +// specified decl. +// +func membersFromDecl(pkg *Package, decl ast.Decl) { + switch decl := decl.(type) { + case *ast.GenDecl: // import, const, type or var + switch decl.Tok { + case token.CONST: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case token.VAR: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], spec) + } + } + } + + case token.TYPE: + for _, spec := range decl.Specs { + id := spec.(*ast.TypeSpec).Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case *ast.FuncDecl: + id := decl.Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], decl) + } + } +} + +// CreatePackage constructs and returns an SSA Package from the +// specified type-checked, error-free file ASTs, and populates its +// Members mapping. +// +// importable determines whether this package should be returned by a +// subsequent call to ImportedPackage(pkg.Path()). +// +// The real work of building SSA form for each function is not done +// until a subsequent call to Package.Build(). +// +func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { + p := &Package{ + Prog: prog, + Members: make(map[string]Member), + values: make(map[types.Object]Value), + Pkg: pkg, + info: info, // transient (CREATE and BUILD phases) + files: files, // transient (CREATE and BUILD phases) + } + + // Add init() function. + p.init = &Function{ + name: "init", + Signature: new(types.Signature), + Synthetic: "package initializer", + Pkg: p, + Prog: prog, + } + p.Members[p.init.name] = p.init + + // CREATE phase. + // Allocate all package members: vars, funcs, consts and types. + if len(files) > 0 { + // Go source package. + for _, file := range files { + for _, decl := range file.Decls { + membersFromDecl(p, decl) + } + } + } else { + // GC-compiled binary package (or "unsafe") + // No code. + // No position information. + scope := p.Pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + memberFromObject(p, obj, nil) + if obj, ok := obj.(*types.TypeName); ok { + if named, ok := obj.Type().(*types.Named); ok { + for i, n := 0, named.NumMethods(); i < n; i++ { + memberFromObject(p, named.Method(i), nil) + } + } + } + } + } + + if prog.mode&BareInits == 0 { + // Add initializer guard variable. + initguard := &Global{ + Pkg: p, + name: "init$guard", + typ: types.NewPointer(tBool), + } + p.Members[initguard.Name()] = initguard + } + + if prog.mode&GlobalDebug != 0 { + p.SetDebugMode(true) + } + + if prog.mode&PrintPackages != 0 { + printMu.Lock() + p.WriteTo(os.Stdout) + printMu.Unlock() + } + + if importable { + prog.imported[p.Pkg.Path()] = p + } + prog.packages[p.Pkg] = p + + return p +} + +// printMu serializes printing of Packages/Functions to stdout. +var printMu sync.Mutex + +// AllPackages returns a new slice containing all packages in the +// program prog in unspecified order. 
+// +func (prog *Program) AllPackages() []*Package { + pkgs := make([]*Package, 0, len(prog.packages)) + for _, pkg := range prog.packages { + pkgs = append(pkgs, pkg) + } + return pkgs +} + +// ImportedPackage returns the importable SSA Package whose import +// path is path, or nil if no such SSA package has been created. +// +// Not all packages are importable. For example, no import +// declaration can resolve to the x_test package created by 'go test' +// or the ad-hoc main package created 'go build foo.go'. +// +func (prog *Program) ImportedPackage(path string) *Package { + return prog.imported[path] +} diff --git a/vendor/honnef.co/go/tools/ssa/doc.go b/vendor/honnef.co/go/tools/ssa/doc.go new file mode 100644 index 0000000..57474dd --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/doc.go @@ -0,0 +1,123 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ssa defines a representation of the elements of Go programs +// (packages, types, functions, variables and constants) using a +// static single-assignment (SSA) form intermediate representation +// (IR) for the bodies of functions. +// +// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. +// +// For an introduction to SSA form, see +// http://en.wikipedia.org/wiki/Static_single_assignment_form. +// This page provides a broader reading list: +// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. +// +// The level of abstraction of the SSA form is intentionally close to +// the source language to facilitate construction of source analysis +// tools. It is not intended for machine code generation. +// +// All looping, branching and switching constructs are replaced with +// unstructured control flow. Higher-level control flow constructs +// such as multi-way branch can be reconstructed as needed; see +// ssautil.Switches() for an example. +// +// To construct an SSA-form program, call ssautil.CreateProgram on a +// loader.Program, a set of type-checked packages created from +// parsed Go source files. The resulting ssa.Program contains all the +// packages and their members, but SSA code is not created for +// function bodies until a subsequent call to (*Package).Build. +// +// The builder initially builds a naive SSA form in which all local +// variables are addresses of stack locations with explicit loads and +// stores. Registerisation of eligible locals and φ-node insertion +// using dominance and dataflow are then performed as a second pass +// called "lifting" to improve the accuracy and performance of +// subsequent analyses; this pass can be skipped by setting the +// NaiveForm builder flag. +// +// The primary interfaces of this package are: +// +// - Member: a named member of a Go package. +// - Value: an expression that yields a value. +// - Instruction: a statement that consumes values and performs computation. +// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph) +// +// A computation that yields a result implements both the Value and +// Instruction interfaces. The following table shows for each +// concrete type which of these interfaces it implements. +// +// Value? Instruction? Member? 
+// *Alloc ✔ ✔ +// *BinOp ✔ ✔ +// *Builtin ✔ +// *Call ✔ ✔ +// *ChangeInterface ✔ ✔ +// *ChangeType ✔ ✔ +// *Const ✔ +// *Convert ✔ ✔ +// *DebugRef ✔ +// *Defer ✔ +// *Extract ✔ ✔ +// *Field ✔ ✔ +// *FieldAddr ✔ ✔ +// *FreeVar ✔ +// *Function ✔ ✔ (func) +// *Global ✔ ✔ (var) +// *Go ✔ +// *If ✔ +// *Index ✔ ✔ +// *IndexAddr ✔ ✔ +// *Jump ✔ +// *Lookup ✔ ✔ +// *MakeChan ✔ ✔ +// *MakeClosure ✔ ✔ +// *MakeInterface ✔ ✔ +// *MakeMap ✔ ✔ +// *MakeSlice ✔ ✔ +// *MapUpdate ✔ +// *NamedConst ✔ (const) +// *Next ✔ ✔ +// *Panic ✔ +// *Parameter ✔ +// *Phi ✔ ✔ +// *Range ✔ ✔ +// *Return ✔ +// *RunDefers ✔ +// *Select ✔ ✔ +// *Send ✔ +// *Slice ✔ ✔ +// *Store ✔ +// *Type ✔ (type) +// *TypeAssert ✔ ✔ +// *UnOp ✔ ✔ +// +// Other key types in this package include: Program, Package, Function +// and BasicBlock. +// +// The program representation constructed by this package is fully +// resolved internally, i.e. it does not rely on the names of Values, +// Packages, Functions, Types or BasicBlocks for the correct +// interpretation of the program. Only the identities of objects and +// the topology of the SSA and type graphs are semantically +// significant. (There is one exception: Ids, used to identify field +// and method names, contain strings.) Avoidance of name-based +// operations simplifies the implementation of subsequent passes and +// can make them very efficient. Many objects are nonetheless named +// to aid in debugging, but it is not essential that the names be +// either accurate or unambiguous. The public API exposes a number of +// name-based maps for client convenience. +// +// The ssa/ssautil package provides various utilities that depend only +// on the public API of this package. +// +// TODO(adonovan): Consider the exceptional control-flow implications +// of defer and recover(). +// +// TODO(adonovan): write a how-to document for all the various cases +// of trying to determine corresponding elements across the four +// domains of source locations, ast.Nodes, types.Objects, +// ssa.Values/Instructions. +// +package ssa // import "honnef.co/go/tools/ssa" diff --git a/vendor/honnef.co/go/tools/ssa/dom.go b/vendor/honnef.co/go/tools/ssa/dom.go new file mode 100644 index 0000000..12ef430 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/dom.go @@ -0,0 +1,341 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines algorithms related to dominance. + +// Dominator tree construction ---------------------------------------- +// +// We use the algorithm described in Lengauer & Tarjan. 1979. A fast +// algorithm for finding dominators in a flowgraph. +// http://doi.acm.org/10.1145/357062.357071 +// +// We also apply the optimizations to SLT described in Georgiadis et +// al, Finding Dominators in Practice, JGAA 2006, +// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf +// to avoid the need for buckets of size > 1. + +import ( + "bytes" + "fmt" + "math/big" + "os" + "sort" +) + +// Idom returns the block that immediately dominates b: +// its parent in the dominator tree, if any. +// Neither the entry node (b.Index==0) nor recover node +// (b==b.Parent().Recover()) have a parent. +// +func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } + +// Dominees returns the list of blocks that b immediately dominates: +// its children in the dominator tree. 
+// +func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } + +// Dominates reports whether b dominates c. +func (b *BasicBlock) Dominates(c *BasicBlock) bool { + return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post +} + +type byDomPreorder []*BasicBlock + +func (a byDomPreorder) Len() int { return len(a) } +func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre } + +// DomPreorder returns a new slice containing the blocks of f in +// dominator tree preorder. +// +func (f *Function) DomPreorder() []*BasicBlock { + n := len(f.Blocks) + order := make(byDomPreorder, n, n) + copy(order, f.Blocks) + sort.Sort(order) + return order +} + +// domInfo contains a BasicBlock's dominance information. +type domInfo struct { + idom *BasicBlock // immediate dominator (parent in domtree) + children []*BasicBlock // nodes immediately dominated by this one + pre, post int32 // pre- and post-order numbering within domtree +} + +// ltState holds the working state for Lengauer-Tarjan algorithm +// (during which domInfo.pre is repurposed for CFG DFS preorder number). +type ltState struct { + // Each slice is indexed by b.Index. + sdom []*BasicBlock // b's semidominator + parent []*BasicBlock // b's parent in DFS traversal of CFG + ancestor []*BasicBlock // b's ancestor with least sdom +} + +// dfs implements the depth-first search part of the LT algorithm. +func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 { + preorder[i] = v + v.dom.pre = i // For now: DFS preorder of spanning tree of CFG + i++ + lt.sdom[v.Index] = v + lt.link(nil, v) + for _, w := range v.Succs { + if lt.sdom[w.Index] == nil { + lt.parent[w.Index] = v + i = lt.dfs(w, i, preorder) + } + } + return i +} + +// eval implements the EVAL part of the LT algorithm. +func (lt *ltState) eval(v *BasicBlock) *BasicBlock { + // TODO(adonovan): opt: do path compression per simple LT. + u := v + for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] { + if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre { + u = v + } + } + return u +} + +// link implements the LINK part of the LT algorithm. +func (lt *ltState) link(v, w *BasicBlock) { + lt.ancestor[w.Index] = v +} + +// buildDomTree computes the dominator tree of f using the LT algorithm. +// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). +// +func buildDomTree(f *Function) { + // The step numbers refer to the original LT paper; the + // reordering is due to Georgiadis. + + // Clear any previous domInfo. + for _, b := range f.Blocks { + b.dom = domInfo{} + } + + n := len(f.Blocks) + // Allocate space for 5 contiguous [n]*BasicBlock arrays: + // sdom, parent, ancestor, preorder, buckets. + space := make([]*BasicBlock, 5*n, 5*n) + lt := ltState{ + sdom: space[0:n], + parent: space[n : 2*n], + ancestor: space[2*n : 3*n], + } + + // Step 1. Number vertices by depth-first preorder. + preorder := space[3*n : 4*n] + root := f.Blocks[0] + prenum := lt.dfs(root, 0, preorder) + recover := f.Recover + if recover != nil { + lt.dfs(recover, prenum, preorder) + } + + buckets := space[4*n : 5*n] + copy(buckets, preorder) + + // In reverse preorder... + for i := int32(n) - 1; i > 0; i-- { + w := preorder[i] + + // Step 3. Implicitly define the immediate dominator of each node. 
+ for v := buckets[i]; v != w; v = buckets[v.dom.pre] { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < i { + v.dom.idom = u + } else { + v.dom.idom = w + } + } + + // Step 2. Compute the semidominators of all nodes. + lt.sdom[w.Index] = lt.parent[w.Index] + for _, v := range w.Preds { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre { + lt.sdom[w.Index] = lt.sdom[u.Index] + } + } + + lt.link(lt.parent[w.Index], w) + + if lt.parent[w.Index] == lt.sdom[w.Index] { + w.dom.idom = lt.parent[w.Index] + } else { + buckets[i] = buckets[lt.sdom[w.Index].dom.pre] + buckets[lt.sdom[w.Index].dom.pre] = w + } + } + + // The final 'Step 3' is now outside the loop. + for v := buckets[0]; v != root; v = buckets[v.dom.pre] { + v.dom.idom = root + } + + // Step 4. Explicitly define the immediate dominator of each + // node, in preorder. + for _, w := range preorder[1:] { + if w == root || w == recover { + w.dom.idom = nil + } else { + if w.dom.idom != lt.sdom[w.Index] { + w.dom.idom = w.dom.idom.dom.idom + } + // Calculate Children relation as inverse of Idom. + w.dom.idom.dom.children = append(w.dom.idom.dom.children, w) + } + } + + pre, post := numberDomTree(root, 0, 0) + if recover != nil { + numberDomTree(recover, pre, post) + } + + // printDomTreeDot(os.Stderr, f) // debugging + // printDomTreeText(os.Stderr, root, 0) // debugging + + if f.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckDomTree(f) + } +} + +// numberDomTree sets the pre- and post-order numbers of a depth-first +// traversal of the dominator tree rooted at v. These are used to +// answer dominance queries in constant time. +// +func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { + v.dom.pre = pre + pre++ + for _, child := range v.dom.children { + pre, post = numberDomTree(child, pre, post) + } + v.dom.post = post + post++ + return pre, post +} + +// Testing utilities ---------------------------------------- + +// sanityCheckDomTree checks the correctness of the dominator tree +// computed by the LT algorithm by comparing against the dominance +// relation computed by a naive Kildall-style forward dataflow +// analysis (Algorithm 10.16 from the "Dragon" book). +// +func sanityCheckDomTree(f *Function) { + n := len(f.Blocks) + + // D[i] is the set of blocks that dominate f.Blocks[i], + // represented as a bit-set of block indices. + D := make([]big.Int, n) + + one := big.NewInt(1) + + // all is the set of all blocks; constant. + var all big.Int + all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) + + // Initialization. + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + // A root is dominated only by itself. + D[i].SetBit(&D[0], 0, 1) + } else { + // All other blocks are (initially) dominated + // by every block. + D[i].Set(&all) + } + } + + // Iteration until fixed point. + for changed := true; changed; { + changed = false + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + continue + } + // Compute intersection across predecessors. + var x big.Int + x.Set(&all) + for _, pred := range b.Preds { + x.And(&x, &D[pred.Index]) + } + x.SetBit(&x, i, 1) // a block always dominates itself. + if D[i].Cmp(&x) != 0 { + D[i].Set(&x) + changed = true + } + } + } + + // Check the entire relation. O(n^2). + // The Recover block (if any) must be treated specially so we skip it. 
+ ok := true + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + b, c := f.Blocks[i], f.Blocks[j] + if c == f.Recover { + continue + } + actual := b.Dominates(c) + expected := D[j].Bit(i) == 1 + if actual != expected { + fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) + ok = false + } + } + } + + preorder := f.DomPreorder() + for _, b := range f.Blocks { + if got := preorder[b.dom.pre]; got != b { + fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) + ok = false + } + } + + if !ok { + panic("sanityCheckDomTree failed for " + f.String()) + } + +} + +// Printing functions ---------------------------------------- + +// printDomTree prints the dominator tree as text, using indentation. +func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { + fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) + for _, child := range v.dom.children { + printDomTreeText(buf, child, indent+1) + } +} + +// printDomTreeDot prints the dominator tree of f in AT&T GraphViz +// (.dot) format. +func printDomTreeDot(buf *bytes.Buffer, f *Function) { + fmt.Fprintln(buf, "//", f) + fmt.Fprintln(buf, "digraph domtree {") + for i, b := range f.Blocks { + v := b.dom + fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) + // TODO(adonovan): improve appearance of edges + // belonging to both dominator tree and CFG. + + // Dominator tree edge. + if i != 0 { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) + } + // CFG edges. + for _, pred := range b.Preds { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) + } + } + fmt.Fprintln(buf, "}") +} diff --git a/vendor/honnef.co/go/tools/ssa/emit.go b/vendor/honnef.co/go/tools/ssa/emit.go new file mode 100644 index 0000000..400da21 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/emit.go @@ -0,0 +1,475 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// Helpers for emitting SSA instructions. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" +) + +// emitNew emits to f a new (heap Alloc) instruction allocating an +// object of type typ. pos is the optional source location. +// +func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc { + v := &Alloc{Heap: true} + v.setType(types.NewPointer(typ)) + v.setPos(pos) + f.emit(v) + return v +} + +// emitLoad emits to f an instruction to load the address addr into a +// new temporary, and returns the value so defined. +// +func emitLoad(f *Function, addr Value) *UnOp { + v := &UnOp{Op: token.MUL, X: addr} + v.setType(deref(addr.Type())) + f.emit(v) + return v +} + +// emitDebugRef emits to f a DebugRef pseudo-instruction associating +// expression e with value v. +// +func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { + if !f.debugInfo() { + return // debugging not enabled + } + if v == nil || e == nil { + panic("nil") + } + var obj types.Object + e = unparen(e) + if id, ok := e.(*ast.Ident); ok { + if isBlankIdent(id) { + return + } + obj = f.Pkg.objectOf(id) + switch obj.(type) { + case *types.Nil, *types.Const, *types.Builtin: + return + } + } + f.emit(&DebugRef{ + X: v, + Expr: e, + IsAddr: isAddr, + object: obj, + }) +} + +// emitArith emits to f code to compute the binary operation op(x, y) +// where op is an eager shift, logical or arithmetic operation. 
+// (Use emitCompare() for comparisons and Builder.logicalBinop() for +// non-eager operations.) +// +func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value { + switch op { + case token.SHL, token.SHR: + x = emitConv(f, x, t) + // y may be signed or an 'untyped' constant. + // TODO(adonovan): whence signed values? + if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 { + y = emitConv(f, y, types.Typ[types.Uint64]) + } + + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + x = emitConv(f, x, t) + y = emitConv(f, y, t) + + default: + panic("illegal op in emitArith: " + op.String()) + + } + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(t) + return f.emit(v) +} + +// emitCompare emits to f code compute the boolean result of +// comparison comparison 'x op y'. +// +func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { + xt := x.Type().Underlying() + yt := y.Type().Underlying() + + // Special case to optimise a tagless SwitchStmt so that + // these are equivalent + // switch { case e: ...} + // switch true { case e: ... } + // if e==true { ... } + // even in the case when e's type is an interface. + // TODO(adonovan): opt: generalise to x==true, false!=y, etc. + if x == vTrue && op == token.EQL { + if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { + return y + } + } + + if types.Identical(xt, yt) { + // no conversion necessary + } else if _, ok := xt.(*types.Interface); ok { + y = emitConv(f, y, x.Type()) + } else if _, ok := yt.(*types.Interface); ok { + x = emitConv(f, x, y.Type()) + } else if _, ok := x.(*Const); ok { + x = emitConv(f, x, y.Type()) + } else if _, ok := y.(*Const); ok { + y = emitConv(f, y, x.Type()) + } else { + // other cases, e.g. channels. No-op. + } + + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(tBool) + return f.emit(v) +} + +// isValuePreserving returns true if a conversion from ut_src to +// ut_dst is value-preserving, i.e. just a change of type. +// Precondition: neither argument is a named type. +// +func isValuePreserving(ut_src, ut_dst types.Type) bool { + // Identical underlying types? + if structTypesIdentical(ut_dst, ut_src) { + return true + } + + switch ut_dst.(type) { + case *types.Chan: + // Conversion between channel types? + _, ok := ut_src.(*types.Chan) + return ok + + case *types.Pointer: + // Conversion between pointers with identical base types? + _, ok := ut_src.(*types.Pointer) + return ok + } + return false +} + +// emitConv emits to f code to convert Value val to exactly type typ, +// and returns the converted value. Implicit conversions are required +// by language assignability rules in assignments, parameter passing, +// etc. Conversions cannot fail dynamically. +// +func emitConv(f *Function, val Value, typ types.Type) Value { + t_src := val.Type() + + // Identical types? Conversion is a no-op. + if types.Identical(t_src, typ) { + return val + } + + ut_dst := typ.Underlying() + ut_src := t_src.Underlying() + + // Just a change of type, but not value or representation? + if isValuePreserving(ut_src, ut_dst) { + c := &ChangeType{X: val} + c.setType(typ) + return f.emit(c) + } + + // Conversion to, or construction of a value of, an interface type? + if _, ok := ut_dst.(*types.Interface); ok { + // Assignment from one interface type to another? 
+ if _, ok := ut_src.(*types.Interface); ok { + c := &ChangeInterface{X: val} + c.setType(typ) + return f.emit(c) + } + + // Untyped nil constant? Return interface-typed nil constant. + if ut_src == tUntypedNil { + return nilConst(typ) + } + + // Convert (non-nil) "untyped" literals to their default type. + if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { + val = emitConv(f, val, DefaultType(ut_src)) + } + + f.Pkg.Prog.needMethodsOf(val.Type()) + mi := &MakeInterface{X: val} + mi.setType(typ) + return f.emit(mi) + } + + // Conversion of a compile-time constant value? + if c, ok := val.(*Const); ok { + if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() { + // Conversion of a compile-time constant to + // another constant type results in a new + // constant of the destination type and + // (initially) the same abstract value. + // We don't truncate the value yet. + return NewConst(c.Value, typ) + } + + // We're converting from constant to non-constant type, + // e.g. string -> []byte/[]rune. + } + + // A representation-changing conversion? + // At least one of {ut_src,ut_dst} must be *Basic. + // (The other may be []byte or []rune.) + _, ok1 := ut_src.(*types.Basic) + _, ok2 := ut_dst.(*types.Basic) + if ok1 || ok2 { + c := &Convert{X: val} + c.setType(typ) + return f.emit(c) + } + + panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) +} + +// emitStore emits to f an instruction to store value val at location +// addr, applying implicit conversions as required by assignability rules. +// +func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { + s := &Store{ + Addr: addr, + Val: emitConv(f, val, deref(addr.Type())), + pos: pos, + } + f.emit(s) + return s +} + +// emitJump emits to f a jump to target, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitJump(f *Function, target *BasicBlock) { + b := f.currentBlock + b.emit(new(Jump)) + addEdge(b, target) + f.currentBlock = nil +} + +func (b *BasicBlock) emitJump(target *BasicBlock) { + b.emit(new(Jump)) + addEdge(b, target) +} + +// emitIf emits to f a conditional jump to tblock or fblock based on +// cond, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) { + b := f.currentBlock + b.emit(&If{Cond: cond}) + addEdge(b, tblock) + addEdge(b, fblock) + f.currentBlock = nil +} + +// emitExtract emits to f an instruction to extract the index'th +// component of tuple. It returns the extracted value. +// +func emitExtract(f *Function, tuple Value, index int) Value { + e := &Extract{Tuple: tuple, Index: index} + e.setType(tuple.Type().(*types.Tuple).At(index).Type()) + return f.emit(e) +} + +// emitTypeAssert emits to f a type assertion value := x.(t) and +// returns the value. x.Type() must be an interface. +// +func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{X: x, AssertedType: t} + a.setPos(pos) + a.setType(t) + return f.emit(a) +} + +// emitTypeTest emits to f a type test value,ok := x.(t) and returns +// a (value, ok) tuple. x.Type() must be an interface. +// +func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{ + X: x, + AssertedType: t, + CommaOk: true, + } + a.setPos(pos) + a.setType(types.NewTuple( + newVar("value", t), + varOk, + )) + return f.emit(a) +} + +// emitTailCall emits to f a function call in tail position. 
The +// caller is responsible for all fields of 'call' except its type. +// Intended for wrapper methods. +// Precondition: f does/will not use deferred procedure calls. +// Postcondition: f.currentBlock is nil. +// +func emitTailCall(f *Function, call *Call) { + tresults := f.Signature.Results() + nr := tresults.Len() + if nr == 1 { + call.typ = tresults.At(0).Type() + } else { + call.typ = tresults + } + tuple := f.emit(call) + var ret Return + switch nr { + case 0: + // no-op + case 1: + ret.Results = []Value{tuple} + default: + for i := 0; i < nr; i++ { + v := emitExtract(f, tuple, i) + // TODO(adonovan): in principle, this is required: + // v = emitConv(f, o.Type, f.Signature.Results[i].Type) + // but in practice emitTailCall is only used when + // the types exactly match. + ret.Results = append(ret.Results, v) + } + } + f.emit(&ret) + f.currentBlock = nil +} + +// emitImplicitSelections emits to f code to apply the sequence of +// implicit field selections specified by indices to base value v, and +// returns the selected value. +// +// If v is the address of a struct, the result will be the address of +// a field; if it is the value of a struct, the result will be the +// value of a field. +// +func emitImplicitSelections(f *Function, v Value, indices []int) Value { + for _, index := range indices { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff indirectly embedded. + if isPointer(fld.Type()) { + v = emitLoad(f, v) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setType(fld.Type()) + v = f.emit(instr) + } + } + return v +} + +// emitFieldSelection emits to f code to select the index'th field of v. +// +// If wantAddr, the input must be a pointer-to-struct and the result +// will be the field's address; otherwise the result will be the +// field's value. +// Ident id is used for position and debug info. +// +func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff we don't want its address. + if !wantAddr { + v = emitLoad(f, v) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(fld.Type()) + v = f.emit(instr) + } + emitDebugRef(f, id, v, wantAddr) + return v +} + +// zeroValue emits to f code to produce a zero value of type t, +// and returns it. +// +func zeroValue(f *Function, t types.Type) Value { + switch t.Underlying().(type) { + case *types.Struct, *types.Array: + return emitLoad(f, f.addLocal(t, token.NoPos)) + default: + return zeroConst(t) + } +} + +// createRecoverBlock emits to f a block of code to return after a +// recovered panic, and sets f.Recover to it. +// +// If f's result parameters are named, the code loads and returns +// their current values, otherwise it returns the zero values of their +// type. +// +// Idempotent. 
+// +func createRecoverBlock(f *Function) { + if f.Recover != nil { + return // already created + } + saved := f.currentBlock + + f.Recover = f.newBasicBlock("recover") + f.currentBlock = f.Recover + + var results []Value + if f.namedResults != nil { + // Reload NRPs to form value tuple. + for _, r := range f.namedResults { + results = append(results, emitLoad(f, r)) + } + } else { + R := f.Signature.Results() + for i, n := 0, R.Len(); i < n; i++ { + T := R.At(i).Type() + + // Return zero value of each result type. + results = append(results, zeroValue(f, T)) + } + } + f.emit(&Return{Results: results}) + + f.currentBlock = saved +} diff --git a/vendor/honnef.co/go/tools/ssa/func.go b/vendor/honnef.co/go/tools/ssa/func.go new file mode 100644 index 0000000..86a3da7 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/func.go @@ -0,0 +1,703 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This file implements the Function and BasicBlock types. + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + "strings" +) + +// addEdge adds a control-flow graph edge from from to to. +func addEdge(from, to *BasicBlock) { + from.Succs = append(from.Succs, to) + to.Preds = append(to.Preds, from) +} + +// Parent returns the function that contains block b. +func (b *BasicBlock) Parent() *Function { return b.parent } + +// String returns a human-readable label of this block. +// It is not guaranteed unique within the function. +// +func (b *BasicBlock) String() string { + return fmt.Sprintf("%d", b.Index) +} + +// emit appends an instruction to the current basic block. +// If the instruction defines a Value, it is returned. +// +func (b *BasicBlock) emit(i Instruction) Value { + i.setBlock(b) + b.Instrs = append(b.Instrs, i) + v, _ := i.(Value) + return v +} + +// predIndex returns the i such that b.Preds[i] == c or panics if +// there is none. +func (b *BasicBlock) predIndex(c *BasicBlock) int { + for i, pred := range b.Preds { + if pred == c { + return i + } + } + panic(fmt.Sprintf("no edge %s -> %s", c, b)) +} + +// hasPhi returns true if b.Instrs contains φ-nodes. +func (b *BasicBlock) hasPhi() bool { + _, ok := b.Instrs[0].(*Phi) + return ok +} + +func (b *BasicBlock) Phis() []Instruction { + return b.phis() +} + +// phis returns the prefix of b.Instrs containing all the block's φ-nodes. +func (b *BasicBlock) phis() []Instruction { + for i, instr := range b.Instrs { + if _, ok := instr.(*Phi); !ok { + return b.Instrs[:i] + } + } + return nil // unreachable in well-formed blocks +} + +// replacePred replaces all occurrences of p in b's predecessor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replacePred(p, q *BasicBlock) { + for i, pred := range b.Preds { + if pred == p { + b.Preds[i] = q + } + } +} + +// replaceSucc replaces all occurrences of p in b's successor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { + for i, succ := range b.Succs { + if succ == p { + b.Succs[i] = q + } + } +} + +func (b *BasicBlock) RemovePred(p *BasicBlock) { + b.removePred(p) +} + +// removePred removes all occurrences of p in b's +// predecessor list and φ-nodes. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) removePred(p *BasicBlock) { + phis := b.phis() + + // We must preserve edge order for φ-nodes. 
+ j := 0 + for i, pred := range b.Preds { + if pred != p { + b.Preds[j] = b.Preds[i] + // Strike out φ-edge too. + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges[j] = phi.Edges[i] + } + j++ + } + } + // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. + for i := j; i < len(b.Preds); i++ { + b.Preds[i] = nil + for _, instr := range phis { + instr.(*Phi).Edges[i] = nil + } + } + b.Preds = b.Preds[:j] + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges = phi.Edges[:j] + } +} + +// Destinations associated with unlabelled for/switch/select stmts. +// We push/pop one of these as we enter/leave each construct and for +// each BranchStmt we scan for the innermost target of the right type. +// +type targets struct { + tail *targets // rest of stack + _break *BasicBlock + _continue *BasicBlock + _fallthrough *BasicBlock +} + +// Destinations associated with a labelled block. +// We populate these as labels are encountered in forward gotos or +// labelled statements. +// +type lblock struct { + _goto *BasicBlock + _break *BasicBlock + _continue *BasicBlock +} + +// labelledBlock returns the branch target associated with the +// specified label, creating it if needed. +// +func (f *Function) labelledBlock(label *ast.Ident) *lblock { + lb := f.lblocks[label.Obj] + if lb == nil { + lb = &lblock{_goto: f.newBasicBlock(label.Name)} + if f.lblocks == nil { + f.lblocks = make(map[*ast.Object]*lblock) + } + f.lblocks[label.Obj] = lb + } + return lb +} + +// addParam adds a (non-escaping) parameter to f.Params of the +// specified name, type and source position. +// +func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter { + v := &Parameter{ + name: name, + typ: typ, + pos: pos, + parent: f, + } + f.Params = append(f.Params, v) + return v +} + +func (f *Function) addParamObj(obj types.Object) *Parameter { + name := obj.Name() + if name == "" { + name = fmt.Sprintf("arg%d", len(f.Params)) + } + param := f.addParam(name, obj.Type(), obj.Pos()) + param.object = obj + return param +} + +// addSpilledParam declares a parameter that is pre-spilled to the +// stack; the function body will load/store the spilled location. +// Subsequent lifting will eliminate spills where possible. +// +func (f *Function) addSpilledParam(obj types.Object) { + param := f.addParamObj(obj) + spill := &Alloc{Comment: obj.Name()} + spill.setType(types.NewPointer(obj.Type())) + spill.setPos(obj.Pos()) + f.objects[obj] = spill + f.Locals = append(f.Locals, spill) + f.emit(spill) + f.emit(&Store{Addr: spill, Val: param}) +} + +// startBody initializes the function prior to generating SSA code for its body. +// Precondition: f.Type() already set. +// +func (f *Function) startBody() { + f.currentBlock = f.newBasicBlock("entry") + f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init +} + +// createSyntacticParams populates f.Params and generates code (spills +// and named result locals) for all the parameters declared in the +// syntax. In addition it populates the f.objects mapping. +// +// Preconditions: +// f.startBody() was called. +// Postcondition: +// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) +// +func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { + // Receiver (at most one inner iteration). + if recv != nil { + for _, field := range recv.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n]) + } + // Anonymous receiver? No need to spill. 
+ if field.Names == nil { + f.addParamObj(f.Signature.Recv()) + } + } + } + + // Parameters. + if functype.Params != nil { + n := len(f.Params) // 1 if has recv, 0 otherwise + for _, field := range functype.Params.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n]) + } + // Anonymous parameter? No need to spill. + if field.Names == nil { + f.addParamObj(f.Signature.Params().At(len(f.Params) - n)) + } + } + } + + // Named results. + if functype.Results != nil { + for _, field := range functype.Results.List { + // Implicit "var" decl of locals for named results. + for _, n := range field.Names { + f.namedResults = append(f.namedResults, f.addLocalForIdent(n)) + } + } + } +} + +// numberRegisters assigns numbers to all SSA registers +// (value-defining Instructions) in f, to aid debugging. +// (Non-Instruction Values are named at construction.) +// +func numberRegisters(f *Function) { + v := 0 + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case Value: + instr.(interface { + setNum(int) + }).setNum(v) + v++ + } + } + } +} + +// buildReferrers populates the def/use information in all non-nil +// Value.Referrers slice. +// Precondition: all such slices are initially empty. +func buildReferrers(f *Function) { + var rands []*Value + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if r := *rand; r != nil { + if ref := r.Referrers(); ref != nil { + *ref = append(*ref, instr) + } + } + } + } + } +} + +// finishBody() finalizes the function after SSA code generation of its body. +func (f *Function) finishBody() { + f.objects = nil + f.currentBlock = nil + f.lblocks = nil + + // Don't pin the AST in memory (except in debug mode). + if n := f.syntax; n != nil && !f.debugInfo() { + f.syntax = extentNode{n.Pos(), n.End()} + } + + // Remove from f.Locals any Allocs that escape to the heap. + j := 0 + for _, l := range f.Locals { + if !l.Heap { + f.Locals[j] = l + j++ + } + } + // Nil out f.Locals[j:] to aid GC. + for i := j; i < len(f.Locals); i++ { + f.Locals[i] = nil + } + f.Locals = f.Locals[:j] + + optimizeBlocks(f) + + buildReferrers(f) + + buildDomTree(f) + + if f.Prog.mode&NaiveForm == 0 { + // For debugging pre-state of lifting pass: + // numberRegisters(f) + // f.WriteTo(os.Stderr) + lift(f) + } + + f.namedResults = nil // (used by lifting) + + numberRegisters(f) + + if f.Prog.mode&PrintFunctions != 0 { + printMu.Lock() + f.WriteTo(os.Stdout) + printMu.Unlock() + } + + if f.Prog.mode&SanityCheckFunctions != 0 { + mustSanityCheck(f, nil) + } +} + +func (f *Function) RemoveNilBlocks() { + f.removeNilBlocks() +} + +// removeNilBlocks eliminates nils from f.Blocks and updates each +// BasicBlock.Index. Use this after any pass that may delete blocks. +// +func (f *Function) removeNilBlocks() { + j := 0 + for _, b := range f.Blocks { + if b != nil { + b.Index = j + f.Blocks[j] = b + j++ + } + } + // Nil out f.Blocks[j:] to aid GC. + for i := j; i < len(f.Blocks); i++ { + f.Blocks[i] = nil + } + f.Blocks = f.Blocks[:j] +} + +// SetDebugMode sets the debug mode for package pkg. If true, all its +// functions will include full debug info. This greatly increases the +// size of the instruction stream, and causes Functions to depend upon +// the ASTs, potentially keeping them live in memory for longer. +// +func (pkg *Package) SetDebugMode(debug bool) { + // TODO(adonovan): do we want ast.File granularity? 
+ pkg.debug = debug +} + +// debugInfo reports whether debug info is wanted for this function. +func (f *Function) debugInfo() bool { + return f.Pkg != nil && f.Pkg.debug +} + +// addNamedLocal creates a local variable, adds it to function f and +// returns it. Its name and type are taken from obj. Subsequent +// calls to f.lookup(obj) will return the same local. +// +func (f *Function) addNamedLocal(obj types.Object) *Alloc { + l := f.addLocal(obj.Type(), obj.Pos()) + l.Comment = obj.Name() + f.objects[obj] = l + return l +} + +func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc { + return f.addNamedLocal(f.Pkg.info.Defs[id]) +} + +// addLocal creates an anonymous local variable of type typ, adds it +// to function f and returns it. pos is the optional source location. +// +func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc { + v := &Alloc{} + v.setType(types.NewPointer(typ)) + v.setPos(pos) + f.Locals = append(f.Locals, v) + f.emit(v) + return v +} + +// lookup returns the address of the named variable identified by obj +// that is local to function f or one of its enclosing functions. +// If escaping, the reference comes from a potentially escaping pointer +// expression and the referent must be heap-allocated. +// +func (f *Function) lookup(obj types.Object, escaping bool) Value { + if v, ok := f.objects[obj]; ok { + if alloc, ok := v.(*Alloc); ok && escaping { + alloc.Heap = true + } + return v // function-local var (address) + } + + // Definition must be in an enclosing function; + // plumb it through intervening closures. + if f.parent == nil { + panic("no ssa.Value for " + obj.String()) + } + outer := f.parent.lookup(obj, true) // escaping + v := &FreeVar{ + name: obj.Name(), + typ: outer.Type(), + pos: outer.Pos(), + outer: outer, + parent: f, + } + f.objects[obj] = v + f.FreeVars = append(f.FreeVars, v) + return v +} + +// emit emits the specified instruction to function f. +func (f *Function) emit(instr Instruction) Value { + return f.currentBlock.emit(instr) +} + +// RelString returns the full name of this function, qualified by +// package name, receiver type, etc. +// +// The specific formatting rules are not guaranteed and may change. +// +// Examples: +// "math.IsNaN" // a package-level function +// "(*bytes.Buffer).Bytes" // a declared method or a wrapper +// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) +// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) +// "main.main$1" // an anonymous function in main +// "main.init#1" // a declared init function +// "main.init" // the synthesized package initializer +// +// When these functions are referred to from within the same package +// (i.e. from == f.Pkg.Object), they are rendered without the package path. +// For example: "IsNaN", "(*Buffer).Bytes", etc. +// +// All non-synthetic functions have distinct package-qualified names. +// (But two methods may have the same name "(T).f" if one is a synthetic +// wrapper promoting a non-exported method "f" from another package; in +// that case, the strings are equal but the identifiers "f" are distinct.) +// +func (f *Function) RelString(from *types.Package) string { + // Anonymous? + if f.parent != nil { + // An anonymous function's Name() looks like "parentName$1", + // but its String() should include the type/package/etc. 
+ parent := f.parent.RelString(from) + for i, anon := range f.parent.AnonFuncs { + if anon == f { + return fmt.Sprintf("%s$%d", parent, 1+i) + } + } + + return f.name // should never happen + } + + // Method (declared or wrapper)? + if recv := f.Signature.Recv(); recv != nil { + return f.relMethod(from, recv.Type()) + } + + // Thunk? + if f.method != nil { + return f.relMethod(from, f.method.Recv()) + } + + // Bound? + if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { + return f.relMethod(from, f.FreeVars[0].Type()) + } + + // Package-level function? + // Prefix with package name for cross-package references only. + if p := f.pkg(); p != nil && p != from { + return fmt.Sprintf("%s.%s", p.Path(), f.name) + } + + // Unknown. + return f.name +} + +func (f *Function) relMethod(from *types.Package, recv types.Type) string { + return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) +} + +// writeSignature writes to buf the signature sig in declaration syntax. +func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) { + buf.WriteString("func ") + if recv := sig.Recv(); recv != nil { + buf.WriteString("(") + if n := params[0].Name(); n != "" { + buf.WriteString(n) + buf.WriteString(" ") + } + types.WriteType(buf, params[0].Type(), types.RelativeTo(from)) + buf.WriteString(") ") + } + buf.WriteString(name) + types.WriteSignature(buf, sig, types.RelativeTo(from)) +} + +func (f *Function) pkg() *types.Package { + if f.Pkg != nil { + return f.Pkg.Pkg + } + return nil +} + +var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer + +func (f *Function) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WriteFunction(&buf, f) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WriteFunction writes to buf a human-readable "disassembly" of f. +func WriteFunction(buf *bytes.Buffer, f *Function) { + fmt.Fprintf(buf, "# Name: %s\n", f.String()) + if f.Pkg != nil { + fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) + } + if syn := f.Synthetic; syn != "" { + fmt.Fprintln(buf, "# Synthetic:", syn) + } + if pos := f.Pos(); pos.IsValid() { + fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) + } + + if f.parent != nil { + fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) + } + + if f.Recover != nil { + fmt.Fprintf(buf, "# Recover: %s\n", f.Recover) + } + + from := f.pkg() + + if f.FreeVars != nil { + buf.WriteString("# Free variables:\n") + for i, fv := range f.FreeVars { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) + } + } + + if len(f.Locals) > 0 { + buf.WriteString("# Locals:\n") + for i, l := range f.Locals { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from)) + } + } + writeSignature(buf, from, f.Name(), f.Signature, f.Params) + buf.WriteString(":\n") + + if f.Blocks == nil { + buf.WriteString("\t(external)\n") + } + + // NB. column calculations are confused by non-ASCII + // characters and assume 8-space tabs. + const punchcard = 80 // for old time's sake. + const tabwidth = 8 + for _, b := range f.Blocks { + if b == nil { + // Corrupt CFG. 
+ fmt.Fprintf(buf, ".nil:\n") + continue + } + n, _ := fmt.Fprintf(buf, "%d:", b.Index) + bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs)) + fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg) + + if false { // CFG debugging + fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) + } + for _, instr := range b.Instrs { + buf.WriteString("\t") + switch v := instr.(type) { + case Value: + l := punchcard - tabwidth + // Left-align the instruction. + if name := v.Name(); name != "" { + n, _ := fmt.Fprintf(buf, "%s = ", name) + l -= n + } + n, _ := buf.WriteString(instr.String()) + l -= n + // Right-align the type if there's space. + if t := v.Type(); t != nil { + buf.WriteByte(' ') + ts := relType(t, from) + l -= len(ts) + len(" ") // (spaces before and after type) + if l > 0 { + fmt.Fprintf(buf, "%*s", l, "") + } + buf.WriteString(ts) + } + case nil: + // Be robust against bad transforms. + buf.WriteString("") + default: + buf.WriteString(instr.String()) + } + buf.WriteString("\n") + } + } + fmt.Fprintf(buf, "\n") +} + +// newBasicBlock adds to f a new basic block and returns it. It does +// not automatically become the current block for subsequent calls to emit. +// comment is an optional string for more readable debugging output. +// +func (f *Function) newBasicBlock(comment string) *BasicBlock { + b := &BasicBlock{ + Index: len(f.Blocks), + Comment: comment, + parent: f, + } + b.Succs = b.succs2[:0] + f.Blocks = append(f.Blocks, b) + return b +} + +// NewFunction returns a new synthetic Function instance belonging to +// prog, with its name and signature fields set as specified. +// +// The caller is responsible for initializing the remaining fields of +// the function object, e.g. Pkg, Params, Blocks. +// +// It is practically impossible for clients to construct well-formed +// SSA functions/packages/programs directly, so we assume this is the +// job of the Builder alone. NewFunction exists to provide clients a +// little flexibility. For example, analysis tools may wish to +// construct fake Functions for the root of the callgraph, a fake +// "reflect" package, etc. +// +// TODO(adonovan): think harder about the API here. +// +func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function { + return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} +} + +type extentNode [2]token.Pos + +func (n extentNode) Pos() token.Pos { return n[0] } +func (n extentNode) End() token.Pos { return n[1] } + +// Syntax returns an ast.Node whose Pos/End methods provide the +// lexical extent of the function if it was defined by Go source code +// (f.Synthetic==""), or nil otherwise. +// +// If f was built with debug information (see Package.SetDebugRef), +// the result is the *ast.FuncDecl or *ast.FuncLit that declared the +// function. Otherwise, it is an opaque Node providing only position +// information; this avoids pinning the AST in memory. 
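+//
+// A minimal usage sketch (illustrative only; fn is any non-synthetic
+// *Function): a client wanting the source extent might write
+//
+//	if n := fn.Syntax(); n != nil {
+//		start := fn.Prog.Fset.Position(n.Pos())
+//		end := fn.Prog.Fset.Position(n.End())
+//		_, _ = start, end // e.g. report the span start..end
+//	}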
+// +func (f *Function) Syntax() ast.Node { return f.syntax } diff --git a/vendor/honnef.co/go/tools/ssa/identical.go b/vendor/honnef.co/go/tools/ssa/identical.go new file mode 100644 index 0000000..53cbee1 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/identical.go @@ -0,0 +1,7 @@ +// +build go1.8 + +package ssa + +import "go/types" + +var structTypesIdentical = types.IdenticalIgnoreTags diff --git a/vendor/honnef.co/go/tools/ssa/identical_17.go b/vendor/honnef.co/go/tools/ssa/identical_17.go new file mode 100644 index 0000000..da89d33 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/identical_17.go @@ -0,0 +1,7 @@ +// +build !go1.8 + +package ssa + +import "go/types" + +var structTypesIdentical = types.Identical diff --git a/vendor/honnef.co/go/tools/ssa/lift.go b/vendor/honnef.co/go/tools/ssa/lift.go new file mode 100644 index 0000000..0270797 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/lift.go @@ -0,0 +1,608 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This file defines the lifting pass which tries to "lift" Alloc +// cells (new/local variables) into SSA registers, replacing loads +// with the dominating stored value, eliminating loads and stores, and +// inserting φ-nodes as needed. + +// Cited papers and resources: +// +// Ron Cytron et al. 1991. Efficiently computing SSA form... +// http://doi.acm.org/10.1145/115372.115320 +// +// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. +// Software Practice and Experience 2001, 4:1-10. +// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf +// +// Daniel Berlin, llvmdev mailing list, 2012. +// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html +// (Be sure to expand the whole thread.) + +// TODO(adonovan): opt: there are many optimizations worth evaluating, and +// the conventional wisdom for SSA construction is that a simple +// algorithm well engineered often beats those of better asymptotic +// complexity on all but the most egregious inputs. +// +// Danny Berlin suggests that the Cooper et al. algorithm for +// computing the dominance frontier is superior to Cytron et al. +// Furthermore he recommends that rather than computing the DF for the +// whole function then renaming all alloc cells, it may be cheaper to +// compute the DF for each alloc cell separately and throw it away. +// +// Consider exploiting liveness information to avoid creating dead +// φ-nodes which we then immediately remove. +// +// Integrate lifting with scalar replacement of aggregates (SRA) since +// the two are synergistic. +// +// Also see many other "TODO: opt" suggestions in the code. + +import ( + "fmt" + "go/token" + "go/types" + "math/big" + "os" +) + +// If true, perform sanity checking and show diagnostic information at +// each step of lifting. Very verbose. +const debugLifting = false + +// domFrontier maps each block to the set of blocks in its dominance +// frontier. The outer slice is conceptually a map keyed by +// Block.Index. The inner slice is conceptually a set, possibly +// containing duplicates. +// +// TODO(adonovan): opt: measure impact of dups; consider a packed bit +// representation, e.g. big.Int, and bitwise parallel operations for +// the union step in the Children loop. +// +// domFrontier's methods mutate the slice's elements but not its +// length, so their receivers needn't be pointers. 
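+//
+// For intuition (small illustrative example): in the diamond CFG
+//
+//	b0 -> b1, b0 -> b2, b1 -> b3, b2 -> b3
+//
+// b0 dominates every block, while b1 and b2 do not dominate b3, so
+// DF(b1) = DF(b2) = {b3}: b3 is the first block where values defined
+// separately in b1 and b2 meet and may therefore need a φ-node.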
+// +type domFrontier [][]*BasicBlock + +func (df domFrontier) add(u, v *BasicBlock) { + p := &df[u.Index] + *p = append(*p, v) +} + +// build builds the dominance frontier df for the dominator (sub)tree +// rooted at u, using the Cytron et al. algorithm. +// +// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA +// by pruning the entire IDF computation, rather than merely pruning +// the DF -> IDF step. +func (df domFrontier) build(u *BasicBlock) { + // Encounter each node u in postorder of dom tree. + for _, child := range u.dom.children { + df.build(child) + } + for _, vb := range u.Succs { + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + for _, w := range u.dom.children { + for _, vb := range df[w.Index] { + // TODO(adonovan): opt: use word-parallel bitwise union. + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + } +} + +func buildDomFrontier(fn *Function) domFrontier { + df := make(domFrontier, len(fn.Blocks)) + df.build(fn.Blocks[0]) + if fn.Recover != nil { + df.build(fn.Recover) + } + return df +} + +func RemoveInstr(refs []Instruction, instr Instruction) []Instruction { + return removeInstr(refs, instr) +} + +func removeInstr(refs []Instruction, instr Instruction) []Instruction { + i := 0 + for _, ref := range refs { + if ref == instr { + continue + } + refs[i] = ref + i++ + } + for j := i; j != len(refs); j++ { + refs[j] = nil // aid GC + } + return refs[:i] +} + +// lift attempts to replace local and new Allocs accessed only with +// load/store by SSA registers, inserting φ-nodes where necessary. +// The result is a program in classical pruned SSA form. +// +// Preconditions: +// - fn has no dead blocks (blockopt has run). +// - Def/use info (Operands and Referrers) is up-to-date. +// - The dominator tree is up-to-date. +// +func lift(fn *Function) { + // TODO(adonovan): opt: lots of little optimizations may be + // worthwhile here, especially if they cause us to avoid + // buildDomFrontier. For example: + // + // - Alloc never loaded? Eliminate. + // - Alloc never stored? Replace all loads with a zero constant. + // - Alloc stored once? Replace loads with dominating store; + // don't forget that an Alloc is itself an effective store + // of zero. + // - Alloc used only within a single block? + // Use degenerate algorithm avoiding φ-nodes. + // - Consider synergy with scalar replacement of aggregates (SRA). + // e.g. *(&x.f) where x is an Alloc. + // Perhaps we'd get better results if we generated this as x.f + // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)). + // Unclear. + // + // But we will start with the simplest correct code. + df := buildDomFrontier(fn) + + if debugLifting { + title := false + for i, blocks := range df { + if blocks != nil { + if !title { + fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) + title = true + } + fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) + } + } + } + + newPhis := make(newPhiMap) + + // During this pass we will replace some BasicBlock.Instrs + // (allocs, loads and stores) with nil, keeping a count in + // BasicBlock.gaps. At the end we will reset Instrs to the + // concatenation of all non-dead newPhis and non-nil Instrs + // for the block, reusing the original array if space permits. + + // While we're here, we also eliminate 'rundefers' + // instructions in functions that contain no 'defer' + // instructions. + usesDefer := false + + // Determine which allocs we can lift and number them densely. + // The renaming phase uses this numbering for compact maps. 
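+	// Illustrative example of what lifting achieves: for a cell built as
+	//
+	//	t0 = local int (x)
+	//	*t0 = 1:int
+	//	t1 = *t0
+	//
+	// the load t1 is replaced by the dominating stored value 1:int, and
+	// the alloc, the store and the load are all deleted (nilled out and
+	// compacted away below).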
+ numAllocs := 0 + for _, b := range fn.Blocks { + b.gaps = 0 + b.rundefers = 0 + for _, instr := range b.Instrs { + switch instr := instr.(type) { + case *Alloc: + index := -1 + if liftAlloc(df, instr, newPhis) { + index = numAllocs + numAllocs++ + } + instr.index = index + case *Defer: + usesDefer = true + case *RunDefers: + b.rundefers++ + } + } + } + + // renaming maps an alloc (keyed by index) to its replacement + // value. Initially the renaming contains nil, signifying the + // zero constant of the appropriate type; we construct the + // Const lazily at most once on each path through the domtree. + // TODO(adonovan): opt: cache per-function not per subtree. + renaming := make([]Value, numAllocs) + + // Renaming. + rename(fn.Blocks[0], renaming, newPhis) + + // Eliminate dead new phis, then prepend the live ones to each block. + for _, b := range fn.Blocks { + + // Compress the newPhis slice to eliminate unused phis. + // TODO(adonovan): opt: compute liveness to avoid + // placing phis in blocks for which the alloc cell is + // not live. + nps := newPhis[b] + j := 0 + for _, np := range nps { + if !phiIsLive(np.phi) { + // discard it, first removing it from referrers + for _, newval := range np.phi.Edges { + if refs := newval.Referrers(); refs != nil { + *refs = removeInstr(*refs, np.phi) + } + } + continue + } + nps[j] = np + j++ + } + nps = nps[:j] + + rundefersToKill := b.rundefers + if usesDefer { + rundefersToKill = 0 + } + + if j+b.gaps+rundefersToKill == 0 { + continue // fast path: no new phis or gaps + } + + // Compact nps + non-nil Instrs into a new slice. + // TODO(adonovan): opt: compact in situ if there is + // sufficient space or slack in the slice. + dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill) + for i, np := range nps { + dst[i] = np.phi + } + for _, instr := range b.Instrs { + if instr == nil { + continue + } + if !usesDefer { + if _, ok := instr.(*RunDefers); ok { + continue + } + } + dst[j] = instr + j++ + } + for i, np := range nps { + dst[i] = np.phi + } + b.Instrs = dst + } + + // Remove any fn.Locals that were lifted. + j := 0 + for _, l := range fn.Locals { + if l.index < 0 { + fn.Locals[j] = l + j++ + } + } + // Nil out fn.Locals[j:] to aid GC. + for i := j; i < len(fn.Locals); i++ { + fn.Locals[i] = nil + } + fn.Locals = fn.Locals[:j] +} + +func phiIsLive(phi *Phi) bool { + for _, instr := range *phi.Referrers() { + if instr == phi { + continue // self-refs don't count + } + if _, ok := instr.(*DebugRef); ok { + continue // debug refs don't count + } + return true + } + return false +} + +type blockSet struct{ big.Int } // (inherit methods from Int) + +// add adds b to the set and returns true if the set changed. +func (s *blockSet) add(b *BasicBlock) bool { + i := b.Index + if s.Bit(i) != 0 { + return false + } + s.SetBit(&s.Int, i, 1) + return true +} + +// take removes an arbitrary element from a set s and +// returns its index, or returns -1 if empty. +func (s *blockSet) take() int { + l := s.BitLen() + for i := 0; i < l; i++ { + if s.Bit(i) == 1 { + s.SetBit(&s.Int, i, 0) + return i + } + } + return -1 +} + +// newPhi is a pair of a newly introduced φ-node and the lifted Alloc +// it replaces. +type newPhi struct { + phi *Phi + alloc *Alloc +} + +// newPhiMap records for each basic block, the set of newPhis that +// must be prepended to the block. 
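+//
+// Illustrative example: for source like
+//
+//	if cond { x = 1 } else { x = 2 }
+//	print(x)
+//
+// the cell for x is stored in both arms, so liftAlloc records a φ-node
+// such as
+//
+//	x = φ [then: 1:int, else: 2:int]
+//
+// for the join block; rename fills in its edges and lift prepends the
+// surviving φ-nodes to that block's instructions.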
+type newPhiMap map[*BasicBlock][]newPhi + +// liftAlloc determines whether alloc can be lifted into registers, +// and if so, it populates newPhis with all the φ-nodes it may require +// and returns true. +// +func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap) bool { + // Don't lift aggregates into registers, because we don't have + // a way to express their zero-constants. + switch deref(alloc.Type()).Underlying().(type) { + case *types.Array, *types.Struct: + return false + } + + // Don't lift named return values in functions that defer + // calls that may recover from panic. + if fn := alloc.Parent(); fn.Recover != nil { + for _, nr := range fn.namedResults { + if nr == alloc { + return false + } + } + } + + // Compute defblocks, the set of blocks containing a + // definition of the alloc cell. + var defblocks blockSet + for _, instr := range *alloc.Referrers() { + // Bail out if we discover the alloc is not liftable; + // the only operations permitted to use the alloc are + // loads/stores into the cell, and DebugRef. + switch instr := instr.(type) { + case *Store: + if instr.Val == alloc { + return false // address used as value + } + if instr.Addr != alloc { + panic("Alloc.Referrers is inconsistent") + } + defblocks.add(instr.Block()) + case *UnOp: + if instr.Op != token.MUL { + return false // not a load + } + if instr.X != alloc { + panic("Alloc.Referrers is inconsistent") + } + case *DebugRef: + // ok + default: + return false // some other instruction + } + } + // The Alloc itself counts as a (zero) definition of the cell. + defblocks.add(alloc.Block()) + + if debugLifting { + fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) + } + + fn := alloc.Parent() + + // Φ-insertion. + // + // What follows is the body of the main loop of the insert-φ + // function described by Cytron et al, but instead of using + // counter tricks, we just reset the 'hasAlready' and 'work' + // sets each iteration. These are bitmaps so it's pretty cheap. + // + // TODO(adonovan): opt: recycle slice storage for W, + // hasAlready, defBlocks across liftAlloc calls. + var hasAlready blockSet + + // Initialize W and work to defblocks. + var work blockSet = defblocks // blocks seen + var W blockSet // blocks to do + W.Set(&defblocks.Int) + + // Traverse iterated dominance frontier, inserting φ-nodes. + for i := W.take(); i != -1; i = W.take() { + u := fn.Blocks[i] + for _, v := range df[u.Index] { + if hasAlready.add(v) { + // Create φ-node. + // It will be prepended to v.Instrs later, if needed. + phi := &Phi{ + Edges: make([]Value, len(v.Preds)), + Comment: alloc.Comment, + } + phi.pos = alloc.Pos() + phi.setType(deref(alloc.Type())) + phi.block = v + if debugLifting { + fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) + } + newPhis[v] = append(newPhis[v], newPhi{phi, alloc}) + + if work.add(v) { + W.add(v) + } + } + } + } + + return true +} + +func ReplaceAll(x, y Value) { + replaceAll(x, y) +} + +// replaceAll replaces all intraprocedural uses of x with y, +// updating x.Referrers and y.Referrers. +// Precondition: x.Referrers() != nil, i.e. x must be local to some function. 
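+//
+// A minimal sketch of its use in this pass (names illustrative): once
+// a load t1 = *t0 is known to observe a dominating store's value,
+// rename calls
+//
+//	replaceAll(t1, storedVal)
+//
+// so every operand that referred to t1 now refers to storedVal, whose
+// referrer list gains those instructions while t1's list is emptied.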
+// +func replaceAll(x, y Value) { + var rands []*Value + pxrefs := x.Referrers() + pyrefs := y.Referrers() + for _, instr := range *pxrefs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if *rand != nil { + if *rand == x { + *rand = y + } + } + } + if pyrefs != nil { + *pyrefs = append(*pyrefs, instr) // dups ok + } + } + *pxrefs = nil // x is now unreferenced +} + +// renamed returns the value to which alloc is being renamed, +// constructing it lazily if it's the implicit zero initialization. +// +func renamed(renaming []Value, alloc *Alloc) Value { + v := renaming[alloc.index] + if v == nil { + v = zeroConst(deref(alloc.Type())) + renaming[alloc.index] = v + } + return v +} + +// rename implements the (Cytron et al) SSA renaming algorithm, a +// preorder traversal of the dominator tree replacing all loads of +// Alloc cells with the value stored to that cell by the dominating +// store instruction. For lifting, we need only consider loads, +// stores and φ-nodes. +// +// renaming is a map from *Alloc (keyed by index number) to its +// dominating stored value; newPhis[x] is the set of new φ-nodes to be +// prepended to block x. +// +func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) { + // Each φ-node becomes the new name for its associated Alloc. + for _, np := range newPhis[u] { + phi := np.phi + alloc := np.alloc + renaming[alloc.index] = phi + } + + // Rename loads and stores of allocs. + for i, instr := range u.Instrs { + switch instr := instr.(type) { + case *Alloc: + if instr.index >= 0 { // store of zero to Alloc cell + // Replace dominated loads by the zero value. + renaming[instr.index] = nil + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) + } + // Delete the Alloc. + u.Instrs[i] = nil + u.gaps++ + } + + case *Store: + if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell + // Replace dominated loads by the stored value. + renaming[alloc.index] = instr.Val + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", + instr, instr.Val.Name()) + } + // Remove the store from the referrer list of the stored value. + if refs := instr.Val.Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + // Delete the Store. + u.Instrs[i] = nil + u.gaps++ + } + + case *UnOp: + if instr.Op == token.MUL { + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", + instr.Name(), instr, newval.Name()) + } + // Replace all references to + // the loaded value by the + // dominating stored value. + replaceAll(instr, newval) + // Delete the Load. + u.Instrs[i] = nil + u.gaps++ + } + } + + case *DebugRef: + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell + if instr.IsAddr { + instr.X = renamed(renaming, alloc) + instr.IsAddr = false + + // Add DebugRef to instr.X's referrers. + if refs := instr.X.Referrers(); refs != nil { + *refs = append(*refs, instr) + } + } else { + // A source expression denotes the address + // of an Alloc that was optimized away. + instr.X = nil + + // Delete the DebugRef. + u.Instrs[i] = nil + u.gaps++ + } + } + } + } + + // For each φ-node in a CFG successor, rename the edge. 
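+	// (Each successor v records u at some position i in v.Preds;
+	// predIndex finds that i, and the value flowing from u to v is
+	// written into phi.Edges[i], keeping φ edges aligned with
+	// predecessors by index.)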
+ for _, v := range u.Succs { + phis := newPhis[v] + if len(phis) == 0 { + continue + } + i := v.predIndex(u) + for _, np := range phis { + phi := np.phi + alloc := np.alloc + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", + phi.Name(), u, v, i, alloc.Name(), newval.Name()) + } + phi.Edges[i] = newval + if prefs := newval.Referrers(); prefs != nil { + *prefs = append(*prefs, phi) + } + } + } + + // Continue depth-first recursion over domtree, pushing a + // fresh copy of the renaming map for each subtree. + for _, v := range u.dom.children { + // TODO(adonovan): opt: avoid copy on final iteration; use destructive update. + r := make([]Value, len(renaming)) + copy(r, renaming) + rename(v, r, newPhis) + } +} diff --git a/vendor/honnef.co/go/tools/ssa/lvalue.go b/vendor/honnef.co/go/tools/ssa/lvalue.go new file mode 100644 index 0000000..d2226a9 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/lvalue.go @@ -0,0 +1,125 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// lvalues are the union of addressable expressions and map-index +// expressions. + +import ( + "go/ast" + "go/token" + "go/types" +) + +// An lvalue represents an assignable location that may appear on the +// left-hand side of an assignment. This is a generalization of a +// pointer to permit updates to elements of maps. +// +type lvalue interface { + store(fn *Function, v Value) // stores v into the location + load(fn *Function) Value // loads the contents of the location + address(fn *Function) Value // address of the location + typ() types.Type // returns the type of the location +} + +// An address is an lvalue represented by a true pointer. +type address struct { + addr Value + pos token.Pos // source position + expr ast.Expr // source syntax of the value (not address) [debug mode] +} + +func (a *address) load(fn *Function) Value { + load := emitLoad(fn, a.addr) + load.pos = a.pos + return load +} + +func (a *address) store(fn *Function, v Value) { + store := emitStore(fn, a.addr, v, a.pos) + if a.expr != nil { + // store.Val is v, converted for assignability. + emitDebugRef(fn, a.expr, store.Val, false) + } +} + +func (a *address) address(fn *Function) Value { + if a.expr != nil { + emitDebugRef(fn, a.expr, a.addr, true) + } + return a.addr +} + +func (a *address) typ() types.Type { + return deref(a.addr.Type()) +} + +// An element is an lvalue represented by m[k], the location of an +// element of a map or string. These locations are not addressable +// since pointers cannot be formed from them, but they do support +// load(), and in the case of maps, store(). +// +type element struct { + m, k Value // map or string + t types.Type // map element type or string byte type + pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v) +} + +func (e *element) load(fn *Function) Value { + l := &Lookup{ + X: e.m, + Index: e.k, + } + l.setPos(e.pos) + l.setType(e.t) + return fn.emit(l) +} + +func (e *element) store(fn *Function, v Value) { + up := &MapUpdate{ + Map: e.m, + Key: e.k, + Value: emitConv(fn, v, e.t), + } + up.pos = e.pos + fn.emit(up) +} + +func (e *element) address(fn *Function) Value { + panic("map/string elements are not addressable") +} + +func (e *element) typ() types.Type { + return e.t +} + +// A blank is a dummy variable whose name is "_". 
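+// (For example, the assignment  _ = f()  is compiled through a blank
+// lvalue, whose store method simply emits a BlankStore and discards
+// the value.)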
+// It is not reified: loads are illegal and stores are ignored. +// +type blank struct{} + +func (bl blank) load(fn *Function) Value { + panic("blank.load is illegal") +} + +func (bl blank) store(fn *Function, v Value) { + s := &BlankStore{ + Val: v, + } + fn.emit(s) +} + +func (bl blank) address(fn *Function) Value { + panic("blank var is not addressable") +} + +func (bl blank) typ() types.Type { + // This should be the type of the blank Ident; the typechecker + // doesn't provide this yet, but fortunately, we don't need it + // yet either. + panic("blank.typ is unimplemented") +} diff --git a/vendor/honnef.co/go/tools/ssa/methods.go b/vendor/honnef.co/go/tools/ssa/methods.go new file mode 100644 index 0000000..7d1fb42 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/methods.go @@ -0,0 +1,241 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This file defines utilities for population of method sets. + +import ( + "fmt" + "go/types" +) + +// MethodValue returns the Function implementing method sel, building +// wrapper methods on demand. It returns nil if sel denotes an +// abstract (interface) method. +// +// Precondition: sel.Kind() == MethodVal. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) MethodValue(sel *types.Selection) *Function { + if sel.Kind() != types.MethodVal { + panic(fmt.Sprintf("Method(%s) kind != MethodVal", sel)) + } + T := sel.Recv() + if isInterface(T) { + return nil // abstract method + } + if prog.mode&LogSource != 0 { + defer logStack("Method %s %v", T, sel)() + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + return prog.addMethod(prog.createMethodSet(T), sel) +} + +// LookupMethod returns the implementation of the method of type T +// identified by (pkg, name). It returns nil if the method exists but +// is abstract, and panics if T has no such method. +// +func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { + sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) + if sel == nil { + panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) + } + return prog.MethodValue(sel) +} + +// methodSet contains the (concrete) methods of a non-interface type. +type methodSet struct { + mapping map[string]*Function // populated lazily + complete bool // mapping contains all methods +} + +// Precondition: !isInterface(T). 
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) createMethodSet(T types.Type) *methodSet { + mset, ok := prog.methodSets.At(T).(*methodSet) + if !ok { + mset = &methodSet{mapping: make(map[string]*Function)} + prog.methodSets.Set(T, mset) + } + return mset +} + +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function { + if sel.Kind() == types.MethodExpr { + panic(sel) + } + id := sel.Obj().Id() + fn := mset.mapping[id] + if fn == nil { + obj := sel.Obj().(*types.Func) + + needsPromotion := len(sel.Index()) > 1 + needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv()) + if needsPromotion || needsIndirection { + fn = makeWrapper(prog, sel) + } else { + fn = prog.declaredFunc(obj) + } + if fn.Signature.Recv() == nil { + panic(fn) // missing receiver + } + mset.mapping[id] = fn + } + return fn +} + +// RuntimeTypes returns a new unordered slice containing all +// concrete types in the program for which a complete (non-empty) +// method set is required at run-time. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) RuntimeTypes() []types.Type { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + var res []types.Type + prog.methodSets.Iterate(func(T types.Type, v interface{}) { + if v.(*methodSet).complete { + res = append(res, T) + } + }) + return res +} + +// declaredFunc returns the concrete function/method denoted by obj. +// Panic ensues if there is none. +// +func (prog *Program) declaredFunc(obj *types.Func) *Function { + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Function) + } + panic("no concrete method: " + obj.String()) +} + +// needMethodsOf ensures that runtime type information (including the +// complete method set) is available for the specified type T and all +// its subcomponents. +// +// needMethodsOf must be called for at least every type that is an +// operand of some MakeInterface instruction, and for the type of +// every exported package member. +// +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// +// Thread-safe. (Called via emitConv from multiple builder goroutines.) +// +// TODO(adonovan): make this faster. It accounts for 20% of SSA build time. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) needMethodsOf(T types.Type) { + prog.methodsMu.Lock() + prog.needMethods(T, false) + prog.methodsMu.Unlock() +} + +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// Recursive case: skip => don't create methods for T. +// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func (prog *Program) needMethods(T types.Type, skip bool) { + // Each package maintains its own set of types it has visited. + if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok { + // needMethods(T) was previously called + if !prevSkip || skip { + return // already seen, with same or false 'skip' value + } + } + prog.runtimeTypes.Set(T, skip) + + tmset := prog.MethodSets.MethodSet(T) + + if !skip && !isInterface(T) && tmset.Len() > 0 { + // Create methods of T. + mset := prog.createMethodSet(T) + if !mset.complete { + mset.complete = true + n := tmset.Len() + for i := 0; i < n; i++ { + prog.addMethod(mset, tmset.At(i)) + } + } + } + + // Recursion over signatures of each method. 
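+	// For example (illustrative): if T has a method
+	//
+	//	func (T) M(r io.Reader) (int, error)
+	//
+	// then the parameter tuple (io.Reader) and the result tuple
+	// (int, error) are visited below, so the types they mention are
+	// also considered for runtime type information.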
+ for i := 0; i < tmset.Len(); i++ { + sig := tmset.At(i).Type().(*types.Signature) + prog.needMethods(sig.Params(), false) + prog.needMethods(sig.Results(), false) + } + + switch t := T.(type) { + case *types.Basic: + // nop + + case *types.Interface: + // nop---handled by recursion over method set. + + case *types.Pointer: + prog.needMethods(t.Elem(), false) + + case *types.Slice: + prog.needMethods(t.Elem(), false) + + case *types.Chan: + prog.needMethods(t.Elem(), false) + + case *types.Map: + prog.needMethods(t.Key(), false) + prog.needMethods(t.Elem(), false) + + case *types.Signature: + if t.Recv() != nil { + panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv())) + } + prog.needMethods(t.Params(), false) + prog.needMethods(t.Results(), false) + + case *types.Named: + // A pointer-to-named type can be derived from a named + // type via reflection. It may have methods too. + prog.needMethods(types.NewPointer(T), false) + + // Consider 'type T struct{S}' where S has methods. + // Reflection provides no way to get from T to struct{S}, + // only to S, so the method set of struct{S} is unwanted, + // so set 'skip' flag during recursion. + prog.needMethods(t.Underlying(), true) + + case *types.Array: + prog.needMethods(t.Elem(), false) + + case *types.Struct: + for i, n := 0, t.NumFields(); i < n; i++ { + prog.needMethods(t.Field(i).Type(), false) + } + + case *types.Tuple: + for i, n := 0, t.Len(); i < n; i++ { + prog.needMethods(t.At(i).Type(), false) + } + + default: + panic(T) + } +} diff --git a/vendor/honnef.co/go/tools/ssa/mode.go b/vendor/honnef.co/go/tools/ssa/mode.go new file mode 100644 index 0000000..d2a2698 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/mode.go @@ -0,0 +1,100 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the BuilderMode type and its command-line flag. + +import ( + "bytes" + "fmt" +) + +// BuilderMode is a bitmask of options for diagnostics and checking. +// +// *BuilderMode satisfies the flag.Value interface. Example: +// +// var mode = ssa.BuilderMode(0) +// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) } +// +type BuilderMode uint + +const ( + PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout + PrintFunctions // Print function SSA code to stdout + LogSource // Log source locations as SSA builder progresses + SanityCheckFunctions // Perform sanity checking of function bodies + NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers + BuildSerially // Build packages serially, not in parallel. + GlobalDebug // Enable debug info for all packages + BareInits // Build init functions without guards or calls to dependent inits +) + +const BuilderModeDoc = `Options controlling the SSA builder. +The value is a sequence of zero or more of these letters: +C perform sanity [C]hecking of the SSA form. +D include [D]ebug info for every function. +P print [P]ackage inventory. +F print [F]unction SSA code. +S log [S]ource locations as SSA builder progresses. +L build distinct packages seria[L]ly instead of in parallel. +N build [N]aive SSA form: don't replace local loads/stores with registers. +I build bare [I]nit functions: no init guards or calls to dependent inits. 
+` + +func (m BuilderMode) String() string { + var buf bytes.Buffer + if m&GlobalDebug != 0 { + buf.WriteByte('D') + } + if m&PrintPackages != 0 { + buf.WriteByte('P') + } + if m&PrintFunctions != 0 { + buf.WriteByte('F') + } + if m&LogSource != 0 { + buf.WriteByte('S') + } + if m&SanityCheckFunctions != 0 { + buf.WriteByte('C') + } + if m&NaiveForm != 0 { + buf.WriteByte('N') + } + if m&BuildSerially != 0 { + buf.WriteByte('L') + } + return buf.String() +} + +// Set parses the flag characters in s and updates *m. +func (m *BuilderMode) Set(s string) error { + var mode BuilderMode + for _, c := range s { + switch c { + case 'D': + mode |= GlobalDebug + case 'P': + mode |= PrintPackages + case 'F': + mode |= PrintFunctions + case 'S': + mode |= LogSource | BuildSerially + case 'C': + mode |= SanityCheckFunctions + case 'N': + mode |= NaiveForm + case 'L': + mode |= BuildSerially + default: + return fmt.Errorf("unknown BuilderMode option: %q", c) + } + } + *m = mode + return nil +} + +// Get returns m. +func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/honnef.co/go/tools/ssa/print.go b/vendor/honnef.co/go/tools/ssa/print.go new file mode 100644 index 0000000..a7deb88 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/print.go @@ -0,0 +1,433 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This file implements the String() methods for all Value and +// Instruction types. + +import ( + "bytes" + "fmt" + "go/types" + "io" + "reflect" + "sort" + + "golang.org/x/tools/go/types/typeutil" +) + +// relName returns the name of v relative to i. +// In most cases, this is identical to v.Name(), but references to +// Functions (including methods) and Globals use RelString and +// all types are displayed with relType, so that only cross-package +// references are package-qualified. +// +func relName(v Value, i Instruction) string { + var from *types.Package + if i != nil { + from = i.Parent().pkg() + } + switch v := v.(type) { + case Member: // *Function or *Global + return v.RelString(from) + case *Const: + return v.RelString(from) + } + return v.Name() +} + +func relType(t types.Type, from *types.Package) string { + return types.TypeString(t, types.RelativeTo(from)) +} + +func relString(m Member, from *types.Package) string { + // NB: not all globals have an Object (e.g. init$guard), + // so use Package().Object not Object.Package(). + if pkg := m.Package().Pkg; pkg != nil && pkg != from { + return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) + } + return m.Name() +} + +// Value.String() +// +// This method is provided only for debugging. +// It never appears in disassembly, which uses Value.Name(). 
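+//
+// For example (illustrative), a parameter x of type int prints as
+//
+//	parameter x : int
+//
+// via String(), but is rendered simply as "x" wherever the printed
+// SSA code refers to it.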
+ +func (v *Parameter) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *FreeVar) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *Builtin) String() string { + return fmt.Sprintf("builtin %s", v.Name()) +} + +// Instruction.String() + +func (v *Alloc) String() string { + op := "local" + if v.Heap { + op = "new" + } + from := v.Parent().pkg() + return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment) +} + +func (v *Phi) String() string { + var b bytes.Buffer + b.WriteString("φ [") + for i, edge := range v.Edges { + if i > 0 { + b.WriteString(", ") + } + // Be robust against malformed CFG. + block := -1 + if v.block != nil && i < len(v.block.Preds) { + block = v.block.Preds[i].Index + } + fmt.Fprintf(&b, "%d: ", block) + edgeVal := "" // be robust + if edge != nil { + edgeVal = relName(edge, v) + } + b.WriteString(edgeVal) + } + b.WriteString("]") + if v.Comment != "" { + b.WriteString(" #") + b.WriteString(v.Comment) + } + return b.String() +} + +func printCall(v *CallCommon, prefix string, instr Instruction) string { + var b bytes.Buffer + b.WriteString(prefix) + if !v.IsInvoke() { + b.WriteString(relName(v.Value, instr)) + } else { + fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name()) + } + b.WriteString("(") + for i, arg := range v.Args { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(arg, instr)) + } + if v.Signature().Variadic() { + b.WriteString("...") + } + b.WriteString(")") + return b.String() +} + +func (c *CallCommon) String() string { + return printCall(c, "", nil) +} + +func (v *Call) String() string { + return printCall(&v.Call, "", v) +} + +func (v *BinOp) String() string { + return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v)) +} + +func (v *UnOp) String() string { + return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk)) +} + +func printConv(prefix string, v, x Value) string { + from := v.Parent().pkg() + return fmt.Sprintf("%s %s <- %s (%s)", + prefix, + relType(v.Type(), from), + relType(x.Type(), from), + relName(x, v.(Instruction))) +} + +func (v *ChangeType) String() string { return printConv("changetype", v, v.X) } +func (v *Convert) String() string { return printConv("convert", v, v.X) } +func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) } +func (v *MakeInterface) String() string { return printConv("make", v, v.X) } + +func (v *MakeClosure) String() string { + var b bytes.Buffer + fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v)) + if v.Bindings != nil { + b.WriteString(" [") + for i, c := range v.Bindings { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(c, v)) + } + b.WriteString("]") + } + return b.String() +} + +func (v *MakeSlice) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s %s", + relType(v.Type(), from), + relName(v.Len, v), + relName(v.Cap, v)) +} + +func (v *Slice) String() string { + var b bytes.Buffer + b.WriteString("slice ") + b.WriteString(relName(v.X, v)) + b.WriteString("[") + if v.Low != nil { + b.WriteString(relName(v.Low, v)) + } + b.WriteString(":") + if v.High != nil { + b.WriteString(relName(v.High, v)) + } + if v.Max != nil { + b.WriteString(":") + b.WriteString(relName(v.Max, v)) + } + b.WriteString("]") + return b.String() +} + +func (v *MakeMap) String() string { + res := "" + if 
v.Reserve != nil { + res = relName(v.Reserve, v) + } + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s", relType(v.Type(), from), res) +} + +func (v *MakeChan) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v)) +} + +func (v *FieldAddr) String() string { + st := deref(v.X.Type()).Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *Field) String() string { + st := v.X.Type().Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *IndexAddr) String() string { + return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Index) String() string { + return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Lookup) String() string { + return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk)) +} + +func (v *Range) String() string { + return "range " + relName(v.X, v) +} + +func (v *Next) String() string { + return "next " + relName(v.Iter, v) +} + +func (v *TypeAssert) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from)) +} + +func (v *Extract) String() string { + return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index) +} + +func (s *Jump) String() string { + // Be robust against malformed CFG. + block := -1 + if s.block != nil && len(s.block.Succs) == 1 { + block = s.block.Succs[0].Index + } + return fmt.Sprintf("jump %d", block) +} + +func (s *If) String() string { + // Be robust against malformed CFG. 
+ tblock, fblock := -1, -1 + if s.block != nil && len(s.block.Succs) == 2 { + tblock = s.block.Succs[0].Index + fblock = s.block.Succs[1].Index + } + return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock) +} + +func (s *Go) String() string { + return printCall(&s.Call, "go ", s) +} + +func (s *Panic) String() string { + return "panic " + relName(s.X, s) +} + +func (s *Return) String() string { + var b bytes.Buffer + b.WriteString("return") + for i, r := range s.Results { + if i == 0 { + b.WriteString(" ") + } else { + b.WriteString(", ") + } + b.WriteString(relName(r, s)) + } + return b.String() +} + +func (*RunDefers) String() string { + return "rundefers" +} + +func (s *Send) String() string { + return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s)) +} + +func (s *Defer) String() string { + return printCall(&s.Call, "defer ", s) +} + +func (s *Select) String() string { + var b bytes.Buffer + for i, st := range s.States { + if i > 0 { + b.WriteString(", ") + } + if st.Dir == types.RecvOnly { + b.WriteString("<-") + b.WriteString(relName(st.Chan, s)) + } else { + b.WriteString(relName(st.Chan, s)) + b.WriteString("<-") + b.WriteString(relName(st.Send, s)) + } + } + non := "" + if !s.Blocking { + non = "non" + } + return fmt.Sprintf("select %sblocking [%s]", non, b.String()) +} + +func (s *Store) String() string { + return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s)) +} + +func (s *BlankStore) String() string { + return fmt.Sprintf("_ = %s", relName(s.Val, s)) +} + +func (s *MapUpdate) String() string { + return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) +} + +func (s *DebugRef) String() string { + p := s.Parent().Prog.Fset.Position(s.Pos()) + var descr interface{} + if s.object != nil { + descr = s.object // e.g. "var x int" + } else { + descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr" + } + var addr string + if s.IsAddr { + addr = "address of " + } + return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) +} + +func (p *Package) String() string { + return "package " + p.Pkg.Path() +} + +var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer + +func (p *Package) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WritePackage(&buf, p) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WritePackage writes to buf a human-readable summary of p. 
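+//
+// A minimal usage sketch: the same output is also available via
+// (*Package).WriteTo, e.g.
+//
+//	pkg.WriteTo(os.Stdout)
+//
+// which formats the summary into a buffer and writes it out.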
+func WritePackage(buf *bytes.Buffer, p *Package) { + fmt.Fprintf(buf, "%s:\n", p) + + var names []string + maxname := 0 + for name := range p.Members { + if l := len(name); l > maxname { + maxname = l + } + names = append(names, name) + } + + from := p.Pkg + sort.Strings(names) + for _, name := range names { + switch mem := p.Members[name].(type) { + case *NamedConst: + fmt.Fprintf(buf, " const %-*s %s = %s\n", + maxname, name, mem.Name(), mem.Value.RelString(from)) + + case *Function: + fmt.Fprintf(buf, " func %-*s %s\n", + maxname, name, relType(mem.Type(), from)) + + case *Type: + fmt.Fprintf(buf, " type %-*s %s\n", + maxname, name, relType(mem.Type().Underlying(), from)) + for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { + fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) + } + + case *Global: + fmt.Fprintf(buf, " var %-*s %s\n", + maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from)) + } + } + + fmt.Fprintf(buf, "\n") +} + +func commaOk(x bool) string { + if x { + return ",ok" + } + return "" +} diff --git a/vendor/honnef.co/go/tools/ssa/sanity.go b/vendor/honnef.co/go/tools/ssa/sanity.go new file mode 100644 index 0000000..bd7377c --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/sanity.go @@ -0,0 +1,523 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// An optional pass for sanity-checking invariants of the SSA representation. +// Currently it checks CFG invariants but little at the instruction level. + +import ( + "fmt" + "go/types" + "io" + "os" + "strings" +) + +type sanity struct { + reporter io.Writer + fn *Function + block *BasicBlock + instrs map[Instruction]struct{} + insane bool +} + +// sanityCheck performs integrity checking of the SSA representation +// of the function fn and returns true if it was valid. Diagnostics +// are written to reporter if non-nil, os.Stderr otherwise. Some +// diagnostics are only warnings and do not imply a negative result. +// +// Sanity-checking is intended to facilitate the debugging of code +// transformation passes. +// +func sanityCheck(fn *Function, reporter io.Writer) bool { + if reporter == nil { + reporter = os.Stderr + } + return (&sanity{reporter: reporter}).checkFunction(fn) +} + +// mustSanityCheck is like sanityCheck but panics instead of returning +// a negative result. +// +func mustSanityCheck(fn *Function, reporter io.Writer) { + if !sanityCheck(fn, reporter) { + fn.WriteTo(os.Stderr) + panic("SanityCheck failed") + } +} + +func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { + fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) + if s.block != nil { + fmt.Fprintf(s.reporter, ", block %s", s.block) + } + io.WriteString(s.reporter, ": ") + fmt.Fprintf(s.reporter, format, args...) + io.WriteString(s.reporter, "\n") +} + +func (s *sanity) errorf(format string, args ...interface{}) { + s.insane = true + s.diagnostic("Error", format, args...) +} + +func (s *sanity) warnf(format string, args ...interface{}) { + s.diagnostic("Warning", format, args...) +} + +// findDuplicate returns an arbitrary basic block that appeared more +// than once in blocks, or nil if all were unique. 
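+//
+// (Duplicates matter because φ-node edges are indexed by position in
+// Preds; checkInstr below reports a phi node in a block with a
+// duplicate predecessor as an error.)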
+func findDuplicate(blocks []*BasicBlock) *BasicBlock { + if len(blocks) < 2 { + return nil + } + if blocks[0] == blocks[1] { + return blocks[0] + } + // Slow path: + m := make(map[*BasicBlock]bool) + for _, b := range blocks { + if m[b] { + return b + } + m[b] = true + } + return nil +} + +func (s *sanity) checkInstr(idx int, instr Instruction) { + switch instr := instr.(type) { + case *If, *Jump, *Return, *Panic: + s.errorf("control flow instruction not at end of block") + case *Phi: + if idx == 0 { + // It suffices to apply this check to just the first phi node. + if dup := findDuplicate(s.block.Preds); dup != nil { + s.errorf("phi node in block with duplicate predecessor %s", dup) + } + } else { + prev := s.block.Instrs[idx-1] + if _, ok := prev.(*Phi); !ok { + s.errorf("Phi instruction follows a non-Phi: %T", prev) + } + } + if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { + s.errorf("phi node has %d edges but %d predecessors", ne, np) + + } else { + for i, e := range instr.Edges { + if e == nil { + s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i]) + } + } + } + + case *Alloc: + if !instr.Heap { + found := false + for _, l := range s.fn.Locals { + if l == instr { + found = true + break + } + } + if !found { + s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) + } + } + + case *BinOp: + case *Call: + case *ChangeInterface: + case *ChangeType: + case *Convert: + if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok { + if _, ok := instr.Type().Underlying().(*types.Basic); !ok { + s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type()) + } + } + + case *Defer: + case *Extract: + case *Field: + case *FieldAddr: + case *Go: + case *Index: + case *IndexAddr: + case *Lookup: + case *MakeChan: + case *MakeClosure: + numFree := len(instr.Fn.(*Function).FreeVars) + numBind := len(instr.Bindings) + if numFree != numBind { + s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", + numBind, instr.Fn, numFree) + + } + if recv := instr.Type().(*types.Signature).Recv(); recv != nil { + s.errorf("MakeClosure's type includes receiver %s", recv.Type()) + } + + case *MakeInterface: + case *MakeMap: + case *MakeSlice: + case *MapUpdate: + case *Next: + case *Range: + case *RunDefers: + case *Select: + case *Send: + case *Slice: + case *Store: + case *TypeAssert: + case *UnOp: + case *DebugRef: + case *BlankStore: + case *Sigma: + // TODO(adonovan): implement checks. + default: + panic(fmt.Sprintf("Unknown instruction type: %T", instr)) + } + + if call, ok := instr.(CallInstruction); ok { + if call.Common().Signature() == nil { + s.errorf("nil signature: %s", call) + } + } + + // Check that value-defining instructions have valid types + // and a valid referrer list. + if v, ok := instr.(Value); ok { + t := v.Type() + if t == nil { + s.errorf("no type: %s = %s", v.Name(), v) + } else if t == tRangeIter { + // not a proper type; ignore. + } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { + s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) + } + s.checkReferrerList(v) + } + + // Untyped constants are legal as instruction Operands(), + // for example: + // _ = "foo"[0] + // or: + // if wordsize==64 {...} + + // All other non-Instruction Values can be found via their + // enclosing Function or Package. 
+} + +func (s *sanity) checkFinalInstr(idx int, instr Instruction) { + switch instr := instr.(type) { + case *If: + if nsuccs := len(s.block.Succs); nsuccs != 2 { + s.errorf("If-terminated block has %d successors; expected 2", nsuccs) + return + } + if s.block.Succs[0] == s.block.Succs[1] { + s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) + return + } + + case *Jump: + if nsuccs := len(s.block.Succs); nsuccs != 1 { + s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) + return + } + + case *Return: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Return-terminated block has %d successors; expected none", nsuccs) + return + } + if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { + s.errorf("%d-ary return in %d-ary function", na, nf) + } + + case *Panic: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Panic-terminated block has %d successors; expected none", nsuccs) + return + } + + default: + s.errorf("non-control flow instruction at end of block") + } +} + +func (s *sanity) checkBlock(b *BasicBlock, index int) { + s.block = b + + if b.Index != index { + s.errorf("block has incorrect Index %d", b.Index) + } + if b.parent != s.fn { + s.errorf("block has incorrect parent %s", b.parent) + } + + // Check all blocks are reachable. + // (The entry block is always implicitly reachable, + // as is the Recover block, if any.) + if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 { + s.warnf("unreachable block") + if b.Instrs == nil { + // Since this block is about to be pruned, + // tolerating transient problems in it + // simplifies other optimizations. + return + } + } + + // Check predecessor and successor relations are dual, + // and that all blocks in CFG belong to same function. + for _, a := range b.Preds { + found := false + for _, bb := range a.Succs { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) + } + if a.parent != s.fn { + s.errorf("predecessor %s belongs to different function %s", a, a.parent) + } + } + for _, c := range b.Succs { + found := false + for _, bb := range c.Preds { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) + } + if c.parent != s.fn { + s.errorf("successor %s belongs to different function %s", c, c.parent) + } + } + + // Check each instruction is sane. + n := len(b.Instrs) + if n == 0 { + s.errorf("basic block contains no instructions") + } + var rands [10]*Value // reuse storage + for j, instr := range b.Instrs { + if instr == nil { + s.errorf("nil instruction at index %d", j) + continue + } + if b2 := instr.Block(); b2 == nil { + s.errorf("nil Block() for instruction at index %d", j) + continue + } else if b2 != b { + s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) + continue + } + if j < n-1 { + s.checkInstr(j, instr) + } else { + s.checkFinalInstr(j, instr) + } + + // Check Instruction.Operands. + operands: + for i, op := range instr.Operands(rands[:0]) { + if op == nil { + s.errorf("nil operand pointer %d of %s", i, instr) + continue + } + val := *op + if val == nil { + continue // a nil operand is ok + } + + // Check that "untyped" types only appear on constant operands. 
+ if _, ok := (*op).(*Const); !ok { + if basic, ok := (*op).Type().(*types.Basic); ok { + if basic.Info()&types.IsUntyped != 0 { + s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) + } + } + } + + // Check that Operands that are also Instructions belong to same function. + // TODO(adonovan): also check their block dominates block b. + if val, ok := val.(Instruction); ok { + if val.Parent() != s.fn { + s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) + } + } + + // Check that each function-local operand of + // instr refers back to instr. (NB: quadratic) + switch val := val.(type) { + case *Const, *Global, *Builtin: + continue // not local + case *Function: + if val.parent == nil { + continue // only anon functions are local + } + } + + // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. + + if refs := val.Referrers(); refs != nil { + for _, ref := range *refs { + if ref == instr { + continue operands + } + } + s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) + } else { + s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) + } + } + } +} + +func (s *sanity) checkReferrerList(v Value) { + refs := v.Referrers() + if refs == nil { + s.errorf("%s has missing referrer list", v.Name()) + return + } + for i, ref := range *refs { + if _, ok := s.instrs[ref]; !ok { + s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) + } + } +} + +func (s *sanity) checkFunction(fn *Function) bool { + // TODO(adonovan): check Function invariants: + // - check params match signature + // - check transient fields are nil + // - warn if any fn.Locals do not appear among block instructions. + s.fn = fn + if fn.Prog == nil { + s.errorf("nil Prog") + } + + fn.String() // must not crash + fn.RelString(fn.pkg()) // must not crash + + // All functions have a package, except delegates (which are + // shared across packages, or duplicated as weak symbols in a + // separate-compilation model), and error.Error. + if fn.Pkg == nil { + if strings.HasPrefix(fn.Synthetic, "wrapper ") || + strings.HasPrefix(fn.Synthetic, "bound ") || + strings.HasPrefix(fn.Synthetic, "thunk ") || + strings.HasSuffix(fn.name, "Error") { + // ok + } else { + s.errorf("nil Pkg") + } + } + if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn { + s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) + } + for i, l := range fn.Locals { + if l.Parent() != fn { + s.errorf("Local %s at index %d has wrong parent", l.Name(), i) + } + if l.Heap { + s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) + } + } + // Build the set of valid referrers. + s.instrs = make(map[Instruction]struct{}) + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + s.instrs[instr] = struct{}{} + } + } + for i, p := range fn.Params { + if p.Parent() != fn { + s.errorf("Param %s at index %d has wrong parent", p.Name(), i) + } + s.checkReferrerList(p) + } + for i, fv := range fn.FreeVars { + if fv.Parent() != fn { + s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) + } + s.checkReferrerList(fv) + } + + if fn.Blocks != nil && len(fn.Blocks) == 0 { + // Function _had_ blocks (so it's not external) but + // they were "optimized" away, even the entry block. 
+ s.errorf("Blocks slice is non-nil but empty") + } + for i, b := range fn.Blocks { + if b == nil { + s.warnf("nil *BasicBlock at f.Blocks[%d]", i) + continue + } + s.checkBlock(b, i) + } + if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover { + s.errorf("Recover block is not in Blocks slice") + } + + s.block = nil + for i, anon := range fn.AnonFuncs { + if anon.Parent() != fn { + s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) + } + } + s.fn = nil + return !s.insane +} + +// sanityCheckPackage checks invariants of packages upon creation. +// It does not require that the package is built. +// Unlike sanityCheck (for functions), it just panics at the first error. +func sanityCheckPackage(pkg *Package) { + if pkg.Pkg == nil { + panic(fmt.Sprintf("Package %s has no Object", pkg)) + } + pkg.String() // must not crash + + for name, mem := range pkg.Members { + if name != mem.Name() { + panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", + pkg.Pkg.Path(), mem, mem.Name(), name)) + } + obj := mem.Object() + if obj == nil { + // This check is sound because fields + // {Global,Function}.object have type + // types.Object. (If they were declared as + // *types.{Var,Func}, we'd have a non-empty + // interface containing a nil pointer.) + + continue // not all members have typechecker objects + } + if obj.Name() != name { + if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { + // Ok. The name of a declared init function varies between + // its types.Func ("init") and its ssa.Function ("init#%d"). + } else { + panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", + pkg.Pkg.Path(), mem, obj.Name(), name)) + } + } + if obj.Pos() != mem.Pos() { + panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos())) + } + } +} diff --git a/vendor/honnef.co/go/tools/ssa/source.go b/vendor/honnef.co/go/tools/ssa/source.go new file mode 100644 index 0000000..e17e023 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/source.go @@ -0,0 +1,299 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This file defines utilities for working with source positions +// or source-level named entities ("objects"). + +// TODO(adonovan): test that {Value,Instruction}.Pos() positions match +// the originating syntax, as specified. + +import ( + "go/ast" + "go/token" + "go/types" + "log" +) + +// EnclosingFunction returns the function that contains the syntax +// node denoted by path. +// +// Syntax associated with package-level variable specifications is +// enclosed by the package's init() function. +// +// Returns nil if not found; reasons might include: +// - the node is not enclosed by any function. +// - the node is within an anonymous function (FuncLit) and +// its SSA function has not been created yet +// (pkg.Build() has not yet been called). +// +func EnclosingFunction(pkg *Package, path []ast.Node) *Function { + // Start with package-level function... + fn := findEnclosingPackageLevelFunction(pkg, path) + if fn == nil { + return nil // not in any function + } + + // ...then walk down the nested anonymous functions. 
+ n := len(path) +outer: + for i := range path { + if lit, ok := path[n-1-i].(*ast.FuncLit); ok { + for _, anon := range fn.AnonFuncs { + if anon.Pos() == lit.Type.Func { + fn = anon + continue outer + } + } + // SSA function not found: + // - package not yet built, or maybe + // - builder skipped FuncLit in dead block + // (in principle; but currently the Builder + // generates even dead FuncLits). + return nil + } + } + return fn +} + +// HasEnclosingFunction returns true if the AST node denoted by path +// is contained within the declaration of some function or +// package-level variable. +// +// Unlike EnclosingFunction, the behaviour of this function does not +// depend on whether SSA code for pkg has been built, so it can be +// used to quickly reject check inputs that will cause +// EnclosingFunction to fail, prior to SSA building. +// +func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { + return findEnclosingPackageLevelFunction(pkg, path) != nil +} + +// findEnclosingPackageLevelFunction returns the Function +// corresponding to the package-level function enclosing path. +// +func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { + if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] + switch decl := path[n-2].(type) { + case *ast.GenDecl: + if decl.Tok == token.VAR && n >= 3 { + // Package-level 'var' initializer. + return pkg.init + } + + case *ast.FuncDecl: + if decl.Recv == nil && decl.Name.Name == "init" { + // Explicit init() function. + for _, b := range pkg.init.Blocks { + for _, instr := range b.Instrs { + if instr, ok := instr.(*Call); ok { + if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos { + return callee + } + } + } + } + // Hack: return non-nil when SSA is not yet + // built so that HasEnclosingFunction works. + return pkg.init + } + // Declared function/method. + return findNamedFunc(pkg, decl.Name.NamePos) + } + } + return nil // not in any function +} + +// findNamedFunc returns the named function whose FuncDecl.Ident is at +// position pos. +// +func findNamedFunc(pkg *Package, pos token.Pos) *Function { + // Look at all package members and method sets of named types. + // Not very efficient. + for _, mem := range pkg.Members { + switch mem := mem.(type) { + case *Function: + if mem.Pos() == pos { + return mem + } + case *Type: + mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type())) + for i, n := 0, mset.Len(); i < n; i++ { + // Don't call Program.Method: avoid creating wrappers. + obj := mset.At(i).Obj().(*types.Func) + if obj.Pos() == pos { + if pkg.values[obj] == nil { + log.Println(obj) + } + return pkg.values[obj].(*Function) + } + } + } + } + return nil +} + +// ValueForExpr returns the SSA Value that corresponds to non-constant +// expression e. +// +// It returns nil if no value was found, e.g. +// - the expression is not lexically contained within f; +// - f was not built with debug information; or +// - e is a constant expression. (For efficiency, no debug +// information is stored for constants. Use +// go/types.Info.Types[e].Value instead.) +// - e is a reference to nil or a built-in function. +// - the value was optimised away. +// +// If e is an addressable expression used in an lvalue context, +// value is the address denoted by e, and isAddr is true. +// +// The types of e (or &e, if isAddr) and the result are equal +// (modulo "untyped" bools resulting from comparisons). 
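
    As a usage illustration only (an editor's sketch, not part of the vendored file): the lookup pipeline these helpers support, assuming a loader.Program plus an ssa.Package built with debug information; the helper name valueAt and its parameters are invented for the example. The (Tip) comment that follows restates the same recipe.

        package example

        import (
            "go/ast"
            "go/token"

            "golang.org/x/tools/go/ast/astutil"
            "golang.org/x/tools/go/loader"
            "honnef.co/go/tools/ssa"
        )

        // valueAt returns the ssa.Value for the innermost expression that
        // encloses the source interval [start, end), or nil if none is found.
        func valueAt(lprog *loader.Program, spkg *ssa.Package, start, end token.Pos) ssa.Value {
            info := lprog.Package(spkg.Pkg.Path()) // type-checked ASTs of the package
            if info == nil {
                return nil
            }
            for _, file := range info.Files {
                if file.Pos() <= start && end <= file.End() {
                    // Locate the AST path covering the interval ...
                    path, exact := astutil.PathEnclosingInterval(file, start, end)
                    if !exact {
                        return nil
                    }
                    // ... find the SSA function enclosing that path ...
                    fn := ssa.EnclosingFunction(spkg, path)
                    if fn == nil {
                        return nil // e.g. package or FuncLit not built yet
                    }
                    // ... and map the expression to its SSA value (needs debug info).
                    if expr, ok := path[0].(ast.Expr); ok {
                        v, _ := fn.ValueForExpr(expr)
                        return v
                    }
                }
            }
            return nil
        }
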
+// +// (Tip: to find the ssa.Value given a source position, use +// importer.PathEnclosingInterval to locate the ast.Node, then +// EnclosingFunction to locate the Function, then ValueForExpr to find +// the ssa.Value.) +// +func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { + if f.debugInfo() { // (opt) + e = unparen(e) + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if ref, ok := instr.(*DebugRef); ok { + if ref.Expr == e { + return ref.X, ref.IsAddr + } + } + } + } + } + return +} + +// --- Lookup functions for source-level named entities (types.Objects) --- + +// Package returns the SSA Package corresponding to the specified +// type-checker package object. +// It returns nil if no such SSA package has been created. +// +func (prog *Program) Package(obj *types.Package) *Package { + return prog.packages[obj] +} + +// packageLevelValue returns the package-level value corresponding to +// the specified named object, which may be a package-level const +// (*Const), var (*Global) or func (*Function) of some package in +// prog. It returns nil if the object is not found. +// +func (prog *Program) packageLevelValue(obj types.Object) Value { + if pkg, ok := prog.packages[obj.Pkg()]; ok { + return pkg.values[obj] + } + return nil +} + +// FuncValue returns the concrete Function denoted by the source-level +// named function obj, or nil if obj denotes an interface method. +// +// TODO(adonovan): check the invariant that obj.Type() matches the +// result's Signature, both in the params/results and in the receiver. +// +func (prog *Program) FuncValue(obj *types.Func) *Function { + fn, _ := prog.packageLevelValue(obj).(*Function) + return fn +} + +// ConstValue returns the SSA Value denoted by the source-level named +// constant obj. +// +func (prog *Program) ConstValue(obj *types.Const) *Const { + // TODO(adonovan): opt: share (don't reallocate) + // Consts for const objects and constant ast.Exprs. + + // Universal constant? {true,false,nil} + if obj.Parent() == types.Universe { + return NewConst(obj.Val(), obj.Type()) + } + // Package-level named constant? + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Const) + } + return NewConst(obj.Val(), obj.Type()) +} + +// VarValue returns the SSA Value that corresponds to a specific +// identifier denoting the source-level named variable obj. +// +// VarValue returns nil if a local variable was not found, perhaps +// because its package was not built, the debug information was not +// requested during SSA construction, or the value was optimized away. +// +// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), +// and that ident must resolve to obj. +// +// pkg is the package enclosing the reference. (A reference to a var +// always occurs within a function, so we need to know where to find it.) +// +// If the identifier is a field selector and its base expression is +// non-addressable, then VarValue returns the value of that field. +// For example: +// func f() struct {x int} +// f().x // VarValue(x) returns a *Field instruction of type int +// +// All other identifiers denote addressable locations (variables). +// For them, VarValue may return either the variable's address or its +// value, even when the expression is evaluated only for its value; the +// situation is reported by isAddr, the second component of the result. +// +// If !isAddr, the returned value is the one associated with the +// specific identifier. 
For example, +// var x int // VarValue(x) returns Const 0 here +// x = 1 // VarValue(x) returns Const 1 here +// +// It is not specified whether the value or the address is returned in +// any particular case, as it may depend upon optimizations performed +// during SSA code generation, such as registerization, constant +// folding, avoidance of materialization of subexpressions, etc. +// +func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { + // All references to a var are local to some function, possibly init. + fn := EnclosingFunction(pkg, ref) + if fn == nil { + return // e.g. def of struct field; SSA not built? + } + + id := ref[0].(*ast.Ident) + + // Defining ident of a parameter? + if id.Pos() == obj.Pos() { + for _, param := range fn.Params { + if param.Object() == obj { + return param, false + } + } + } + + // Other ident? + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + if dr, ok := instr.(*DebugRef); ok { + if dr.Pos() == id.Pos() { + return dr.X, dr.IsAddr + } + } + } + } + + // Defining ident of package-level var? + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Global), true + } + + return // e.g. debug info not requested, or var optimized away +} diff --git a/vendor/honnef.co/go/tools/ssa/ssa.go b/vendor/honnef.co/go/tools/ssa/ssa.go new file mode 100644 index 0000000..ed280c7 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/ssa.go @@ -0,0 +1,1751 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This package defines a high-level intermediate representation for +// Go programs using static single-assignment (SSA) form. + +import ( + "fmt" + "go/ast" + exact "go/constant" + "go/token" + "go/types" + "sync" + + "golang.org/x/tools/go/types/typeutil" +) + +// A Program is a partial or complete Go program converted to SSA form. +type Program struct { + Fset *token.FileSet // position information for the files of this Program + imported map[string]*Package // all importable Packages, keyed by import path + packages map[*types.Package]*Package // all loaded Packages, keyed by object + mode BuilderMode // set of mode bits for SSA construction + MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets + + methodsMu sync.Mutex // guards the following maps: + methodSets typeutil.Map // maps type to its concrete methodSet + runtimeTypes typeutil.Map // types for which rtypes are needed + canon typeutil.Map // type canonicalization map + bounds map[*types.Func]*Function // bounds for curried x.Method closures + thunks map[selectionKey]*Function // thunks for T.Method expressions +} + +// A Package is a single analyzed Go package containing Members for +// all package-level functions, variables, constants and types it +// declares. These may be accessed directly via Members, or via the +// type-specific accessor methods Func, Type, Var and Const. +// +// Members also contains entries for "init" (the synthetic package +// initializer) and "init#%d", the nth declared init function, +// and unspecified other things too. +// +type Package struct { + Prog *Program // the owning program + Pkg *types.Package // the corresponding go/types.Package + Members map[string]Member // all package members keyed by name (incl. init and init#%d) + values map[types.Object]Value // package members (incl. 
types and methods), keyed by object + init *Function // Func("init"); the package's init function + debug bool // include full debug info in this package + + // The following fields are set transiently, then cleared + // after building. + buildOnce sync.Once // ensures package building occurs once + ninit int32 // number of init functions + info *types.Info // package type information + files []*ast.File // package ASTs +} + +// A Member is a member of a Go package, implemented by *NamedConst, +// *Global, *Function, or *Type; they are created by package-level +// const, var, func and type declarations respectively. +// +type Member interface { + Name() string // declared name of the package member + String() string // package-qualified name of the package member + RelString(*types.Package) string // like String, but relative refs are unqualified + Object() types.Object // typechecker's object for this member, if any + Pos() token.Pos // position of member's declaration, if known + Type() types.Type // type of the package member + Token() token.Token // token.{VAR,FUNC,CONST,TYPE} + Package() *Package // the containing package +} + +// A Type is a Member of a Package representing a package-level named type. +// +// Type() returns a *types.Named. +// +type Type struct { + object *types.TypeName + pkg *Package +} + +// A NamedConst is a Member of a Package representing a package-level +// named constant. +// +// Pos() returns the position of the declaring ast.ValueSpec.Names[*] +// identifier. +// +// NB: a NamedConst is not a Value; it contains a constant Value, which +// it augments with the name and position of its 'const' declaration. +// +type NamedConst struct { + object *types.Const + Value *Const + pos token.Pos + pkg *Package +} + +// A Value is an SSA value that can be referenced by an instruction. +type Value interface { + // Name returns the name of this value, and determines how + // this Value appears when used as an operand of an + // Instruction. + // + // This is the same as the source name for Parameters, + // Builtins, Functions, FreeVars, Globals. + // For constants, it is a representation of the constant's value + // and type. For all other Values this is the name of the + // virtual register defined by the instruction. + // + // The name of an SSA Value is not semantically significant, + // and may not even be unique within a function. + Name() string + + // If this value is an Instruction, String returns its + // disassembled form; otherwise it returns unspecified + // human-readable information about the Value, such as its + // kind, name and type. + String() string + + // Type returns the type of this value. Many instructions + // (e.g. IndexAddr) change their behaviour depending on the + // types of their operands. + Type() types.Type + + // Parent returns the function to which this Value belongs. + // It returns nil for named Functions, Builtin, Const and Global. + Parent() *Function + + // Referrers returns the list of instructions that have this + // value as one of their operands; it may contain duplicates + // if an instruction has a repeated operand. + // + // Referrers actually returns a pointer through which the + // caller may perform mutations to the object's state. + // + // Referrers is currently only defined if Parent()!=nil, + // i.e. for the function-local values FreeVar, Parameter, + // Functions (iff anonymous) and all value-defining instructions. + // It returns nil for named Functions, Builtin, Const and Global. 
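
    A small editor's sketch (not vendored code) of how a client might walk the Operands/Referrers relation described above, assuming only a built *ssa.Function and the exported API of this package:

        package example

        import (
            "fmt"

            "honnef.co/go/tools/ssa"
        )

        // printDefUse dumps, for each instruction in fn, the values it uses
        // (its operands) and, if the instruction is itself a Value, the
        // instructions that use it (its referrers).
        func printDefUse(fn *ssa.Function) {
            var rands []*ssa.Value // scratch slice, reused across Operands calls
            for _, b := range fn.Blocks {
                for _, instr := range b.Instrs {
                    for _, op := range instr.Operands(rands[:0]) {
                        if *op != nil {
                            fmt.Printf("%s\tuses\t%s\n", instr, (*op).Name())
                        }
                    }
                    if v, ok := instr.(ssa.Value); ok {
                        if refs := v.Referrers(); refs != nil {
                            for _, ref := range *refs {
                                fmt.Printf("%s\tis used by\t%s\n", v.Name(), ref)
                            }
                        }
                    }
                }
            }
        }
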
+ // + // Instruction.Operands contains the inverse of this relation. + Referrers() *[]Instruction + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this value, + // or token.NoPos if it was not explicit in the source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Lparen + // for an *ast.CallExpr. This permits a compact but + // approximate mapping from Values to source positions for use + // in diagnostic messages, for example. + // + // (Do not use this position to determine which Value + // corresponds to an ast.Expr; use Function.ValueForExpr + // instead. NB: it requires that the function was built with + // debug information.) + Pos() token.Pos +} + +// An Instruction is an SSA instruction that computes a new Value or +// has some effect. +// +// An Instruction that defines a value (e.g. BinOp) also implements +// the Value interface; an Instruction that only has an effect (e.g. Store) +// does not. +// +type Instruction interface { + // String returns the disassembled form of this value. + // + // Examples of Instructions that are Values: + // "x + y" (BinOp) + // "len([])" (Call) + // Note that the name of the Value is not printed. + // + // Examples of Instructions that are not Values: + // "return x" (Return) + // "*y = x" (Store) + // + // (The separation Value.Name() from Value.String() is useful + // for some analyses which distinguish the operation from the + // value it defines, e.g., 'y = local int' is both an allocation + // of memory 'local int' and a definition of a pointer y.) + String() string + + // Parent returns the function to which this instruction + // belongs. + Parent() *Function + + // Block returns the basic block to which this instruction + // belongs. + Block() *BasicBlock + + // setBlock sets the basic block to which this instruction belongs. + setBlock(*BasicBlock) + + // Operands returns the operands of this instruction: the + // set of Values it references. + // + // Specifically, it appends their addresses to rands, a + // user-provided slice, and returns the resulting slice, + // permitting avoidance of memory allocation. + // + // The operands are appended in undefined order, but the order + // is consistent for a given Instruction; the addresses are + // always non-nil but may point to a nil Value. Clients may + // store through the pointers, e.g. to effect a value + // renaming. + // + // Value.Referrers is a subset of the inverse of this + // relation. (Referrers are not tracked for all types of + // Values.) + Operands(rands []*Value) []*Value + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this + // instruction, or token.NoPos if it was not explicit in the + // source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Go token + // for an *ast.GoStmt. This permits a compact but approximate + // mapping from Instructions to source positions for use in + // diagnostic messages, for example. + // + // (Do not use this position to determine which Instruction + // corresponds to an ast.Expr; see the notes for Value.Pos. + // This position may be used to determine which non-Value + // Instruction corresponds to some ast.Stmts, but not all: If + // and Jump instructions have no Pos(), for example.) + Pos() token.Pos +} + +// A Node is a node in the SSA value graph. 
Every concrete type that +// implements Node is also either a Value, an Instruction, or both. +// +// Node contains the methods common to Value and Instruction, plus the +// Operands and Referrers methods generalized to return nil for +// non-Instructions and non-Values, respectively. +// +// Node is provided to simplify SSA graph algorithms. Clients should +// use the more specific and informative Value or Instruction +// interfaces where appropriate. +// +type Node interface { + // Common methods: + String() string + Pos() token.Pos + Parent() *Function + + // Partial methods: + Operands(rands []*Value) []*Value // nil for non-Instructions + Referrers() *[]Instruction // nil for non-Values +} + +// Function represents the parameters, results, and code of a function +// or method. +// +// If Blocks is nil, this indicates an external function for which no +// Go source code is available. In this case, FreeVars and Locals +// are nil too. Clients performing whole-program analysis must +// handle external functions specially. +// +// Blocks contains the function's control-flow graph (CFG). +// Blocks[0] is the function entry point; block order is not otherwise +// semantically significant, though it may affect the readability of +// the disassembly. +// To iterate over the blocks in dominance order, use DomPreorder(). +// +// Recover is an optional second entry point to which control resumes +// after a recovered panic. The Recover block may contain only a return +// statement, preceded by a load of the function's named return +// parameters, if any. +// +// A nested function (Parent()!=nil) that refers to one or more +// lexically enclosing local variables ("free variables") has FreeVars. +// Such functions cannot be called directly but require a +// value created by MakeClosure which, via its Bindings, supplies +// values for these parameters. +// +// If the function is a method (Signature.Recv() != nil) then the first +// element of Params is the receiver parameter. +// +// A Go package may declare many functions called "init". +// For each one, Object().Name() returns "init" but Name() returns +// "init#1", etc, in declaration order. +// +// Pos() returns the declaring ast.FuncLit.Type.Func or the position +// of the ast.FuncDecl.Name, if the function was explicit in the +// source. Synthetic wrappers, for which Synthetic != "", may share +// the same position as the function they wrap. +// Syntax.Pos() always returns the position of the declaring "func" token. +// +// Type() returns the function's Signature. 
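
    To make the Function/BasicBlock layout above concrete, a minimal sketch (editor's illustration; dumpCFG is an invented name) that visits a built function's blocks in the dominance order mentioned above:

        package example

        import (
            "fmt"

            "honnef.co/go/tools/ssa"
        )

        // dumpCFG prints one line per basic block of a built function.
        func dumpCFG(fn *ssa.Function) {
            if fn.Blocks == nil {
                fmt.Printf("%s: external function (no blocks)\n", fn)
                return
            }
            for _, b := range fn.DomPreorder() {
                if len(b.Instrs) == 0 {
                    continue // unreachable block awaiting pruning
                }
                last := b.Instrs[len(b.Instrs)-1] // always a control transfer
                fmt.Printf("block %d (%s): %d instructions, ends in %T, successors %v\n",
                    b.Index, b.Comment, len(b.Instrs), last, b.Succs)
            }
        }
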
+// +type Function struct { + name string + object types.Object // a declared *types.Func or one of its wrappers + method *types.Selection // info about provenance of synthetic methods + Signature *types.Signature + pos token.Pos + + Synthetic string // provenance of synthetic function; "" for true source functions + syntax ast.Node // *ast.Func{Decl,Lit}; replaced with simple ast.Node after build, unless debug mode + parent *Function // enclosing function if anon; nil if global + Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) + Prog *Program // enclosing program + Params []*Parameter // function parameters; for methods, includes receiver + FreeVars []*FreeVar // free variables whose values must be supplied by closure + Locals []*Alloc // local variables of this function + Blocks []*BasicBlock // basic blocks of the function; nil => external + Recover *BasicBlock // optional; control transfers here after recovered panic + AnonFuncs []*Function // anonymous functions directly beneath this one + referrers []Instruction // referring instructions (iff Parent() != nil) + + // The following fields are set transiently during building, + // then cleared. + currentBlock *BasicBlock // where to emit code + objects map[types.Object]Value // addresses of local variables + namedResults []*Alloc // tuple of named results + targets *targets // linked stack of branch targets + lblocks map[*ast.Object]*lblock // labelled blocks +} + +// BasicBlock represents an SSA basic block. +// +// The final element of Instrs is always an explicit transfer of +// control (If, Jump, Return, or Panic). +// +// A block may contain no Instructions only if it is unreachable, +// i.e., Preds is nil. Empty blocks are typically pruned. +// +// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) +// graph independent of the SSA Value graph: the control-flow graph or +// CFG. It is illegal for multiple edges to exist between the same +// pair of blocks. +// +// Each BasicBlock is also a node in the dominator tree of the CFG. +// The tree may be navigated using Idom()/Dominees() and queried using +// Dominates(). +// +// The order of Preds and Succs is significant (to Phi and If +// instructions, respectively). +// +type BasicBlock struct { + Index int // index of this block within Parent().Blocks + Comment string // optional label; no semantic significance + parent *Function // parent function + Instrs []Instruction // instructions in order + Preds, Succs []*BasicBlock // predecessors and successors + succs2 [2]*BasicBlock // initial space for Succs + dom domInfo // dominator tree info + gaps int // number of nil Instrs (transient) + rundefers int // number of rundefers (transient) +} + +// Pure values ---------------------------------------- + +// A FreeVar represents a free variable of the function to which it +// belongs. +// +// FreeVars are used to implement anonymous functions, whose free +// variables are lexically captured in a closure formed by +// MakeClosure. The value of such a free var is an Alloc or another +// FreeVar and is considered a potentially escaping heap address, with +// pointer type. +// +// FreeVars are also used to implement bound method closures. Such a +// free var represents the receiver value and may be of any type that +// has concrete methods. +// +// Pos() returns the position of the value that was captured, which +// belongs to an enclosing function. 
+// +type FreeVar struct { + name string + typ types.Type + pos token.Pos + parent *Function + referrers []Instruction + + // Transiently needed during building. + outer Value // the Value captured from the enclosing context. +} + +// A Parameter represents an input parameter of a function. +// +type Parameter struct { + name string + object types.Object // a *types.Var; nil for non-source locals + typ types.Type + pos token.Pos + parent *Function + referrers []Instruction +} + +// A Const represents the value of a constant expression. +// +// The underlying type of a constant may be any boolean, numeric, or +// string type. In addition, a Const may represent the nil value of +// any reference type---interface, map, channel, pointer, slice, or +// function---but not "untyped nil". +// +// All source-level constant expressions are represented by a Const +// of the same type and value. +// +// Value holds the exact value of the constant, independent of its +// Type(), using the same representation as package go/exact uses for +// constants, or nil for a typed nil value. +// +// Pos() returns token.NoPos. +// +// Example printed form: +// 42:int +// "hello":untyped string +// 3+4i:MyComplex +// +type Const struct { + typ types.Type + Value exact.Value +} + +// A Global is a named Value holding the address of a package-level +// variable. +// +// Pos() returns the position of the ast.ValueSpec.Names[*] +// identifier. +// +type Global struct { + name string + object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard + typ types.Type + pos token.Pos + + Pkg *Package +} + +// A Builtin represents a specific use of a built-in function, e.g. len. +// +// Builtins are immutable values. Builtins do not have addresses. +// Builtins can only appear in CallCommon.Func. +// +// Name() indicates the function: one of the built-in functions from the +// Go spec (excluding "make" and "new") or one of these ssa-defined +// intrinsics: +// +// // wrapnilchk returns ptr if non-nil, panics otherwise. +// // (For use in indirection wrappers.) +// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T +// +// Object() returns a *types.Builtin for built-ins defined by the spec, +// nil for others. +// +// Type() returns a *types.Signature representing the effective +// signature of the built-in for this call. +// +type Builtin struct { + name string + sig *types.Signature +} + +// Value-defining instructions ---------------------------------------- + +// The Alloc instruction reserves space for a variable of the given type, +// zero-initializes it, and yields its address. +// +// Alloc values are always addresses, and have pointer types, so the +// type of the allocated variable is actually +// Type().Underlying().(*types.Pointer).Elem(). +// +// If Heap is false, Alloc allocates space in the function's +// activation record (frame); we refer to an Alloc(Heap=false) as a +// "local" alloc. Each local Alloc returns the same address each time +// it is executed within the same activation; the space is +// re-initialized to zero. +// +// If Heap is true, Alloc allocates space in the heap; we +// refer to an Alloc(Heap=true) as a "new" alloc. Each new Alloc +// returns a different address each time it is executed. +// +// When Alloc is applied to a channel, map or slice type, it returns +// the address of an uninitialized (nil) reference of that kind; store +// the result of MakeSlice, MakeMap or MakeChan in that location to +// instantiate these types. 
+//
+// Pos() returns the ast.CompositeLit.Lbrace for a composite literal,
+// or the ast.CallExpr.Rparen for a call to new() or for a call that
+// allocates a varargs slice.
+//
+// Example printed form:
+//	t0 = local int
+//	t1 = new int
+//
+type Alloc struct {
+	register
+	Comment string
+	Heap    bool
+	index   int // dense numbering; for lifting
+}
+
+var _ Instruction = (*Sigma)(nil)
+var _ Value = (*Sigma)(nil)
+
+type Sigma struct {
+	register
+	X      Value
+	Branch bool
+}
+
+// Value follows the chain of Sigma nodes and returns the underlying
+// (non-Sigma) value that X ultimately refers to.
+func (p *Sigma) Value() Value {
+	v := p.X
+	for {
+		sigma, ok := v.(*Sigma)
+		if !ok {
+			break
+		}
+		v = sigma.X
+	}
+	return v
+}
+
+func (p *Sigma) String() string {
+	return fmt.Sprintf("σ [%s.%t]", relName(p.X, p), p.Branch)
+}
+
+// The Phi instruction represents an SSA φ-node, which combines values
+// that differ across incoming control-flow edges and yields a new
+// value. Within a block, all φ-nodes must appear before all non-φ
+// nodes.
+//
+// Pos() returns the position of the && or || for short-circuit
+// control-flow joins, or that of the *Alloc for φ-nodes inserted
+// during SSA renaming.
+//
+// Example printed form:
+//	t2 = phi [0: t0, 1: t1]
+//
+type Phi struct {
+	register
+	Comment string  // a hint as to its purpose
+	Edges   []Value // Edges[i] is value for Block().Preds[i]
+}
+
+// The Call instruction represents a function or method call.
+//
+// The Call instruction yields the function result if there is exactly
+// one. Otherwise it returns a tuple, the components of which are
+// accessed via Extract.
+//
+// See CallCommon for generic function call documentation.
+//
+// Pos() returns the ast.CallExpr.Lparen, if explicit in the source.
+//
+// Example printed form:
+//	t2 = println(t0, t1)
+//	t4 = t3()
+//	t7 = invoke t5.Println(...t6)
+//
+type Call struct {
+	register
+	Call CallCommon
+}
+
+// The BinOp instruction yields the result of binary operation X Op Y.
+//
+// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source.
+//
+// Example printed form:
+//	t1 = t0 + 1:int
+//
+type BinOp struct {
+	register
+	// One of:
+	// ADD SUB MUL QUO REM          + - * / %
+	// AND OR XOR SHL SHR AND_NOT   & | ^ << >> &^
+	// EQL LSS GTR NEQ LEQ GEQ      == < > != <= >=
+	Op   token.Token
+	X, Y Value
+}
+
+// The UnOp instruction yields the result of Op X.
+// ARROW is channel receive.
+// MUL is pointer indirection (load).
+// XOR is bitwise complement.
+// SUB is negation.
+// NOT is logical negation.
+//
+// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above
+// and a boolean indicating the success of the receive. The
+// components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source.
+// For receive operations (ARROW) implicit in ranging over a channel,
+// Pos() returns the ast.RangeStmt.For.
+// For implicit memory loads (STAR), Pos() returns the position of the
+// most closely associated source-level construct; the details are not
+// specified.
+//
+// Example printed form:
+//	t0 = *x
+//	t2 = <-t1,ok
+//
+type UnOp struct {
+	register
+	Op      token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^
+	X       Value
+	CommaOk bool
+}
+
+// The ChangeType instruction applies to X a value-preserving type
+// change to Type().
+//
+// Type changes are permitted:
+// - between a named type and its underlying type.
+// - between two named types of the same underlying type.
+// - between (possibly named) pointers to identical base types.
+// - from a bidirectional channel to a read- or write-channel, +// optionally adding/removing a name. +// +// This operation cannot fail dynamically. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = changetype *int <- IntPtr (t0) +// +type ChangeType struct { + register + X Value +} + +// The Convert instruction yields the conversion of value X to type +// Type(). One or both of those types is basic (but possibly named). +// +// A conversion may change the value and representation of its operand. +// Conversions are permitted: +// - between real numeric types. +// - between complex numeric types. +// - between string and []byte or []rune. +// - between pointers and unsafe.Pointer. +// - between unsafe.Pointer and uintptr. +// - from (Unicode) integer to (UTF-8) string. +// A conversion may imply a type name change also. +// +// This operation cannot fail dynamically. +// +// Conversions of untyped string/number/bool constants to a specific +// representation are eliminated during SSA construction. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = convert []byte <- string (t0) +// +type Convert struct { + register + X Value +} + +// ChangeInterface constructs a value of one interface type from a +// value of another interface type known to be assignable to it. +// This operation cannot fail. +// +// Pos() returns the ast.CallExpr.Lparen if the instruction arose from +// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the +// instruction arose from an explicit e.(T) operation; or token.NoPos +// otherwise. +// +// Example printed form: +// t1 = change interface interface{} <- I (t0) +// +type ChangeInterface struct { + register + X Value +} + +// MakeInterface constructs an instance of an interface type from a +// value of a concrete type. +// +// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set +// of X, and Program.Method(m) to find the implementation of a method. +// +// To construct the zero value of an interface type T, use: +// NewConst(exact.MakeNil(), T, pos) +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = make interface{} <- int (42:int) +// t2 = make Stringer <- t0 +// +type MakeInterface struct { + register + X Value +} + +// The MakeClosure instruction yields a closure value whose code is +// Fn and whose free variables' values are supplied by Bindings. +// +// Type() returns a (possibly named) *types.Signature. +// +// Pos() returns the ast.FuncLit.Type.Func for a function literal +// closure or the ast.SelectorExpr.Sel for a bound method closure. +// +// Example printed form: +// t0 = make closure anon@1.2 [x y z] +// t1 = make closure bound$(main.I).add [i] +// +type MakeClosure struct { + register + Fn Value // always a *Function + Bindings []Value // values for each free variable in Fn.FreeVars +} + +// The MakeMap instruction creates a new hash-table-based map object +// and yields a value of kind map. +// +// Type() returns a (possibly named) *types.Map. +// +// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or +// the ast.CompositeLit.Lbrack if created by a literal. 
+// +// Example printed form: +// t1 = make map[string]int t0 +// t1 = make StringIntMap t0 +// +type MakeMap struct { + register + Reserve Value // initial space reservation; nil => default +} + +// The MakeChan instruction creates a new channel object and yields a +// value of kind chan. +// +// Type() returns a (possibly named) *types.Chan. +// +// Pos() returns the ast.CallExpr.Lparen for the make(chan) that +// created it. +// +// Example printed form: +// t0 = make chan int 0 +// t0 = make IntChan 0 +// +type MakeChan struct { + register + Size Value // int; size of buffer; zero => synchronous. +} + +// The MakeSlice instruction yields a slice of length Len backed by a +// newly allocated array of length Cap. +// +// Both Len and Cap must be non-nil Values of integer type. +// +// (Alloc(types.Array) followed by Slice will not suffice because +// Alloc can only create arrays of constant length.) +// +// Type() returns a (possibly named) *types.Slice. +// +// Pos() returns the ast.CallExpr.Lparen for the make([]T) that +// created it. +// +// Example printed form: +// t1 = make []string 1:int t0 +// t1 = make StringSlice 1:int t0 +// +type MakeSlice struct { + register + Len Value + Cap Value +} + +// The Slice instruction yields a slice of an existing string, slice +// or *array X between optional integer bounds Low and High. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns string if the type of X was string, otherwise a +// *types.Slice with the same element type as X. +// +// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice +// operation, the ast.CompositeLit.Lbrace if created by a literal, or +// NoPos if not explicit in the source (e.g. a variadic argument slice). +// +// Example printed form: +// t1 = slice t0[1:] +// +type Slice struct { + register + X Value // slice, string, or *array + Low, High, Max Value // each may be nil +} + +// The FieldAddr instruction yields the address of Field of *struct X. +// +// The field is identified by its index within the field list of the +// struct type of X. +// +// Dynamically, this instruction panics if X evaluates to a nil +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t1 = &t0.name [#1] +// +type FieldAddr struct { + register + X Value // *struct + Field int // index into X.Type().Deref().(*types.Struct).Fields +} + +// The Field instruction yields the Field of struct X. +// +// The field is identified by its index within the field list of the +// struct type of X; by using numeric indices we avoid ambiguity of +// package-local identifiers and permit compact representations. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t1 = t0.name [#1] +// +type Field struct { + register + X Value // struct + Field int // index into X.Type().(*types.Struct).Fields +} + +// The IndexAddr instruction yields the address of the element at +// index Index of collection X. Index is an integer expression. +// +// The elements of maps and strings are not addressable; use Lookup or +// MapUpdate instead. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. 
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//	t2 = &t0[t1]
+//
+type IndexAddr struct {
+	register
+	X     Value // slice or *array,
+	Index Value // numeric index
+}
+
+// The Index instruction yields element Index of array X.
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//	t2 = t0[t1]
+//
+type Index struct {
+	register
+	X     Value // array
+	Index Value // integer index
+}
+
+// The Lookup instruction yields element Index of collection X, a map
+// or string. Index is an integer expression if X is a string or the
+// appropriate key type if X is a map.
+//
+// If CommaOk, the result is a 2-tuple of the value above and a
+// boolean indicating the result of a map membership test for the key.
+// The components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
+//
+// Example printed form:
+//	t2 = t0[t1]
+//	t5 = t3[t4],ok
+//
+type Lookup struct {
+	register
+	X       Value // string or map
+	Index   Value // numeric or key-typed index
+	CommaOk bool  // return a value,ok pair
+}
+
+// SelectState is a helper for Select.
+// It represents one goal state and its corresponding communication.
+//
+type SelectState struct {
+	Dir       types.ChanDir // direction of case (SendOnly or RecvOnly)
+	Chan      Value         // channel to use (for send or receive)
+	Send      Value         // value to send (for send)
+	Pos       token.Pos     // position of token.ARROW
+	DebugNode ast.Node      // ast.SendStmt or ast.UnaryExpr(<-) [debug mode]
+}
+
+// The Select instruction tests whether (or blocks until) one
+// of the specified sent or received states is entered.
+//
+// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
+// be the element type of each such state's Chan.
+// Select returns an n+2-tuple
+//	(index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+// The tuple's components, respectively, are:
+// - the index of the chosen I/O.
+// - a boolean, which is true iff the chosen I/O was a receive and
+//   the received value corresponds to a send on the channel (as
+//   opposed to a zero value generated because the channel is closed).
+// - the value received for each RECV state; r_i is meaningful only
+//   if state i was the chosen (receive) state, and is otherwise the
+//   zero value of type T_i.
+//
+// If the Blocking flag is true, select waits until exactly one state
+// holds, i.e. a channel becomes ready for the designated operation of
+// sending or receiving; select chooses one among the ready states
+// pseudo-randomly, performs the send or receive operation, and sets
+// 'index' to the index of the chosen channel.
+//
+// If the Blocking flag is false, select first examines all non-nil
+// channels; if none of them is ready, select sets 'index' to -1 and
+// returns immediately (and the recvOk and r_i components are
+// undefined).
+//
+// Pos() returns the ast.SelectStmt.Select.
+//
+// Example printed form:
+//	t3 = select nonblocking [<-t0, t1<-t2]
+//	t4 = select blocking []
+//
+type Select struct {
+	register
+	States   []*SelectState
+	Blocking bool
+}
+
+// The Range instruction yields an iterator over the domain and range
+// of X, which must be a string or map.
+//
+// Elements are accessed via Next.
+//
+// Type() returns an opaque and degenerate "rangeIter" type.
+//
+// Pos() returns the ast.RangeStmt.For.
+//
+// Example printed form:
+//	t0 = range "hello":string
+//
+type Range struct {
+	register
+	X Value // string or map
+}
+
+// The Next instruction reads and advances the (map or string)
+// iterator Iter and returns the 3-tuple (ok, k, v). If the iterator
+// is exhausted, ok is false and k and v are undefined.
+//
+// Components of the tuple are accessed using Extract.
+//
+// The IsString field distinguishes iterators over strings from those
+// over maps, as the Type() alone is insufficient: consider
+// map[int]rune.
+//
+// Type() returns a *types.Tuple for the triple (ok, k, v).
+// The types of k and/or v may be types.Invalid.
+//
+// Example printed form:
+//	t1 = next t0
+//
+type Next struct {
+	register
+	Iter     Value
+	IsString bool // true => string iterator; false => map iterator.
+}
+
+// The TypeAssert instruction tests whether interface value X has type
+// AssertedType.
+//
+// If !CommaOk, on success it returns v, the result of the conversion
+// (defined below); on failure it panics.
+//
+// If CommaOk: on success it returns a pair (v, true) where v is the
+// result of the conversion; on failure it returns (z, false) where z
+// is AssertedType's zero value. The components of the pair must be
+// accessed using the Extract instruction.
+//
+// If AssertedType is a concrete type, TypeAssert checks whether the
+// dynamic type in interface X is equal to it, and if so, the result
+// of the conversion is a copy of the value in the interface.
+//
+// If AssertedType is an interface, TypeAssert checks whether the
+// dynamic type of the interface is assignable to it, and if so, the
+// result of the conversion is a copy of the interface value X.
+// If AssertedType is a superinterface of X.Type(), the operation will
+// fail iff the operand is nil. (Contrast with ChangeInterface, which
+// performs no nil-check.)
+//
+// Type() reflects the actual type of the result, possibly a
+// 2-types.Tuple; AssertedType is the asserted type.
+//
+// Pos() returns the ast.CallExpr.Lparen if the instruction arose from
+// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the
+// instruction arose from an explicit e.(T) operation; or the
+// ast.CaseClause.Case if the instruction arose from a case of a
+// type-switch statement.
+// +// Example printed form: +// t1 = typeassert t0.(int) +// t3 = typeassert,ok t2.(T) +// +type TypeAssert struct { + register + X Value + AssertedType types.Type + CommaOk bool +} + +// The Extract instruction yields component Index of Tuple. +// +// This is used to access the results of instructions with multiple +// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and +// IndexExpr(Map). +// +// Example printed form: +// t1 = extract t0 #1 +// +type Extract struct { + register + Tuple Value + Index int +} + +// Instructions executed for effect. They do not yield a value. -------------------- + +// The Jump instruction transfers control to the sole successor of its +// owning block. +// +// A Jump must be the last instruction of its containing BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// jump done +// +type Jump struct { + anInstruction +} + +// The If instruction transfers control to one of the two successors +// of its owning block, depending on the boolean Cond: the first if +// true, the second if false. +// +// An If instruction must be the last instruction of its containing +// BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// if t0 goto done else body +// +type If struct { + anInstruction + Cond Value +} + +// The Return instruction returns values and control back to the calling +// function. +// +// len(Results) is always equal to the number of results in the +// function's signature. +// +// If len(Results) > 1, Return returns a tuple value with the specified +// components which the caller must access using Extract instructions. +// +// There is no instruction to return a ready-made tuple like those +// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or +// a tail-call to a function with multiple result parameters. +// +// Return must be the last instruction of its containing BasicBlock. +// Such a block has no successors. +// +// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. +// +// Example printed form: +// return +// return nil:I, 2:int +// +type Return struct { + anInstruction + Results []Value + pos token.Pos +} + +// The RunDefers instruction pops and invokes the entire stack of +// procedure calls pushed by Defer instructions in this function. +// +// It is legal to encounter multiple 'rundefers' instructions in a +// single control-flow path through a function; this is useful in +// the combined init() function, for example. +// +// Pos() returns NoPos. +// +// Example printed form: +// rundefers +// +type RunDefers struct { + anInstruction +} + +// The Panic instruction initiates a panic with value X. +// +// A Panic instruction must be the last instruction of its containing +// BasicBlock, which must have no successors. +// +// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; +// they are treated as calls to a built-in function. +// +// Pos() returns the ast.CallExpr.Lparen if this panic was explicit +// in the source. +// +// Example printed form: +// panic t0 +// +type Panic struct { + anInstruction + X Value // an interface{} + pos token.Pos +} + +// The Go instruction creates a new goroutine and calls the specified +// function within it. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.GoStmt.Go. 
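
    The terminators described above (If, Jump, Return, Panic) come with fixed successor counts, which is exactly what the sanity checker earlier in this patch enforces. A tiny sketch of that rule, with expectedSuccs as an invented helper name (not vendored API):

        package example

        import "honnef.co/go/tools/ssa"

        // expectedSuccs returns the number of successors a block ending in
        // the given instruction must have, or -1 if the instruction is not
        // a valid block terminator.
        func expectedSuccs(last ssa.Instruction) int {
            switch last.(type) {
            case *ssa.If:
                return 2 // true target, then false target
            case *ssa.Jump:
                return 1 // sole successor
            case *ssa.Return, *ssa.Panic:
                return 0 // control leaves the function
            default:
                return -1 // every block must end in one of the above
            }
        }
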
+// +// Example printed form: +// go println(t0, t1) +// go t3() +// go invoke t5.Println(...t6) +// +type Go struct { + anInstruction + Call CallCommon + pos token.Pos +} + +// The Defer instruction pushes the specified call onto a stack of +// functions to be called by a RunDefers instruction or by a panic. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.DeferStmt.Defer. +// +// Example printed form: +// defer println(t0, t1) +// defer t3() +// defer invoke t5.Println(...t6) +// +type Defer struct { + anInstruction + Call CallCommon + pos token.Pos +} + +// The Send instruction sends X on channel Chan. +// +// Pos() returns the ast.SendStmt.Arrow, if explicit in the source. +// +// Example printed form: +// send t0 <- t1 +// +type Send struct { + anInstruction + Chan, X Value + pos token.Pos +} + +// The Store instruction stores Val at address Addr. +// Stores can be of arbitrary types. +// +// Pos() returns the position of the source-level construct most closely +// associated with the memory store operation. +// Since implicit memory stores are numerous and varied and depend upon +// implementation choices, the details are not specified. +// +// Example printed form: +// *x = y +// +type Store struct { + anInstruction + Addr Value + Val Value + pos token.Pos +} + +// The BlankStore instruction is emitted for assignments to the blank +// identifier. +// +// BlankStore is a pseudo-instruction: it has no dynamic effect. +// +// Pos() returns NoPos. +// +// Example printed form: +// _ = t0 +// +type BlankStore struct { + anInstruction + Val Value +} + +// The MapUpdate instruction updates the association of Map[Key] to +// Value. +// +// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack, +// if explicit in the source. +// +// Example printed form: +// t0[t1] = t2 +// +type MapUpdate struct { + anInstruction + Map Value + Key Value + Value Value + pos token.Pos +} + +// A DebugRef instruction maps a source-level expression Expr to the +// SSA value X that represents the value (!IsAddr) or address (IsAddr) +// of that expression. +// +// DebugRef is a pseudo-instruction: it has no dynamic effect. +// +// Pos() returns Expr.Pos(), the start position of the source-level +// expression. This is not the same as the "designated" token as +// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the +// position of the ("designated") Lparen token. +// +// If Expr is an *ast.Ident denoting a var or func, Object() returns +// the object; though this information can be obtained from the type +// checker, including it here greatly facilitates debugging. +// For non-Ident expressions, Object() returns nil. +// +// DebugRefs are generated only for functions built with debugging +// enabled; see Package.SetDebugMode() and the GlobalDebug builder +// mode flag. +// +// DebugRefs are not emitted for ast.Idents referring to constants or +// predeclared identifiers, since they are trivial and numerous. +// Nor are they emitted for ast.ParenExprs. +// +// (By representing these as instructions, rather than out-of-band, +// consistency is maintained during transformation passes by the +// ordinary SSA renaming machinery.) 
+// +// Example printed form: +// ; *ast.CallExpr @ 102:9 is t5 +// ; var x float64 @ 109:72 is x +// ; address of *ast.CompositeLit @ 216:10 is t0 +// +type DebugRef struct { + anInstruction + Expr ast.Expr // the referring expression (never *ast.ParenExpr) + object types.Object // the identity of the source var/func + IsAddr bool // Expr is addressable and X is the address it denotes + X Value // the value or address of Expr +} + +// Embeddable mix-ins and helpers for common parts of other structs. ----------- + +// register is a mix-in embedded by all SSA values that are also +// instructions, i.e. virtual registers, and provides a uniform +// implementation of most of the Value interface: Value.Name() is a +// numbered register (e.g. "t0"); the other methods are field accessors. +// +// Temporary names are automatically assigned to each register on +// completion of building a function in SSA form. +// +// Clients must not assume that the 'id' value (and the Name() derived +// from it) is unique within a function. As always in this API, +// semantics are determined only by identity; names exist only to +// facilitate debugging. +// +type register struct { + anInstruction + num int // "name" of virtual register, e.g. "t0". Not guaranteed unique. + typ types.Type // type of virtual register + pos token.Pos // position of source expression, or NoPos + referrers []Instruction +} + +// anInstruction is a mix-in embedded by all Instructions. +// It provides the implementations of the Block and setBlock methods. +type anInstruction struct { + block *BasicBlock // the basic block of this instruction +} + +// CallCommon is contained by Go, Defer and Call to hold the +// common parts of a function or method call. +// +// Each CallCommon exists in one of two modes, function call and +// interface method invocation, or "call" and "invoke" for short. +// +// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon +// represents an ordinary function call of the value in Value, +// which may be a *Builtin, a *Function or any other value of kind +// 'func'. +// +// Value may be one of: +// (a) a *Function, indicating a statically dispatched call +// to a package-level function, an anonymous function, or +// a method of a named type. +// (b) a *MakeClosure, indicating an immediately applied +// function literal with free variables. +// (c) a *Builtin, indicating a statically dispatched call +// to a built-in function. +// (d) any other value, indicating a dynamically dispatched +// function call. +// StaticCallee returns the identity of the callee in cases +// (a) and (b), nil otherwise. +// +// Args contains the arguments to the call. If Value is a method, +// Args[0] contains the receiver parameter. +// +// Example printed form: +// t2 = println(t0, t1) +// go t3() +// defer t5(...t6) +// +// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon +// represents a dynamically dispatched call to an interface method. +// In this mode, Value is the interface value and Method is the +// interface's abstract method. Note: an abstract method may be +// shared by multiple interfaces due to embedding; Value.Type() +// provides the specific interface used for this call. +// +// Value is implicitly supplied to the concrete method implementation +// as the receiver parameter; in other words, Args[0] holds not the +// receiver but the first true argument. 
+// +// Example printed form: +// t1 = invoke t0.String() +// go invoke t3.Run(t2) +// defer invoke t4.Handle(...t5) +// +// For all calls to variadic functions (Signature().Variadic()), +// the last element of Args is a slice. +// +type CallCommon struct { + Value Value // receiver (invoke mode) or func value (call mode) + Method *types.Func // abstract method (invoke mode) + Args []Value // actual parameters (in static method call, includes receiver) + pos token.Pos // position of CallExpr.Lparen, iff explicit in source +} + +// IsInvoke returns true if this call has "invoke" (not "call") mode. +func (c *CallCommon) IsInvoke() bool { + return c.Method != nil +} + +func (c *CallCommon) Pos() token.Pos { return c.pos } + +// Signature returns the signature of the called function. +// +// For an "invoke"-mode call, the signature of the interface method is +// returned. +// +// In either "call" or "invoke" mode, if the callee is a method, its +// receiver is represented by sig.Recv, not sig.Params().At(0). +// +func (c *CallCommon) Signature() *types.Signature { + if c.Method != nil { + return c.Method.Type().(*types.Signature) + } + return c.Value.Type().Underlying().(*types.Signature) +} + +// StaticCallee returns the callee if this is a trivially static +// "call"-mode call to a function. +func (c *CallCommon) StaticCallee() *Function { + switch fn := c.Value.(type) { + case *Function: + return fn + case *MakeClosure: + return fn.Fn.(*Function) + } + return nil +} + +// Description returns a description of the mode of this call suitable +// for a user interface, e.g., "static method call". +func (c *CallCommon) Description() string { + switch fn := c.Value.(type) { + case *Builtin: + return "built-in function call" + case *MakeClosure: + return "static function closure call" + case *Function: + if fn.Signature.Recv() != nil { + return "static method call" + } + return "static function call" + } + if c.IsInvoke() { + return "dynamic method call" // ("invoke" mode) + } + return "dynamic function call" +} + +// The CallInstruction interface, implemented by *Go, *Defer and *Call, +// exposes the common parts of function-calling instructions, +// yet provides a way back to the Value defined by *Call alone. 
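
    Combining the two CallCommon modes, a short sketch (describeCall is an invented helper, not part of the vendored package) of how a client might classify a call site:

        package example

        import (
            "fmt"

            "honnef.co/go/tools/ssa"
        )

        // describeCall classifies a call site using the "call"/"invoke"
        // modes of CallCommon.
        func describeCall(call ssa.CallInstruction) string {
            common := call.Common()
            if common.IsInvoke() {
                // Dynamic dispatch through an interface method.
                return fmt.Sprintf("invoke of %s on %s", common.Method.Name(), common.Value.Name())
            }
            if callee := common.StaticCallee(); callee != nil {
                // Statically known target: a *Function or an immediately
                // applied MakeClosure.
                return fmt.Sprintf("static call to %s", callee)
            }
            // Anything else is an indirect call through a func value or builtin.
            return fmt.Sprintf("%s through %s", common.Description(), common.Value.Name())
        }

    StaticCallee covers cases (a) and (b) of "call" mode above: a direct *Function call and an immediately applied closure.
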
+// +type CallInstruction interface { + Instruction + Common() *CallCommon // returns the common parts of the call + Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer) +} + +func (s *Call) Common() *CallCommon { return &s.Call } +func (s *Defer) Common() *CallCommon { return &s.Call } +func (s *Go) Common() *CallCommon { return &s.Call } + +func (s *Call) Value() *Call { return s } +func (s *Defer) Value() *Call { return nil } +func (s *Go) Value() *Call { return nil } + +func (v *Builtin) Type() types.Type { return v.sig } +func (v *Builtin) Name() string { return v.name } +func (*Builtin) Referrers() *[]Instruction { return nil } +func (v *Builtin) Pos() token.Pos { return token.NoPos } +func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } +func (v *Builtin) Parent() *Function { return nil } + +func (v *FreeVar) Type() types.Type { return v.typ } +func (v *FreeVar) Name() string { return v.name } +func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } +func (v *FreeVar) Pos() token.Pos { return v.pos } +func (v *FreeVar) Parent() *Function { return v.parent } + +func (v *Global) Type() types.Type { return v.typ } +func (v *Global) Name() string { return v.name } +func (v *Global) Parent() *Function { return nil } +func (v *Global) Pos() token.Pos { return v.pos } +func (v *Global) Referrers() *[]Instruction { return nil } +func (v *Global) Token() token.Token { return token.VAR } +func (v *Global) Object() types.Object { return v.object } +func (v *Global) String() string { return v.RelString(nil) } +func (v *Global) Package() *Package { return v.Pkg } +func (v *Global) RelString(from *types.Package) string { return relString(v, from) } + +func (v *Function) Name() string { return v.name } +func (v *Function) Type() types.Type { return v.Signature } +func (v *Function) Pos() token.Pos { return v.pos } +func (v *Function) Token() token.Token { return token.FUNC } +func (v *Function) Object() types.Object { return v.object } +func (v *Function) String() string { return v.RelString(nil) } +func (v *Function) Package() *Package { return v.Pkg } +func (v *Function) Parent() *Function { return v.parent } +func (v *Function) Referrers() *[]Instruction { + if v.parent != nil { + return &v.referrers + } + return nil +} + +func (v *Parameter) Type() types.Type { return v.typ } +func (v *Parameter) Name() string { return v.name } +func (v *Parameter) Object() types.Object { return v.object } +func (v *Parameter) Referrers() *[]Instruction { return &v.referrers } +func (v *Parameter) Pos() token.Pos { return v.pos } +func (v *Parameter) Parent() *Function { return v.parent } + +func (v *Alloc) Type() types.Type { return v.typ } +func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } +func (v *Alloc) Pos() token.Pos { return v.pos } + +func (v *register) Type() types.Type { return v.typ } +func (v *register) setType(typ types.Type) { v.typ = typ } +func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) } +func (v *register) setNum(num int) { v.num = num } +func (v *register) Referrers() *[]Instruction { return &v.referrers } +func (v *register) Pos() token.Pos { return v.pos } +func (v *register) setPos(pos token.Pos) { v.pos = pos } + +func (v *anInstruction) Parent() *Function { return v.block.parent } +func (v *anInstruction) Block() *BasicBlock { return v.block } +func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } +func (v *anInstruction) Referrers() *[]Instruction { return nil 
} + +func (t *Type) Name() string { return t.object.Name() } +func (t *Type) Pos() token.Pos { return t.object.Pos() } +func (t *Type) Type() types.Type { return t.object.Type() } +func (t *Type) Token() token.Token { return token.TYPE } +func (t *Type) Object() types.Object { return t.object } +func (t *Type) String() string { return t.RelString(nil) } +func (t *Type) Package() *Package { return t.pkg } +func (t *Type) RelString(from *types.Package) string { return relString(t, from) } + +func (c *NamedConst) Name() string { return c.object.Name() } +func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } +func (c *NamedConst) String() string { return c.RelString(nil) } +func (c *NamedConst) Type() types.Type { return c.object.Type() } +func (c *NamedConst) Token() token.Token { return token.CONST } +func (c *NamedConst) Object() types.Object { return c.object } +func (c *NamedConst) Package() *Package { return c.pkg } +func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } + +// Func returns the package-level function of the specified name, +// or nil if not found. +// +func (p *Package) Func(name string) (f *Function) { + f, _ = p.Members[name].(*Function) + return +} + +// Var returns the package-level variable of the specified name, +// or nil if not found. +// +func (p *Package) Var(name string) (g *Global) { + g, _ = p.Members[name].(*Global) + return +} + +// Const returns the package-level constant of the specified name, +// or nil if not found. +// +func (p *Package) Const(name string) (c *NamedConst) { + c, _ = p.Members[name].(*NamedConst) + return +} + +// Type returns the package-level type of the specified name, +// or nil if not found. +// +func (p *Package) Type(name string) (t *Type) { + t, _ = p.Members[name].(*Type) + return +} + +func (v *Call) Pos() token.Pos { return v.Call.pos } +func (s *Defer) Pos() token.Pos { return s.pos } +func (s *Go) Pos() token.Pos { return s.pos } +func (s *MapUpdate) Pos() token.Pos { return s.pos } +func (s *Panic) Pos() token.Pos { return s.pos } +func (s *Return) Pos() token.Pos { return s.pos } +func (s *Send) Pos() token.Pos { return s.pos } +func (s *Store) Pos() token.Pos { return s.pos } +func (s *BlankStore) Pos() token.Pos { return token.NoPos } +func (s *If) Pos() token.Pos { return token.NoPos } +func (s *Jump) Pos() token.Pos { return token.NoPos } +func (s *RunDefers) Pos() token.Pos { return token.NoPos } +func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } + +// Operands. 
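+//
+// Each Operands method appends pointers to the instruction's operand
+// fields to rands and returns the extended slice, so callers can read
+// or rewrite operands through those pointers. A minimal sketch of the
+// intended use, where instr is assumed to be any Instruction value
+// (this mirrors the visitor in ssautil/visit.go later in this patch):
+//
+//	var buf [10]*Value // avoid allocation in the common case
+//	for _, op := range instr.Operands(buf[:0]) {
+//		if f, ok := (*op).(*Function); ok {
+//			_ = f // follow the referenced function
+//		}
+//	}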
+ +func (v *Alloc) Operands(rands []*Value) []*Value { + return rands +} + +func (v *BinOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Y) +} + +func (c *CallCommon) Operands(rands []*Value) []*Value { + rands = append(rands, &c.Value) + for i := range c.Args { + rands = append(rands, &c.Args[i]) + } + return rands +} + +func (s *Go) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Call) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Defer) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (v *ChangeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *ChangeType) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Convert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *DebugRef) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Extract) Operands(rands []*Value) []*Value { + return append(rands, &v.Tuple) +} + +func (v *Field) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *FieldAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *If) Operands(rands []*Value) []*Value { + return append(rands, &s.Cond) +} + +func (v *Index) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *IndexAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (*Jump) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Lookup) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *MakeChan) Operands(rands []*Value) []*Value { + return append(rands, &v.Size) +} + +func (v *MakeClosure) Operands(rands []*Value) []*Value { + rands = append(rands, &v.Fn) + for i := range v.Bindings { + rands = append(rands, &v.Bindings[i]) + } + return rands +} + +func (v *MakeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *MakeMap) Operands(rands []*Value) []*Value { + return append(rands, &v.Reserve) +} + +func (v *MakeSlice) Operands(rands []*Value) []*Value { + return append(rands, &v.Len, &v.Cap) +} + +func (v *MapUpdate) Operands(rands []*Value) []*Value { + return append(rands, &v.Map, &v.Key, &v.Value) +} + +func (v *Next) Operands(rands []*Value) []*Value { + return append(rands, &v.Iter) +} + +func (s *Panic) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Sigma) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Phi) Operands(rands []*Value) []*Value { + for i := range v.Edges { + rands = append(rands, &v.Edges[i]) + } + return rands +} + +func (v *Range) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *Return) Operands(rands []*Value) []*Value { + for i := range s.Results { + rands = append(rands, &s.Results[i]) + } + return rands +} + +func (*RunDefers) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Select) Operands(rands []*Value) []*Value { + for i := range v.States { + rands = append(rands, &v.States[i].Chan, &v.States[i].Send) + } + return rands +} + +func (s *Send) Operands(rands []*Value) []*Value { + return append(rands, &s.Chan, &s.X) +} + +func (v *Slice) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Low, &v.High, &v.Max) +} + +func (s *Store) Operands(rands []*Value) []*Value { + return append(rands, &s.Addr, 
&s.Val) +} + +func (s *BlankStore) Operands(rands []*Value) []*Value { + return append(rands, &s.Val) +} + +func (v *TypeAssert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *UnOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +// Non-Instruction Values: +func (v *Builtin) Operands(rands []*Value) []*Value { return rands } +func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } +func (v *Const) Operands(rands []*Value) []*Value { return rands } +func (v *Function) Operands(rands []*Value) []*Value { return rands } +func (v *Global) Operands(rands []*Value) []*Value { return rands } +func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/load.go b/vendor/honnef.co/go/tools/ssa/ssautil/load.go new file mode 100644 index 0000000..2fc108f --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/ssautil/load.go @@ -0,0 +1,97 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssautil + +// This file defines utility functions for constructing programs in SSA form. + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/loader" + "honnef.co/go/tools/ssa" +) + +// CreateProgram returns a new program in SSA form, given a program +// loaded from source. An SSA package is created for each transitively +// error-free package of lprog. +// +// Code for bodies of functions is not built until Build is called +// on the result. +// +// mode controls diagnostics and checking during SSA construction. +// +func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { + prog := ssa.NewProgram(lprog.Fset, mode) + + for _, info := range lprog.AllPackages { + if info.TransitivelyErrorFree { + prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) + } + } + + return prog +} + +// BuildPackage builds an SSA program with IR for a single package. +// +// It populates pkg by type-checking the specified file ASTs. All +// dependencies are loaded using the importer specified by tc, which +// typically loads compiler export data; SSA code cannot be built for +// those packages. BuildPackage then constructs an ssa.Program with all +// dependency packages created, and builds and returns the SSA package +// corresponding to pkg. +// +// The caller must have set pkg.Path() to the import path. +// +// The operation fails if there were any type-checking or import errors. +// +// See ../ssa/example_test.go for an example. +// +func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) { + if fset == nil { + panic("no token.FileSet") + } + if pkg.Path() == "" { + panic("package has no import path") + } + + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { + return nil, nil, err + } + + prog := ssa.NewProgram(fset, mode) + + // Create SSA packages for all imports. + // Order is not significant. 
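+	// Dependency packages are created with nil files and info, so they
+	// carry declarations only; SSA code is built solely for pkg below.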
+ created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pkg.Imports()) + + // Create and build the primary package. + ssapkg := prog.CreatePackage(pkg, files, info, false) + ssapkg.Build() + return ssapkg, info, nil +} diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/switch.go b/vendor/honnef.co/go/tools/ssa/ssautil/switch.go new file mode 100644 index 0000000..ef698b3 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/ssautil/switch.go @@ -0,0 +1,236 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssautil + +// This file implements discovery of switch and type-switch constructs +// from low-level control flow. +// +// Many techniques exist for compiling a high-level switch with +// constant cases to efficient machine code. The optimal choice will +// depend on the data type, the specific case values, the code in the +// body of each case, and the hardware. +// Some examples: +// - a lookup table (for a switch that maps constants to constants) +// - a computed goto +// - a binary tree +// - a perfect hash +// - a two-level switch (to partition constant strings by their first byte). + +import ( + "bytes" + "fmt" + "go/token" + "go/types" + + "honnef.co/go/tools/ssa" +) + +// A ConstCase represents a single constant comparison. +// It is part of a Switch. +type ConstCase struct { + Block *ssa.BasicBlock // block performing the comparison + Body *ssa.BasicBlock // body of the case + Value *ssa.Const // case comparand +} + +// A TypeCase represents a single type assertion. +// It is part of a Switch. +type TypeCase struct { + Block *ssa.BasicBlock // block performing the type assert + Body *ssa.BasicBlock // body of the case + Type types.Type // case type + Binding ssa.Value // value bound by this case +} + +// A Switch is a logical high-level control flow operation +// (a multiway branch) discovered by analysis of a CFG containing +// only if/else chains. It is not part of the ssa.Instruction set. +// +// One of ConstCases and TypeCases has length >= 2; +// the other is nil. +// +// In a value switch, the list of cases may contain duplicate constants. +// A type switch may contain duplicate types, or types assignable +// to an interface type also in the list. +// TODO(adonovan): eliminate such duplicates. +// +type Switch struct { + Start *ssa.BasicBlock // block containing start of if/else chain + X ssa.Value // the switch operand + ConstCases []ConstCase // ordered list of constant comparisons + TypeCases []TypeCase // ordered list of type assertions + Default *ssa.BasicBlock // successor if all comparisons fail +} + +func (sw *Switch) String() string { + // We represent each block by the String() of its + // first Instruction, e.g. "print(42:int)". 
+ var buf bytes.Buffer + if sw.ConstCases != nil { + fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) + for _, c := range sw.ConstCases { + fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0]) + } + } else { + fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) + for _, c := range sw.TypeCases { + fmt.Fprintf(&buf, "case %s %s: %s\n", + c.Binding.Name(), c.Type, c.Body.Instrs[0]) + } + } + if sw.Default != nil { + fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) + } + fmt.Fprintf(&buf, "}") + return buf.String() +} + +// Switches examines the control-flow graph of fn and returns the +// set of inferred value and type switches. A value switch tests an +// ssa.Value for equality against two or more compile-time constant +// values. Switches involving link-time constants (addresses) are +// ignored. A type switch type-asserts an ssa.Value against two or +// more types. +// +// The switches are returned in dominance order. +// +// The resulting switches do not necessarily correspond to uses of the +// 'switch' keyword in the source: for example, a single source-level +// switch statement with non-constant cases may result in zero, one or +// many Switches, one per plural sequence of constant cases. +// Switches may even be inferred from if/else- or goto-based control flow. +// (In general, the control flow constructs of the source program +// cannot be faithfully reproduced from the SSA representation.) +// +func Switches(fn *ssa.Function) []Switch { + // Traverse the CFG in dominance order, so we don't + // enter an if/else-chain in the middle. + var switches []Switch + seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet + for _, b := range fn.DomPreorder() { + if x, k := isComparisonBlock(b); x != nil { + // Block b starts a switch. + sw := Switch{Start: b, X: x} + valueSwitch(&sw, k, seen) + if len(sw.ConstCases) > 1 { + switches = append(switches, sw) + } + } + + if y, x, T := isTypeAssertBlock(b); y != nil { + // Block b starts a type switch. + sw := Switch{Start: b, X: x} + typeSwitch(&sw, y, T, seen) + if len(sw.TypeCases) > 1 { + switches = append(switches, sw) + } + } + } + return switches +} + +func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.ConstCases = append(sw.ConstCases, ConstCase{ + Block: b, + Body: b.Succs[0], + Value: k, + }) + b = b.Succs[1] + if len(b.Instrs) > 2 { + // Block b contains not just 'if x == k', + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. + break + } + x, k = isComparisonBlock(b) + } + sw.Default = b +} + +func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.TypeCases = append(sw.TypeCases, TypeCase{ + Block: b, + Body: b.Succs[0], + Type: T, + Binding: y, + }) + b = b.Succs[1] + if len(b.Instrs) > 4 { + // Block b contains not just + // {TypeAssert; Extract #0; Extract #1; If} + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. 
+ break + } + y, x, T = isTypeAssertBlock(b) + } + sw.Default = b +} + +// isComparisonBlock returns the operands (v, k) if a block ends with +// a comparison v==k, where k is a compile-time constant. +// +func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) { + if n := len(b.Instrs); n >= 2 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { + if k, ok := binop.Y.(*ssa.Const); ok { + return binop.X, k + } + if k, ok := binop.X.(*ssa.Const); ok { + return binop.Y, k + } + } + } + } + return +} + +// isTypeAssertBlock returns the operands (y, x, T) if a block ends with +// a type assertion "if y, ok := x.(T); ok {". +// +func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) { + if n := len(b.Instrs); n >= 4 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 { + if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b { + // hack: relies upon instruction ordering. + if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok { + return ext0, ta.X, ta.AssertedType + } + } + } + } + } + return +} diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/visit.go b/vendor/honnef.co/go/tools/ssa/ssautil/visit.go new file mode 100644 index 0000000..5c14845 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/ssautil/visit.go @@ -0,0 +1,79 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil // import "honnef.co/go/tools/ssa/ssautil" + +import "honnef.co/go/tools/ssa" + +// This file defines utilities for visiting the SSA representation of +// a Program. +// +// TODO(adonovan): test coverage. + +// AllFunctions finds and returns the set of functions potentially +// needed by program prog, as determined by a simple linker-style +// reachability algorithm starting from the members and method-sets of +// each package. The result may include anonymous functions and +// synthetic wrappers. +// +// Precondition: all packages are built. +// +func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool { + visit := visitor{ + prog: prog, + seen: make(map[*ssa.Function]bool), + } + visit.program() + return visit.seen +} + +type visitor struct { + prog *ssa.Program + seen map[*ssa.Function]bool +} + +func (visit *visitor) program() { + for _, pkg := range visit.prog.AllPackages() { + for _, mem := range pkg.Members { + if fn, ok := mem.(*ssa.Function); ok { + visit.function(fn) + } + } + } + for _, T := range visit.prog.RuntimeTypes() { + mset := visit.prog.MethodSets.MethodSet(T) + for i, n := 0, mset.Len(); i < n; i++ { + visit.function(visit.prog.MethodValue(mset.At(i))) + } + } +} + +func (visit *visitor) function(fn *ssa.Function) { + if !visit.seen[fn] { + visit.seen[fn] = true + var buf [10]*ssa.Value // avoid alloc in common case + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + for _, op := range instr.Operands(buf[:0]) { + if fn, ok := (*op).(*ssa.Function); ok { + visit.function(fn) + } + } + } + } + } +} + +// MainPackages returns the subset of the specified packages +// named "main" that define a main function. +// The result may include synthetic "testmain" packages. 
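+//
+// A minimal usage sketch (assuming prog is an already-built *ssa.Program
+// from this package):
+//
+//	for _, main := range MainPackages(prog.AllPackages()) {
+//		_ = main.Func("main") // non-nil for every package returned here
+//	}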
+func MainPackages(pkgs []*ssa.Package) []*ssa.Package { + var mains []*ssa.Package + for _, pkg := range pkgs { + if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { + mains = append(mains, pkg) + } + } + return mains +} diff --git a/vendor/honnef.co/go/tools/ssa/testmain.go b/vendor/honnef.co/go/tools/ssa/testmain.go new file mode 100644 index 0000000..2b89724 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/testmain.go @@ -0,0 +1,266 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// CreateTestMainPackage synthesizes a main package that runs all the +// tests of the supplied packages. +// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing. +// +// TODO(adonovan): this file no longer needs to live in the ssa package. +// Move it to ssautil. + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/types" + "log" + "os" + "strings" + "text/template" +) + +// FindTests returns the Test, Benchmark, and Example functions +// (as defined by "go test") defined in the specified package, +// and its TestMain function, if any. +func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) { + prog := pkg.Prog + + // The first two of these may be nil: if the program doesn't import "testing", + // it can't contain any tests, but it may yet contain Examples. + var testSig *types.Signature // func(*testing.T) + var benchmarkSig *types.Signature // func(*testing.B) + var exampleSig = types.NewSignature(nil, nil, nil, false) // func() + + // Obtain the types from the parameters of testing.MainStart. + if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { + mainStart := testingPkg.Func("MainStart") + params := mainStart.Signature.Params() + testSig = funcField(params.At(1).Type()) + benchmarkSig = funcField(params.At(2).Type()) + + // Does the package define this function? + // func TestMain(*testing.M) + if f := pkg.Func("TestMain"); f != nil { + sig := f.Type().(*types.Signature) + starM := mainStart.Signature.Results().At(0).Type() // *testing.M + if sig.Results().Len() == 0 && + sig.Params().Len() == 1 && + types.Identical(sig.Params().At(0).Type(), starM) { + main = f + } + } + } + + // TODO(adonovan): use a stable order, e.g. lexical. + for _, mem := range pkg.Members { + if f, ok := mem.(*Function); ok && + ast.IsExported(f.Name()) && + strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") { + + switch { + case testSig != nil && isTestSig(f, "Test", testSig): + tests = append(tests, f) + case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig): + benchmarks = append(benchmarks, f) + case isTestSig(f, "Example", exampleSig): + examples = append(examples, f) + default: + continue + } + } + } + return +} + +// Like isTest, but checks the signature too. +func isTestSig(f *Function, prefix string, sig *types.Signature) bool { + return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig) +} + +// Given the type of one of the three slice parameters of testing.Main, +// returns the function type. +func funcField(slice types.Type) *types.Signature { + return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature) +} + +// isTest tells whether name looks like a test (or benchmark, according to prefix). +// It is a Test (say) if there is a character after Test that is not a lower-case letter. 
+// We don't want TesticularCancer. +// Plundered from $GOROOT/src/cmd/go/test.go +func isTest(name, prefix string) bool { + if !strings.HasPrefix(name, prefix) { + return false + } + if len(name) == len(prefix) { // "Test" is ok + return true + } + return ast.IsExported(name[len(prefix):]) +} + +// CreateTestMainPackage creates and returns a synthetic "testmain" +// package for the specified package if it defines tests, benchmarks or +// executable examples, or nil otherwise. The new package is named +// "main" and provides a function named "main" that runs the tests, +// similar to the one that would be created by the 'go test' tool. +// +// Subsequent calls to prog.AllPackages include the new package. +// The package pkg must belong to the program prog. +func (prog *Program) CreateTestMainPackage(pkg *Package) *Package { + if pkg.Prog != prog { + log.Fatal("Package does not belong to Program") + } + + // Template data + var data struct { + Pkg *Package + Tests, Benchmarks, Examples []*Function + Main *Function + Go18 bool + } + data.Pkg = pkg + + // Enumerate tests. + data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg) + if data.Main == nil && + data.Tests == nil && data.Benchmarks == nil && data.Examples == nil { + return nil + } + + // Synthesize source for testmain package. + path := pkg.Pkg.Path() + "$testmain" + tmpl := testmainTmpl + if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { + // In Go 1.8, testing.MainStart's first argument is an interface, not a func. + data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type()) + } else { + // The program does not import "testing", but FindTests + // returned non-nil, which must mean there were Examples + // but no Test, Benchmark, or TestMain functions. + + // We'll simply call them from testmain.main; this will + // ensure they don't panic, but will not check any + // "Output:" comments. + // (We should not execute an Example that has no + // "Output:" comment, but it's impossible to tell here.) + tmpl = examplesOnlyTmpl + } + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + log.Fatalf("internal error expanding template for %s: %v", path, err) + } + if false { // debugging + fmt.Fprintln(os.Stderr, buf.String()) + } + + // Parse and type-check the testmain package. + f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0)) + if err != nil { + log.Fatalf("internal error parsing %s: %v", path, err) + } + conf := types.Config{ + DisableUnusedImportCheck: true, + Importer: importer{pkg}, + } + files := []*ast.File{f} + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + testmainPkg, err := conf.Check(path, prog.Fset, files, info) + if err != nil { + log.Fatalf("internal error type-checking %s: %v", path, err) + } + + // Create and build SSA code. + testmain := prog.CreatePackage(testmainPkg, files, info, false) + testmain.SetDebugMode(false) + testmain.Build() + testmain.Func("main").Synthetic = "test main function" + testmain.Func("init").Synthetic = "package initializer" + return testmain +} + +// An implementation of types.Importer for an already loaded SSA program. 
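+// Import only resolves packages already present in the program, plus
+// the package under test itself; any other path reports an error, which
+// should be unreachable for the generated testmain source.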
+type importer struct { + pkg *Package // package under test; may be non-importable +} + +func (imp importer) Import(path string) (*types.Package, error) { + if p := imp.pkg.Prog.ImportedPackage(path); p != nil { + return p.Pkg, nil + } + if path == imp.pkg.Pkg.Path() { + return imp.pkg.Pkg, nil + } + return nil, fmt.Errorf("not found") // can't happen +} + +var testmainTmpl = template.Must(template.New("testmain").Parse(` +package main + +import "io" +import "os" +import "testing" +import p {{printf "%q" .Pkg.Pkg.Path}} + +{{if .Go18}} +type deps struct{} + +func (deps) MatchString(pat, str string) (bool, error) { return true, nil } +func (deps) StartCPUProfile(io.Writer) error { return nil } +func (deps) StopCPUProfile() {} +func (deps) WriteHeapProfile(io.Writer) error { return nil } +func (deps) WriteProfileTo(string, io.Writer, int) error { return nil } + +var match deps +{{else}} +func match(_, _ string) (bool, error) { return true, nil } +{{end}} + +func main() { + tests := []testing.InternalTest{ +{{range .Tests}} + { {{printf "%q" .Name}}, p.{{.Name}} }, +{{end}} + } + benchmarks := []testing.InternalBenchmark{ +{{range .Benchmarks}} + { {{printf "%q" .Name}}, p.{{.Name}} }, +{{end}} + } + examples := []testing.InternalExample{ +{{range .Examples}} + {Name: {{printf "%q" .Name}}, F: p.{{.Name}}}, +{{end}} + } + m := testing.MainStart(match, tests, benchmarks, examples) +{{with .Main}} + p.{{.Name}}(m) +{{else}} + os.Exit(m.Run()) +{{end}} +} + +`)) + +var examplesOnlyTmpl = template.Must(template.New("examples").Parse(` +package main + +import p {{printf "%q" .Pkg.Pkg.Path}} + +func main() { +{{range .Examples}} + p.{{.Name}}() +{{end}} +} +`)) diff --git a/vendor/honnef.co/go/tools/ssa/util.go b/vendor/honnef.co/go/tools/ssa/util.go new file mode 100644 index 0000000..317a013 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/util.go @@ -0,0 +1,121 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This file defines a number of miscellaneous utility functions. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + + "golang.org/x/tools/go/ast/astutil" +) + +//// AST utilities + +func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } + +// isBlankIdent returns true iff e is an Ident with name "_". +// They have no associated types.Object, and thus no type. +// +func isBlankIdent(e ast.Expr) bool { + id, ok := e.(*ast.Ident) + return ok && id.Name == "_" +} + +//// Type utilities. Some of these belong in go/types. + +// isPointer returns true for types whose underlying type is a pointer. +func isPointer(typ types.Type) bool { + _, ok := typ.Underlying().(*types.Pointer) + return ok +} + +func isInterface(T types.Type) bool { return types.IsInterface(T) } + +// deref returns a pointer's element type; otherwise it returns typ. +func deref(typ types.Type) types.Type { + if p, ok := typ.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return typ +} + +// recvType returns the receiver type of method obj. +func recvType(obj *types.Func) types.Type { + return obj.Type().(*types.Signature).Recv().Type() +} + +// DefaultType returns the default "typed" type for an "untyped" type; +// it returns the incoming type for all other types. The default type +// for untyped nil is untyped nil. +// +// Exported to ssa/interp. +// +// TODO(gri): this is a copy of go/types.defaultType; export that function. 
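+//
+// For example, the type of an untyped float constant such as 1.0 maps
+// to float64, and untyped bool maps to bool.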
+// +func DefaultType(typ types.Type) types.Type { + if t, ok := typ.(*types.Basic); ok { + k := t.Kind() + switch k { + case types.UntypedBool: + k = types.Bool + case types.UntypedInt: + k = types.Int + case types.UntypedRune: + k = types.Rune + case types.UntypedFloat: + k = types.Float64 + case types.UntypedComplex: + k = types.Complex128 + case types.UntypedString: + k = types.String + } + typ = types.Typ[k] + } + return typ +} + +// logStack prints the formatted "start" message to stderr and +// returns a closure that prints the corresponding "end" message. +// Call using 'defer logStack(...)()' to show builder stack on panic. +// Don't forget trailing parens! +// +func logStack(format string, args ...interface{}) func() { + msg := fmt.Sprintf(format, args...) + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, "\n") + return func() { + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, " end\n") + } +} + +// newVar creates a 'var' for use in a types.Tuple. +func newVar(name string, typ types.Type) *types.Var { + return types.NewParam(token.NoPos, nil, name, typ) +} + +// anonVar creates an anonymous 'var' for use in a types.Tuple. +func anonVar(typ types.Type) *types.Var { + return newVar("", typ) +} + +var lenResults = types.NewTuple(anonVar(tInt)) + +// makeLen returns the len builtin specialized to type func(T)int. +func makeLen(T types.Type) *Builtin { + lenParams := types.NewTuple(anonVar(T)) + return &Builtin{ + name: "len", + sig: types.NewSignature(nil, lenParams, lenResults, false), + } +} diff --git a/vendor/honnef.co/go/tools/ssa/wrappers.go b/vendor/honnef.co/go/tools/ssa/wrappers.go new file mode 100644 index 0000000..6ca01ab --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/wrappers.go @@ -0,0 +1,296 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ssa + +// This file defines synthesis of Functions that delegate to declared +// methods; they come in three kinds: +// +// (1) wrappers: methods that wrap declared methods, performing +// implicit pointer indirections and embedded field selections. +// +// (2) thunks: funcs that wrap declared methods. Like wrappers, +// thunks perform indirections and field selections. The thunk's +// first parameter is used as the receiver for the method call. +// +// (3) bounds: funcs that wrap declared methods. The bound's sole +// free variable, supplied by a closure, is used as the receiver +// for the method call. No indirections or field selections are +// performed since they can be done before the call. + +import ( + "fmt" + + "go/types" +) + +// -- wrappers ----------------------------------------------------------- + +// makeWrapper returns a synthetic method that delegates to the +// declared method denoted by meth.Obj(), first performing any +// necessary pointer indirections or field selections implied by meth. +// +// The resulting method's receiver type is meth.Recv(). +// +// This function is versatile but quite subtle! Consider the +// following axes of variation when making changes: +// - optional receiver indirection +// - optional implicit field selections +// - meth.Obj() may denote a concrete or an interface method +// - the result may be a thunk or a wrapper. 
+// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func makeWrapper(prog *Program, sel *types.Selection) *Function { + obj := sel.Obj().(*types.Func) // the declared function + sig := sel.Type().(*types.Signature) // type of this wrapper + + var recv *types.Var // wrapper's receiver or thunk's params[0] + name := obj.Name() + var description string + var start int // first regular param + if sel.Kind() == types.MethodExpr { + name += "$thunk" + description = "thunk" + recv = sig.Params().At(0) + start = 1 + } else { + description = "wrapper" + recv = sig.Recv() + } + + description = fmt.Sprintf("%s for %s", description, sel.Obj()) + if prog.mode&LogSource != 0 { + defer logStack("make %s to (%s)", description, recv.Type())() + } + fn := &Function{ + name: name, + method: sel, + object: obj, + Signature: sig, + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + } + fn.startBody() + fn.addSpilledParam(recv) + createParams(fn, start) + + indices := sel.Index() + + var v Value = fn.Locals[0] // spilled receiver + if isPointer(sel.Recv()) { + v = emitLoad(fn, v) + + // For simple indirection wrappers, perform an informative nil-check: + // "value method (T).f called using nil *T pointer" + if len(indices) == 1 && !isPointer(recvType(obj)) { + var c Call + c.Call.Value = &Builtin{ + name: "ssa:wrapnilchk", + sig: types.NewSignature(nil, + types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)), + types.NewTuple(anonVar(sel.Recv())), false), + } + c.Call.Args = []Value{ + v, + stringConst(deref(sel.Recv()).String()), + stringConst(sel.Obj().Name()), + } + c.setType(v.Type()) + v = fn.emit(&c) + } + } + + // Invariant: v is a pointer, either + // value of *A receiver param, or + // address of A spilled receiver. + + // We use pointer arithmetic (FieldAddr possibly followed by + // Load) in preference to value extraction (Field possibly + // preceded by Load). + + v = emitImplicitSelections(fn, v, indices[:len(indices)-1]) + + // Invariant: v is a pointer, either + // value of implicit *C field, or + // address of implicit C field. + + var c Call + if r := recvType(obj); !isInterface(r) { // concrete method + if !isPointer(r) { + v = emitLoad(fn, v) + } + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = append(c.Call.Args, v) + } else { + c.Call.Method = obj + c.Call.Value = emitLoad(fn, v) + } + for _, arg := range fn.Params[1:] { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() + return fn +} + +// createParams creates parameters for wrapper method fn based on its +// Signature.Params, which do not include the receiver. +// start is the index of the first regular parameter to use. +// +func createParams(fn *Function, start int) { + var last *Parameter + tparams := fn.Signature.Params() + for i, n := start, tparams.Len(); i < n; i++ { + last = fn.addParamObj(tparams.At(i)) + } + if fn.Signature.Variadic() { + last.typ = types.NewSlice(last.typ) + } +} + +// -- bounds ----------------------------------------------------------- + +// makeBound returns a bound method wrapper (or "bound"), a synthetic +// function that delegates to a concrete or interface method denoted +// by obj. The resulting function has no receiver, but has one free +// variable which will be used as the method's receiver in the +// tail-call. +// +// Use MakeClosure with such a wrapper to construct a bound method +// closure. 
e.g.: +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// var t T +// f := t.meth +// f() // calls t.meth() +// +// f is a closure of a synthetic wrapper defined as if by: +// +// f := func() { return t.meth() } +// +// Unlike makeWrapper, makeBound need perform no indirection or field +// selections because that can be done before the closure is +// constructed. +// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeBound(prog *Program, obj *types.Func) *Function { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + fn, ok := prog.bounds[obj] + if !ok { + description := fmt.Sprintf("bound method wrapper for %s", obj) + if prog.mode&LogSource != 0 { + defer logStack("%s", description)() + } + fn = &Function{ + name: obj.Name() + "$bound", + object: obj, + Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + } + + fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn} + fn.FreeVars = []*FreeVar{fv} + fn.startBody() + createParams(fn, 0) + var c Call + + if !isInterface(recvType(obj)) { // concrete + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = []Value{fv} + } else { + c.Call.Value = fv + c.Call.Method = obj + } + for _, arg := range fn.Params { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() + + prog.bounds[obj] = fn + } + return fn +} + +// -- thunks ----------------------------------------------------------- + +// makeThunk returns a thunk, a synthetic function that delegates to a +// concrete or interface method denoted by sel.Obj(). The resulting +// function has no receiver, but has an additional (first) regular +// parameter. +// +// Precondition: sel.Kind() == types.MethodExpr. +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// f := T.meth +// var t T +// f(t) // calls t.meth() +// +// f is a synthetic wrapper defined as if by: +// +// f := func(t T) { return t.meth() } +// +// TODO(adonovan): opt: currently the stub is created even when used +// directly in a function call: C.f(i, 0). This is less efficient +// than inlining the stub. +// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeThunk(prog *Program, sel *types.Selection) *Function { + if sel.Kind() != types.MethodExpr { + panic(sel) + } + + key := selectionKey{ + kind: sel.Kind(), + recv: sel.Recv(), + obj: sel.Obj(), + index: fmt.Sprint(sel.Index()), + indirect: sel.Indirect(), + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + // Canonicalize key.recv to avoid constructing duplicate thunks. + canonRecv, ok := prog.canon.At(key.recv).(types.Type) + if !ok { + canonRecv = key.recv + prog.canon.Set(key.recv, canonRecv) + } + key.recv = canonRecv + + fn, ok := prog.thunks[key] + if !ok { + fn = makeWrapper(prog, sel) + if fn.Signature.Recv() != nil { + panic(fn) // unexpected receiver + } + prog.thunks[key] = fn + } + return fn +} + +func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { + return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) +} + +// selectionKey is like types.Selection but a usable map key. 
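+// The index path is stored as a string (via fmt.Sprint) because slices
+// are not comparable and therefore cannot be part of a map key.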
+type selectionKey struct { + kind types.SelectionKind + recv types.Type // canonicalized via Program.canon + obj types.Object + index string + indirect bool +} diff --git a/vendor/honnef.co/go/tools/ssa/write.go b/vendor/honnef.co/go/tools/ssa/write.go new file mode 100644 index 0000000..89761a1 --- /dev/null +++ b/vendor/honnef.co/go/tools/ssa/write.go @@ -0,0 +1,5 @@ +package ssa + +func NewJump(parent *BasicBlock) *Jump { + return &Jump{anInstruction{parent}} +} diff --git a/vendor/honnef.co/go/tools/staticcheck/buildtag.go b/vendor/honnef.co/go/tools/staticcheck/buildtag.go new file mode 100644 index 0000000..27c31c0 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/buildtag.go @@ -0,0 +1,21 @@ +package staticcheck + +import ( + "go/ast" + "strings" + + "honnef.co/go/tools/lint" +) + +func buildTags(f *ast.File) [][]string { + var out [][]string + for _, line := range strings.Split(lint.Preamble(f), "\n") { + if !strings.HasPrefix(line, "+build ") { + continue + } + line = strings.TrimSpace(strings.TrimPrefix(line, "+build ")) + fields := strings.Fields(line) + out = append(out, fields) + } + return out +} diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go new file mode 100644 index 0000000..fdc395e --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/lint.go @@ -0,0 +1,2738 @@ +// Package staticcheck contains a linter for Go source code. +package staticcheck // import "honnef.co/go/tools/staticcheck" + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + htmltemplate "html/template" + "net/http" + "regexp" + "sort" + "strconv" + "strings" + "sync" + texttemplate "text/template" + + "honnef.co/go/tools/deprecated" + "honnef.co/go/tools/functions" + "honnef.co/go/tools/internal/sharedcheck" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/ssa" + "honnef.co/go/tools/staticcheck/vrp" + + "golang.org/x/tools/go/ast/astutil" +) + +func validRegexp(call *Call) { + arg := call.Args[0] + err := ValidateRegexp(arg.Value) + if err != nil { + arg.Invalid(err.Error()) + } +} + +type runeSlice []rune + +func (rs runeSlice) Len() int { return len(rs) } +func (rs runeSlice) Less(i int, j int) bool { return rs[i] < rs[j] } +func (rs runeSlice) Swap(i int, j int) { rs[i], rs[j] = rs[j], rs[i] } + +func utf8Cutset(call *Call) { + arg := call.Args[1] + if InvalidUTF8(arg.Value) { + arg.Invalid(MsgInvalidUTF8) + } +} + +func uniqueCutset(call *Call) { + arg := call.Args[1] + if !UniqueStringCutset(arg.Value) { + arg.Invalid(MsgNonUniqueCutset) + } +} + +func unmarshalPointer(name string, arg int) CallCheck { + return func(call *Call) { + if !Pointer(call.Args[arg].Value) { + call.Args[arg].Invalid(fmt.Sprintf("%s expects to unmarshal into a pointer, but the provided value is not a pointer", name)) + } + } +} + +func pointlessIntMath(call *Call) { + if ConvertedFromInt(call.Args[0].Value) { + call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", lint.CallName(call.Instr.Common()))) + } +} + +func checkValidHostPort(arg int) CallCheck { + return func(call *Call) { + if !ValidHostPort(call.Args[arg].Value) { + call.Args[arg].Invalid(MsgInvalidHostPort) + } + } +} + +var ( + checkRegexpRules = map[string]CallCheck{ + "regexp.MustCompile": validRegexp, + "regexp.Compile": validRegexp, + } + + checkTimeParseRules = map[string]CallCheck{ + "time.Parse": func(call *Call) { + arg := call.Args[0] + err := ValidateTimeLayout(arg.Value) + if err != nil { + arg.Invalid(err.Error()) + } + }, + } + + 
checkEncodingBinaryRules = map[string]CallCheck{ + "encoding/binary.Write": func(call *Call) { + arg := call.Args[2] + if !CanBinaryMarshal(call.Job, arg.Value) { + arg.Invalid(fmt.Sprintf("value of type %s cannot be used with binary.Write", arg.Value.Value.Type())) + } + }, + } + + checkURLsRules = map[string]CallCheck{ + "net/url.Parse": func(call *Call) { + arg := call.Args[0] + err := ValidateURL(arg.Value) + if err != nil { + arg.Invalid(err.Error()) + } + }, + } + + checkSyncPoolValueRules = map[string]CallCheck{ + "(*sync.Pool).Put": func(call *Call) { + arg := call.Args[0] + typ := arg.Value.Value.Type() + if !lint.IsPointerLike(typ) { + arg.Invalid("argument should be pointer-like to avoid allocations") + } + }, + } + + checkRegexpFindAllRules = map[string]CallCheck{ + "(*regexp.Regexp).FindAll": RepeatZeroTimes("a FindAll method", 1), + "(*regexp.Regexp).FindAllIndex": RepeatZeroTimes("a FindAll method", 1), + "(*regexp.Regexp).FindAllString": RepeatZeroTimes("a FindAll method", 1), + "(*regexp.Regexp).FindAllStringIndex": RepeatZeroTimes("a FindAll method", 1), + "(*regexp.Regexp).FindAllStringSubmatch": RepeatZeroTimes("a FindAll method", 1), + "(*regexp.Regexp).FindAllStringSubmatchIndex": RepeatZeroTimes("a FindAll method", 1), + "(*regexp.Regexp).FindAllSubmatch": RepeatZeroTimes("a FindAll method", 1), + "(*regexp.Regexp).FindAllSubmatchIndex": RepeatZeroTimes("a FindAll method", 1), + } + + checkUTF8CutsetRules = map[string]CallCheck{ + "strings.IndexAny": utf8Cutset, + "strings.LastIndexAny": utf8Cutset, + "strings.ContainsAny": utf8Cutset, + "strings.Trim": utf8Cutset, + "strings.TrimLeft": utf8Cutset, + "strings.TrimRight": utf8Cutset, + } + + checkUniqueCutsetRules = map[string]CallCheck{ + "strings.Trim": uniqueCutset, + "strings.TrimLeft": uniqueCutset, + "strings.TrimRight": uniqueCutset, + } + + checkUnmarshalPointerRules = map[string]CallCheck{ + "encoding/xml.Unmarshal": unmarshalPointer("xml.Unmarshal", 1), + "(*encoding/xml.Decoder).Decode": unmarshalPointer("Decode", 0), + "encoding/json.Unmarshal": unmarshalPointer("json.Unmarshal", 1), + "(*encoding/json.Decoder).Decode": unmarshalPointer("Decode", 0), + } + + checkUnbufferedSignalChanRules = map[string]CallCheck{ + "os/signal.Notify": func(call *Call) { + arg := call.Args[0] + if UnbufferedChannel(arg.Value) { + arg.Invalid("the channel used with signal.Notify should be buffered") + } + }, + } + + checkMathIntRules = map[string]CallCheck{ + "math.Ceil": pointlessIntMath, + "math.Floor": pointlessIntMath, + "math.IsNaN": pointlessIntMath, + "math.Trunc": pointlessIntMath, + "math.IsInf": pointlessIntMath, + } + + checkStringsReplaceZeroRules = map[string]CallCheck{ + "strings.Replace": RepeatZeroTimes("strings.Replace", 3), + "bytes.Replace": RepeatZeroTimes("bytes.Replace", 3), + } + + checkListenAddressRules = map[string]CallCheck{ + "net/http.ListenAndServe": checkValidHostPort(0), + "net/http.ListenAndServeTLS": checkValidHostPort(0), + } + + checkBytesEqualIPRules = map[string]CallCheck{ + "bytes.Equal": func(call *Call) { + if ConvertedFrom(call.Args[0].Value, "net.IP") && ConvertedFrom(call.Args[1].Value, "net.IP") { + call.Invalid("use net.IP.Equal to compare net.IPs, not bytes.Equal") + } + }, + } + + checkRegexpMatchLoopRules = map[string]CallCheck{ + "regexp.Match": loopedRegexp("regexp.Match"), + "regexp.MatchReader": loopedRegexp("regexp.MatchReader"), + "regexp.MatchString": loopedRegexp("regexp.MatchString"), + } +) + +type Checker struct { + CheckGenerated bool + funcDescs 
*functions.Descriptions + deprecatedObjs map[types.Object]string + nodeFns map[ast.Node]*ssa.Function +} + +func NewChecker() *Checker { + return &Checker{} +} + +func (*Checker) Name() string { return "staticcheck" } +func (*Checker) Prefix() string { return "SA" } + +func (c *Checker) Funcs() map[string]lint.Func { + return map[string]lint.Func{ + "SA1000": c.callChecker(checkRegexpRules), + "SA1001": c.CheckTemplate, + "SA1002": c.callChecker(checkTimeParseRules), + "SA1003": c.callChecker(checkEncodingBinaryRules), + "SA1004": c.CheckTimeSleepConstant, + "SA1005": c.CheckExec, + "SA1006": c.CheckUnsafePrintf, + "SA1007": c.callChecker(checkURLsRules), + "SA1008": c.CheckCanonicalHeaderKey, + "SA1009": nil, + "SA1010": c.callChecker(checkRegexpFindAllRules), + "SA1011": c.callChecker(checkUTF8CutsetRules), + "SA1012": c.CheckNilContext, + "SA1013": c.CheckSeeker, + "SA1014": c.callChecker(checkUnmarshalPointerRules), + "SA1015": c.CheckLeakyTimeTick, + "SA1016": c.CheckUntrappableSignal, + "SA1017": c.callChecker(checkUnbufferedSignalChanRules), + "SA1018": c.callChecker(checkStringsReplaceZeroRules), + "SA1019": c.CheckDeprecated, + "SA1020": c.callChecker(checkListenAddressRules), + "SA1021": c.callChecker(checkBytesEqualIPRules), + "SA1022": nil, + "SA1023": c.CheckWriterBufferModified, + "SA1024": c.callChecker(checkUniqueCutsetRules), + + "SA2000": c.CheckWaitgroupAdd, + "SA2001": c.CheckEmptyCriticalSection, + "SA2002": c.CheckConcurrentTesting, + "SA2003": c.CheckDeferLock, + + "SA3000": c.CheckTestMainExit, + "SA3001": c.CheckBenchmarkN, + + "SA4000": c.CheckLhsRhsIdentical, + "SA4001": c.CheckIneffectiveCopy, + "SA4002": c.CheckDiffSizeComparison, + "SA4003": c.CheckUnsignedComparison, + "SA4004": c.CheckIneffectiveLoop, + "SA4005": nil, + "SA4006": c.CheckUnreadVariableValues, + // "SA4007": c.CheckPredeterminedBooleanExprs, + "SA4007": nil, + "SA4008": c.CheckLoopCondition, + "SA4009": c.CheckArgOverwritten, + "SA4010": c.CheckIneffectiveAppend, + "SA4011": c.CheckScopedBreak, + "SA4012": c.CheckNaNComparison, + "SA4013": c.CheckDoubleNegation, + "SA4014": c.CheckRepeatedIfElse, + "SA4015": c.callChecker(checkMathIntRules), + "SA4016": c.CheckSillyBitwiseOps, + "SA4017": c.CheckPureFunctions, + "SA4018": c.CheckSelfAssignment, + "SA4019": c.CheckDuplicateBuildConstraints, + + "SA5000": c.CheckNilMaps, + "SA5001": c.CheckEarlyDefer, + "SA5002": c.CheckInfiniteEmptyLoop, + "SA5003": c.CheckDeferInInfiniteLoop, + "SA5004": c.CheckLoopEmptyDefault, + "SA5005": c.CheckCyclicFinalizer, + // "SA5006": c.CheckSliceOutOfBounds, + "SA5007": c.CheckInfiniteRecursion, + + "SA6000": c.callChecker(checkRegexpMatchLoopRules), + "SA6001": c.CheckMapBytesKey, + "SA6002": c.callChecker(checkSyncPoolValueRules), + "SA6003": c.CheckRangeStringRunes, + "SA6004": nil, + + "SA9000": nil, + "SA9001": c.CheckDubiousDeferInChannelRangeLoop, + "SA9002": c.CheckNonOctalFileMode, + "SA9003": c.CheckEmptyBranch, + } +} + +func (c *Checker) filterGenerated(files []*ast.File) []*ast.File { + if c.CheckGenerated { + return files + } + var out []*ast.File + for _, f := range files { + if !lint.IsGenerated(f) { + out = append(out, f) + } + } + return out +} + +func (c *Checker) deprecateObject(m map[types.Object]string, prog *lint.Program, obj types.Object) { + if obj.Pkg() == nil { + return + } + + f := prog.File(obj) + if f == nil { + return + } + msg := c.deprecationMessage(f, prog.Prog.Fset, obj) + if msg != "" { + m[obj] = msg + } +} + +func (c *Checker) Init(prog *lint.Program) { + wg := 
&sync.WaitGroup{} + wg.Add(3) + go func() { + c.funcDescs = functions.NewDescriptions(prog.SSA) + for _, fn := range prog.AllFunctions { + if fn.Blocks != nil { + applyStdlibKnowledge(fn) + ssa.OptimizeBlocks(fn) + } + } + wg.Done() + }() + + go func() { + c.nodeFns = lint.NodeFns(prog.Packages) + wg.Done() + }() + + go func() { + c.deprecatedObjs = map[types.Object]string{} + for _, ssapkg := range prog.SSA.AllPackages() { + ssapkg := ssapkg + for _, member := range ssapkg.Members { + obj := member.Object() + if obj == nil { + continue + } + c.deprecateObject(c.deprecatedObjs, prog, obj) + if typ, ok := obj.Type().(*types.Named); ok { + for i := 0; i < typ.NumMethods(); i++ { + meth := typ.Method(i) + c.deprecateObject(c.deprecatedObjs, prog, meth) + } + + if iface, ok := typ.Underlying().(*types.Interface); ok { + for i := 0; i < iface.NumExplicitMethods(); i++ { + meth := iface.ExplicitMethod(i) + c.deprecateObject(c.deprecatedObjs, prog, meth) + } + } + } + if typ, ok := obj.Type().Underlying().(*types.Struct); ok { + n := typ.NumFields() + for i := 0; i < n; i++ { + // FIXME(dh): This code will not find deprecated + // fields in anonymous structs. + field := typ.Field(i) + c.deprecateObject(c.deprecatedObjs, prog, field) + } + } + } + } + wg.Done() + }() + + wg.Wait() +} + +func (c *Checker) deprecationMessage(file *ast.File, fset *token.FileSet, obj types.Object) (message string) { + pos := obj.Pos() + path, _ := astutil.PathEnclosingInterval(file, pos, pos) + if len(path) <= 2 { + return "" + } + var docs []*ast.CommentGroup + switch n := path[1].(type) { + case *ast.FuncDecl: + docs = append(docs, n.Doc) + case *ast.Field: + docs = append(docs, n.Doc) + case *ast.ValueSpec: + docs = append(docs, n.Doc) + if len(path) >= 3 { + if n, ok := path[2].(*ast.GenDecl); ok { + docs = append(docs, n.Doc) + } + } + case *ast.TypeSpec: + docs = append(docs, n.Doc) + if len(path) >= 3 { + if n, ok := path[2].(*ast.GenDecl); ok { + docs = append(docs, n.Doc) + } + } + default: + return "" + } + + for _, doc := range docs { + if doc == nil { + continue + } + parts := strings.Split(doc.Text(), "\n\n") + last := parts[len(parts)-1] + if !strings.HasPrefix(last, "Deprecated: ") { + continue + } + alt := last[len("Deprecated: "):] + alt = strings.Replace(alt, "\n", " ", -1) + return alt + } + return "" +} + +func (c *Checker) isInLoop(b *ssa.BasicBlock) bool { + sets := c.funcDescs.Get(b.Parent()).Loops + for _, set := range sets { + if set[b] { + return true + } + } + return false +} + +func applyStdlibKnowledge(fn *ssa.Function) { + if len(fn.Blocks) == 0 { + return + } + + // comma-ok receiving from a time.Tick channel will never return + // ok == false, so any branching on the value of ok can be + // replaced with an unconditional jump. This will primarily match + // `for range time.Tick(x)` loops, but it can also match + // user-written code. 
+ for _, block := range fn.Blocks { + if len(block.Instrs) < 3 { + continue + } + if len(block.Succs) != 2 { + continue + } + var instrs []*ssa.Instruction + for i, ins := range block.Instrs { + if _, ok := ins.(*ssa.DebugRef); ok { + continue + } + instrs = append(instrs, &block.Instrs[i]) + } + + for i, ins := range instrs { + unop, ok := (*ins).(*ssa.UnOp) + if !ok || unop.Op != token.ARROW { + continue + } + call, ok := unop.X.(*ssa.Call) + if !ok { + continue + } + if !lint.IsCallTo(call.Common(), "time.Tick") { + continue + } + ex, ok := (*instrs[i+1]).(*ssa.Extract) + if !ok || ex.Tuple != unop || ex.Index != 1 { + continue + } + + ifstmt, ok := (*instrs[i+2]).(*ssa.If) + if !ok || ifstmt.Cond != ex { + continue + } + + *instrs[i+2] = ssa.NewJump(block) + succ := block.Succs[1] + block.Succs = block.Succs[0:1] + succ.RemovePred(block) + } + } +} + +func hasType(j *lint.Job, expr ast.Expr, name string) bool { + return types.TypeString(j.Program.Info.TypeOf(expr), nil) == name +} + +func (c *Checker) CheckUntrappableSignal(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAnyAST(call, + "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { + return true + } + for _, arg := range call.Args { + if conv, ok := arg.(*ast.CallExpr); ok && isName(j, conv.Fun, "os.Signal") { + arg = conv.Args[0] + } + + if isName(j, arg, "os.Kill") || isName(j, arg, "syscall.SIGKILL") { + j.Errorf(arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", j.Render(arg)) + } + if isName(j, arg, "syscall.SIGSTOP") { + j.Errorf(arg, "%s signal cannot be trapped", j.Render(arg)) + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckTemplate(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + var kind string + if j.IsCallToAST(call, "(*text/template.Template).Parse") { + kind = "text" + } else if j.IsCallToAST(call, "(*html/template.Template).Parse") { + kind = "html" + } else { + return true + } + sel := call.Fun.(*ast.SelectorExpr) + if !j.IsCallToAST(sel.X, "text/template.New") && + !j.IsCallToAST(sel.X, "html/template.New") { + // TODO(dh): this is a cheap workaround for templates with + // different delims. A better solution with less false + // negatives would use data flow analysis to see where the + // template comes from and where it has been + return true + } + s, ok := j.ExprToString(call.Args[0]) + if !ok { + return true + } + var err error + switch kind { + case "text": + _, err = texttemplate.New("").Parse(s) + case "html": + _, err = htmltemplate.New("").Parse(s) + } + if err != nil { + // TODO(dominikh): whitelist other parse errors, if any + if strings.Contains(err.Error(), "unexpected") { + j.Errorf(call.Args[0], "%s", err) + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAST(call, "time.Sleep") { + return true + } + lit, ok := call.Args[0].(*ast.BasicLit) + if !ok { + return true + } + n, err := strconv.Atoi(lit.Value) + if err != nil { + return true + } + if n == 0 || n > 120 { + // time.Sleep(0) is a seldomly used pattern in concurrency + // tests. >120 might be intentional. 120 was chosen + // because the user could've meant 2 minutes. 
+ return true + } + recommendation := "time.Sleep(time.Nanosecond)" + if n != 1 { + recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n) + } + j.Errorf(call.Args[0], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { + fn := func(node ast.Node) bool { + g, ok := node.(*ast.GoStmt) + if !ok { + return true + } + fun, ok := g.Call.Fun.(*ast.FuncLit) + if !ok { + return true + } + if len(fun.Body.List) == 0 { + return true + } + stmt, ok := fun.Body.List[0].(*ast.ExprStmt) + if !ok { + return true + } + call, ok := stmt.X.(*ast.CallExpr) + if !ok { + return true + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) + if !ok { + return true + } + if fn.FullName() == "(*sync.WaitGroup).Add" { + j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", + j.Render(stmt)) + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.ForStmt) + if !ok || len(loop.Body.List) != 0 || loop.Post != nil { + return true + } + + if loop.Init != nil { + // TODO(dh): this isn't strictly necessary, it just makes + // the check easier. + return true + } + // An empty loop is bad news in two cases: 1) The loop has no + // condition. In that case, it's just a loop that spins + // forever and as fast as it can, keeping a core busy. 2) The + // loop condition only consists of variable or field reads and + // operators on those. The only way those could change their + // value is with unsynchronised access, which constitutes a + // data race. + // + // If the condition contains any function calls, its behaviour + // is dynamic and the loop might terminate. Similarly for + // channel receives. + + if loop.Cond != nil && hasSideEffects(loop.Cond) { + return true + } + + j.Errorf(loop, "this loop will spin, using 100%% CPU") + if loop.Cond != nil { + j.Errorf(loop, "loop condition never changes or has a race condition") + } + + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { + fn := func(node ast.Node) bool { + mightExit := false + var defers []ast.Stmt + loop, ok := node.(*ast.ForStmt) + if !ok || loop.Cond != nil { + return true + } + fn2 := func(node ast.Node) bool { + switch stmt := node.(type) { + case *ast.ReturnStmt: + mightExit = true + case *ast.BranchStmt: + // TODO(dominikh): if this sees a break in a switch or + // select, it doesn't check if it breaks the loop or + // just the select/switch. This causes some false + // negatives. 
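+ // For example, a break that merely exits a switch statement inside
+ // `for { ... }` still sets mightExit, so defers in that loop go
+ // unreported even though the loop never terminates.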
+ if stmt.Tok == token.BREAK { + mightExit = true + } + case *ast.DeferStmt: + defers = append(defers, stmt) + case *ast.FuncLit: + // Don't look into function bodies + return false + } + return true + } + ast.Inspect(loop.Body, fn2) + if mightExit { + return true + } + for _, stmt := range defers { + j.Errorf(stmt, "defers in this infinite loop will never run") + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.RangeStmt) + if !ok { + return true + } + typ := j.Program.Info.TypeOf(loop.X) + _, ok = typ.Underlying().(*types.Chan) + if !ok { + return true + } + fn2 := func(node ast.Node) bool { + switch stmt := node.(type) { + case *ast.DeferStmt: + j.Errorf(stmt, "defers in this range loop won't run unless the channel gets closed") + case *ast.FuncLit: + // Don't look into function bodies + return false + } + return true + } + ast.Inspect(loop.Body, fn2) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckTestMainExit(j *lint.Job) { + fn := func(node ast.Node) bool { + if !isTestMain(j, node) { + return true + } + + arg := j.Program.Info.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) + callsRun := false + fn2 := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + ident, ok := sel.X.(*ast.Ident) + if !ok { + return true + } + if arg != j.Program.Info.ObjectOf(ident) { + return true + } + if sel.Sel.Name == "Run" { + callsRun = true + return false + } + return true + } + ast.Inspect(node.(*ast.FuncDecl).Body, fn2) + + callsExit := false + fn3 := func(node ast.Node) bool { + if j.IsCallToAST(node, "os.Exit") { + callsExit = true + return false + } + return true + } + ast.Inspect(node.(*ast.FuncDecl).Body, fn3) + if !callsExit && callsRun { + j.Errorf(node, "TestMain should call os.Exit to set exit code") + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func isTestMain(j *lint.Job, node ast.Node) bool { + decl, ok := node.(*ast.FuncDecl) + if !ok { + return false + } + if decl.Name.Name != "TestMain" { + return false + } + if len(decl.Type.Params.List) != 1 { + return false + } + arg := decl.Type.Params.List[0] + if len(arg.Names) != 1 { + return false + } + typ := j.Program.Info.TypeOf(arg.Type) + return typ != nil && typ.String() == "*testing.M" +} + +func (c *Checker) CheckExec(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAST(call, "os/exec.Command") { + return true + } + val, ok := j.ExprToString(call.Args[0]) + if !ok { + return true + } + if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { + return true + } + j.Errorf(call.Args[0], "first argument to exec.Command looks like a shell command, but a program name or path are expected") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.ForStmt) + if !ok || len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil { + return true + } + sel, ok := loop.Body.List[0].(*ast.SelectStmt) + if !ok { + return true + } + for _, c := range sel.Body.List { + if comm, ok := 
c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 { + j.Errorf(comm, "should not have an empty default case in a for+select loop. The loop will spin.") + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { + fn := func(node ast.Node) bool { + op, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + switch op.Op { + case token.EQL, token.NEQ: + if basic, ok := j.Program.Info.TypeOf(op.X).(*types.Basic); ok { + if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { + // f == f and f != f might be used to check for NaN + return true + } + } + case token.SUB, token.QUO, token.AND, token.REM, token.OR, token.XOR, token.AND_NOT, + token.LAND, token.LOR, token.LSS, token.GTR, token.LEQ, token.GEQ: + default: + // For some ops, such as + and *, it can make sense to + // have identical operands + return true + } + + if j.Render(op.X) != j.Render(op.Y) { + return true + } + j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckScopedBreak(j *lint.Job) { + fn := func(node ast.Node) bool { + var body *ast.BlockStmt + switch node := node.(type) { + case *ast.ForStmt: + body = node.Body + case *ast.RangeStmt: + body = node.Body + default: + return true + } + for _, stmt := range body.List { + var blocks [][]ast.Stmt + switch stmt := stmt.(type) { + case *ast.SwitchStmt: + for _, c := range stmt.Body.List { + blocks = append(blocks, c.(*ast.CaseClause).Body) + } + case *ast.SelectStmt: + for _, c := range stmt.Body.List { + blocks = append(blocks, c.(*ast.CommClause).Body) + } + default: + continue + } + + for _, body := range blocks { + if len(body) == 0 { + continue + } + lasts := []ast.Stmt{body[len(body)-1]} + // TODO(dh): unfold all levels of nested block + // statements, not just a single level if statement + if ifs, ok := lasts[0].(*ast.IfStmt); ok { + if len(ifs.Body.List) == 0 { + continue + } + lasts[0] = ifs.Body.List[len(ifs.Body.List)-1] + + if block, ok := ifs.Else.(*ast.BlockStmt); ok { + if len(block.List) != 0 { + lasts = append(lasts, block.List[len(block.List)-1]) + } + } + } + for _, last := range lasts { + branch, ok := last.(*ast.BranchStmt) + if !ok || branch.Tok != token.BREAK || branch.Label != nil { + continue + } + j.Errorf(branch, "ineffective break statement. 
Did you mean to break out of the outer loop?") + } + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckUnsafePrintf(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !j.IsCallToAnyAST(call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { + return true + } + if len(call.Args) != 1 { + return true + } + switch call.Args[0].(type) { + case *ast.CallExpr, *ast.Ident: + default: + return true + } + j.Errorf(call.Args[0], "printf-style function with dynamic first argument and no further arguments should use print-style function instead") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckEarlyDefer(j *lint.Job) { + fn := func(node ast.Node) bool { + block, ok := node.(*ast.BlockStmt) + if !ok { + return true + } + if len(block.List) < 2 { + return true + } + for i, stmt := range block.List { + if i == len(block.List)-1 { + break + } + assign, ok := stmt.(*ast.AssignStmt) + if !ok { + continue + } + if len(assign.Rhs) != 1 { + continue + } + if len(assign.Lhs) < 2 { + continue + } + if lhs, ok := assign.Lhs[len(assign.Lhs)-1].(*ast.Ident); ok && lhs.Name == "_" { + continue + } + call, ok := assign.Rhs[0].(*ast.CallExpr) + if !ok { + continue + } + sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) + if !ok { + continue + } + if sig.Results().Len() < 2 { + continue + } + last := sig.Results().At(sig.Results().Len() - 1) + // FIXME(dh): check that it's error from universe, not + // another type of the same name + if last.Type().String() != "error" { + continue + } + lhs, ok := assign.Lhs[0].(*ast.Ident) + if !ok { + continue + } + def, ok := block.List[i+1].(*ast.DeferStmt) + if !ok { + continue + } + sel, ok := def.Call.Fun.(*ast.SelectorExpr) + if !ok { + continue + } + ident, ok := selectorX(sel).(*ast.Ident) + if !ok { + continue + } + if ident.Obj != lhs.Obj { + continue + } + if sel.Sel.Name != "Close" { + continue + } + j.Errorf(def, "should check returned error before deferring %s", j.Render(def.Call)) + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func selectorX(sel *ast.SelectorExpr) ast.Node { + switch x := sel.X.(type) { + case *ast.SelectorExpr: + return selectorX(x) + default: + return x + } +} + +func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { + // Initially it might seem like this check would be easier to + // implement in SSA. After all, we're only checking for two + // consecutive method calls. In reality, however, there may be any + // number of other instructions between the lock and unlock, while + // still constituting an empty critical section. For example, + // given `m.x().Lock(); m.x().Unlock()`, there will be a call to + // x(). In the AST-based approach, this has a tiny potential for a + // false positive (the second call to x might be doing work that + // is protected by the mutex). In an SSA-based approach, however, + // it would miss a lot of real bugs. 
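+ //
+ // Concretely, the check below reports consecutive statements such as
+ //
+ //     mu.Lock()
+ //     mu.Unlock()
+ //
+ // (or RLock directly followed by RUnlock) on the same receiver
+ // expression.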
+ + mutexParams := func(s ast.Stmt) (x ast.Expr, funcName string, ok bool) { + expr, ok := s.(*ast.ExprStmt) + if !ok { + return nil, "", false + } + call, ok := expr.X.(*ast.CallExpr) + if !ok { + return nil, "", false + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return nil, "", false + } + + fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) + if !ok { + return nil, "", false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 0 || sig.Results().Len() != 0 { + return nil, "", false + } + + return sel.X, fn.Name(), true + } + + fn := func(node ast.Node) bool { + block, ok := node.(*ast.BlockStmt) + if !ok { + return true + } + if len(block.List) < 2 { + return true + } + for i := range block.List[:len(block.List)-1] { + sel1, method1, ok1 := mutexParams(block.List[i]) + sel2, method2, ok2 := mutexParams(block.List[i+1]) + + if !ok1 || !ok2 || j.Render(sel1) != j.Render(sel2) { + continue + } + if (method1 == "Lock" && method2 == "Unlock") || + (method1 == "RLock" && method2 == "RUnlock") { + j.Errorf(block.List[i+1], "empty critical section") + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +// cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't +// want to flag. +var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`) + +func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { + fn := func(node ast.Node) bool { + if unary, ok := node.(*ast.UnaryExpr); ok { + if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { + ident, ok := star.X.(*ast.Ident) + if !ok || !cgoIdent.MatchString(ident.Name) { + j.Errorf(unary, "&*x will be simplified to x. It will not copy x.") + } + } + } + + if star, ok := node.(*ast.StarExpr); ok { + if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND { + j.Errorf(star, "*&x will be simplified to x. It will not copy x.") + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + for _, b := range ssafn.Blocks { + for _, ins := range b.Instrs { + binop, ok := ins.(*ssa.BinOp) + if !ok { + continue + } + if binop.Op != token.EQL && binop.Op != token.NEQ { + continue + } + _, ok1 := binop.X.(*ssa.Slice) + _, ok2 := binop.Y.(*ssa.Slice) + if !ok1 && !ok2 { + continue + } + r := c.funcDescs.Get(ssafn).Ranges + r1, ok1 := r.Get(binop.X).(vrp.StringInterval) + r2, ok2 := r.Get(binop.Y).(vrp.StringInterval) + if !ok1 || !ok2 { + continue + } + if r1.Length.Intersection(r2.Length).Empty() { + j.Errorf(binop, "comparing strings of different sizes for equality will always return false") + } + } + } + } +} + +func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { + fn := func(node ast.Node) bool { + assign, ok := node.(*ast.AssignStmt) + if ok { + // TODO(dh): This risks missing some Header reads, for + // example in `h1["foo"] = h2["foo"]` – these edge + // cases are probably rare enough to ignore for now. 
+ for _, expr := range assign.Lhs { + op, ok := expr.(*ast.IndexExpr) + if !ok { + continue + } + if hasType(j, op.X, "net/http.Header") { + return false + } + } + return true + } + op, ok := node.(*ast.IndexExpr) + if !ok { + return true + } + if !hasType(j, op.X, "net/http.Header") { + return true + } + s, ok := j.ExprToString(op.Index) + if !ok { + return true + } + if s == http.CanonicalHeaderKey(s) { + return true + } + j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckBenchmarkN(j *lint.Job) { + fn := func(node ast.Node) bool { + assign, ok := node.(*ast.AssignStmt) + if !ok { + return true + } + if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { + return true + } + sel, ok := assign.Lhs[0].(*ast.SelectorExpr) + if !ok { + return true + } + if sel.Sel.Name != "N" { + return true + } + if !hasType(j, sel.X, "*testing.B") { + return true + } + j.Errorf(assign, "should not assign to %s", j.Render(sel)) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { + fn := func(node ast.Node) bool { + switch node.(type) { + case *ast.FuncDecl, *ast.FuncLit: + default: + return true + } + + ssafn := c.nodeFns[node] + if ssafn == nil { + return true + } + if lint.IsExample(ssafn) { + return true + } + ast.Inspect(node, func(node ast.Node) bool { + assign, ok := node.(*ast.AssignStmt) + if !ok { + return true + } + if len(assign.Lhs) > 1 && len(assign.Rhs) == 1 { + // Either a function call with multiple return values, + // or a comma-ok assignment + + val, _ := ssafn.ValueForExpr(assign.Rhs[0]) + if val == nil { + return true + } + refs := val.Referrers() + if refs == nil { + return true + } + for _, ref := range *refs { + ex, ok := ref.(*ssa.Extract) + if !ok { + continue + } + exrefs := ex.Referrers() + if exrefs == nil { + continue + } + if len(lint.FilterDebug(*exrefs)) == 0 { + lhs := assign.Lhs[ex.Index] + if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { + continue + } + j.Errorf(lhs, "this value of %s is never used", lhs) + } + } + return true + } + for i, lhs := range assign.Lhs { + rhs := assign.Rhs[i] + if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { + continue + } + val, _ := ssafn.ValueForExpr(rhs) + if val == nil { + continue + } + + refs := val.Referrers() + if refs == nil { + // TODO investigate why refs can be nil + return true + } + if len(lint.FilterDebug(*refs)) == 0 { + j.Errorf(lhs, "this value of %s is never used", lhs) + } + } + return true + }) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + ssabinop, ok := ins.(*ssa.BinOp) + if !ok { + continue + } + switch ssabinop.Op { + case token.GTR, token.LSS, token.EQL, token.NEQ, token.LEQ, token.GEQ: + default: + continue + } + + xs, ok1 := consts(ssabinop.X, nil, nil) + ys, ok2 := consts(ssabinop.Y, nil, nil) + if !ok1 || !ok2 || len(xs) == 0 || len(ys) == 0 { + continue + } + + trues := 0 + for _, x := range xs { + for _, y := range ys { + if x.Value == nil { + if y.Value == nil { + trues++ + } + continue + } + if constant.Compare(x.Value, ssabinop.Op, y.Value) { + trues++ + } + } + } + b := 
trues != 0 + if trues == 0 || trues == len(xs)*len(ys) { + j.Errorf(ssabinop, "binary expression is always %t for all possible values (%s %s %s)", + b, xs, ssabinop.Op, ys) + } + } + } + } +} + +func (c *Checker) CheckNilMaps(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + mu, ok := ins.(*ssa.MapUpdate) + if !ok { + continue + } + c, ok := mu.Map.(*ssa.Const) + if !ok { + continue + } + if c.Value != nil { + continue + } + j.Errorf(mu, "assignment to nil map") + } + } + } +} + +func (c *Checker) CheckUnsignedComparison(j *lint.Job) { + fn := func(node ast.Node) bool { + expr, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + tx := j.Program.Info.TypeOf(expr.X) + basic, ok := tx.Underlying().(*types.Basic) + if !ok { + return true + } + if (basic.Info() & types.IsUnsigned) == 0 { + return true + } + lit, ok := expr.Y.(*ast.BasicLit) + if !ok || lit.Value != "0" { + return true + } + switch expr.Op { + case token.GEQ: + j.Errorf(expr, "unsigned values are always >= 0") + case token.LSS: + j.Errorf(expr, "unsigned values are never < 0") + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { + if visitedPhis == nil { + visitedPhis = map[string]bool{} + } + var ok bool + switch val := val.(type) { + case *ssa.Phi: + if visitedPhis[val.Name()] { + break + } + visitedPhis[val.Name()] = true + vals := val.Operands(nil) + for _, phival := range vals { + out, ok = consts(*phival, out, visitedPhis) + if !ok { + return nil, false + } + } + case *ssa.Const: + out = append(out, val) + case *ssa.Convert: + out, ok = consts(val.X, out, visitedPhis) + if !ok { + return nil, false + } + default: + return nil, false + } + if len(out) < 2 { + return out, true + } + uniq := []*ssa.Const{out[0]} + for _, val := range out[1:] { + if val.Value == uniq[len(uniq)-1].Value { + continue + } + uniq = append(uniq, val) + } + return uniq, true +} + +func (c *Checker) CheckLoopCondition(j *lint.Job) { + fn := func(node ast.Node) bool { + loop, ok := node.(*ast.ForStmt) + if !ok { + return true + } + if loop.Init == nil || loop.Cond == nil || loop.Post == nil { + return true + } + init, ok := loop.Init.(*ast.AssignStmt) + if !ok || len(init.Lhs) != 1 || len(init.Rhs) != 1 { + return true + } + cond, ok := loop.Cond.(*ast.BinaryExpr) + if !ok { + return true + } + x, ok := cond.X.(*ast.Ident) + if !ok { + return true + } + lhs, ok := init.Lhs[0].(*ast.Ident) + if !ok { + return true + } + if x.Obj != lhs.Obj { + return true + } + if _, ok := loop.Post.(*ast.IncDecStmt); !ok { + return true + } + + ssafn := c.nodeFns[cond] + if ssafn == nil { + return true + } + v, isAddr := ssafn.ValueForExpr(cond.X) + if v == nil || isAddr { + return true + } + switch v := v.(type) { + case *ssa.Phi: + ops := v.Operands(nil) + if len(ops) != 2 { + return true + } + _, ok := (*ops[0]).(*ssa.Const) + if !ok { + return true + } + sigma, ok := (*ops[1]).(*ssa.Sigma) + if !ok { + return true + } + if sigma.X != v { + return true + } + case *ssa.UnOp: + return true + } + j.Errorf(cond, "variable in loop condition never changes") + + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckArgOverwritten(j *lint.Job) { + fn := func(node ast.Node) bool { + var typ *ast.FuncType + var body *ast.BlockStmt + switch fn := node.(type) { + case *ast.FuncDecl: + typ 
= fn.Type + body = fn.Body + case *ast.FuncLit: + typ = fn.Type + body = fn.Body + } + if body == nil { + return true + } + ssafn := c.nodeFns[node] + if ssafn == nil { + return true + } + if len(typ.Params.List) == 0 { + return true + } + for _, field := range typ.Params.List { + for _, arg := range field.Names { + obj := j.Program.Info.ObjectOf(arg) + var ssaobj *ssa.Parameter + for _, param := range ssafn.Params { + if param.Object() == obj { + ssaobj = param + break + } + } + if ssaobj == nil { + continue + } + refs := ssaobj.Referrers() + if refs == nil { + continue + } + if len(lint.FilterDebug(*refs)) != 0 { + continue + } + + assigned := false + ast.Inspect(body, func(node ast.Node) bool { + assign, ok := node.(*ast.AssignStmt) + if !ok { + return true + } + for _, lhs := range assign.Lhs { + ident, ok := lhs.(*ast.Ident) + if !ok { + continue + } + if j.Program.Info.ObjectOf(ident) == obj { + assigned = true + return false + } + } + return true + }) + if assigned { + j.Errorf(arg, "argument %s is overwritten before first use", arg) + } + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { + // This check detects some, but not all unconditional loop exits. + // We give up in the following cases: + // + // - a goto anywhere in the loop. The goto might skip over our + // return, and we don't check that it doesn't. + // + // - any nested, unlabelled continue, even if it is in another + // loop or closure. + fn := func(node ast.Node) bool { + var body *ast.BlockStmt + switch fn := node.(type) { + case *ast.FuncDecl: + body = fn.Body + case *ast.FuncLit: + body = fn.Body + default: + return true + } + if body == nil { + return true + } + labels := map[*ast.Object]ast.Stmt{} + ast.Inspect(body, func(node ast.Node) bool { + label, ok := node.(*ast.LabeledStmt) + if !ok { + return true + } + labels[label.Label.Obj] = label.Stmt + return true + }) + + ast.Inspect(body, func(node ast.Node) bool { + var loop ast.Node + var body *ast.BlockStmt + switch node := node.(type) { + case *ast.ForStmt: + body = node.Body + loop = node + case *ast.RangeStmt: + typ := j.Program.Info.TypeOf(node.X) + if _, ok := typ.Underlying().(*types.Map); ok { + // looping once over a map is a valid pattern for + // getting an arbitrary element. + return true + } + body = node.Body + loop = node + default: + return true + } + if len(body.List) < 2 { + // avoid flagging the somewhat common pattern of using + // a range loop to get the first element in a slice, + // or the first rune in a string. 
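+ // For example, `for _, r := range s { return r }` has a
+ // single-statement body and is not reported.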
+ return true + } + var unconditionalExit ast.Node + hasBranching := false + for _, stmt := range body.List { + switch stmt := stmt.(type) { + case *ast.BranchStmt: + switch stmt.Tok { + case token.BREAK: + if stmt.Label == nil || labels[stmt.Label.Obj] == loop { + unconditionalExit = stmt + } + case token.CONTINUE: + if stmt.Label == nil || labels[stmt.Label.Obj] == loop { + unconditionalExit = nil + return false + } + } + case *ast.ReturnStmt: + unconditionalExit = stmt + case *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.SelectStmt: + hasBranching = true + } + } + if unconditionalExit == nil || !hasBranching { + return false + } + ast.Inspect(body, func(node ast.Node) bool { + if branch, ok := node.(*ast.BranchStmt); ok { + + switch branch.Tok { + case token.GOTO: + unconditionalExit = nil + return false + case token.CONTINUE: + if branch.Label != nil && labels[branch.Label.Obj] != loop { + return true + } + unconditionalExit = nil + return false + } + } + return true + }) + if unconditionalExit != nil { + j.Errorf(unconditionalExit, "the surrounding loop is unconditionally terminated") + } + return true + }) + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckNilContext(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if len(call.Args) == 0 { + return true + } + if typ, ok := j.Program.Info.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { + return true + } + sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) + if !ok { + return true + } + if sig.Params().Len() == 0 { + return true + } + if types.TypeString(sig.Params().At(0).Type(), nil) != "context.Context" { + return true + } + j.Errorf(call.Args[0], + "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckSeeker(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + if sel.Sel.Name != "Seek" { + return true + } + if len(call.Args) != 2 { + return true + } + arg0, ok := call.Args[0].(*ast.SelectorExpr) + if !ok { + return true + } + switch arg0.Sel.Name { + case "SeekStart", "SeekCurrent", "SeekEnd": + default: + return true + } + pkg, ok := arg0.X.(*ast.Ident) + if !ok { + return true + } + if pkg.Name != "io" { + return true + } + j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { + isAppend := func(ins ssa.Value) bool { + call, ok := ins.(*ssa.Call) + if !ok { + return false + } + if call.Call.IsInvoke() { + return false + } + if builtin, ok := call.Call.Value.(*ssa.Builtin); !ok || builtin.Name() != "append" { + return false + } + return true + } + + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + val, ok := ins.(ssa.Value) + if !ok || !isAppend(val) { + continue + } + + isUsed := false + visited := map[ssa.Instruction]bool{} + var walkRefs func(refs []ssa.Instruction) + walkRefs = func(refs []ssa.Instruction) { + loop: + for _, ref := range refs { + if visited[ref] 
{ + continue + } + visited[ref] = true + if _, ok := ref.(*ssa.DebugRef); ok { + continue + } + switch ref := ref.(type) { + case *ssa.Phi: + walkRefs(*ref.Referrers()) + case *ssa.Sigma: + walkRefs(*ref.Referrers()) + case ssa.Value: + if !isAppend(ref) { + isUsed = true + } else { + walkRefs(*ref.Referrers()) + } + case ssa.Instruction: + isUsed = true + break loop + } + } + } + refs := val.Referrers() + if refs == nil { + continue + } + walkRefs(*refs) + if !isUsed { + j.Errorf(ins, "this result of append is never used, except maybe in other appends") + } + } + } + } +} + +func (c *Checker) CheckConcurrentTesting(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + gostmt, ok := ins.(*ssa.Go) + if !ok { + continue + } + var fn *ssa.Function + switch val := gostmt.Call.Value.(type) { + case *ssa.Function: + fn = val + case *ssa.MakeClosure: + fn = val.Fn.(*ssa.Function) + default: + continue + } + if fn.Blocks == nil { + continue + } + for _, block := range fn.Blocks { + for _, ins := range block.Instrs { + call, ok := ins.(*ssa.Call) + if !ok { + continue + } + if call.Call.IsInvoke() { + continue + } + callee := call.Call.StaticCallee() + if callee == nil { + continue + } + recv := callee.Signature.Recv() + if recv == nil { + continue + } + if types.TypeString(recv.Type(), nil) != "*testing.common" { + continue + } + fn, ok := call.Call.StaticCallee().Object().(*types.Func) + if !ok { + continue + } + name := fn.Name() + switch name { + case "FailNow", "Fatal", "Fatalf", "SkipNow", "Skip", "Skipf": + default: + continue + } + j.Errorf(gostmt, "the goroutine calls T.%s, which must be called in the same goroutine as the test", name) + } + } + } + } + } +} + +func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + node := c.funcDescs.CallGraph.CreateNode(ssafn) + for _, edge := range node.Out { + if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" { + continue + } + arg0 := edge.Site.Common().Args[0] + if iface, ok := arg0.(*ssa.MakeInterface); ok { + arg0 = iface.X + } + unop, ok := arg0.(*ssa.UnOp) + if !ok { + continue + } + v, ok := unop.X.(*ssa.Alloc) + if !ok { + continue + } + arg1 := edge.Site.Common().Args[1] + if iface, ok := arg1.(*ssa.MakeInterface); ok { + arg1 = iface.X + } + mc, ok := arg1.(*ssa.MakeClosure) + if !ok { + continue + } + for _, b := range mc.Bindings { + if b == v { + pos := j.Program.DisplayPosition(mc.Fn.Pos()) + j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) + } + } + } + } +} + +func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + ia, ok := ins.(*ssa.IndexAddr) + if !ok { + continue + } + if _, ok := ia.X.Type().Underlying().(*types.Slice); !ok { + continue + } + sr, ok1 := c.funcDescs.Get(ssafn).Ranges[ia.X].(vrp.SliceInterval) + idxr, ok2 := c.funcDescs.Get(ssafn).Ranges[ia.Index].(vrp.IntInterval) + if !ok1 || !ok2 || !sr.IsKnown() || !idxr.IsKnown() || sr.Length.Empty() || idxr.Empty() { + continue + } + if idxr.Lower.Cmp(sr.Length.Upper) >= 0 { + j.Errorf(ia, "index out of bounds") + } + } + } + } +} + +func (c *Checker) CheckDeferLock(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + instrs := lint.FilterDebug(block.Instrs) + if len(instrs) 
< 2 { + continue + } + for i, ins := range instrs[:len(instrs)-1] { + call, ok := ins.(*ssa.Call) + if !ok { + continue + } + if !lint.IsCallTo(call.Common(), "(*sync.Mutex).Lock") && !lint.IsCallTo(call.Common(), "(*sync.RWMutex).RLock") { + continue + } + nins, ok := instrs[i+1].(*ssa.Defer) + if !ok { + continue + } + if !lint.IsCallTo(&nins.Call, "(*sync.Mutex).Lock") && !lint.IsCallTo(&nins.Call, "(*sync.RWMutex).RLock") { + continue + } + if call.Common().Args[0] != nins.Call.Args[0] { + continue + } + name := shortCallName(call.Common()) + alt := "" + switch name { + case "Lock": + alt = "Unlock" + case "RLock": + alt = "RUnlock" + } + j.Errorf(nins, "deferring %s right after having locked already; did you mean to defer %s?", name, alt) + } + } + } +} + +func (c *Checker) CheckNaNComparison(j *lint.Job) { + isNaN := func(v ssa.Value) bool { + call, ok := v.(*ssa.Call) + if !ok { + return false + } + return lint.IsCallTo(call.Common(), "math.NaN") + } + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + ins, ok := ins.(*ssa.BinOp) + if !ok { + continue + } + if isNaN(ins.X) || isNaN(ins.Y) { + j.Errorf(ins, "no value is equal to NaN, not even NaN itself") + } + } + } + } +} + +func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + node := c.funcDescs.CallGraph.CreateNode(ssafn) + for _, edge := range node.Out { + if edge.Callee != node { + continue + } + if _, ok := edge.Site.(*ssa.Go); ok { + // Recursively spawning goroutines doesn't consume + // stack space infinitely, so don't flag it. + continue + } + + block := edge.Site.Block() + canReturn := false + for _, b := range ssafn.Blocks { + if block.Dominates(b) { + continue + } + if len(b.Instrs) == 0 { + continue + } + if _, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return); ok { + canReturn = true + break + } + } + if canReturn { + continue + } + j.Errorf(edge.Site, "infinite recursive call") + } + } +} + +func objectName(obj types.Object) string { + if obj == nil { + return "" + } + var name string + if obj.Pkg() != nil && obj.Pkg().Scope().Lookup(obj.Name()) == obj { + var s string + s = obj.Pkg().Path() + if s != "" { + name += s + "." 
+ } + } + name += obj.Name() + return name +} + +func isName(j *lint.Job, expr ast.Expr, name string) bool { + var obj types.Object + switch expr := expr.(type) { + case *ast.Ident: + obj = j.Program.Info.ObjectOf(expr) + case *ast.SelectorExpr: + obj = j.Program.Info.ObjectOf(expr.Sel) + } + return objectName(obj) == name +} + +func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + if j.IsInMain(ssafn) || j.IsInTest(ssafn) { + continue + } + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + call, ok := ins.(*ssa.Call) + if !ok || !lint.IsCallTo(call.Common(), "time.Tick") { + continue + } + if c.funcDescs.Get(call.Parent()).Infinite { + continue + } + j.Errorf(call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") + } + } + } +} + +func (c *Checker) CheckDoubleNegation(j *lint.Job) { + fn := func(node ast.Node) bool { + unary1, ok := node.(*ast.UnaryExpr) + if !ok { + return true + } + unary2, ok := unary1.X.(*ast.UnaryExpr) + if !ok { + return true + } + if unary1.Op != token.NOT || unary2.Op != token.NOT { + return true + } + j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func hasSideEffects(node ast.Node) bool { + dynamic := false + ast.Inspect(node, func(node ast.Node) bool { + switch node := node.(type) { + case *ast.CallExpr: + dynamic = true + return false + case *ast.UnaryExpr: + if node.Op == token.ARROW { + dynamic = true + return false + } + } + return true + }) + return dynamic +} + +func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { + seen := map[ast.Node]bool{} + + var collectConds func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) + collectConds = func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) { + seen[ifstmt] = true + if ifstmt.Init != nil { + inits = append(inits, ifstmt.Init) + } + conds = append(conds, ifstmt.Cond) + if elsestmt, ok := ifstmt.Else.(*ast.IfStmt); ok { + return collectConds(elsestmt, inits, conds) + } + return inits, conds + } + fn := func(node ast.Node) bool { + ifstmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + if seen[ifstmt] { + return true + } + inits, conds := collectConds(ifstmt, nil, nil) + if len(inits) > 0 { + return true + } + for _, cond := range conds { + if hasSideEffects(cond) { + return true + } + } + counts := map[string]int{} + for _, cond := range conds { + s := j.Render(cond) + counts[s]++ + if counts[s] == 2 { + j.Errorf(cond, "this condition occurs multiple times in this if/else if chain") + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { + for _, ssafn := range j.Program.InitialFunctions { + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + ins, ok := ins.(*ssa.BinOp) + if !ok { + continue + } + + if c, ok := ins.Y.(*ssa.Const); !ok || c.Value == nil || c.Value.Kind() != constant.Int || c.Uint64() != 0 { + continue + } + switch ins.Op { + case token.AND, token.OR, token.XOR: + default: + // we do not flag shifts because too often, x<<0 is part + // of a pattern, x<<0, x<<8, x<<16, ... 
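+ // Only &, | and ^ with a constant zero operand on the right are
+ // reported further down: x&0 is always 0, while x|0 and x^0 are
+ // always x.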
+ continue + } + path, _ := astutil.PathEnclosingInterval(j.File(ins), ins.Pos(), ins.Pos()) + if len(path) == 0 { + continue + } + if node, ok := path[0].(*ast.BinaryExpr); !ok || !lint.IsZero(node.Y) { + continue + } + + switch ins.Op { + case token.AND: + j.Errorf(ins, "x & 0 always equals 0") + case token.OR, token.XOR: + j.Errorf(ins, "x %s 0 always equals x", ins.Op) + } + } + } + } +} + +func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } + sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) + if !ok { + return true + } + n := sig.Params().Len() + var args []int + for i := 0; i < n; i++ { + typ := sig.Params().At(i).Type() + if types.TypeString(typ, nil) == "os.FileMode" { + args = append(args, i) + } + } + for _, i := range args { + lit, ok := call.Args[i].(*ast.BasicLit) + if !ok { + continue + } + if len(lit.Value) == 3 && + lit.Value[0] != '0' && + lit.Value[0] >= '0' && lit.Value[0] <= '7' && + lit.Value[1] >= '0' && lit.Value[1] <= '7' && + lit.Value[2] >= '0' && lit.Value[2] <= '7' { + + v, err := strconv.ParseInt(lit.Value, 10, 64) + if err != nil { + continue + } + j.Errorf(call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckPureFunctions(j *lint.Job) { +fnLoop: + for _, ssafn := range j.Program.InitialFunctions { + if j.IsInTest(ssafn) { + params := ssafn.Signature.Params() + for i := 0; i < params.Len(); i++ { + param := params.At(i) + if types.TypeString(param.Type(), nil) == "*testing.B" { + // Ignore discarded pure functions in code related + // to benchmarks. Instead of matching BenchmarkFoo + // functions, we match any function accepting a + // *testing.B. Benchmarks sometimes call generic + // functions for doing the actual work, and + // checking for the parameter is a lot easier and + // faster than analyzing call trees. 
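+ // For example, discarded calls to pure functions inside a helper
+ // like `func benchmarkHelper(b *testing.B, n int)` are not
+ // reported, even though the same calls elsewhere would be.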
+ continue fnLoop + } + } + } + + for _, b := range ssafn.Blocks { + for _, ins := range b.Instrs { + ins, ok := ins.(*ssa.Call) + if !ok { + continue + } + refs := ins.Referrers() + if refs == nil || len(lint.FilterDebug(*refs)) > 0 { + continue + } + callee := ins.Common().StaticCallee() + if callee == nil { + continue + } + if c.funcDescs.Get(callee).Pure && !c.funcDescs.Get(callee).Stub { + j.Errorf(ins, "%s is a pure function but its return value is ignored", callee.Name()) + continue + } + } + } + } +} + +func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { + obj := j.Program.Info.ObjectOf(ident) + if obj.Pkg() == nil { + return false, "" + } + alt := c.deprecatedObjs[obj] + return alt != "", alt +} + +func selectorName(j *lint.Job, expr *ast.SelectorExpr) string { + sel := j.Program.Info.Selections[expr] + if sel == nil { + if x, ok := expr.X.(*ast.Ident); ok { + pkg, ok := j.Program.Info.ObjectOf(x).(*types.PkgName) + if !ok { + // This shouldn't happen + return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name) + } + return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name) + } + panic(fmt.Sprintf("unsupported selector: %v", expr)) + } + return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) +} + +func (c *Checker) enclosingFunc(sel *ast.SelectorExpr) *ssa.Function { + fn := c.nodeFns[sel] + if fn == nil { + return nil + } + for fn.Parent() != nil { + fn = fn.Parent() + } + return fn +} + +func (c *Checker) CheckDeprecated(j *lint.Job) { + fn := func(node ast.Node) bool { + sel, ok := node.(*ast.SelectorExpr) + if !ok { + return true + } + + obj := j.Program.Info.ObjectOf(sel.Sel) + if obj.Pkg() == nil { + return true + } + nodePkg := j.NodePackage(node).Pkg + if nodePkg == obj.Pkg() || obj.Pkg().Path()+"_test" == nodePkg.Path() { + // Don't flag stuff in our own package + return true + } + if ok, alt := c.isDeprecated(j, sel.Sel); ok { + // Look for the first available alternative, not the first + // version something was deprecated in. If a function was + // deprecated in Go 1.6, an alternative has been available + // already in 1.0, and we're targetting 1.2, it still + // makes sense to use the alternative from 1.0, to be + // future-proof. 
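+ // AlternativeAvailableSince is the Go minor version in which the
+ // replacement became available; if the targeted Go version is older
+ // than that, the deprecated use is not reported.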
+ minVersion := deprecated.Stdlib[selectorName(j, sel)].AlternativeAvailableSince + if !j.IsGoVersion(minVersion) { + return true + } + + if fn := c.enclosingFunc(sel); fn != nil { + if _, ok := c.deprecatedObjs[fn.Object()]; ok { + // functions that are deprecated may use deprecated + // symbols + return true + } + } + j.Errorf(sel, "%s is deprecated: %s", j.Render(sel), alt) + return true + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} + +func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { + return func(j *lint.Job) { + c.checkCalls(j, rules) + } +} + +func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { + for _, ssafn := range j.Program.InitialFunctions { + node := c.funcDescs.CallGraph.CreateNode(ssafn) + for _, edge := range node.Out { + callee := edge.Callee.Func + obj, ok := callee.Object().(*types.Func) + if !ok { + continue + } + + r, ok := rules[obj.FullName()] + if !ok { + continue + } + var args []*Argument + ssaargs := edge.Site.Common().Args + if callee.Signature.Recv() != nil { + ssaargs = ssaargs[1:] + } + for _, arg := range ssaargs { + if iarg, ok := arg.(*ssa.MakeInterface); ok { + arg = iarg.X + } + vr := c.funcDescs.Get(edge.Site.Parent()).Ranges[arg] + args = append(args, &Argument{Value: Value{arg, vr}}) + } + call := &Call{ + Job: j, + Instr: edge.Site, + Args: args, + Checker: c, + Parent: edge.Site.Parent(), + } + r(call) + for idx, arg := range call.Args { + _ = idx + for _, e := range arg.invalids { + // path, _ := astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos()) + // if len(path) < 2 { + // continue + // } + // astcall, ok := path[0].(*ast.CallExpr) + // if !ok { + // continue + // } + // j.Errorf(astcall.Args[idx], "%s", e) + + j.Errorf(edge.Site, "%s", e) + } + } + for _, e := range call.invalids { + j.Errorf(call.Instr.Common(), "%s", e) + } + } + } +} + +func unwrapFunction(val ssa.Value) *ssa.Function { + switch val := val.(type) { + case *ssa.Function: + return val + case *ssa.MakeClosure: + return val.Fn.(*ssa.Function) + default: + return nil + } +} + +func shortCallName(call *ssa.CallCommon) string { + if call.IsInvoke() { + return "" + } + switch v := call.Value.(type) { + case *ssa.Function: + fn, ok := v.Object().(*types.Func) + if !ok { + return "" + } + return fn.Name() + case *ssa.Builtin: + return v.Name() + } + return "" +} + +func hasCallTo(block *ssa.BasicBlock, name string) bool { + for _, ins := range block.Instrs { + call, ok := ins.(*ssa.Call) + if !ok { + continue + } + if lint.IsCallTo(call.Common(), name) { + return true + } + } + return false +} + +// deref returns a pointer's element type; otherwise it returns typ. +func deref(typ types.Type) types.Type { + if p, ok := typ.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return typ +} + +func (c *Checker) CheckWriterBufferModified(j *lint.Job) { + // TODO(dh): this might be a good candidate for taint analysis. 
+ // Taint the argument as MUST_NOT_MODIFY, then propagate that + // through functions like bytes.Split + + for _, ssafn := range j.Program.InitialFunctions { + sig := ssafn.Signature + if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 { + continue + } + tArg, ok := sig.Params().At(0).Type().(*types.Slice) + if !ok { + continue + } + if basic, ok := tArg.Elem().(*types.Basic); !ok || basic.Kind() != types.Byte { + continue + } + if basic, ok := sig.Results().At(0).Type().(*types.Basic); !ok || basic.Kind() != types.Int { + continue + } + if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || types.TypeString(named, nil) != "error" { + continue + } + + for _, block := range ssafn.Blocks { + for _, ins := range block.Instrs { + switch ins := ins.(type) { + case *ssa.Store: + addr, ok := ins.Addr.(*ssa.IndexAddr) + if !ok { + continue + } + if addr.X != ssafn.Params[1] { + continue + } + j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") + case *ssa.Call: + if !lint.IsCallTo(ins.Common(), "append") { + continue + } + if ins.Common().Args[0] != ssafn.Params[1] { + continue + } + j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") + } + } + } + } +} + +func loopedRegexp(name string) CallCheck { + return func(call *Call) { + if len(extractConsts(call.Args[0].Value.Value)) == 0 { + return + } + if !call.Checker.isInLoop(call.Instr.Block()) { + return + } + call.Invalid(fmt.Sprintf("calling %s in a loop has poor performance, consider using regexp.Compile", name)) + } +} + +func (c *Checker) CheckEmptyBranch(j *lint.Job) { + fn := func(node ast.Node) bool { + ifstmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + ssafn := c.nodeFns[node] + if lint.IsExample(ssafn) { + return true + } + if ifstmt.Else != nil { + b, ok := ifstmt.Else.(*ast.BlockStmt) + if !ok || len(b.List) != 0 { + return true + } + j.Errorf(ifstmt.Else, "empty branch") + } + if len(ifstmt.Body.List) != 0 { + return true + } + j.Errorf(ifstmt, "empty branch") + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func (c *Checker) CheckMapBytesKey(j *lint.Job) { + for _, fn := range j.Program.InitialFunctions { + for _, b := range fn.Blocks { + insLoop: + for _, ins := range b.Instrs { + // find []byte -> string conversions + conv, ok := ins.(*ssa.Convert) + if !ok || conv.Type() != types.Universe.Lookup("string").Type() { + continue + } + if s, ok := conv.X.Type().(*types.Slice); !ok || s.Elem() != types.Universe.Lookup("byte").Type() { + continue + } + refs := conv.Referrers() + // need at least two (DebugRef) references: the + // conversion and the *ast.Ident + if refs == nil || len(*refs) < 2 { + continue + } + ident := false + // skip first reference, that's the conversion itself + for _, ref := range (*refs)[1:] { + switch ref := ref.(type) { + case *ssa.DebugRef: + if _, ok := ref.Expr.(*ast.Ident); !ok { + // the string seems to be used somewhere + // unexpected; the default branch should + // catch this already, but be safe + continue insLoop + } else { + ident = true + } + case *ssa.Lookup: + default: + // the string is used somewhere else than a + // map lookup + continue insLoop + } + } + + // the result of the conversion wasn't assigned to an + // identifier + if !ident { + continue + } + j.Errorf(conv, "m[string(key)] would be more efficient than k := string(key); m[k]") + } + } + } +} + +func (c *Checker) 
CheckRangeStringRunes(j *lint.Job) { + sharedcheck.CheckRangeStringRunes(c.nodeFns, j) +} + +func (c *Checker) CheckSelfAssignment(j *lint.Job) { + fn := func(node ast.Node) bool { + assign, ok := node.(*ast.AssignStmt) + if !ok { + return true + } + if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) { + return true + } + for i, stmt := range assign.Lhs { + rlh := j.Render(stmt) + rrh := j.Render(assign.Rhs[i]) + if rlh == rrh { + j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh) + } + } + return true + } + for _, f := range c.filterGenerated(j.Program.Files) { + ast.Inspect(f, fn) + } +} + +func buildTagsIdentical(s1, s2 []string) bool { + if len(s1) != len(s2) { + return false + } + s1s := make([]string, len(s1)) + copy(s1s, s1) + sort.Strings(s1s) + s2s := make([]string, len(s2)) + copy(s2s, s2) + sort.Strings(s2s) + for i, s := range s1s { + if s != s2s[i] { + return false + } + } + return true +} + +func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { + for _, f := range c.filterGenerated(job.Program.Files) { + constraints := buildTags(f) + for i, constraint1 := range constraints { + for j, constraint2 := range constraints { + if i >= j { + continue + } + if buildTagsIdentical(constraint1, constraint2) { + job.Errorf(f, "identical build constraints %q and %q", + strings.Join(constraint1, " "), + strings.Join(constraint2, " ")) + } + } + } + } +} diff --git a/vendor/honnef.co/go/tools/staticcheck/rules.go b/vendor/honnef.co/go/tools/staticcheck/rules.go new file mode 100644 index 0000000..60cc00c --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/rules.go @@ -0,0 +1,321 @@ +package staticcheck + +import ( + "fmt" + "go/constant" + "go/types" + "net" + "net/url" + "regexp" + "sort" + "strconv" + "strings" + "time" + "unicode/utf8" + + "honnef.co/go/tools/lint" + "honnef.co/go/tools/ssa" + "honnef.co/go/tools/staticcheck/vrp" +) + +const ( + MsgInvalidHostPort = "invalid port or service name in host:port pair" + MsgInvalidUTF8 = "argument is not a valid UTF-8 encoded string" + MsgNonUniqueCutset = "cutset contains duplicate characters" +) + +type Call struct { + Job *lint.Job + Instr ssa.CallInstruction + Args []*Argument + + Checker *Checker + Parent *ssa.Function + + invalids []string +} + +func (c *Call) Invalid(msg string) { + c.invalids = append(c.invalids, msg) +} + +type Argument struct { + Value Value + invalids []string +} + +func (arg *Argument) Invalid(msg string) { + arg.invalids = append(arg.invalids, msg) +} + +type Value struct { + Value ssa.Value + Range vrp.Range +} + +type CallCheck func(call *Call) + +func extractConsts(v ssa.Value) []*ssa.Const { + switch v := v.(type) { + case *ssa.Const: + return []*ssa.Const{v} + case *ssa.MakeInterface: + return extractConsts(v.X) + default: + return nil + } +} + +func ValidateRegexp(v Value) error { + for _, c := range extractConsts(v.Value) { + if c.Value == nil { + continue + } + if c.Value.Kind() != constant.String { + continue + } + s := constant.StringVal(c.Value) + if _, err := regexp.Compile(s); err != nil { + return err + } + } + return nil +} + +func ValidateTimeLayout(v Value) error { + for _, c := range extractConsts(v.Value) { + if c.Value == nil { + continue + } + if c.Value.Kind() != constant.String { + continue + } + s := constant.StringVal(c.Value) + s = strings.Replace(s, "_", " ", -1) + s = strings.Replace(s, "Z", "-", -1) + _, err := time.Parse(s, s) + if err != nil { + return err + } + } + return nil +} + +func ValidateURL(v Value) error { + for _, c := range 
extractConsts(v.Value) { + if c.Value == nil { + continue + } + if c.Value.Kind() != constant.String { + continue + } + s := constant.StringVal(c.Value) + _, err := url.Parse(s) + if err != nil { + return fmt.Errorf("%q is not a valid URL: %s", s, err) + } + } + return nil +} + +func IntValue(v Value, z vrp.Z) bool { + r, ok := v.Range.(vrp.IntInterval) + if !ok || !r.IsKnown() { + return false + } + if r.Lower != r.Upper { + return false + } + if r.Lower.Cmp(z) == 0 { + return true + } + return false +} + +func InvalidUTF8(v Value) bool { + for _, c := range extractConsts(v.Value) { + if c.Value == nil { + continue + } + if c.Value.Kind() != constant.String { + continue + } + s := constant.StringVal(c.Value) + if !utf8.ValidString(s) { + return true + } + } + return false +} + +func UnbufferedChannel(v Value) bool { + r, ok := v.Range.(vrp.ChannelInterval) + if !ok || !r.IsKnown() { + return false + } + if r.Size.Lower.Cmp(vrp.NewZ(0)) == 0 && + r.Size.Upper.Cmp(vrp.NewZ(0)) == 0 { + return true + } + return false +} + +func Pointer(v Value) bool { + switch v.Value.Type().Underlying().(type) { + case *types.Pointer, *types.Interface: + return true + } + return false +} + +func ConvertedFromInt(v Value) bool { + conv, ok := v.Value.(*ssa.Convert) + if !ok { + return false + } + b, ok := conv.X.Type().Underlying().(*types.Basic) + if !ok { + return false + } + if (b.Info() & types.IsInteger) == 0 { + return false + } + return true +} + +func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { + typ = typ.Underlying() + switch typ := typ.(type) { + case *types.Basic: + switch typ.Kind() { + case types.Uint8, types.Uint16, types.Uint32, types.Uint64, + types.Int8, types.Int16, types.Int32, types.Int64, + types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid: + return true + case types.Bool: + return j.IsGoVersion(8) + } + return false + case *types.Struct: + n := typ.NumFields() + for i := 0; i < n; i++ { + if !validEncodingBinaryType(j, typ.Field(i).Type()) { + return false + } + } + return true + case *types.Array: + return validEncodingBinaryType(j, typ.Elem()) + case *types.Interface: + // we can't determine if it's a valid type or not + return true + } + return false +} + +func CanBinaryMarshal(j *lint.Job, v Value) bool { + typ := v.Value.Type().Underlying() + if ttyp, ok := typ.(*types.Pointer); ok { + typ = ttyp.Elem().Underlying() + } + if ttyp, ok := typ.(interface { + Elem() types.Type + }); ok { + if _, ok := ttyp.(*types.Pointer); !ok { + typ = ttyp.Elem() + } + } + + return validEncodingBinaryType(j, typ) +} + +func RepeatZeroTimes(name string, arg int) CallCheck { + return func(call *Call) { + arg := call.Args[arg] + if IntValue(arg.Value, vrp.NewZ(0)) { + arg.Invalid(fmt.Sprintf("calling %s with n == 0 will return no results, did you mean -1?", name)) + } + } +} + +func validateServiceName(s string) bool { + if len(s) < 1 || len(s) > 15 { + return false + } + if s[0] == '-' || s[len(s)-1] == '-' { + return false + } + if strings.Contains(s, "--") { + return false + } + hasLetter := false + for _, r := range s { + if (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') { + hasLetter = true + continue + } + if r >= '0' && r <= '9' { + continue + } + return false + } + return hasLetter +} + +func validatePort(s string) bool { + n, err := strconv.ParseInt(s, 10, 64) + if err != nil { + return validateServiceName(s) + } + return n >= 0 && n <= 65535 +} + +func ValidHostPort(v Value) bool { + for _, k := range extractConsts(v.Value) { + if k.Value == nil 
{ + continue + } + if k.Value.Kind() != constant.String { + continue + } + s := constant.StringVal(k.Value) + _, port, err := net.SplitHostPort(s) + if err != nil { + return false + } + // TODO(dh): check hostname + if !validatePort(port) { + return false + } + } + return true +} + +// ConvertedFrom reports whether value v was converted from type typ. +func ConvertedFrom(v Value, typ string) bool { + change, ok := v.Value.(*ssa.ChangeType) + return ok && types.TypeString(change.X.Type(), nil) == typ +} + +func UniqueStringCutset(v Value) bool { + for _, c := range extractConsts(v.Value) { + if c.Value == nil { + continue + } + if c.Value.Kind() != constant.String { + continue + } + s := constant.StringVal(c.Value) + rs := runeSlice(s) + if len(rs) < 2 { + continue + } + sort.Sort(rs) + for i, r := range rs[1:] { + if rs[i] == r { + return false + } + } + } + return true +} diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go b/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go new file mode 100644 index 0000000..c8fbacb --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go @@ -0,0 +1,73 @@ +package vrp + +import ( + "fmt" + + "honnef.co/go/tools/ssa" +) + +type ChannelInterval struct { + Size IntInterval +} + +func (c ChannelInterval) Union(other Range) Range { + i, ok := other.(ChannelInterval) + if !ok { + i = ChannelInterval{EmptyIntInterval} + } + if c.Size.Empty() || !c.Size.IsKnown() { + return i + } + if i.Size.Empty() || !i.Size.IsKnown() { + return c + } + return ChannelInterval{ + Size: c.Size.Union(i.Size).(IntInterval), + } +} + +func (c ChannelInterval) String() string { + return c.Size.String() +} + +func (c ChannelInterval) IsKnown() bool { + return c.Size.IsKnown() +} + +type MakeChannelConstraint struct { + aConstraint + Buffer ssa.Value +} +type ChannelChangeTypeConstraint struct { + aConstraint + X ssa.Value +} + +func NewMakeChannelConstraint(buffer, y ssa.Value) Constraint { + return &MakeChannelConstraint{NewConstraint(y), buffer} +} +func NewChannelChangeTypeConstraint(x, y ssa.Value) Constraint { + return &ChannelChangeTypeConstraint{NewConstraint(y), x} +} + +func (c *MakeChannelConstraint) Operands() []ssa.Value { return []ssa.Value{c.Buffer} } +func (c *ChannelChangeTypeConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } + +func (c *MakeChannelConstraint) String() string { + return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name, c.Buffer.Name()) +} +func (c *ChannelChangeTypeConstraint) String() string { + return fmt.Sprintf("%s = changetype(%s)", c.Y().Name, c.X.Name()) +} + +func (c *MakeChannelConstraint) Eval(g *Graph) Range { + i, ok := g.Range(c.Buffer).(IntInterval) + if !ok { + return ChannelInterval{NewIntInterval(NewZ(0), PInfinity)} + } + if i.Lower.Sign() == -1 { + i.Lower = NewZ(0) + } + return ChannelInterval{i} +} +func (c *ChannelChangeTypeConstraint) Eval(g *Graph) Range { return g.Range(c.X) } diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/int.go b/vendor/honnef.co/go/tools/staticcheck/vrp/int.go new file mode 100644 index 0000000..926bb7a --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/vrp/int.go @@ -0,0 +1,476 @@ +package vrp + +import ( + "fmt" + "go/token" + "go/types" + "math/big" + + "honnef.co/go/tools/ssa" +) + +type Zs []Z + +func (zs Zs) Len() int { + return len(zs) +} + +func (zs Zs) Less(i int, j int) bool { + return zs[i].Cmp(zs[j]) == -1 +} + +func (zs Zs) Swap(i int, j int) { + zs[i], zs[j] = zs[j], zs[i] +} + +type Z struct { + infinity int8 + integer *big.Int +} + 
+func NewZ(n int64) Z { + return NewBigZ(big.NewInt(n)) +} + +func NewBigZ(n *big.Int) Z { + return Z{integer: n} +} + +func (z1 Z) Infinite() bool { + return z1.infinity != 0 +} + +func (z1 Z) Add(z2 Z) Z { + if z2.Sign() == -1 { + return z1.Sub(z2.Negate()) + } + if z1 == NInfinity { + return NInfinity + } + if z1 == PInfinity { + return PInfinity + } + if z2 == PInfinity { + return PInfinity + } + + if !z1.Infinite() && !z2.Infinite() { + n := &big.Int{} + n.Add(z1.integer, z2.integer) + return NewBigZ(n) + } + + panic(fmt.Sprintf("%s + %s is not defined", z1, z2)) +} + +func (z1 Z) Sub(z2 Z) Z { + if z2.Sign() == -1 { + return z1.Add(z2.Negate()) + } + if !z1.Infinite() && !z2.Infinite() { + n := &big.Int{} + n.Sub(z1.integer, z2.integer) + return NewBigZ(n) + } + + if z1 != PInfinity && z2 == PInfinity { + return NInfinity + } + if z1.Infinite() && !z2.Infinite() { + return Z{infinity: z1.infinity} + } + if z1 == PInfinity && z2 == PInfinity { + return PInfinity + } + panic(fmt.Sprintf("%s - %s is not defined", z1, z2)) +} + +func (z1 Z) Mul(z2 Z) Z { + if (z1.integer != nil && z1.integer.Sign() == 0) || + (z2.integer != nil && z2.integer.Sign() == 0) { + return NewBigZ(&big.Int{}) + } + + if z1.infinity != 0 || z2.infinity != 0 { + return Z{infinity: int8(z1.Sign() * z2.Sign())} + } + + n := &big.Int{} + n.Mul(z1.integer, z2.integer) + return NewBigZ(n) +} + +func (z1 Z) Negate() Z { + if z1.infinity == 1 { + return NInfinity + } + if z1.infinity == -1 { + return PInfinity + } + n := &big.Int{} + n.Neg(z1.integer) + return NewBigZ(n) +} + +func (z1 Z) Sign() int { + if z1.infinity != 0 { + return int(z1.infinity) + } + return z1.integer.Sign() +} + +func (z1 Z) String() string { + if z1 == NInfinity { + return "-∞" + } + if z1 == PInfinity { + return "∞" + } + return fmt.Sprintf("%d", z1.integer) +} + +func (z1 Z) Cmp(z2 Z) int { + if z1.infinity == z2.infinity && z1.infinity != 0 { + return 0 + } + if z1 == PInfinity { + return 1 + } + if z1 == NInfinity { + return -1 + } + if z2 == NInfinity { + return 1 + } + if z2 == PInfinity { + return -1 + } + return z1.integer.Cmp(z2.integer) +} + +func MaxZ(zs ...Z) Z { + if len(zs) == 0 { + panic("Max called with no arguments") + } + if len(zs) == 1 { + return zs[0] + } + ret := zs[0] + for _, z := range zs[1:] { + if z.Cmp(ret) == 1 { + ret = z + } + } + return ret +} + +func MinZ(zs ...Z) Z { + if len(zs) == 0 { + panic("Min called with no arguments") + } + if len(zs) == 1 { + return zs[0] + } + ret := zs[0] + for _, z := range zs[1:] { + if z.Cmp(ret) == -1 { + ret = z + } + } + return ret +} + +var NInfinity = Z{infinity: -1} +var PInfinity = Z{infinity: 1} +var EmptyIntInterval = IntInterval{true, PInfinity, NInfinity} + +func InfinityFor(v ssa.Value) IntInterval { + if b, ok := v.Type().Underlying().(*types.Basic); ok { + if (b.Info() & types.IsUnsigned) != 0 { + return NewIntInterval(NewZ(0), PInfinity) + } + } + return NewIntInterval(NInfinity, PInfinity) +} + +type IntInterval struct { + known bool + Lower Z + Upper Z +} + +func NewIntInterval(l, u Z) IntInterval { + if u.Cmp(l) == -1 { + return EmptyIntInterval + } + return IntInterval{known: true, Lower: l, Upper: u} +} + +func (i IntInterval) IsKnown() bool { + return i.known +} + +func (i IntInterval) Empty() bool { + return i.Lower == PInfinity && i.Upper == NInfinity +} + +func (i IntInterval) IsMaxRange() bool { + return i.Lower == NInfinity && i.Upper == PInfinity +} + +func (i1 IntInterval) Intersection(i2 IntInterval) IntInterval { + if !i1.IsKnown() { + return i2 + } + 
if !i2.IsKnown() { + return i1 + } + if i1.Empty() || i2.Empty() { + return EmptyIntInterval + } + i3 := NewIntInterval(MaxZ(i1.Lower, i2.Lower), MinZ(i1.Upper, i2.Upper)) + if i3.Lower.Cmp(i3.Upper) == 1 { + return EmptyIntInterval + } + return i3 +} + +func (i1 IntInterval) Union(other Range) Range { + i2, ok := other.(IntInterval) + if !ok { + i2 = EmptyIntInterval + } + if i1.Empty() || !i1.IsKnown() { + return i2 + } + if i2.Empty() || !i2.IsKnown() { + return i1 + } + return NewIntInterval(MinZ(i1.Lower, i2.Lower), MaxZ(i1.Upper, i2.Upper)) +} + +func (i1 IntInterval) Add(i2 IntInterval) IntInterval { + if i1.Empty() || i2.Empty() { + return EmptyIntInterval + } + l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper + return NewIntInterval(l1.Add(l2), u1.Add(u2)) +} + +func (i1 IntInterval) Sub(i2 IntInterval) IntInterval { + if i1.Empty() || i2.Empty() { + return EmptyIntInterval + } + l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper + return NewIntInterval(l1.Sub(u2), u1.Sub(l2)) +} + +func (i1 IntInterval) Mul(i2 IntInterval) IntInterval { + if i1.Empty() || i2.Empty() { + return EmptyIntInterval + } + x1, x2 := i1.Lower, i1.Upper + y1, y2 := i2.Lower, i2.Upper + return NewIntInterval( + MinZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)), + MaxZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)), + ) +} + +func (i1 IntInterval) String() string { + if !i1.IsKnown() { + return "[⊥, ⊥]" + } + if i1.Empty() { + return "{}" + } + return fmt.Sprintf("[%s, %s]", i1.Lower, i1.Upper) +} + +type IntArithmeticConstraint struct { + aConstraint + A ssa.Value + B ssa.Value + Op token.Token + Fn func(IntInterval, IntInterval) IntInterval +} + +type IntAddConstraint struct{ *IntArithmeticConstraint } +type IntSubConstraint struct{ *IntArithmeticConstraint } +type IntMulConstraint struct{ *IntArithmeticConstraint } + +type IntConversionConstraint struct { + aConstraint + X ssa.Value +} + +type IntIntersectionConstraint struct { + aConstraint + ranges Ranges + A ssa.Value + B ssa.Value + Op token.Token + I IntInterval + resolved bool +} + +type IntIntervalConstraint struct { + aConstraint + I IntInterval +} + +func NewIntArithmeticConstraint(a, b, y ssa.Value, op token.Token, fn func(IntInterval, IntInterval) IntInterval) *IntArithmeticConstraint { + return &IntArithmeticConstraint{NewConstraint(y), a, b, op, fn} +} +func NewIntAddConstraint(a, b, y ssa.Value) Constraint { + return &IntAddConstraint{NewIntArithmeticConstraint(a, b, y, token.ADD, IntInterval.Add)} +} +func NewIntSubConstraint(a, b, y ssa.Value) Constraint { + return &IntSubConstraint{NewIntArithmeticConstraint(a, b, y, token.SUB, IntInterval.Sub)} +} +func NewIntMulConstraint(a, b, y ssa.Value) Constraint { + return &IntMulConstraint{NewIntArithmeticConstraint(a, b, y, token.MUL, IntInterval.Mul)} +} +func NewIntConversionConstraint(x, y ssa.Value) Constraint { + return &IntConversionConstraint{NewConstraint(y), x} +} +func NewIntIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint { + return &IntIntersectionConstraint{ + aConstraint: NewConstraint(y), + ranges: ranges, + A: a, + B: b, + Op: op, + } +} +func NewIntIntervalConstraint(i IntInterval, y ssa.Value) Constraint { + return &IntIntervalConstraint{NewConstraint(y), i} +} + +func (c *IntArithmeticConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} } +func (c *IntConversionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } +func (c *IntIntersectionConstraint) Operands() []ssa.Value { return 
[]ssa.Value{c.A} } +func (s *IntIntervalConstraint) Operands() []ssa.Value { return nil } + +func (c *IntArithmeticConstraint) String() string { + return fmt.Sprintf("%s = %s %s %s", c.Y().Name(), c.A.Name(), c.Op, c.B.Name()) +} +func (c *IntConversionConstraint) String() string { + return fmt.Sprintf("%s = %s(%s)", c.Y().Name(), c.Y().Type(), c.X.Name()) +} +func (c *IntIntersectionConstraint) String() string { + return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch) +} +func (c *IntIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) } + +func (c *IntArithmeticConstraint) Eval(g *Graph) Range { + i1, i2 := g.Range(c.A).(IntInterval), g.Range(c.B).(IntInterval) + if !i1.IsKnown() || !i2.IsKnown() { + return IntInterval{} + } + return c.Fn(i1, i2) +} +func (c *IntConversionConstraint) Eval(g *Graph) Range { + s := &types.StdSizes{ + // XXX is it okay to assume the largest word size, or do we + // need to be platform specific? + WordSize: 8, + MaxAlign: 1, + } + fromI := g.Range(c.X).(IntInterval) + toI := g.Range(c.Y()).(IntInterval) + fromT := c.X.Type().Underlying().(*types.Basic) + toT := c.Y().Type().Underlying().(*types.Basic) + fromB := s.Sizeof(c.X.Type()) + toB := s.Sizeof(c.Y().Type()) + + if !fromI.IsKnown() { + return toI + } + if !toI.IsKnown() { + return fromI + } + + // uint -> sint/uint, M > N: [max(0, l1), min(2**N-1, u2)] + if (fromT.Info()&types.IsUnsigned != 0) && + toB > fromB { + + n := big.NewInt(1) + n.Lsh(n, uint(fromB*8)) + n.Sub(n, big.NewInt(1)) + return NewIntInterval( + MaxZ(NewZ(0), fromI.Lower), + MinZ(NewBigZ(n), toI.Upper), + ) + } + + // sint -> sint, M > N; [max(-∞, l1), min(2**N-1, u2)] + if (fromT.Info()&types.IsUnsigned == 0) && + (toT.Info()&types.IsUnsigned == 0) && + toB > fromB { + + n := big.NewInt(1) + n.Lsh(n, uint(fromB*8)) + n.Sub(n, big.NewInt(1)) + return NewIntInterval( + MaxZ(NInfinity, fromI.Lower), + MinZ(NewBigZ(n), toI.Upper), + ) + } + + return fromI +} +func (c *IntIntersectionConstraint) Eval(g *Graph) Range { + xi := g.Range(c.A).(IntInterval) + if !xi.IsKnown() { + return c.I + } + return xi.Intersection(c.I) +} +func (c *IntIntervalConstraint) Eval(*Graph) Range { return c.I } + +func (c *IntIntersectionConstraint) Futures() []ssa.Value { + return []ssa.Value{c.B} +} + +func (c *IntIntersectionConstraint) Resolve() { + r, ok := c.ranges[c.B].(IntInterval) + if !ok { + c.I = InfinityFor(c.Y()) + return + } + + switch c.Op { + case token.EQL: + c.I = r + case token.GTR: + c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity) + case token.GEQ: + c.I = NewIntInterval(r.Lower, PInfinity) + case token.LSS: + // TODO(dh): do we need 0 instead of NInfinity for uints? 
+ c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1))) + case token.LEQ: + c.I = NewIntInterval(NInfinity, r.Upper) + case token.NEQ: + c.I = InfinityFor(c.Y()) + default: + panic("unsupported op " + c.Op.String()) + } +} + +func (c *IntIntersectionConstraint) IsKnown() bool { + return c.I.IsKnown() +} + +func (c *IntIntersectionConstraint) MarkUnresolved() { + c.resolved = false +} + +func (c *IntIntersectionConstraint) MarkResolved() { + c.resolved = true +} + +func (c *IntIntersectionConstraint) IsResolved() bool { + return c.resolved +} diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go b/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go new file mode 100644 index 0000000..40658dd --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go @@ -0,0 +1,273 @@ +package vrp + +// TODO(dh): most of the constraints have implementations identical to +// that of strings. Consider reusing them. + +import ( + "fmt" + "go/types" + + "honnef.co/go/tools/ssa" +) + +type SliceInterval struct { + Length IntInterval +} + +func (s SliceInterval) Union(other Range) Range { + i, ok := other.(SliceInterval) + if !ok { + i = SliceInterval{EmptyIntInterval} + } + if s.Length.Empty() || !s.Length.IsKnown() { + return i + } + if i.Length.Empty() || !i.Length.IsKnown() { + return s + } + return SliceInterval{ + Length: s.Length.Union(i.Length).(IntInterval), + } +} +func (s SliceInterval) String() string { return s.Length.String() } +func (s SliceInterval) IsKnown() bool { return s.Length.IsKnown() } + +type SliceAppendConstraint struct { + aConstraint + A ssa.Value + B ssa.Value +} + +type SliceSliceConstraint struct { + aConstraint + X ssa.Value + Lower ssa.Value + Upper ssa.Value +} + +type ArraySliceConstraint struct { + aConstraint + X ssa.Value + Lower ssa.Value + Upper ssa.Value +} + +type SliceIntersectionConstraint struct { + aConstraint + X ssa.Value + I IntInterval +} + +type SliceLengthConstraint struct { + aConstraint + X ssa.Value +} + +type MakeSliceConstraint struct { + aConstraint + Size ssa.Value +} + +type SliceIntervalConstraint struct { + aConstraint + I IntInterval +} + +func NewSliceAppendConstraint(a, b, y ssa.Value) Constraint { + return &SliceAppendConstraint{NewConstraint(y), a, b} +} +func NewSliceSliceConstraint(x, lower, upper, y ssa.Value) Constraint { + return &SliceSliceConstraint{NewConstraint(y), x, lower, upper} +} +func NewArraySliceConstraint(x, lower, upper, y ssa.Value) Constraint { + return &ArraySliceConstraint{NewConstraint(y), x, lower, upper} +} +func NewSliceIntersectionConstraint(x ssa.Value, i IntInterval, y ssa.Value) Constraint { + return &SliceIntersectionConstraint{NewConstraint(y), x, i} +} +func NewSliceLengthConstraint(x, y ssa.Value) Constraint { + return &SliceLengthConstraint{NewConstraint(y), x} +} +func NewMakeSliceConstraint(size, y ssa.Value) Constraint { + return &MakeSliceConstraint{NewConstraint(y), size} +} +func NewSliceIntervalConstraint(i IntInterval, y ssa.Value) Constraint { + return &SliceIntervalConstraint{NewConstraint(y), i} +} + +func (c *SliceAppendConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} } +func (c *SliceSliceConstraint) Operands() []ssa.Value { + ops := []ssa.Value{c.X} + if c.Lower != nil { + ops = append(ops, c.Lower) + } + if c.Upper != nil { + ops = append(ops, c.Upper) + } + return ops +} +func (c *ArraySliceConstraint) Operands() []ssa.Value { + ops := []ssa.Value{c.X} + if c.Lower != nil { + ops = append(ops, c.Lower) + } + if c.Upper != nil { + ops = append(ops, c.Upper) + } 
+ return ops +} +func (c *SliceIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } +func (c *SliceLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } +func (c *MakeSliceConstraint) Operands() []ssa.Value { return []ssa.Value{c.Size} } +func (s *SliceIntervalConstraint) Operands() []ssa.Value { return nil } + +func (c *SliceAppendConstraint) String() string { + return fmt.Sprintf("%s = append(%s, %s)", c.Y().Name(), c.A.Name(), c.B.Name()) +} +func (c *SliceSliceConstraint) String() string { + var lname, uname string + if c.Lower != nil { + lname = c.Lower.Name() + } + if c.Upper != nil { + uname = c.Upper.Name() + } + return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname) +} +func (c *ArraySliceConstraint) String() string { + var lname, uname string + if c.Lower != nil { + lname = c.Lower.Name() + } + if c.Upper != nil { + uname = c.Upper.Name() + } + return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname) +} +func (c *SliceIntersectionConstraint) String() string { + return fmt.Sprintf("%s = %s.%t ⊓ %s", c.Y().Name(), c.X.Name(), c.Y().(*ssa.Sigma).Branch, c.I) +} +func (c *SliceLengthConstraint) String() string { + return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name()) +} +func (c *MakeSliceConstraint) String() string { + return fmt.Sprintf("%s = make(slice, %s)", c.Y().Name(), c.Size.Name()) +} +func (c *SliceIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) } + +func (c *SliceAppendConstraint) Eval(g *Graph) Range { + l1 := g.Range(c.A).(SliceInterval).Length + var l2 IntInterval + switch r := g.Range(c.B).(type) { + case SliceInterval: + l2 = r.Length + case StringInterval: + l2 = r.Length + default: + return SliceInterval{} + } + if !l1.IsKnown() || !l2.IsKnown() { + return SliceInterval{} + } + return SliceInterval{ + Length: l1.Add(l2), + } +} +func (c *SliceSliceConstraint) Eval(g *Graph) Range { + lr := NewIntInterval(NewZ(0), NewZ(0)) + if c.Lower != nil { + lr = g.Range(c.Lower).(IntInterval) + } + ur := g.Range(c.X).(SliceInterval).Length + if c.Upper != nil { + ur = g.Range(c.Upper).(IntInterval) + } + if !lr.IsKnown() || !ur.IsKnown() { + return SliceInterval{} + } + + ls := []Z{ + ur.Lower.Sub(lr.Lower), + ur.Upper.Sub(lr.Lower), + ur.Lower.Sub(lr.Upper), + ur.Upper.Sub(lr.Upper), + } + // TODO(dh): if we don't truncate lengths to 0 we might be able to + // easily detect slices with high < low. we'd need to treat -∞ + // specially, though. + for i, l := range ls { + if l.Sign() == -1 { + ls[i] = NewZ(0) + } + } + + return SliceInterval{ + Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)), + } +} +func (c *ArraySliceConstraint) Eval(g *Graph) Range { + lr := NewIntInterval(NewZ(0), NewZ(0)) + if c.Lower != nil { + lr = g.Range(c.Lower).(IntInterval) + } + var l int64 + switch typ := c.X.Type().(type) { + case *types.Array: + l = typ.Len() + case *types.Pointer: + l = typ.Elem().(*types.Array).Len() + } + ur := NewIntInterval(NewZ(l), NewZ(l)) + if c.Upper != nil { + ur = g.Range(c.Upper).(IntInterval) + } + if !lr.IsKnown() || !ur.IsKnown() { + return SliceInterval{} + } + + ls := []Z{ + ur.Lower.Sub(lr.Lower), + ur.Upper.Sub(lr.Lower), + ur.Lower.Sub(lr.Upper), + ur.Upper.Sub(lr.Upper), + } + // TODO(dh): if we don't truncate lengths to 0 we might be able to + // easily detect slices with high < low. we'd need to treat -∞ + // specially, though. 
+ for i, l := range ls { + if l.Sign() == -1 { + ls[i] = NewZ(0) + } + } + + return SliceInterval{ + Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)), + } +} +func (c *SliceIntersectionConstraint) Eval(g *Graph) Range { + xi := g.Range(c.X).(SliceInterval) + if !xi.IsKnown() { + return c.I + } + return SliceInterval{ + Length: xi.Length.Intersection(c.I), + } +} +func (c *SliceLengthConstraint) Eval(g *Graph) Range { + i := g.Range(c.X).(SliceInterval).Length + if !i.IsKnown() { + return NewIntInterval(NewZ(0), PInfinity) + } + return i +} +func (c *MakeSliceConstraint) Eval(g *Graph) Range { + i, ok := g.Range(c.Size).(IntInterval) + if !ok { + return SliceInterval{NewIntInterval(NewZ(0), PInfinity)} + } + if i.Lower.Sign() == -1 { + i.Lower = NewZ(0) + } + return SliceInterval{i} +} +func (c *SliceIntervalConstraint) Eval(*Graph) Range { return SliceInterval{c.I} } diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/string.go b/vendor/honnef.co/go/tools/staticcheck/vrp/string.go new file mode 100644 index 0000000..e05877f --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/vrp/string.go @@ -0,0 +1,258 @@ +package vrp + +import ( + "fmt" + "go/token" + "go/types" + + "honnef.co/go/tools/ssa" +) + +type StringInterval struct { + Length IntInterval +} + +func (s StringInterval) Union(other Range) Range { + i, ok := other.(StringInterval) + if !ok { + i = StringInterval{EmptyIntInterval} + } + if s.Length.Empty() || !s.Length.IsKnown() { + return i + } + if i.Length.Empty() || !i.Length.IsKnown() { + return s + } + return StringInterval{ + Length: s.Length.Union(i.Length).(IntInterval), + } +} + +func (s StringInterval) String() string { + return s.Length.String() +} + +func (s StringInterval) IsKnown() bool { + return s.Length.IsKnown() +} + +type StringSliceConstraint struct { + aConstraint + X ssa.Value + Lower ssa.Value + Upper ssa.Value +} + +type StringIntersectionConstraint struct { + aConstraint + ranges Ranges + A ssa.Value + B ssa.Value + Op token.Token + I IntInterval + resolved bool +} + +type StringConcatConstraint struct { + aConstraint + A ssa.Value + B ssa.Value +} + +type StringLengthConstraint struct { + aConstraint + X ssa.Value +} + +type StringIntervalConstraint struct { + aConstraint + I IntInterval +} + +func NewStringSliceConstraint(x, lower, upper, y ssa.Value) Constraint { + return &StringSliceConstraint{NewConstraint(y), x, lower, upper} +} +func NewStringIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint { + return &StringIntersectionConstraint{ + aConstraint: NewConstraint(y), + ranges: ranges, + A: a, + B: b, + Op: op, + } +} +func NewStringConcatConstraint(a, b, y ssa.Value) Constraint { + return &StringConcatConstraint{NewConstraint(y), a, b} +} +func NewStringLengthConstraint(x ssa.Value, y ssa.Value) Constraint { + return &StringLengthConstraint{NewConstraint(y), x} +} +func NewStringIntervalConstraint(i IntInterval, y ssa.Value) Constraint { + return &StringIntervalConstraint{NewConstraint(y), i} +} + +func (c *StringSliceConstraint) Operands() []ssa.Value { + vs := []ssa.Value{c.X} + if c.Lower != nil { + vs = append(vs, c.Lower) + } + if c.Upper != nil { + vs = append(vs, c.Upper) + } + return vs +} +func (c *StringIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} } +func (c StringConcatConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} } +func (c *StringLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } +func (s *StringIntervalConstraint) 
Operands() []ssa.Value { return nil } + +func (c *StringSliceConstraint) String() string { + var lname, uname string + if c.Lower != nil { + lname = c.Lower.Name() + } + if c.Upper != nil { + uname = c.Upper.Name() + } + return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname) +} +func (c *StringIntersectionConstraint) String() string { + return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch) +} +func (c StringConcatConstraint) String() string { + return fmt.Sprintf("%s = %s + %s", c.Y().Name(), c.A.Name(), c.B.Name()) +} +func (c *StringLengthConstraint) String() string { + return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name()) +} +func (c *StringIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) } + +func (c *StringSliceConstraint) Eval(g *Graph) Range { + lr := NewIntInterval(NewZ(0), NewZ(0)) + if c.Lower != nil { + lr = g.Range(c.Lower).(IntInterval) + } + ur := g.Range(c.X).(StringInterval).Length + if c.Upper != nil { + ur = g.Range(c.Upper).(IntInterval) + } + if !lr.IsKnown() || !ur.IsKnown() { + return StringInterval{} + } + + ls := []Z{ + ur.Lower.Sub(lr.Lower), + ur.Upper.Sub(lr.Lower), + ur.Lower.Sub(lr.Upper), + ur.Upper.Sub(lr.Upper), + } + // TODO(dh): if we don't truncate lengths to 0 we might be able to + // easily detect slices with high < low. we'd need to treat -∞ + // specially, though. + for i, l := range ls { + if l.Sign() == -1 { + ls[i] = NewZ(0) + } + } + + return StringInterval{ + Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)), + } +} +func (c *StringIntersectionConstraint) Eval(g *Graph) Range { + var l IntInterval + switch r := g.Range(c.A).(type) { + case StringInterval: + l = r.Length + case IntInterval: + l = r + } + + if !l.IsKnown() { + return StringInterval{c.I} + } + return StringInterval{ + Length: l.Intersection(c.I), + } +} +func (c StringConcatConstraint) Eval(g *Graph) Range { + i1, i2 := g.Range(c.A).(StringInterval), g.Range(c.B).(StringInterval) + if !i1.Length.IsKnown() || !i2.Length.IsKnown() { + return StringInterval{} + } + return StringInterval{ + Length: i1.Length.Add(i2.Length), + } +} +func (c *StringLengthConstraint) Eval(g *Graph) Range { + i := g.Range(c.X).(StringInterval).Length + if !i.IsKnown() { + return NewIntInterval(NewZ(0), PInfinity) + } + return i +} +func (c *StringIntervalConstraint) Eval(*Graph) Range { return StringInterval{c.I} } + +func (c *StringIntersectionConstraint) Futures() []ssa.Value { + return []ssa.Value{c.B} +} + +func (c *StringIntersectionConstraint) Resolve() { + if (c.A.Type().Underlying().(*types.Basic).Info() & types.IsString) != 0 { + // comparing two strings + r, ok := c.ranges[c.B].(StringInterval) + if !ok { + c.I = NewIntInterval(NewZ(0), PInfinity) + return + } + switch c.Op { + case token.EQL: + c.I = r.Length + case token.GTR, token.GEQ: + c.I = NewIntInterval(r.Length.Lower, PInfinity) + case token.LSS, token.LEQ: + c.I = NewIntInterval(NewZ(0), r.Length.Upper) + case token.NEQ: + default: + panic("unsupported op " + c.Op.String()) + } + } else { + r, ok := c.ranges[c.B].(IntInterval) + if !ok { + c.I = NewIntInterval(NewZ(0), PInfinity) + return + } + // comparing two lengths + switch c.Op { + case token.EQL: + c.I = r + case token.GTR: + c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity) + case token.GEQ: + c.I = NewIntInterval(r.Lower, PInfinity) + case token.LSS: + c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1))) + case token.LEQ: + c.I = NewIntInterval(NInfinity, r.Upper) 
+ case token.NEQ: + default: + panic("unsupported op " + c.Op.String()) + } + } +} + +func (c *StringIntersectionConstraint) IsKnown() bool { + return c.I.IsKnown() +} + +func (c *StringIntersectionConstraint) MarkUnresolved() { + c.resolved = false +} + +func (c *StringIntersectionConstraint) MarkResolved() { + c.resolved = true +} + +func (c *StringIntersectionConstraint) IsResolved() bool { + return c.resolved +} diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go new file mode 100644 index 0000000..cb17f04 --- /dev/null +++ b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go @@ -0,0 +1,1049 @@ +package vrp + +// TODO(dh) widening and narrowing have a lot of code in common. Make +// it reusable. + +import ( + "fmt" + "go/constant" + "go/token" + "go/types" + "math/big" + "sort" + "strings" + + "honnef.co/go/tools/ssa" +) + +type Future interface { + Constraint + Futures() []ssa.Value + Resolve() + IsKnown() bool + MarkUnresolved() + MarkResolved() + IsResolved() bool +} + +type Range interface { + Union(other Range) Range + IsKnown() bool +} + +type Constraint interface { + Y() ssa.Value + isConstraint() + String() string + Eval(*Graph) Range + Operands() []ssa.Value +} + +type aConstraint struct { + y ssa.Value +} + +func NewConstraint(y ssa.Value) aConstraint { + return aConstraint{y} +} + +func (aConstraint) isConstraint() {} +func (c aConstraint) Y() ssa.Value { return c.y } + +type PhiConstraint struct { + aConstraint + Vars []ssa.Value +} + +func NewPhiConstraint(vars []ssa.Value, y ssa.Value) Constraint { + uniqm := map[ssa.Value]struct{}{} + for _, v := range vars { + uniqm[v] = struct{}{} + } + var uniq []ssa.Value + for v := range uniqm { + uniq = append(uniq, v) + } + return &PhiConstraint{ + aConstraint: NewConstraint(y), + Vars: uniq, + } +} + +func (c *PhiConstraint) Operands() []ssa.Value { + return c.Vars +} + +func (c *PhiConstraint) Eval(g *Graph) Range { + i := Range(nil) + for _, v := range c.Vars { + i = g.Range(v).Union(i) + } + return i +} + +func (c *PhiConstraint) String() string { + names := make([]string, len(c.Vars)) + for i, v := range c.Vars { + names[i] = v.Name() + } + return fmt.Sprintf("%s = φ(%s)", c.Y().Name(), strings.Join(names, ", ")) +} + +func isSupportedType(typ types.Type) bool { + switch typ := typ.Underlying().(type) { + case *types.Basic: + switch typ.Kind() { + case types.String, types.UntypedString: + return true + default: + if (typ.Info() & types.IsInteger) == 0 { + return false + } + } + case *types.Chan: + return true + case *types.Slice: + return true + default: + return false + } + return true +} + +func ConstantToZ(c constant.Value) Z { + s := constant.ToInt(c).ExactString() + n := &big.Int{} + n.SetString(s, 10) + return NewBigZ(n) +} + +func sigmaInteger(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint { + op := cond.Op + if !ins.Branch { + op = (invertToken(op)) + } + + switch op { + case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ: + default: + return nil + } + var a, b ssa.Value + if (*ops[0]) == ins.X { + a = *ops[0] + b = *ops[1] + } else { + a = *ops[1] + b = *ops[0] + op = flipToken(op) + } + return NewIntIntersectionConstraint(a, b, op, g.ranges, ins) +} + +func sigmaString(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint { + op := cond.Op + if !ins.Branch { + op = (invertToken(op)) + } + + switch op { + case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ: + default: + return nil + } + + if 
((*ops[0]).Type().Underlying().(*types.Basic).Info() & types.IsString) == 0 { + var a, b ssa.Value + call, ok := (*ops[0]).(*ssa.Call) + if ok && call.Common().Args[0] == ins.X { + a = *ops[0] + b = *ops[1] + } else { + a = *ops[1] + b = *ops[0] + op = flipToken(op) + } + return NewStringIntersectionConstraint(a, b, op, g.ranges, ins) + } + var a, b ssa.Value + if (*ops[0]) == ins.X { + a = *ops[0] + b = *ops[1] + } else { + a = *ops[1] + b = *ops[0] + op = flipToken(op) + } + return NewStringIntersectionConstraint(a, b, op, g.ranges, ins) +} + +func sigmaSlice(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint { + // TODO(dh) sigmaSlice and sigmaString are a lot alike. Can they + // be merged? + // + // XXX support futures + + op := cond.Op + if !ins.Branch { + op = (invertToken(op)) + } + + k, ok := (*ops[1]).(*ssa.Const) + // XXX investigate in what cases this wouldn't be a Const + // + // XXX what if left and right are swapped? + if !ok { + return nil + } + + call, ok := (*ops[0]).(*ssa.Call) + if !ok { + return nil + } + builtin, ok := call.Common().Value.(*ssa.Builtin) + if !ok { + return nil + } + if builtin.Name() != "len" { + return nil + } + callops := call.Operands(nil) + + v := ConstantToZ(k.Value) + c := NewSliceIntersectionConstraint(*callops[1], IntInterval{}, ins).(*SliceIntersectionConstraint) + switch op { + case token.EQL: + c.I = NewIntInterval(v, v) + case token.GTR, token.GEQ: + off := int64(0) + if cond.Op == token.GTR { + off = 1 + } + c.I = NewIntInterval( + v.Add(NewZ(off)), + PInfinity, + ) + case token.LSS, token.LEQ: + off := int64(0) + if cond.Op == token.LSS { + off = -1 + } + c.I = NewIntInterval( + NInfinity, + v.Add(NewZ(off)), + ) + default: + return nil + } + return c +} + +func BuildGraph(f *ssa.Function) *Graph { + g := &Graph{ + Vertices: map[interface{}]*Vertex{}, + ranges: Ranges{}, + } + + var cs []Constraint + + ops := make([]*ssa.Value, 16) + seen := map[ssa.Value]bool{} + for _, block := range f.Blocks { + for _, ins := range block.Instrs { + ops = ins.Operands(ops[:0]) + for _, op := range ops { + if c, ok := (*op).(*ssa.Const); ok { + if seen[c] { + continue + } + seen[c] = true + if c.Value == nil { + switch c.Type().Underlying().(type) { + case *types.Slice: + cs = append(cs, NewSliceIntervalConstraint(NewIntInterval(NewZ(0), NewZ(0)), c)) + } + continue + } + switch c.Value.Kind() { + case constant.Int: + v := ConstantToZ(c.Value) + cs = append(cs, NewIntIntervalConstraint(NewIntInterval(v, v), c)) + case constant.String: + s := constant.StringVal(c.Value) + n := NewZ(int64(len(s))) + cs = append(cs, NewStringIntervalConstraint(NewIntInterval(n, n), c)) + } + } + } + } + } + for _, block := range f.Blocks { + for _, ins := range block.Instrs { + switch ins := ins.(type) { + case *ssa.Convert: + switch v := ins.Type().Underlying().(type) { + case *types.Basic: + if (v.Info() & types.IsInteger) == 0 { + continue + } + cs = append(cs, NewIntConversionConstraint(ins.X, ins)) + } + case *ssa.Call: + if static := ins.Common().StaticCallee(); static != nil { + if fn, ok := static.Object().(*types.Func); ok { + switch fn.FullName() { + case "bytes.Index", "bytes.IndexAny", "bytes.IndexByte", + "bytes.IndexFunc", "bytes.IndexRune", "bytes.LastIndex", + "bytes.LastIndexAny", "bytes.LastIndexByte", "bytes.LastIndexFunc", + "strings.Index", "strings.IndexAny", "strings.IndexByte", + "strings.IndexFunc", "strings.IndexRune", "strings.LastIndex", + "strings.LastIndexAny", "strings.LastIndexByte", "strings.LastIndexFunc": + // 
TODO(dh): instead of limiting by +∞, + // limit by the upper bound of the passed + // string + cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), PInfinity), ins)) + case "bytes.Title", "bytes.ToLower", "bytes.ToTitle", "bytes.ToUpper", + "strings.Title", "strings.ToLower", "strings.ToTitle", "strings.ToUpper": + cs = append(cs, NewCopyConstraint(ins.Common().Args[0], ins)) + case "bytes.ToLowerSpecial", "bytes.ToTitleSpecial", "bytes.ToUpperSpecial", + "strings.ToLowerSpecial", "strings.ToTitleSpecial", "strings.ToUpperSpecial": + cs = append(cs, NewCopyConstraint(ins.Common().Args[1], ins)) + case "bytes.Compare", "strings.Compare": + cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), NewZ(1)), ins)) + case "bytes.Count", "strings.Count": + // TODO(dh): instead of limiting by +∞, + // limit by the upper bound of the passed + // string. + cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins)) + case "bytes.Map", "bytes.TrimFunc", "bytes.TrimLeft", "bytes.TrimLeftFunc", + "bytes.TrimRight", "bytes.TrimRightFunc", "bytes.TrimSpace", + "strings.Map", "strings.TrimFunc", "strings.TrimLeft", "strings.TrimLeftFunc", + "strings.TrimRight", "strings.TrimRightFunc", "strings.TrimSpace": + // TODO(dh): lower = 0, upper = upper of passed string + case "bytes.TrimPrefix", "bytes.TrimSuffix", + "strings.TrimPrefix", "strings.TrimSuffix": + // TODO(dh) range between "unmodified" and len(cutset) removed + case "(*bytes.Buffer).Cap", "(*bytes.Buffer).Len", "(*bytes.Reader).Len", "(*bytes.Reader).Size": + cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins)) + } + } + } + builtin, ok := ins.Common().Value.(*ssa.Builtin) + ops := ins.Operands(nil) + if !ok { + continue + } + switch builtin.Name() { + case "len": + switch op1 := (*ops[1]).Type().Underlying().(type) { + case *types.Basic: + if op1.Kind() == types.String || op1.Kind() == types.UntypedString { + cs = append(cs, NewStringLengthConstraint(*ops[1], ins)) + } + case *types.Slice: + cs = append(cs, NewSliceLengthConstraint(*ops[1], ins)) + } + + case "append": + cs = append(cs, NewSliceAppendConstraint(ins.Common().Args[0], ins.Common().Args[1], ins)) + } + case *ssa.BinOp: + ops := ins.Operands(nil) + basic, ok := (*ops[0]).Type().Underlying().(*types.Basic) + if !ok { + continue + } + switch basic.Kind() { + case types.Int, types.Int8, types.Int16, types.Int32, types.Int64, + types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt: + fns := map[token.Token]func(ssa.Value, ssa.Value, ssa.Value) Constraint{ + token.ADD: NewIntAddConstraint, + token.SUB: NewIntSubConstraint, + token.MUL: NewIntMulConstraint, + // XXX support QUO, REM, SHL, SHR + } + fn, ok := fns[ins.Op] + if ok { + cs = append(cs, fn(*ops[0], *ops[1], ins)) + } + case types.String, types.UntypedString: + if ins.Op == token.ADD { + cs = append(cs, NewStringConcatConstraint(*ops[0], *ops[1], ins)) + } + } + case *ssa.Slice: + typ := ins.X.Type().Underlying() + switch typ := typ.(type) { + case *types.Basic: + cs = append(cs, NewStringSliceConstraint(ins.X, ins.Low, ins.High, ins)) + case *types.Slice: + cs = append(cs, NewSliceSliceConstraint(ins.X, ins.Low, ins.High, ins)) + case *types.Array: + cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins)) + case *types.Pointer: + if _, ok := typ.Elem().(*types.Array); !ok { + continue + } + cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins)) + } + case *ssa.Phi: + if 
!isSupportedType(ins.Type()) { + continue + } + ops := ins.Operands(nil) + dops := make([]ssa.Value, len(ops)) + for i, op := range ops { + dops[i] = *op + } + cs = append(cs, NewPhiConstraint(dops, ins)) + case *ssa.Sigma: + pred := ins.Block().Preds[0] + instrs := pred.Instrs + cond, ok := instrs[len(instrs)-1].(*ssa.If).Cond.(*ssa.BinOp) + ops := cond.Operands(nil) + if !ok { + continue + } + switch typ := ins.Type().Underlying().(type) { + case *types.Basic: + var c Constraint + switch typ.Kind() { + case types.Int, types.Int8, types.Int16, types.Int32, types.Int64, + types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt: + c = sigmaInteger(g, ins, cond, ops) + case types.String, types.UntypedString: + c = sigmaString(g, ins, cond, ops) + } + if c != nil { + cs = append(cs, c) + } + case *types.Slice: + c := sigmaSlice(g, ins, cond, ops) + if c != nil { + cs = append(cs, c) + } + default: + //log.Printf("unsupported sigma type %T", typ) // XXX + } + case *ssa.MakeChan: + cs = append(cs, NewMakeChannelConstraint(ins.Size, ins)) + case *ssa.MakeSlice: + cs = append(cs, NewMakeSliceConstraint(ins.Len, ins)) + case *ssa.ChangeType: + switch ins.X.Type().Underlying().(type) { + case *types.Chan: + cs = append(cs, NewChannelChangeTypeConstraint(ins.X, ins)) + } + } + } + } + + for _, c := range cs { + if c == nil { + panic("nil constraint") + } + // If V is used in constraint C, then we create an edge V->C + for _, op := range c.Operands() { + g.AddEdge(op, c, false) + } + if c, ok := c.(Future); ok { + for _, op := range c.Futures() { + g.AddEdge(op, c, true) + } + } + // If constraint C defines variable V, then we create an edge + // C->V + g.AddEdge(c, c.Y(), false) + } + + g.FindSCCs() + g.sccEdges = make([][]Edge, len(g.SCCs)) + g.futures = make([][]Future, len(g.SCCs)) + for _, e := range g.Edges { + g.sccEdges[e.From.SCC] = append(g.sccEdges[e.From.SCC], e) + if !e.control { + continue + } + if c, ok := e.To.Value.(Future); ok { + g.futures[e.From.SCC] = append(g.futures[e.From.SCC], c) + } + } + return g +} + +func (g *Graph) Solve() Ranges { + var consts []Z + off := NewZ(1) + for _, n := range g.Vertices { + if c, ok := n.Value.(*ssa.Const); ok { + basic, ok := c.Type().Underlying().(*types.Basic) + if !ok { + continue + } + if (basic.Info() & types.IsInteger) != 0 { + z := ConstantToZ(c.Value) + consts = append(consts, z) + consts = append(consts, z.Add(off)) + consts = append(consts, z.Sub(off)) + } + } + + } + sort.Sort(Zs(consts)) + + for scc, vertices := range g.SCCs { + n := 0 + n = len(vertices) + if n == 1 { + g.resolveFutures(scc) + v := vertices[0] + if v, ok := v.Value.(ssa.Value); ok { + switch typ := v.Type().Underlying().(type) { + case *types.Basic: + switch typ.Kind() { + case types.String, types.UntypedString: + if !g.Range(v).(StringInterval).IsKnown() { + g.SetRange(v, StringInterval{NewIntInterval(NewZ(0), PInfinity)}) + } + default: + if !g.Range(v).(IntInterval).IsKnown() { + g.SetRange(v, InfinityFor(v)) + } + } + case *types.Chan: + if !g.Range(v).(ChannelInterval).IsKnown() { + g.SetRange(v, ChannelInterval{NewIntInterval(NewZ(0), PInfinity)}) + } + case *types.Slice: + if !g.Range(v).(SliceInterval).IsKnown() { + g.SetRange(v, SliceInterval{NewIntInterval(NewZ(0), PInfinity)}) + } + } + } + if c, ok := v.Value.(Constraint); ok { + g.SetRange(c.Y(), c.Eval(g)) + } + } else { + uses := g.uses(scc) + entries := g.entries(scc) + for len(entries) > 0 { + v := entries[len(entries)-1] + entries = entries[:len(entries)-1] + for _, use 
:= range uses[v] { + if g.widen(use, consts) { + entries = append(entries, use.Y()) + } + } + } + + g.resolveFutures(scc) + + // XXX this seems to be necessary, but shouldn't be. + // removing it leads to nil pointer derefs; investigate + // where we're not setting values correctly. + for _, n := range vertices { + if v, ok := n.Value.(ssa.Value); ok { + i, ok := g.Range(v).(IntInterval) + if !ok { + continue + } + if !i.IsKnown() { + g.SetRange(v, InfinityFor(v)) + } + } + } + + actives := g.actives(scc) + for len(actives) > 0 { + v := actives[len(actives)-1] + actives = actives[:len(actives)-1] + for _, use := range uses[v] { + if g.narrow(use) { + actives = append(actives, use.Y()) + } + } + } + } + // propagate scc + for _, edge := range g.sccEdges[scc] { + if edge.control { + continue + } + if edge.From.SCC == edge.To.SCC { + continue + } + if c, ok := edge.To.Value.(Constraint); ok { + g.SetRange(c.Y(), c.Eval(g)) + } + if c, ok := edge.To.Value.(Future); ok { + if !c.IsKnown() { + c.MarkUnresolved() + } + } + } + } + + for v, r := range g.ranges { + i, ok := r.(IntInterval) + if !ok { + continue + } + if (v.Type().Underlying().(*types.Basic).Info() & types.IsUnsigned) == 0 { + if i.Upper != PInfinity { + s := &types.StdSizes{ + // XXX is it okay to assume the largest word size, or do we + // need to be platform specific? + WordSize: 8, + MaxAlign: 1, + } + bits := (s.Sizeof(v.Type()) * 8) - 1 + n := big.NewInt(1) + n = n.Lsh(n, uint(bits)) + upper, lower := &big.Int{}, &big.Int{} + upper.Sub(n, big.NewInt(1)) + lower.Neg(n) + + if i.Upper.Cmp(NewBigZ(upper)) == 1 { + i = NewIntInterval(NInfinity, PInfinity) + } else if i.Lower.Cmp(NewBigZ(lower)) == -1 { + i = NewIntInterval(NInfinity, PInfinity) + } + } + } + + g.ranges[v] = i + } + + return g.ranges +} + +func VertexString(v *Vertex) string { + switch v := v.Value.(type) { + case Constraint: + return v.String() + case ssa.Value: + return v.Name() + case nil: + return "BUG: nil vertex value" + default: + panic(fmt.Sprintf("unexpected type %T", v)) + } +} + +type Vertex struct { + Value interface{} // one of Constraint or ssa.Value + SCC int + index int + lowlink int + stack bool + + Succs []Edge +} + +type Ranges map[ssa.Value]Range + +func (r Ranges) Get(x ssa.Value) Range { + if x == nil { + return nil + } + i, ok := r[x] + if !ok { + switch x := x.Type().Underlying().(type) { + case *types.Basic: + switch x.Kind() { + case types.String, types.UntypedString: + return StringInterval{} + default: + return IntInterval{} + } + case *types.Chan: + return ChannelInterval{} + case *types.Slice: + return SliceInterval{} + } + } + return i +} + +type Graph struct { + Vertices map[interface{}]*Vertex + Edges []Edge + SCCs [][]*Vertex + ranges Ranges + + // map SCCs to futures + futures [][]Future + // map SCCs to edges + sccEdges [][]Edge +} + +func (g Graph) Graphviz() string { + var lines []string + lines = append(lines, "digraph{") + ids := map[interface{}]int{} + i := 1 + for _, v := range g.Vertices { + ids[v] = i + shape := "box" + if _, ok := v.Value.(ssa.Value); ok { + shape = "oval" + } + lines = append(lines, fmt.Sprintf(`n%d [shape="%s", label=%q, colorscheme=spectral11, style="filled", fillcolor="%d"]`, + i, shape, VertexString(v), (v.SCC%11)+1)) + i++ + } + for _, e := range g.Edges { + style := "solid" + if e.control { + style = "dashed" + } + lines = append(lines, fmt.Sprintf(`n%d -> n%d [style="%s"]`, ids[e.From], ids[e.To], style)) + } + lines = append(lines, "}") + return strings.Join(lines, "\n") +} + +func (g *Graph) 
SetRange(x ssa.Value, r Range) { + g.ranges[x] = r +} + +func (g *Graph) Range(x ssa.Value) Range { + return g.ranges.Get(x) +} + +func (g *Graph) widen(c Constraint, consts []Z) bool { + setRange := func(i Range) { + g.SetRange(c.Y(), i) + } + widenIntInterval := func(oi, ni IntInterval) (IntInterval, bool) { + if !ni.IsKnown() { + return oi, false + } + nlc := NInfinity + nuc := PInfinity + for _, co := range consts { + if co.Cmp(ni.Lower) <= 0 { + nlc = co + break + } + } + for _, co := range consts { + if co.Cmp(ni.Upper) >= 0 { + nuc = co + break + } + } + + if !oi.IsKnown() { + return ni, true + } + if ni.Lower.Cmp(oi.Lower) == -1 && ni.Upper.Cmp(oi.Upper) == 1 { + return NewIntInterval(nlc, nuc), true + } + if ni.Lower.Cmp(oi.Lower) == -1 { + return NewIntInterval(nlc, oi.Upper), true + } + if ni.Upper.Cmp(oi.Upper) == 1 { + return NewIntInterval(oi.Lower, nuc), true + } + return oi, false + } + switch oi := g.Range(c.Y()).(type) { + case IntInterval: + ni := c.Eval(g).(IntInterval) + si, changed := widenIntInterval(oi, ni) + if changed { + setRange(si) + return true + } + return false + case StringInterval: + ni := c.Eval(g).(StringInterval) + si, changed := widenIntInterval(oi.Length, ni.Length) + if changed { + setRange(StringInterval{si}) + return true + } + return false + case SliceInterval: + ni := c.Eval(g).(SliceInterval) + si, changed := widenIntInterval(oi.Length, ni.Length) + if changed { + setRange(SliceInterval{si}) + return true + } + return false + default: + return false + } +} + +func (g *Graph) narrow(c Constraint) bool { + narrowIntInterval := func(oi, ni IntInterval) (IntInterval, bool) { + oLower := oi.Lower + oUpper := oi.Upper + nLower := ni.Lower + nUpper := ni.Upper + + if oLower == NInfinity && nLower != NInfinity { + return NewIntInterval(nLower, oUpper), true + } + if oUpper == PInfinity && nUpper != PInfinity { + return NewIntInterval(oLower, nUpper), true + } + if oLower.Cmp(nLower) == 1 { + return NewIntInterval(nLower, oUpper), true + } + if oUpper.Cmp(nUpper) == -1 { + return NewIntInterval(oLower, nUpper), true + } + return oi, false + } + switch oi := g.Range(c.Y()).(type) { + case IntInterval: + ni := c.Eval(g).(IntInterval) + si, changed := narrowIntInterval(oi, ni) + if changed { + g.SetRange(c.Y(), si) + return true + } + return false + case StringInterval: + ni := c.Eval(g).(StringInterval) + si, changed := narrowIntInterval(oi.Length, ni.Length) + if changed { + g.SetRange(c.Y(), StringInterval{si}) + return true + } + return false + case SliceInterval: + ni := c.Eval(g).(SliceInterval) + si, changed := narrowIntInterval(oi.Length, ni.Length) + if changed { + g.SetRange(c.Y(), SliceInterval{si}) + return true + } + return false + default: + return false + } +} + +func (g *Graph) resolveFutures(scc int) { + for _, c := range g.futures[scc] { + c.Resolve() + } +} + +func (g *Graph) entries(scc int) []ssa.Value { + var entries []ssa.Value + for _, n := range g.Vertices { + if n.SCC != scc { + continue + } + if v, ok := n.Value.(ssa.Value); ok { + // XXX avoid quadratic runtime + // + // XXX I cannot think of any code where the future and its + // variables aren't in the same SCC, in which case this + // code isn't very useful (the variables won't be resolved + // yet). Before we have a cross-SCC example, however, we + // can't really verify that this code is working + // correctly, or indeed doing anything useful. 
+ for _, on := range g.Vertices { + if c, ok := on.Value.(Future); ok { + if c.Y() == v { + if !c.IsResolved() { + g.SetRange(c.Y(), c.Eval(g)) + c.MarkResolved() + } + break + } + } + } + if g.Range(v).IsKnown() { + entries = append(entries, v) + } + } + } + return entries +} + +func (g *Graph) uses(scc int) map[ssa.Value][]Constraint { + m := map[ssa.Value][]Constraint{} + for _, e := range g.sccEdges[scc] { + if e.control { + continue + } + if v, ok := e.From.Value.(ssa.Value); ok { + c := e.To.Value.(Constraint) + sink := c.Y() + if g.Vertices[sink].SCC == scc { + m[v] = append(m[v], c) + } + } + } + return m +} + +func (g *Graph) actives(scc int) []ssa.Value { + var actives []ssa.Value + for _, n := range g.Vertices { + if n.SCC != scc { + continue + } + if v, ok := n.Value.(ssa.Value); ok { + if _, ok := v.(*ssa.Const); !ok { + actives = append(actives, v) + } + } + } + return actives +} + +func (g *Graph) AddEdge(from, to interface{}, ctrl bool) { + vf, ok := g.Vertices[from] + if !ok { + vf = &Vertex{Value: from} + g.Vertices[from] = vf + } + vt, ok := g.Vertices[to] + if !ok { + vt = &Vertex{Value: to} + g.Vertices[to] = vt + } + e := Edge{From: vf, To: vt, control: ctrl} + g.Edges = append(g.Edges, e) + vf.Succs = append(vf.Succs, e) +} + +type Edge struct { + From, To *Vertex + control bool +} + +func (e Edge) String() string { + return fmt.Sprintf("%s -> %s", VertexString(e.From), VertexString(e.To)) +} + +func (g *Graph) FindSCCs() { + // use Tarjan to find the SCCs + + index := 1 + var s []*Vertex + + scc := 0 + var strongconnect func(v *Vertex) + strongconnect = func(v *Vertex) { + // set the depth index for v to the smallest unused index + v.index = index + v.lowlink = index + index++ + s = append(s, v) + v.stack = true + + for _, e := range v.Succs { + w := e.To + if w.index == 0 { + // successor w has not yet been visited; recurse on it + strongconnect(w) + if w.lowlink < v.lowlink { + v.lowlink = w.lowlink + } + } else if w.stack { + // successor w is in stack s and hence in the current scc + if w.index < v.lowlink { + v.lowlink = w.index + } + } + } + + if v.lowlink == v.index { + for { + w := s[len(s)-1] + s = s[:len(s)-1] + w.stack = false + w.SCC = scc + if w == v { + break + } + } + scc++ + } + } + for _, v := range g.Vertices { + if v.index == 0 { + strongconnect(v) + } + } + + g.SCCs = make([][]*Vertex, scc) + for _, n := range g.Vertices { + n.SCC = scc - n.SCC - 1 + g.SCCs[n.SCC] = append(g.SCCs[n.SCC], n) + } +} + +func invertToken(tok token.Token) token.Token { + switch tok { + case token.LSS: + return token.GEQ + case token.GTR: + return token.LEQ + case token.EQL: + return token.NEQ + case token.NEQ: + return token.EQL + case token.GEQ: + return token.LSS + case token.LEQ: + return token.GTR + default: + panic(fmt.Sprintf("unsupported token %s", tok)) + } +} + +func flipToken(tok token.Token) token.Token { + switch tok { + case token.LSS: + return token.GTR + case token.GTR: + return token.LSS + case token.EQL: + return token.EQL + case token.NEQ: + return token.NEQ + case token.GEQ: + return token.LEQ + case token.LEQ: + return token.GEQ + default: + panic(fmt.Sprintf("unsupported token %s", tok)) + } +} + +type CopyConstraint struct { + aConstraint + X ssa.Value +} + +func (c *CopyConstraint) String() string { + return fmt.Sprintf("%s = copy(%s)", c.Y().Name(), c.X.Name()) +} + +func (c *CopyConstraint) Eval(g *Graph) Range { + return g.Range(c.X) +} + +func (c *CopyConstraint) Operands() []ssa.Value { + return []ssa.Value{c.X} +} + +func 
NewCopyConstraint(x, y ssa.Value) Constraint { + return &CopyConstraint{ + aConstraint: aConstraint{ + y: y, + }, + X: x, + } +} diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go new file mode 100644 index 0000000..21889e8 --- /dev/null +++ b/vendor/honnef.co/go/tools/unused/unused.go @@ -0,0 +1,1064 @@ +package unused // import "honnef.co/go/tools/unused" + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "path/filepath" + "strings" + + "honnef.co/go/tools/lint" + + "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/types/typeutil" +) + +func NewLintChecker(c *Checker) *LintChecker { + l := &LintChecker{ + c: c, + } + return l +} + +type LintChecker struct { + c *Checker +} + +func (*LintChecker) Name() string { return "unused" } +func (*LintChecker) Prefix() string { return "U" } + +func (l *LintChecker) Init(*lint.Program) {} +func (l *LintChecker) Funcs() map[string]lint.Func { + return map[string]lint.Func{ + "U1000": l.Lint, + } +} + +func typString(obj types.Object) string { + switch obj := obj.(type) { + case *types.Func: + return "func" + case *types.Var: + if obj.IsField() { + return "field" + } + return "var" + case *types.Const: + return "const" + case *types.TypeName: + return "type" + default: + // log.Printf("%T", obj) + return "identifier" + } +} + +func (l *LintChecker) Lint(j *lint.Job) { + unused := l.c.Check(j.Program.Prog) + for _, u := range unused { + name := u.Obj.Name() + if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { + switch sig.Recv().Type().(type) { + case *types.Named, *types.Pointer: + typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) + if len(typ) > 0 && typ[0] == '*' { + name = fmt.Sprintf("(%s).%s", typ, u.Obj.Name()) + } else if len(typ) > 0 { + name = fmt.Sprintf("%s.%s", typ, u.Obj.Name()) + } + } + } + j.Errorf(u.Obj, "%s %s is unused", typString(u.Obj), name) + } +} + +type graph struct { + roots []*graphNode + nodes map[interface{}]*graphNode +} + +func (g *graph) markUsedBy(obj, usedBy interface{}) { + objNode := g.getNode(obj) + usedByNode := g.getNode(usedBy) + if objNode.obj == usedByNode.obj { + return + } + usedByNode.uses[objNode] = struct{}{} +} + +var labelCounter = 1 + +func (g *graph) getNode(obj interface{}) *graphNode { + for { + if pt, ok := obj.(*types.Pointer); ok { + obj = pt.Elem() + } else { + break + } + } + _, ok := g.nodes[obj] + if !ok { + g.addObj(obj) + } + + return g.nodes[obj] +} + +func (g *graph) addObj(obj interface{}) { + if pt, ok := obj.(*types.Pointer); ok { + obj = pt.Elem() + } + node := &graphNode{obj: obj, uses: make(map[*graphNode]struct{}), n: labelCounter} + g.nodes[obj] = node + labelCounter++ + + if obj, ok := obj.(*types.Struct); ok { + n := obj.NumFields() + for i := 0; i < n; i++ { + field := obj.Field(i) + g.markUsedBy(obj, field) + } + } +} + +type graphNode struct { + obj interface{} + uses map[*graphNode]struct{} + used bool + quiet bool + n int +} + +type CheckMode int + +const ( + CheckConstants CheckMode = 1 << iota + CheckFields + CheckFunctions + CheckTypes + CheckVariables + + CheckAll = CheckConstants | CheckFields | CheckFunctions | CheckTypes | CheckVariables +) + +type Unused struct { + Obj types.Object + Position token.Position +} + +type Checker struct { + Mode CheckMode + WholeProgram bool + ConsiderReflection bool + Debug io.Writer + + graph *graph + + msCache typeutil.MethodSetCache + lprog *loader.Program + topmostCache map[*types.Scope]*types.Scope + 
interfaces []*types.Interface +} + +func NewChecker(mode CheckMode) *Checker { + return &Checker{ + Mode: mode, + graph: &graph{ + nodes: make(map[interface{}]*graphNode), + }, + topmostCache: make(map[*types.Scope]*types.Scope), + } +} + +func (c *Checker) checkConstants() bool { return (c.Mode & CheckConstants) > 0 } +func (c *Checker) checkFields() bool { return (c.Mode & CheckFields) > 0 } +func (c *Checker) checkFunctions() bool { return (c.Mode & CheckFunctions) > 0 } +func (c *Checker) checkTypes() bool { return (c.Mode & CheckTypes) > 0 } +func (c *Checker) checkVariables() bool { return (c.Mode & CheckVariables) > 0 } + +func (c *Checker) markFields(typ types.Type) { + structType, ok := typ.Underlying().(*types.Struct) + if !ok { + return + } + n := structType.NumFields() + for i := 0; i < n; i++ { + field := structType.Field(i) + c.graph.markUsedBy(field, typ) + } +} + +type Error struct { + Errors map[string][]error +} + +func (e Error) Error() string { + return fmt.Sprintf("errors in %d packages", len(e.Errors)) +} + +func (c *Checker) Check(lprog *loader.Program) []Unused { + var unused []Unused + c.lprog = lprog + if c.WholeProgram { + c.findExportedInterfaces() + } + for _, pkg := range c.lprog.InitialPackages() { + c.processDefs(pkg) + c.processUses(pkg) + c.processTypes(pkg) + c.processSelections(pkg) + c.processAST(pkg) + } + + for _, node := range c.graph.nodes { + obj, ok := node.obj.(types.Object) + if !ok { + continue + } + typNode, ok := c.graph.nodes[obj.Type()] + if !ok { + continue + } + node.uses[typNode] = struct{}{} + } + + roots := map[*graphNode]struct{}{} + for _, root := range c.graph.roots { + roots[root] = struct{}{} + } + markNodesUsed(roots) + c.markNodesQuiet() + + if c.Debug != nil { + c.printDebugGraph(c.Debug) + } + + for _, node := range c.graph.nodes { + if node.used || node.quiet { + continue + } + obj, ok := node.obj.(types.Object) + if !ok { + continue + } + found := false + if !false { + for _, pkg := range c.lprog.InitialPackages() { + if pkg.Pkg == obj.Pkg() { + found = true + break + } + } + } + if !found { + continue + } + + pos := c.lprog.Fset.Position(obj.Pos()) + if pos.Filename == "" || filepath.Base(pos.Filename) == "C" { + continue + } + generated := false + for _, file := range c.lprog.Package(obj.Pkg().Path()).Files { + if c.lprog.Fset.Position(file.Pos()).Filename != pos.Filename { + continue + } + if len(file.Comments) > 0 { + generated = isGenerated(file.Comments[0].Text()) + } + break + } + if generated { + continue + } + unused = append(unused, Unused{Obj: obj, Position: pos}) + } + return unused +} + +// isNoCopyType reports whether a type represents the NoCopy sentinel +// type. The NoCopy type is a named struct with no fields and exactly +// one method `func Lock()` that is empty. +// +// FIXME(dh): currently we're not checking that the function body is +// empty. 
+func isNoCopyType(typ types.Type) bool { + st, ok := typ.Underlying().(*types.Struct) + if !ok { + return false + } + if st.NumFields() != 0 { + return false + } + + named, ok := typ.(*types.Named) + if !ok { + return false + } + if named.NumMethods() != 1 { + return false + } + meth := named.Method(0) + if meth.Name() != "Lock" { + return false + } + sig := meth.Type().(*types.Signature) + if sig.Params().Len() != 0 || sig.Results().Len() != 0 { + return false + } + return true +} + +func (c *Checker) useNoCopyFields(typ types.Type) { + if st, ok := typ.Underlying().(*types.Struct); ok { + n := st.NumFields() + for i := 0; i < n; i++ { + field := st.Field(i) + if isNoCopyType(field.Type()) { + c.graph.markUsedBy(field, typ) + c.graph.markUsedBy(field.Type().(*types.Named).Method(0), field.Type()) + } + } + } +} + +func (c *Checker) useExportedFields(typ types.Type) { + if st, ok := typ.Underlying().(*types.Struct); ok { + n := st.NumFields() + for i := 0; i < n; i++ { + field := st.Field(i) + if field.Exported() { + c.graph.markUsedBy(field, typ) + } + } + } +} + +func (c *Checker) useExportedMethods(typ types.Type) { + named, ok := typ.(*types.Named) + if !ok { + return + } + ms := typeutil.IntuitiveMethodSet(named, &c.msCache) + for i := 0; i < len(ms); i++ { + meth := ms[i].Obj() + if meth.Exported() { + c.graph.markUsedBy(meth, typ) + } + } + + st, ok := named.Underlying().(*types.Struct) + if !ok { + return + } + n := st.NumFields() + for i := 0; i < n; i++ { + field := st.Field(i) + if !field.Anonymous() { + continue + } + ms := typeutil.IntuitiveMethodSet(field.Type(), &c.msCache) + for j := 0; j < len(ms); j++ { + if ms[j].Obj().Exported() { + c.graph.markUsedBy(field, typ) + break + } + } + } +} + +func (c *Checker) processDefs(pkg *loader.PackageInfo) { + for _, obj := range pkg.Defs { + if obj == nil { + continue + } + c.graph.getNode(obj) + + if obj, ok := obj.(*types.TypeName); ok { + c.graph.markUsedBy(obj.Type().Underlying(), obj.Type()) + c.graph.markUsedBy(obj.Type(), obj) // TODO is this needed? + c.graph.markUsedBy(obj, obj.Type()) + + // We mark all exported fields as used. For normal + // operation, we have to. The user may use these fields + // without us knowing. + // + // TODO(dh): In whole-program mode, however, we mark them + // as used because of reflection (such as JSON + // marshaling). Strictly speaking, we would only need to + // mark them used if an instance of the type was + // accessible via an interface value. + if !c.WholeProgram || c.ConsiderReflection { + c.useExportedFields(obj.Type()) + } + + // TODO(dh): Traditionally we have not marked all exported + // methods as exported, even though they're strictly + // speaking accessible through reflection. We've done that + // because using methods just via reflection is rare, and + // not worth the false negatives. With the new -reflect + // flag, however, we should reconsider that choice. + if !c.WholeProgram { + c.useExportedMethods(obj.Type()) + } + } + + switch obj := obj.(type) { + case *types.Var, *types.Const, *types.Func, *types.TypeName: + if obj.Exported() { + // Exported variables and constants use their types, + // even if there's no expression using them in the + // checked program. + // + // Also operates on funcs and type names, but that's + // irrelevant/redundant. 
+ c.graph.markUsedBy(obj.Type(), obj) + } + if obj.Name() == "_" { + node := c.graph.getNode(obj) + node.quiet = true + scope := c.topmostScope(pkg.Pkg.Scope().Innermost(obj.Pos()), pkg.Pkg) + if scope == pkg.Pkg.Scope() { + c.graph.roots = append(c.graph.roots, node) + } else { + c.graph.markUsedBy(obj, scope) + } + } else { + // Variables declared in functions are used. This is + // done so that arguments and return parameters are + // always marked as used. + if _, ok := obj.(*types.Var); ok { + if obj.Parent() != obj.Pkg().Scope() && obj.Parent() != nil { + c.graph.markUsedBy(obj, c.topmostScope(obj.Parent(), obj.Pkg())) + c.graph.markUsedBy(obj.Type(), obj) + } + } + } + } + + if fn, ok := obj.(*types.Func); ok { + // A function uses its signature + c.graph.markUsedBy(fn, fn.Type()) + + // A function uses its return types + sig := fn.Type().(*types.Signature) + res := sig.Results() + n := res.Len() + for i := 0; i < n; i++ { + c.graph.markUsedBy(res.At(i).Type(), fn) + } + } + + if obj, ok := obj.(interface { + Scope() *types.Scope + Pkg() *types.Package + }); ok { + scope := obj.Scope() + c.graph.markUsedBy(c.topmostScope(scope, obj.Pkg()), obj) + } + + if c.isRoot(obj) { + node := c.graph.getNode(obj) + c.graph.roots = append(c.graph.roots, node) + if obj, ok := obj.(*types.PkgName); ok { + scope := obj.Pkg().Scope() + c.graph.markUsedBy(scope, obj) + } + } + } +} + +func (c *Checker) processUses(pkg *loader.PackageInfo) { + for ident, usedObj := range pkg.Uses { + if _, ok := usedObj.(*types.PkgName); ok { + continue + } + pos := ident.Pos() + scope := pkg.Pkg.Scope().Innermost(pos) + scope = c.topmostScope(scope, pkg.Pkg) + if scope != pkg.Pkg.Scope() { + c.graph.markUsedBy(usedObj, scope) + } + + switch usedObj.(type) { + case *types.Var, *types.Const: + c.graph.markUsedBy(usedObj.Type(), usedObj) + } + } +} + +func (c *Checker) findExportedInterfaces() { + c.interfaces = []*types.Interface{types.Universe.Lookup("error").Type().(*types.Named).Underlying().(*types.Interface)} + var pkgs []*loader.PackageInfo + if c.WholeProgram { + for _, pkg := range c.lprog.AllPackages { + pkgs = append(pkgs, pkg) + } + } else { + pkgs = c.lprog.InitialPackages() + } + + for _, pkg := range pkgs { + for _, tv := range pkg.Types { + iface, ok := tv.Type.(*types.Interface) + if !ok { + continue + } + if iface.NumMethods() == 0 { + continue + } + c.interfaces = append(c.interfaces, iface) + } + } +} + +func (c *Checker) processTypes(pkg *loader.PackageInfo) { + named := map[*types.Named]*types.Pointer{} + var interfaces []*types.Interface + for _, tv := range pkg.Types { + if typ, ok := tv.Type.(interface { + Elem() types.Type + }); ok { + c.graph.markUsedBy(typ.Elem(), typ) + } + + switch obj := tv.Type.(type) { + case *types.Named: + named[obj] = types.NewPointer(obj) + c.graph.markUsedBy(obj, obj.Underlying()) + c.graph.markUsedBy(obj.Underlying(), obj) + case *types.Interface: + if obj.NumMethods() > 0 { + interfaces = append(interfaces, obj) + } + case *types.Struct: + c.useNoCopyFields(obj) + if pkg.Pkg.Name() != "main" && !c.WholeProgram { + c.useExportedFields(obj) + } + } + } + + // Pretend that all types are meant to implement as many + // interfaces as possible. + // + // TODO(dh): For normal operations, that's the best we can do, as + // we have no idea what external users will do with our types. 
In + // whole-program mode, we could be more conservative, in two ways: + // 1) Only consider interfaces if a type has been assigned to one + // 2) Use SSA and flow analysis and determine the exact set of + // interfaces that is relevant. + fn := func(iface *types.Interface) { + for obj, objPtr := range named { + if !types.Implements(obj, iface) && !types.Implements(objPtr, iface) { + continue + } + ifaceMethods := make(map[string]struct{}, iface.NumMethods()) + n := iface.NumMethods() + for i := 0; i < n; i++ { + meth := iface.Method(i) + ifaceMethods[meth.Name()] = struct{}{} + } + for _, obj := range []types.Type{obj, objPtr} { + ms := c.msCache.MethodSet(obj) + n := ms.Len() + for i := 0; i < n; i++ { + sel := ms.At(i) + meth := sel.Obj().(*types.Func) + _, found := ifaceMethods[meth.Name()] + if !found { + continue + } + c.graph.markUsedBy(meth.Type().(*types.Signature).Recv().Type(), obj) // embedded receiver + if len(sel.Index()) > 1 { + f := getField(obj, sel.Index()[0]) + c.graph.markUsedBy(f, obj) // embedded receiver + } + c.graph.markUsedBy(meth, obj) + } + } + } + } + + for _, iface := range interfaces { + fn(iface) + } + for _, iface := range c.interfaces { + fn(iface) + } +} + +func (c *Checker) processSelections(pkg *loader.PackageInfo) { + fn := func(expr *ast.SelectorExpr, sel *types.Selection, offset int) { + scope := pkg.Pkg.Scope().Innermost(expr.Pos()) + c.graph.markUsedBy(expr.X, c.topmostScope(scope, pkg.Pkg)) + c.graph.markUsedBy(sel.Obj(), expr.X) + if len(sel.Index()) > 1 { + typ := sel.Recv() + indices := sel.Index() + for _, idx := range indices[:len(indices)-offset] { + obj := getField(typ, idx) + typ = obj.Type() + c.graph.markUsedBy(obj, expr.X) + } + } + } + + for expr, sel := range pkg.Selections { + switch sel.Kind() { + case types.FieldVal: + fn(expr, sel, 0) + case types.MethodVal: + fn(expr, sel, 1) + } + } +} + +func dereferenceType(typ types.Type) types.Type { + if typ, ok := typ.(*types.Pointer); ok { + return typ.Elem() + } + return typ +} + +// processConversion marks fields as used if they're part of a type conversion. +func (c *Checker) processConversion(pkg *loader.PackageInfo, node ast.Node) { + if node, ok := node.(*ast.CallExpr); ok { + callTyp := pkg.TypeOf(node.Fun) + var typDst *types.Struct + var ok bool + switch typ := callTyp.(type) { + case *types.Named: + typDst, ok = typ.Underlying().(*types.Struct) + case *types.Pointer: + typDst, ok = typ.Elem().Underlying().(*types.Struct) + default: + return + } + if !ok { + return + } + + if typ, ok := pkg.TypeOf(node.Args[0]).(*types.Basic); ok && typ.Kind() == types.UnsafePointer { + // This is an unsafe conversion. Assume that all the + // fields are relevant (they are, because of memory + // layout) + n := typDst.NumFields() + for i := 0; i < n; i++ { + c.graph.markUsedBy(typDst.Field(i), typDst) + } + return + } + + typSrc, ok := dereferenceType(pkg.TypeOf(node.Args[0])).Underlying().(*types.Struct) + if !ok { + return + } + + // When we convert from type t1 to t2, were t1 and t2 are + // structs, all fields are relevant, as otherwise the + // conversion would fail. + // + // We mark t2's fields as used by t1's fields, and vice + // versa. That way, if no code actually refers to a field + // in either type, it's still correctly marked as unused. + // If a field is used in either struct, it's implicitly + // relevant in the other one, too. 
+ // + // It works in a similar way for conversions between types + // of two packages, only that the extra information in the + // graph is redundant unless we're in whole program mode. + n := typDst.NumFields() + for i := 0; i < n; i++ { + fDst := typDst.Field(i) + fSrc := typSrc.Field(i) + c.graph.markUsedBy(fDst, fSrc) + c.graph.markUsedBy(fSrc, fDst) + } + } +} + +// processCompositeLiteral marks fields as used if the struct is used +// in a composite literal. +func (c *Checker) processCompositeLiteral(pkg *loader.PackageInfo, node ast.Node) { + // XXX how does this actually work? wouldn't it match t{}? + if node, ok := node.(*ast.CompositeLit); ok { + typ := pkg.TypeOf(node) + if _, ok := typ.(*types.Named); ok { + typ = typ.Underlying() + } + if _, ok := typ.(*types.Struct); !ok { + return + } + + if isBasicStruct(node.Elts) { + c.markFields(typ) + } + } +} + +// processCgoExported marks functions as used if they're being +// exported to cgo. +func (c *Checker) processCgoExported(pkg *loader.PackageInfo, node ast.Node) { + if node, ok := node.(*ast.FuncDecl); ok { + if node.Doc == nil { + return + } + for _, cmt := range node.Doc.List { + if !strings.HasPrefix(cmt.Text, "//go:cgo_export_") { + return + } + obj := pkg.ObjectOf(node.Name) + c.graph.roots = append(c.graph.roots, c.graph.getNode(obj)) + } + } +} + +func (c *Checker) processVariableDeclaration(pkg *loader.PackageInfo, node ast.Node) { + if decl, ok := node.(*ast.GenDecl); ok { + for _, spec := range decl.Specs { + spec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + for i, name := range spec.Names { + if i >= len(spec.Values) { + break + } + value := spec.Values[i] + fn := func(node ast.Node) bool { + if node3, ok := node.(*ast.Ident); ok { + obj := pkg.ObjectOf(node3) + if _, ok := obj.(*types.PkgName); ok { + return true + } + c.graph.markUsedBy(obj, pkg.ObjectOf(name)) + } + return true + } + ast.Inspect(value, fn) + } + } + } +} + +func (c *Checker) processArrayConstants(pkg *loader.PackageInfo, node ast.Node) { + if decl, ok := node.(*ast.ArrayType); ok { + ident, ok := decl.Len.(*ast.Ident) + if !ok { + return + } + c.graph.markUsedBy(pkg.ObjectOf(ident), pkg.TypeOf(decl)) + } +} + +func (c *Checker) processKnownReflectMethodCallers(pkg *loader.PackageInfo, node ast.Node) { + call, ok := node.(*ast.CallExpr) + if !ok { + return + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return + } + if types.TypeString(pkg.TypeOf(sel.X), nil) != "*net/rpc.Server" { + x, ok := sel.X.(*ast.Ident) + if !ok { + return + } + pkgname, ok := pkg.ObjectOf(x).(*types.PkgName) + if !ok { + return + } + if pkgname.Imported().Path() != "net/rpc" { + return + } + } + + var arg ast.Expr + switch sel.Sel.Name { + case "Register": + if len(call.Args) != 1 { + return + } + arg = call.Args[0] + case "RegisterName": + if len(call.Args) != 2 { + return + } + arg = call.Args[1] + } + typ := pkg.TypeOf(arg) + ms := types.NewMethodSet(typ) + for i := 0; i < ms.Len(); i++ { + c.graph.markUsedBy(ms.At(i).Obj(), typ) + } +} + +func (c *Checker) processAST(pkg *loader.PackageInfo) { + fn := func(node ast.Node) bool { + c.processConversion(pkg, node) + c.processKnownReflectMethodCallers(pkg, node) + c.processCompositeLiteral(pkg, node) + c.processCgoExported(pkg, node) + c.processVariableDeclaration(pkg, node) + c.processArrayConstants(pkg, node) + return true + } + for _, file := range pkg.Files { + ast.Inspect(file, fn) + } +} + +func isBasicStruct(elts []ast.Expr) bool { + for _, elt := range elts { + if _, ok := 
elt.(*ast.KeyValueExpr); !ok { + return true + } + } + return false +} + +func isPkgScope(obj types.Object) bool { + return obj.Parent() == obj.Pkg().Scope() +} + +func isMain(obj types.Object) bool { + if obj.Pkg().Name() != "main" { + return false + } + if obj.Name() != "main" { + return false + } + if !isPkgScope(obj) { + return false + } + if !isFunction(obj) { + return false + } + if isMethod(obj) { + return false + } + return true +} + +func isFunction(obj types.Object) bool { + _, ok := obj.(*types.Func) + return ok +} + +func isMethod(obj types.Object) bool { + if !isFunction(obj) { + return false + } + return obj.(*types.Func).Type().(*types.Signature).Recv() != nil +} + +func isVariable(obj types.Object) bool { + _, ok := obj.(*types.Var) + return ok +} + +func isConstant(obj types.Object) bool { + _, ok := obj.(*types.Const) + return ok +} + +func isType(obj types.Object) bool { + _, ok := obj.(*types.TypeName) + return ok +} + +func isField(obj types.Object) bool { + if obj, ok := obj.(*types.Var); ok && obj.IsField() { + return true + } + return false +} + +func (c *Checker) checkFlags(v interface{}) bool { + obj, ok := v.(types.Object) + if !ok { + return false + } + if isFunction(obj) && !c.checkFunctions() { + return false + } + if isVariable(obj) && !c.checkVariables() { + return false + } + if isConstant(obj) && !c.checkConstants() { + return false + } + if isType(obj) && !c.checkTypes() { + return false + } + if isField(obj) && !c.checkFields() { + return false + } + return true +} + +func (c *Checker) isRoot(obj types.Object) bool { + // - in local mode, main, init, tests, and non-test, non-main exported are roots + // - in global mode (not yet implemented), main, init and tests are roots + + if _, ok := obj.(*types.PkgName); ok { + return true + } + + if isMain(obj) || (isFunction(obj) && !isMethod(obj) && obj.Name() == "init") { + return true + } + if obj.Exported() { + f := c.lprog.Fset.Position(obj.Pos()).Filename + if strings.HasSuffix(f, "_test.go") { + return strings.HasPrefix(obj.Name(), "Test") || + strings.HasPrefix(obj.Name(), "Benchmark") || + strings.HasPrefix(obj.Name(), "Example") + } + + // Package-level are used, except in package main + if isPkgScope(obj) && obj.Pkg().Name() != "main" && !c.WholeProgram { + return true + } + } + return false +} + +func markNodesUsed(nodes map[*graphNode]struct{}) { + for node := range nodes { + wasUsed := node.used + node.used = true + if !wasUsed { + markNodesUsed(node.uses) + } + } +} + +func (c *Checker) markNodesQuiet() { + for _, node := range c.graph.nodes { + if node.used { + continue + } + if obj, ok := node.obj.(types.Object); ok && !c.checkFlags(obj) { + node.quiet = true + continue + } + c.markObjQuiet(node.obj) + } +} + +func (c *Checker) markObjQuiet(obj interface{}) { + switch obj := obj.(type) { + case *types.Named: + n := obj.NumMethods() + for i := 0; i < n; i++ { + meth := obj.Method(i) + node := c.graph.getNode(meth) + node.quiet = true + c.markObjQuiet(meth.Scope()) + } + case *types.Struct: + n := obj.NumFields() + for i := 0; i < n; i++ { + field := obj.Field(i) + c.graph.nodes[field].quiet = true + } + case *types.Func: + c.markObjQuiet(obj.Scope()) + case *types.Scope: + if obj == nil { + return + } + if obj.Parent() == types.Universe { + return + } + for _, name := range obj.Names() { + v := obj.Lookup(name) + if n, ok := c.graph.nodes[v]; ok { + n.quiet = true + } + } + n := obj.NumChildren() + for i := 0; i < n; i++ { + c.markObjQuiet(obj.Child(i)) + } + } +} + +func getField(typ types.Type, 
idx int) *types.Var {
+	switch obj := typ.(type) {
+	case *types.Pointer:
+		return getField(obj.Elem(), idx)
+	case *types.Named:
+		switch v := obj.Underlying().(type) {
+		case *types.Struct:
+			return v.Field(idx)
+		case *types.Pointer:
+			return getField(v.Elem(), idx)
+		default:
+			panic(fmt.Sprintf("unexpected type %s", typ))
+		}
+	case *types.Struct:
+		return obj.Field(idx)
+	}
+	return nil
+}
+
+func (c *Checker) topmostScope(scope *types.Scope, pkg *types.Package) (ret *types.Scope) {
+	if top, ok := c.topmostCache[scope]; ok {
+		return top
+	}
+	defer func() {
+		c.topmostCache[scope] = ret
+	}()
+	if scope == pkg.Scope() {
+		return scope
+	}
+	if scope.Parent().Parent() == pkg.Scope() {
+		return scope
+	}
+	return c.topmostScope(scope.Parent(), pkg)
+}
+
+func (c *Checker) printDebugGraph(w io.Writer) {
+	fmt.Fprintln(w, "digraph {")
+	fmt.Fprintln(w, "n0 [label = roots]")
+	for _, node := range c.graph.nodes {
+		s := fmt.Sprintf("%s (%T)", node.obj, node.obj)
+		s = strings.Replace(s, "\n", "", -1)
+		s = strings.Replace(s, `"`, "", -1)
+		fmt.Fprintf(w, `n%d [label = %q]`, node.n, s)
+		color := "black"
+		switch {
+		case node.used:
+			color = "green"
+		case node.quiet:
+			color = "orange"
+		case !c.checkFlags(node.obj):
+			color = "purple"
+		default:
+			color = "red"
+		}
+		fmt.Fprintf(w, "[color = %s]", color)
+		fmt.Fprintln(w)
+	}
+
+	for _, node1 := range c.graph.nodes {
+		for node2 := range node1.uses {
+			fmt.Fprintf(w, "n%d -> n%d\n", node1.n, node2.n)
+		}
+	}
+	for _, root := range c.graph.roots {
+		fmt.Fprintf(w, "n0 -> n%d\n", root.n)
+	}
+	fmt.Fprintln(w, "}")
+}
+
+func isGenerated(comment string) bool {
+	return strings.Contains(comment, "Code generated by") ||
+		strings.Contains(comment, "DO NOT EDIT")
+}
diff --git a/vendor/honnef.co/go/tools/version/version.go b/vendor/honnef.co/go/tools/version/version.go
new file mode 100644
index 0000000..9536482
--- /dev/null
+++ b/vendor/honnef.co/go/tools/version/version.go
@@ -0,0 +1,17 @@
+package version
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+)
+
+const Version = "2017.2.2"
+
+func Print() {
+	if Version == "devel" {
+		fmt.Printf("%s (no version)\n", filepath.Base(os.Args[0]))
+	} else {
+		fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), Version)
+	}
+}
-- 
2.39.5