aboutsummaryrefslogtreecommitdiffstats
path: root/vendor/github.com/quasilyte
diff options
context:
space:
mode:
authorTaras Madan <tarasmadan@google.com>2022-09-05 14:27:54 +0200
committerGitHub <noreply@github.com>2022-09-05 12:27:54 +0000
commitb2f2446b46bf02821d90ebedadae2bf7ae0e880e (patch)
tree923cf42842918d6bebca1d6bbdc08abed54d274d /vendor/github.com/quasilyte
parente6654faff4bcca4be92e9a8596fd4b77f747c39e (diff)
go.mod, vendor: update (#3358)
* go.mod, vendor: remove unnecessary dependencies Commands: 1. go mod tidy 2. go mod vendor * go.mod, vendor: update cloud.google.com/go Commands: 1. go get -u cloud.google.com/go 2. go mod tidy 3. go mod vendor * go.mod, vendor: update cloud.google.com/* Commands: 1. go get -u cloud.google.com/storage cloud.google.com/logging 2. go mod tidy 3. go mod vendor * go.mod, .golangci.yml, vendor: update *lint* Commands: 1. go get -u golang.org/x/tools github.com/golangci/golangci-lint@v1.47.0 2. go mod tidy 3. go mod vendor 4. edit .golangci.yml to suppress new errors (resolved in the same PR later) * all: fix lint errors hash.go: copy() recommended by gosimple parse.go: ent is never nil verifier.go: signal.Notify() with unbuffered channel is bad. Have no idea why. * .golangci.yml: adjust godot rules check-all is deprecated, but still work if you're hesitating too - I'll remove this commit
Diffstat (limited to 'vendor/github.com/quasilyte')
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/LICENSE2
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/goenv/goenv.go54
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/.gitattributes2
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/LICENSE27
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/README.md55
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go70
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/load.go72
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/main.go332
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/match.go1108
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/subst.go261
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/write.go63
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/xsrcimporter/xsrcimporter.go29
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go369
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go2
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go188
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go584
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/go_version.go58
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go92
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go46
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go67
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go276
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go147
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/ir.go113
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go888
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_utils.go41
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go856
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go201
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go46
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go273
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go49
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go902
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go16
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go21
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go269
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go5
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go12
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go38
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go10
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go70
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go148
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go56
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt/qfmt.go17
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv/qstrconv.go24
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings/qstrings.go62
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go134
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go449
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/compile.go84
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/matchers.go72
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go26
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go14
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go138
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go126
-rw-r--r--vendor/github.com/quasilyte/gogrep/.gitignore4
-rw-r--r--vendor/github.com/quasilyte/gogrep/.golangci.yml49
-rw-r--r--vendor/github.com/quasilyte/gogrep/LICENSE33
-rw-r--r--vendor/github.com/quasilyte/gogrep/Makefile19
-rw-r--r--vendor/github.com/quasilyte/gogrep/README.md41
-rw-r--r--vendor/github.com/quasilyte/gogrep/compile.go1174
-rw-r--r--vendor/github.com/quasilyte/gogrep/compile_import.go57
-rw-r--r--vendor/github.com/quasilyte/gogrep/gen_operations.go357
-rw-r--r--vendor/github.com/quasilyte/gogrep/go.mod8
-rw-r--r--vendor/github.com/quasilyte/gogrep/go.sum8
-rw-r--r--vendor/github.com/quasilyte/gogrep/gogrep.go180
-rw-r--r--vendor/github.com/quasilyte/gogrep/instructions.go116
-rw-r--r--vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go151
-rw-r--r--vendor/github.com/quasilyte/gogrep/match.go937
-rw-r--r--vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go280
-rw-r--r--vendor/github.com/quasilyte/gogrep/operation_string.go146
-rw-r--r--vendor/github.com/quasilyte/gogrep/operations.gen.go1570
-rw-r--r--vendor/github.com/quasilyte/gogrep/parse.go (renamed from vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/parse.go)303
-rw-r--r--vendor/github.com/quasilyte/gogrep/slices.go58
-rw-r--r--vendor/github.com/quasilyte/stdinfo/LICENSE21
-rw-r--r--vendor/github.com/quasilyte/stdinfo/go.mod3
-rw-r--r--vendor/github.com/quasilyte/stdinfo/stdinfo.go30
-rw-r--r--vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go274
75 files changed, 11064 insertions, 3819 deletions
diff --git a/vendor/github.com/quasilyte/go-ruleguard/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/LICENSE
index f0381fb49..558f81ff2 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/LICENSE
+++ b/vendor/github.com/quasilyte/go-ruleguard/LICENSE
@@ -1,6 +1,6 @@
BSD 3-Clause License
-Copyright (c) 2019, Iskander (Alex) Sharipov / quasilyte
+Copyright (c) 2022, Iskander (Alex) Sharipov / quasilyte
All rights reserved.
Redistribution and use in source and binary forms, with or without
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/goenv/goenv.go b/vendor/github.com/quasilyte/go-ruleguard/internal/goenv/goenv.go
new file mode 100644
index 000000000..2f207aa07
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/internal/goenv/goenv.go
@@ -0,0 +1,54 @@
+package goenv
+
+import (
+ "errors"
+ "os/exec"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+func Read() (map[string]string, error) {
+ out, err := exec.Command("go", "env").CombinedOutput()
+ if err != nil {
+ return nil, err
+ }
+ return parseGoEnv(out, runtime.GOOS)
+}
+
+func parseGoEnv(data []byte, goos string) (map[string]string, error) {
+ vars := make(map[string]string)
+
+ lines := strings.Split(strings.ReplaceAll(string(data), "\r\n", "\n"), "\n")
+
+ if goos == "windows" {
+ // Line format is: `set $name=$value`
+ for _, l := range lines {
+ l = strings.TrimPrefix(l, "set ")
+ parts := strings.Split(l, "=")
+ if len(parts) != 2 {
+ continue
+ }
+ vars[parts[0]] = parts[1]
+ }
+ } else {
+ // Line format is: `$name="$value"`
+ for _, l := range lines {
+ parts := strings.Split(strings.TrimSpace(l), "=")
+ if len(parts) != 2 {
+ continue
+ }
+ val, err := strconv.Unquote(parts[1])
+ if err != nil {
+ continue
+ }
+ vars[parts[0]] = val
+ }
+ }
+
+ if len(vars) == 0 {
+ return nil, errors.New("empty env set")
+ }
+
+ return vars, nil
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/.gitattributes b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/.gitattributes
deleted file mode 100644
index 6f9522992..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/.gitattributes
+++ /dev/null
@@ -1,2 +0,0 @@
-# To prevent CRLF breakages on Windows for fragile files, like testdata.
-* -text
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/LICENSE
deleted file mode 100644
index a06c5ebfc..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2017, Daniel Martí. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of the copyright holder nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/README.md b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/README.md
deleted file mode 100644
index 12cb0fdc4..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/README.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# gogrep
-
- go get mvdan.cc/gogrep
-
-Search for Go code using syntax trees. Work in progress.
-
- gogrep -x 'if $x != nil { return $x, $*_ }'
-
-### Instructions
-
- usage: gogrep commands [packages]
-
-A command is of the form "-A pattern", where -A is one of:
-
- -x find all nodes matching a pattern
- -g discard nodes not matching a pattern
- -v discard nodes matching a pattern
- -a filter nodes by certain attributes
- -s substitute with a given syntax tree
- -w write source back to disk or stdout
-
-A pattern is a piece of Go code which may include wildcards. It can be:
-
- a statement (many if split by semicolonss)
- an expression (many if split by commas)
- a type expression
- a top-level declaration (var, func, const)
- an entire file
-
-Wildcards consist of `$` and a name. All wildcards with the same name
-within an expression must match the same node, excluding "_". Example:
-
- $x.$_ = $x // assignment of self to a field in self
-
-If `*` is before the name, it will match any number of nodes. Example:
-
- fmt.Fprintf(os.Stdout, $*_) // all Fprintfs on stdout
-
-`*` can also be used to match optional nodes, like:
-
- for $*_ { $*_ } // will match all for loops
- if $*_; $b { $*_ } // will match all ifs with condition $b
-
-Regexes can also be used to match certain identifier names only. The
-`.*` pattern can be used to match all identifiers. Example:
-
- fmt.$(_ /Fprint.*/)(os.Stdout, $*_) // all Fprint* on stdout
-
-The nodes resulting from applying the commands will be printed line by
-line to standard output.
-
-Here are two simple examples of the -a operand:
-
- gogrep -x '$x + $y' // will match both numerical and string "+" operations
- gogrep -x '$x + $y' -a 'type(string)' // matches only string concatenations
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go
deleted file mode 100644
index f62c4aafd..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package gogrep
-
-import (
- "go/ast"
- "go/token"
- "go/types"
-)
-
-// This is an ugly way to use gogrep as a library.
-// It can go away when there will be another option.
-
-// Parse creates a gogrep pattern out of a given string expression.
-func Parse(fset *token.FileSet, expr string) (*Pattern, error) {
- m := matcher{
- fset: fset,
- Info: &types.Info{},
- }
- node, err := m.parseExpr(expr)
- if err != nil {
- return nil, err
- }
- return &Pattern{m: &m, Expr: node}, nil
-}
-
-// Pattern is a compiled gogrep pattern.
-type Pattern struct {
- Expr ast.Node
- m *matcher
-}
-
-// MatchData describes a successful pattern match.
-type MatchData struct {
- Node ast.Node
- Values map[string]ast.Node
-}
-
-// Clone creates a pattern copy.
-func (p *Pattern) Clone() *Pattern {
- clone := *p
- clone.m = &matcher{}
- *clone.m = *p.m
- clone.m.values = make(map[string]ast.Node)
- return &clone
-}
-
-// MatchNode calls cb if n matches a pattern.
-func (p *Pattern) MatchNode(n ast.Node, cb func(MatchData)) {
- p.m.values = map[string]ast.Node{}
- if p.m.node(p.Expr, n) {
- cb(MatchData{
- Values: p.m.values,
- Node: n,
- })
- }
-}
-
-// Match calls cb for any pattern match found in n.
-func (p *Pattern) Match(n ast.Node, cb func(MatchData)) {
- cmd := exprCmd{name: "x", value: p.Expr}
- matches := p.m.cmdRange(cmd, []submatch{{
- values: map[string]ast.Node{},
- node: n,
- }})
- for _, match := range matches {
- cb(MatchData{
- Values: match.values,
- Node: match.node,
- })
- }
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/load.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/load.go
deleted file mode 100644
index 09ab3fd01..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/load.go
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package gogrep
-
-import (
- "fmt"
- "sort"
- "strings"
-
- "golang.org/x/tools/go/packages"
-)
-
-func (m *matcher) load(wd string, args ...string) ([]*packages.Package, error) {
- mode := packages.NeedName | packages.NeedImports | packages.NeedSyntax |
- packages.NeedTypes | packages.NeedTypesInfo
- if m.recursive { // need the syntax trees for the dependencies too
- mode |= packages.NeedDeps
- }
- cfg := &packages.Config{
- Mode: mode,
- Dir: wd,
- Fset: m.fset,
- Tests: m.tests,
- }
- pkgs, err := packages.Load(cfg, args...)
- if err != nil {
- return nil, err
- }
- jointErr := ""
- packages.Visit(pkgs, nil, func(pkg *packages.Package) {
- for _, err := range pkg.Errors {
- jointErr += err.Error() + "\n"
- }
- })
- if jointErr != "" {
- return nil, fmt.Errorf("%s", jointErr)
- }
-
- // Make a sorted list of the packages, including transitive dependencies
- // if recurse is true.
- byPath := make(map[string]*packages.Package)
- var addDeps func(*packages.Package)
- addDeps = func(pkg *packages.Package) {
- if strings.HasSuffix(pkg.PkgPath, ".test") {
- // don't add recursive test deps
- return
- }
- for _, imp := range pkg.Imports {
- if _, ok := byPath[imp.PkgPath]; ok {
- continue // seen; avoid recursive call
- }
- byPath[imp.PkgPath] = imp
- addDeps(imp)
- }
- }
- for _, pkg := range pkgs {
- byPath[pkg.PkgPath] = pkg
- if m.recursive {
- // add all dependencies once
- addDeps(pkg)
- }
- }
- pkgs = pkgs[:0]
- for _, pkg := range byPath {
- pkgs = append(pkgs, pkg)
- }
- sort.Slice(pkgs, func(i, j int) bool {
- return pkgs[i].PkgPath < pkgs[j].PkgPath
- })
- return pkgs, nil
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/main.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/main.go
deleted file mode 100644
index 004cb32e9..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/main.go
+++ /dev/null
@@ -1,332 +0,0 @@
-// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package gogrep
-
-import (
- "bytes"
- "flag"
- "fmt"
- "go/ast"
- "go/build"
- "go/printer"
- "go/token"
- "go/types"
- "io"
- "os"
- "regexp"
- "strconv"
- "strings"
-)
-
-var usage = func() {
- fmt.Fprint(os.Stderr, `usage: gogrep commands [packages]
-
-gogrep performs a query on the given Go packages.
-
- -r search dependencies recursively too
- -tests search test files too (and direct test deps, with -r)
-
-A command is one of the following:
-
- -x pattern find all nodes matching a pattern
- -g pattern discard nodes not matching a pattern
- -v pattern discard nodes matching a pattern
- -a attribute discard nodes without an attribute
- -s pattern substitute with a given syntax tree
- -p number navigate up a number of node parents
- -w write the entire source code back
-
-A pattern is a piece of Go code which may include dollar expressions. It can be
-a number of statements, a number of expressions, a declaration, or an entire
-file.
-
-A dollar expression consist of '$' and a name. Dollar expressions with the same
-name within a query always match the same node, excluding "_". Example:
-
- -x '$x.$_ = $x' # assignment of self to a field in self
-
-If '*' is before the name, it will match any number of nodes. Example:
-
- -x 'fmt.Fprintf(os.Stdout, $*_)' # all Fprintfs on stdout
-
-By default, the resulting nodes will be printed one per line to standard output.
-To update the input files, use -w.
-`)
-}
-
-func main() {
- m := matcher{
- out: os.Stdout,
- ctx: &build.Default,
- }
- err := m.fromArgs(".", os.Args[1:])
- if err != nil {
- fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
- }
-}
-
-type matcher struct {
- out io.Writer
- ctx *build.Context
-
- fset *token.FileSet
-
- parents map[ast.Node]ast.Node
-
- recursive, tests bool
- aggressive bool
-
- // information about variables (wildcards), by id (which is an
- // integer starting at 0)
- vars []varInfo
-
- // node values recorded by name, excluding "_" (used only by the
- // actual matching phase)
- values map[string]ast.Node
- scope *types.Scope
-
- *types.Info
- stdImporter types.Importer
-}
-
-type varInfo struct {
- name string
- any bool
-}
-
-func (m *matcher) info(id int) varInfo {
- if id < 0 {
- return varInfo{}
- }
- return m.vars[id]
-}
-
-type exprCmd struct {
- name string
- src string
- value interface{}
-}
-
-type strCmdFlag struct {
- name string
- cmds *[]exprCmd
-}
-
-func (o *strCmdFlag) String() string { return "" }
-func (o *strCmdFlag) Set(val string) error {
- *o.cmds = append(*o.cmds, exprCmd{name: o.name, src: val})
- return nil
-}
-
-type boolCmdFlag struct {
- name string
- cmds *[]exprCmd
-}
-
-func (o *boolCmdFlag) String() string { return "" }
-func (o *boolCmdFlag) Set(val string) error {
- if val != "true" {
- return fmt.Errorf("flag can only be true")
- }
- *o.cmds = append(*o.cmds, exprCmd{name: o.name})
- return nil
-}
-func (o *boolCmdFlag) IsBoolFlag() bool { return true }
-
-func (m *matcher) fromArgs(wd string, args []string) error {
- m.fset = token.NewFileSet()
- cmds, args, err := m.parseCmds(args)
- if err != nil {
- return err
- }
- pkgs, err := m.load(wd, args...)
- if err != nil {
- return err
- }
- var all []ast.Node
- for _, pkg := range pkgs {
- m.Info = pkg.TypesInfo
- nodes := make([]ast.Node, len(pkg.Syntax))
- for i, f := range pkg.Syntax {
- nodes[i] = f
- }
- all = append(all, m.matches(cmds, nodes)...)
- }
- for _, n := range all {
- fpos := m.fset.Position(n.Pos())
- if strings.HasPrefix(fpos.Filename, wd) {
- fpos.Filename = fpos.Filename[len(wd)+1:]
- }
- fmt.Fprintf(m.out, "%v: %s\n", fpos, singleLinePrint(n))
- }
- return nil
-}
-
-func (m *matcher) parseCmds(args []string) ([]exprCmd, []string, error) {
- flagSet := flag.NewFlagSet("gogrep", flag.ExitOnError)
- flagSet.Usage = usage
- flagSet.BoolVar(&m.recursive, "r", false, "search dependencies recursively too")
- flagSet.BoolVar(&m.tests, "tests", false, "search test files too (and direct test deps, with -r)")
-
- var cmds []exprCmd
- flagSet.Var(&strCmdFlag{
- name: "x",
- cmds: &cmds,
- }, "x", "")
- flagSet.Var(&strCmdFlag{
- name: "g",
- cmds: &cmds,
- }, "g", "")
- flagSet.Var(&strCmdFlag{
- name: "v",
- cmds: &cmds,
- }, "v", "")
- flagSet.Var(&strCmdFlag{
- name: "a",
- cmds: &cmds,
- }, "a", "")
- flagSet.Var(&strCmdFlag{
- name: "s",
- cmds: &cmds,
- }, "s", "")
- flagSet.Var(&strCmdFlag{
- name: "p",
- cmds: &cmds,
- }, "p", "")
- flagSet.Var(&boolCmdFlag{
- name: "w",
- cmds: &cmds,
- }, "w", "")
- flagSet.Parse(args)
- paths := flagSet.Args()
-
- if len(cmds) < 1 {
- return nil, nil, fmt.Errorf("need at least one command")
- }
- for i, cmd := range cmds {
- switch cmd.name {
- case "w":
- continue // no expr
- case "p":
- n, err := strconv.Atoi(cmd.src)
- if err != nil {
- return nil, nil, err
- }
- cmds[i].value = n
- case "a":
- m, err := m.parseAttrs(cmd.src)
- if err != nil {
- return nil, nil, fmt.Errorf("cannot parse mods: %v", err)
- }
- cmds[i].value = m
- default:
- node, err := m.parseExpr(cmd.src)
- if err != nil {
- return nil, nil, err
- }
- cmds[i].value = node
- }
- }
- return cmds, paths, nil
-}
-
-type bufferJoinLines struct {
- bytes.Buffer
- last string
-}
-
-var rxNeedSemicolon = regexp.MustCompile(`([])}a-zA-Z0-9"'` + "`" + `]|\+\+|--)$`)
-
-func (b *bufferJoinLines) Write(p []byte) (n int, err error) {
- if string(p) == "\n" {
- if b.last == "\n" {
- return 1, nil
- }
- if rxNeedSemicolon.MatchString(b.last) {
- b.Buffer.WriteByte(';')
- }
- b.Buffer.WriteByte(' ')
- b.last = "\n"
- return 1, nil
- }
- p = bytes.Trim(p, "\t")
- n, err = b.Buffer.Write(p)
- b.last = string(p)
- return
-}
-
-func (b *bufferJoinLines) String() string {
- return strings.TrimSuffix(b.Buffer.String(), "; ")
-}
-
-// inspect is like ast.Inspect, but it supports our extra nodeList Node
-// type (only at the top level).
-func inspect(node ast.Node, fn func(ast.Node) bool) {
- // ast.Walk barfs on ast.Node types it doesn't know, so
- // do the first level manually here
- list, ok := node.(nodeList)
- if !ok {
- ast.Inspect(node, fn)
- return
- }
- if !fn(list) {
- return
- }
- for i := 0; i < list.len(); i++ {
- ast.Inspect(list.at(i), fn)
- }
- fn(nil)
-}
-
-var emptyFset = token.NewFileSet()
-
-func singleLinePrint(node ast.Node) string {
- var buf bufferJoinLines
- inspect(node, func(node ast.Node) bool {
- bl, ok := node.(*ast.BasicLit)
- if !ok || bl.Kind != token.STRING {
- return true
- }
- if !strings.HasPrefix(bl.Value, "`") {
- return true
- }
- if !strings.Contains(bl.Value, "\n") {
- return true
- }
- bl.Value = strconv.Quote(bl.Value[1 : len(bl.Value)-1])
- return true
- })
- printNode(&buf, emptyFset, node)
- return buf.String()
-}
-
-func printNode(w io.Writer, fset *token.FileSet, node ast.Node) {
- switch x := node.(type) {
- case exprList:
- if len(x) == 0 {
- return
- }
- printNode(w, fset, x[0])
- for _, n := range x[1:] {
- fmt.Fprintf(w, ", ")
- printNode(w, fset, n)
- }
- case stmtList:
- if len(x) == 0 {
- return
- }
- printNode(w, fset, x[0])
- for _, n := range x[1:] {
- fmt.Fprintf(w, "; ")
- printNode(w, fset, n)
- }
- default:
- err := printer.Fprint(w, fset, node)
- if err != nil && strings.Contains(err.Error(), "go/printer: unsupported node type") {
- // Should never happen, but make it obvious when it does.
- panic(fmt.Errorf("cannot print node %T: %v", node, err))
- }
- }
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/match.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/match.go
deleted file mode 100644
index 08b53d87d..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/match.go
+++ /dev/null
@@ -1,1108 +0,0 @@
-// Copyright (c) 2017, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package gogrep
-
-import (
- "fmt"
- "go/ast"
- "go/importer"
- "go/token"
- "go/types"
- "regexp"
- "strconv"
-)
-
-func (m *matcher) matches(cmds []exprCmd, nodes []ast.Node) []ast.Node {
- m.parents = make(map[ast.Node]ast.Node)
- m.fillParents(nodes...)
- initial := make([]submatch, len(nodes))
- for i, node := range nodes {
- initial[i].node = node
- initial[i].values = make(map[string]ast.Node)
- }
- final := m.submatches(cmds, initial)
- finalNodes := make([]ast.Node, len(final))
- for i := range finalNodes {
- finalNodes[i] = final[i].node
- }
- return finalNodes
-}
-
-func (m *matcher) fillParents(nodes ...ast.Node) {
- stack := make([]ast.Node, 1, 32)
- for _, node := range nodes {
- inspect(node, func(node ast.Node) bool {
- if node == nil {
- stack = stack[:len(stack)-1]
- return true
- }
- if _, ok := node.(nodeList); !ok {
- m.parents[node] = stack[len(stack)-1]
- }
- stack = append(stack, node)
- return true
- })
- }
-}
-
-type submatch struct {
- node ast.Node
- values map[string]ast.Node
-}
-
-func valsCopy(values map[string]ast.Node) map[string]ast.Node {
- v2 := make(map[string]ast.Node, len(values))
- for k, v := range values {
- v2[k] = v
- }
- return v2
-}
-
-func (m *matcher) submatches(cmds []exprCmd, subs []submatch) []submatch {
- if len(cmds) == 0 {
- return subs
- }
- cmd := cmds[0]
- var fn func(exprCmd, []submatch) []submatch
- switch cmd.name {
- case "x":
- fn = m.cmdRange
- case "g":
- fn = m.cmdFilter(true)
- case "v":
- fn = m.cmdFilter(false)
- case "s":
- fn = m.cmdSubst
- case "a":
- fn = m.cmdAttr
- case "p":
- fn = m.cmdParents
- case "w":
- if len(cmds) > 1 {
- panic("-w must be the last command")
- }
- fn = m.cmdWrite
- default:
- panic(fmt.Sprintf("unknown command: %q", cmd.name))
- }
- return m.submatches(cmds[1:], fn(cmd, subs))
-}
-
-func (m *matcher) cmdRange(cmd exprCmd, subs []submatch) []submatch {
- var matches []submatch
- seen := map[nodePosHash]bool{}
-
- // The values context for each new submatch must be a new copy
- // from its parent submatch. If we don't do this copy, all the
- // submatches would share the same map and have side effects.
- var startValues map[string]ast.Node
-
- match := func(exprNode, node ast.Node) {
- if node == nil {
- return
- }
- m.values = valsCopy(startValues)
- found := m.topNode(exprNode, node)
- if found == nil {
- return
- }
- hash := posHash(found)
- if !seen[hash] {
- matches = append(matches, submatch{
- node: found,
- values: m.values,
- })
- seen[hash] = true
- }
- }
- for _, sub := range subs {
- startValues = valsCopy(sub.values)
- m.walkWithLists(cmd.value.(ast.Node), sub.node, match)
- }
- return matches
-}
-
-func (m *matcher) cmdFilter(wantAny bool) func(exprCmd, []submatch) []submatch {
- return func(cmd exprCmd, subs []submatch) []submatch {
- var matches []submatch
- any := false
- match := func(exprNode, node ast.Node) {
- if node == nil {
- return
- }
- found := m.topNode(exprNode, node)
- if found != nil {
- any = true
- }
- }
- for _, sub := range subs {
- any = false
- m.values = sub.values
- m.walkWithLists(cmd.value.(ast.Node), sub.node, match)
- if any == wantAny {
- matches = append(matches, sub)
- }
- }
- return matches
- }
-}
-
-func (m *matcher) cmdAttr(cmd exprCmd, subs []submatch) []submatch {
- var matches []submatch
- for _, sub := range subs {
- m.values = sub.values
- if m.attrApplies(sub.node, cmd.value.(attribute)) {
- matches = append(matches, sub)
- }
- }
- return matches
-}
-
-func (m *matcher) cmdParents(cmd exprCmd, subs []submatch) []submatch {
- for i := range subs {
- sub := &subs[i]
- reps := cmd.value.(int)
- for j := 0; j < reps; j++ {
- sub.node = m.parentOf(sub.node)
- }
- }
- return subs
-}
-
-func (m *matcher) attrApplies(node ast.Node, attr interface{}) bool {
- if rx, ok := attr.(*regexp.Regexp); ok {
- if exprStmt, ok := node.(*ast.ExprStmt); ok {
- // since we prefer matching entire statements, get the
- // ident from the ExprStmt
- node = exprStmt.X
- }
- ident, ok := node.(*ast.Ident)
- return ok && rx.MatchString(ident.Name)
- }
- expr, _ := node.(ast.Expr)
- if expr == nil {
- return false // only exprs have types
- }
- t := m.Info.TypeOf(expr)
- if t == nil {
- return false // an expr, but no type?
- }
- tv := m.Info.Types[expr]
- switch x := attr.(type) {
- case typeCheck:
- want := m.resolveType(m.scope, x.expr)
- switch {
- case x.op == "type" && !types.Identical(t, want):
- return false
- case x.op == "asgn" && !types.AssignableTo(t, want):
- return false
- case x.op == "conv" && !types.ConvertibleTo(t, want):
- return false
- }
- case typProperty:
- switch {
- case x == "comp" && !types.Comparable(t):
- return false
- case x == "addr" && !tv.Addressable():
- return false
- }
- case typUnderlying:
- u := t.Underlying()
- uok := true
- switch x {
- case "basic":
- _, uok = u.(*types.Basic)
- case "array":
- _, uok = u.(*types.Array)
- case "slice":
- _, uok = u.(*types.Slice)
- case "struct":
- _, uok = u.(*types.Struct)
- case "interface":
- _, uok = u.(*types.Interface)
- case "pointer":
- _, uok = u.(*types.Pointer)
- case "func":
- _, uok = u.(*types.Signature)
- case "map":
- _, uok = u.(*types.Map)
- case "chan":
- _, uok = u.(*types.Chan)
- }
- if !uok {
- return false
- }
- }
- return true
-}
-
-func (m *matcher) walkWithLists(exprNode, node ast.Node, fn func(exprNode, node ast.Node)) {
- visit := func(node ast.Node) bool {
- fn(exprNode, node)
- for _, list := range nodeLists(node) {
- fn(exprNode, list)
- if id := m.wildAnyIdent(exprNode); id != nil {
- // so that "$*a" will match "a, b"
- fn(exprList([]ast.Expr{id}), list)
- // so that "$*a" will match "a; b"
- fn(toStmtList(id), list)
- }
- }
- return true
- }
- inspect(node, visit)
-}
-
-func (m *matcher) topNode(exprNode, node ast.Node) ast.Node {
- sts1, ok1 := exprNode.(stmtList)
- sts2, ok2 := node.(stmtList)
- if ok1 && ok2 {
- // allow a partial match at the top level
- return m.nodes(sts1, sts2, true)
- }
- if m.node(exprNode, node) {
- return node
- }
- return nil
-}
-
-// optNode is like node, but for those nodes that can be nil and are not
-// part of a list. For example, init and post statements in a for loop.
-func (m *matcher) optNode(expr, node ast.Node) bool {
- if ident := m.wildAnyIdent(expr); ident != nil {
- if m.node(toStmtList(ident), toStmtList(node)) {
- return true
- }
- }
- return m.node(expr, node)
-}
-
-func (m *matcher) node(expr, node ast.Node) bool {
- switch node.(type) {
- case *ast.File, *ast.FuncType, *ast.BlockStmt, *ast.IfStmt,
- *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.CaseClause,
- *ast.CommClause, *ast.ForStmt, *ast.RangeStmt:
- if scope := m.Info.Scopes[node]; scope != nil {
- m.scope = scope
- }
- }
- if !m.aggressive {
- if expr == nil || node == nil {
- return expr == node
- }
- } else {
- if expr == nil && node == nil {
- return true
- }
- if node == nil {
- expr, node = node, expr
- }
- }
- switch x := expr.(type) {
- case nil: // only in aggressive mode
- y, ok := node.(*ast.Ident)
- return ok && y.Name == "_"
-
- case *ast.File:
- y, ok := node.(*ast.File)
- if !ok || !m.node(x.Name, y.Name) || len(x.Decls) != len(y.Decls) ||
- len(x.Imports) != len(y.Imports) {
- return false
- }
- for i, decl := range x.Decls {
- if !m.node(decl, y.Decls[i]) {
- return false
- }
- }
- for i, imp := range x.Imports {
- if !m.node(imp, y.Imports[i]) {
- return false
- }
- }
- return true
-
- case *ast.Ident:
- y, yok := node.(*ast.Ident)
- if !isWildName(x.Name) {
- // not a wildcard
- return yok && x.Name == y.Name
- }
- if _, ok := node.(ast.Node); !ok {
- return false // to not include our extra node types
- }
- id := fromWildName(x.Name)
- info := m.info(id)
- if info.any {
- return false
- }
- if info.name == "_" {
- // values are discarded, matches anything
- return true
- }
- prev, ok := m.values[info.name]
- if !ok {
- // first occurrence, record value
- m.values[info.name] = node
- return true
- }
- // multiple uses must match
- return m.node(prev, node)
-
- // lists (ys are generated by us while walking)
- case exprList:
- y, ok := node.(exprList)
- return ok && m.exprs(x, y)
- case stmtList:
- y, ok := node.(stmtList)
- return ok && m.stmts(x, y)
-
- // lits
- case *ast.BasicLit:
- y, ok := node.(*ast.BasicLit)
- return ok && x.Kind == y.Kind && x.Value == y.Value
- case *ast.CompositeLit:
- y, ok := node.(*ast.CompositeLit)
- return ok && m.node(x.Type, y.Type) && m.exprs(x.Elts, y.Elts)
- case *ast.FuncLit:
- y, ok := node.(*ast.FuncLit)
- return ok && m.node(x.Type, y.Type) && m.node(x.Body, y.Body)
-
- // types
- case *ast.ArrayType:
- y, ok := node.(*ast.ArrayType)
- return ok && m.node(x.Len, y.Len) && m.node(x.Elt, y.Elt)
- case *ast.MapType:
- y, ok := node.(*ast.MapType)
- return ok && m.node(x.Key, y.Key) && m.node(x.Value, y.Value)
- case *ast.StructType:
- y, ok := node.(*ast.StructType)
- return ok && m.fields(x.Fields, y.Fields)
- case *ast.Field:
- // TODO: tags?
- y, ok := node.(*ast.Field)
- if !ok {
- return false
- }
- if len(x.Names) == 0 && x.Tag == nil && m.node(x.Type, y) {
- // Allow $var to match a field.
- return true
- }
- return m.idents(x.Names, y.Names) && m.node(x.Type, y.Type)
- case *ast.FuncType:
- y, ok := node.(*ast.FuncType)
- return ok && m.fields(x.Params, y.Params) &&
- m.fields(x.Results, y.Results)
- case *ast.InterfaceType:
- y, ok := node.(*ast.InterfaceType)
- return ok && m.fields(x.Methods, y.Methods)
- case *ast.ChanType:
- y, ok := node.(*ast.ChanType)
- return ok && x.Dir == y.Dir && m.node(x.Value, y.Value)
-
- // other exprs
- case *ast.Ellipsis:
- y, ok := node.(*ast.Ellipsis)
- return ok && m.node(x.Elt, y.Elt)
- case *ast.ParenExpr:
- y, ok := node.(*ast.ParenExpr)
- return ok && m.node(x.X, y.X)
- case *ast.UnaryExpr:
- y, ok := node.(*ast.UnaryExpr)
- return ok && x.Op == y.Op && m.node(x.X, y.X)
- case *ast.BinaryExpr:
- y, ok := node.(*ast.BinaryExpr)
- return ok && x.Op == y.Op && m.node(x.X, y.X) && m.node(x.Y, y.Y)
- case *ast.CallExpr:
- y, ok := node.(*ast.CallExpr)
- return ok && m.node(x.Fun, y.Fun) && m.exprs(x.Args, y.Args) &&
- bothValid(x.Ellipsis, y.Ellipsis)
- case *ast.KeyValueExpr:
- y, ok := node.(*ast.KeyValueExpr)
- return ok && m.node(x.Key, y.Key) && m.node(x.Value, y.Value)
- case *ast.StarExpr:
- y, ok := node.(*ast.StarExpr)
- return ok && m.node(x.X, y.X)
- case *ast.SelectorExpr:
- y, ok := node.(*ast.SelectorExpr)
- return ok && m.node(x.X, y.X) && m.node(x.Sel, y.Sel)
- case *ast.IndexExpr:
- y, ok := node.(*ast.IndexExpr)
- return ok && m.node(x.X, y.X) && m.node(x.Index, y.Index)
- case *ast.SliceExpr:
- y, ok := node.(*ast.SliceExpr)
- return ok && m.node(x.X, y.X) && m.node(x.Low, y.Low) &&
- m.node(x.High, y.High) && m.node(x.Max, y.Max)
- case *ast.TypeAssertExpr:
- y, ok := node.(*ast.TypeAssertExpr)
- return ok && m.node(x.X, y.X) && m.node(x.Type, y.Type)
-
- // decls
- case *ast.GenDecl:
- y, ok := node.(*ast.GenDecl)
- return ok && x.Tok == y.Tok && m.specs(x.Specs, y.Specs)
- case *ast.FuncDecl:
- y, ok := node.(*ast.FuncDecl)
- return ok && m.fields(x.Recv, y.Recv) && m.node(x.Name, y.Name) &&
- m.node(x.Type, y.Type) && m.node(x.Body, y.Body)
-
- // specs
- case *ast.ValueSpec:
- y, ok := node.(*ast.ValueSpec)
- if !ok || !m.node(x.Type, y.Type) {
- return false
- }
- if m.aggressive && len(x.Names) == 1 {
- for i := range y.Names {
- if m.node(x.Names[i], y.Names[i]) &&
- (x.Values == nil || m.node(x.Values[i], y.Values[i])) {
- return true
- }
- }
- }
- return m.idents(x.Names, y.Names) && m.exprs(x.Values, y.Values)
-
- // stmt bridge nodes
- case *ast.ExprStmt:
- if id, ok := x.X.(*ast.Ident); ok && isWildName(id.Name) {
- // prefer matching $x as a statement, as it's
- // the parent
- return m.node(id, node)
- }
- y, ok := node.(*ast.ExprStmt)
- return ok && m.node(x.X, y.X)
- case *ast.DeclStmt:
- y, ok := node.(*ast.DeclStmt)
- return ok && m.node(x.Decl, y.Decl)
-
- // stmts
- case *ast.EmptyStmt:
- _, ok := node.(*ast.EmptyStmt)
- return ok
- case *ast.LabeledStmt:
- y, ok := node.(*ast.LabeledStmt)
- return ok && m.node(x.Label, y.Label) && m.node(x.Stmt, y.Stmt)
- case *ast.SendStmt:
- y, ok := node.(*ast.SendStmt)
- return ok && m.node(x.Chan, y.Chan) && m.node(x.Value, y.Value)
- case *ast.IncDecStmt:
- y, ok := node.(*ast.IncDecStmt)
- return ok && x.Tok == y.Tok && m.node(x.X, y.X)
- case *ast.AssignStmt:
- y, ok := node.(*ast.AssignStmt)
- if !m.aggressive {
- return ok && x.Tok == y.Tok &&
- m.exprs(x.Lhs, y.Lhs) && m.exprs(x.Rhs, y.Rhs)
- }
- if ok {
- return m.exprs(x.Lhs, y.Lhs) && m.exprs(x.Rhs, y.Rhs)
- }
- vs, ok := node.(*ast.ValueSpec)
- return ok && m.nodesMatch(exprList(x.Lhs), identList(vs.Names)) &&
- m.exprs(x.Rhs, vs.Values)
- case *ast.GoStmt:
- y, ok := node.(*ast.GoStmt)
- return ok && m.node(x.Call, y.Call)
- case *ast.DeferStmt:
- y, ok := node.(*ast.DeferStmt)
- return ok && m.node(x.Call, y.Call)
- case *ast.ReturnStmt:
- y, ok := node.(*ast.ReturnStmt)
- return ok && m.exprs(x.Results, y.Results)
- case *ast.BranchStmt:
- y, ok := node.(*ast.BranchStmt)
- return ok && x.Tok == y.Tok && m.node(maybeNilIdent(x.Label), maybeNilIdent(y.Label))
- case *ast.BlockStmt:
- if m.aggressive && m.node(stmtList(x.List), node) {
- return true
- }
- y, ok := node.(*ast.BlockStmt)
- if !ok {
- return false
- }
- if x == nil || y == nil {
- return x == y
- }
- return m.cases(x.List, y.List) || m.stmts(x.List, y.List)
- case *ast.IfStmt:
- y, ok := node.(*ast.IfStmt)
- if !ok {
- return false
- }
- condAny := m.wildAnyIdent(x.Cond)
- if condAny != nil && x.Init == nil {
- // if $*x { ... } on the left
- left := toStmtList(condAny)
- return m.node(left, toStmtList(y.Init, y.Cond)) &&
- m.node(x.Body, y.Body) && m.optNode(x.Else, y.Else)
- }
- return m.optNode(x.Init, y.Init) && m.node(x.Cond, y.Cond) &&
- m.node(x.Body, y.Body) && m.node(x.Else, y.Else)
- case *ast.CaseClause:
- y, ok := node.(*ast.CaseClause)
- return ok && m.exprs(x.List, y.List) && m.stmts(x.Body, y.Body)
- case *ast.SwitchStmt:
- y, ok := node.(*ast.SwitchStmt)
- if !ok {
- return false
- }
- tagAny := m.wildAnyIdent(x.Tag)
- if tagAny != nil && x.Init == nil {
- // switch $*x { ... } on the left
- left := toStmtList(tagAny)
- return m.node(left, toStmtList(y.Init, y.Tag)) &&
- m.node(x.Body, y.Body)
- }
- return m.optNode(x.Init, y.Init) && m.node(x.Tag, y.Tag) && m.node(x.Body, y.Body)
- case *ast.TypeSwitchStmt:
- y, ok := node.(*ast.TypeSwitchStmt)
- return ok && m.optNode(x.Init, y.Init) && m.node(x.Assign, y.Assign) && m.node(x.Body, y.Body)
- case *ast.CommClause:
- y, ok := node.(*ast.CommClause)
- return ok && m.node(x.Comm, y.Comm) && m.stmts(x.Body, y.Body)
- case *ast.SelectStmt:
- y, ok := node.(*ast.SelectStmt)
- return ok && m.node(x.Body, y.Body)
- case *ast.ForStmt:
- condIdent := m.wildAnyIdent(x.Cond)
- if condIdent != nil && x.Init == nil && x.Post == nil {
- // "for $*x { ... }" on the left
- left := toStmtList(condIdent)
- // also accept RangeStmt on the right
- switch y := node.(type) {
- case *ast.ForStmt:
- return m.node(left, toStmtList(y.Init, y.Cond, y.Post)) &&
- m.node(x.Body, y.Body)
- case *ast.RangeStmt:
- return m.node(left, toStmtList(y.Key, y.Value, y.X)) &&
- m.node(x.Body, y.Body)
- default:
- return false
- }
- }
- y, ok := node.(*ast.ForStmt)
- if !ok {
- return false
- }
- return m.optNode(x.Init, y.Init) && m.node(x.Cond, y.Cond) &&
- m.optNode(x.Post, y.Post) && m.node(x.Body, y.Body)
- case *ast.RangeStmt:
- y, ok := node.(*ast.RangeStmt)
- return ok && m.node(x.Key, y.Key) && m.node(x.Value, y.Value) &&
- m.node(x.X, y.X) && m.node(x.Body, y.Body)
-
- case *ast.TypeSpec:
- y, ok := node.(*ast.TypeSpec)
- return ok && m.node(x.Name, y.Name) && m.node(x.Type, y.Type)
-
- case *ast.FieldList:
- // we ignore these, for now
- return false
- default:
- panic(fmt.Sprintf("unexpected node: %T", x))
- }
-}
-
-func (m *matcher) wildAnyIdent(node ast.Node) *ast.Ident {
- switch x := node.(type) {
- case *ast.ExprStmt:
- return m.wildAnyIdent(x.X)
- case *ast.Ident:
- if !isWildName(x.Name) {
- return nil
- }
- if !m.info(fromWildName(x.Name)).any {
- return nil
- }
- return x
- }
- return nil
-}
-
-// resolveType resolves a type expression from a given scope.
-func (m *matcher) resolveType(scope *types.Scope, expr ast.Expr) types.Type {
- switch x := expr.(type) {
- case *ast.Ident:
- _, obj := scope.LookupParent(x.Name, token.NoPos)
- if obj == nil {
- // TODO: error if all resolveType calls on a type
- // expression fail? or perhaps resolve type expressions
- // across the entire program?
- return nil
- }
- return obj.Type()
- case *ast.ArrayType:
- elt := m.resolveType(scope, x.Elt)
- if x.Len == nil {
- return types.NewSlice(elt)
- }
- bl, ok := x.Len.(*ast.BasicLit)
- if !ok || bl.Kind != token.INT {
- panic(fmt.Sprintf("TODO: %T", x))
- }
- len, _ := strconv.ParseInt(bl.Value, 0, 0)
- return types.NewArray(elt, len)
- case *ast.StarExpr:
- return types.NewPointer(m.resolveType(scope, x.X))
- case *ast.ChanType:
- dir := types.SendRecv
- switch x.Dir {
- case ast.SEND:
- dir = types.SendOnly
- case ast.RECV:
- dir = types.RecvOnly
- }
- return types.NewChan(dir, m.resolveType(scope, x.Value))
- case *ast.SelectorExpr:
- scope = m.findScope(scope, x.X)
- return m.resolveType(scope, x.Sel)
- default:
- panic(fmt.Sprintf("resolveType TODO: %T", x))
- }
-}
-
-func (m *matcher) findScope(scope *types.Scope, expr ast.Expr) *types.Scope {
- switch x := expr.(type) {
- case *ast.Ident:
- _, obj := scope.LookupParent(x.Name, token.NoPos)
- if pkg, ok := obj.(*types.PkgName); ok {
- return pkg.Imported().Scope()
- }
- // try to fall back to std
- if m.stdImporter == nil {
- m.stdImporter = importer.Default()
- }
- path := x.Name
- if longer, ok := stdImportFixes[path]; ok {
- path = longer
- }
- pkg, err := m.stdImporter.Import(path)
- if err != nil {
- panic(fmt.Sprintf("findScope err: %v", err))
- }
- return pkg.Scope()
- default:
- panic(fmt.Sprintf("findScope TODO: %T", x))
- }
-}
-
-var stdImportFixes = map[string]string{
- // go list std | grep -vE 'vendor|internal' | grep '/' | sed -r 's@^(.*)/([^/]*)$@"\2": "\1/\2",@' | sort
- // (after commenting out the less likely duplicates)
- "adler32": "hash/adler32",
- "aes": "crypto/aes",
- "ascii85": "encoding/ascii85",
- "asn1": "encoding/asn1",
- "ast": "go/ast",
- "atomic": "sync/atomic",
- "base32": "encoding/base32",
- "base64": "encoding/base64",
- "big": "math/big",
- "binary": "encoding/binary",
- "bits": "math/bits",
- "build": "go/build",
- "bzip2": "compress/bzip2",
- "cgi": "net/http/cgi",
- "cgo": "runtime/cgo",
- "cipher": "crypto/cipher",
- "cmplx": "math/cmplx",
- "color": "image/color",
- "constant": "go/constant",
- "cookiejar": "net/http/cookiejar",
- "crc32": "hash/crc32",
- "crc64": "hash/crc64",
- "csv": "encoding/csv",
- "debug": "runtime/debug",
- "des": "crypto/des",
- "doc": "go/doc",
- "draw": "image/draw",
- "driver": "database/sql/driver",
- "dsa": "crypto/dsa",
- "dwarf": "debug/dwarf",
- "ecdsa": "crypto/ecdsa",
- "elf": "debug/elf",
- "elliptic": "crypto/elliptic",
- "exec": "os/exec",
- "fcgi": "net/http/fcgi",
- "filepath": "path/filepath",
- "flate": "compress/flate",
- "fnv": "hash/fnv",
- "format": "go/format",
- "gif": "image/gif",
- "gob": "encoding/gob",
- "gosym": "debug/gosym",
- "gzip": "compress/gzip",
- "heap": "container/heap",
- "hex": "encoding/hex",
- "hmac": "crypto/hmac",
- "http": "net/http",
- "httptest": "net/http/httptest",
- "httptrace": "net/http/httptrace",
- "httputil": "net/http/httputil",
- "importer": "go/importer",
- "iotest": "testing/iotest",
- "ioutil": "io/ioutil",
- "jpeg": "image/jpeg",
- "json": "encoding/json",
- "jsonrpc": "net/rpc/jsonrpc",
- "list": "container/list",
- "lzw": "compress/lzw",
- "macho": "debug/macho",
- "mail": "net/mail",
- "md5": "crypto/md5",
- "multipart": "mime/multipart",
- "palette": "image/color/palette",
- "parser": "go/parser",
- "parse": "text/template/parse",
- "pe": "debug/pe",
- "pem": "encoding/pem",
- "pkix": "crypto/x509/pkix",
- "plan9obj": "debug/plan9obj",
- "png": "image/png",
- //"pprof": "net/http/pprof",
- "pprof": "runtime/pprof",
- "printer": "go/printer",
- "quick": "testing/quick",
- "quotedprintable": "mime/quotedprintable",
- "race": "runtime/race",
- //"rand": "crypto/rand",
- "rand": "math/rand",
- "rc4": "crypto/rc4",
- "ring": "container/ring",
- "rpc": "net/rpc",
- "rsa": "crypto/rsa",
- //"scanner": "go/scanner",
- "scanner": "text/scanner",
- "sha1": "crypto/sha1",
- "sha256": "crypto/sha256",
- "sha512": "crypto/sha512",
- "signal": "os/signal",
- "smtp": "net/smtp",
- "sql": "database/sql",
- "subtle": "crypto/subtle",
- "suffixarray": "index/suffixarray",
- "syntax": "regexp/syntax",
- "syslog": "log/syslog",
- "tabwriter": "text/tabwriter",
- "tar": "archive/tar",
- //"template": "html/template",
- "template": "text/template",
- "textproto": "net/textproto",
- "tls": "crypto/tls",
- "token": "go/token",
- "trace": "runtime/trace",
- "types": "go/types",
- "url": "net/url",
- "user": "os/user",
- "utf16": "unicode/utf16",
- "utf8": "unicode/utf8",
- "x509": "crypto/x509",
- "xml": "encoding/xml",
- "zip": "archive/zip",
- "zlib": "compress/zlib",
-}
-
-func maybeNilIdent(x *ast.Ident) ast.Node {
- if x == nil {
- return nil
- }
- return x
-}
-
-func bothValid(p1, p2 token.Pos) bool {
- return p1.IsValid() == p2.IsValid()
-}
-
-type nodeList interface {
- at(i int) ast.Node
- len() int
- slice(from, to int) nodeList
- ast.Node
-}
-
-// nodes matches two lists of nodes. It uses a common algorithm to match
-// wildcard patterns with any number of nodes without recursion.
-func (m *matcher) nodes(ns1, ns2 nodeList, partial bool) ast.Node {
- ns1len, ns2len := ns1.len(), ns2.len()
- if ns1len == 0 {
- if ns2len == 0 {
- return ns2
- }
- return nil
- }
- partialStart, partialEnd := 0, ns2len
- i1, i2 := 0, 0
- next1, next2 := 0, 0
-
- // We need to keep a copy of m.values so that we can restart
- // with a different "any of" match while discarding any matches
- // we found while trying it.
- type restart struct {
- matches map[string]ast.Node
- next1, next2 int
- }
- // We need to stack these because otherwise some edge cases
- // would not match properly. Since we have various kinds of
- // wildcards (nodes containing them, $_, and $*_), in some cases
- // we may have to go back and do multiple restarts to get to the
- // right starting position.
- var stack []restart
- push := func(n1, n2 int) {
- if n2 > ns2len {
- return // would be discarded anyway
- }
- stack = append(stack, restart{valsCopy(m.values), n1, n2})
- next1, next2 = n1, n2
- }
- pop := func() {
- i1, i2 = next1, next2
- m.values = stack[len(stack)-1].matches
- stack = stack[:len(stack)-1]
- next1, next2 = 0, 0
- if len(stack) > 0 {
- next1 = stack[len(stack)-1].next1
- next2 = stack[len(stack)-1].next2
- }
- }
- wildName := ""
- wildStart := 0
-
- // wouldMatch returns whether the current wildcard - if any -
- // matches the nodes we are currently trying it on.
- wouldMatch := func() bool {
- switch wildName {
- case "", "_":
- return true
- }
- list := ns2.slice(wildStart, i2)
- // check that it matches any nodes found elsewhere
- prev, ok := m.values[wildName]
- if ok && !m.node(prev, list) {
- return false
- }
- m.values[wildName] = list
- return true
- }
- for i1 < ns1len || i2 < ns2len {
- if i1 < ns1len {
- n1 := ns1.at(i1)
- id := fromWildNode(n1)
- info := m.info(id)
- if info.any {
- // keep track of where this wildcard
- // started (if info.name == wildName,
- // we're trying the same wildcard
- // matching one more node)
- if info.name != wildName {
- wildStart = i2
- wildName = info.name
- }
- // try to match zero or more at i2,
- // restarting at i2+1 if it fails
- push(i1, i2+1)
- i1++
- continue
- }
- if partial && i1 == 0 {
- // let "b; c" match "a; b; c"
- // (simulates a $*_ at the beginning)
- partialStart = i2
- push(i1, i2+1)
- }
- if i2 < ns2len && wouldMatch() && m.node(n1, ns2.at(i2)) {
- wildName = ""
- // ordinary match
- i1++
- i2++
- continue
- }
- }
- if partial && i1 == ns1len && wildName == "" {
- partialEnd = i2
- break // let "b; c" match "b; c; d"
- }
- // mismatch, try to restart
- if 0 < next2 && next2 <= ns2len && (i1 != next1 || i2 != next2) {
- pop()
- continue
- }
- return nil
- }
- if !wouldMatch() {
- return nil
- }
- return ns2.slice(partialStart, partialEnd)
-}
-
-func (m *matcher) nodesMatch(list1, list2 nodeList) bool {
- return m.nodes(list1, list2, false) != nil
-}
-
-func (m *matcher) exprs(exprs1, exprs2 []ast.Expr) bool {
- return m.nodesMatch(exprList(exprs1), exprList(exprs2))
-}
-
-func (m *matcher) idents(ids1, ids2 []*ast.Ident) bool {
- return m.nodesMatch(identList(ids1), identList(ids2))
-}
-
-func toStmtList(nodes ...ast.Node) stmtList {
- var stmts []ast.Stmt
- for _, node := range nodes {
- switch x := node.(type) {
- case nil:
- case ast.Stmt:
- stmts = append(stmts, x)
- case ast.Expr:
- stmts = append(stmts, &ast.ExprStmt{X: x})
- default:
- panic(fmt.Sprintf("unexpected node type: %T", x))
- }
- }
- return stmtList(stmts)
-}
-
-func (m *matcher) cases(stmts1, stmts2 []ast.Stmt) bool {
- for _, stmt := range stmts2 {
- switch stmt.(type) {
- case *ast.CaseClause, *ast.CommClause:
- default:
- return false
- }
- }
- var left []*ast.Ident
- for _, stmt := range stmts1 {
- var expr ast.Expr
- var bstmt ast.Stmt
- switch x := stmt.(type) {
- case *ast.CaseClause:
- if len(x.List) != 1 || len(x.Body) != 1 {
- return false
- }
- expr, bstmt = x.List[0], x.Body[0]
- case *ast.CommClause:
- if x.Comm == nil || len(x.Body) != 1 {
- return false
- }
- if commExpr, ok := x.Comm.(*ast.ExprStmt); ok {
- expr = commExpr.X
- }
- bstmt = x.Body[0]
- default:
- return false
- }
- xs, ok := bstmt.(*ast.ExprStmt)
- if !ok {
- return false
- }
- bodyIdent, ok := xs.X.(*ast.Ident)
- if !ok || bodyIdent.Name != "gogrep_body" {
- return false
- }
- id, ok := expr.(*ast.Ident)
- if !ok || !isWildName(id.Name) {
- return false
- }
- left = append(left, id)
- }
- return m.nodesMatch(identList(left), stmtList(stmts2))
-}
-
-func (m *matcher) stmts(stmts1, stmts2 []ast.Stmt) bool {
- return m.nodesMatch(stmtList(stmts1), stmtList(stmts2))
-}
-
-func (m *matcher) specs(specs1, specs2 []ast.Spec) bool {
- return m.nodesMatch(specList(specs1), specList(specs2))
-}
-
-func (m *matcher) fields(fields1, fields2 *ast.FieldList) bool {
- if fields1 == nil || fields2 == nil {
- return fields1 == fields2
- }
- return m.nodesMatch(fieldList(fields1.List), fieldList(fields2.List))
-}
-
-func fromWildNode(node ast.Node) int {
- switch node := node.(type) {
- case *ast.Ident:
- return fromWildName(node.Name)
- case *ast.ExprStmt:
- return fromWildNode(node.X)
- case *ast.Field:
- // Allow $var to represent an entire field; the lone identifier
- // gets picked up as an anonymous field.
- if len(node.Names) == 0 && node.Tag == nil {
- return fromWildNode(node.Type)
- }
- }
- return -1
-}
-
-func nodeLists(n ast.Node) []nodeList {
- var lists []nodeList
- addList := func(list nodeList) {
- if list.len() > 0 {
- lists = append(lists, list)
- }
- }
- switch x := n.(type) {
- case nodeList:
- addList(x)
- case *ast.CompositeLit:
- addList(exprList(x.Elts))
- case *ast.CallExpr:
- addList(exprList(x.Args))
- case *ast.AssignStmt:
- addList(exprList(x.Lhs))
- addList(exprList(x.Rhs))
- case *ast.ReturnStmt:
- addList(exprList(x.Results))
- case *ast.ValueSpec:
- addList(exprList(x.Values))
- case *ast.BlockStmt:
- addList(stmtList(x.List))
- case *ast.CaseClause:
- addList(exprList(x.List))
- addList(stmtList(x.Body))
- case *ast.CommClause:
- addList(stmtList(x.Body))
- }
- return lists
-}
-
-type exprList []ast.Expr
-type identList []*ast.Ident
-type stmtList []ast.Stmt
-type specList []ast.Spec
-type fieldList []*ast.Field
-
-func (l exprList) len() int { return len(l) }
-func (l identList) len() int { return len(l) }
-func (l stmtList) len() int { return len(l) }
-func (l specList) len() int { return len(l) }
-func (l fieldList) len() int { return len(l) }
-
-func (l exprList) at(i int) ast.Node { return l[i] }
-func (l identList) at(i int) ast.Node { return l[i] }
-func (l stmtList) at(i int) ast.Node { return l[i] }
-func (l specList) at(i int) ast.Node { return l[i] }
-func (l fieldList) at(i int) ast.Node { return l[i] }
-
-func (l exprList) slice(i, j int) nodeList { return l[i:j] }
-func (l identList) slice(i, j int) nodeList { return l[i:j] }
-func (l stmtList) slice(i, j int) nodeList { return l[i:j] }
-func (l specList) slice(i, j int) nodeList { return l[i:j] }
-func (l fieldList) slice(i, j int) nodeList { return l[i:j] }
-
-func (l exprList) Pos() token.Pos { return l[0].Pos() }
-func (l identList) Pos() token.Pos { return l[0].Pos() }
-func (l stmtList) Pos() token.Pos { return l[0].Pos() }
-func (l specList) Pos() token.Pos { return l[0].Pos() }
-func (l fieldList) Pos() token.Pos { return l[0].Pos() }
-
-func (l exprList) End() token.Pos { return l[len(l)-1].End() }
-func (l identList) End() token.Pos { return l[len(l)-1].End() }
-func (l stmtList) End() token.Pos { return l[len(l)-1].End() }
-func (l specList) End() token.Pos { return l[len(l)-1].End() }
-func (l fieldList) End() token.Pos { return l[len(l)-1].End() }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/subst.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/subst.go
deleted file mode 100644
index 8870858ed..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/subst.go
+++ /dev/null
@@ -1,261 +0,0 @@
-// Copyright (c) 2018, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package gogrep
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "reflect"
-)
-
-func (m *matcher) cmdSubst(cmd exprCmd, subs []submatch) []submatch {
- for i := range subs {
- sub := &subs[i]
- nodeCopy, _ := m.parseExpr(cmd.src)
- // since we'll want to set positions within the file's
- // FileSet
- scrubPositions(nodeCopy)
-
- m.fillParents(nodeCopy)
- nodeCopy = m.fillValues(nodeCopy, sub.values)
- m.substNode(sub.node, nodeCopy)
- sub.node = nodeCopy
- }
- return subs
-}
-
-type topNode struct {
- Node ast.Node
-}
-
-func (t topNode) Pos() token.Pos { return t.Node.Pos() }
-func (t topNode) End() token.Pos { return t.Node.End() }
-
-func (m *matcher) fillValues(node ast.Node, values map[string]ast.Node) ast.Node {
- // node might not have a parent, in which case we need to set an
- // artificial one. Its pointer interface is a copy, so we must also
- // return it.
- top := &topNode{node}
- m.setParentOf(node, top)
-
- inspect(node, func(node ast.Node) bool {
- id := fromWildNode(node)
- info := m.info(id)
- if info.name == "" {
- return true
- }
- prev := values[info.name]
- switch prev.(type) {
- case exprList:
- node = exprList([]ast.Expr{
- node.(*ast.Ident),
- })
- case stmtList:
- if ident, ok := node.(*ast.Ident); ok {
- node = &ast.ExprStmt{X: ident}
- }
- node = stmtList([]ast.Stmt{
- node.(*ast.ExprStmt),
- })
- }
- m.substNode(node, prev)
- return true
- })
- m.setParentOf(node, nil)
- return top.Node
-}
-
-func (m *matcher) substNode(oldNode, newNode ast.Node) {
- parent := m.parentOf(oldNode)
- m.setParentOf(newNode, parent)
-
- ptr := m.nodePtr(oldNode)
- switch x := ptr.(type) {
- case **ast.Ident:
- *x = newNode.(*ast.Ident)
- case *ast.Node:
- *x = newNode
- case *ast.Expr:
- *x = newNode.(ast.Expr)
- case *ast.Stmt:
- switch y := newNode.(type) {
- case ast.Expr:
- stmt := &ast.ExprStmt{X: y}
- m.setParentOf(stmt, parent)
- *x = stmt
- case ast.Stmt:
- *x = y
- default:
- panic(fmt.Sprintf("cannot replace stmt with %T", y))
- }
- case *[]ast.Expr:
- oldList := oldNode.(exprList)
- var first, last []ast.Expr
- for i, expr := range *x {
- if expr == oldList[0] {
- first = (*x)[:i]
- last = (*x)[i+len(oldList):]
- break
- }
- }
- switch y := newNode.(type) {
- case ast.Expr:
- *x = append(first, y)
- case exprList:
- *x = append(first, y...)
- default:
- panic(fmt.Sprintf("cannot replace exprs with %T", y))
- }
- *x = append(*x, last...)
- case *[]ast.Stmt:
- oldList := oldNode.(stmtList)
- var first, last []ast.Stmt
- for i, stmt := range *x {
- if stmt == oldList[0] {
- first = (*x)[:i]
- last = (*x)[i+len(oldList):]
- break
- }
- }
- switch y := newNode.(type) {
- case ast.Expr:
- stmt := &ast.ExprStmt{X: y}
- m.setParentOf(stmt, parent)
- *x = append(first, stmt)
- case ast.Stmt:
- *x = append(first, y)
- case stmtList:
- *x = append(first, y...)
- default:
- panic(fmt.Sprintf("cannot replace stmts with %T", y))
- }
- *x = append(*x, last...)
- case nil:
- return
- default:
- panic(fmt.Sprintf("unsupported substitution: %T", x))
- }
- // the new nodes have scrubbed positions, so try our best to use
- // sensible ones
- fixPositions(parent)
-}
-
-func (m *matcher) parentOf(node ast.Node) ast.Node {
- list, ok := node.(nodeList)
- if ok {
- node = list.at(0)
- }
- return m.parents[node]
-}
-
-func (m *matcher) setParentOf(node, parent ast.Node) {
- list, ok := node.(nodeList)
- if ok {
- if list.len() == 0 {
- return
- }
- node = list.at(0)
- }
- m.parents[node] = parent
-}
-
-func (m *matcher) nodePtr(node ast.Node) interface{} {
- list, wantSlice := node.(nodeList)
- if wantSlice {
- node = list.at(0)
- }
- parent := m.parentOf(node)
- if parent == nil {
- return nil
- }
- v := reflect.ValueOf(parent).Elem()
- for i := 0; i < v.NumField(); i++ {
- fld := v.Field(i)
- switch fld.Type().Kind() {
- case reflect.Slice:
- for i := 0; i < fld.Len(); i++ {
- ifld := fld.Index(i)
- if ifld.Interface() != node {
- continue
- }
- if wantSlice {
- return fld.Addr().Interface()
- }
- return ifld.Addr().Interface()
- }
- case reflect.Interface:
- if fld.Interface() == node {
- return fld.Addr().Interface()
- }
- }
- }
- return nil
-}
-
-// nodePosHash is an ast.Node that can always be used as a key in maps,
-// even for nodes that are slices like nodeList.
-type nodePosHash struct {
- pos, end token.Pos
-}
-
-func (n nodePosHash) Pos() token.Pos { return n.pos }
-func (n nodePosHash) End() token.Pos { return n.end }
-
-func posHash(node ast.Node) nodePosHash {
- return nodePosHash{pos: node.Pos(), end: node.End()}
-}
-
-var posType = reflect.TypeOf(token.NoPos)
-
-func scrubPositions(node ast.Node) {
- inspect(node, func(node ast.Node) bool {
- v := reflect.ValueOf(node)
- if v.Kind() != reflect.Ptr {
- return true
- }
- v = v.Elem()
- if v.Kind() != reflect.Struct {
- return true
- }
- for i := 0; i < v.NumField(); i++ {
- fld := v.Field(i)
- if fld.Type() == posType {
- fld.SetInt(0)
- }
- }
- return true
- })
-}
-
-// fixPositions tries to fix common syntax errors caused from syntax rewrites.
-func fixPositions(node ast.Node) {
- if top, ok := node.(*topNode); ok {
- node = top.Node
- }
- // fallback sets pos to the 'to' position if not valid.
- fallback := func(pos *token.Pos, to token.Pos) {
- if !pos.IsValid() {
- *pos = to
- }
- }
- ast.Inspect(node, func(node ast.Node) bool {
- // TODO: many more node types
- switch x := node.(type) {
- case *ast.GoStmt:
- fallback(&x.Go, x.Call.Pos())
- case *ast.ReturnStmt:
- if len(x.Results) == 0 {
- break
- }
- // Ensure that there's no newline before the returned
- // values, as otherwise we have a naked return. See
- // https://github.com/golang/go/issues/32854.
- if pos := x.Results[0].Pos(); pos > x.Return {
- x.Return = pos
- }
- }
- return true
- })
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/write.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/write.go
deleted file mode 100644
index b4796a896..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/write.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright (c) 2018, Daniel Martí <mvdan@mvdan.cc>
-// See LICENSE for licensing information
-
-package gogrep
-
-import (
- "go/ast"
- "go/printer"
- "os"
-)
-
-func (m *matcher) cmdWrite(cmd exprCmd, subs []submatch) []submatch {
- seenRoot := make(map[nodePosHash]bool)
- filePaths := make(map[*ast.File]string)
- var next []submatch
- for _, sub := range subs {
- root := m.nodeRoot(sub.node)
- hash := posHash(root)
- if seenRoot[hash] {
- continue // avoid dups
- }
- seenRoot[hash] = true
- file, ok := root.(*ast.File)
- if ok {
- path := m.fset.Position(file.Package).Filename
- if path != "" {
- // write to disk
- filePaths[file] = path
- continue
- }
- }
- // pass it on, to print to stdout
- next = append(next, submatch{node: root})
- }
- for file, path := range filePaths {
- f, err := os.OpenFile(path, os.O_WRONLY|os.O_TRUNC, 0)
- if err != nil {
- // TODO: return errors instead
- panic(err)
- }
- if err := printConfig.Fprint(f, m.fset, file); err != nil {
- // TODO: return errors instead
- panic(err)
- }
- }
- return next
-}
-
-var printConfig = printer.Config{
- Mode: printer.UseSpaces | printer.TabIndent,
- Tabwidth: 8,
-}
-
-func (m *matcher) nodeRoot(node ast.Node) ast.Node {
- parent := m.parentOf(node)
- if parent == nil {
- return node
- }
- if _, ok := parent.(nodeList); ok {
- return parent
- }
- return m.nodeRoot(parent)
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/xsrcimporter/xsrcimporter.go b/vendor/github.com/quasilyte/go-ruleguard/internal/xsrcimporter/xsrcimporter.go
new file mode 100644
index 000000000..3b58f97c7
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/internal/xsrcimporter/xsrcimporter.go
@@ -0,0 +1,29 @@
+package xsrcimporter
+
+import (
+ "go/build"
+ "go/importer"
+ "go/token"
+ "go/types"
+ "unsafe"
+)
+
+func New(ctxt *build.Context, fset *token.FileSet) types.Importer {
+ imp := importer.ForCompiler(fset, "source", nil)
+ ifaceVal := *(*iface)(unsafe.Pointer(&imp))
+ srcImp := (*srcImporter)(ifaceVal.data)
+ srcImp.ctxt = ctxt
+ return imp
+}
+
+type iface struct {
+ _ *byte
+ data unsafe.Pointer
+}
+
+type srcImporter struct {
+ ctxt *build.Context
+ _ *token.FileSet
+ _ types.Sizes
+ _ map[string]*types.Package
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go
new file mode 100644
index 000000000..c52a5a822
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ast_walker.go
@@ -0,0 +1,369 @@
+package ruleguard
+
+import (
+ "go/ast"
+ "go/constant"
+
+ "github.com/quasilyte/gogrep/nodetag"
+)
+
+type astWalker struct {
+ nodePath *nodePath
+
+ filterParams *filterParams
+
+ visit func(ast.Node, nodetag.Value)
+}
+
+func (w *astWalker) Walk(root ast.Node, visit func(ast.Node, nodetag.Value)) {
+ w.visit = visit
+ w.walk(root)
+}
+
+func (w *astWalker) walkIdentList(list []*ast.Ident) {
+ for _, x := range list {
+ w.walk(x)
+ }
+}
+
+func (w *astWalker) walkExprList(list []ast.Expr) {
+ for _, x := range list {
+ w.walk(x)
+ }
+}
+
+func (w *astWalker) walkStmtList(list []ast.Stmt) {
+ for _, x := range list {
+ w.walk(x)
+ }
+}
+
+func (w *astWalker) walkDeclList(list []ast.Decl) {
+ for _, x := range list {
+ w.walk(x)
+ }
+}
+
+func (w *astWalker) walk(n ast.Node) {
+ w.nodePath.Push(n)
+ defer w.nodePath.Pop()
+
+ switch n := n.(type) {
+ case *ast.Field:
+ // TODO: handle field types.
+ // See #252
+ w.walkIdentList(n.Names)
+ w.walk(n.Type)
+
+ case *ast.FieldList:
+ for _, f := range n.List {
+ w.walk(f)
+ }
+
+ case *ast.Ellipsis:
+ w.visit(n, nodetag.Ellipsis)
+ if n.Elt != nil {
+ w.walk(n.Elt)
+ }
+
+ case *ast.FuncLit:
+ w.visit(n, nodetag.FuncLit)
+ w.walk(n.Type)
+ w.walk(n.Body)
+
+ case *ast.CompositeLit:
+ w.visit(n, nodetag.CompositeLit)
+ if n.Type != nil {
+ w.walk(n.Type)
+ }
+ w.walkExprList(n.Elts)
+
+ case *ast.ParenExpr:
+ w.visit(n, nodetag.ParenExpr)
+ w.walk(n.X)
+
+ case *ast.SelectorExpr:
+ w.visit(n, nodetag.SelectorExpr)
+ w.walk(n.X)
+ w.walk(n.Sel)
+
+ case *ast.IndexExpr:
+ w.visit(n, nodetag.IndexExpr)
+ w.walk(n.X)
+ w.walk(n.Index)
+
+ case *ast.SliceExpr:
+ w.visit(n, nodetag.SliceExpr)
+ w.walk(n.X)
+ if n.Low != nil {
+ w.walk(n.Low)
+ }
+ if n.High != nil {
+ w.walk(n.High)
+ }
+ if n.Max != nil {
+ w.walk(n.Max)
+ }
+
+ case *ast.TypeAssertExpr:
+ w.visit(n, nodetag.TypeAssertExpr)
+ w.walk(n.X)
+ if n.Type != nil {
+ w.walk(n.Type)
+ }
+
+ case *ast.CallExpr:
+ w.visit(n, nodetag.CallExpr)
+ w.walk(n.Fun)
+ w.walkExprList(n.Args)
+
+ case *ast.StarExpr:
+ w.visit(n, nodetag.StarExpr)
+ w.walk(n.X)
+
+ case *ast.UnaryExpr:
+ w.visit(n, nodetag.UnaryExpr)
+ w.walk(n.X)
+
+ case *ast.BinaryExpr:
+ w.visit(n, nodetag.BinaryExpr)
+ w.walk(n.X)
+ w.walk(n.Y)
+
+ case *ast.KeyValueExpr:
+ w.visit(n, nodetag.KeyValueExpr)
+ w.walk(n.Key)
+ w.walk(n.Value)
+
+ case *ast.ArrayType:
+ w.visit(n, nodetag.ArrayType)
+ if n.Len != nil {
+ w.walk(n.Len)
+ }
+ w.walk(n.Elt)
+
+ case *ast.StructType:
+ w.visit(n, nodetag.StructType)
+ w.walk(n.Fields)
+
+ case *ast.FuncType:
+ w.visit(n, nodetag.FuncType)
+ if n.Params != nil {
+ w.walk(n.Params)
+ }
+ if n.Results != nil {
+ w.walk(n.Results)
+ }
+
+ case *ast.InterfaceType:
+ w.visit(n, nodetag.InterfaceType)
+ w.walk(n.Methods)
+
+ case *ast.MapType:
+ w.visit(n, nodetag.MapType)
+ w.walk(n.Key)
+ w.walk(n.Value)
+
+ case *ast.ChanType:
+ w.visit(n, nodetag.ChanType)
+ w.walk(n.Value)
+
+ case *ast.DeclStmt:
+ w.visit(n, nodetag.DeclStmt)
+ w.walk(n.Decl)
+
+ case *ast.LabeledStmt:
+ w.visit(n, nodetag.LabeledStmt)
+ w.walk(n.Label)
+ w.walk(n.Stmt)
+
+ case *ast.ExprStmt:
+ w.visit(n, nodetag.ExprStmt)
+ w.walk(n.X)
+
+ case *ast.SendStmt:
+ w.visit(n, nodetag.SendStmt)
+ w.walk(n.Chan)
+ w.walk(n.Value)
+
+ case *ast.IncDecStmt:
+ w.visit(n, nodetag.IncDecStmt)
+ w.walk(n.X)
+
+ case *ast.AssignStmt:
+ w.visit(n, nodetag.AssignStmt)
+ w.walkExprList(n.Lhs)
+ w.walkExprList(n.Rhs)
+
+ case *ast.GoStmt:
+ w.visit(n, nodetag.GoStmt)
+ w.walk(n.Call)
+
+ case *ast.DeferStmt:
+ w.visit(n, nodetag.DeferStmt)
+ w.walk(n.Call)
+
+ case *ast.ReturnStmt:
+ w.visit(n, nodetag.ReturnStmt)
+ w.walkExprList(n.Results)
+
+ case *ast.BranchStmt:
+ w.visit(n, nodetag.BranchStmt)
+ if n.Label != nil {
+ w.walk(n.Label)
+ }
+
+ case *ast.BlockStmt:
+ w.visit(n, nodetag.BlockStmt)
+ w.walkStmtList(n.List)
+
+ case *ast.IfStmt:
+ w.visit(n, nodetag.IfStmt)
+ if n.Init != nil {
+ w.walk(n.Init)
+ }
+ w.walk(n.Cond)
+ deadcode := w.filterParams.deadcode
+ if !deadcode {
+ cv := w.filterParams.ctx.Types.Types[n.Cond].Value
+ if cv != nil {
+ w.filterParams.deadcode = !deadcode && !constant.BoolVal(cv)
+ w.walk(n.Body)
+ w.filterParams.deadcode = !w.filterParams.deadcode
+ if n.Else != nil {
+ w.walk(n.Else)
+ }
+ w.filterParams.deadcode = deadcode
+ return
+ }
+ }
+ w.walk(n.Body)
+ if n.Else != nil {
+ w.walk(n.Else)
+ }
+
+ case *ast.CaseClause:
+ w.visit(n, nodetag.CaseClause)
+ w.walkExprList(n.List)
+ w.walkStmtList(n.Body)
+
+ case *ast.SwitchStmt:
+ w.visit(n, nodetag.SwitchStmt)
+ if n.Init != nil {
+ w.walk(n.Init)
+ }
+ if n.Tag != nil {
+ w.walk(n.Tag)
+ }
+ w.walk(n.Body)
+
+ case *ast.TypeSwitchStmt:
+ w.visit(n, nodetag.TypeSwitchStmt)
+ if n.Init != nil {
+ w.walk(n.Init)
+ }
+ w.walk(n.Assign)
+ w.walk(n.Body)
+
+ case *ast.CommClause:
+ w.visit(n, nodetag.CommClause)
+ if n.Comm != nil {
+ w.walk(n.Comm)
+ }
+ w.walkStmtList(n.Body)
+
+ case *ast.SelectStmt:
+ w.visit(n, nodetag.SelectStmt)
+ w.walk(n.Body)
+
+ case *ast.ForStmt:
+ w.visit(n, nodetag.ForStmt)
+ if n.Init != nil {
+ w.walk(n.Init)
+ }
+ if n.Cond != nil {
+ w.walk(n.Cond)
+ }
+ if n.Post != nil {
+ w.walk(n.Post)
+ }
+ w.walk(n.Body)
+
+ case *ast.RangeStmt:
+ w.visit(n, nodetag.RangeStmt)
+ if n.Key != nil {
+ w.walk(n.Key)
+ }
+ if n.Value != nil {
+ w.walk(n.Value)
+ }
+ w.walk(n.X)
+ w.walk(n.Body)
+
+ case *ast.ImportSpec:
+ w.visit(n, nodetag.ImportSpec)
+ if n.Name != nil {
+ w.walk(n.Name)
+ }
+ w.walk(n.Path)
+ if n.Comment != nil {
+ w.walk(n.Comment)
+ }
+
+ case *ast.ValueSpec:
+ w.visit(n, nodetag.ValueSpec)
+ if n.Doc != nil {
+ w.walk(n.Doc)
+ }
+ w.walkIdentList(n.Names)
+ if n.Type != nil {
+ w.walk(n.Type)
+ }
+ w.walkExprList(n.Values)
+ if n.Comment != nil {
+ w.walk(n.Comment)
+ }
+
+ case *ast.TypeSpec:
+ w.visit(n, nodetag.TypeSpec)
+ if n.Doc != nil {
+ w.walk(n.Doc)
+ }
+ w.walk(n.Name)
+ w.walk(n.Type)
+ if n.Comment != nil {
+ w.walk(n.Comment)
+ }
+
+ case *ast.GenDecl:
+ w.visit(n, nodetag.GenDecl)
+ if n.Doc != nil {
+ w.walk(n.Doc)
+ }
+ for _, s := range n.Specs {
+ w.walk(s)
+ }
+
+ case *ast.FuncDecl:
+ w.visit(n, nodetag.FuncDecl)
+ prevFunc := w.filterParams.currentFunc
+ w.filterParams.currentFunc = n
+ if n.Doc != nil {
+ w.walk(n.Doc)
+ }
+ if n.Recv != nil {
+ w.walk(n.Recv)
+ }
+ w.walk(n.Name)
+ w.walk(n.Type)
+ if n.Body != nil {
+ w.walk(n.Body)
+ }
+ w.filterParams.currentFunc = prevFunc
+
+ case *ast.File:
+ w.visit(n, nodetag.File)
+ w.walk(n.Name)
+ w.walkDeclList(n.Decls)
+ }
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go
index 950e3c410..72a334b9c 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go
@@ -6,7 +6,7 @@ import (
"github.com/quasilyte/go-ruleguard/internal/golist"
)
-func findBundleFiles(pkgPath string) ([]string, error) {
+func findBundleFiles(pkgPath string) ([]string, error) { // nolint
pkg, err := golist.JSON(pkgPath)
if err != nil {
return nil, err
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go
index f8d1e390a..a5e6ca4d6 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go
@@ -4,13 +4,24 @@ import (
"errors"
"fmt"
"go/ast"
+ "go/build"
+ "go/token"
"go/types"
"io"
+ "io/ioutil"
+ "os"
+ "sort"
"strings"
"sync"
+ "github.com/quasilyte/go-ruleguard/internal/goenv"
+ "github.com/quasilyte/go-ruleguard/ruleguard/ir"
"github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings"
"github.com/quasilyte/go-ruleguard/ruleguard/typematch"
+ "github.com/quasilyte/stdinfo"
)
type engine struct {
@@ -25,19 +36,75 @@ func newEngine() *engine {
}
}
-func (e *engine) Load(ctx *ParseContext, filename string, r io.Reader) error {
- config := rulesParserConfig{
- state: e.state,
- ctx: ctx,
- importer: newGoImporter(e.state, goImporterConfig{
- fset: ctx.Fset,
- debugImports: ctx.DebugImports,
- debugPrint: ctx.DebugPrint,
- }),
- itab: typematch.NewImportsTab(stdlibPackages),
- }
- p := newRulesParser(config)
- rset, err := p.ParseFile(filename, r)
+func (e *engine) LoadedGroups() []GoRuleGroup {
+ result := make([]GoRuleGroup, 0, len(e.ruleSet.groups))
+ for _, g := range e.ruleSet.groups {
+ result = append(result, *g)
+ }
+ sort.Slice(result, func(i, j int) bool {
+ return result[i].Name < result[j].Name
+ })
+ return result
+}
+
+func (e *engine) Load(ctx *LoadContext, buildContext *build.Context, filename string, r io.Reader) error {
+ data, err := ioutil.ReadAll(r)
+ if err != nil {
+ return err
+ }
+ imp := newGoImporter(e.state, goImporterConfig{
+ fset: ctx.Fset,
+ debugImports: ctx.DebugImports,
+ debugPrint: ctx.DebugPrint,
+ buildContext: buildContext,
+ })
+ irfile, pkg, err := convertAST(ctx, imp, filename, data)
+ if err != nil {
+ return err
+ }
+ config := irLoaderConfig{
+ state: e.state,
+ pkg: pkg,
+ ctx: ctx,
+ importer: imp,
+ itab: typematch.NewImportsTab(stdinfo.PathByName),
+ gogrepFset: token.NewFileSet(),
+ }
+ l := newIRLoader(config)
+ rset, err := l.LoadFile(filename, irfile)
+ if err != nil {
+ return err
+ }
+
+ if e.ruleSet == nil {
+ e.ruleSet = rset
+ } else {
+ combinedRuleSet, err := mergeRuleSets([]*goRuleSet{e.ruleSet, rset})
+ if err != nil {
+ return err
+ }
+ e.ruleSet = combinedRuleSet
+ }
+
+ return nil
+}
+
+func (e *engine) LoadFromIR(ctx *LoadContext, buildContext *build.Context, filename string, f *ir.File) error {
+ imp := newGoImporter(e.state, goImporterConfig{
+ fset: ctx.Fset,
+ debugImports: ctx.DebugImports,
+ debugPrint: ctx.DebugPrint,
+ buildContext: buildContext,
+ })
+ config := irLoaderConfig{
+ state: e.state,
+ ctx: ctx,
+ importer: imp,
+ itab: typematch.NewImportsTab(stdinfo.PathByName),
+ gogrepFset: token.NewFileSet(),
+ }
+ l := newIRLoader(config)
+ rset, err := l.LoadFile(filename, f)
if err != nil {
return err
}
@@ -55,12 +122,12 @@ func (e *engine) Load(ctx *ParseContext, filename string, r io.Reader) error {
return nil
}
-func (e *engine) Run(ctx *RunContext, f *ast.File) error {
+func (e *engine) Run(ctx *RunContext, buildContext *build.Context, f *ast.File) error {
if e.ruleSet == nil {
return errors.New("used Run() with an empty rule set; forgot to call Load() first?")
}
- rset := cloneRuleSet(e.ruleSet)
- return newRulesRunner(ctx, e.state, rset).run(f)
+ rset := e.ruleSet
+ return newRulesRunner(ctx, buildContext, e.state, rset).run(f)
}
// engineState is a shared state inside the engine.
@@ -77,33 +144,16 @@ type engineState struct {
func newEngineState() *engineState {
env := quasigo.NewEnv()
+ qstrings.ImportAll(env)
+ qstrconv.ImportAll(env)
+ qfmt.ImportAll(env)
state := &engineState{
- env: env,
- pkgCache: make(map[string]*types.Package),
- typeByFQN: map[string]types.Type{
- // Predeclared types.
- `error`: types.Universe.Lookup("error").Type(),
- `bool`: types.Typ[types.Bool],
- `int`: types.Typ[types.Int],
- `int8`: types.Typ[types.Int8],
- `int16`: types.Typ[types.Int16],
- `int32`: types.Typ[types.Int32],
- `int64`: types.Typ[types.Int64],
- `uint`: types.Typ[types.Uint],
- `uint8`: types.Typ[types.Uint8],
- `uint16`: types.Typ[types.Uint16],
- `uint32`: types.Typ[types.Uint32],
- `uint64`: types.Typ[types.Uint64],
- `uintptr`: types.Typ[types.Uintptr],
- `string`: types.Typ[types.String],
- `float32`: types.Typ[types.Float32],
- `float64`: types.Typ[types.Float64],
- `complex64`: types.Typ[types.Complex64],
- `complex128`: types.Typ[types.Complex128],
- // Predeclared aliases (provided for convenience).
- `byte`: types.Typ[types.Uint8],
- `rune`: types.Typ[types.Int32],
- },
+ env: env,
+ pkgCache: make(map[string]*types.Package),
+ typeByFQN: map[string]types.Type{},
+ }
+ for key, typ := range typeByName {
+ state.typeByFQN[key] = typ
}
initEnv(state, env)
return state
@@ -118,10 +168,27 @@ func (state *engineState) GetCachedPackage(pkgPath string) *types.Package {
func (state *engineState) AddCachedPackage(pkgPath string, pkg *types.Package) {
state.pkgCacheMu.Lock()
- state.pkgCache[pkgPath] = pkg
+ state.addCachedPackage(pkgPath, pkg)
state.pkgCacheMu.Unlock()
}
+func (state *engineState) addCachedPackage(pkgPath string, pkg *types.Package) {
+ state.pkgCache[pkgPath] = pkg
+
+ // Also add all complete packages that are dependencies of the pkg.
+ // This way we cache more and avoid duplicated package loading
+ // which can lead to typechecking issues.
+ //
+ // Note that it does not increase our memory consumption
+ // as these packages are reachable via pkg, so they'll
+ // not be freed by GC anyway.
+ for _, imported := range pkg.Imports() {
+ if imported.Complete() {
+ state.addCachedPackage(imported.Path(), imported)
+ }
+ }
+}
+
func (state *engineState) FindType(importer *goImporter, currentPkg *types.Package, fqn string) (types.Type, error) {
// TODO(quasilyte): we can pre-populate the cache during the Load() phase.
// If we inspect the AST of a user function, all constant FQN can be preloaded.
@@ -155,9 +222,12 @@ func (state *engineState) findTypeNoCache(importer *goImporter, currentPkg *type
pkgPath := fqn[:pos]
objectName := fqn[pos+1:]
var pkg *types.Package
- if directDep := findDependency(currentPkg, pkgPath); directDep != nil {
- pkg = directDep
- } else {
+ if currentPkg != nil {
+ if directDep := findDependency(currentPkg, pkgPath); directDep != nil {
+ pkg = directDep
+ }
+ }
+ if pkg == nil {
loadedPkg, err := importer.Import(pkgPath)
if err != nil {
return nil, err
@@ -172,3 +242,29 @@ func (state *engineState) findTypeNoCache(importer *goImporter, currentPkg *type
state.typeByFQN[fqn] = typ
return typ, nil
}
+
+func inferBuildContext() *build.Context {
+ // Inherit most fields from the build.Default.
+ ctx := build.Default
+
+ env, err := goenv.Read()
+ if err != nil {
+ return &ctx
+ }
+
+ ctx.GOROOT = env["GOROOT"]
+ ctx.GOPATH = env["GOPATH"]
+ ctx.GOARCH = env["GOARCH"]
+ ctx.GOOS = env["GOOS"]
+
+ switch os.Getenv("CGO_ENABLED") {
+ case "0":
+ ctx.CgoEnabled = false
+ case "1":
+ ctx.CgoEnabled = true
+ default:
+ ctx.CgoEnabled = env["CGO_ENABLED"] == "1"
+ }
+
+ return &ctx
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go
index 3816405a3..604ae4a18 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go
@@ -6,10 +6,14 @@ import (
"go/token"
"go/types"
"path/filepath"
- "regexp"
+
+ "github.com/quasilyte/gogrep"
+ "github.com/quasilyte/gogrep/nodetag"
+ "golang.org/x/tools/go/ast/astutil"
"github.com/quasilyte/go-ruleguard/internal/xtypes"
"github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+ "github.com/quasilyte/go-ruleguard/ruleguard/textmatch"
"github.com/quasilyte/go-ruleguard/ruleguard/typematch"
)
@@ -19,6 +23,15 @@ func filterFailure(reason string) matchFilterResult {
return matchFilterResult(reason)
}
+func exprListFilterApply(src string, list gogrep.ExprSlice, fn func(ast.Expr) bool) matchFilterResult {
+ for i := 0; i < list.Len(); i++ {
+ if !fn(list.At(i).(ast.Expr)) {
+ return filterFailure(src)
+ }
+ }
+ return filterSuccess
+}
+
func makeNotFilter(src string, x matchFilter) filterFunc {
return func(params *filterParams) matchFilterResult {
if x.fn(params).Matched() {
@@ -46,6 +59,15 @@ func makeOrFilter(lhs, rhs matchFilter) filterFunc {
}
}
+func makeDeadcodeFilter(src string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if params.deadcode {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
func makeFileImportsFilter(src, pkgPath string) filterFunc {
return func(params *filterParams) matchFilterResult {
_, imported := params.imports[pkgPath]
@@ -56,7 +78,7 @@ func makeFileImportsFilter(src, pkgPath string) filterFunc {
}
}
-func makeFilePkgPathMatchesFilter(src string, re *regexp.Regexp) filterFunc {
+func makeFilePkgPathMatchesFilter(src string, re textmatch.Pattern) filterFunc {
return func(params *filterParams) matchFilterResult {
pkgPath := params.ctx.Pkg.Path()
if re.MatchString(pkgPath) {
@@ -66,7 +88,7 @@ func makeFilePkgPathMatchesFilter(src string, re *regexp.Regexp) filterFunc {
}
}
-func makeFileNameMatchesFilter(src string, re *regexp.Regexp) filterFunc {
+func makeFileNameMatchesFilter(src string, re textmatch.Pattern) filterFunc {
return func(params *filterParams) matchFilterResult {
if re.MatchString(filepath.Base(params.filename)) {
return filterSuccess
@@ -77,6 +99,12 @@ func makeFileNameMatchesFilter(src string, re *regexp.Regexp) filterFunc {
func makePureFilter(src, varname string) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return isPure(params.ctx.Types, x)
+ })
+ }
+
n := params.subExpr(varname)
if isPure(params.ctx.Types, n) {
return filterSuccess
@@ -87,6 +115,12 @@ func makePureFilter(src, varname string) filterFunc {
func makeConstFilter(src, varname string) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return isConstant(params.ctx.Types, x)
+ })
+ }
+
n := params.subExpr(varname)
if isConstant(params.ctx.Types, n) {
return filterSuccess
@@ -95,8 +129,30 @@ func makeConstFilter(src, varname string) filterFunc {
}
}
+func makeConstSliceFilter(src, varname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return isConstantSlice(params.ctx.Types, x)
+ })
+ }
+
+ n := params.subExpr(varname)
+ if isConstantSlice(params.ctx.Types, n) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
func makeAddressableFilter(src, varname string) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return isAddressable(params.ctx.Types, x)
+ })
+ }
+
n := params.subExpr(varname)
if isAddressable(params.ctx.Types, n) {
return filterSuccess
@@ -105,13 +161,48 @@ func makeAddressableFilter(src, varname string) filterFunc {
}
}
+func makeComparableFilter(src, varname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return types.Comparable(params.typeofNode(x))
+ })
+ }
+
+ if types.Comparable(params.typeofNode(params.subNode(varname))) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeVarContainsFilter(src, varname string, pat *gogrep.Pattern) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ params.gogrepSubState.CapturePreset = params.match.CaptureList()
+ matched := false
+ gogrep.Walk(params.subNode(varname), func(n ast.Node) bool {
+ if matched {
+ return false
+ }
+ pat.MatchNode(params.gogrepSubState, n, func(m gogrep.MatchData) {
+ matched = true
+ })
+ return true
+ })
+ if matched {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
func makeCustomVarFilter(src, varname string, fn *quasigo.Func) filterFunc {
return func(params *filterParams) matchFilterResult {
// TODO(quasilyte): what if bytecode function panics due to the programming error?
// We should probably catch the panic here, print trace and return "false"
// from the filter (or even propagate that panic to let it crash).
params.varname = varname
- result := quasigo.Call(params.env, fn, params)
+ result := quasigo.Call(params.env, fn)
if result.Value().(bool) {
return filterSuccess
}
@@ -121,6 +212,12 @@ func makeCustomVarFilter(src, varname string, fn *quasigo.Func) filterFunc {
func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return xtypes.Implements(params.typeofNode(x), iface)
+ })
+ }
+
typ := params.typeofNode(params.subExpr(varname))
if xtypes.Implements(typ, iface) {
return filterSuccess
@@ -129,19 +226,123 @@ func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filte
}
}
+func makeTypeHasMethodFilter(src, varname string, fn *types.Func) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subNode(varname))
+ if typeHasMethod(typ, fn) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeHasPointersFilter(src, varname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if typeHasPointers(typ) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeIsIntUintFilter(src, varname string, underlying bool, kind types.BasicKind) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if underlying {
+ typ = typ.Underlying()
+ }
+ if basicType, ok := typ.(*types.Basic); ok {
+ first := kind
+ last := kind + 4
+ if basicType.Kind() >= first && basicType.Kind() <= last {
+ return filterSuccess
+ }
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeIsSignedFilter(src, varname string, underlying bool) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if underlying {
+ typ = typ.Underlying()
+ }
+ if basicType, ok := typ.(*types.Basic); ok {
+ if basicType.Info()&types.IsInteger != 0 && basicType.Info()&types.IsUnsigned == 0 {
+ return filterSuccess
+ }
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeOfKindFilter(src, varname string, underlying bool, kind types.BasicInfo) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if underlying {
+ typ = typ.Underlying()
+ }
+ if basicType, ok := typ.(*types.Basic); ok {
+ if basicType.Info()&kind != 0 {
+ return filterSuccess
+ }
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypesIdenticalFilter(src, lhsVarname, rhsVarname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ lhsType := params.typeofNode(params.subNode(lhsVarname))
+ rhsType := params.typeofNode(params.subNode(rhsVarname))
+ if xtypes.Identical(lhsType, rhsType) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeRootSinkTypeIsFilter(src string, pat *typematch.Pattern) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ // TODO(quasilyte): add variadic support?
+ e, ok := params.match.Node().(ast.Expr)
+ if ok {
+ parent, kv := findSinkRoot(params)
+ typ := findSinkType(params, parent, kv, e)
+ if pat.MatchIdentical(params.typematchState, typ) {
+ return filterSuccess
+ }
+ }
+ return filterFailure(src)
+ }
+}
+
func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Pattern) filterFunc {
if underlying {
return func(params *filterParams) matchFilterResult {
- typ := params.typeofNode(params.subExpr(varname)).Underlying()
- if pat.MatchIdentical(typ) {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return pat.MatchIdentical(params.typematchState, params.typeofNode(x).Underlying())
+ })
+ }
+ typ := params.typeofNode(params.subNode(varname)).Underlying()
+ if pat.MatchIdentical(params.typematchState, typ) {
return filterSuccess
}
return filterFailure(src)
}
}
+
return func(params *filterParams) matchFilterResult {
- typ := params.typeofNode(params.subExpr(varname))
- if pat.MatchIdentical(typ) {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return pat.MatchIdentical(params.typematchState, params.typeofNode(x))
+ })
+ }
+ typ := params.typeofNode(params.subNode(varname))
+ if pat.MatchIdentical(params.typematchState, typ) {
return filterSuccess
}
return filterFailure(src)
@@ -150,6 +351,12 @@ func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Patte
func makeTypeConvertibleToFilter(src, varname string, dstType types.Type) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return types.ConvertibleTo(params.typeofNode(x), dstType)
+ })
+ }
+
typ := params.typeofNode(params.subExpr(varname))
if types.ConvertibleTo(typ, dstType) {
return filterSuccess
@@ -160,6 +367,12 @@ func makeTypeConvertibleToFilter(src, varname string, dstType types.Type) filter
func makeTypeAssignableToFilter(src, varname string, dstType types.Type) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ return types.AssignableTo(params.typeofNode(x), dstType)
+ })
+ }
+
typ := params.typeofNode(params.subExpr(varname))
if types.AssignableTo(typ, dstType) {
return filterSuccess
@@ -168,8 +381,66 @@ func makeTypeAssignableToFilter(src, varname string, dstType types.Type) filterF
}
}
+func makeLineFilter(src, varname string, op token.Token, rhsVarname string) filterFunc {
+ // TODO(quasilyte): add variadic support.
+ return func(params *filterParams) matchFilterResult {
+ line1 := params.ctx.Fset.Position(params.subNode(varname).Pos()).Line
+ line2 := params.ctx.Fset.Position(params.subNode(rhsVarname).Pos()).Line
+ lhsValue := constant.MakeInt64(int64(line1))
+ rhsValue := constant.MakeInt64(int64(line2))
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeObjectIsGlobalFilter(src, varname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ obj := params.ctx.Types.ObjectOf(identOf(params.subExpr(varname)))
+ globalScope := params.ctx.Pkg.Scope()
+ if obj.Parent() == globalScope {
+ return filterSuccess
+ }
+
+ return filterFailure(src)
+ }
+}
+
+func makeGoVersionFilter(src string, op token.Token, version GoVersion) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if params.ctx.GoVersion.IsAny() {
+ return filterSuccess
+ }
+ if versionCompare(params.ctx.GoVersion, op, version) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeLineConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc {
+ // TODO(quasilyte): add variadic support.
+ return func(params *filterParams) matchFilterResult {
+ n := params.subNode(varname)
+ lhsValue := constant.MakeInt64(int64(params.ctx.Fset.Position(n.Pos()).Line))
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ typ := params.typeofNode(x)
+ lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ))
+ return constant.Compare(lhsValue, op, rhsValue)
+ })
+ }
+
typ := params.typeofNode(params.subExpr(varname))
lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ))
if constant.Compare(lhsValue, op, rhsValue) {
@@ -179,8 +450,28 @@ func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue const
}
}
+func makeTypeSizeFilter(src, varname string, op token.Token, rhsVarname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ lhsTyp := params.typeofNode(params.subExpr(varname))
+ lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(lhsTyp))
+ rhsTyp := params.typeofNode(params.subExpr(rhsVarname))
+ rhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(rhsTyp))
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
func makeValueIntConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc {
return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ lhsValue := intValueOf(params.ctx.Types, x)
+ return lhsValue != nil && constant.Compare(lhsValue, op, rhsValue)
+ })
+ }
+
lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname))
if lhsValue == nil {
return filterFailure(src) // The value is unknown
@@ -193,6 +484,7 @@ func makeValueIntConstFilter(src, varname string, op token.Token, rhsValue const
}
func makeValueIntFilter(src, varname string, op token.Token, rhsVarname string) filterFunc {
+ // TODO(quasilyte): add variadic support.
return func(params *filterParams) matchFilterResult {
lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname))
if lhsValue == nil {
@@ -210,8 +502,9 @@ func makeValueIntFilter(src, varname string, op token.Token, rhsVarname string)
}
func makeTextConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc {
+ // TODO(quasilyte): add variadic support.
return func(params *filterParams) matchFilterResult {
- s := params.nodeText(params.subExpr(varname))
+ s := params.nodeText(params.subNode(varname))
lhsValue := constant.MakeString(string(s))
if constant.Compare(lhsValue, op, rhsValue) {
return filterSuccess
@@ -221,10 +514,12 @@ func makeTextConstFilter(src, varname string, op token.Token, rhsValue constant.
}
func makeTextFilter(src, varname string, op token.Token, rhsVarname string) filterFunc {
+ // TODO(quasilyte): add variadic support.
return func(params *filterParams) matchFilterResult {
- s1 := params.nodeText(params.subExpr(varname))
+ s1 := params.nodeText(params.subNode(varname))
lhsValue := constant.MakeString(string(s1))
- s2 := params.nodeText(params.values[rhsVarname])
+ n, _ := params.match.CapturedByName(rhsVarname)
+ s2 := params.nodeText(n)
rhsValue := constant.MakeString(string(s2))
if constant.Compare(lhsValue, op, rhsValue) {
return filterSuccess
@@ -233,30 +528,271 @@ func makeTextFilter(src, varname string, op token.Token, rhsVarname string) filt
}
}
-func makeTextMatchesFilter(src, varname string, re *regexp.Regexp) filterFunc {
+func makeTextMatchesFilter(src, varname string, re textmatch.Pattern) filterFunc {
+ // TODO(quasilyte): add variadic support.
return func(params *filterParams) matchFilterResult {
- if re.Match(params.nodeText(params.subExpr(varname))) {
+ if re.Match(params.nodeText(params.subNode(varname))) {
return filterSuccess
}
return filterFailure(src)
}
}
-func makeNodeIsFilter(src, varname string, cat nodeCategory) filterFunc {
+func makeRootParentNodeIsFilter(src string, tag nodetag.Value) filterFunc {
return func(params *filterParams) matchFilterResult {
- n := params.subExpr(varname)
- var matched bool
- switch cat {
- case nodeExpr:
- _, matched = n.(ast.Expr)
- case nodeStmt:
- _, matched = n.(ast.Stmt)
- default:
- matched = (cat == categorizeNode(n))
+ parent := params.nodePath.Parent()
+ if nodeIs(parent, tag) {
+ return filterSuccess
}
- if matched {
+ return filterFailure(src)
+ }
+}
+
+func makeNodeIsFilter(src, varname string, tag nodetag.Value) filterFunc {
+ // TODO(quasilyte): add comment nodes support?
+ // TODO(quasilyte): add variadic support.
+ return func(params *filterParams) matchFilterResult {
+ n := params.subNode(varname)
+ if nodeIs(n, tag) {
return filterSuccess
}
return filterFailure(src)
}
}
+
+func makeObjectIsFilter(src, varname, objectName string) filterFunc {
+ var predicate func(types.Object) bool
+ switch objectName {
+ case "Func":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.Func)
+ return ok
+ }
+ case "Var":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.Var)
+ return ok
+ }
+ case "Const":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.Const)
+ return ok
+ }
+ case "TypeName":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.TypeName)
+ return ok
+ }
+ case "Label":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.Label)
+ return ok
+ }
+ case "PkgName":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.PkgName)
+ return ok
+ }
+ case "Builtin":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.Builtin)
+ return ok
+ }
+ case "Nil":
+ predicate = func(x types.Object) bool {
+ _, ok := x.(*types.Nil)
+ return ok
+ }
+ }
+
+ return func(params *filterParams) matchFilterResult {
+ if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok {
+ return exprListFilterApply(src, list, func(x ast.Expr) bool {
+ ident := identOf(x)
+ return ident != nil && predicate(params.ctx.Types.ObjectOf(ident))
+ })
+ }
+
+ ident := identOf(params.subExpr(varname))
+ if ident == nil {
+ return filterFailure(src)
+ }
+ object := params.ctx.Types.ObjectOf(ident)
+ if predicate(object) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func nodeIs(n ast.Node, tag nodetag.Value) bool {
+ var matched bool
+ switch tag {
+ case nodetag.Expr:
+ _, matched = n.(ast.Expr)
+ case nodetag.Stmt:
+ _, matched = n.(ast.Stmt)
+ case nodetag.Node:
+ matched = true
+ default:
+ matched = (tag == nodetag.FromNode(n))
+ }
+ return matched
+}
+
+func typeHasMethod(typ types.Type, fn *types.Func) bool {
+ obj, _, _ := types.LookupFieldOrMethod(typ, true, fn.Pkg(), fn.Name())
+ fn2, ok := obj.(*types.Func)
+ if !ok {
+ return false
+ }
+ return xtypes.Identical(fn.Type(), fn2.Type())
+}
+
+func typeHasPointers(typ types.Type) bool {
+ switch typ := typ.(type) {
+ case *types.Basic:
+ switch typ.Kind() {
+ case types.UnsafePointer, types.String, types.UntypedNil, types.UntypedString:
+ return true
+ }
+ return false
+
+ case *types.Named:
+ return typeHasPointers(typ.Underlying())
+
+ case *types.Struct:
+ for i := 0; i < typ.NumFields(); i++ {
+ if typeHasPointers(typ.Field(i).Type()) {
+ return true
+ }
+ }
+ return false
+
+ case *types.Array:
+ return typeHasPointers(typ.Elem())
+
+ default:
+ return true
+ }
+}
+
+func findSinkRoot(params *filterParams) (ast.Node, *ast.KeyValueExpr) {
+ for i := 1; i < params.nodePath.Len(); i++ {
+ switch n := params.nodePath.NthParent(i).(type) {
+ case *ast.ParenExpr:
+ // Skip and continue.
+ continue
+ case *ast.KeyValueExpr:
+ return params.nodePath.NthParent(i + 1).(ast.Expr), n
+ default:
+ return n, nil
+ }
+ }
+ return nil, nil
+}
+
+func findContainingFunc(params *filterParams) *types.Signature {
+ for i := 2; i < params.nodePath.Len(); i++ {
+ switch n := params.nodePath.NthParent(i).(type) {
+ case *ast.FuncDecl:
+ fn, ok := params.ctx.Types.TypeOf(n.Name).(*types.Signature)
+ if ok {
+ return fn
+ }
+ case *ast.FuncLit:
+ fn, ok := params.ctx.Types.TypeOf(n.Type).(*types.Signature)
+ if ok {
+ return fn
+ }
+ }
+ }
+ return nil
+}
+
+func findSinkType(params *filterParams, parent ast.Node, kv *ast.KeyValueExpr, e ast.Expr) types.Type {
+ switch parent := parent.(type) {
+ case *ast.ValueSpec:
+ return params.ctx.Types.TypeOf(parent.Type)
+
+ case *ast.ReturnStmt:
+ for i, result := range parent.Results {
+ if astutil.Unparen(result) != e {
+ continue
+ }
+ sig := findContainingFunc(params)
+ if sig == nil {
+ break
+ }
+ return sig.Results().At(i).Type()
+ }
+
+ case *ast.IndexExpr:
+ if astutil.Unparen(parent.Index) == e {
+ switch typ := params.ctx.Types.TypeOf(parent.X).Underlying().(type) {
+ case *types.Map:
+ return typ.Key()
+ case *types.Slice, *types.Array:
+ return nil // TODO: some untyped int type?
+ }
+ }
+
+ case *ast.AssignStmt:
+ if parent.Tok != token.ASSIGN || len(parent.Lhs) != len(parent.Rhs) {
+ break
+ }
+ for i, rhs := range parent.Rhs {
+ if rhs == e {
+ return params.ctx.Types.TypeOf(parent.Lhs[i])
+ }
+ }
+
+ case *ast.CompositeLit:
+ switch typ := params.ctx.Types.TypeOf(parent).Underlying().(type) {
+ case *types.Slice:
+ return typ.Elem()
+ case *types.Array:
+ return typ.Elem()
+ case *types.Map:
+ if astutil.Unparen(kv.Key) == e {
+ return typ.Key()
+ }
+ return typ.Elem()
+ case *types.Struct:
+ fieldName, ok := kv.Key.(*ast.Ident)
+ if !ok {
+ break
+ }
+ for i := 0; i < typ.NumFields(); i++ {
+ field := typ.Field(i)
+ if field.Name() == fieldName.String() {
+ return field.Type()
+ }
+ }
+ }
+
+ case *ast.CallExpr:
+ switch typ := params.ctx.Types.TypeOf(parent.Fun).(type) {
+ case *types.Signature:
+ // A function call argument.
+ for i, arg := range parent.Args {
+ if astutil.Unparen(arg) != e {
+ continue
+ }
+ isVariadicArg := (i >= typ.Params().Len()-1) && typ.Variadic()
+ if isVariadicArg && !parent.Ellipsis.IsValid() {
+ return typ.Params().At(typ.Params().Len() - 1).Type().(*types.Slice).Elem()
+ }
+ if i < typ.Params().Len() {
+ return typ.Params().At(i).Type()
+ }
+ break
+ }
+ default:
+ // Probably a type cast.
+ return typ
+ }
+ }
+
+ return invalidType
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/go_version.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/go_version.go
new file mode 100644
index 000000000..39e4a492d
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/go_version.go
@@ -0,0 +1,58 @@
+package ruleguard
+
+import (
+ "fmt"
+ "go/token"
+ "strconv"
+ "strings"
+)
+
+type GoVersion struct {
+ Major int
+ Minor int
+}
+
+func (ver GoVersion) IsAny() bool { return ver.Major == 0 }
+
+func ParseGoVersion(version string) (GoVersion, error) {
+ var result GoVersion
+ if version == "" {
+ return GoVersion{}, nil
+ }
+ parts := strings.Split(version, ".")
+ if len(parts) != 2 {
+ return result, fmt.Errorf("invalid format: %s", version)
+ }
+ major, err := strconv.Atoi(parts[0])
+ if err != nil {
+ return result, fmt.Errorf("invalid major version part: %s: %s", parts[0], err)
+ }
+ minor, err := strconv.Atoi(parts[1])
+ if err != nil {
+ return result, fmt.Errorf("invalid minor version part: %s: %s", parts[1], err)
+ }
+ result.Major = major
+ result.Minor = minor
+ return result, nil
+}
+
+func versionCompare(x GoVersion, op token.Token, y GoVersion) bool {
+ switch op {
+ case token.EQL: // ==
+ return x.Major == y.Major && x.Minor == y.Minor
+ case token.NEQ: // !=
+ return !versionCompare(x, token.EQL, y)
+
+ case token.GTR: // >
+ return x.Major > y.Major || (x.Major == y.Major && x.Minor > y.Minor)
+ case token.GEQ: // >=
+ return x.Major > y.Major || (x.Major == y.Major && x.Minor >= y.Minor)
+ case token.LSS: // <
+ return x.Major < y.Major || (x.Major == y.Major && x.Minor < y.Minor)
+ case token.LEQ: // <=
+ return x.Major < y.Major || (x.Major == y.Major && x.Minor <= y.Minor)
+
+ default:
+ panic("unexpected version compare op")
+ }
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
index 5357ad67f..655fc5b89 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
@@ -3,34 +3,42 @@ package ruleguard
import (
"fmt"
"go/ast"
- "go/token"
"go/types"
+ "regexp"
- "github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep"
"github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+ "github.com/quasilyte/go-ruleguard/ruleguard/typematch"
+ "github.com/quasilyte/gogrep"
+ "github.com/quasilyte/gogrep/nodetag"
)
type goRuleSet struct {
universal *scopedGoRuleSet
- groups map[string]token.Position // To handle redefinitions
+ groups map[string]*GoRuleGroup // To handle redefinitions
}
type scopedGoRuleSet struct {
- uncategorized []goRule
- categorizedNum int
- rulesByCategory [nodeCategoriesCount][]goRule
+ categorizedNum int
+ rulesByTag [nodetag.NumBuckets][]goRule
+ commentRules []goCommentRule
+}
+
+type goCommentRule struct {
+ base goRule
+ pat *regexp.Regexp
+ captureGroups bool
}
type goRule struct {
- group string
- filename string
+ group *GoRuleGroup
line int
pat *gogrep.Pattern
msg string
location string
suggestion string
filter matchFilter
+ do *quasigo.Func
}
type matchFilterResult string
@@ -52,18 +60,36 @@ type filterParams struct {
imports map[string]struct{}
env *quasigo.EvalEnv
- importer *goImporter
+ importer *goImporter
+ gogrepSubState *gogrep.MatcherState
+ typematchState *typematch.MatcherState
+
+ match matchData
+ nodePath *nodePath
+
+ nodeText func(n ast.Node) []byte
+ nodeString func(n ast.Node) string
- values map[string]ast.Node
+ deadcode bool
- nodeText func(n ast.Node) []byte
+ currentFunc *ast.FuncDecl
// varname is set only for custom filters before bytecode function is called.
varname string
+
+ // Both of these are Do() function related fields.
+ reportString string
+ suggestString string
+}
+
+func (params *filterParams) subNode(name string) ast.Node {
+ n, _ := params.match.CapturedByName(name)
+ return n
}
func (params *filterParams) subExpr(name string) ast.Expr {
- switch n := params.values[name].(type) {
+ n, _ := params.match.CapturedByName(name)
+ switch n := n.(type) {
case ast.Expr:
return n
case *ast.ExprStmt:
@@ -74,38 +100,34 @@ func (params *filterParams) subExpr(name string) ast.Expr {
}
func (params *filterParams) typeofNode(n ast.Node) types.Type {
- if e, ok := n.(ast.Expr); ok {
- if typ := params.ctx.Types.TypeOf(e); typ != nil {
- return typ
- }
+ var e ast.Expr
+ switch n := n.(type) {
+ case ast.Expr:
+ e = n
+ case *ast.Field:
+ e = n.Type
}
-
- return types.Typ[types.Invalid]
-}
-
-func cloneRuleSet(rset *goRuleSet) *goRuleSet {
- out, err := mergeRuleSets([]*goRuleSet{rset})
- if err != nil {
- panic(err) // Should never happen
+ if typ := params.ctx.Types.TypeOf(e); typ != nil {
+ return typ
}
- return out
+ return invalidType
}
func mergeRuleSets(toMerge []*goRuleSet) (*goRuleSet, error) {
out := &goRuleSet{
universal: &scopedGoRuleSet{},
- groups: make(map[string]token.Position),
+ groups: make(map[string]*GoRuleGroup),
}
for _, x := range toMerge {
out.universal = appendScopedRuleSet(out.universal, x.universal)
- for group, pos := range x.groups {
- if prevPos, ok := out.groups[group]; ok {
- newRef := fmt.Sprintf("%s:%d", pos.Filename, pos.Line)
- oldRef := fmt.Sprintf("%s:%d", prevPos.Filename, prevPos.Line)
- return nil, fmt.Errorf("%s: redefenition of %s(), previously defined at %s", newRef, group, oldRef)
+ for groupName, group := range x.groups {
+ if prevGroup, ok := out.groups[groupName]; ok {
+ newRef := fmt.Sprintf("%s:%d", group.Filename, group.Line)
+ oldRef := fmt.Sprintf("%s:%d", prevGroup.Filename, prevGroup.Line)
+ return nil, fmt.Errorf("%s: redefinition of %s(), previously defined at %s", newRef, groupName, oldRef)
}
- out.groups[group] = pos
+ out.groups[groupName] = group
}
}
@@ -113,11 +135,11 @@ func mergeRuleSets(toMerge []*goRuleSet) (*goRuleSet, error) {
}
func appendScopedRuleSet(dst, src *scopedGoRuleSet) *scopedGoRuleSet {
- dst.uncategorized = append(dst.uncategorized, cloneRuleSlice(src.uncategorized)...)
- for cat, rules := range src.rulesByCategory {
- dst.rulesByCategory[cat] = append(dst.rulesByCategory[cat], cloneRuleSlice(rules)...)
+ for tag, rules := range src.rulesByTag {
+ dst.rulesByTag[tag] = append(dst.rulesByTag[tag], cloneRuleSlice(rules)...)
dst.categorizedNum += len(rules)
}
+ dst.commentRules = append(dst.commentRules, src.commentRules...)
return dst
}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go
index 6cc4d9056..3f35b17e1 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go
@@ -1,9 +1,13 @@
package goutil
import (
+ "fmt"
"go/ast"
+ "go/importer"
+ "go/parser"
"go/printer"
"go/token"
+ "go/types"
"strings"
)
@@ -19,3 +23,45 @@ func SprintNode(fset *token.FileSet, n ast.Node) string {
}
return buf.String()
}
+
+type LoadConfig struct {
+ Fset *token.FileSet
+ Filename string
+ Data interface{}
+ Importer types.Importer
+}
+
+type LoadResult struct {
+ Pkg *types.Package
+ Types *types.Info
+ Syntax *ast.File
+}
+
+func LoadGoFile(config LoadConfig) (*LoadResult, error) {
+ imp := config.Importer
+ if imp == nil {
+ imp = importer.ForCompiler(config.Fset, "source", nil)
+ }
+
+ parserFlags := parser.ParseComments
+ f, err := parser.ParseFile(config.Fset, config.Filename, config.Data, parserFlags)
+ if err != nil {
+ return nil, fmt.Errorf("parse file error: %w", err)
+ }
+ typechecker := types.Config{Importer: imp}
+ typesInfo := &types.Info{
+ Types: map[ast.Expr]types.TypeAndValue{},
+ Uses: map[*ast.Ident]types.Object{},
+ Defs: map[*ast.Ident]types.Object{},
+ }
+ pkg, err := typechecker.Check(f.Name.String(), config.Fset, []*ast.File{f}, typesInfo)
+ if err != nil {
+ return nil, fmt.Errorf("typechecker error: %w", err)
+ }
+ result := &LoadResult{
+ Pkg: pkg,
+ Types: typesInfo,
+ Syntax: f,
+ }
+ return result, nil
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go
index 06a0bbf9f..19494db9c 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go
@@ -2,15 +2,13 @@ package ruleguard
import (
"fmt"
- "go/ast"
+ "go/build"
"go/importer"
- "go/parser"
"go/token"
"go/types"
- "path/filepath"
"runtime"
- "github.com/quasilyte/go-ruleguard/internal/golist"
+ "github.com/quasilyte/go-ruleguard/internal/xsrcimporter"
)
// goImporter is a `types.Importer` that tries to load a package no matter what.
@@ -23,7 +21,8 @@ type goImporter struct {
defaultImporter types.Importer
srcImporter types.Importer
- fset *token.FileSet
+ fset *token.FileSet
+ buildContext *build.Context
debugImports bool
debugPrint func(string)
@@ -33,17 +32,20 @@ type goImporterConfig struct {
fset *token.FileSet
debugImports bool
debugPrint func(string)
+ buildContext *build.Context
}
func newGoImporter(state *engineState, config goImporterConfig) *goImporter {
- return &goImporter{
+ imp := &goImporter{
state: state,
fset: config.fset,
debugImports: config.debugImports,
debugPrint: config.debugPrint,
defaultImporter: importer.Default(),
- srcImporter: importer.ForCompiler(config.fset, "source", nil),
+ buildContext: config.buildContext,
}
+ imp.initSourceImporter()
+ return imp
}
func (imp *goImporter) Import(path string) (*types.Package, error) {
@@ -54,8 +56,8 @@ func (imp *goImporter) Import(path string) (*types.Package, error) {
return pkg, nil
}
- pkg, err1 := imp.srcImporter.Import(path)
- if err1 == nil {
+ pkg, srcErr := imp.srcImporter.Import(path)
+ if srcErr == nil {
imp.state.AddCachedPackage(path, pkg)
if imp.debugImports {
imp.debugPrint(fmt.Sprintf(`imported "%s" from source importer`, path))
@@ -63,8 +65,8 @@ func (imp *goImporter) Import(path string) (*types.Package, error) {
return pkg, nil
}
- pkg, err2 := imp.defaultImporter.Import(path)
- if err2 == nil {
+ pkg, defaultErr := imp.defaultImporter.Import(path)
+ if defaultErr == nil {
imp.state.AddCachedPackage(path, pkg)
if imp.debugImports {
imp.debugPrint(fmt.Sprintf(`imported "%s" from %s importer`, path, runtime.Compiler))
@@ -72,45 +74,22 @@ func (imp *goImporter) Import(path string) (*types.Package, error) {
return pkg, nil
}
- // Fallback to `go list` as a last resort.
- pkg, err3 := imp.golistImport(path)
- if err3 == nil {
- imp.state.AddCachedPackage(path, pkg)
- if imp.debugImports {
- imp.debugPrint(fmt.Sprintf(`imported "%s" from golist importer`, path))
- }
- return pkg, nil
- }
-
if imp.debugImports {
imp.debugPrint(fmt.Sprintf(`failed to import "%s":`, path))
- imp.debugPrint(fmt.Sprintf(" source importer: %v", err1))
- imp.debugPrint(fmt.Sprintf(" %s importer: %v", runtime.Compiler, err2))
- imp.debugPrint(fmt.Sprintf(" golist importer: %v", err3))
+ imp.debugPrint(fmt.Sprintf(" %s importer: %v", runtime.Compiler, defaultErr))
+ imp.debugPrint(fmt.Sprintf(" source importer: %v", srcErr))
+ imp.debugPrint(fmt.Sprintf(" GOROOT=%q GOPATH=%q", imp.buildContext.GOROOT, imp.buildContext.GOPATH))
}
- return nil, err2
+ return nil, defaultErr
}
-func (imp *goImporter) golistImport(path string) (*types.Package, error) {
- golistPkg, err := golist.JSON(path)
- if err != nil {
- return nil, err
- }
-
- files := make([]*ast.File, 0, len(golistPkg.GoFiles))
- for _, filename := range golistPkg.GoFiles {
- fullname := filepath.Join(golistPkg.Dir, filename)
- f, err := parser.ParseFile(imp.fset, fullname, nil, 0)
- if err != nil {
- return nil, err
+func (imp *goImporter) initSourceImporter() {
+ if imp.buildContext == nil {
+ if imp.debugImports {
+ imp.debugPrint("using build.Default context")
}
- files = append(files, f)
+ imp.buildContext = &build.Default
}
-
- // TODO: do we want to assign imp as importer for this nested typecherker?
- // Otherwise it won't be able to resolve imports.
- var typecheker types.Config
- var info types.Info
- return typecheker.Check(path, imp.fset, files, &info)
+ imp.srcImporter = xsrcimporter.New(imp.buildContext, imp.fset)
}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go
new file mode 100644
index 000000000..c9401c020
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go
@@ -0,0 +1,276 @@
+// Code generated "gen_filter_op.go"; DO NOT EDIT.
+
+package ir
+
+const (
+ FilterInvalidOp FilterOp = 0
+
+ // !$Args[0]
+ FilterNotOp FilterOp = 1
+
+ // $Args[0] && $Args[1]
+ FilterAndOp FilterOp = 2
+
+ // $Args[0] || $Args[1]
+ FilterOrOp FilterOp = 3
+
+ // $Args[0] == $Args[1]
+ FilterEqOp FilterOp = 4
+
+ // $Args[0] != $Args[1]
+ FilterNeqOp FilterOp = 5
+
+ // $Args[0] > $Args[1]
+ FilterGtOp FilterOp = 6
+
+ // $Args[0] < $Args[1]
+ FilterLtOp FilterOp = 7
+
+ // $Args[0] >= $Args[1]
+ FilterGtEqOp FilterOp = 8
+
+ // $Args[0] <= $Args[1]
+ FilterLtEqOp FilterOp = 9
+
+ // m[$Value].Addressable
+ // $Value type: string
+ FilterVarAddressableOp FilterOp = 10
+
+ // m[$Value].Comparable
+ // $Value type: string
+ FilterVarComparableOp FilterOp = 11
+
+ // m[$Value].Pure
+ // $Value type: string
+ FilterVarPureOp FilterOp = 12
+
+ // m[$Value].Const
+ // $Value type: string
+ FilterVarConstOp FilterOp = 13
+
+ // m[$Value].ConstSlice
+ // $Value type: string
+ FilterVarConstSliceOp FilterOp = 14
+
+ // m[$Value].Text
+ // $Value type: string
+ FilterVarTextOp FilterOp = 15
+
+ // m[$Value].Line
+ // $Value type: string
+ FilterVarLineOp FilterOp = 16
+
+ // m[$Value].Value.Int()
+ // $Value type: string
+ FilterVarValueIntOp FilterOp = 17
+
+ // m[$Value].Type.Size
+ // $Value type: string
+ FilterVarTypeSizeOp FilterOp = 18
+
+ // m[$Value].Type.HasPointers()
+ // $Value type: string
+ FilterVarTypeHasPointersOp FilterOp = 19
+
+ // m[$Value].Filter($Args[0])
+ // $Value type: string
+ FilterVarFilterOp FilterOp = 20
+
+ // m[$Value].Node.Is($Args[0])
+ // $Value type: string
+ FilterVarNodeIsOp FilterOp = 21
+
+ // m[$Value].Object.Is($Args[0])
+ // $Value type: string
+ FilterVarObjectIsOp FilterOp = 22
+
+ // m[$Value].Object.IsGlobal()
+ // $Value type: string
+ FilterVarObjectIsGlobalOp FilterOp = 23
+
+ // m[$Value].Type.Is($Args[0])
+ // $Value type: string
+ FilterVarTypeIsOp FilterOp = 24
+
+ // m[$Value].Type.IdenticalTo($Args[0])
+ // $Value type: string
+ FilterVarTypeIdenticalToOp FilterOp = 25
+
+ // m[$Value].Type.Underlying().Is($Args[0])
+ // $Value type: string
+ FilterVarTypeUnderlyingIsOp FilterOp = 26
+
+ // m[$Value].Type.OfKind($Args[0])
+ // $Value type: string
+ FilterVarTypeOfKindOp FilterOp = 27
+
+ // m[$Value].Type.Underlying().OfKind($Args[0])
+ // $Value type: string
+ FilterVarTypeUnderlyingOfKindOp FilterOp = 28
+
+ // m[$Value].Type.ConvertibleTo($Args[0])
+ // $Value type: string
+ FilterVarTypeConvertibleToOp FilterOp = 29
+
+ // m[$Value].Type.AssignableTo($Args[0])
+ // $Value type: string
+ FilterVarTypeAssignableToOp FilterOp = 30
+
+ // m[$Value].Type.Implements($Args[0])
+ // $Value type: string
+ FilterVarTypeImplementsOp FilterOp = 31
+
+ // m[$Value].Type.HasMethod($Args[0])
+ // $Value type: string
+ FilterVarTypeHasMethodOp FilterOp = 32
+
+ // m[$Value].Text.Matches($Args[0])
+ // $Value type: string
+ FilterVarTextMatchesOp FilterOp = 33
+
+ // m[$Value].Contains($Args[0])
+ // $Value type: string
+ FilterVarContainsOp FilterOp = 34
+
+ // m.Deadcode()
+ FilterDeadcodeOp FilterOp = 35
+
+ // m.GoVersion().Eq($Value)
+ // $Value type: string
+ FilterGoVersionEqOp FilterOp = 36
+
+ // m.GoVersion().LessThan($Value)
+ // $Value type: string
+ FilterGoVersionLessThanOp FilterOp = 37
+
+ // m.GoVersion().GreaterThan($Value)
+ // $Value type: string
+ FilterGoVersionGreaterThanOp FilterOp = 38
+
+ // m.GoVersion().LessEqThan($Value)
+ // $Value type: string
+ FilterGoVersionLessEqThanOp FilterOp = 39
+
+ // m.GoVersion().GreaterEqThan($Value)
+ // $Value type: string
+ FilterGoVersionGreaterEqThanOp FilterOp = 40
+
+ // m.File.Imports($Value)
+ // $Value type: string
+ FilterFileImportsOp FilterOp = 41
+
+ // m.File.PkgPath.Matches($Value)
+ // $Value type: string
+ FilterFilePkgPathMatchesOp FilterOp = 42
+
+ // m.File.Name.Matches($Value)
+ // $Value type: string
+ FilterFileNameMatchesOp FilterOp = 43
+
+ // $Value holds a function name
+ // $Value type: string
+ FilterFilterFuncRefOp FilterOp = 44
+
+ // $Value holds a string constant
+ // $Value type: string
+ FilterStringOp FilterOp = 45
+
+ // $Value holds an int64 constant
+ // $Value type: int64
+ FilterIntOp FilterOp = 46
+
+ // m[`$$`].Node.Parent().Is($Args[0])
+ FilterRootNodeParentIsOp FilterOp = 47
+
+ // m[`$$`].SinkType.Is($Args[0])
+ FilterRootSinkTypeIsOp FilterOp = 48
+)
+
+var filterOpNames = map[FilterOp]string{
+ FilterInvalidOp: `Invalid`,
+ FilterNotOp: `Not`,
+ FilterAndOp: `And`,
+ FilterOrOp: `Or`,
+ FilterEqOp: `Eq`,
+ FilterNeqOp: `Neq`,
+ FilterGtOp: `Gt`,
+ FilterLtOp: `Lt`,
+ FilterGtEqOp: `GtEq`,
+ FilterLtEqOp: `LtEq`,
+ FilterVarAddressableOp: `VarAddressable`,
+ FilterVarComparableOp: `VarComparable`,
+ FilterVarPureOp: `VarPure`,
+ FilterVarConstOp: `VarConst`,
+ FilterVarConstSliceOp: `VarConstSlice`,
+ FilterVarTextOp: `VarText`,
+ FilterVarLineOp: `VarLine`,
+ FilterVarValueIntOp: `VarValueInt`,
+ FilterVarTypeSizeOp: `VarTypeSize`,
+ FilterVarTypeHasPointersOp: `VarTypeHasPointers`,
+ FilterVarFilterOp: `VarFilter`,
+ FilterVarNodeIsOp: `VarNodeIs`,
+ FilterVarObjectIsOp: `VarObjectIs`,
+ FilterVarObjectIsGlobalOp: `VarObjectIsGlobal`,
+ FilterVarTypeIsOp: `VarTypeIs`,
+ FilterVarTypeIdenticalToOp: `VarTypeIdenticalTo`,
+ FilterVarTypeUnderlyingIsOp: `VarTypeUnderlyingIs`,
+ FilterVarTypeOfKindOp: `VarTypeOfKind`,
+ FilterVarTypeUnderlyingOfKindOp: `VarTypeUnderlyingOfKind`,
+ FilterVarTypeConvertibleToOp: `VarTypeConvertibleTo`,
+ FilterVarTypeAssignableToOp: `VarTypeAssignableTo`,
+ FilterVarTypeImplementsOp: `VarTypeImplements`,
+ FilterVarTypeHasMethodOp: `VarTypeHasMethod`,
+ FilterVarTextMatchesOp: `VarTextMatches`,
+ FilterVarContainsOp: `VarContains`,
+ FilterDeadcodeOp: `Deadcode`,
+ FilterGoVersionEqOp: `GoVersionEq`,
+ FilterGoVersionLessThanOp: `GoVersionLessThan`,
+ FilterGoVersionGreaterThanOp: `GoVersionGreaterThan`,
+ FilterGoVersionLessEqThanOp: `GoVersionLessEqThan`,
+ FilterGoVersionGreaterEqThanOp: `GoVersionGreaterEqThan`,
+ FilterFileImportsOp: `FileImports`,
+ FilterFilePkgPathMatchesOp: `FilePkgPathMatches`,
+ FilterFileNameMatchesOp: `FileNameMatches`,
+ FilterFilterFuncRefOp: `FilterFuncRef`,
+ FilterStringOp: `String`,
+ FilterIntOp: `Int`,
+ FilterRootNodeParentIsOp: `RootNodeParentIs`,
+ FilterRootSinkTypeIsOp: `RootSinkTypeIs`,
+}
+var filterOpFlags = map[FilterOp]uint64{
+ FilterAndOp: flagIsBinaryExpr,
+ FilterOrOp: flagIsBinaryExpr,
+ FilterEqOp: flagIsBinaryExpr,
+ FilterNeqOp: flagIsBinaryExpr,
+ FilterGtOp: flagIsBinaryExpr,
+ FilterLtOp: flagIsBinaryExpr,
+ FilterGtEqOp: flagIsBinaryExpr,
+ FilterLtEqOp: flagIsBinaryExpr,
+ FilterVarAddressableOp: flagHasVar,
+ FilterVarComparableOp: flagHasVar,
+ FilterVarPureOp: flagHasVar,
+ FilterVarConstOp: flagHasVar,
+ FilterVarConstSliceOp: flagHasVar,
+ FilterVarTextOp: flagHasVar,
+ FilterVarLineOp: flagHasVar,
+ FilterVarValueIntOp: flagHasVar,
+ FilterVarTypeSizeOp: flagHasVar,
+ FilterVarTypeHasPointersOp: flagHasVar,
+ FilterVarFilterOp: flagHasVar,
+ FilterVarNodeIsOp: flagHasVar,
+ FilterVarObjectIsOp: flagHasVar,
+ FilterVarObjectIsGlobalOp: flagHasVar,
+ FilterVarTypeIsOp: flagHasVar,
+ FilterVarTypeIdenticalToOp: flagHasVar,
+ FilterVarTypeUnderlyingIsOp: flagHasVar,
+ FilterVarTypeOfKindOp: flagHasVar,
+ FilterVarTypeUnderlyingOfKindOp: flagHasVar,
+ FilterVarTypeConvertibleToOp: flagHasVar,
+ FilterVarTypeAssignableToOp: flagHasVar,
+ FilterVarTypeImplementsOp: flagHasVar,
+ FilterVarTypeHasMethodOp: flagHasVar,
+ FilterVarTextMatchesOp: flagHasVar,
+ FilterVarContainsOp: flagHasVar,
+ FilterStringOp: flagIsBasicLit,
+ FilterIntOp: flagIsBasicLit,
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go
new file mode 100644
index 000000000..d3b740905
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go
@@ -0,0 +1,147 @@
+//go:build generate
+// +build generate
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "strings"
+)
+
+type opInfo struct {
+ name string
+ comment string
+ valueType string
+ flags uint64
+}
+
+const (
+ flagIsBinaryExpr uint64 = 1 << iota
+ flagIsBasicLit
+ flagHasVar
+)
+
+func main() {
+ ops := []opInfo{
+ {name: "Invalid"},
+
+ {name: "Not", comment: "!$Args[0]"},
+
+ // Binary expressions.
+ {name: "And", comment: "$Args[0] && $Args[1]", flags: flagIsBinaryExpr},
+ {name: "Or", comment: "$Args[0] || $Args[1]", flags: flagIsBinaryExpr},
+ {name: "Eq", comment: "$Args[0] == $Args[1]", flags: flagIsBinaryExpr},
+ {name: "Neq", comment: "$Args[0] != $Args[1]", flags: flagIsBinaryExpr},
+ {name: "Gt", comment: "$Args[0] > $Args[1]", flags: flagIsBinaryExpr},
+ {name: "Lt", comment: "$Args[0] < $Args[1]", flags: flagIsBinaryExpr},
+ {name: "GtEq", comment: "$Args[0] >= $Args[1]", flags: flagIsBinaryExpr},
+ {name: "LtEq", comment: "$Args[0] <= $Args[1]", flags: flagIsBinaryExpr},
+
+ {name: "VarAddressable", comment: "m[$Value].Addressable", valueType: "string", flags: flagHasVar},
+ {name: "VarComparable", comment: "m[$Value].Comparable", valueType: "string", flags: flagHasVar},
+ {name: "VarPure", comment: "m[$Value].Pure", valueType: "string", flags: flagHasVar},
+ {name: "VarConst", comment: "m[$Value].Const", valueType: "string", flags: flagHasVar},
+ {name: "VarConstSlice", comment: "m[$Value].ConstSlice", valueType: "string", flags: flagHasVar},
+ {name: "VarText", comment: "m[$Value].Text", valueType: "string", flags: flagHasVar},
+ {name: "VarLine", comment: "m[$Value].Line", valueType: "string", flags: flagHasVar},
+ {name: "VarValueInt", comment: "m[$Value].Value.Int()", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeSize", comment: "m[$Value].Type.Size", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeHasPointers", comment: "m[$Value].Type.HasPointers()", valueType: "string", flags: flagHasVar},
+
+ {name: "VarFilter", comment: "m[$Value].Filter($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarNodeIs", comment: "m[$Value].Node.Is($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarObjectIs", comment: "m[$Value].Object.Is($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarObjectIsGlobal", comment: "m[$Value].Object.IsGlobal()", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeIs", comment: "m[$Value].Type.Is($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeIdenticalTo", comment: "m[$Value].Type.IdenticalTo($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeUnderlyingIs", comment: "m[$Value].Type.Underlying().Is($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeOfKind", comment: "m[$Value].Type.OfKind($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeUnderlyingOfKind", comment: "m[$Value].Type.Underlying().OfKind($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeConvertibleTo", comment: "m[$Value].Type.ConvertibleTo($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeAssignableTo", comment: "m[$Value].Type.AssignableTo($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeImplements", comment: "m[$Value].Type.Implements($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTypeHasMethod", comment: "m[$Value].Type.HasMethod($Args[0])", valueType: "string", flags: flagHasVar},
+ {name: "VarTextMatches", comment: "m[$Value].Text.Matches($Args[0])", valueType: "string", flags: flagHasVar},
+
+ {name: "VarContains", comment: "m[$Value].Contains($Args[0])", valueType: "string", flags: flagHasVar},
+
+ {name: "Deadcode", comment: "m.Deadcode()"},
+
+ {name: "GoVersionEq", comment: "m.GoVersion().Eq($Value)", valueType: "string"},
+ {name: "GoVersionLessThan", comment: "m.GoVersion().LessThan($Value)", valueType: "string"},
+ {name: "GoVersionGreaterThan", comment: "m.GoVersion().GreaterThan($Value)", valueType: "string"},
+ {name: "GoVersionLessEqThan", comment: "m.GoVersion().LessEqThan($Value)", valueType: "string"},
+ {name: "GoVersionGreaterEqThan", comment: "m.GoVersion().GreaterEqThan($Value)", valueType: "string"},
+
+ {name: "FileImports", comment: "m.File.Imports($Value)", valueType: "string"},
+ {name: "FilePkgPathMatches", comment: "m.File.PkgPath.Matches($Value)", valueType: "string"},
+ {name: "FileNameMatches", comment: "m.File.Name.Matches($Value)", valueType: "string"},
+
+ {name: "FilterFuncRef", comment: "$Value holds a function name", valueType: "string"},
+
+ {name: "String", comment: "$Value holds a string constant", valueType: "string", flags: flagIsBasicLit},
+ {name: "Int", comment: "$Value holds an int64 constant", valueType: "int64", flags: flagIsBasicLit},
+
+ {name: "RootNodeParentIs", comment: "m[`$$`].Node.Parent().Is($Args[0])"},
+ {name: "RootSinkTypeIs", comment: "m[`$$`].SinkType.Is($Args[0])"},
+ }
+
+ var buf bytes.Buffer
+
+ buf.WriteString(`// Code generated "gen_filter_op.go"; DO NOT EDIT.` + "\n")
+ buf.WriteString("\n")
+ buf.WriteString("package ir\n")
+ buf.WriteString("const (\n")
+
+ for i, op := range ops {
+ if strings.Contains(op.comment, "$Value") && op.valueType == "" {
+ fmt.Printf("missing %s valueType\n", op.name)
+ }
+ if op.comment != "" {
+ buf.WriteString("// " + op.comment + "\n")
+ }
+ if op.valueType != "" {
+ buf.WriteString("// $Value type: " + op.valueType + "\n")
+ }
+ fmt.Fprintf(&buf, "Filter%sOp FilterOp = %d\n", op.name, i)
+ buf.WriteString("\n")
+ }
+ buf.WriteString(")\n")
+
+ buf.WriteString("var filterOpNames = map[FilterOp]string{\n")
+ for _, op := range ops {
+ fmt.Fprintf(&buf, "Filter%sOp: `%s`,\n", op.name, op.name)
+ }
+ buf.WriteString("}\n")
+
+ buf.WriteString("var filterOpFlags = map[FilterOp]uint64{\n")
+ for _, op := range ops {
+ if op.flags == 0 {
+ continue
+ }
+ parts := make([]string, 0, 1)
+ if op.flags&flagIsBinaryExpr != 0 {
+ parts = append(parts, "flagIsBinaryExpr")
+ }
+ if op.flags&flagIsBasicLit != 0 {
+ parts = append(parts, "flagIsBasicLit")
+ }
+ if op.flags&flagHasVar != 0 {
+ parts = append(parts, "flagHasVar")
+ }
+ fmt.Fprintf(&buf, "Filter%sOp: %s,\n", op.name, strings.Join(parts, " | "))
+ }
+ buf.WriteString("}\n")
+
+ pretty, err := format.Source(buf.Bytes())
+ if err != nil {
+ panic(err)
+ }
+
+ if err := ioutil.WriteFile("filter_op.gen.go", pretty, 0644); err != nil {
+ panic(err)
+ }
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/ir.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/ir.go
new file mode 100644
index 000000000..b89481168
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/ir.go
@@ -0,0 +1,113 @@
+package ir
+
+import (
+ "fmt"
+ "strings"
+)
+
+type File struct {
+ PkgPath string
+
+ RuleGroups []RuleGroup
+
+ CustomDecls []string
+
+ BundleImports []BundleImport
+}
+
+type BundleImport struct {
+ Line int
+
+ PkgPath string
+ Prefix string
+}
+
+type RuleGroup struct {
+ Line int
+ Name string
+ MatcherName string
+
+ DocTags []string
+ DocSummary string
+ DocBefore string
+ DocAfter string
+ DocNote string
+
+ Imports []PackageImport
+
+ Rules []Rule
+}
+
+type PackageImport struct {
+ Path string
+ Name string
+}
+
+type Rule struct {
+ Line int
+
+ SyntaxPatterns []PatternString
+ CommentPatterns []PatternString
+
+ ReportTemplate string
+ SuggestTemplate string
+ DoFuncName string
+
+ WhereExpr FilterExpr
+
+ LocationVar string
+}
+
+type PatternString struct {
+ Line int
+ Value string
+}
+
+// stringer -type=FilterOp -trimprefix=Filter
+
+//go:generate go run ./gen_filter_op.go
+type FilterOp int
+
+func (op FilterOp) String() string { return filterOpNames[op] }
+
+type FilterExpr struct {
+ Line int
+
+ Op FilterOp
+ Src string
+ Value interface{}
+ Args []FilterExpr
+}
+
+func (e FilterExpr) IsValid() bool { return e.Op != FilterInvalidOp }
+
+func (e FilterExpr) IsBinaryExpr() bool { return filterOpFlags[e.Op]&flagIsBinaryExpr != 0 }
+func (e FilterExpr) IsBasicLit() bool { return filterOpFlags[e.Op]&flagIsBasicLit != 0 }
+func (e FilterExpr) HasVar() bool { return filterOpFlags[e.Op]&flagHasVar != 0 }
+
+func (e FilterExpr) String() string {
+ switch e.Op {
+ case FilterStringOp:
+ return `"` + e.Value.(string) + `"`
+ case FilterIntOp:
+ return fmt.Sprint(e.Value.(int64))
+ }
+ parts := make([]string, 0, len(e.Args)+2)
+ parts = append(parts, e.Op.String())
+ if e.Value != nil {
+ parts = append(parts, fmt.Sprintf("[%#v]", e.Value))
+ }
+ for _, arg := range e.Args {
+ parts = append(parts, arg.String())
+ }
+ if len(parts) == 1 {
+ return parts[0]
+ }
+ return "(" + strings.Join(parts, " ") + ")"
+}
+
+const (
+ flagIsBinaryExpr uint64 = 1 << iota
+ flagIsBasicLit
+ flagHasVar
+)
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go
new file mode 100644
index 000000000..c07a19f54
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go
@@ -0,0 +1,888 @@
+package ruleguard
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "io/ioutil"
+ "regexp"
+
+ "github.com/quasilyte/gogrep"
+ "github.com/quasilyte/gogrep/nodetag"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/goutil"
+ "github.com/quasilyte/go-ruleguard/ruleguard/ir"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+ "github.com/quasilyte/go-ruleguard/ruleguard/textmatch"
+ "github.com/quasilyte/go-ruleguard/ruleguard/typematch"
+)
+
+type irLoaderConfig struct {
+ ctx *LoadContext
+
+ state *engineState
+
+ importer *goImporter
+
+ itab *typematch.ImportsTab
+
+ pkg *types.Package
+
+ gogrepFset *token.FileSet
+
+ prefix string
+ importedPkg string
+}
+
+type irLoader struct {
+ state *engineState
+ ctx *LoadContext
+ itab *typematch.ImportsTab
+
+ pkg *types.Package
+
+ file *ir.File
+ gogrepFset *token.FileSet
+
+ filename string
+ res *goRuleSet
+
+ importer *goImporter
+
+ group *GoRuleGroup
+
+ prefix string // For imported packages, a prefix that is added to a rule group name
+ importedPkg string // Package path; only for imported packages
+
+ imported []*goRuleSet
+}
+
+func newIRLoader(config irLoaderConfig) *irLoader {
+ return &irLoader{
+ state: config.state,
+ ctx: config.ctx,
+ importer: config.importer,
+ itab: config.itab,
+ pkg: config.pkg,
+ prefix: config.prefix,
+ gogrepFset: config.gogrepFset,
+ }
+}
+
+func (l *irLoader) LoadFile(filename string, f *ir.File) (*goRuleSet, error) {
+ l.filename = filename
+ l.file = f
+ l.res = &goRuleSet{
+ universal: &scopedGoRuleSet{},
+ groups: make(map[string]*GoRuleGroup),
+ }
+
+ for _, imp := range f.BundleImports {
+ if l.importedPkg != "" {
+ return nil, l.errorf(imp.Line, nil, "imports from imported packages are not supported yet")
+ }
+ if err := l.loadBundle(imp); err != nil {
+ return nil, err
+ }
+ }
+
+ if err := l.compileFilterFuncs(filename, f); err != nil {
+ return nil, err
+ }
+
+ for i := range f.RuleGroups {
+ if err := l.loadRuleGroup(&f.RuleGroups[i]); err != nil {
+ return nil, err
+ }
+ }
+
+ if len(l.imported) != 0 {
+ toMerge := []*goRuleSet{l.res}
+ toMerge = append(toMerge, l.imported...)
+ merged, err := mergeRuleSets(toMerge)
+ if err != nil {
+ return nil, err
+ }
+ l.res = merged
+ }
+
+ return l.res, nil
+}
+
+func (l *irLoader) importErrorf(line int, wrapped error, format string, args ...interface{}) error {
+ return &ImportError{
+ msg: fmt.Sprintf("%s:%d: %s", l.filename, line, fmt.Sprintf(format, args...)),
+ err: wrapped,
+ }
+}
+
+func (l *irLoader) errorf(line int, wrapped error, format string, args ...interface{}) error {
+ if wrapped == nil {
+ return fmt.Errorf("%s:%d: %s", l.filename, line, fmt.Sprintf(format, args...))
+ }
+ return fmt.Errorf("%s:%d: %s: %w", l.filename, line, fmt.Sprintf(format, args...), wrapped)
+}
+
+func (l *irLoader) loadBundle(bundle ir.BundleImport) error {
+ files, err := findBundleFiles(bundle.PkgPath)
+ if err != nil {
+ return l.errorf(bundle.Line, err, "can't find imported bundle files")
+ }
+ for _, filename := range files {
+ rset, err := l.loadExternFile(bundle.Prefix, bundle.PkgPath, filename)
+ if err != nil {
+ return l.errorf(bundle.Line, err, "error during bundle file loading")
+ }
+ l.imported = append(l.imported, rset)
+ }
+
+ return nil
+}
+
+func (l *irLoader) loadExternFile(prefix, pkgPath, filename string) (*goRuleSet, error) {
+ src, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ irfile, pkg, err := convertAST(l.ctx, l.importer, filename, src)
+ if err != nil {
+ return nil, err
+ }
+ config := irLoaderConfig{
+ state: l.state,
+ ctx: l.ctx,
+ importer: l.importer,
+ prefix: prefix,
+ pkg: pkg,
+ importedPkg: pkgPath,
+ itab: l.itab,
+ gogrepFset: l.gogrepFset,
+ }
+ rset, err := newIRLoader(config).LoadFile(filename, irfile)
+ if err != nil {
+ return nil, fmt.Errorf("%s: %w", l.importedPkg, err)
+ }
+ return rset, nil
+}
+
+func (l *irLoader) compileFilterFuncs(filename string, irfile *ir.File) error {
+ if len(irfile.CustomDecls) == 0 {
+ return nil
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString("package gorules\n")
+ buf.WriteString("import \"github.com/quasilyte/go-ruleguard/dsl\"\n")
+ buf.WriteString("import \"github.com/quasilyte/go-ruleguard/dsl/types\"\n")
+ for _, src := range irfile.CustomDecls {
+ buf.WriteString(src)
+ buf.WriteString("\n")
+ }
+ buf.WriteString("type _ = dsl.Matcher\n")
+ buf.WriteString("type _ = types.Type\n")
+
+ fset := token.NewFileSet()
+ f, err := goutil.LoadGoFile(goutil.LoadConfig{
+ Fset: fset,
+ Filename: filename,
+ Data: &buf,
+ Importer: l.importer,
+ })
+ if err != nil {
+ // If this ever happens, user will get unexpected error
+ // lines for it; but we should trust that 99.9% errors
+ // should be catched at irconv phase so we get a valid Go
+ // source here as well?
+ return fmt.Errorf("parse custom decls: %w", err)
+ }
+
+ for _, decl := range f.Syntax.Decls {
+ decl, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+ ctx := &quasigo.CompileContext{
+ Env: l.state.env,
+ Package: f.Pkg,
+ Types: f.Types,
+ Fset: fset,
+ }
+ compiled, err := quasigo.Compile(ctx, decl)
+ if err != nil {
+ return err
+ }
+ if l.ctx.DebugFunc == decl.Name.String() {
+ l.ctx.DebugPrint(quasigo.Disasm(l.state.env, compiled))
+ }
+ ctx.Env.AddFunc(f.Pkg.Path(), decl.Name.String(), compiled)
+ }
+
+ return nil
+}
+
+func (l *irLoader) loadRuleGroup(group *ir.RuleGroup) error {
+ l.group = &GoRuleGroup{
+ Line: group.Line,
+ Filename: l.filename,
+ Name: group.Name,
+ DocSummary: group.DocSummary,
+ DocBefore: group.DocBefore,
+ DocAfter: group.DocAfter,
+ DocNote: group.DocNote,
+ DocTags: group.DocTags,
+ }
+ if l.prefix != "" {
+ l.group.Name = l.prefix + "/" + l.group.Name
+ }
+
+ if l.ctx.GroupFilter != nil && !l.ctx.GroupFilter(l.group) {
+ return nil // Skip this group
+ }
+ if _, ok := l.res.groups[l.group.Name]; ok {
+ panic(fmt.Sprintf("duplicated function %s after the typecheck", l.group.Name)) // Should never happen
+ }
+ l.res.groups[l.group.Name] = l.group
+
+ l.itab.EnterScope()
+ defer l.itab.LeaveScope()
+
+ for _, imported := range group.Imports {
+ l.itab.Load(imported.Name, imported.Path)
+ }
+
+ for i := range group.Rules {
+ rule := &group.Rules[i]
+ if err := l.loadRule(group, rule); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (l *irLoader) loadRule(group *ir.RuleGroup, rule *ir.Rule) error {
+ proto := goRule{
+ line: rule.Line,
+ group: l.group,
+ suggestion: rule.SuggestTemplate,
+ msg: rule.ReportTemplate,
+ location: rule.LocationVar,
+ }
+
+ if rule.DoFuncName != "" {
+ doFn := l.state.env.GetFunc(l.file.PkgPath, rule.DoFuncName)
+ if doFn == nil {
+ return l.errorf(rule.Line, nil, "can't find a compiled version of %s", rule.DoFuncName)
+ }
+ proto.do = doFn
+ }
+
+ info := filterInfo{
+ Vars: make(map[string]struct{}),
+ group: group,
+ }
+ if rule.WhereExpr.IsValid() {
+ filter, err := l.newFilter(rule.WhereExpr, &info)
+ if err != nil {
+ return err
+ }
+ proto.filter = filter
+ }
+
+ for _, pat := range rule.SyntaxPatterns {
+ if err := l.loadSyntaxRule(group, proto, info, rule, pat.Value, pat.Line); err != nil {
+ return err
+ }
+ }
+ for _, pat := range rule.CommentPatterns {
+ if err := l.loadCommentRule(proto, rule, pat.Value, pat.Line); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (l *irLoader) loadCommentRule(resultProto goRule, rule *ir.Rule, src string, line int) error {
+ dst := l.res.universal
+ pat, err := regexp.Compile(src)
+ if err != nil {
+ return l.errorf(rule.Line, err, "compile regexp")
+ }
+ resultBase := resultProto
+ resultBase.line = line
+ result := goCommentRule{
+ base: resultProto,
+ pat: pat,
+ captureGroups: regexpHasCaptureGroups(src),
+ }
+ dst.commentRules = append(dst.commentRules, result)
+
+ return nil
+}
+
+func (l *irLoader) gogrepCompile(group *ir.RuleGroup, src string) (*gogrep.Pattern, gogrep.PatternInfo, error) {
+ var imports map[string]string
+ if len(group.Imports) != 0 {
+ imports = make(map[string]string)
+ for _, imported := range group.Imports {
+ imports[imported.Name] = imported.Path
+ }
+ }
+
+ gogrepConfig := gogrep.CompileConfig{
+ Fset: l.gogrepFset,
+ Src: src,
+ Strict: false,
+ WithTypes: true,
+ Imports: imports,
+ }
+ return gogrep.Compile(gogrepConfig)
+}
+
+func (l *irLoader) loadSyntaxRule(group *ir.RuleGroup, resultProto goRule, filterInfo filterInfo, rule *ir.Rule, src string, line int) error {
+ result := resultProto
+ result.line = line
+
+ pat, info, err := l.gogrepCompile(group, src)
+ if err != nil {
+ return l.errorf(rule.Line, err, "parse match pattern")
+ }
+ result.pat = pat
+
+ for filterVar := range filterInfo.Vars {
+ if filterVar == "$$" {
+ continue // OK: a predefined var for the "entire match"
+ }
+ _, ok := info.Vars[filterVar]
+ if !ok {
+ return l.errorf(rule.Line, nil, "filter refers to a non-existing var %s", filterVar)
+ }
+ }
+
+ dst := l.res.universal
+ var dstTags []nodetag.Value
+ switch tag := pat.NodeTag(); tag {
+ case nodetag.Unknown:
+ return l.errorf(rule.Line, nil, "can't infer a tag of %s", src)
+ case nodetag.Node:
+ return l.errorf(rule.Line, nil, "%s pattern is too general", src)
+ case nodetag.StmtList:
+ dstTags = []nodetag.Value{
+ nodetag.BlockStmt,
+ nodetag.CaseClause,
+ nodetag.CommClause,
+ }
+ case nodetag.ExprList:
+ dstTags = []nodetag.Value{
+ nodetag.CallExpr,
+ nodetag.CompositeLit,
+ nodetag.ReturnStmt,
+ }
+ default:
+ dstTags = []nodetag.Value{tag}
+ }
+ for _, tag := range dstTags {
+ dst.rulesByTag[tag] = append(dst.rulesByTag[tag], result)
+ }
+ dst.categorizedNum++
+
+ return nil
+}
+
+func (l *irLoader) unwrapTypeExpr(filter ir.FilterExpr) (types.Type, error) {
+ typeString := l.unwrapStringExpr(filter)
+ if typeString == "" {
+ return nil, l.errorf(filter.Line, nil, "expected a non-empty type string")
+ }
+ typ, err := typeFromString(typeString)
+ if err != nil {
+ return nil, l.errorf(filter.Line, err, "parse type expr")
+ }
+ if typ == nil {
+ return nil, l.errorf(filter.Line, nil, "can't convert %s into a type constraint yet", typeString)
+ }
+ return typ, nil
+}
+
+func (l *irLoader) unwrapFuncRefExpr(filter ir.FilterExpr) (*types.Func, error) {
+ s := l.unwrapStringExpr(filter)
+ if s == "" {
+ return nil, l.errorf(filter.Line, nil, "expected a non-empty func ref string")
+ }
+
+ n, err := parser.ParseExpr(s)
+ if err != nil {
+ return nil, err
+ }
+
+ switch n := n.(type) {
+ case *ast.CallExpr:
+ // TODO: implement this.
+ return nil, l.errorf(filter.Line, nil, "inline func signatures are not supported yet")
+ case *ast.SelectorExpr:
+ funcName := n.Sel.Name
+ pkgAndType, ok := n.X.(*ast.SelectorExpr)
+ if !ok {
+ return nil, l.errorf(filter.Line, nil, "invalid selector expression")
+ }
+ pkgID, ok := pkgAndType.X.(*ast.Ident)
+ if !ok {
+ return nil, l.errorf(filter.Line, nil, "invalid package name selector part")
+ }
+ pkgName := pkgID.Name
+ typeName := pkgAndType.Sel.Name
+ fqn := pkgName + "." + typeName
+ typ, err := l.state.FindType(l.importer, l.pkg, fqn)
+ if err != nil {
+ return nil, l.errorf(filter.Line, nil, "can't find %s type", fqn)
+ }
+ switch typ := typ.Underlying().(type) {
+ case *types.Interface:
+ for i := 0; i < typ.NumMethods(); i++ {
+ fn := typ.Method(i)
+ if fn.Name() == funcName {
+ return fn, nil
+ }
+ }
+ default:
+ return nil, l.errorf(filter.Line, nil, "only interfaces are supported, but %s is %T", fqn, typ)
+ }
+
+ default:
+ return nil, l.errorf(filter.Line, nil, "unexpected %T node", n)
+ }
+
+ return nil, nil
+}
+
+func (l *irLoader) unwrapInterfaceExpr(filter ir.FilterExpr) (*types.Interface, error) {
+ typeString := l.unwrapStringExpr(filter)
+ if typeString == "" {
+ return nil, l.errorf(filter.Line, nil, "expected a non-empty type name string")
+ }
+
+ typ, err := l.state.FindType(l.importer, l.pkg, typeString)
+ if err == nil {
+ iface, ok := typ.Underlying().(*types.Interface)
+ if !ok {
+ return nil, l.errorf(filter.Line, nil, "%s is not an interface type", typeString)
+ }
+ return iface, nil
+ }
+
+ n, err := parser.ParseExpr(typeString)
+ if err != nil {
+ return nil, l.errorf(filter.Line, err, "parse %s type expr", typeString)
+ }
+ qn, ok := n.(*ast.SelectorExpr)
+ if !ok {
+ return nil, l.errorf(filter.Line, nil, "can't resolve %s type; try a fully-qualified name", typeString)
+ }
+ pkgName, ok := qn.X.(*ast.Ident)
+ if !ok {
+ return nil, l.errorf(filter.Line, nil, "invalid package name")
+ }
+ pkgPath, ok := l.itab.Lookup(pkgName.Name)
+ if !ok {
+ return nil, l.errorf(filter.Line, nil, "package %s is not imported", pkgName.Name)
+ }
+ pkg, err := l.importer.Import(pkgPath)
+ if err != nil {
+ return nil, l.importErrorf(filter.Line, err, "can't load %s", pkgPath)
+ }
+ obj := pkg.Scope().Lookup(qn.Sel.Name)
+ if obj == nil {
+ return nil, l.errorf(filter.Line, nil, "%s is not found in %s", qn.Sel.Name, pkgPath)
+ }
+ iface, ok := obj.Type().Underlying().(*types.Interface)
+ if !ok {
+ return nil, l.errorf(filter.Line, nil, "%s is not an interface type", qn.Sel.Name)
+ }
+ return iface, nil
+}
+
+func (l *irLoader) unwrapRegexpExpr(filter ir.FilterExpr) (textmatch.Pattern, error) {
+ patternString := l.unwrapStringExpr(filter)
+ if patternString == "" {
+ return nil, l.errorf(filter.Line, nil, "expected a non-empty regexp pattern argument")
+ }
+ re, err := textmatch.Compile(patternString)
+ if err != nil {
+ return nil, l.errorf(filter.Line, err, "compile regexp")
+ }
+ return re, nil
+}
+
+func (l *irLoader) unwrapNodeTagExpr(filter ir.FilterExpr) (nodetag.Value, error) {
+ typeString := l.unwrapStringExpr(filter)
+ if typeString == "" {
+ return nodetag.Unknown, l.errorf(filter.Line, nil, "expected a non-empty string argument")
+ }
+ tag := nodetag.FromString(typeString)
+ if tag == nodetag.Unknown {
+ return tag, l.errorf(filter.Line, nil, "%s is not a valid go/ast type name", typeString)
+ }
+ return tag, nil
+}
+
+func (l *irLoader) unwrapStringExpr(filter ir.FilterExpr) string {
+ if filter.Op == ir.FilterStringOp {
+ return filter.Value.(string)
+ }
+ return ""
+}
+
+func (l *irLoader) stringToBasicKind(s string) types.BasicInfo {
+ switch s {
+ case "integer":
+ return types.IsInteger
+ case "unsigned":
+ return types.IsUnsigned
+ case "float":
+ return types.IsFloat
+ case "complex":
+ return types.IsComplex
+ case "untyped":
+ return types.IsUnsigned
+ case "numeric":
+ return types.IsNumeric
+ default:
+ return 0
+ }
+}
+
+func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilter, error) {
+ if filter.HasVar() {
+ info.Vars[filter.Value.(string)] = struct{}{}
+ }
+
+ if filter.IsBinaryExpr() {
+ return l.newBinaryExprFilter(filter, info)
+ }
+
+ result := matchFilter{src: filter.Src}
+
+ switch filter.Op {
+ case ir.FilterNotOp:
+ x, err := l.newFilter(filter.Args[0], info)
+ if err != nil {
+ return result, err
+ }
+ result.fn = makeNotFilter(result.src, x)
+
+ case ir.FilterVarTextMatchesOp:
+ re, err := l.unwrapRegexpExpr(filter.Args[0])
+ if err != nil {
+ return result, err
+ }
+ result.fn = makeTextMatchesFilter(result.src, filter.Value.(string), re)
+
+ case ir.FilterVarObjectIsOp:
+ typeString := l.unwrapStringExpr(filter.Args[0])
+ if typeString == "" {
+ return result, l.errorf(filter.Line, nil, "expected a non-empty string argument")
+ }
+ switch typeString {
+ case "Func", "Var", "Const", "TypeName", "Label", "PkgName", "Builtin", "Nil":
+ // OK.
+ default:
+ return result, l.errorf(filter.Line, nil, "%s is not a valid go/types object name", typeString)
+ }
+ result.fn = makeObjectIsFilter(result.src, filter.Value.(string), typeString)
+
+ case ir.FilterRootNodeParentIsOp:
+ tag, err := l.unwrapNodeTagExpr(filter.Args[0])
+ if err != nil {
+ return result, err
+ }
+ result.fn = makeRootParentNodeIsFilter(result.src, tag)
+
+ case ir.FilterVarNodeIsOp:
+ tag, err := l.unwrapNodeTagExpr(filter.Args[0])
+ if err != nil {
+ return result, err
+ }
+ result.fn = makeNodeIsFilter(result.src, filter.Value.(string), tag)
+
+ case ir.FilterRootSinkTypeIsOp:
+ typeString := l.unwrapStringExpr(filter.Args[0])
+ if typeString == "" {
+ return result, l.errorf(filter.Line, nil, "expected a non-empty string argument")
+ }
+ ctx := typematch.Context{Itab: l.itab}
+ pat, err := typematch.Parse(&ctx, typeString)
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse type expr")
+ }
+ result.fn = makeRootSinkTypeIsFilter(result.src, pat)
+
+ case ir.FilterVarTypeHasPointersOp:
+ result.fn = makeTypeHasPointersFilter(result.src, filter.Value.(string))
+
+ case ir.FilterVarTypeOfKindOp, ir.FilterVarTypeUnderlyingOfKindOp:
+ kindString := l.unwrapStringExpr(filter.Args[0])
+ if kindString == "" {
+ return result, l.errorf(filter.Line, nil, "expected a non-empty string argument")
+ }
+ underlying := filter.Op == ir.FilterVarTypeUnderlyingOfKindOp
+ switch kindString {
+ case "signed":
+ result.fn = makeTypeIsSignedFilter(result.src, filter.Value.(string), underlying)
+ case "int":
+ result.fn = makeTypeIsIntUintFilter(result.src, filter.Value.(string), underlying, types.Int)
+ case "uint":
+ result.fn = makeTypeIsIntUintFilter(result.src, filter.Value.(string), underlying, types.Uint)
+ default:
+ kind := l.stringToBasicKind(kindString)
+ if kind == 0 {
+ return result, l.errorf(filter.Line, nil, "unknown kind %s", kindString)
+ }
+ result.fn = makeTypeOfKindFilter(result.src, filter.Value.(string), underlying, kind)
+ }
+
+ case ir.FilterVarTypeIdenticalToOp:
+ lhsVarname := filter.Value.(string)
+ rhsVarname := filter.Args[0].Value.(string)
+ result.fn = makeTypesIdenticalFilter(result.src, lhsVarname, rhsVarname)
+
+ case ir.FilterVarTypeIsOp, ir.FilterVarTypeUnderlyingIsOp:
+ typeString := l.unwrapStringExpr(filter.Args[0])
+ if typeString == "" {
+ return result, l.errorf(filter.Line, nil, "expected a non-empty string argument")
+ }
+ ctx := typematch.Context{Itab: l.itab}
+ pat, err := typematch.Parse(&ctx, typeString)
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse type expr")
+ }
+ underlying := filter.Op == ir.FilterVarTypeUnderlyingIsOp
+ result.fn = makeTypeIsFilter(result.src, filter.Value.(string), underlying, pat)
+
+ case ir.FilterVarTypeConvertibleToOp:
+ dstType, err := l.unwrapTypeExpr(filter.Args[0])
+ if err != nil {
+ return result, err
+ }
+ result.fn = makeTypeConvertibleToFilter(result.src, filter.Value.(string), dstType)
+
+ case ir.FilterVarTypeAssignableToOp:
+ dstType, err := l.unwrapTypeExpr(filter.Args[0])
+ if err != nil {
+ return result, err
+ }
+ result.fn = makeTypeAssignableToFilter(result.src, filter.Value.(string), dstType)
+
+ case ir.FilterVarTypeImplementsOp:
+ iface, err := l.unwrapInterfaceExpr(filter.Args[0])
+ if err != nil {
+ return result, err
+ }
+ result.fn = makeTypeImplementsFilter(result.src, filter.Value.(string), iface)
+
+ case ir.FilterVarTypeHasMethodOp:
+ fn, err := l.unwrapFuncRefExpr(filter.Args[0])
+ if err != nil {
+ return result, err
+ }
+ if fn == nil {
+ return result, l.errorf(filter.Line, nil, "can't resolve HasMethod() argument")
+ }
+ result.fn = makeTypeHasMethodFilter(result.src, filter.Value.(string), fn)
+
+ case ir.FilterVarPureOp:
+ result.fn = makePureFilter(result.src, filter.Value.(string))
+ case ir.FilterVarConstOp:
+ result.fn = makeConstFilter(result.src, filter.Value.(string))
+ case ir.FilterVarObjectIsGlobalOp:
+ result.fn = makeObjectIsGlobalFilter(result.src, filter.Value.(string))
+ case ir.FilterVarConstSliceOp:
+ result.fn = makeConstSliceFilter(result.src, filter.Value.(string))
+ case ir.FilterVarAddressableOp:
+ result.fn = makeAddressableFilter(result.src, filter.Value.(string))
+ case ir.FilterVarComparableOp:
+ result.fn = makeComparableFilter(result.src, filter.Value.(string))
+
+ case ir.FilterFileImportsOp:
+ result.fn = makeFileImportsFilter(result.src, filter.Value.(string))
+
+ case ir.FilterDeadcodeOp:
+ result.fn = makeDeadcodeFilter(result.src)
+
+ case ir.FilterGoVersionEqOp:
+ version, err := ParseGoVersion(filter.Value.(string))
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse Go version")
+ }
+ result.fn = makeGoVersionFilter(result.src, token.EQL, version)
+ case ir.FilterGoVersionLessThanOp:
+ version, err := ParseGoVersion(filter.Value.(string))
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse Go version")
+ }
+ result.fn = makeGoVersionFilter(result.src, token.LSS, version)
+ case ir.FilterGoVersionGreaterThanOp:
+ version, err := ParseGoVersion(filter.Value.(string))
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse Go version")
+ }
+ result.fn = makeGoVersionFilter(result.src, token.GTR, version)
+ case ir.FilterGoVersionLessEqThanOp:
+ version, err := ParseGoVersion(filter.Value.(string))
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse Go version")
+ }
+ result.fn = makeGoVersionFilter(result.src, token.LEQ, version)
+ case ir.FilterGoVersionGreaterEqThanOp:
+ version, err := ParseGoVersion(filter.Value.(string))
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse Go version")
+ }
+ result.fn = makeGoVersionFilter(result.src, token.GEQ, version)
+
+ case ir.FilterFilePkgPathMatchesOp:
+ re, err := regexp.Compile(filter.Value.(string))
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "compile regexp")
+ }
+ result.fn = makeFilePkgPathMatchesFilter(result.src, re)
+
+ case ir.FilterFileNameMatchesOp:
+ re, err := regexp.Compile(filter.Value.(string))
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "compile regexp")
+ }
+ result.fn = makeFileNameMatchesFilter(result.src, re)
+
+ case ir.FilterVarContainsOp:
+ src := filter.Args[0].Value.(string)
+ pat, _, err := l.gogrepCompile(info.group, src)
+ if err != nil {
+ return result, l.errorf(filter.Line, err, "parse contains pattern")
+ }
+ result.fn = makeVarContainsFilter(result.src, filter.Value.(string), pat)
+
+ case ir.FilterVarFilterOp:
+ funcName := filter.Args[0].Value.(string)
+ userFn := l.state.env.GetFunc(l.file.PkgPath, funcName)
+ if userFn == nil {
+ return result, l.errorf(filter.Line, nil, "can't find a compiled version of %s", funcName)
+ }
+ result.fn = makeCustomVarFilter(result.src, filter.Value.(string), userFn)
+ }
+
+ if result.fn == nil {
+ return result, l.errorf(filter.Line, nil, "unsupported expr: %s (%s)", result.src, filter.Op)
+ }
+
+ return result, nil
+}
+
+func (l *irLoader) newBinaryExprFilter(filter ir.FilterExpr, info *filterInfo) (matchFilter, error) {
+ if filter.Op == ir.FilterAndOp || filter.Op == ir.FilterOrOp {
+ result := matchFilter{src: filter.Src}
+ lhs, err := l.newFilter(filter.Args[0], info)
+ if err != nil {
+ return result, err
+ }
+ rhs, err := l.newFilter(filter.Args[1], info)
+ if err != nil {
+ return result, err
+ }
+ if filter.Op == ir.FilterAndOp {
+ result.fn = makeAndFilter(lhs, rhs)
+ } else {
+ result.fn = makeOrFilter(lhs, rhs)
+ }
+ return result, nil
+ }
+
+ // If constexpr is on the LHS, move it to the right, so the code below
+ // can imply constants being on the RHS all the time.
+ if filter.Args[0].IsBasicLit() && !filter.Args[1].IsBasicLit() {
+ // Just a precaution: if we ever have a float values here,
+ // we may not want to rearrange anything.
+ switch filter.Args[0].Value.(type) {
+ case string, int64:
+ switch filter.Op {
+ case ir.FilterEqOp, ir.FilterNeqOp:
+ // Simple commutative ops. Just swap the args.
+ newFilter := filter
+ newFilter.Args = []ir.FilterExpr{filter.Args[1], filter.Args[0]}
+ return l.newBinaryExprFilter(newFilter, info)
+ }
+ }
+ }
+
+ result := matchFilter{src: filter.Src}
+
+ var tok token.Token
+ switch filter.Op {
+ case ir.FilterEqOp:
+ tok = token.EQL
+ case ir.FilterNeqOp:
+ tok = token.NEQ
+ case ir.FilterGtOp:
+ tok = token.GTR
+ case ir.FilterGtEqOp:
+ tok = token.GEQ
+ case ir.FilterLtOp:
+ tok = token.LSS
+ case ir.FilterLtEqOp:
+ tok = token.LEQ
+ default:
+ return result, l.errorf(filter.Line, nil, "unsupported operator in binary expr: %s", result.src)
+ }
+
+ lhs := filter.Args[0]
+ rhs := filter.Args[1]
+ var rhsValue constant.Value
+ switch rhs.Op {
+ case ir.FilterStringOp:
+ rhsValue = constant.MakeString(rhs.Value.(string))
+ case ir.FilterIntOp:
+ rhsValue = constant.MakeInt64(rhs.Value.(int64))
+ }
+
+ switch lhs.Op {
+ case ir.FilterVarLineOp:
+ if rhsValue != nil {
+ result.fn = makeLineConstFilter(result.src, lhs.Value.(string), tok, rhsValue)
+ } else if rhs.Op == lhs.Op {
+ result.fn = makeLineFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string))
+ }
+ case ir.FilterVarTypeSizeOp:
+ if rhsValue != nil {
+ result.fn = makeTypeSizeConstFilter(result.src, lhs.Value.(string), tok, rhsValue)
+ } else {
+ result.fn = makeTypeSizeFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string))
+ }
+ case ir.FilterVarValueIntOp:
+ if rhsValue != nil {
+ result.fn = makeValueIntConstFilter(result.src, lhs.Value.(string), tok, rhsValue)
+ } else if rhs.Op == lhs.Op {
+ result.fn = makeValueIntFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string))
+ }
+ case ir.FilterVarTextOp:
+ if rhsValue != nil {
+ result.fn = makeTextConstFilter(result.src, lhs.Value.(string), tok, rhsValue)
+ } else if rhs.Op == lhs.Op {
+ result.fn = makeTextFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string))
+ }
+ }
+
+ if result.fn == nil {
+ return result, l.errorf(filter.Line, nil, "unsupported binary expr: %s", result.src)
+ }
+
+ return result, nil
+}
+
+type filterInfo struct {
+ Vars map[string]struct{}
+
+ group *ir.RuleGroup
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_utils.go
new file mode 100644
index 000000000..62c24bf15
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_utils.go
@@ -0,0 +1,41 @@
+package ruleguard
+
+import (
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/types"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/ir"
+ "github.com/quasilyte/go-ruleguard/ruleguard/irconv"
+)
+
+func convertAST(ctx *LoadContext, imp *goImporter, filename string, src []byte) (*ir.File, *types.Package, error) {
+ parserFlags := parser.ParseComments
+ f, err := parser.ParseFile(ctx.Fset, filename, src, parserFlags)
+ if err != nil {
+ return nil, nil, fmt.Errorf("parse file error: %w", err)
+ }
+
+ typechecker := types.Config{Importer: imp}
+ typesInfo := &types.Info{
+ Types: map[ast.Expr]types.TypeAndValue{},
+ Uses: map[*ast.Ident]types.Object{},
+ Defs: map[*ast.Ident]types.Object{},
+ }
+ pkg, err := typechecker.Check("gorules", ctx.Fset, []*ast.File{f}, typesInfo)
+ if err != nil {
+ return nil, nil, fmt.Errorf("typechecker error: %w", err)
+ }
+ irconvCtx := &irconv.Context{
+ Pkg: pkg,
+ Types: typesInfo,
+ Fset: ctx.Fset,
+ Src: src,
+ }
+ irfile, err := irconv.ConvertFile(irconvCtx, f)
+ if err != nil {
+ return nil, nil, fmt.Errorf("irconv error: %w", err)
+ }
+ return irfile, pkg, nil
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go
new file mode 100644
index 000000000..646091fed
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go
@@ -0,0 +1,856 @@
+package irconv
+
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "path"
+ "strconv"
+ "strings"
+
+ "github.com/go-toolsmith/astcopy"
+ "golang.org/x/tools/go/ast/astutil"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/goutil"
+ "github.com/quasilyte/go-ruleguard/ruleguard/ir"
+)
+
+// Context carries the inputs needed to convert a type-checked rules
+// file into IR: the package, its type info, the file set, and the raw
+// source bytes (used to copy custom declarations verbatim).
+type Context struct {
+	Pkg   *types.Package
+	Types *types.Info
+	Fset  *token.FileSet
+	Src   []byte
+}
+
+// ConvertFile converts a parsed and type-checked rules file into *ir.File.
+//
+// The converter reports problems by panicking with a convError payload;
+// this wrapper recovers such panics and turns them into ordinary errors,
+// while re-raising any foreign panic unchanged.
+func ConvertFile(ctx *Context, f *ast.File) (result *ir.File, err error) {
+	defer func() {
+		if err != nil {
+			return
+		}
+		rv := recover()
+		if rv == nil {
+			return
+		}
+		if convErr, ok := rv.(convError); ok {
+			err = convErr.err
+			return
+		}
+		panic(rv) // not our panic
+	}()
+
+	conv := &converter{
+		types: ctx.Types,
+		pkg:   ctx.Pkg,
+		fset:  ctx.Fset,
+		src:   ctx.Src,
+	}
+	result = conv.ConvertFile(f)
+	return result, nil
+}
+
+// convError is the panic payload used by the converter to abort
+// conversion; ConvertFile recovers it and returns the wrapped error.
+type convError struct {
+	err error
+}
+
+// localMacroFunc is a group-local func literal (x := func(...) bool {...})
+// that is expanded inline, macro-style, where it is called in a filter.
+type localMacroFunc struct {
+	name     string
+	params   []string
+	template ast.Expr
+}
+
+// converter holds the state of a single file conversion.
+type converter struct {
+	types *types.Info
+	pkg   *types.Package
+	fset  *token.FileSet
+	src   []byte
+
+	group      *ir.RuleGroup    // rule group currently being converted
+	groupFuncs []localMacroFunc // macros defined inside the current group
+
+	dslPkgname string // The local name of the "ruleguard/dsl" package (usually it's just "dsl")
+}
+
+// errorf builds a convError whose message is prefixed with the
+// file:line position of node n.
+func (conv *converter) errorf(n ast.Node, format string, args ...interface{}) convError {
+	loc := conv.fset.Position(n.Pos())
+	msg := fmt.Sprintf(format, args...)
+	return convError{err: fmt.Errorf("%s:%d: %s", loc.Filename, loc.Line, msg)}
+}
+
+// ConvertFile walks the top-level declarations of f and produces the
+// IR file: imports are inspected for the dsl package alias and for
+// supported stdlib packages, init funcs are handled specially, matcher
+// funcs become rule groups, and everything else is copied as a custom
+// declaration.
+func (conv *converter) ConvertFile(f *ast.File) *ir.File {
+	result := &ir.File{
+		PkgPath: conv.pkg.Path(),
+	}
+
+	conv.dslPkgname = "dsl"
+
+	for _, imp := range f.Imports {
+		importPath, err := strconv.Unquote(imp.Path.Value)
+		if err != nil {
+			panic(conv.errorf(imp, "unquote %s import path: %s", imp.Path.Value, err))
+		}
+		if importPath == "github.com/quasilyte/go-ruleguard/dsl" {
+			if imp.Name != nil {
+				// Honor a renamed import like `ruleguard "…/dsl"`.
+				conv.dslPkgname = imp.Name.Name
+			}
+		}
+		// Right now this list is hardcoded from the knowledge of which
+		// stdlib packages are supported inside the bytecode.
+		switch importPath {
+		case "fmt", "strings", "strconv":
+			conv.addCustomImport(result, importPath)
+		}
+	}
+
+	for _, decl := range f.Decls {
+		funcDecl, ok := decl.(*ast.FuncDecl)
+		if !ok {
+			// Non-import GenDecls (consts, vars, types) are kept verbatim.
+			genDecl := decl.(*ast.GenDecl)
+			if genDecl.Tok != token.IMPORT {
+				conv.addCustomDecl(result, decl)
+			}
+			continue
+		}
+
+		if funcDecl.Name.String() == "init" {
+			conv.convertInitFunc(result, funcDecl)
+			continue
+		}
+
+		if conv.isMatcherFunc(funcDecl) {
+			result.RuleGroups = append(result.RuleGroups, *conv.convertRuleGroup(funcDecl))
+		} else {
+			conv.addCustomDecl(result, funcDecl)
+		}
+	}
+
+	return result
+}
+
+// convertInitFunc handles the rules file's init() function. Only
+// dsl.ImportRules(prefix, pkg.Bundle) statements are accepted; each one
+// is recorded as a bundle import on the IR file.
+func (conv *converter) convertInitFunc(dst *ir.File, decl *ast.FuncDecl) {
+	for _, stmt := range decl.Body.List {
+		exprStmt, ok := stmt.(*ast.ExprStmt)
+		if !ok {
+			panic(conv.errorf(stmt, "unsupported statement"))
+		}
+		call, ok := exprStmt.X.(*ast.CallExpr)
+		if !ok {
+			panic(conv.errorf(stmt, "unsupported expr"))
+		}
+		fn, ok := call.Fun.(*ast.SelectorExpr)
+		if !ok {
+			panic(conv.errorf(stmt, "unsupported call"))
+		}
+		// The callee must be a function from the dsl package.
+		pkg, ok := fn.X.(*ast.Ident)
+		if !ok || pkg.Name != conv.dslPkgname {
+			panic(conv.errorf(stmt, "unsupported call"))
+		}
+
+		switch fn.Sel.Name {
+		case "ImportRules":
+			prefix := conv.parseStringArg(call.Args[0])
+			bundleSelector, ok := call.Args[1].(*ast.SelectorExpr)
+			if !ok {
+				panic(conv.errorf(call.Args[1], "expected a `pkgname.Bundle` argument"))
+			}
+			bundleObj := conv.types.ObjectOf(bundleSelector.Sel)
+			dst.BundleImports = append(dst.BundleImports, ir.BundleImport{
+				Prefix:  prefix,
+				PkgPath: bundleObj.Pkg().Path(),
+				Line:    conv.fset.Position(exprStmt.Pos()).Line,
+			})
+
+		default:
+			panic(conv.errorf(stmt, "unsupported %s call", fn.Sel.Name))
+		}
+	}
+}
+
+// addCustomImport records an import clause that must be reproduced in
+// the generated custom-declarations section.
+func (conv *converter) addCustomImport(dst *ir.File, pkgPath string) {
+	dst.CustomDecls = append(dst.CustomDecls, `import "`+pkgPath+`"`)
+}
+
+// addCustomDecl copies a non-rule declaration verbatim from the raw
+// source text so it can be emitted alongside the compiled rules.
+func (conv *converter) addCustomDecl(dst *ir.File, decl ast.Decl) {
+	begin := conv.fset.Position(decl.Pos())
+	end := conv.fset.Position(decl.End())
+	src := conv.src[begin.Offset:end.Offset]
+	dst.CustomDecls = append(dst.CustomDecls, string(src))
+}
+
+// isMatcherFunc reports whether f defines a rule group: a function with
+// no results and exactly one dsl.Matcher parameter.
+func (conv *converter) isMatcherFunc(f *ast.FuncDecl) bool {
+	typ := conv.types.ObjectOf(f.Name).Type().(*types.Signature)
+	return typ.Results().Len() == 0 &&
+		typ.Params().Len() == 1 &&
+		typ.Params().At(0).Type().String() == "github.com/quasilyte/go-ruleguard/dsl.Matcher"
+}
+
+// convertRuleGroup converts one matcher function into an ir.RuleGroup:
+// doc pragmas come from the func doc comment, := statements define
+// group-local macros, Import() calls register package imports (and must
+// precede any rule), and every other matcher method call is a rule.
+func (conv *converter) convertRuleGroup(decl *ast.FuncDecl) *ir.RuleGroup {
+	result := &ir.RuleGroup{
+		Line: conv.fset.Position(decl.Name.Pos()).Line,
+	}
+	conv.group = result
+	conv.groupFuncs = conv.groupFuncs[:0] // reset macros from the previous group
+
+	result.Name = decl.Name.String()
+	result.MatcherName = decl.Type.Params.List[0].Names[0].String()
+
+	if decl.Doc != nil {
+		conv.convertDocComments(decl.Doc)
+	}
+
+	seenRules := false
+	for _, stmt := range decl.Body.List {
+		if assign, ok := stmt.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE {
+			conv.localDefine(assign)
+			continue
+		}
+
+		if _, ok := stmt.(*ast.DeclStmt); ok {
+			continue
+		}
+		stmtExpr, ok := stmt.(*ast.ExprStmt)
+		if !ok {
+			panic(conv.errorf(stmt, "expected a %s method call, found %s", result.MatcherName, goutil.SprintNode(conv.fset, stmt)))
+		}
+		call, ok := stmtExpr.X.(*ast.CallExpr)
+		if !ok {
+			panic(conv.errorf(stmt, "expected a %s method call, found %s", result.MatcherName, goutil.SprintNode(conv.fset, stmt)))
+		}
+
+		switch conv.matcherMethodName(call) {
+		case "Import":
+			if seenRules {
+				panic(conv.errorf(call, "Import() should be used before any rules definitions"))
+			}
+			conv.doMatcherImport(call)
+		default:
+			seenRules = true
+			conv.convertRuleExpr(call)
+		}
+	}
+
+	return result
+}
+
+// findLocalMacro returns the group-local macro whose name matches the
+// callee identifier of call, or nil when call is not a macro invocation.
+func (conv *converter) findLocalMacro(call *ast.CallExpr) *localMacroFunc {
+	fn, ok := call.Fun.(*ast.Ident)
+	if !ok {
+		return nil
+	}
+	for i := range conv.groupFuncs {
+		if conv.groupFuncs[i].name == fn.Name {
+			return &conv.groupFuncs[i]
+		}
+	}
+	return nil
+}
+
+// expandMacro substitutes the call arguments into the macro's template
+// expression and converts the expanded expression as a filter. Only
+// trivially-evaluated arguments (literals, identifiers, and m["x"]
+// style index expressions) are allowed, since arguments are spliced
+// textually rather than evaluated.
+func (conv *converter) expandMacro(macro *localMacroFunc, call *ast.CallExpr) ir.FilterExpr {
+	// Check that call args are OK.
+	// Since "function calls" are implemented as a macro expansion here,
+	// we don't allow arguments that have a non-trivial evaluation.
+	isSafe := func(arg ast.Expr) bool {
+		switch arg := astutil.Unparen(arg).(type) {
+		case *ast.BasicLit, *ast.Ident:
+			return true
+
+		case *ast.IndexExpr:
+			// Allow only `m["varname"]` where m is the matcher param.
+			mapIdent, ok := astutil.Unparen(arg.X).(*ast.Ident)
+			if !ok {
+				return false
+			}
+			if mapIdent.Name != conv.group.MatcherName {
+				return false
+			}
+			key, ok := astutil.Unparen(arg.Index).(*ast.BasicLit)
+			if !ok || key.Kind != token.STRING {
+				return false
+			}
+			return true
+
+		default:
+			return false
+		}
+	}
+	args := map[string]ast.Expr{}
+	for i, arg := range call.Args {
+		paramName := macro.params[i]
+		if !isSafe(arg) {
+			panic(conv.errorf(arg, "unsupported/too complex %s argument", paramName))
+		}
+		args[paramName] = astutil.Unparen(arg)
+	}
+
+	body := astcopy.Expr(macro.template)
+	expanded := astutil.Apply(body, nil, func(cur *astutil.Cursor) bool {
+		// Replace every parameter identifier with its argument expression.
+		if ident, ok := cur.Node().(*ast.Ident); ok {
+			arg, ok := args[ident.Name]
+			if ok {
+				cur.Replace(arg)
+				return true
+			}
+		}
+		// astcopy above will copy the AST tree, but it won't update
+		// the associated types.Info map of const values.
+		// We'll try to solve that issue at least partially here.
+		if lit, ok := cur.Node().(*ast.BasicLit); ok {
+			switch lit.Kind {
+			case token.STRING:
+				val, err := strconv.Unquote(lit.Value)
+				if err == nil {
+					conv.types.Types[lit] = types.TypeAndValue{
+						Type:  types.Typ[types.UntypedString],
+						Value: constant.MakeString(val),
+					}
+				}
+			case token.INT:
+				val, err := strconv.ParseInt(lit.Value, 0, 64)
+				if err == nil {
+					conv.types.Types[lit] = types.TypeAndValue{
+						Type:  types.Typ[types.UntypedInt],
+						Value: constant.MakeInt64(val),
+					}
+				}
+			case token.FLOAT:
+				val, err := strconv.ParseFloat(lit.Value, 64)
+				if err == nil {
+					conv.types.Types[lit] = types.TypeAndValue{
+						Type:  types.Typ[types.UntypedFloat],
+						Value: constant.MakeFloat64(val),
+					}
+				}
+			}
+		}
+		return true
+	})
+
+	return conv.convertFilterExpr(expanded.(ast.Expr))
+}
+
+// localDefine registers a group-local macro from a `name := func(...) bool
+// { return expr }` statement. Only single-assignment func literals with a
+// bool result and a single return statement are accepted.
+func (conv *converter) localDefine(assign *ast.AssignStmt) {
+	if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 {
+		panic(conv.errorf(assign, "multi-value := is not supported"))
+	}
+	lhs, ok := assign.Lhs[0].(*ast.Ident)
+	if !ok {
+		panic(conv.errorf(assign.Lhs[0], "only simple ident lhs is supported"))
+	}
+	rhs := assign.Rhs[0]
+	fn, ok := rhs.(*ast.FuncLit)
+	if !ok {
+		panic(conv.errorf(rhs, "only func literals are supported on the rhs"))
+	}
+	typ := conv.types.TypeOf(fn).(*types.Signature)
+	isBoolResult := typ.Results() != nil &&
+		typ.Results().Len() == 1 &&
+		typ.Results().At(0).Type() == types.Typ[types.Bool]
+	if !isBoolResult {
+		// Point the error at the result list when one exists.
+		var loc ast.Node = fn.Type
+		if fn.Type.Results != nil {
+			loc = fn.Type.Results
+		}
+		panic(conv.errorf(loc, "only funcs returning bool are supported"))
+	}
+	if len(fn.Body.List) != 1 {
+		panic(conv.errorf(fn.Body, "only simple 1 return statement funcs are supported"))
+	}
+	stmt, ok := fn.Body.List[0].(*ast.ReturnStmt)
+	if !ok {
+		panic(conv.errorf(fn.Body.List[0], "expected a return statement, found %T", fn.Body.List[0]))
+	}
+	var params []string
+	for _, field := range fn.Type.Params.List {
+		for _, id := range field.Names {
+			params = append(params, id.Name)
+		}
+	}
+	macro := localMacroFunc{
+		name:     lhs.Name,
+		params:   params,
+		template: stmt.Results[0],
+	}
+	conv.groupFuncs = append(conv.groupFuncs, macro)
+}
+
+// doMatcherImport records an m.Import("pkg/path") call on the current
+// group; the local package name is derived from the path's last element.
+func (conv *converter) doMatcherImport(call *ast.CallExpr) {
+	pkgPath := conv.parseStringArg(call.Args[0])
+	pkgName := path.Base(pkgPath)
+	conv.group.Imports = append(conv.group.Imports, ir.PackageImport{
+		Path: pkgPath,
+		Name: pkgName,
+	})
+}
+
+// matcherMethodName returns the method name when call has the form
+// `<matcher>.<Method>(...)` on the group's matcher variable, and ""
+// otherwise.
+func (conv *converter) matcherMethodName(call *ast.CallExpr) string {
+	selector, ok := call.Fun.(*ast.SelectorExpr)
+	if !ok {
+		return ""
+	}
+	id, ok := selector.X.(*ast.Ident)
+	if !ok || id.Name != conv.group.MatcherName {
+		return ""
+	}
+	return selector.Sel.Name
+}
+
+// convertDocComments parses the //doc: pragmas of a rule group doc
+// comment (tags, summary, before, after, note) and stores them on the
+// current group. Unknown pragmas are rejected.
+func (conv *converter) convertDocComments(comment *ast.CommentGroup) {
+	knownPragmas := []string{
+		"tags",
+		"summary",
+		"before",
+		"after",
+		"note",
+	}
+
+	for _, c := range comment.List {
+		if !strings.HasPrefix(c.Text, "//doc:") {
+			continue
+		}
+		s := strings.TrimPrefix(c.Text, "//doc:")
+		var pragma string
+		for i := range knownPragmas {
+			if strings.HasPrefix(s, knownPragmas[i]) {
+				pragma = knownPragmas[i]
+				break
+			}
+		}
+		if pragma == "" {
+			panic(conv.errorf(c, "unrecognized 'doc' pragma in comment"))
+		}
+		s = strings.TrimPrefix(s, pragma)
+		s = strings.TrimSpace(s)
+		switch pragma {
+		case "summary":
+			conv.group.DocSummary = s
+		case "before":
+			conv.group.DocBefore = s
+		case "after":
+			conv.group.DocAfter = s
+		case "note":
+			conv.group.DocNote = s
+		case "tags":
+			conv.group.DocTags = strings.Fields(s)
+		default:
+			panic("unhandled 'doc' pragma: " + pragma) // Should never happen
+		}
+	}
+}
+
+// convertRuleExpr compiles one rule written as a chain of matcher method
+// calls, e.g. m.Match(...).Where(...).Report(...), into an ir.Rule that
+// is appended to the current group. It validates the combination of
+// Match/MatchComment, Where, Suggest, Report, At, and Do calls.
+func (conv *converter) convertRuleExpr(call *ast.CallExpr) {
+	origCall := call
+	var (
+		matchArgs        *[]ast.Expr
+		matchCommentArgs *[]ast.Expr
+		whereArgs        *[]ast.Expr
+		suggestArgs      *[]ast.Expr
+		reportArgs       *[]ast.Expr
+		atArgs           *[]ast.Expr
+		doArgs           *[]ast.Expr
+	)
+
+	// Walk the method chain from its tail toward the head, recording the
+	// argument list of every recognized method.
+	for {
+		chain, ok := call.Fun.(*ast.SelectorExpr)
+		if !ok {
+			break
+		}
+		switch chain.Sel.Name {
+		case "Match":
+			if matchArgs != nil {
+				panic(conv.errorf(chain.Sel, "Match() can't be repeated"))
+			}
+			if matchCommentArgs != nil {
+				panic(conv.errorf(chain.Sel, "Match() and MatchComment() can't be combined"))
+			}
+			matchArgs = &call.Args
+		case "MatchComment":
+			if matchCommentArgs != nil {
+				panic(conv.errorf(chain.Sel, "MatchComment() can't be repeated"))
+			}
+			if matchArgs != nil {
+				panic(conv.errorf(chain.Sel, "Match() and MatchComment() can't be combined"))
+			}
+			matchCommentArgs = &call.Args
+		case "Where":
+			if whereArgs != nil {
+				panic(conv.errorf(chain.Sel, "Where() can't be repeated"))
+			}
+			whereArgs = &call.Args
+		case "Suggest":
+			if suggestArgs != nil {
+				panic(conv.errorf(chain.Sel, "Suggest() can't be repeated"))
+			}
+			suggestArgs = &call.Args
+		case "Report":
+			if reportArgs != nil {
+				panic(conv.errorf(chain.Sel, "Report() can't be repeated"))
+			}
+			reportArgs = &call.Args
+		case "Do":
+			doArgs = &call.Args
+		case "At":
+			if atArgs != nil {
+				panic(conv.errorf(chain.Sel, "At() can't be repeated"))
+			}
+			atArgs = &call.Args
+		default:
+			panic(conv.errorf(chain.Sel, "unexpected %s method", chain.Sel.Name))
+		}
+		call, ok = chain.X.(*ast.CallExpr)
+		if !ok {
+			break
+		}
+	}
+
+	// AST patterns for Match() or regexp patterns for MatchComment().
+	var alternatives []string
+	var alternativeLines []int
+
+	if matchArgs == nil && matchCommentArgs == nil {
+		panic(conv.errorf(origCall, "missing Match() or MatchComment() call"))
+	}
+
+	if matchArgs != nil {
+		for _, arg := range *matchArgs {
+			alternatives = append(alternatives, conv.parseStringArg(arg))
+			alternativeLines = append(alternativeLines, conv.fset.Position(arg.Pos()).Line)
+		}
+	} else {
+		for _, arg := range *matchCommentArgs {
+			alternatives = append(alternatives, conv.parseStringArg(arg))
+			alternativeLines = append(alternativeLines, conv.fset.Position(arg.Pos()).Line)
+		}
+	}
+
+	rule := ir.Rule{Line: conv.fset.Position(origCall.Pos()).Line}
+
+	if atArgs != nil {
+		// At(m["x"]) pins the report location to a named submatch.
+		index, ok := (*atArgs)[0].(*ast.IndexExpr)
+		if !ok {
+			panic(conv.errorf((*atArgs)[0], "expected %s[`varname`] expression", conv.group.MatcherName))
+		}
+		rule.LocationVar = conv.parseStringArg(index.Index)
+	}
+
+	if whereArgs != nil {
+		rule.WhereExpr = conv.convertFilterExpr((*whereArgs)[0])
+	}
+
+	if suggestArgs != nil {
+		rule.SuggestTemplate = conv.parseStringArg((*suggestArgs)[0])
+	}
+
+	if suggestArgs == nil && reportArgs == nil && doArgs == nil {
+		panic(conv.errorf(origCall, "missing Report(), Suggest() or Do() call"))
+	}
+	if doArgs != nil {
+		if suggestArgs != nil || reportArgs != nil {
+			panic(conv.errorf(origCall, "can't combine Report/Suggest with Do yet"))
+		}
+		if matchCommentArgs != nil {
+			panic(conv.errorf(origCall, "can't use Do() with MatchComment() yet"))
+		}
+		funcName, ok := (*doArgs)[0].(*ast.Ident)
+		if !ok {
+			panic(conv.errorf((*doArgs)[0], "only named function args are supported"))
+		}
+		rule.DoFuncName = funcName.String()
+	} else {
+		if reportArgs == nil {
+			// Suggest() without Report(): synthesize a report message.
+			rule.ReportTemplate = "suggestion: " + rule.SuggestTemplate
+		} else {
+			rule.ReportTemplate = conv.parseStringArg((*reportArgs)[0])
+		}
+	}
+
+	for i, alt := range alternatives {
+		pat := ir.PatternString{
+			Line:  alternativeLines[i],
+			Value: alt,
+		}
+		if matchArgs != nil {
+			rule.SyntaxPatterns = append(rule.SyntaxPatterns, pat)
+		} else {
+			rule.CommentPatterns = append(rule.CommentPatterns, pat)
+		}
+	}
+	conv.group.Rules = append(conv.group.Rules, rule)
+}
+
+// convertFilterExpr converts a Where() expression into an ir.FilterExpr,
+// attaching its source text and line, and rejects anything the impl did
+// not recognize (an invalid/zero FilterExpr).
+func (conv *converter) convertFilterExpr(e ast.Expr) ir.FilterExpr {
+	result := conv.convertFilterExprImpl(e)
+	result.Src = goutil.SprintNode(conv.fset, e)
+	result.Line = conv.fset.Position(e.Pos()).Line
+	if !result.IsValid() {
+		panic(conv.errorf(e, "unsupported expr: %s (%T)", result.Src, e))
+	}
+	return result
+}
+
+// convertFilterExprImpl is the recursive core of filter conversion.
+// Constant-folded strings/ints become literal ops; unary/binary
+// expressions map to boolean/comparison ops; selector and call forms
+// are matched by their dotted path (e.g. "Type.Is"); group-local macros
+// are expanded inline. An unrecognized expression yields the zero
+// (invalid) FilterExpr, which the caller rejects.
+func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr {
+	// Constants first: a type-checked constant string/int folds directly
+	// into a literal filter operand.
+	if cv := conv.types.Types[e].Value; cv != nil {
+		switch cv.Kind() {
+		case constant.String:
+			v := constant.StringVal(cv)
+			return ir.FilterExpr{Op: ir.FilterStringOp, Value: v}
+		case constant.Int:
+			v, ok := constant.Int64Val(cv)
+			if ok {
+				return ir.FilterExpr{Op: ir.FilterIntOp, Value: v}
+			}
+		}
+	}
+	convertExprList := func(list []ast.Expr) []ir.FilterExpr {
+		if len(list) == 0 {
+			return nil
+		}
+		result := make([]ir.FilterExpr, len(list))
+		for i, e := range list {
+			result[i] = conv.convertFilterExpr(e)
+		}
+		return result
+	}
+
+	switch e := e.(type) {
+	case *ast.ParenExpr:
+		return conv.convertFilterExpr(e.X)
+
+	case *ast.UnaryExpr:
+		x := conv.convertFilterExpr(e.X)
+		args := []ir.FilterExpr{x}
+		switch e.Op {
+		case token.NOT:
+			return ir.FilterExpr{Op: ir.FilterNotOp, Args: args}
+		}
+
+	case *ast.BinaryExpr:
+		x := conv.convertFilterExpr(e.X)
+		y := conv.convertFilterExpr(e.Y)
+		args := []ir.FilterExpr{x, y}
+		switch e.Op {
+		case token.LAND:
+			return ir.FilterExpr{Op: ir.FilterAndOp, Args: args}
+		case token.LOR:
+			return ir.FilterExpr{Op: ir.FilterOrOp, Args: args}
+		case token.NEQ:
+			return ir.FilterExpr{Op: ir.FilterNeqOp, Args: args}
+		case token.EQL:
+			return ir.FilterExpr{Op: ir.FilterEqOp, Args: args}
+		case token.GTR:
+			return ir.FilterExpr{Op: ir.FilterGtOp, Args: args}
+		case token.LSS:
+			return ir.FilterExpr{Op: ir.FilterLtOp, Args: args}
+		case token.GEQ:
+			return ir.FilterExpr{Op: ir.FilterGtEqOp, Args: args}
+		case token.LEQ:
+			return ir.FilterExpr{Op: ir.FilterLtEqOp, Args: args}
+		default:
+			panic(conv.errorf(e, "unexpected binary op: %s", e.Op.String()))
+		}
+
+	case *ast.SelectorExpr:
+		// Property-style accessors: m["x"].Text, m["x"].Pure, etc.
+		op := conv.inspectFilterSelector(e)
+		switch op.path {
+		case "Text":
+			return ir.FilterExpr{Op: ir.FilterVarTextOp, Value: op.varName}
+		case "Line":
+			return ir.FilterExpr{Op: ir.FilterVarLineOp, Value: op.varName}
+		case "Pure":
+			return ir.FilterExpr{Op: ir.FilterVarPureOp, Value: op.varName}
+		case "Const":
+			return ir.FilterExpr{Op: ir.FilterVarConstOp, Value: op.varName}
+		case "ConstSlice":
+			return ir.FilterExpr{Op: ir.FilterVarConstSliceOp, Value: op.varName}
+		case "Addressable":
+			return ir.FilterExpr{Op: ir.FilterVarAddressableOp, Value: op.varName}
+		case "Comparable":
+			return ir.FilterExpr{Op: ir.FilterVarComparableOp, Value: op.varName}
+		case "Type.Size":
+			return ir.FilterExpr{Op: ir.FilterVarTypeSizeOp, Value: op.varName}
+		}
+
+	case *ast.CallExpr:
+		op := conv.inspectFilterSelector(e)
+		switch op.path {
+		case "Deadcode":
+			return ir.FilterExpr{Op: ir.FilterDeadcodeOp}
+		case "GoVersion.Eq":
+			return ir.FilterExpr{Op: ir.FilterGoVersionEqOp, Value: conv.parseStringArg(e.Args[0])}
+		case "GoVersion.LessThan":
+			return ir.FilterExpr{Op: ir.FilterGoVersionLessThanOp, Value: conv.parseStringArg(e.Args[0])}
+		case "GoVersion.GreaterThan":
+			return ir.FilterExpr{Op: ir.FilterGoVersionGreaterThanOp, Value: conv.parseStringArg(e.Args[0])}
+		case "GoVersion.LessEqThan":
+			return ir.FilterExpr{Op: ir.FilterGoVersionLessEqThanOp, Value: conv.parseStringArg(e.Args[0])}
+		case "GoVersion.GreaterEqThan":
+			return ir.FilterExpr{Op: ir.FilterGoVersionGreaterEqThanOp, Value: conv.parseStringArg(e.Args[0])}
+		case "File.Imports":
+			return ir.FilterExpr{Op: ir.FilterFileImportsOp, Value: conv.parseStringArg(e.Args[0])}
+		case "File.PkgPath.Matches":
+			return ir.FilterExpr{Op: ir.FilterFilePkgPathMatchesOp, Value: conv.parseStringArg(e.Args[0])}
+		case "File.Name.Matches":
+			return ir.FilterExpr{Op: ir.FilterFileNameMatchesOp, Value: conv.parseStringArg(e.Args[0])}
+
+		case "Contains":
+			pat := conv.parseStringArg(e.Args[0])
+			return ir.FilterExpr{
+				Op:    ir.FilterVarContainsOp,
+				Value: op.varName,
+				Args: []ir.FilterExpr{
+					{Op: ir.FilterStringOp, Value: pat},
+				},
+			}
+
+		case "Type.IdenticalTo":
+			// TODO: reuse the code with parsing At() args?
+			index, ok := e.Args[0].(*ast.IndexExpr)
+			if !ok {
+				panic(conv.errorf(e.Args[0], "expected %s[`varname`] expression", conv.group.MatcherName))
+			}
+			rhsVarname := conv.parseStringArg(index.Index)
+			args := []ir.FilterExpr{
+				{Op: ir.FilterStringOp, Value: rhsVarname},
+			}
+			return ir.FilterExpr{Op: ir.FilterVarTypeIdenticalToOp, Value: op.varName, Args: args}
+
+		case "Filter":
+			funcName, ok := e.Args[0].(*ast.Ident)
+			if !ok {
+				panic(conv.errorf(e.Args[0], "only named function args are supported"))
+			}
+			args := []ir.FilterExpr{
+				{Op: ir.FilterFilterFuncRefOp, Value: funcName.String()},
+			}
+			return ir.FilterExpr{Op: ir.FilterVarFilterOp, Value: op.varName, Args: args}
+		}
+
+		// Not a built-in: try a group-local macro expansion.
+		if macro := conv.findLocalMacro(e); macro != nil {
+			return conv.expandMacro(macro, e)
+		}
+
+		// Remaining built-ins take converted filter expressions as args.
+		args := convertExprList(e.Args)
+		switch op.path {
+		case "Value.Int":
+			return ir.FilterExpr{Op: ir.FilterVarValueIntOp, Value: op.varName, Args: args}
+		case "Text.Matches":
+			return ir.FilterExpr{Op: ir.FilterVarTextMatchesOp, Value: op.varName, Args: args}
+		case "Node.Is":
+			return ir.FilterExpr{Op: ir.FilterVarNodeIsOp, Value: op.varName, Args: args}
+		case "Node.Parent.Is":
+			if op.varName != "$$" {
+				// TODO: remove this restriction.
+				panic(conv.errorf(e.Args[0], "only $$ parent nodes are implemented"))
+			}
+			return ir.FilterExpr{Op: ir.FilterRootNodeParentIsOp, Args: args}
+		case "Object.Is":
+			return ir.FilterExpr{Op: ir.FilterVarObjectIsOp, Value: op.varName, Args: args}
+		case "Object.IsGlobal":
+			return ir.FilterExpr{Op: ir.FilterVarObjectIsGlobalOp, Value: op.varName}
+		case "SinkType.Is":
+			if op.varName != "$$" {
+				// TODO: remove this restriction.
+				panic(conv.errorf(e.Args[0], "sink type is only implemented for $$ var"))
+			}
+			return ir.FilterExpr{Op: ir.FilterRootSinkTypeIsOp, Value: op.varName, Args: args}
+		case "Type.HasPointers":
+			return ir.FilterExpr{Op: ir.FilterVarTypeHasPointersOp, Value: op.varName}
+		case "Type.Is":
+			return ir.FilterExpr{Op: ir.FilterVarTypeIsOp, Value: op.varName, Args: args}
+		case "Type.Underlying.Is":
+			return ir.FilterExpr{Op: ir.FilterVarTypeUnderlyingIsOp, Value: op.varName, Args: args}
+		case "Type.OfKind":
+			return ir.FilterExpr{Op: ir.FilterVarTypeOfKindOp, Value: op.varName, Args: args}
+		case "Type.Underlying.OfKind":
+			return ir.FilterExpr{Op: ir.FilterVarTypeUnderlyingOfKindOp, Value: op.varName, Args: args}
+		case "Type.ConvertibleTo":
+			return ir.FilterExpr{Op: ir.FilterVarTypeConvertibleToOp, Value: op.varName, Args: args}
+		case "Type.AssignableTo":
+			return ir.FilterExpr{Op: ir.FilterVarTypeAssignableToOp, Value: op.varName, Args: args}
+		case "Type.Implements":
+			return ir.FilterExpr{Op: ir.FilterVarTypeImplementsOp, Value: op.varName, Args: args}
+		case "Type.HasMethod":
+			return ir.FilterExpr{Op: ir.FilterVarTypeHasMethodOp, Value: op.varName, Args: args}
+		}
+	}
+
+	return ir.FilterExpr{}
+}
+
+// parseStringArg extracts a compile-time string value from e, panicking
+// with a conversion error when e is not a string constant.
+func (conv *converter) parseStringArg(e ast.Expr) string {
+	s, ok := conv.toStringValue(e)
+	if !ok {
+		panic(conv.errorf(e, "expected a string literal argument"))
+	}
+	return s
+}
+
+// toStringValue tries to get a string value out of x: either directly
+// from a string basic literal, or from the type checker's constant info
+// for any expression of type string.
+func (conv *converter) toStringValue(x ast.Node) (string, bool) {
+	switch x := x.(type) {
+	case *ast.BasicLit:
+		if x.Kind != token.STRING {
+			return "", false
+		}
+		s, err := strconv.Unquote(x.Value)
+		if err != nil {
+			return "", false
+		}
+		return s, true
+	case ast.Expr:
+		typ, ok := conv.types.Types[x]
+		if !ok || typ.Type.String() != "string" {
+			return "", false
+		}
+		str := constant.StringVal(typ.Value)
+		return str, true
+	}
+	return "", false
+}
+
+// inspectFilterSelector decomposes a filter expression like
+// m["x"].Type.Is(...) into its parts: the dotted method path
+// ("Type.Is"), the matcher map name ("m"), the pattern variable name
+// ("x"), and the outermost call arguments.
+func (conv *converter) inspectFilterSelector(e ast.Expr) filterExprSelector {
+	var o filterExprSelector
+
+	if call, ok := e.(*ast.CallExpr); ok {
+		o.args = call.Args
+		e = call.Fun
+	}
+	// Accumulate selector names right-to-left into a dotted path.
+	var path string
+	for {
+		if call, ok := e.(*ast.CallExpr); ok {
+			e = call.Fun
+			continue
+		}
+		selector, ok := e.(*ast.SelectorExpr)
+		if !ok {
+			break
+		}
+		if path == "" {
+			path = selector.Sel.Name
+		} else {
+			path = selector.Sel.Name + "." + path
+		}
+		e = astutil.Unparen(selector.X)
+	}
+
+	o.path = path
+
+	// The receiver, if any, should be a m["varname"] indexing expression.
+	indexing, ok := astutil.Unparen(e).(*ast.IndexExpr)
+	if !ok {
+		return o
+	}
+	mapIdent, ok := astutil.Unparen(indexing.X).(*ast.Ident)
+	if !ok {
+		return o
+	}
+	o.mapName = mapIdent.Name
+	indexString, _ := conv.toStringValue(indexing.Index)
+	o.varName = indexString
+
+	return o
+}
+
+// filterExprSelector is the decomposed form of a filter expression,
+// produced by inspectFilterSelector.
+type filterExprSelector struct {
+	mapName string     // matcher map identifier, e.g. "m"
+	varName string     // pattern variable name from m["varName"]
+	path    string     // dotted method path, e.g. "Type.Underlying.Is"
+	args    []ast.Expr // outermost call arguments, if e was a call
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go
index ddd56cbe1..d0e161613 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go
@@ -1,6 +1,7 @@
package ruleguard
import (
+ "fmt"
"go/types"
"github.com/quasilyte/go-ruleguard/internal/xtypes"
@@ -28,10 +29,17 @@ import (
func initEnv(state *engineState, env *quasigo.Env) {
nativeTypes := map[string]quasigoNative{
+ `*github.com/quasilyte/go-ruleguard/dsl.MatchedText`: dslMatchedText{},
+ `*github.com/quasilyte/go-ruleguard/dsl.DoVar`: dslDoVar{},
+ `*github.com/quasilyte/go-ruleguard/dsl.DoContext`: dslDoContext{},
`*github.com/quasilyte/go-ruleguard/dsl.VarFilterContext`: dslVarFilterContext{state: state},
`github.com/quasilyte/go-ruleguard/dsl/types.Type`: dslTypesType{},
`*github.com/quasilyte/go-ruleguard/dsl/types.Interface`: dslTypesInterface{},
`*github.com/quasilyte/go-ruleguard/dsl/types.Pointer`: dslTypesPointer{},
+ `*github.com/quasilyte/go-ruleguard/dsl/types.Struct`: dslTypesStruct{},
+ `*github.com/quasilyte/go-ruleguard/dsl/types.Array`: dslTypesArray{},
+ `*github.com/quasilyte/go-ruleguard/dsl/types.Slice`: dslTypesSlice{},
+ `*github.com/quasilyte/go-ruleguard/dsl/types.Var`: dslTypesVar{},
}
for qualifier, typ := range nativeTypes {
@@ -89,6 +97,55 @@ func (dslTypesInterface) String(stack *quasigo.ValueStack) {
stack.Push(stack.Pop().(*types.Interface).String())
}
+// dslTypesSlice exposes *types.Slice methods to quasigo bytecode.
+type dslTypesSlice struct{}
+
+func (native dslTypesSlice) funcs() map[string]func(*quasigo.ValueStack) {
+	return map[string]func(*quasigo.ValueStack){
+		"Underlying": native.Underlying,
+		"String":     native.String,
+		"Elem":       native.Elem,
+	}
+}
+
+func (dslTypesSlice) Underlying(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Slice).Underlying())
+}
+
+func (dslTypesSlice) String(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Slice).String())
+}
+
+func (dslTypesSlice) Elem(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Slice).Elem())
+}
+
+// dslTypesArray exposes *types.Array methods to quasigo bytecode.
+type dslTypesArray struct{}
+
+func (native dslTypesArray) funcs() map[string]func(*quasigo.ValueStack) {
+	return map[string]func(*quasigo.ValueStack){
+		"Underlying": native.Underlying,
+		"String":     native.String,
+		"Elem":       native.Elem,
+		"Len":        native.Len,
+	}
+}
+
+func (dslTypesArray) Underlying(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Array).Underlying())
+}
+
+func (dslTypesArray) String(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Array).String())
+}
+
+func (dslTypesArray) Elem(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Array).Elem())
+}
+
+func (dslTypesArray) Len(stack *quasigo.ValueStack) {
+	// types.Array.Len is int64; quasigo stacks hold int.
+	stack.PushInt(int(stack.Pop().(*types.Array).Len()))
+}
+
type dslTypesPointer struct{}
func (native dslTypesPointer) funcs() map[string]func(*quasigo.ValueStack) {
@@ -111,15 +168,49 @@ func (dslTypesPointer) Elem(stack *quasigo.ValueStack) {
stack.Push(stack.Pop().(*types.Pointer).Elem())
}
+// dslTypesStruct exposes *types.Struct methods to quasigo bytecode.
+type dslTypesStruct struct{}
+
+func (native dslTypesStruct) funcs() map[string]func(*quasigo.ValueStack) {
+	return map[string]func(*quasigo.ValueStack){
+		"Underlying": native.Underlying,
+		"String":     native.String,
+		"NumFields":  native.NumFields,
+		"Field":      native.Field,
+	}
+}
+
+func (dslTypesStruct) Underlying(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Struct).Underlying())
+}
+
+func (dslTypesStruct) String(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Struct).String())
+}
+
+func (dslTypesStruct) NumFields(stack *quasigo.ValueStack) {
+	stack.PushInt(stack.Pop().(*types.Struct).NumFields())
+}
+
+func (dslTypesStruct) Field(stack *quasigo.ValueStack) {
+	// Args are popped in reverse order: index first, then the receiver.
+	i := stack.PopInt()
+	typ := stack.Pop().(*types.Struct)
+	stack.Push(typ.Field(i))
+}
+
type dslTypesPackage struct{}
func (native dslTypesPackage) funcs() map[string]func(*quasigo.ValueStack) {
return map[string]func(*quasigo.ValueStack){
"Implements": native.Implements,
"Identical": native.Identical,
+ "NewArray": native.NewArray,
+ "NewSlice": native.NewSlice,
"NewPointer": native.NewPointer,
+ "AsArray": native.AsArray,
+ "AsSlice": native.AsSlice,
"AsPointer": native.AsPointer,
"AsInterface": native.AsInterface,
+ "AsStruct": native.AsStruct,
}
}
@@ -135,11 +226,32 @@ func (dslTypesPackage) Identical(stack *quasigo.ValueStack) {
stack.Push(xtypes.Identical(x, y))
}
+// NewArray builds a types.Array from (elem type, length) stack args.
+func (dslTypesPackage) NewArray(stack *quasigo.ValueStack) {
+	// Args are popped in reverse order: length first, then the elem type.
+	length := stack.PopInt()
+	typ := stack.Pop().(types.Type)
+	stack.Push(types.NewArray(typ, int64(length)))
+}
+
+// NewSlice builds a types.Slice from an elem type stack arg.
+func (dslTypesPackage) NewSlice(stack *quasigo.ValueStack) {
+	typ := stack.Pop().(types.Type)
+	stack.Push(types.NewSlice(typ))
+}
+
func (dslTypesPackage) NewPointer(stack *quasigo.ValueStack) {
typ := stack.Pop().(types.Type)
stack.Push(types.NewPointer(typ))
}
+// AsArray downcasts a types.Type to *types.Array; pushes a typed nil on
+// failure (the comma-ok result is deliberately discarded).
+func (dslTypesPackage) AsArray(stack *quasigo.ValueStack) {
+	typ, _ := stack.Pop().(types.Type).(*types.Array)
+	stack.Push(typ)
+}
+
+// AsSlice downcasts a types.Type to *types.Slice; pushes a typed nil on
+// failure.
+func (dslTypesPackage) AsSlice(stack *quasigo.ValueStack) {
+	typ, _ := stack.Pop().(types.Type).(*types.Slice)
+	stack.Push(typ)
+}
+
func (dslTypesPackage) AsPointer(stack *quasigo.ValueStack) {
typ, _ := stack.Pop().(types.Type).(*types.Pointer)
stack.Push(typ)
@@ -150,6 +262,95 @@ func (dslTypesPackage) AsInterface(stack *quasigo.ValueStack) {
stack.Push(typ)
}
+// AsStruct downcasts a types.Type to *types.Struct; pushes a typed nil
+// on failure.
+func (dslTypesPackage) AsStruct(stack *quasigo.ValueStack) {
+	typ, _ := stack.Pop().(types.Type).(*types.Struct)
+	stack.Push(typ)
+}
+
+// dslTypesVar exposes *types.Var methods to quasigo bytecode.
+type dslTypesVar struct{}
+
+func (native dslTypesVar) funcs() map[string]func(*quasigo.ValueStack) {
+	return map[string]func(*quasigo.ValueStack){
+		"Embedded": native.Embedded,
+		"Type":     native.Type,
+	}
+}
+
+func (dslTypesVar) Embedded(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Var).Embedded())
+}
+
+func (dslTypesVar) Type(stack *quasigo.ValueStack) {
+	stack.Push(stack.Pop().(*types.Var).Type())
+}
+
+// dslDoContext exposes dsl.DoContext methods; the receiver on the stack
+// is the engine's *filterParams.
+type dslDoContext struct{}
+
+func (native dslDoContext) funcs() map[string]func(*quasigo.ValueStack) {
+	return map[string]func(*quasigo.ValueStack){
+		"SetReport":  native.SetReport,
+		"SetSuggest": native.SetSuggest,
+		"Var":        native.Var,
+	}
+}
+
+// Var resolves a named pattern variable into a dslDoVarRepr handle.
+func (native dslDoContext) Var(stack *quasigo.ValueStack) {
+	s := stack.Pop().(string)
+	params := stack.Pop().(*filterParams)
+	stack.Push(&dslDoVarRepr{params: params, name: s})
+}
+
+func (native dslDoContext) SetReport(stack *quasigo.ValueStack) {
+	s := stack.Pop().(string)
+	params := stack.Pop().(*filterParams)
+	params.reportString = s
+}
+
+func (native dslDoContext) SetSuggest(stack *quasigo.ValueStack) {
+	s := stack.Pop().(string)
+	params := stack.Pop().(*filterParams)
+	params.suggestString = s
+}
+
+type dslMatchedText struct{}
+
+func (native dslMatchedText) funcs() map[string]func(*quasigo.ValueStack) {
+	return map[string]func(*quasigo.ValueStack){
+		"String": native.String,
+	}
+}
+
+func (dslMatchedText) String(stack *quasigo.ValueStack) {
+	// NOTE(review): this prints the popped value's type to stdout and
+	// pushes the placeholder "ok2" — it looks like leftover debug code
+	// in the upstream vendored source; verify against upstream.
+	fmt.Printf("%T\n", stack.Pop())
+	stack.Push("ok2")
+}
+
+// dslDoVarRepr is the runtime handle behind dsl.DoVar: a named pattern
+// variable bound to the current match's filterParams.
+type dslDoVarRepr struct {
+	params *filterParams
+	name   string
+}
+
+// dslDoVar exposes dsl.DoVar methods to quasigo bytecode.
+type dslDoVar struct{}
+
+func (native dslDoVar) funcs() map[string]func(*quasigo.ValueStack) {
+	return map[string]func(*quasigo.ValueStack){
+		"Text": native.Text,
+		"Type": native.Type,
+	}
+}
+
+// Text pushes the source text of the node bound to the variable.
+func (dslDoVar) Text(stack *quasigo.ValueStack) {
+	v := stack.Pop().(*dslDoVarRepr)
+	params := v.params
+	stack.Push(params.nodeString(params.subNode(v.name)))
+}
+
+// Type pushes the type of the node bound to the variable.
+func (dslDoVar) Type(stack *quasigo.ValueStack) {
+	v := stack.Pop().(*dslDoVarRepr)
+	params := v.params
+	stack.Push(params.typeofNode(params.subNode(v.name)))
+}
+
type dslVarFilterContext struct {
state *engineState
}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go
new file mode 100644
index 000000000..3bf3bf5a8
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go
@@ -0,0 +1,46 @@
+package ruleguard
+
+import (
+ "go/ast"
+
+ "github.com/quasilyte/gogrep"
+)
+
+// matchData is used to handle both regexp and AST match sets in the same way.
+type matchData interface {
+ // TODO: don't use gogrep.CapturedNode type here.
+
+ Node() ast.Node
+ CaptureList() []gogrep.CapturedNode
+ CapturedByName(name string) (ast.Node, bool)
+}
+
+type commentMatchData struct {
+ node ast.Node
+ capture []gogrep.CapturedNode
+}
+
+func (m commentMatchData) Node() ast.Node { return m.node }
+
+func (m commentMatchData) CaptureList() []gogrep.CapturedNode { return m.capture }
+
+func (m commentMatchData) CapturedByName(name string) (ast.Node, bool) {
+ for _, c := range m.capture {
+ if c.Name == name {
+ return c.Node, true
+ }
+ }
+ return nil, false
+}
+
+type astMatchData struct {
+ match gogrep.MatchData
+}
+
+func (m astMatchData) Node() ast.Node { return m.match.Node }
+
+func (m astMatchData) CaptureList() []gogrep.CapturedNode { return m.match.Capture }
+
+func (m astMatchData) CapturedByName(name string) (ast.Node, bool) {
+ return m.match.CapturedByName(name)
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go
deleted file mode 100644
index 57d849b1a..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go
+++ /dev/null
@@ -1,273 +0,0 @@
-package ruleguard
-
-import (
- "go/ast"
-)
-
-type nodeCategory int
-
-const (
- nodeUnknown nodeCategory = iota
-
- nodeArrayType
- nodeAssignStmt
- nodeBasicLit
- nodeBinaryExpr
- nodeBlockStmt
- nodeBranchStmt
- nodeCallExpr
- nodeCaseClause
- nodeChanType
- nodeCommClause
- nodeCompositeLit
- nodeDeclStmt
- nodeDeferStmt
- nodeEllipsis
- nodeEmptyStmt
- nodeExprStmt
- nodeForStmt
- nodeFuncDecl
- nodeFuncLit
- nodeFuncType
- nodeGenDecl
- nodeGoStmt
- nodeIdent
- nodeIfStmt
- nodeImportSpec
- nodeIncDecStmt
- nodeIndexExpr
- nodeInterfaceType
- nodeKeyValueExpr
- nodeLabeledStmt
- nodeMapType
- nodeParenExpr
- nodeRangeStmt
- nodeReturnStmt
- nodeSelectStmt
- nodeSelectorExpr
- nodeSendStmt
- nodeSliceExpr
- nodeStarExpr
- nodeStructType
- nodeSwitchStmt
- nodeTypeAssertExpr
- nodeTypeSpec
- nodeTypeSwitchStmt
- nodeUnaryExpr
- nodeValueSpec
-
- nodeCategoriesCount
-
- // Categories below are not used inside scopedRuleSet yet
- // as categorizeNode will never produce them during the parsing.
- // They're required for Node.Is().
-
- nodeExpr // ast.Expr
- nodeStmt // ast.Stmt
-)
-
-func categorizeNode(n ast.Node) nodeCategory {
- switch n.(type) {
- case *ast.ArrayType:
- return nodeArrayType
- case *ast.AssignStmt:
- return nodeAssignStmt
- case *ast.BasicLit:
- return nodeBasicLit
- case *ast.BinaryExpr:
- return nodeBinaryExpr
- case *ast.BlockStmt:
- return nodeBlockStmt
- case *ast.BranchStmt:
- return nodeBranchStmt
- case *ast.CallExpr:
- return nodeCallExpr
- case *ast.CaseClause:
- return nodeCaseClause
- case *ast.ChanType:
- return nodeChanType
- case *ast.CommClause:
- return nodeCommClause
- case *ast.CompositeLit:
- return nodeCompositeLit
- case *ast.DeclStmt:
- return nodeDeclStmt
- case *ast.DeferStmt:
- return nodeDeferStmt
- case *ast.Ellipsis:
- return nodeEllipsis
- case *ast.EmptyStmt:
- return nodeEmptyStmt
- case *ast.ExprStmt:
- return nodeExprStmt
- case *ast.ForStmt:
- return nodeForStmt
- case *ast.FuncDecl:
- return nodeFuncDecl
- case *ast.FuncLit:
- return nodeFuncLit
- case *ast.FuncType:
- return nodeFuncType
- case *ast.GenDecl:
- return nodeGenDecl
- case *ast.GoStmt:
- return nodeGoStmt
- case *ast.Ident:
- return nodeIdent
- case *ast.IfStmt:
- return nodeIfStmt
- case *ast.ImportSpec:
- return nodeImportSpec
- case *ast.IncDecStmt:
- return nodeIncDecStmt
- case *ast.IndexExpr:
- return nodeIndexExpr
- case *ast.InterfaceType:
- return nodeInterfaceType
- case *ast.KeyValueExpr:
- return nodeKeyValueExpr
- case *ast.LabeledStmt:
- return nodeLabeledStmt
- case *ast.MapType:
- return nodeMapType
- case *ast.ParenExpr:
- return nodeParenExpr
- case *ast.RangeStmt:
- return nodeRangeStmt
- case *ast.ReturnStmt:
- return nodeReturnStmt
- case *ast.SelectStmt:
- return nodeSelectStmt
- case *ast.SelectorExpr:
- return nodeSelectorExpr
- case *ast.SendStmt:
- return nodeSendStmt
- case *ast.SliceExpr:
- return nodeSliceExpr
- case *ast.StarExpr:
- return nodeStarExpr
- case *ast.StructType:
- return nodeStructType
- case *ast.SwitchStmt:
- return nodeSwitchStmt
- case *ast.TypeAssertExpr:
- return nodeTypeAssertExpr
- case *ast.TypeSpec:
- return nodeTypeSpec
- case *ast.TypeSwitchStmt:
- return nodeTypeSwitchStmt
- case *ast.UnaryExpr:
- return nodeUnaryExpr
- case *ast.ValueSpec:
- return nodeValueSpec
- default:
- return nodeUnknown
- }
-}
-
-func categorizeNodeString(s string) nodeCategory {
- switch s {
- case "Expr":
- return nodeExpr
- case "Stmt":
- return nodeStmt
- }
-
- // Below is a switch from categorizeNode.
- switch s {
- case "ArrayType":
- return nodeArrayType
- case "AssignStmt":
- return nodeAssignStmt
- case "BasicLit":
- return nodeBasicLit
- case "BinaryExpr":
- return nodeBinaryExpr
- case "BlockStmt":
- return nodeBlockStmt
- case "BranchStmt":
- return nodeBranchStmt
- case "CallExpr":
- return nodeCallExpr
- case "CaseClause":
- return nodeCaseClause
- case "ChanType":
- return nodeChanType
- case "CommClause":
- return nodeCommClause
- case "CompositeLit":
- return nodeCompositeLit
- case "DeclStmt":
- return nodeDeclStmt
- case "DeferStmt":
- return nodeDeferStmt
- case "Ellipsis":
- return nodeEllipsis
- case "EmptyStmt":
- return nodeEmptyStmt
- case "ExprStmt":
- return nodeExprStmt
- case "ForStmt":
- return nodeForStmt
- case "FuncDecl":
- return nodeFuncDecl
- case "FuncLit":
- return nodeFuncLit
- case "FuncType":
- return nodeFuncType
- case "GenDecl":
- return nodeGenDecl
- case "GoStmt":
- return nodeGoStmt
- case "Ident":
- return nodeIdent
- case "IfStmt":
- return nodeIfStmt
- case "ImportSpec":
- return nodeImportSpec
- case "IncDecStmt":
- return nodeIncDecStmt
- case "IndexExpr":
- return nodeIndexExpr
- case "InterfaceType":
- return nodeInterfaceType
- case "KeyValueExpr":
- return nodeKeyValueExpr
- case "LabeledStmt":
- return nodeLabeledStmt
- case "MapType":
- return nodeMapType
- case "ParenExpr":
- return nodeParenExpr
- case "RangeStmt":
- return nodeRangeStmt
- case "ReturnStmt":
- return nodeReturnStmt
- case "SelectStmt":
- return nodeSelectStmt
- case "SelectorExpr":
- return nodeSelectorExpr
- case "SendStmt":
- return nodeSendStmt
- case "SliceExpr":
- return nodeSliceExpr
- case "StarExpr":
- return nodeStarExpr
- case "StructType":
- return nodeStructType
- case "SwitchStmt":
- return nodeSwitchStmt
- case "TypeAssertExpr":
- return nodeTypeAssertExpr
- case "TypeSpec":
- return nodeTypeSpec
- case "TypeSwitchStmt":
- return nodeTypeSwitchStmt
- case "UnaryExpr":
- return nodeUnaryExpr
- case "ValueSpec":
- return nodeValueSpec
- default:
- return nodeUnknown
- }
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go
new file mode 100644
index 000000000..b0f02f0aa
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go
@@ -0,0 +1,49 @@
+package ruleguard
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+)
+
+type nodePath struct {
+ stack []ast.Node
+}
+
+func newNodePath() nodePath {
+ return nodePath{stack: make([]ast.Node, 0, 32)}
+}
+
+func (p nodePath) String() string {
+ parts := make([]string, len(p.stack))
+ for i, n := range p.stack {
+ parts[i] = fmt.Sprintf("%T", n)
+ }
+ return strings.Join(parts, "/")
+}
+
+func (p nodePath) Parent() ast.Node {
+ return p.NthParent(1)
+}
+
+func (p nodePath) Current() ast.Node {
+ return p.NthParent(0)
+}
+
+func (p nodePath) NthParent(n int) ast.Node {
+ index := uint(len(p.stack) - n - 1)
+ if index < uint(len(p.stack)) {
+ return p.stack[index]
+ }
+ return nil
+}
+
+func (p *nodePath) Len() int { return len(p.stack) }
+
+func (p *nodePath) Push(n ast.Node) {
+ p.stack = append(p.stack, n)
+}
+
+func (p *nodePath) Pop() {
+ p.stack = p.stack[:len(p.stack)-1]
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go
deleted file mode 100644
index 89d2dc437..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go
+++ /dev/null
@@ -1,902 +0,0 @@
-package ruleguard
-
-import (
- "bytes"
- "fmt"
- "go/ast"
- "go/parser"
- "go/token"
- "go/types"
- "io"
- "io/ioutil"
- "path"
- "regexp"
- "strconv"
-
- "github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep"
- "github.com/quasilyte/go-ruleguard/ruleguard/goutil"
- "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
- "github.com/quasilyte/go-ruleguard/ruleguard/typematch"
-)
-
-// TODO(quasilyte): use source code byte slicing instead of SprintNode?
-
-type parseError string
-
-func (e parseError) Error() string { return string(e) }
-
-type rulesParser struct {
- state *engineState
- ctx *ParseContext
-
- prefix string // For imported packages, a prefix that is added to a rule group name
- importedPkg string // Package path; only for imported packages
-
- filename string
- group string
- res *goRuleSet
- pkg *types.Package
- types *types.Info
-
- importer *goImporter
-
- itab *typematch.ImportsTab
-
- imported []*goRuleSet
-
- dslPkgname string // The local name of the "ruleguard/dsl" package (usually its just "dsl")
-}
-
-type rulesParserConfig struct {
- state *engineState
-
- ctx *ParseContext
-
- importer *goImporter
-
- prefix string
- importedPkg string
-
- itab *typematch.ImportsTab
-}
-
-func newRulesParser(config rulesParserConfig) *rulesParser {
- return &rulesParser{
- state: config.state,
- ctx: config.ctx,
- importer: config.importer,
- prefix: config.prefix,
- importedPkg: config.importedPkg,
- itab: config.itab,
- }
-}
-
-func (p *rulesParser) ParseFile(filename string, r io.Reader) (*goRuleSet, error) {
- p.dslPkgname = "dsl"
- p.filename = filename
- p.res = &goRuleSet{
- universal: &scopedGoRuleSet{},
- groups: make(map[string]token.Position),
- }
-
- parserFlags := parser.Mode(0)
- f, err := parser.ParseFile(p.ctx.Fset, filename, r, parserFlags)
- if err != nil {
- return nil, fmt.Errorf("parse file error: %v", err)
- }
-
- for _, imp := range f.Imports {
- importPath, err := strconv.Unquote(imp.Path.Value)
- if err != nil {
- return nil, p.errorf(imp, "unquote %s import path: %v", imp.Path.Value, err)
- }
- if importPath == "github.com/quasilyte/go-ruleguard/dsl" {
- if imp.Name != nil {
- p.dslPkgname = imp.Name.Name
- }
- }
- }
-
- if f.Name.Name != "gorules" {
- return nil, fmt.Errorf("expected a gorules package name, found %s", f.Name.Name)
- }
-
- typechecker := types.Config{Importer: p.importer}
- p.types = &types.Info{
- Types: map[ast.Expr]types.TypeAndValue{},
- Uses: map[*ast.Ident]types.Object{},
- Defs: map[*ast.Ident]types.Object{},
- }
- pkg, err := typechecker.Check("gorules", p.ctx.Fset, []*ast.File{f}, p.types)
- if err != nil {
- return nil, fmt.Errorf("typechecker error: %v", err)
- }
- p.pkg = pkg
-
- var matcherFuncs []*ast.FuncDecl
- var userFuncs []*ast.FuncDecl
- for _, decl := range f.Decls {
- decl, ok := decl.(*ast.FuncDecl)
- if !ok {
- continue
- }
- if decl.Name.String() == "init" {
- if err := p.parseInitFunc(decl); err != nil {
- return nil, err
- }
- continue
- }
-
- if p.isMatcherFunc(decl) {
- matcherFuncs = append(matcherFuncs, decl)
- } else {
- userFuncs = append(userFuncs, decl)
- }
- }
-
- for _, decl := range userFuncs {
- if err := p.parseUserFunc(decl); err != nil {
- return nil, err
- }
- }
- for _, decl := range matcherFuncs {
- if err := p.parseRuleGroup(decl); err != nil {
- return nil, err
- }
- }
-
- if len(p.imported) != 0 {
- toMerge := []*goRuleSet{p.res}
- toMerge = append(toMerge, p.imported...)
- merged, err := mergeRuleSets(toMerge)
- if err != nil {
- return nil, err
- }
- p.res = merged
- }
-
- return p.res, nil
-}
-
-func (p *rulesParser) parseUserFunc(f *ast.FuncDecl) error {
- ctx := &quasigo.CompileContext{
- Env: p.state.env,
- Types: p.types,
- Fset: p.ctx.Fset,
- }
- compiled, err := quasigo.Compile(ctx, f)
- if err != nil {
- return err
- }
- if p.ctx.DebugFilter == f.Name.String() {
- p.ctx.DebugPrint(quasigo.Disasm(p.state.env, compiled))
- }
- ctx.Env.AddFunc(p.pkg.Path(), f.Name.String(), compiled)
- return nil
-}
-
-func (p *rulesParser) parseInitFunc(f *ast.FuncDecl) error {
- type bundleImport struct {
- node ast.Node
- prefix string
- pkgPath string
- }
-
- var imported []bundleImport
-
- for _, stmt := range f.Body.List {
- exprStmt, ok := stmt.(*ast.ExprStmt)
- if !ok {
- return p.errorf(stmt, "unsupported statement")
- }
- call, ok := exprStmt.X.(*ast.CallExpr)
- if !ok {
- return p.errorf(stmt, "unsupported expr")
- }
- fn, ok := call.Fun.(*ast.SelectorExpr)
- if !ok {
- return p.errorf(stmt, "unsupported call")
- }
- pkg, ok := fn.X.(*ast.Ident)
- if !ok || pkg.Name != p.dslPkgname {
- return p.errorf(stmt, "unsupported call")
- }
-
- switch fn.Sel.Name {
- case "ImportRules":
- if p.importedPkg != "" {
- return p.errorf(call, "imports from imported packages are not supported yet")
- }
- prefix := p.parseStringArg(call.Args[0])
- bundleSelector, ok := call.Args[1].(*ast.SelectorExpr)
- if !ok {
- return p.errorf(call.Args[1], "expected a `pkgname.Bundle` argument")
- }
- bundleObj := p.types.ObjectOf(bundleSelector.Sel)
- imported = append(imported, bundleImport{
- node: stmt,
- prefix: prefix,
- pkgPath: bundleObj.Pkg().Path(),
- })
-
- default:
- return p.errorf(stmt, "unsupported %s call", fn.Sel.Name)
- }
- }
-
- for _, imp := range imported {
- files, err := findBundleFiles(imp.pkgPath)
- if err != nil {
- return p.errorf(imp.node, "import lookup error: %v", err)
- }
- for _, filename := range files {
- rset, err := p.importRules(imp.prefix, imp.pkgPath, filename)
- if err != nil {
- return p.errorf(imp.node, "import parsing error: %v", err)
- }
- p.imported = append(p.imported, rset)
- }
- }
-
- return nil
-}
-
-func (p *rulesParser) importRules(prefix, pkgPath, filename string) (*goRuleSet, error) {
- data, err := ioutil.ReadFile(filename)
- if err != nil {
- return nil, err
- }
- config := rulesParserConfig{
- ctx: p.ctx,
- importer: p.importer,
- prefix: prefix,
- importedPkg: pkgPath,
- itab: p.itab,
- }
- rset, err := newRulesParser(config).ParseFile(filename, bytes.NewReader(data))
- if err != nil {
- return nil, fmt.Errorf("%s: %v", p.importedPkg, err)
- }
- return rset, nil
-}
-
-func (p *rulesParser) isMatcherFunc(f *ast.FuncDecl) bool {
- typ := p.types.ObjectOf(f.Name).Type().(*types.Signature)
- return typ.Results().Len() == 0 &&
- typ.Params().Len() == 1 &&
- typ.Params().At(0).Type().String() == "github.com/quasilyte/go-ruleguard/dsl.Matcher"
-}
-
-func (p *rulesParser) parseRuleGroup(f *ast.FuncDecl) (err error) {
- defer func() {
- rv := recover()
- if rv == nil {
- return
- }
- if parseErr, ok := rv.(parseError); ok {
- err = parseErr
- return
- }
- panic(rv) // not our panic
- }()
-
- if f.Name.String() == "_" {
- return p.errorf(f.Name, "`_` is not a valid rule group function name")
- }
- if f.Body == nil {
- return p.errorf(f, "unexpected empty function body")
- }
- params := f.Type.Params.List
- matcher := params[0].Names[0].Name
-
- p.group = f.Name.Name
- if p.prefix != "" {
- p.group = p.prefix + "/" + f.Name.Name
- }
-
- if p.ctx.GroupFilter != nil && !p.ctx.GroupFilter(p.group) {
- return nil // Skip this group
- }
- if _, ok := p.res.groups[p.group]; ok {
- panic(fmt.Sprintf("duplicated function %s after the typecheck", p.group)) // Should never happen
- }
- p.res.groups[p.group] = token.Position{
- Filename: p.filename,
- Line: p.ctx.Fset.Position(f.Name.Pos()).Line,
- }
-
- p.itab.EnterScope()
- defer p.itab.LeaveScope()
-
- for _, stmt := range f.Body.List {
- if _, ok := stmt.(*ast.DeclStmt); ok {
- continue
- }
- stmtExpr, ok := stmt.(*ast.ExprStmt)
- if !ok {
- return p.errorf(stmt, "expected a %s method call, found %s", matcher, goutil.SprintNode(p.ctx.Fset, stmt))
- }
- call, ok := stmtExpr.X.(*ast.CallExpr)
- if !ok {
- return p.errorf(stmt, "expected a %s method call, found %s", matcher, goutil.SprintNode(p.ctx.Fset, stmt))
- }
- if err := p.parseCall(matcher, call); err != nil {
- return err
- }
-
- }
-
- return nil
-}
-
-func (p *rulesParser) parseCall(matcher string, call *ast.CallExpr) error {
- f := call.Fun.(*ast.SelectorExpr)
- x, ok := f.X.(*ast.Ident)
- if ok && x.Name == matcher {
- return p.parseStmt(f.Sel, call.Args)
- }
-
- return p.parseRule(matcher, call)
-}
-
-func (p *rulesParser) parseStmt(fn *ast.Ident, args []ast.Expr) error {
- switch fn.Name {
- case "Import":
- pkgPath, ok := p.toStringValue(args[0])
- if !ok {
- return p.errorf(args[0], "expected a string literal argument")
- }
- pkgName := path.Base(pkgPath)
- p.itab.Load(pkgName, pkgPath)
- return nil
- default:
- return p.errorf(fn, "unexpected %s method", fn.Name)
- }
-}
-
-func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error {
- origCall := call
- var (
- matchArgs *[]ast.Expr
- whereArgs *[]ast.Expr
- suggestArgs *[]ast.Expr
- reportArgs *[]ast.Expr
- atArgs *[]ast.Expr
- )
- for {
- chain, ok := call.Fun.(*ast.SelectorExpr)
- if !ok {
- break
- }
- switch chain.Sel.Name {
- case "Match":
- if matchArgs != nil {
- return p.errorf(chain.Sel, "Match() can't be repeated")
- }
- matchArgs = &call.Args
- case "Where":
- if whereArgs != nil {
- return p.errorf(chain.Sel, "Where() can't be repeated")
- }
- whereArgs = &call.Args
- case "Suggest":
- if suggestArgs != nil {
- return p.errorf(chain.Sel, "Suggest() can't be repeated")
- }
- suggestArgs = &call.Args
- case "Report":
- if reportArgs != nil {
- return p.errorf(chain.Sel, "Report() can't be repeated")
- }
- reportArgs = &call.Args
- case "At":
- if atArgs != nil {
- return p.errorf(chain.Sel, "At() can't be repeated")
- }
- atArgs = &call.Args
- default:
- return p.errorf(chain.Sel, "unexpected %s method", chain.Sel.Name)
- }
- call, ok = chain.X.(*ast.CallExpr)
- if !ok {
- break
- }
- }
-
- dst := p.res.universal
- proto := goRule{
- filename: p.filename,
- line: p.ctx.Fset.Position(origCall.Pos()).Line,
- group: p.group,
- }
- var alternatives []string
-
- if matchArgs == nil {
- return p.errorf(origCall, "missing Match() call")
- }
- for _, arg := range *matchArgs {
- alternatives = append(alternatives, p.parseStringArg(arg))
- }
-
- if whereArgs != nil {
- proto.filter = p.parseFilter((*whereArgs)[0])
- }
-
- if suggestArgs != nil {
- proto.suggestion = p.parseStringArg((*suggestArgs)[0])
- }
-
- if reportArgs == nil {
- if suggestArgs == nil {
- return p.errorf(origCall, "missing Report() or Suggest() call")
- }
- proto.msg = "suggestion: " + proto.suggestion
- } else {
- proto.msg = p.parseStringArg((*reportArgs)[0])
- }
-
- if atArgs != nil {
- index, ok := (*atArgs)[0].(*ast.IndexExpr)
- if !ok {
- return p.errorf((*atArgs)[0], "expected %s[`varname`] expression", matcher)
- }
- arg, ok := p.toStringValue(index.Index)
- if !ok {
- return p.errorf(index.Index, "expected a string literal index")
- }
- proto.location = arg
- }
-
- for i, alt := range alternatives {
- rule := proto
- pat, err := gogrep.Parse(p.ctx.Fset, alt)
- if err != nil {
- return p.errorf((*matchArgs)[i], "parse match pattern: %v", err)
- }
- rule.pat = pat
- cat := categorizeNode(pat.Expr)
- if cat == nodeUnknown {
- dst.uncategorized = append(dst.uncategorized, rule)
- } else {
- dst.categorizedNum++
- dst.rulesByCategory[cat] = append(dst.rulesByCategory[cat], rule)
- }
- }
-
- return nil
-}
-
-func (p *rulesParser) parseFilter(root ast.Expr) matchFilter {
- return p.parseFilterExpr(root)
-}
-
-func (p *rulesParser) errorf(n ast.Node, format string, args ...interface{}) parseError {
- loc := p.ctx.Fset.Position(n.Pos())
- message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...))
- return parseError(message)
-}
-
-func (p *rulesParser) parseStringArg(e ast.Expr) string {
- s, ok := p.toStringValue(e)
- if !ok {
- panic(p.errorf(e, "expected a string literal argument"))
- }
- return s
-}
-
-func (p *rulesParser) parseRegexpArg(e ast.Expr) *regexp.Regexp {
- patternString, ok := p.toStringValue(e)
- if !ok {
- panic(p.errorf(e, "expected a regexp pattern argument"))
- }
- re, err := regexp.Compile(patternString)
- if err != nil {
- panic(p.errorf(e, err.Error()))
- }
- return re
-}
-
-func (p *rulesParser) parseTypeStringArg(e ast.Expr) types.Type {
- typeString, ok := p.toStringValue(e)
- if !ok {
- panic(p.errorf(e, "expected a type string argument"))
- }
- typ, err := typeFromString(typeString)
- if err != nil {
- panic(p.errorf(e, "parse type expr: %v", err))
- }
- if typ == nil {
- panic(p.errorf(e, "can't convert %s into a type constraint yet", typeString))
- }
- return typ
-}
-
-func (p *rulesParser) parseFilterExpr(e ast.Expr) matchFilter {
- result := matchFilter{src: goutil.SprintNode(p.ctx.Fset, e)}
-
- switch e := e.(type) {
- case *ast.ParenExpr:
- return p.parseFilterExpr(e.X)
-
- case *ast.UnaryExpr:
- x := p.parseFilterExpr(e.X)
- if e.Op == token.NOT {
- result.fn = makeNotFilter(result.src, x)
- return result
- }
- panic(p.errorf(e, "unsupported unary op: %s", result.src))
-
- case *ast.BinaryExpr:
- switch e.Op {
- case token.LAND:
- result.fn = makeAndFilter(p.parseFilterExpr(e.X), p.parseFilterExpr(e.Y))
- return result
- case token.LOR:
- result.fn = makeOrFilter(p.parseFilterExpr(e.X), p.parseFilterExpr(e.Y))
- return result
- case token.GEQ, token.LEQ, token.LSS, token.GTR, token.EQL, token.NEQ:
- operand := p.toFilterOperand(e.X)
- rhs := p.toFilterOperand(e.Y)
- rhsValue := p.types.Types[e.Y].Value
- if operand.path == "Type.Size" && rhsValue != nil {
- result.fn = makeTypeSizeConstFilter(result.src, operand.varName, e.Op, rhsValue)
- return result
- }
- if operand.path == "Value.Int" && rhsValue != nil {
- result.fn = makeValueIntConstFilter(result.src, operand.varName, e.Op, rhsValue)
- return result
- }
- if operand.path == "Value.Int" && rhs.path == "Value.Int" && rhs.varName != "" {
- result.fn = makeValueIntFilter(result.src, operand.varName, e.Op, rhs.varName)
- return result
- }
- if operand.path == "Text" && rhsValue != nil {
- result.fn = makeTextConstFilter(result.src, operand.varName, e.Op, rhsValue)
- return result
- }
- if operand.path == "Text" && rhs.path == "Text" && rhs.varName != "" {
- result.fn = makeTextFilter(result.src, operand.varName, e.Op, rhs.varName)
- return result
- }
- }
- panic(p.errorf(e, "unsupported binary op: %s", result.src))
- }
-
- operand := p.toFilterOperand(e)
- args := operand.args
- switch operand.path {
- default:
- panic(p.errorf(e, "unsupported expr: %s", result.src))
-
- case "File.Imports":
- pkgPath := p.parseStringArg(args[0])
- result.fn = makeFileImportsFilter(result.src, pkgPath)
-
- case "File.PkgPath.Matches":
- re := p.parseRegexpArg(args[0])
- result.fn = makeFilePkgPathMatchesFilter(result.src, re)
-
- case "File.Name.Matches":
- re := p.parseRegexpArg(args[0])
- result.fn = makeFileNameMatchesFilter(result.src, re)
-
- case "Pure":
- result.fn = makePureFilter(result.src, operand.varName)
-
- case "Const":
- result.fn = makeConstFilter(result.src, operand.varName)
-
- case "Addressable":
- result.fn = makeAddressableFilter(result.src, operand.varName)
-
- case "Filter":
- expr, fn := goutil.ResolveFunc(p.types, args[0])
- if expr != nil {
- panic(p.errorf(expr, "expected a simple function name, found expression"))
- }
- sig := fn.Type().(*types.Signature)
- userFn := p.state.env.GetFunc(fn.Pkg().Path(), fn.Name())
- if userFn == nil {
- panic(p.errorf(args[0], "can't find a compiled version of %s", sig.String()))
- }
- result.fn = makeCustomVarFilter(result.src, operand.varName, userFn)
-
- case "Type.Is", "Type.Underlying.Is":
- typeString, ok := p.toStringValue(args[0])
- if !ok {
- panic(p.errorf(args[0], "expected a string literal argument"))
- }
- ctx := typematch.Context{Itab: p.itab}
- pat, err := typematch.Parse(&ctx, typeString)
- if err != nil {
- panic(p.errorf(args[0], "parse type expr: %v", err))
- }
- underlying := operand.path == "Type.Underlying.Is"
- result.fn = makeTypeIsFilter(result.src, operand.varName, underlying, pat)
-
- case "Type.ConvertibleTo":
- dstType := p.parseTypeStringArg(args[0])
- result.fn = makeTypeConvertibleToFilter(result.src, operand.varName, dstType)
-
- case "Type.AssignableTo":
- dstType := p.parseTypeStringArg(args[0])
- result.fn = makeTypeAssignableToFilter(result.src, operand.varName, dstType)
-
- case "Type.Implements":
- typeString, ok := p.toStringValue(args[0])
- if !ok {
- panic(p.errorf(args[0], "expected a string literal argument"))
- }
- n, err := parser.ParseExpr(typeString)
- if err != nil {
- panic(p.errorf(args[0], "parse type expr: %v", err))
- }
- var iface *types.Interface
- switch n := n.(type) {
- case *ast.Ident:
- if n.Name != `error` {
- panic(p.errorf(n, "only `error` unqualified type is recognized"))
- }
- iface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
- case *ast.SelectorExpr:
- pkgName, ok := n.X.(*ast.Ident)
- if !ok {
- panic(p.errorf(n.X, "invalid package name"))
- }
- pkgPath, ok := p.itab.Lookup(pkgName.Name)
- if !ok {
- panic(p.errorf(n.X, "package %s is not imported", pkgName.Name))
- }
- pkg, err := p.importer.Import(pkgPath)
- if err != nil {
- panic(p.errorf(n, "can't load %s: %v", pkgPath, err))
- }
- obj := pkg.Scope().Lookup(n.Sel.Name)
- if obj == nil {
- panic(p.errorf(n, "%s is not found in %s", n.Sel.Name, pkgPath))
- }
- iface, ok = obj.Type().Underlying().(*types.Interface)
- if !ok {
- panic(p.errorf(n, "%s is not an interface type", n.Sel.Name))
- }
- default:
- panic(p.errorf(args[0], "only qualified names (and `error`) are supported"))
- }
- result.fn = makeTypeImplementsFilter(result.src, operand.varName, iface)
-
- case "Text.Matches":
- re := p.parseRegexpArg(args[0])
- result.fn = makeTextMatchesFilter(result.src, operand.varName, re)
-
- case "Node.Is":
- typeString, ok := p.toStringValue(args[0])
- if !ok {
- panic(p.errorf(args[0], "expected a string literal argument"))
- }
- cat := categorizeNodeString(typeString)
- if cat == nodeUnknown {
- panic(p.errorf(args[0], "%s is not a valid go/ast type name", typeString))
- }
- result.fn = makeNodeIsFilter(result.src, operand.varName, cat)
- }
-
- if result.fn == nil {
- panic("bug: nil func for the filter") // Should never happen
- }
- return result
-}
-
-func (p *rulesParser) toStringValue(x ast.Node) (string, bool) {
- switch x := x.(type) {
- case *ast.BasicLit:
- if x.Kind != token.STRING {
- return "", false
- }
- s, err := strconv.Unquote(x.Value)
- if err != nil {
- return "", false
- }
- return s, true
- case ast.Expr:
- typ, ok := p.types.Types[x]
- if !ok || typ.Type.String() != "string" {
- return "", false
- }
- str := typ.Value.ExactString()
- str = str[1 : len(str)-1] // remove quotes
- return str, true
- }
- return "", false
-}
-
-func (p *rulesParser) toFilterOperand(e ast.Expr) filterOperand {
- var o filterOperand
-
- if call, ok := e.(*ast.CallExpr); ok {
- o.args = call.Args
- e = call.Fun
- }
- var path string
- for {
- if call, ok := e.(*ast.CallExpr); ok {
- e = call.Fun
- continue
- }
- selector, ok := e.(*ast.SelectorExpr)
- if !ok {
- break
- }
- if path == "" {
- path = selector.Sel.Name
- } else {
- path = selector.Sel.Name + "." + path
- }
- e = selector.X
- }
-
- o.path = path
-
- indexing, ok := e.(*ast.IndexExpr)
- if !ok {
- return o
- }
- mapIdent, ok := indexing.X.(*ast.Ident)
- if !ok {
- return o
- }
- o.mapName = mapIdent.Name
- indexString, _ := p.toStringValue(indexing.Index)
- o.varName = indexString
-
- return o
-}
-
-type filterOperand struct {
- mapName string
- varName string
- path string
- args []ast.Expr
-}
-
-var stdlibPackages = map[string]string{
- "adler32": "hash/adler32",
- "aes": "crypto/aes",
- "ascii85": "encoding/ascii85",
- "asn1": "encoding/asn1",
- "ast": "go/ast",
- "atomic": "sync/atomic",
- "base32": "encoding/base32",
- "base64": "encoding/base64",
- "big": "math/big",
- "binary": "encoding/binary",
- "bits": "math/bits",
- "bufio": "bufio",
- "build": "go/build",
- "bytes": "bytes",
- "bzip2": "compress/bzip2",
- "cgi": "net/http/cgi",
- "cgo": "runtime/cgo",
- "cipher": "crypto/cipher",
- "cmplx": "math/cmplx",
- "color": "image/color",
- "constant": "go/constant",
- "context": "context",
- "cookiejar": "net/http/cookiejar",
- "crc32": "hash/crc32",
- "crc64": "hash/crc64",
- "crypto": "crypto",
- "csv": "encoding/csv",
- "debug": "runtime/debug",
- "des": "crypto/des",
- "doc": "go/doc",
- "draw": "image/draw",
- "driver": "database/sql/driver",
- "dsa": "crypto/dsa",
- "dwarf": "debug/dwarf",
- "ecdsa": "crypto/ecdsa",
- "ed25519": "crypto/ed25519",
- "elf": "debug/elf",
- "elliptic": "crypto/elliptic",
- "encoding": "encoding",
- "errors": "errors",
- "exec": "os/exec",
- "expvar": "expvar",
- "fcgi": "net/http/fcgi",
- "filepath": "path/filepath",
- "flag": "flag",
- "flate": "compress/flate",
- "fmt": "fmt",
- "fnv": "hash/fnv",
- "format": "go/format",
- "gif": "image/gif",
- "gob": "encoding/gob",
- "gosym": "debug/gosym",
- "gzip": "compress/gzip",
- "hash": "hash",
- "heap": "container/heap",
- "hex": "encoding/hex",
- "hmac": "crypto/hmac",
- "html": "html",
- "http": "net/http",
- "httptest": "net/http/httptest",
- "httptrace": "net/http/httptrace",
- "httputil": "net/http/httputil",
- "image": "image",
- "importer": "go/importer",
- "io": "io",
- "iotest": "testing/iotest",
- "ioutil": "io/ioutil",
- "jpeg": "image/jpeg",
- "json": "encoding/json",
- "jsonrpc": "net/rpc/jsonrpc",
- "list": "container/list",
- "log": "log",
- "lzw": "compress/lzw",
- "macho": "debug/macho",
- "mail": "net/mail",
- "math": "math",
- "md5": "crypto/md5",
- "mime": "mime",
- "multipart": "mime/multipart",
- "net": "net",
- "os": "os",
- "palette": "image/color/palette",
- "parse": "text/template/parse",
- "parser": "go/parser",
- "path": "path",
- "pe": "debug/pe",
- "pem": "encoding/pem",
- "pkix": "crypto/x509/pkix",
- "plan9obj": "debug/plan9obj",
- "plugin": "plugin",
- "png": "image/png",
- "pprof": "runtime/pprof",
- "printer": "go/printer",
- "quick": "testing/quick",
- "quotedprintable": "mime/quotedprintable",
- "race": "runtime/race",
- "rand": "math/rand",
- "rc4": "crypto/rc4",
- "reflect": "reflect",
- "regexp": "regexp",
- "ring": "container/ring",
- "rpc": "net/rpc",
- "rsa": "crypto/rsa",
- "runtime": "runtime",
- "scanner": "text/scanner",
- "sha1": "crypto/sha1",
- "sha256": "crypto/sha256",
- "sha512": "crypto/sha512",
- "signal": "os/signal",
- "smtp": "net/smtp",
- "sort": "sort",
- "sql": "database/sql",
- "strconv": "strconv",
- "strings": "strings",
- "subtle": "crypto/subtle",
- "suffixarray": "index/suffixarray",
- "sync": "sync",
- "syntax": "regexp/syntax",
- "syscall": "syscall",
- "syslog": "log/syslog",
- "tabwriter": "text/tabwriter",
- "tar": "archive/tar",
- "template": "text/template",
- "testing": "testing",
- "textproto": "net/textproto",
- "time": "time",
- "tls": "crypto/tls",
- "token": "go/token",
- "trace": "runtime/trace",
- "types": "go/types",
- "unicode": "unicode",
- "unsafe": "unsafe",
- "url": "net/url",
- "user": "os/user",
- "utf16": "unicode/utf16",
- "utf8": "unicode/utf8",
- "x509": "crypto/x509",
- "xml": "encoding/xml",
- "zip": "archive/zip",
- "zlib": "compress/zlib",
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go
new file mode 100644
index 000000000..c5b26e230
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/no_labels.go
@@ -0,0 +1,16 @@
+//go:build !pproflabels
+// +build !pproflabels
+
+package profiling
+
+import (
+ "context"
+)
+
+const LabelsEnabled = false
+
+func EnterWithLabels(origContext context.Context, name string) {
+}
+
+func Leave(origContext context.Context) {
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go
new file mode 100644
index 000000000..6a35a13ad
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/profiling/with_labels.go
@@ -0,0 +1,21 @@
+//go:build pproflabels
+// +build pproflabels
+
+package profiling
+
+import (
+ "context"
+ "runtime/pprof"
+)
+
+const LabelsEnabled = true
+
+func EnterWithLabels(origContext context.Context, name string) {
+ labels := pprof.Labels("rules", name)
+ ctx := pprof.WithLabels(origContext, labels)
+ pprof.SetGoroutineLabels(ctx)
+}
+
+func Leave(origContext context.Context) {
+ pprof.SetGoroutineLabels(origContext)
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
index fa28732d5..b81fb8f1d 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
@@ -11,8 +11,13 @@ import (
"golang.org/x/tools/go/ast/astutil"
)
+var voidType = &types.Tuple{}
+
func compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) {
defer func() {
+ if err != nil {
+ return
+ }
rv := recover()
if rv == nil {
return
@@ -28,24 +33,20 @@ func compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error)
}
func compileFunc(ctx *CompileContext, fn *ast.FuncDecl) *Func {
- fnType := ctx.Types.ObjectOf(fn.Name).Type().(*types.Signature)
- if fnType.Results().Len() != 1 {
- panic(compileError("only functions with a single non-void results are supported"))
- }
-
cl := compiler{
ctx: ctx,
- retType: fnType.Results().At(0).Type(),
+ fnType: ctx.Types.ObjectOf(fn.Name).Type().(*types.Signature),
constantsPool: make(map[interface{}]int),
intConstantsPool: make(map[int]int),
locals: make(map[string]int),
}
- return cl.compileFunc(fnType, fn)
+ return cl.compileFunc(fn)
}
type compiler struct {
ctx *CompileContext
+ fnType *types.Signature
retType types.Type
lastOp opcode
@@ -53,7 +54,9 @@ type compiler struct {
locals map[string]int
constantsPool map[interface{}]int
intConstantsPool map[int]int
- params map[string]int
+
+ params map[string]int
+ intParams map[string]int
code []byte
constants []interface{}
@@ -74,32 +77,59 @@ type compileError string
func (e compileError) Error() string { return string(e) }
-func (cl *compiler) compileFunc(fnType *types.Signature, fn *ast.FuncDecl) *Func {
- if !cl.isSupportedType(cl.retType) {
- panic(cl.errorUnsupportedType(fn.Name, cl.retType, "function result"))
+func (cl *compiler) compileFunc(fn *ast.FuncDecl) *Func {
+ switch cl.fnType.Results().Len() {
+ case 0:
+ cl.retType = voidType
+ case 1:
+ cl.retType = cl.fnType.Results().At(0).Type()
+ default:
+ panic(cl.errorf(fn.Name, "multi-result functions are not supported"))
}
- dbg := funcDebugInfo{
- paramNames: make([]string, fnType.Params().Len()),
+ if !cl.isSupportedType(cl.retType) {
+ panic(cl.errorUnsupportedType(fn.Name, cl.retType, "function result"))
}
- cl.params = make(map[string]int, fnType.Params().Len())
- for i := 0; i < fnType.Params().Len(); i++ {
- p := fnType.Params().At(i)
+ cl.params = make(map[string]int, cl.fnType.Params().Len())
+ cl.intParams = make(map[string]int, cl.fnType.Params().Len())
+ for i := 0; i < cl.fnType.Params().Len(); i++ {
+ p := cl.fnType.Params().At(i)
paramName := p.Name()
paramType := p.Type()
- cl.params[paramName] = i
- dbg.paramNames[i] = paramName
if !cl.isSupportedType(paramType) {
panic(cl.errorUnsupportedType(fn.Name, paramType, paramName+" param"))
}
+ if typeIsInt(paramType) {
+ cl.intParams[paramName] = len(cl.intParams)
+ } else {
+ cl.params[paramName] = len(cl.params)
+ }
+ }
+
+ dbg := funcDebugInfo{
+ paramNames: make([]string, len(cl.params)),
+ intParamNames: make([]string, len(cl.intParams)),
+ }
+ for paramName, i := range cl.params {
+ dbg.paramNames[i] = paramName
+ }
+ for paramName, i := range cl.intParams {
+ dbg.intParamNames[i] = paramName
}
cl.compileStmt(fn.Body)
+ if cl.retType == voidType {
+ cl.emit(opReturn)
+ }
+
compiled := &Func{
- code: cl.code,
- constants: cl.constants,
- intConstants: cl.intConstants,
+ code: cl.code,
+ constants: cl.constants,
+ intConstants: cl.intConstants,
+ numObjectParams: len(cl.params),
+ numIntParams: len(cl.intParams),
+ name: cl.ctx.Package.Path() + "." + fn.Name.String(),
}
if len(cl.locals) != 0 {
dbg.localNames = make([]string, len(cl.locals))
@@ -132,6 +162,9 @@ func (cl *compiler) compileStmt(stmt ast.Stmt) {
case *ast.BranchStmt:
cl.compileBranchStmt(stmt)
+ case *ast.ExprStmt:
+ cl.compileExprStmt(stmt)
+
case *ast.BlockStmt:
for i := range stmt.List {
cl.compileStmt(stmt.List[i])
@@ -168,6 +201,19 @@ func (cl *compiler) compileBranchStmt(branch *ast.BranchStmt) {
}
}
+func (cl *compiler) compileExprStmt(stmt *ast.ExprStmt) {
+ if call, ok := stmt.X.(*ast.CallExpr); ok {
+ sig := cl.ctx.Types.TypeOf(call.Fun).(*types.Signature)
+ if sig.Results() != nil {
+ panic(cl.errorf(call, "only void funcs can be used in stmt context"))
+ }
+ cl.compileCallExpr(call)
+ return
+ }
+
+ panic(cl.errorf(stmt.X, "can't compile this expr stmt yet: %T", stmt.X))
+}
+
func (cl *compiler) compileForStmt(stmt *ast.ForStmt) {
labelBreak := cl.newLabel()
labelContinue := cl.newLabel()
@@ -228,45 +274,60 @@ func (cl *compiler) compileIfStmt(stmt *ast.IfStmt) {
}
func (cl *compiler) compileAssignStmt(assign *ast.AssignStmt) {
- if len(assign.Lhs) != 1 {
- panic(cl.errorf(assign, "only single left operand is allowed in assignments"))
- }
if len(assign.Rhs) != 1 {
panic(cl.errorf(assign, "only single right operand is allowed in assignments"))
}
- lhs := assign.Lhs[0]
- rhs := assign.Rhs[0]
- varname, ok := lhs.(*ast.Ident)
- if !ok {
- panic(cl.errorf(lhs, "can assign only to simple variables"))
+ for _, lhs := range assign.Lhs {
+ _, ok := lhs.(*ast.Ident)
+ if !ok {
+ panic(cl.errorf(lhs, "can assign only to simple variables"))
+ }
}
+ rhs := assign.Rhs[0]
cl.compileExpr(rhs)
- typ := cl.ctx.Types.TypeOf(varname)
if assign.Tok == token.DEFINE {
- if _, ok := cl.locals[varname.String()]; ok {
- panic(cl.errorf(lhs, "%s variable shadowing is not allowed", varname))
- }
- if !cl.isSupportedType(typ) {
- panic(cl.errorUnsupportedType(varname, typ, varname.String()+" local variable"))
+ for i := len(assign.Lhs) - 1; i >= 0; i-- {
+ varname := assign.Lhs[i].(*ast.Ident)
+ typ := cl.ctx.Types.TypeOf(varname)
+ if _, ok := cl.locals[varname.String()]; ok {
+ panic(cl.errorf(varname, "%s variable shadowing is not allowed", varname))
+ }
+ if !cl.isSupportedType(typ) {
+ panic(cl.errorUnsupportedType(varname, typ, varname.String()+" local variable"))
+ }
+ if len(cl.locals) == maxFuncLocals {
+ panic(cl.errorf(varname, "can't define %s: too many locals", varname))
+ }
+ id := len(cl.locals)
+ cl.locals[varname.String()] = id
+ cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id)
}
- if len(cl.locals) == maxFuncLocals {
- panic(cl.errorf(lhs, "can't define %s: too many locals", varname))
- }
- id := len(cl.locals)
- cl.locals[varname.String()] = id
- cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id)
} else {
- id := cl.getLocal(varname, varname.String())
- cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id)
+ for i := len(assign.Lhs) - 1; i >= 0; i-- {
+ varname := assign.Lhs[i].(*ast.Ident)
+ typ := cl.ctx.Types.TypeOf(varname)
+ id := cl.getLocal(varname, varname.String())
+ cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id)
+ }
}
}
+func (cl *compiler) isParamName(varname string) bool {
+ if _, ok := cl.params[varname]; ok {
+ return true
+ }
+ if _, ok := cl.intParams[varname]; ok {
+ return true
+ }
+ return false
+}
+
func (cl *compiler) getLocal(v ast.Expr, varname string) int {
id, ok := cl.locals[varname]
if !ok {
- if _, ok := cl.params[varname]; ok {
+ if cl.isParamName(varname) {
panic(cl.errorf(v, "can't assign to %s, params are readonly", varname))
}
panic(cl.errorf(v, "%s is not a writeable local variable", varname))
@@ -275,6 +336,11 @@ func (cl *compiler) getLocal(v ast.Expr, varname string) int {
}
func (cl *compiler) compileReturnStmt(ret *ast.ReturnStmt) {
+ if cl.retType == voidType {
+ cl.emit(opReturn)
+ return
+ }
+
if ret.Results == nil {
panic(cl.errorf(ret, "'naked' return statements are not allowed"))
}
@@ -467,6 +533,20 @@ func (cl *compiler) compileBuiltinCall(fn *ast.Ident, call *ast.CallExpr) {
panic(cl.errorf(s, "can't compile len() with non-string argument yet"))
}
cl.emit(opStringLen)
+
+ case `println`:
+ if len(call.Args) != 1 {
+ panic(cl.errorf(call, "only 1-arg form of println() is supported"))
+ }
+ funcName := "Print"
+ if typeIsInt(cl.ctx.Types.TypeOf(call.Args[0])) {
+ funcName = "PrintInt"
+ }
+ key := funcKey{qualifier: "builtin", name: funcName}
+ if !cl.compileNativeCall(key, 0, nil, call.Args) {
+ panic(cl.errorf(fn, "builtin.%s native func is not registered", funcName))
+ }
+
default:
panic(cl.errorf(fn, "can't compile %s() builtin function call yet", fn))
}
@@ -494,19 +574,96 @@ func (cl *compiler) compileCallExpr(call *ast.CallExpr) {
} else {
key.qualifier = fn.Pkg().Path()
}
+ variadic := 0
+ if sig.Variadic() {
+ variadic = sig.Params().Len() - 1
+ }
+ if expr != nil {
+ cl.compileExpr(expr)
+ }
+ if cl.compileNativeCall(key, variadic, expr, call.Args) {
+ return
+ }
+ if cl.compileCall(key, sig, call.Args) {
+ return
+ }
+ panic(cl.errorf(call.Fun, "can't compile a call to %s func", key))
+}
- if funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]; ok {
- if expr != nil {
- cl.compileExpr(expr)
+func (cl *compiler) compileCall(key funcKey, sig *types.Signature, args []ast.Expr) bool {
+ if sig.Variadic() {
+ return false
+ }
+
+ funcID, ok := cl.ctx.Env.nameToFuncID[key]
+ if !ok {
+ return false
+ }
+
+ for _, arg := range args {
+ cl.compileExpr(arg)
+ }
+
+ var op opcode
+ if sig.Results().Len() == 0 {
+ op = opVoidCall
+ } else if typeIsInt(sig.Results().At(0).Type()) {
+ op = opIntCall
+ } else {
+ op = opCall
+ }
+
+ cl.emit16(op, int(funcID))
+ return true
+}
+
+func (cl *compiler) compileNativeCall(key funcKey, variadic int, funcExpr ast.Expr, args []ast.Expr) bool {
+ funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]
+ if !ok {
+ return false
+ }
+
+ if len(args) == 1 {
+ // Check that it's not a f(g()) call, where g() returns
+ // a multi-value result; we can't compile that yet.
+ if call, ok := args[0].(*ast.CallExpr); ok {
+ results := cl.ctx.Types.TypeOf(call.Fun).(*types.Signature).Results()
+ if results != nil && results.Len() > 1 {
+ panic(cl.errorf(args[0], "can't pass tuple as a func argument"))
+ }
}
- for _, arg := range call.Args {
+ }
+
+ normalArgs := args
+ var variadicArgs []ast.Expr
+ if variadic != 0 {
+ normalArgs = args[:variadic]
+ variadicArgs = args[variadic:]
+ }
+
+ for _, arg := range normalArgs {
+ cl.compileExpr(arg)
+ }
+ if variadic != 0 {
+ for _, arg := range variadicArgs {
cl.compileExpr(arg)
+ // int-typed values should appear in the interface{}-typed
+ // objects slice, so we get all variadic args placed in one place.
+ if typeIsInt(cl.ctx.Types.TypeOf(arg)) {
+ cl.emit(opConvIntToIface)
+ }
}
- cl.emit16(opCallNative, int(funcID))
- return
+ if len(variadicArgs) > 255 {
+ panic(cl.errorf(funcExpr, "too many variadic args"))
+ }
+ // Even if len(variadicArgs) is 0, we still need to overwrite
+ // the old variadicLen value, so the variadic func is not confused
+ // by some unrelated value.
+ cl.emit8(opSetVariadicLen, len(variadicArgs))
}
- panic(cl.errorf(call.Fun, "can't compile a call to %s func", key))
+ cl.emit16(opCallNative, int(funcID))
+ return true
}
func (cl *compiler) compileUnaryOp(op opcode, e *ast.UnaryExpr) {
@@ -546,7 +703,11 @@ func (cl *compiler) compileIdent(ident *ast.Ident) {
return
}
if paramIndex, ok := cl.params[ident.String()]; ok {
- cl.emit8(pickOp(typeIsInt(tv.Type), opPushIntParam, opPushParam), paramIndex)
+ cl.emit8(opPushParam, paramIndex)
+ return
+ }
+ if paramIndex, ok := cl.intParams[ident.String()]; ok {
+ cl.emit8(opPushIntParam, paramIndex)
return
}
if localIndex, ok := cl.locals[ident.String()]; ok {
@@ -677,6 +838,10 @@ func (cl *compiler) isUncondJump(op opcode) bool {
}
func (cl *compiler) isSupportedType(typ types.Type) bool {
+ if typ == voidType {
+ return true
+ }
+
switch typ := typ.Underlying().(type) {
case *types.Pointer:
// 1. Pointers to structs are supported.
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go
index e42bbb76a..057c02bc1 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go
@@ -5,8 +5,9 @@ type debugInfo struct {
}
type funcDebugInfo struct {
- paramNames []string
- localNames []string
+ paramNames []string
+ intParamNames []string
+ localNames []string
}
func newDebugInfo() *debugInfo {
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go
index 192cf0710..cafc9ed5e 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go
@@ -39,14 +39,24 @@ func disasm(env *Env, fn *Func) string {
id := decode16(code, pc+1)
arg = id
comment = env.nativeFuncs[id].name
- case opPushParam, opPushIntParam:
+ case opCall, opIntCall, opVoidCall:
+ id := decode16(code, pc+1)
+ arg = id
+ comment = env.userFuncs[id].name
+ case opPushParam:
index := int(code[pc+1])
arg = index
comment = dbg.paramNames[index]
+ case opPushIntParam:
+ index := int(code[pc+1])
+ arg = index
+ comment = dbg.intParamNames[index]
case opSetLocal, opSetIntLocal, opPushLocal, opPushIntLocal, opIncLocal, opDecLocal:
index := int(code[pc+1])
arg = index
comment = dbg.localNames[index]
+ case opSetVariadicLen:
+ arg = int(code[pc+1])
case opPushConst:
arg = int(code[pc+1])
comment = fmt.Sprintf("value=%#v", fn.constants[code[pc+1]])
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go
index afc000ea3..311da15ad 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go
@@ -38,22 +38,22 @@ func (s *ValueStack) dup() { s.objects = append(s.objects, s.objects[len(s.objec
// Identical to s.Pop() without using the result.
func (s *ValueStack) discard() { s.objects = s.objects[:len(s.objects)-1] }
-func eval(env *EvalEnv, fn *Func, args []interface{}) CallResult {
+func eval(env *EvalEnv, fn *Func, top, intTop int) CallResult {
pc := 0
code := fn.code
- stack := env.stack
+ stack := &env.Stack
var locals [maxFuncLocals]interface{}
var intLocals [maxFuncLocals]int
for {
switch op := opcode(code[pc]); op {
case opPushParam:
- index := code[pc+1]
- stack.Push(args[index])
+ index := int(code[pc+1])
+ stack.Push(stack.objects[top+index])
pc += 2
case opPushIntParam:
- index := code[pc+1]
- stack.PushInt(args[index].(int))
+ index := int(code[pc+1])
+ stack.PushInt(stack.ints[intTop+index])
pc += 2
case opPushLocal:
@@ -99,6 +99,10 @@ func eval(env *EvalEnv, fn *Func, args []interface{}) CallResult {
stack.PushInt(fn.intConstants[id])
pc += 2
+ case opConvIntToIface:
+ stack.Push(stack.PopInt())
+ pc++
+
case opPushTrue:
stack.Push(true)
pc++
@@ -114,12 +118,34 @@ func eval(env *EvalEnv, fn *Func, args []interface{}) CallResult {
return CallResult{value: stack.top()}
case opReturnIntTop:
return CallResult{scalarValue: uint64(stack.topInt())}
+ case opReturn:
+ return CallResult{}
+ case opSetVariadicLen:
+ stack.variadicLen = int(code[pc+1])
+ pc += 2
case opCallNative:
id := decode16(code, pc+1)
fn := env.nativeFuncs[id].mappedFunc
fn(stack)
pc += 3
+ case opCall:
+ id := decode16(code, pc+1)
+ fn := env.userFuncs[id]
+ result := eval(env, fn, len(stack.objects)-fn.numObjectParams, len(stack.ints)-fn.numIntParams)
+ stack.Push(result.Value())
+ pc += 3
+ case opIntCall:
+ id := decode16(code, pc+1)
+ fn := env.userFuncs[id]
+ result := eval(env, fn, len(stack.objects)-fn.numObjectParams, len(stack.ints)-fn.numIntParams)
+ stack.PushInt(result.IntValue())
+ pc += 3
+ case opVoidCall:
+ id := decode16(code, pc+1)
+ fn := env.userFuncs[id]
+ eval(env, fn, len(stack.objects)-fn.numObjectParams, len(stack.ints)-fn.numIntParams)
+ pc += 3
case opJump:
offset := decode16(code, pc+1)
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go
index fde48b7cd..c8d512038 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go
@@ -1,3 +1,4 @@
+//go:build main
// +build main
package main
@@ -25,6 +26,8 @@ var opcodePrototypes = []opcodeProto{
{"PushConst", "op constid:u8", "() -> (const)"},
{"PushIntConst", "op constid:u8", "() -> (const:int)"},
+ {"ConvIntToIface", "op", "(value:int) -> (value)"},
+
{"SetLocal", "op index:u8", "(value) -> ()"},
{"SetIntLocal", "op index:u8", "(value:int) -> ()"},
{"IncLocal", "op index:u8", stackUnchanged},
@@ -34,18 +37,23 @@ var opcodePrototypes = []opcodeProto{
{"ReturnIntTop", "op", "(value) -> (value)"},
{"ReturnFalse", "op", stackUnchanged},
{"ReturnTrue", "op", stackUnchanged},
+ {"Return", "op", stackUnchanged},
{"Jump", "op offset:i16", stackUnchanged},
{"JumpFalse", "op offset:i16", "(cond:bool) -> ()"},
{"JumpTrue", "op offset:i16", "(cond:bool) -> ()"},
+ {"SetVariadicLen", "op len:u8", stackUnchanged},
{"CallNative", "op funcid:u16", "(args...) -> (results...)"},
+ {"Call", "op funcid:u16", "(args...) -> (result)"},
+ {"IntCall", "op funcid:u16", "(args...) -> (result:int)"},
+ {"VoidCall", "op funcid:u16", "(args...) -> ()"},
{"IsNil", "op", "(value) -> (result:bool)"},
{"IsNotNil", "op", "(value) -> (result:bool)"},
{"Not", "op", "(value:bool) -> (result:bool)"},
-
+
{"EqInt", "op", "(x:int y:int) -> (result:bool)"},
{"NotEqInt", "op", "(x:int y:int) -> (result:bool)"},
{"GtInt", "op", "(x:int y:int) -> (result:bool)"},
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go
index 27dfc1f67..3136214bb 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go
@@ -19,41 +19,47 @@ func _() {
_ = x[opPushTrue-8]
_ = x[opPushConst-9]
_ = x[opPushIntConst-10]
- _ = x[opSetLocal-11]
- _ = x[opSetIntLocal-12]
- _ = x[opIncLocal-13]
- _ = x[opDecLocal-14]
- _ = x[opReturnTop-15]
- _ = x[opReturnIntTop-16]
- _ = x[opReturnFalse-17]
- _ = x[opReturnTrue-18]
- _ = x[opJump-19]
- _ = x[opJumpFalse-20]
- _ = x[opJumpTrue-21]
- _ = x[opCallNative-22]
- _ = x[opIsNil-23]
- _ = x[opIsNotNil-24]
- _ = x[opNot-25]
- _ = x[opEqInt-26]
- _ = x[opNotEqInt-27]
- _ = x[opGtInt-28]
- _ = x[opGtEqInt-29]
- _ = x[opLtInt-30]
- _ = x[opLtEqInt-31]
- _ = x[opEqString-32]
- _ = x[opNotEqString-33]
- _ = x[opConcat-34]
- _ = x[opAdd-35]
- _ = x[opSub-36]
- _ = x[opStringSlice-37]
- _ = x[opStringSliceFrom-38]
- _ = x[opStringSliceTo-39]
- _ = x[opStringLen-40]
+ _ = x[opConvIntToIface-11]
+ _ = x[opSetLocal-12]
+ _ = x[opSetIntLocal-13]
+ _ = x[opIncLocal-14]
+ _ = x[opDecLocal-15]
+ _ = x[opReturnTop-16]
+ _ = x[opReturnIntTop-17]
+ _ = x[opReturnFalse-18]
+ _ = x[opReturnTrue-19]
+ _ = x[opReturn-20]
+ _ = x[opJump-21]
+ _ = x[opJumpFalse-22]
+ _ = x[opJumpTrue-23]
+ _ = x[opSetVariadicLen-24]
+ _ = x[opCallNative-25]
+ _ = x[opCall-26]
+ _ = x[opIntCall-27]
+ _ = x[opVoidCall-28]
+ _ = x[opIsNil-29]
+ _ = x[opIsNotNil-30]
+ _ = x[opNot-31]
+ _ = x[opEqInt-32]
+ _ = x[opNotEqInt-33]
+ _ = x[opGtInt-34]
+ _ = x[opGtEqInt-35]
+ _ = x[opLtInt-36]
+ _ = x[opLtEqInt-37]
+ _ = x[opEqString-38]
+ _ = x[opNotEqString-39]
+ _ = x[opConcat-40]
+ _ = x[opAdd-41]
+ _ = x[opSub-42]
+ _ = x[opStringSlice-43]
+ _ = x[opStringSliceFrom-44]
+ _ = x[opStringSliceTo-45]
+ _ = x[opStringLen-46]
}
-const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueJumpJumpFalseJumpTrueCallNativeIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen"
+const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstConvIntToIfaceSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueReturnJumpJumpFalseJumpTrueSetVariadicLenCallNativeCallIntCallVoidCallIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen"
-var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 101, 112, 120, 128, 137, 149, 160, 170, 174, 183, 191, 201, 206, 214, 217, 222, 230, 235, 242, 247, 254, 262, 273, 279, 282, 285, 296, 311, 324, 333}
+var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 107, 115, 126, 134, 142, 151, 163, 174, 184, 190, 194, 203, 211, 225, 235, 239, 246, 254, 259, 267, 270, 275, 283, 288, 295, 300, 307, 315, 326, 332, 335, 338, 349, 364, 377, 386}
func (i opcode) String() string {
if i >= opcode(len(_opcode_index)-1) {
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go
index 268b42a1e..a3ec270d4 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go
@@ -48,125 +48,149 @@ const (
// Stack effect: () -> (const:int)
opPushIntConst opcode = 10
- // Encoding: 0x0b index:u8 (width=2)
- // Stack effect: (value) -> ()
- opSetLocal opcode = 11
+ // Encoding: 0x0b (width=1)
+ // Stack effect: (value:int) -> (value)
+ opConvIntToIface opcode = 11
// Encoding: 0x0c index:u8 (width=2)
- // Stack effect: (value:int) -> ()
- opSetIntLocal opcode = 12
+ // Stack effect: (value) -> ()
+ opSetLocal opcode = 12
// Encoding: 0x0d index:u8 (width=2)
- // Stack effect: unchanged
- opIncLocal opcode = 13
+ // Stack effect: (value:int) -> ()
+ opSetIntLocal opcode = 13
// Encoding: 0x0e index:u8 (width=2)
// Stack effect: unchanged
- opDecLocal opcode = 14
+ opIncLocal opcode = 14
- // Encoding: 0x0f (width=1)
- // Stack effect: (value) -> (value)
- opReturnTop opcode = 15
+ // Encoding: 0x0f index:u8 (width=2)
+ // Stack effect: unchanged
+ opDecLocal opcode = 15
// Encoding: 0x10 (width=1)
// Stack effect: (value) -> (value)
- opReturnIntTop opcode = 16
+ opReturnTop opcode = 16
// Encoding: 0x11 (width=1)
- // Stack effect: unchanged
- opReturnFalse opcode = 17
+ // Stack effect: (value) -> (value)
+ opReturnIntTop opcode = 17
// Encoding: 0x12 (width=1)
// Stack effect: unchanged
- opReturnTrue opcode = 18
+ opReturnFalse opcode = 18
- // Encoding: 0x13 offset:i16 (width=3)
+ // Encoding: 0x13 (width=1)
// Stack effect: unchanged
- opJump opcode = 19
+ opReturnTrue opcode = 19
- // Encoding: 0x14 offset:i16 (width=3)
- // Stack effect: (cond:bool) -> ()
- opJumpFalse opcode = 20
+ // Encoding: 0x14 (width=1)
+ // Stack effect: unchanged
+ opReturn opcode = 20
// Encoding: 0x15 offset:i16 (width=3)
+ // Stack effect: unchanged
+ opJump opcode = 21
+
+ // Encoding: 0x16 offset:i16 (width=3)
+ // Stack effect: (cond:bool) -> ()
+ opJumpFalse opcode = 22
+
+ // Encoding: 0x17 offset:i16 (width=3)
// Stack effect: (cond:bool) -> ()
- opJumpTrue opcode = 21
+ opJumpTrue opcode = 23
+
+ // Encoding: 0x18 len:u8 (width=2)
+ // Stack effect: unchanged
+ opSetVariadicLen opcode = 24
- // Encoding: 0x16 funcid:u16 (width=3)
+ // Encoding: 0x19 funcid:u16 (width=3)
// Stack effect: (args...) -> (results...)
- opCallNative opcode = 22
+ opCallNative opcode = 25
- // Encoding: 0x17 (width=1)
+ // Encoding: 0x1a funcid:u16 (width=3)
+ // Stack effect: (args...) -> (result)
+ opCall opcode = 26
+
+ // Encoding: 0x1b funcid:u16 (width=3)
+ // Stack effect: (args...) -> (result:int)
+ opIntCall opcode = 27
+
+ // Encoding: 0x1c funcid:u16 (width=3)
+ // Stack effect: (args...) -> ()
+ opVoidCall opcode = 28
+
+ // Encoding: 0x1d (width=1)
// Stack effect: (value) -> (result:bool)
- opIsNil opcode = 23
+ opIsNil opcode = 29
- // Encoding: 0x18 (width=1)
+ // Encoding: 0x1e (width=1)
// Stack effect: (value) -> (result:bool)
- opIsNotNil opcode = 24
+ opIsNotNil opcode = 30
- // Encoding: 0x19 (width=1)
+ // Encoding: 0x1f (width=1)
// Stack effect: (value:bool) -> (result:bool)
- opNot opcode = 25
+ opNot opcode = 31
- // Encoding: 0x1a (width=1)
+ // Encoding: 0x20 (width=1)
// Stack effect: (x:int y:int) -> (result:bool)
- opEqInt opcode = 26
+ opEqInt opcode = 32
- // Encoding: 0x1b (width=1)
+ // Encoding: 0x21 (width=1)
// Stack effect: (x:int y:int) -> (result:bool)
- opNotEqInt opcode = 27
+ opNotEqInt opcode = 33
- // Encoding: 0x1c (width=1)
+ // Encoding: 0x22 (width=1)
// Stack effect: (x:int y:int) -> (result:bool)
- opGtInt opcode = 28
+ opGtInt opcode = 34
- // Encoding: 0x1d (width=1)
+ // Encoding: 0x23 (width=1)
// Stack effect: (x:int y:int) -> (result:bool)
- opGtEqInt opcode = 29
+ opGtEqInt opcode = 35
- // Encoding: 0x1e (width=1)
+ // Encoding: 0x24 (width=1)
// Stack effect: (x:int y:int) -> (result:bool)
- opLtInt opcode = 30
+ opLtInt opcode = 36
- // Encoding: 0x1f (width=1)
+ // Encoding: 0x25 (width=1)
// Stack effect: (x:int y:int) -> (result:bool)
- opLtEqInt opcode = 31
+ opLtEqInt opcode = 37
- // Encoding: 0x20 (width=1)
+ // Encoding: 0x26 (width=1)
// Stack effect: (x:string y:string) -> (result:bool)
- opEqString opcode = 32
+ opEqString opcode = 38
- // Encoding: 0x21 (width=1)
+ // Encoding: 0x27 (width=1)
// Stack effect: (x:string y:string) -> (result:bool)
- opNotEqString opcode = 33
+ opNotEqString opcode = 39
- // Encoding: 0x22 (width=1)
+ // Encoding: 0x28 (width=1)
// Stack effect: (x:string y:string) -> (result:string)
- opConcat opcode = 34
+ opConcat opcode = 40
- // Encoding: 0x23 (width=1)
+ // Encoding: 0x29 (width=1)
// Stack effect: (x:int y:int) -> (result:int)
- opAdd opcode = 35
+ opAdd opcode = 41
- // Encoding: 0x24 (width=1)
+ // Encoding: 0x2a (width=1)
// Stack effect: (x:int y:int) -> (result:int)
- opSub opcode = 36
+ opSub opcode = 42
- // Encoding: 0x25 (width=1)
+ // Encoding: 0x2b (width=1)
// Stack effect: (s:string from:int to:int) -> (result:string)
- opStringSlice opcode = 37
+ opStringSlice opcode = 43
- // Encoding: 0x26 (width=1)
+ // Encoding: 0x2c (width=1)
// Stack effect: (s:string from:int) -> (result:string)
- opStringSliceFrom opcode = 38
+ opStringSliceFrom opcode = 44
- // Encoding: 0x27 (width=1)
+ // Encoding: 0x2d (width=1)
// Stack effect: (s:string to:int) -> (result:string)
- opStringSliceTo opcode = 39
+ opStringSliceTo opcode = 45
- // Encoding: 0x28 (width=1)
+ // Encoding: 0x2e (width=1)
// Stack effect: (s:string) -> (result:int)
- opStringLen opcode = 40
+ opStringLen opcode = 46
)
type opcodeInfo struct {
@@ -186,6 +210,7 @@ var opcodeInfoTable = [256]opcodeInfo{
opPushTrue: {width: 1},
opPushConst: {width: 2},
opPushIntConst: {width: 2},
+ opConvIntToIface: {width: 1},
opSetLocal: {width: 2},
opSetIntLocal: {width: 2},
opIncLocal: {width: 2},
@@ -194,10 +219,15 @@ var opcodeInfoTable = [256]opcodeInfo{
opReturnIntTop: {width: 1},
opReturnFalse: {width: 1},
opReturnTrue: {width: 1},
+ opReturn: {width: 1},
opJump: {width: 3},
opJumpFalse: {width: 3},
opJumpTrue: {width: 3},
+ opSetVariadicLen: {width: 2},
opCallNative: {width: 3},
+ opCall: {width: 3},
+ opIntCall: {width: 3},
+ opVoidCall: {width: 3},
opIsNil: {width: 1},
opIsNotNil: {width: 1},
opNot: {width: 1},
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go
index 7d457538d..8ac75771f 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go
@@ -34,7 +34,7 @@ type EvalEnv struct {
nativeFuncs []nativeFunc
userFuncs []*Func
- stack *ValueStack
+ Stack ValueStack
}
// NewEnv creates a new empty environment.
@@ -47,7 +47,7 @@ func (env *Env) GetEvalEnv() *EvalEnv {
return &EvalEnv{
nativeFuncs: env.nativeFuncs,
userFuncs: env.userFuncs,
- stack: &ValueStack{
+ Stack: ValueStack{
objects: make([]interface{}, 0, 32),
ints: make([]int, 0, 16),
},
@@ -84,8 +84,9 @@ type CompileContext struct {
// being compiled; then it should be used to execute these functions.
Env *Env
- Types *types.Info
- Fset *token.FileSet
+ Package *types.Package
+ Types *types.Info
+ Fset *token.FileSet
}
// Compile prepares an executable version of fn.
@@ -93,11 +94,19 @@ func Compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error)
return compile(ctx, fn)
}
-// Call invokes a given function with provided arguments.
-func Call(env *EvalEnv, fn *Func, args ...interface{}) CallResult {
- env.stack.objects = env.stack.objects[:0]
- env.stack.ints = env.stack.ints[:0]
- return eval(env, fn, args)
+// Call invokes a given function.
+// All arguments should be pushed to env.Stack prior to this call.
+//
+// Note that arguments are not popped off the stack,
+// so you can bind the args once and use Call multiple times.
+// If you want to reset arguments, do env.Stack.Reset().
+func Call(env *EvalEnv, fn *Func) CallResult {
+ numObjectArgs := len(env.Stack.objects)
+ numIntArgs := len(env.Stack.ints)
+ result := eval(env, fn, 0, 0)
+ env.Stack.objects = env.Stack.objects[:numObjectArgs]
+ env.Stack.ints = env.Stack.ints[:numIntArgs]
+ return result
}
// CallResult is a return value of Call function.
@@ -128,6 +137,11 @@ type Func struct {
constants []interface{}
intConstants []int
+
+ numObjectParams int
+ numIntParams int
+
+ name string
}
// ValueStack is used to manipulate runtime values during the evaluation.
@@ -138,8 +152,15 @@ type Func struct {
// If int was pushed with PushInt(), it should be retrieved by PopInt().
// It's a bad idea to do a Push() and then PopInt() and vice-versa.
type ValueStack struct {
- objects []interface{}
- ints []int
+ objects []interface{}
+ ints []int
+ variadicLen int
+}
+
+// Reset empties the stack.
+func (s *ValueStack) Reset() {
+ s.objects = s.objects[:0]
+ s.ints = s.ints[:0]
}
// Pop removes the top stack element and returns it.
@@ -157,6 +178,19 @@ func (s *ValueStack) PopInt() int {
return x
}
+// PopVariadic removes the `...` argument and returns it as a slice.
+//
+// Slice elements are in the order they were passed to the function,
+// for example, a call Sprintf("%s:%d", filename, line) returns
+// the slice []interface{filename, line}.
+func (s *ValueStack) PopVariadic() []interface{} {
+ to := len(s.objects)
+ from := to - s.variadicLen
+ xs := s.objects[from:to]
+ s.objects = s.objects[:from]
+ return xs
+}
+
// Push adds x to the stack.
// Important: for int-typed values, use PushInt.
func (s *ValueStack) Push(x interface{}) { s.objects = append(s.objects, x) }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt/qfmt.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt/qfmt.go
new file mode 100644
index 000000000..249ac2563
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qfmt/qfmt.go
@@ -0,0 +1,17 @@
+package qfmt
+
+import (
+ "fmt"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+)
+
+func ImportAll(env *quasigo.Env) {
+ env.AddNativeFunc(`fmt`, `Sprintf`, Sprintf)
+}
+
+func Sprintf(stack *quasigo.ValueStack) {
+ args := stack.PopVariadic()
+ format := stack.Pop().(string)
+ stack.Push(fmt.Sprintf(format, args...))
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv/qstrconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv/qstrconv.go
new file mode 100644
index 000000000..8bc2d943f
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv/qstrconv.go
@@ -0,0 +1,24 @@
+package qstrconv
+
+import (
+ "strconv"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+)
+
+func ImportAll(env *quasigo.Env) {
+ env.AddNativeFunc(`strconv`, `Atoi`, Atoi)
+ env.AddNativeFunc(`strconv`, `Itoa`, Itoa)
+}
+
+func Atoi(stack *quasigo.ValueStack) {
+ s := stack.Pop().(string)
+ v, err := strconv.Atoi(s)
+ stack.PushInt(v)
+ stack.Push(err)
+}
+
+func Itoa(stack *quasigo.ValueStack) {
+ i := stack.PopInt()
+ stack.Push(strconv.Itoa(i))
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings/qstrings.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings/qstrings.go
new file mode 100644
index 000000000..6b708ad9c
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings/qstrings.go
@@ -0,0 +1,62 @@
+package qstrings
+
+import (
+ "strings"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+)
+
+func ImportAll(env *quasigo.Env) {
+ env.AddNativeFunc(`strings`, `Replace`, Replace)
+ env.AddNativeFunc(`strings`, `ReplaceAll`, ReplaceAll)
+ env.AddNativeFunc(`strings`, `TrimPrefix`, TrimPrefix)
+ env.AddNativeFunc(`strings`, `TrimSuffix`, TrimSuffix)
+ env.AddNativeFunc(`strings`, `HasPrefix`, HasPrefix)
+ env.AddNativeFunc(`strings`, `HasSuffix`, HasSuffix)
+ env.AddNativeFunc(`strings`, `Contains`, Contains)
+}
+
+func Replace(stack *quasigo.ValueStack) {
+ n := stack.PopInt()
+ newPart := stack.Pop().(string)
+ oldPart := stack.Pop().(string)
+ s := stack.Pop().(string)
+ stack.Push(strings.Replace(s, oldPart, newPart, n))
+}
+
+func ReplaceAll(stack *quasigo.ValueStack) {
+ newPart := stack.Pop().(string)
+ oldPart := stack.Pop().(string)
+ s := stack.Pop().(string)
+ stack.Push(strings.ReplaceAll(s, oldPart, newPart))
+}
+
+func TrimPrefix(stack *quasigo.ValueStack) {
+ prefix := stack.Pop().(string)
+ s := stack.Pop().(string)
+ stack.Push(strings.TrimPrefix(s, prefix))
+}
+
+func TrimSuffix(stack *quasigo.ValueStack) {
+ prefix := stack.Pop().(string)
+ s := stack.Pop().(string)
+ stack.Push(strings.TrimSuffix(s, prefix))
+}
+
+func HasPrefix(stack *quasigo.ValueStack) {
+ prefix := stack.Pop().(string)
+ s := stack.Pop().(string)
+ stack.Push(strings.HasPrefix(s, prefix))
+}
+
+func HasSuffix(stack *quasigo.ValueStack) {
+ suffix := stack.Pop().(string)
+ s := stack.Pop().(string)
+ stack.Push(strings.HasSuffix(s, suffix))
+}
+
+func Contains(stack *quasigo.ValueStack) {
+ substr := stack.Pop().(string)
+ s := stack.Pop().(string)
+ stack.Push(strings.Contains(s, substr))
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go
index ba23861a2..1a2e2f05f 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go
@@ -2,9 +2,12 @@ package ruleguard
import (
"go/ast"
+ "go/build"
"go/token"
"go/types"
"io"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/ir"
)
// Engine is the main ruleguard package API object.
@@ -18,6 +21,17 @@ import (
// An Engine must be created with NewEngine() function.
type Engine struct {
impl *engine
+
+ // BuildContext can be used as an override for build.Default context.
+ // Used during the Go packages resolving.
+ //
+ // Use Engine.InferBuildContext() to create a sensible default
+ // for this field that is better than build.Default.
+ // We're not using this by default to avoid the excessive work
+ // if you already have a properly initialized build.Context object.
+ //
+ // nil will result in build.Default usage.
+ BuildContext *build.Context
}
// NewEngine creates an engine with empty rule set.
@@ -25,13 +39,30 @@ func NewEngine() *Engine {
return &Engine{impl: newEngine()}
}
+func (e *Engine) InferBuildContext() {
+ e.BuildContext = inferBuildContext()
+}
+
// Load reads a ruleguard file from r and adds it to the engine rule set.
//
// Load() is not thread-safe, especially if used concurrently with Run() method.
// It's advised to Load() all ruleguard files under a critical section (like sync.Once)
// and then use Run() to execute all of them.
-func (e *Engine) Load(ctx *ParseContext, filename string, r io.Reader) error {
- return e.impl.Load(ctx, filename, r)
+func (e *Engine) Load(ctx *LoadContext, filename string, r io.Reader) error {
+ return e.impl.Load(ctx, e.BuildContext, filename, r)
+}
+
+// LoadFromIR is like Load(), but it takes already parsed IR file as an input.
+//
+// This method can be useful if you're trying to embed a precompiled rules file
+// into your binary.
+func (e *Engine) LoadFromIR(ctx *LoadContext, filename string, f *ir.File) error {
+ return e.impl.LoadFromIR(ctx, e.BuildContext, filename, f)
+}
+
+// LoadedGroups returns information about all currently loaded rule groups.
+func (e *Engine) LoadedGroups() []GoRuleGroup {
+ return e.impl.LoadedGroups()
}
// Run executes all loaded rules on a given file.
@@ -40,11 +71,11 @@ func (e *Engine) Load(ctx *ParseContext, filename string, r io.Reader) error {
// Run() is thread-safe, unless used in parallel with Load(),
// which modifies the engine state.
func (e *Engine) Run(ctx *RunContext, f *ast.File) error {
- return e.impl.Run(ctx, f)
+ return e.impl.Run(ctx, e.BuildContext, f)
}
-type ParseContext struct {
- DebugFilter string
+type LoadContext struct {
+ DebugFunc string
DebugImports bool
DebugPrint func(string)
@@ -52,7 +83,7 @@ type ParseContext struct {
// If function returns false, that group will not be included
// in the resulting rules set.
// Nil filter accepts all rule groups.
- GroupFilter func(string) bool
+ GroupFilter func(*GoRuleGroup) bool
Fset *token.FileSet
}
@@ -62,11 +93,40 @@ type RunContext struct {
DebugImports bool
DebugPrint func(string)
- Types *types.Info
- Sizes types.Sizes
- Fset *token.FileSet
- Report func(rule GoRuleInfo, n ast.Node, msg string, s *Suggestion)
- Pkg *types.Package
+ Types *types.Info
+ Sizes types.Sizes
+ Fset *token.FileSet
+ Pkg *types.Package
+
+ // Report is a function that is called for every successful ruleguard match.
+ // The pointer to ReportData is reused, it should not be kept.
+ // If you want to keep it after Report() returns, make a copy.
+ Report func(*ReportData)
+
+ GoVersion GoVersion
+
+ // TruncateLen is a length threshold (in bytes) for interpolated vars in Report() templates.
+ //
+ // Truncation removes the part of the string in the middle and replaces it with <...>
+ // so it meets the max length constraint.
+ //
+ // The default value is 60 (implied if value is 0).
+ //
+ // Note that this value is ignored for Suggest templates.
+ // Ruleguard doesn't truncate suggested replacement candidates.
+ TruncateLen int
+}
+
+type ReportData struct {
+ RuleInfo GoRuleInfo
+ Node ast.Node
+ Message string
+ Suggestion *Suggestion
+
+ // Experimental: fields below are part of the experiment.
+ // They'll probably be removed or changed over time.
+
+ Func *ast.FuncDecl
}
type Suggestion struct {
@@ -76,12 +136,54 @@ type Suggestion struct {
}
type GoRuleInfo struct {
- // Filename is a file that defined this rule.
- Filename string
-
// Line is a line inside a file that defined this rule.
Line int
- // Group is a function name that contained this rule.
- Group string
+ // Group is a function that contains this rule.
+ Group *GoRuleGroup
+}
+
+type GoRuleGroup struct {
+ // Name is a function name associated with this rule group.
+ Name string
+
+ // Pos is a location where this rule group was defined.
+ Pos token.Position
+
+ // Line is a source code line number inside associated file.
+ // A pair of Filename:Line form a conventional location string.
+ Line int
+
+ // Filename is a file that defined this rule group.
+ Filename string
+
+ // DocTags contains a list of keys from the `gorules:tags` comment.
+ DocTags []string
+
+ // DocSummary is a short one sentence description.
+ // Filled from the `doc:summary` pragma content.
+ DocSummary string
+
+ // DocBefore is a code snippet of code that will violate rule.
+ // Filled from the `doc:before` pragma content.
+ DocBefore string
+
+ // DocAfter is a code snippet of fixed code that complies to the rule.
+ // Filled from the `doc:after` pragma content.
+ DocAfter string
+
+ // DocNote is an optional caution message or advice.
+ // Usually, it's used to reference some external resource, like
+ // issue on the GitHub.
+ // Filled from the `doc:note` pragma content.
+ DocNote string
+}
+
+// ImportError is returned when a ruleguard file references a package that cannot be imported.
+type ImportError struct {
+ msg string
+ err error
}
+
+func (e *ImportError) Error() string { return e.msg }
+func (e *ImportError) Unwrap() error { return e.err }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go
index 2048ce3e7..92f6cc34b 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go
@@ -2,64 +2,126 @@ package ruleguard
import (
"bytes"
+ "context"
"fmt"
"go/ast"
+ "go/build"
"go/printer"
+ "go/token"
"io/ioutil"
"path/filepath"
+ "reflect"
"sort"
"strconv"
"strings"
- "github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep"
"github.com/quasilyte/go-ruleguard/ruleguard/goutil"
+ "github.com/quasilyte/go-ruleguard/ruleguard/profiling"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+ "github.com/quasilyte/go-ruleguard/ruleguard/typematch"
+ "github.com/quasilyte/gogrep"
+ "github.com/quasilyte/gogrep/nodetag"
)
type rulesRunner struct {
state *engineState
+ bgContext context.Context
+
ctx *RunContext
rules *goRuleSet
+ truncateLen int
+
+ reportData ReportData
+
+ gogrepState gogrep.MatcherState
+ gogrepSubState gogrep.MatcherState
+
importer *goImporter
filename string
src []byte
- // A slice that is used to do a nodes keys sorting in renderMessage().
- sortScratch []string
+ // nodePath is a stack of ast.Nodes we visited to this point.
+ // When we enter a new node, it's placed on the top of the stack.
+ // When we leave that node, it's popped.
+ // The stack is a slice that is allocated only once and reused
+ // for the lifetime of the runner.
+ // The only overhead it has is a slice append and pop operations
+ // that are quire cheap.
+ //
+ // Note: we need this path to get a Node.Parent() for `$$` matches.
+ // So it's used to climb up the tree there.
+ // For named submatches we can't use it as the node can be located
+ // deeper into the tree than the current node.
+ // In those cases we need a more complicated algorithm.
+ nodePath nodePath
filterParams filterParams
}
-func newRulesRunner(ctx *RunContext, state *engineState, rules *goRuleSet) *rulesRunner {
+func newRulesRunner(ctx *RunContext, buildContext *build.Context, state *engineState, rules *goRuleSet) *rulesRunner {
importer := newGoImporter(state, goImporterConfig{
fset: ctx.Fset,
debugImports: ctx.DebugImports,
debugPrint: ctx.DebugPrint,
+ buildContext: buildContext,
})
+ gogrepState := gogrep.NewMatcherState()
+ gogrepState.Types = ctx.Types
+ gogrepSubState := gogrep.NewMatcherState()
+ gogrepSubState.Types = ctx.Types
+ evalEnv := state.env.GetEvalEnv()
rr := &rulesRunner{
- ctx: ctx,
- importer: importer,
- rules: rules,
+ bgContext: context.Background(),
+ ctx: ctx,
+ importer: importer,
+ rules: rules,
+ gogrepState: gogrepState,
+ gogrepSubState: gogrepSubState,
+ nodePath: newNodePath(),
+ truncateLen: ctx.TruncateLen,
filterParams: filterParams{
- env: state.env.GetEvalEnv(),
- importer: importer,
- ctx: ctx,
+ typematchState: typematch.NewMatcherState(),
+ env: evalEnv,
+ importer: importer,
+ ctx: ctx,
},
- sortScratch: make([]string, 0, 8),
+ }
+ evalEnv.Stack.Push(&rr.filterParams)
+ if ctx.TruncateLen == 0 {
+ rr.truncateLen = 60
}
rr.filterParams.nodeText = rr.nodeText
+ rr.filterParams.nodeString = rr.nodeString
+ rr.filterParams.nodePath = &rr.nodePath
+ rr.filterParams.gogrepSubState = &rr.gogrepSubState
return rr
}
+func (rr *rulesRunner) nodeString(n ast.Node) string {
+ b := rr.nodeText(n)
+ return string(b)
+}
+
func (rr *rulesRunner) nodeText(n ast.Node) []byte {
+ if gogrep.IsEmptyNodeSlice(n) {
+ return nil
+ }
+
from := rr.ctx.Fset.Position(n.Pos()).Offset
to := rr.ctx.Fset.Position(n.End()).Offset
src := rr.fileBytes()
if (from >= 0 && from < len(src)) && (to >= 0 && to < len(src)) {
return src[from:to]
}
+
+ // Go printer would panic on comments.
+ if n, ok := n.(*ast.Comment); ok {
+ return []byte(n.Text)
+ }
+
// Fallback to the printer.
var buf bytes.Buffer
if err := printer.Fprint(&buf, rr.ctx.Fset, n); err != nil {
@@ -86,61 +148,145 @@ func (rr *rulesRunner) fileBytes() []byte {
}
func (rr *rulesRunner) run(f *ast.File) error {
- // TODO(quasilyte): run local rules as well.
+ // If it's not empty then we're leaking memory.
+ // For every Push() there should be a Pop() call.
+ if rr.nodePath.Len() != 0 {
+ panic("internal error: node path is not empty")
+ }
rr.filename = rr.ctx.Fset.Position(f.Pos()).Filename
rr.filterParams.filename = rr.filename
rr.collectImports(f)
- for _, rule := range rr.rules.universal.uncategorized {
- rule.pat.Match(f, func(m gogrep.MatchData) {
- rr.handleMatch(rule, m)
+ if rr.rules.universal.categorizedNum != 0 {
+ var inspector astWalker
+ inspector.nodePath = &rr.nodePath
+ inspector.filterParams = &rr.filterParams
+ inspector.Walk(f, func(n ast.Node, tag nodetag.Value) {
+ rr.runRules(n, tag)
})
}
- if rr.rules.universal.categorizedNum != 0 {
- ast.Inspect(f, func(n ast.Node) bool {
- cat := categorizeNode(n)
- for _, rule := range rr.rules.universal.rulesByCategory[cat] {
- matched := false
- rule.pat.MatchNode(n, func(m gogrep.MatchData) {
- matched = rr.handleMatch(rule, m)
- })
- if matched {
- break
- }
+ if len(rr.rules.universal.commentRules) != 0 {
+ for _, commentGroup := range f.Comments {
+ for _, comment := range commentGroup.List {
+ rr.runCommentRules(comment)
}
- return true
- })
+ }
}
return nil
}
-func (rr *rulesRunner) reject(rule goRule, reason string, m gogrep.MatchData) {
- if rule.group != rr.ctx.Debug {
- return // This rule is not being debugged
+func (rr *rulesRunner) runCommentRules(comment *ast.Comment) {
+ // We'll need that file to create a token.Pos from the artificial offset.
+ file := rr.ctx.Fset.File(comment.Pos())
+
+ for _, rule := range rr.rules.universal.commentRules {
+ var m commentMatchData
+ if rule.captureGroups {
+ result := rule.pat.FindStringSubmatchIndex(comment.Text)
+ if result == nil {
+ continue
+ }
+ for i, name := range rule.pat.SubexpNames() {
+ if i == 0 || name == "" {
+ continue
+ }
+ resultIndex := i * 2
+ beginPos := result[resultIndex+0]
+ endPos := result[resultIndex+1]
+ // Negative index a special case when named group captured nothing.
+ // Consider this pattern: `(?P<x>foo)|(bar)`.
+ // If we have `bar` input string, <x> will remain empty.
+ if beginPos < 0 || endPos < 0 {
+ m.capture = append(m.capture, gogrep.CapturedNode{
+ Name: name,
+ Node: &ast.Comment{Slash: comment.Pos()},
+ })
+ continue
+ }
+ m.capture = append(m.capture, gogrep.CapturedNode{
+ Name: name,
+ Node: &ast.Comment{
+ Slash: file.Pos(beginPos + file.Offset(comment.Pos())),
+ Text: comment.Text[beginPos:endPos],
+ },
+ })
+ }
+ m.node = &ast.Comment{
+ Slash: file.Pos(result[0] + file.Offset(comment.Pos())),
+ Text: comment.Text[result[0]:result[1]],
+ }
+ } else {
+ // Fast path: no need to save any submatches.
+ result := rule.pat.FindStringIndex(comment.Text)
+ if result == nil {
+ continue
+ }
+ m.node = &ast.Comment{
+ Slash: file.Pos(result[0] + file.Offset(comment.Pos())),
+ Text: comment.Text[result[0]:result[1]],
+ }
+ }
+
+ accept := rr.handleCommentMatch(rule, m)
+ if accept {
+ break
+ }
}
+}
- pos := rr.ctx.Fset.Position(m.Node.Pos())
- rr.ctx.DebugPrint(fmt.Sprintf("%s:%d: [%s:%d] rejected by %s",
- pos.Filename, pos.Line, filepath.Base(rule.filename), rule.line, reason))
+func (rr *rulesRunner) runRules(n ast.Node, tag nodetag.Value) {
+ // profiling.LabelsEnabled is constant, so labels-related
+ // code should be a no-op inside normal build.
+ // To enable labels, use "-tags pproflabels" build tag.
+
+ for _, rule := range rr.rules.universal.rulesByTag[tag] {
+ if profiling.LabelsEnabled {
+ profiling.EnterWithLabels(rr.bgContext, rule.group.Name)
+ }
+
+ matched := false
+ rule.pat.MatchNode(&rr.gogrepState, n, func(m gogrep.MatchData) {
+ matched = rr.handleMatch(rule, m)
+ })
+
+ if profiling.LabelsEnabled {
+ profiling.Leave(rr.bgContext)
+ }
- type namedNode struct {
- name string
- node ast.Node
+ if matched && !multiMatchTags[tag] {
+ break
+ }
}
- values := make([]namedNode, 0, len(m.Values))
- for name, node := range m.Values {
- values = append(values, namedNode{name: name, node: node})
+}
+
+func (rr *rulesRunner) reject(rule goRule, reason string, m matchData) {
+ if rule.group.Name != rr.ctx.Debug {
+ return // This rule is not being debugged
}
+
+ pos := rr.ctx.Fset.Position(m.Node().Pos())
+ rr.ctx.DebugPrint(fmt.Sprintf("%s:%d: [%s:%d] rejected by %s",
+ pos.Filename, pos.Line, filepath.Base(rule.group.Filename), rule.line, reason))
+
+ values := make([]gogrep.CapturedNode, len(m.CaptureList()))
+ copy(values, m.CaptureList())
sort.Slice(values, func(i, j int) bool {
- return values[i].name < values[j].name
+ return values[i].Name < values[j].Name
})
for _, v := range values {
- name := v.name
- node := v.node
+ name := v.Name
+ node := v.Node
+
+ if comment, ok := node.(*ast.Comment); ok {
+ s := strings.ReplaceAll(comment.Text, "\n", `\n`)
+ rr.ctx.DebugPrint(fmt.Sprintf(" $%s: %s", name, s))
+ continue
+ }
+
var expr ast.Expr
switch node := node.(type) {
case ast.Expr:
@@ -161,35 +307,105 @@ func (rr *rulesRunner) reject(rule goRule, reason string, m gogrep.MatchData) {
}
}
+func (rr *rulesRunner) handleCommentMatch(rule goCommentRule, m commentMatchData) bool {
+ if rule.base.filter.fn != nil {
+ rr.filterParams.match = m
+ filterResult := rule.base.filter.fn(&rr.filterParams)
+ if !filterResult.Matched() {
+ rr.reject(rule.base, filterResult.RejectReason(), m)
+ return false
+ }
+ }
+
+ message := rr.renderMessage(rule.base.msg, m, true)
+ node := m.Node()
+ if rule.base.location != "" {
+ node, _ = m.CapturedByName(rule.base.location)
+ }
+ var suggestion *Suggestion
+ if rule.base.suggestion != "" {
+ suggestion = &Suggestion{
+ Replacement: []byte(rr.renderMessage(rule.base.suggestion, m, false)),
+ From: node.Pos(),
+ To: node.End(),
+ }
+ }
+ info := GoRuleInfo{
+ Group: rule.base.group,
+ Line: rule.base.line,
+ }
+ rr.reportData.RuleInfo = info
+ rr.reportData.Node = node
+ rr.reportData.Message = message
+ rr.reportData.Suggestion = suggestion
+
+ rr.ctx.Report(&rr.reportData)
+ return true
+}
+
func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool {
+ if rule.filter.fn != nil || rule.do != nil {
+ rr.filterParams.match = astMatchData{match: m}
+ }
+
if rule.filter.fn != nil {
- rr.filterParams.values = m.Values
filterResult := rule.filter.fn(&rr.filterParams)
if !filterResult.Matched() {
- rr.reject(rule, filterResult.RejectReason(), m)
+ rr.reject(rule, filterResult.RejectReason(), astMatchData{match: m})
return false
}
}
- message := rr.renderMessage(rule.msg, m.Node, m.Values, true)
node := m.Node
if rule.location != "" {
- node = m.Values[rule.location]
+ node, _ = m.CapturedByName(rule.location)
+ }
+
+ var messageText string
+ var suggestText string
+ if rule.do != nil {
+ rr.filterParams.reportString = ""
+ rr.filterParams.suggestString = ""
+ _ = quasigo.Call(rr.filterParams.env, rule.do)
+ messageText = rr.filterParams.reportString
+ if messageText == "" {
+ if rr.filterParams.suggestString != "" {
+ messageText = "suggestion: " + rr.filterParams.suggestString
+ } else {
+ messageText = "<empty message>"
+ }
+ }
+ if rr.filterParams.suggestString != "" {
+ suggestText = rr.filterParams.suggestString
+ }
+ } else {
+ messageText = rr.renderMessage(rule.msg, astMatchData{match: m}, true)
+ if rule.suggestion != "" {
+ suggestText = rr.renderMessage(rule.suggestion, astMatchData{match: m}, false)
+ }
}
+
var suggestion *Suggestion
- if rule.suggestion != "" {
+ if suggestText != "" {
suggestion = &Suggestion{
- Replacement: []byte(rr.renderMessage(rule.suggestion, m.Node, m.Values, false)),
+ Replacement: []byte(suggestText),
From: node.Pos(),
To: node.End(),
}
}
+
info := GoRuleInfo{
- Group: rule.group,
- Filename: rule.filename,
- Line: rule.line,
+ Group: rule.group,
+ Line: rule.line,
}
- rr.ctx.Report(info, node, message, suggestion)
+ rr.reportData.RuleInfo = info
+ rr.reportData.Node = node
+ rr.reportData.Message = messageText
+ rr.reportData.Suggestion = suggestion
+
+ rr.reportData.Func = rr.filterParams.currentFunc
+
+ rr.ctx.Report(&rr.reportData)
return true
}
@@ -204,40 +420,113 @@ func (rr *rulesRunner) collectImports(f *ast.File) {
}
}
-func (rr *rulesRunner) renderMessage(msg string, n ast.Node, nodes map[string]ast.Node, truncate bool) string {
- var buf strings.Builder
- if strings.Contains(msg, "$$") {
- buf.Write(rr.nodeText(n))
- msg = strings.ReplaceAll(msg, "$$", buf.String())
- }
- if len(nodes) == 0 {
+func (rr *rulesRunner) renderMessage(msg string, m matchData, truncate bool) string {
+ if !strings.Contains(msg, "$") {
return msg
}
- rr.sortScratch = rr.sortScratch[:0]
- for name := range nodes {
- rr.sortScratch = append(rr.sortScratch, name)
+ var capture []gogrep.CapturedNode
+ if len(m.CaptureList()) != 0 {
+ capture = make([]gogrep.CapturedNode, 0, len(m.CaptureList()))
+ for _, c := range m.CaptureList() {
+ n := c.Node
+ // Some captured nodes are typed, but nil.
+ // We can't really get their text, so skip them here.
+ // For example, pattern `func $_() $results { $*_ }` may
+ // match a nil *ast.FieldList for $results if executed
+ // against a function with no results.
+ if reflect.ValueOf(n).IsNil() && !gogrep.IsEmptyNodeSlice(n) {
+ continue
+ }
+ capture = append(capture, c)
+ }
+ if len(capture) > 1 {
+ sort.Slice(capture, func(i, j int) bool {
+ return len(capture[i].Name) > len(capture[j].Name)
+ })
+ }
}
- sort.Slice(rr.sortScratch, func(i, j int) bool {
- return len(rr.sortScratch[i]) > len(rr.sortScratch[j])
- })
- for _, name := range rr.sortScratch {
- n := nodes[name]
- key := "$" + name
- if !strings.Contains(msg, key) {
- continue
+ result := make([]byte, 0, len(msg)*2)
+ i := 0
+ for {
+ j := strings.IndexByte(msg[i:], '$')
+ if j == -1 {
+ result = append(result, msg[i:]...)
+ break
+ }
+ dollarPos := i + j
+ result = append(result, msg[i:dollarPos]...)
+ var n ast.Node
+ var nameLen int
+ if strings.HasPrefix(msg[dollarPos+1:], "$") {
+ n = m.Node()
+ nameLen = 1
+ } else {
+ for _, c := range capture {
+ if strings.HasPrefix(msg[dollarPos+1:], c.Name) {
+ n = c.Node
+ nameLen = len(c.Name)
+ break
+ }
+ }
}
- buf.Reset()
- buf.Write(rr.nodeText(n))
- // Don't interpolate strings that are too long.
- var replacement string
- if truncate && buf.Len() > 60 {
- replacement = key
+ if n != nil {
+ text := rr.nodeText(n)
+ text = rr.fixedText(text, n, msg[dollarPos+1+nameLen:])
+ if truncate {
+ text = truncateText(text, rr.truncateLen)
+ }
+ result = append(result, text...)
} else {
- replacement = buf.String()
+ result = append(result, '$')
}
- msg = strings.ReplaceAll(msg, key, replacement)
+ i = dollarPos + len("$") + nameLen
}
- return msg
+
+ return string(result)
+}
+
+func (rr *rulesRunner) fixedText(text []byte, n ast.Node, following string) []byte {
+ // pattern=`$x.y` $x=`&buf` following=`.y`
+ // Insert $x as `buf`, so we get `buf.y` instead of incorrect `&buf.y`.
+ if n, ok := n.(*ast.UnaryExpr); ok && n.Op == token.AND {
+ shouldFix := false
+ switch n.X.(type) {
+ case *ast.Ident, *ast.IndexExpr, *ast.SelectorExpr:
+ shouldFix = true
+ }
+ if shouldFix && strings.HasPrefix(following, ".") {
+ return bytes.TrimPrefix(text, []byte("&"))
+ }
+ }
+
+ return text
+}
+
+var longTextPlaceholder = []byte("<...>")
+
+func truncateText(s []byte, maxLen int) []byte {
+ if len(s) <= maxLen-len(longTextPlaceholder) {
+ return s
+ }
+ maxLen -= len(longTextPlaceholder)
+ leftLen := maxLen / 2
+ rightLen := (maxLen % 2) + leftLen
+ left := s[:leftLen]
+ right := s[len(s)-rightLen:]
+
+ result := make([]byte, 0, len(left)+len(longTextPlaceholder)+len(right))
+ result = append(result, left...)
+ result = append(result, longTextPlaceholder...)
+ result = append(result, right...)
+
+ return result
+}
+
+var multiMatchTags = [nodetag.NumBuckets]bool{
+ nodetag.BlockStmt: true,
+ nodetag.CaseClause: true,
+ nodetag.CommClause: true,
+ nodetag.File: true,
}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/compile.go
new file mode 100644
index 000000000..d320bf880
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/compile.go
@@ -0,0 +1,84 @@
+package textmatch
+
+import (
+ "regexp"
+ "regexp/syntax"
+ "unicode"
+)
+
+func compile(s string) (Pattern, error) {
+ reSyntax, err := syntax.Parse(s, syntax.Perl)
+ if err == nil {
+ if optimized := compileOptimized(s, reSyntax); optimized != nil {
+ return optimized, nil
+ }
+ }
+ return regexp.Compile(s)
+}
+
+func compileOptimized(s string, re *syntax.Regexp) Pattern {
+ // .*
+ isAny := func(re *syntax.Regexp) bool {
+ return re.Op == syntax.OpStar && re.Sub[0].Op == syntax.OpAnyCharNotNL
+ }
+ // "literal"
+ isLit := func(re *syntax.Regexp) bool {
+ return re.Op == syntax.OpLiteral
+ }
+ // ^
+ isBegin := func(re *syntax.Regexp) bool {
+ return re.Op == syntax.OpBeginText
+ }
+ // $
+ isEnd := func(re *syntax.Regexp) bool {
+ return re.Op == syntax.OpEndText
+ }
+
+ // TODO: analyze what kind of regexps people use in rules
+ // more often and optimize those as well.
+
+ // lit => strings.Contains($input, lit)
+ if re.Op == syntax.OpLiteral {
+ return &containsLiteralMatcher{value: newInputValue(string(re.Rune))}
+ }
+
+ // `.*` lit `.*` => strings.Contains($input, lit)
+ if re.Op == syntax.OpConcat && len(re.Sub) == 3 {
+ if isAny(re.Sub[0]) && isLit(re.Sub[1]) && isAny(re.Sub[2]) {
+ return &containsLiteralMatcher{value: newInputValue(string(re.Sub[1].Rune))}
+ }
+ }
+
+ // `^` lit => strings.HasPrefix($input, lit)
+ if re.Op == syntax.OpConcat && len(re.Sub) == 2 {
+ if isBegin(re.Sub[0]) && isLit(re.Sub[1]) {
+ return &prefixLiteralMatcher{value: newInputValue(string(re.Sub[1].Rune))}
+ }
+ }
+
+ // lit `$` => strings.HasSuffix($input, lit)
+ if re.Op == syntax.OpConcat && len(re.Sub) == 2 {
+ if isLit(re.Sub[0]) && isEnd(re.Sub[1]) {
+ return &suffixLiteralMatcher{value: newInputValue(string(re.Sub[0].Rune))}
+ }
+ }
+
+ // `^` lit `$` => $input == lit
+ if re.Op == syntax.OpConcat && len(re.Sub) == 3 {
+ if isBegin(re.Sub[0]) && isLit(re.Sub[1]) && isEnd(re.Sub[2]) {
+ return &eqLiteralMatcher{value: newInputValue(string(re.Sub[1].Rune))}
+ }
+ }
+
+ // `^\p{Lu}` => prefixRunePredMatcher:unicode.IsUpper
+ // `^\p{Ll}` => prefixRunePredMatcher:unicode.IsLower
+ switch s {
+ case `^\p{Lu}`:
+ return &prefixRunePredMatcher{pred: unicode.IsUpper}
+ case `^\p{Ll}`:
+ return &prefixRunePredMatcher{pred: unicode.IsLower}
+ }
+
+ // Can't optimize.
+ return nil
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/matchers.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/matchers.go
new file mode 100644
index 000000000..2f68c9aee
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/matchers.go
@@ -0,0 +1,72 @@
+package textmatch
+
+import (
+ "bytes"
+ "strings"
+ "unicode/utf8"
+)
+
+// inputValue is a wrapper for string|[]byte.
+//
+// We hold both values to avoid string->[]byte and vice versa
+// conversions when doing Match and MatchString.
+type inputValue struct {
+ s string
+ b []byte
+}
+
+func newInputValue(s string) inputValue {
+ return inputValue{s: s, b: []byte(s)}
+}
+
+type containsLiteralMatcher struct{ value inputValue }
+
+func (m *containsLiteralMatcher) MatchString(s string) bool {
+ return strings.Contains(s, m.value.s)
+}
+
+func (m *containsLiteralMatcher) Match(b []byte) bool {
+ return bytes.Contains(b, m.value.b)
+}
+
+type prefixLiteralMatcher struct{ value inputValue }
+
+func (m *prefixLiteralMatcher) MatchString(s string) bool {
+ return strings.HasPrefix(s, m.value.s)
+}
+
+func (m *prefixLiteralMatcher) Match(b []byte) bool {
+ return bytes.HasPrefix(b, m.value.b)
+}
+
+type suffixLiteralMatcher struct{ value inputValue }
+
+func (m *suffixLiteralMatcher) MatchString(s string) bool {
+ return strings.HasSuffix(s, m.value.s)
+}
+
+func (m *suffixLiteralMatcher) Match(b []byte) bool {
+ return bytes.HasSuffix(b, m.value.b)
+}
+
+type eqLiteralMatcher struct{ value inputValue }
+
+func (m *eqLiteralMatcher) MatchString(s string) bool {
+ return m.value.s == s
+}
+
+func (m *eqLiteralMatcher) Match(b []byte) bool {
+ return bytes.Equal(m.value.b, b)
+}
+
+type prefixRunePredMatcher struct{ pred func(rune) bool }
+
+func (m *prefixRunePredMatcher) MatchString(s string) bool {
+ r, _ := utf8.DecodeRuneInString(s)
+ return m.pred(r)
+}
+
+func (m *prefixRunePredMatcher) Match(b []byte) bool {
+ r, _ := utf8.DecodeRune(b)
+ return m.pred(r)
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go
new file mode 100644
index 000000000..a3787e2c1
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/textmatch/textmatch.go
@@ -0,0 +1,26 @@
+package textmatch
+
+import "regexp"
+
+// Pattern is a compiled regular expression.
+type Pattern interface {
+ MatchString(s string) bool
+ Match(b []byte) bool
+}
+
+// Compile parses a regular expression and returns a compiled
+// pattern that can match inputs descriped by the regexp.
+//
+// Semantically it's close to the regexp.Compile, but
+// it does recognize some common patterns and creates
+// a more optimized matcher for them.
+func Compile(re string) (Pattern, error) {
+ return compile(re)
+}
+
+// IsRegexp reports whether p is implemented using regexp.
+// False means that the underlying matcher is something optimized.
+func IsRegexp(p Pattern) bool {
+ _, ok := p.(*regexp.Regexp)
+ return ok
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go
index 1d739819d..672b6b45b 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go
@@ -16,15 +16,17 @@ func _() {
_ = x[opArray-5]
_ = x[opMap-6]
_ = x[opChan-7]
- _ = x[opFunc-8]
- _ = x[opStructNoSeq-9]
- _ = x[opStruct-10]
- _ = x[opNamed-11]
+ _ = x[opFuncNoSeq-8]
+ _ = x[opFunc-9]
+ _ = x[opStructNoSeq-10]
+ _ = x[opStruct-11]
+ _ = x[opAnyInterface-12]
+ _ = x[opNamed-13]
}
-const _patternOp_name = "opBuiltinTypeopPointeropVaropVarSeqopSliceopArrayopMapopChanopFuncopStructNoSeqopStructopNamed"
+const _patternOp_name = "opBuiltinTypeopPointeropVaropVarSeqopSliceopArrayopMapopChanopFuncNoSeqopFuncopStructNoSeqopStructopAnyInterfaceopNamed"
-var _patternOp_index = [...]uint8{0, 13, 22, 27, 35, 42, 49, 54, 60, 66, 79, 87, 94}
+var _patternOp_index = [...]uint8{0, 13, 22, 27, 35, 42, 49, 54, 60, 71, 77, 90, 98, 112, 119}
func (i patternOp) String() string {
if i < 0 || i >= patternOp(len(_patternOp_index)-1) {
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go
index 19391ecd4..b74740378 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go
@@ -24,16 +24,40 @@ const (
opArray
opMap
opChan
+ opFuncNoSeq
opFunc
opStructNoSeq
opStruct
+ opAnyInterface
opNamed
)
-type Pattern struct {
+type MatcherState struct {
typeMatches map[string]types.Type
int64Matches map[string]int64
+}
+
+func NewMatcherState() *MatcherState {
+ return &MatcherState{
+ typeMatches: map[string]types.Type{},
+ int64Matches: map[string]int64{},
+ }
+}
+
+func (state *MatcherState) reset() {
+ if len(state.int64Matches) != 0 {
+ for k := range state.int64Matches {
+ delete(state.int64Matches, k)
+ }
+ }
+ if len(state.typeMatches) != 0 {
+ for k := range state.typeMatches {
+ delete(state.typeMatches, k)
+ }
+ }
+}
+type Pattern struct {
root *pattern
}
@@ -105,9 +129,7 @@ func Parse(ctx *Context, s string) (*Pattern, error) {
return nil, fmt.Errorf("can't convert %s type expression", s)
}
p := &Pattern{
- typeMatches: map[string]types.Type{},
- int64Matches: map[string]int64{},
- root: root,
+ root: root,
}
return p, nil
}
@@ -166,6 +188,9 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
if !ok {
return nil
}
+ if pkg.Name == "unsafe" && e.Sel.Name == "Pointer" {
+ return &pattern{op: opBuiltinType, value: types.Typ[types.UnsafePointer]}
+ }
pkgPath, ok := ctx.Itab.Lookup(pkg.Name)
if !ok {
return nil
@@ -252,6 +277,7 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
return parseExpr(ctx, e.X)
case *ast.FuncType:
+ hasSeq := false
var params []*pattern
var results []*pattern
if e.Params != nil {
@@ -263,6 +289,9 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
if len(field.Names) != 0 {
return nil
}
+ if p.op == opVarSeq {
+ hasSeq = true
+ }
params = append(params, p)
}
}
@@ -275,11 +304,18 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
if len(field.Names) != 0 {
return nil
}
+ if p.op == opVarSeq {
+ hasSeq = true
+ }
results = append(results, p)
}
}
+ op := opFuncNoSeq
+ if hasSeq {
+ op = opFunc
+ }
return &pattern{
- op: opFunc,
+ op: op,
value: len(params),
subs: append(params, results...),
}
@@ -313,27 +349,28 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
if len(e.Methods.List) == 0 {
return &pattern{op: opBuiltinType, value: efaceType}
}
+ if len(e.Methods.List) == 1 {
+ p := parseExpr(ctx, e.Methods.List[0].Type)
+ if p == nil {
+ return nil
+ }
+ if p.op != opVarSeq {
+ return nil
+ }
+ return &pattern{op: opAnyInterface}
+ }
}
return nil
}
// MatchIdentical returns true if the go typ matches pattern p.
-func (p *Pattern) MatchIdentical(typ types.Type) bool {
- p.reset()
- return p.matchIdentical(p.root, typ)
+func (p *Pattern) MatchIdentical(state *MatcherState, typ types.Type) bool {
+ state.reset()
+ return p.matchIdentical(state, p.root, typ)
}
-func (p *Pattern) reset() {
- if len(p.int64Matches) != 0 {
- p.int64Matches = map[string]int64{}
- }
- if len(p.typeMatches) != 0 {
- p.typeMatches = map[string]types.Type{}
- }
-}
-
-func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool {
+func (p *Pattern) matchIdenticalFielder(state *MatcherState, subs []*pattern, f fielder) bool {
// TODO: do backtracking.
numFields := f.NumFields()
@@ -361,7 +398,7 @@ func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool {
matchAny = false
i++
// Lookahead for non-greedy matching.
- case i+1 < len(subs) && p.matchIdentical(subs[i+1], f.Field(fieldsMatched).Type()):
+ case i+1 < len(subs) && p.matchIdentical(state, subs[i+1], f.Field(fieldsMatched).Type()):
matchAny = false
i += 2
fieldsMatched++
@@ -371,7 +408,7 @@ func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool {
continue
}
- if fieldsLeft == 0 || !p.matchIdentical(pat, f.Field(fieldsMatched).Type()) {
+ if fieldsLeft == 0 || !p.matchIdentical(state, pat, f.Field(fieldsMatched).Type()) {
return false
}
i++
@@ -381,16 +418,16 @@ func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool {
return numFields == fieldsMatched
}
-func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
+func (p *Pattern) matchIdentical(state *MatcherState, sub *pattern, typ types.Type) bool {
switch sub.op {
case opVar:
name := sub.value.(string)
if name == "_" {
return true
}
- y, ok := p.typeMatches[name]
+ y, ok := state.typeMatches[name]
if !ok {
- p.typeMatches[name] = typ
+ state.typeMatches[name] = typ
return true
}
if y == nil {
@@ -406,14 +443,14 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
if !ok {
return false
}
- return p.matchIdentical(sub.subs[0], typ.Elem())
+ return p.matchIdentical(state, sub.subs[0], typ.Elem())
case opSlice:
typ, ok := typ.(*types.Slice)
if !ok {
return false
}
- return p.matchIdentical(sub.subs[0], typ.Elem())
+ return p.matchIdentical(state, sub.subs[0], typ.Elem())
case opArray:
typ, ok := typ.(*types.Array)
@@ -427,25 +464,25 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
wantLen = typ.Len()
break
}
- length, ok := p.int64Matches[v]
+ length, ok := state.int64Matches[v]
if ok {
wantLen = length
} else {
- p.int64Matches[v] = typ.Len()
+ state.int64Matches[v] = typ.Len()
wantLen = typ.Len()
}
case int64:
wantLen = v
}
- return wantLen == typ.Len() && p.matchIdentical(sub.subs[0], typ.Elem())
+ return wantLen == typ.Len() && p.matchIdentical(state, sub.subs[0], typ.Elem())
case opMap:
typ, ok := typ.(*types.Map)
if !ok {
return false
}
- return p.matchIdentical(sub.subs[0], typ.Key()) &&
- p.matchIdentical(sub.subs[1], typ.Elem())
+ return p.matchIdentical(state, sub.subs[0], typ.Key()) &&
+ p.matchIdentical(state, sub.subs[1], typ.Elem())
case opChan:
typ, ok := typ.(*types.Chan)
@@ -453,7 +490,7 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
return false
}
dir := sub.value.(types.ChanDir)
- return dir == typ.Dir() && p.matchIdentical(sub.subs[0], typ.Elem())
+ return dir == typ.Dir() && p.matchIdentical(state, sub.subs[0], typ.Elem())
case opNamed:
typ, ok := typ.(*types.Named)
@@ -474,7 +511,7 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
path := strings.SplitAfter(obj.Pkg().Path(), "/vendor/")
return path[len(path)-1] == pkgPath && typeName == obj.Name()
- case opFunc:
+ case opFuncNoSeq:
typ, ok := typ.(*types.Signature)
if !ok {
return false
@@ -489,17 +526,35 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
return false
}
for i := 0; i < typ.Params().Len(); i++ {
- if !p.matchIdentical(params[i], typ.Params().At(i).Type()) {
+ if !p.matchIdentical(state, params[i], typ.Params().At(i).Type()) {
return false
}
}
for i := 0; i < typ.Results().Len(); i++ {
- if !p.matchIdentical(results[i], typ.Results().At(i).Type()) {
+ if !p.matchIdentical(state, results[i], typ.Results().At(i).Type()) {
return false
}
}
return true
+ case opFunc:
+ typ, ok := typ.(*types.Signature)
+ if !ok {
+ return false
+ }
+ numParams := sub.value.(int)
+ params := sub.subs[:numParams]
+ results := sub.subs[numParams:]
+ adapter := tupleFielder{x: typ.Params()}
+ if !p.matchIdenticalFielder(state, params, &adapter) {
+ return false
+ }
+ adapter.x = typ.Results()
+ if !p.matchIdenticalFielder(state, results, &adapter) {
+ return false
+ }
+ return true
+
case opStructNoSeq:
typ, ok := typ.(*types.Struct)
if !ok {
@@ -509,7 +564,7 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
return false
}
for i, member := range sub.subs {
- if !p.matchIdentical(member, typ.Field(i).Type()) {
+ if !p.matchIdentical(state, member, typ.Field(i).Type()) {
return false
}
}
@@ -520,11 +575,15 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
if !ok {
return false
}
- if !p.matchIdenticalFielder(sub.subs, typ) {
+ if !p.matchIdenticalFielder(state, sub.subs, typ) {
return false
}
return true
+ case opAnyInterface:
+ _, ok := typ.(*types.Interface)
+ return ok
+
default:
return false
}
@@ -534,3 +593,10 @@ type fielder interface {
Field(i int) *types.Var
NumFields() int
}
+
+type tupleFielder struct {
+ x *types.Tuple
+}
+
+func (tup *tupleFielder) Field(i int) *types.Var { return tup.x.At(i) }
+func (tup *tupleFielder) NumFields() int { return tup.x.Len() }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
index 16fd7d68a..962e9da2a 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
@@ -6,10 +6,42 @@ import (
"go/parser"
"go/token"
"go/types"
+ "regexp/syntax"
"strconv"
"strings"
)
+var invalidType = types.Typ[types.Invalid]
+
+func regexpHasCaptureGroups(pattern string) bool {
+ // regexp.Compile() uses syntax.Perl flags, so
+ // we use the same flags here.
+ re, err := syntax.Parse(pattern, syntax.Perl)
+ if err != nil {
+ return true // true is more conservative than false
+ }
+
+ found := false
+
+ var walkRegexp func(*syntax.Regexp)
+ walkRegexp = func(re *syntax.Regexp) {
+ if found {
+ return
+ }
+ // OpCapture handles both named and unnamed capture groups.
+ if re.Op == syntax.OpCapture {
+ found = true
+ return
+ }
+ for _, sub := range re.Sub {
+ walkRegexp(sub)
+ }
+ }
+ walkRegexp(re)
+
+ return found
+}
+
func findDependency(pkg *types.Package, path string) *types.Package {
if pkg.Path() == path {
return pkg
@@ -24,24 +56,30 @@ func findDependency(pkg *types.Package, path string) *types.Package {
return nil
}
-var basicTypeByName = map[string]types.Type{
- "bool": types.Typ[types.Bool],
- "int": types.Typ[types.Int],
- "int8": types.Typ[types.Int8],
- "int16": types.Typ[types.Int16],
- "int32": types.Typ[types.Int32],
- "int64": types.Typ[types.Int64],
- "uint": types.Typ[types.Uint],
- "uint8": types.Typ[types.Uint8],
- "uint16": types.Typ[types.Uint16],
- "uint32": types.Typ[types.Uint32],
- "uint64": types.Typ[types.Uint64],
- "uintptr": types.Typ[types.Uintptr],
- "float32": types.Typ[types.Float32],
- "float64": types.Typ[types.Float64],
- "complex64": types.Typ[types.Complex64],
- "complex128": types.Typ[types.Complex128],
- "string": types.Typ[types.String],
+var typeByName = map[string]types.Type{
+ // Predeclared types.
+ `error`: types.Universe.Lookup("error").Type(),
+ `bool`: types.Typ[types.Bool],
+ `int`: types.Typ[types.Int],
+ `int8`: types.Typ[types.Int8],
+ `int16`: types.Typ[types.Int16],
+ `int32`: types.Typ[types.Int32],
+ `int64`: types.Typ[types.Int64],
+ `uint`: types.Typ[types.Uint],
+ `uint8`: types.Typ[types.Uint8],
+ `uint16`: types.Typ[types.Uint16],
+ `uint32`: types.Typ[types.Uint32],
+ `uint64`: types.Typ[types.Uint64],
+ `uintptr`: types.Typ[types.Uintptr],
+ `string`: types.Typ[types.String],
+ `float32`: types.Typ[types.Float32],
+ `float64`: types.Typ[types.Float64],
+ `complex64`: types.Typ[types.Complex64],
+ `complex128`: types.Typ[types.Complex128],
+
+ // Predeclared aliases (provided for convenience).
+ `byte`: types.Typ[types.Uint8],
+ `rune`: types.Typ[types.Int32],
}
func typeFromString(s string) (types.Type, error) {
@@ -57,9 +95,9 @@ func typeFromString(s string) (types.Type, error) {
func typeFromNode(e ast.Expr) types.Type {
switch e := e.(type) {
case *ast.Ident:
- basic, ok := basicTypeByName[e.Name]
+ typ, ok := typeByName[e.Name]
if ok {
- return basic
+ return typ
}
case *ast.ArrayType:
@@ -78,7 +116,7 @@ func typeFromNode(e ast.Expr) types.Type {
if err != nil {
return nil
}
- types.NewArray(elem, int64(length))
+ return types.NewArray(elem, int64(length))
case *ast.MapType:
keyType := typeFromNode(e.Key)
@@ -143,7 +181,7 @@ func isPure(info *types.Info, expr ast.Expr) bool {
case *ast.UnaryExpr:
return expr.Op != token.ARROW &&
isPure(info, expr.X)
- case *ast.BasicLit, *ast.Ident:
+ case *ast.BasicLit, *ast.Ident, *ast.FuncLit:
return true
case *ast.IndexExpr:
return isPure(info, expr.X) &&
@@ -184,6 +222,37 @@ func isConstant(info *types.Info, expr ast.Expr) bool {
return ok && tv.Value != nil
}
+func isConstantSlice(info *types.Info, expr ast.Expr) bool {
+ switch expr := expr.(type) {
+ case *ast.CallExpr:
+ // Matches []byte("string").
+ if len(expr.Args) != 1 {
+ return false
+ }
+ lit, ok := expr.Args[0].(*ast.BasicLit)
+ if !ok || lit.Kind != token.STRING {
+ return false
+ }
+ typ, ok := info.TypeOf(expr.Fun).(*types.Slice)
+ if !ok {
+ return false
+ }
+ basicType, ok := typ.Elem().(*types.Basic)
+ return ok && basicType.Kind() == types.Uint8
+
+ case *ast.CompositeLit:
+ for _, elt := range expr.Elts {
+ if !isConstant(info, elt) {
+ return false
+ }
+ }
+ return true
+
+ default:
+ return false
+ }
+}
+
// isTypeExpr reports whether x represents a type expression.
//
// Type expression does not evaluate to any run time value,
@@ -213,3 +282,16 @@ func isTypeExpr(info *types.Info, x ast.Expr) bool {
return false
}
}
+
+func identOf(e ast.Expr) *ast.Ident {
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ return identOf(e.X)
+ case *ast.Ident:
+ return e
+ case *ast.SelectorExpr:
+ return e.Sel
+ default:
+ return nil
+ }
+}
diff --git a/vendor/github.com/quasilyte/gogrep/.gitignore b/vendor/github.com/quasilyte/gogrep/.gitignore
new file mode 100644
index 000000000..ec560f1c9
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/.gitignore
@@ -0,0 +1,4 @@
+.idea
+.vscode
+coverage.txt
+bin
diff --git a/vendor/github.com/quasilyte/gogrep/.golangci.yml b/vendor/github.com/quasilyte/gogrep/.golangci.yml
new file mode 100644
index 000000000..16d03c54d
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/.golangci.yml
@@ -0,0 +1,49 @@
+{
+ "run": {
+ # timeout for analysis, e.g. 30s, 5m, default is 1m
+ "deadline": "3m",
+ },
+ "fast": false,
+ "linters": {
+ "enable": [
+ "deadcode",
+ "errcheck",
+ "gas",
+ "gocritic",
+ "gofmt",
+ "goimports",
+ "revive",
+ "govet",
+ "gosimple",
+ "ineffassign",
+ "megacheck",
+ "misspell",
+ "nakedret",
+ "staticcheck",
+ "structcheck",
+ "typecheck",
+ "unconvert",
+ "unused",
+ "varcheck",
+ ],
+ },
+ "disable": [
+ "depguard",
+ "dupl",
+ "gocyclo",
+ "interfacer",
+ "lll",
+ "maligned",
+ "prealloc",
+ ],
+ "linters-settings": {
+ "gocritic": {
+ "enabled-tags": [
+ "style",
+ "diagnostic",
+ "performance",
+ "experimental",
+ ],
+ },
+ },
+}
diff --git a/vendor/github.com/quasilyte/gogrep/LICENSE b/vendor/github.com/quasilyte/gogrep/LICENSE
new file mode 100644
index 000000000..575b56ae1
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/LICENSE
@@ -0,0 +1,33 @@
+BSD 3-Clause License
+
+Copyright (c) 2021, Iskander (Alex) Sharipov
+
+Originally based on the Daniel Martí code | Copyright (c) 2017, Daniel Martí. All rights reserved.
+See https://github.com/mvdan/gogrep
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/quasilyte/gogrep/Makefile b/vendor/github.com/quasilyte/gogrep/Makefile
new file mode 100644
index 000000000..d05331f42
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/Makefile
@@ -0,0 +1,19 @@
+GOPATH_DIR=`go env GOPATH`
+
+test:
+ go test -count 2 -coverpkg=./... -coverprofile=coverage.txt -covermode=atomic ./...
+ go test -bench=. ./...
+ @echo "everything is OK"
+
+ci-lint:
+ curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOPATH_DIR)/bin v1.43.0
+ $(GOPATH_DIR)/bin/golangci-lint run ./...
+ go install github.com/quasilyte/go-consistent@latest
+ $(GOPATH_DIR)/bin/go-consistent . ./internal/... ./nodetag/... ./filters/...
+ @echo "everything is OK"
+
+lint:
+ golangci-lint run ./...
+ @echo "everything is OK"
+
+.PHONY: ci-lint lint test
diff --git a/vendor/github.com/quasilyte/gogrep/README.md b/vendor/github.com/quasilyte/gogrep/README.md
new file mode 100644
index 000000000..b6c2c47c1
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/README.md
@@ -0,0 +1,41 @@
+![logo](https://github.com/quasilyte/vscode-gogrep/blob/master/docs/logo.png?raw=true)
+
+![Build Status](https://github.com/quasilyte/gogrep/workflows/Go/badge.svg)
+[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/quasilyte/gogrep)](https://pkg.go.dev/github.com/quasilyte/gogrep)
+[![Go Report Card](https://goreportcard.com/badge/github.com/quasilyte/gogrep)](https://goreportcard.com/report/github.com/quasilyte/gogrep)
+![Code Coverage](https://codecov.io/gh/quasilyte/gogrep/branch/master/graph/badge.svg)
+
+# gogrep
+
+This is an attempt to move a modified [gogrep](https://github.com/mvdan/gogrep) from the [go-ruleguard](https://github.com/quasilyte/go-ruleguard) project, so it can be used independently.
+
+This repository contains two Go modules. One for the gogrep library and the second one for the command-line tool.
+
+## gogrep as a library
+
+To get a gogrep library module, install the root Go module.
+
+```bash
+$ go get github.com/quasilyte/gogrep
+```
+
+## gogrep as a command-line utility
+
+To get a gogrep command-line tool, install the `cmd/gogrep` Go submodule.
+
+```bash
+$ go install github.com/quasilyte/cmd/gogrep
+```
+
+See [docs/gogrep_cli.md](_docs/gogrep_cli.md) to learn how to use it.
+
+## Used by
+
+A gogrep library is used by:
+
+* [go-ruleguard](https://github.com/quasilyte/go-ruleguard)
+* [gocorpus](https://github.com/quasilyte/gocorpus)
+
+## Acknowledgements
+
+The original gogrep is written by the [Daniel Martí](https://github.com/mvdan).
diff --git a/vendor/github.com/quasilyte/gogrep/compile.go b/vendor/github.com/quasilyte/gogrep/compile.go
new file mode 100644
index 000000000..cc60c05a6
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/compile.go
@@ -0,0 +1,1174 @@
+package gogrep
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "github.com/quasilyte/gogrep/internal/stdinfo"
+)
+
+type compileError string
+
+func (e compileError) Error() string { return string(e) }
+
+type compiler struct {
+ config CompileConfig
+
+ prog *program
+ stringIndexes map[string]uint8
+ ifaceIndexes map[interface{}]uint8
+
+ info *PatternInfo
+
+ insideStmtList bool
+}
+
+func (c *compiler) Compile(root ast.Node, info *PatternInfo) (p *program, err error) {
+ defer func() {
+ if err != nil {
+ return
+ }
+ rv := recover()
+ if rv == nil {
+ return
+ }
+ if parseErr, ok := rv.(compileError); ok {
+ err = parseErr
+ return
+ }
+ panic(rv) // Not our panic
+ }()
+
+ c.info = info
+ c.prog = &program{
+ insts: make([]instruction, 0, 8),
+ }
+ c.stringIndexes = make(map[string]uint8)
+ c.ifaceIndexes = make(map[interface{}]uint8)
+
+ c.compileNode(root)
+
+ if len(c.prog.insts) == 0 {
+ return nil, c.errorf(root, "0 instructions generated")
+ }
+
+ return c.prog, nil
+}
+
+func (c *compiler) errorf(n ast.Node, format string, args ...interface{}) compileError {
+ loc := c.config.Fset.Position(n.Pos())
+ message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...))
+ return compileError(message)
+}
+
+func (c *compiler) toUint8(n ast.Node, v int) uint8 {
+ if !fitsUint8(v) {
+ panic(c.errorf(n, "implementation error: %v can't be converted to uint8", v))
+ }
+ return uint8(v)
+}
+
+func (c *compiler) internVar(n ast.Node, s string) uint8 {
+ c.info.Vars[s] = struct{}{}
+ index := c.internString(n, s)
+ return index
+}
+
+func (c *compiler) internString(n ast.Node, s string) uint8 {
+ if index, ok := c.stringIndexes[s]; ok {
+ return index
+ }
+ index := len(c.prog.strings)
+ if !fitsUint8(index) {
+ panic(c.errorf(n, "implementation limitation: too many string values"))
+ }
+ c.stringIndexes[s] = uint8(index)
+ c.prog.strings = append(c.prog.strings, s)
+ return uint8(index)
+}
+
+func (c *compiler) internIface(n ast.Node, v interface{}) uint8 {
+ if index, ok := c.ifaceIndexes[v]; ok {
+ return index
+ }
+ index := len(c.prog.ifaces)
+ if !fitsUint8(index) {
+ panic(c.errorf(n, "implementation limitation: too many values"))
+ }
+ c.ifaceIndexes[v] = uint8(index)
+ c.prog.ifaces = append(c.prog.ifaces, v)
+ return uint8(index)
+}
+
+func (c *compiler) emitInst(inst instruction) {
+ c.prog.insts = append(c.prog.insts, inst)
+}
+
+func (c *compiler) emitInstOp(op operation) {
+ c.emitInst(instruction{op: op})
+}
+
+func (c *compiler) compileNode(n ast.Node) {
+ switch n := n.(type) {
+ case *ast.File:
+ c.compileFile(n)
+ case ast.Decl:
+ c.compileDecl(n)
+ case ast.Expr:
+ c.compileExpr(n)
+ case ast.Stmt:
+ c.compileStmt(n)
+ case *ast.ValueSpec:
+ c.compileValueSpec(n)
+ case stmtSlice:
+ c.compileStmtSlice(n)
+ case declSlice:
+ c.compileDeclSlice(n)
+ case ExprSlice:
+ c.compileExprSlice(n)
+ case *rangeClause:
+ c.compileRangeClause(n)
+ case *rangeHeader:
+ c.compileRangeHeader(n)
+ default:
+ panic(c.errorf(n, "compileNode: unexpected %T", n))
+ }
+}
+
+func (c *compiler) compileOptStmt(n ast.Stmt) {
+ if exprStmt, ok := n.(*ast.ExprStmt); ok {
+ if ident, ok := exprStmt.X.(*ast.Ident); ok && isWildName(ident.Name) {
+ c.compileWildIdent(ident, true)
+ return
+ }
+ }
+ c.compileStmt(n)
+}
+
+func (c *compiler) compileOptExpr(n ast.Expr) {
+ if ident, ok := n.(*ast.Ident); ok && isWildName(ident.Name) {
+ c.compileWildIdent(ident, true)
+ return
+ }
+ c.compileExpr(n)
+}
+
+func (c *compiler) compileOptFieldList(n *ast.FieldList) {
+ if len(n.List) == 1 {
+ if ident, ok := n.List[0].Type.(*ast.Ident); ok && isWildName(ident.Name) && len(n.List[0].Names) == 0 {
+ // `func (...) $*result` - result could be anything
+ // `func (...) $result` - result is a field list of 1 element
+ info := decodeWildName(ident.Name)
+ switch {
+ case info.Seq:
+ c.compileWildIdent(ident, true)
+ case info.Name == "_":
+ c.emitInstOp(opFieldNode)
+ default:
+ c.emitInst(instruction{
+ op: opNamedFieldNode,
+ valueIndex: c.internVar(n, info.Name),
+ })
+ }
+ return
+ }
+ }
+ c.compileFieldList(n)
+}
+
+func (c *compiler) compileFieldList(n *ast.FieldList) {
+ c.emitInstOp(opFieldList)
+ for _, x := range n.List {
+ c.compileField(x)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileField(n *ast.Field) {
+ switch {
+ case len(n.Names) == 0:
+ if ident, ok := n.Type.(*ast.Ident); ok && isWildName(ident.Name) {
+ c.compileWildIdent(ident, false)
+ return
+ }
+ c.emitInstOp(opUnnamedField)
+ case len(n.Names) == 1:
+ name := n.Names[0]
+ if isWildName(name.Name) {
+ c.emitInstOp(opField)
+ c.compileWildIdent(name, false)
+ } else {
+ c.emitInst(instruction{
+ op: opSimpleField,
+ valueIndex: c.internString(name, name.Name),
+ })
+ }
+ default:
+ c.emitInstOp(opMultiField)
+ for _, name := range n.Names {
+ c.compileIdent(name)
+ }
+ c.emitInstOp(opEnd)
+ }
+ c.compileExpr(n.Type)
+}
+
+func (c *compiler) compileValueSpec(spec *ast.ValueSpec) {
+ switch {
+ case spec.Type == nil && len(spec.Values) == 0:
+ if isWildName(spec.Names[0].String()) {
+ c.compileIdent(spec.Names[0])
+ return
+ }
+ c.emitInstOp(opValueSpec)
+ case spec.Type == nil:
+ c.emitInstOp(opValueInitSpec)
+ case len(spec.Values) == 0:
+ c.emitInstOp(opTypedValueSpec)
+ default:
+ c.emitInstOp(opTypedValueInitSpec)
+ }
+ for _, name := range spec.Names {
+ c.compileIdent(name)
+ }
+ c.emitInstOp(opEnd)
+ if spec.Type != nil {
+ c.compileOptExpr(spec.Type)
+ }
+ if len(spec.Values) != 0 {
+ for _, v := range spec.Values {
+ c.compileExpr(v)
+ }
+ c.emitInstOp(opEnd)
+ }
+}
+
+func (c *compiler) compileTypeSpec(spec *ast.TypeSpec) {
+ c.emitInstOp(pickOp(spec.Assign.IsValid(), opTypeAliasSpec, opTypeSpec))
+ c.compileIdent(spec.Name)
+ c.compileExpr(spec.Type)
+}
+
+func (c *compiler) compileFile(n *ast.File) {
+ if len(n.Imports) == 0 && len(n.Decls) == 0 {
+ c.emitInstOp(opEmptyPackage)
+ c.compileIdent(n.Name)
+ return
+ }
+
+ panic(c.errorf(n, "compileFile: unsupported file pattern"))
+}
+
+func (c *compiler) compileDecl(n ast.Decl) {
+ switch n := n.(type) {
+ case *ast.FuncDecl:
+ c.compileFuncDecl(n)
+ case *ast.GenDecl:
+ c.compileGenDecl(n)
+
+ default:
+ panic(c.errorf(n, "compileDecl: unexpected %T", n))
+ }
+}
+
+func (c *compiler) compileFuncDecl(n *ast.FuncDecl) {
+ if n.Recv == nil {
+ c.emitInstOp(pickOp(n.Body == nil, opFuncProtoDecl, opFuncDecl))
+ } else {
+ c.emitInstOp(pickOp(n.Body == nil, opMethodProtoDecl, opMethodDecl))
+ }
+
+ if n.Recv != nil {
+ c.compileFieldList(n.Recv)
+ }
+ c.compileIdent(n.Name)
+ c.compileFuncType(n.Type)
+ if n.Body != nil {
+ c.compileBlockStmt(n.Body)
+ }
+}
+
+func (c *compiler) compileGenDecl(n *ast.GenDecl) {
+ if c.insideStmtList {
+ c.emitInstOp(opDeclStmt)
+ }
+
+ switch n.Tok {
+ case token.CONST, token.VAR:
+ c.emitInstOp(pickOp(n.Tok == token.CONST, opConstDecl, opVarDecl))
+ for _, spec := range n.Specs {
+ c.compileValueSpec(spec.(*ast.ValueSpec))
+ }
+ c.emitInstOp(opEnd)
+ case token.TYPE:
+ c.emitInstOp(opTypeDecl)
+ for _, spec := range n.Specs {
+ c.compileTypeSpec(spec.(*ast.TypeSpec))
+ }
+ c.emitInstOp(opEnd)
+
+ default:
+ panic(c.errorf(n, "unexpected gen decl"))
+ }
+}
+
+func (c *compiler) compileExpr(n ast.Expr) {
+ switch n := n.(type) {
+ case *ast.BasicLit:
+ c.compileBasicLit(n)
+ case *ast.BinaryExpr:
+ c.compileBinaryExpr(n)
+ case *ast.IndexExpr:
+ c.compileIndexExpr(n)
+ case *ast.Ident:
+ c.compileIdent(n)
+ case *ast.CallExpr:
+ c.compileCallExpr(n)
+ case *ast.UnaryExpr:
+ c.compileUnaryExpr(n)
+ case *ast.StarExpr:
+ c.compileStarExpr(n)
+ case *ast.ParenExpr:
+ c.compileParenExpr(n)
+ case *ast.SliceExpr:
+ c.compileSliceExpr(n)
+ case *ast.StructType:
+ c.compileStructType(n)
+ case *ast.InterfaceType:
+ c.compileInterfaceType(n)
+ case *ast.FuncType:
+ c.compileFuncType(n)
+ case *ast.ArrayType:
+ c.compileArrayType(n)
+ case *ast.MapType:
+ c.compileMapType(n)
+ case *ast.ChanType:
+ c.compileChanType(n)
+ case *ast.CompositeLit:
+ c.compileCompositeLit(n)
+ case *ast.FuncLit:
+ c.compileFuncLit(n)
+ case *ast.Ellipsis:
+ c.compileEllipsis(n)
+ case *ast.KeyValueExpr:
+ c.compileKeyValueExpr(n)
+ case *ast.SelectorExpr:
+ c.compileSelectorExpr(n)
+ case *ast.TypeAssertExpr:
+ c.compileTypeAssertExpr(n)
+
+ default:
+ panic(c.errorf(n, "compileExpr: unexpected %T", n))
+ }
+}
+
+func (c *compiler) compileBasicLit(n *ast.BasicLit) {
+ if !c.config.Strict {
+ v := literalValue(n)
+ if v == nil {
+ panic(c.errorf(n, "can't convert %s (%s) value", n.Value, n.Kind))
+ }
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opBasicLit,
+ valueIndex: c.internIface(n, v),
+ })
+ return
+ }
+
+ var inst instruction
+ switch n.Kind {
+ case token.INT:
+ inst.op = opStrictIntLit
+ case token.FLOAT:
+ inst.op = opStrictFloatLit
+ case token.STRING:
+ inst.op = opStrictStringLit
+ case token.CHAR:
+ inst.op = opStrictCharLit
+ default:
+ inst.op = opStrictComplexLit
+ }
+ inst.valueIndex = c.internString(n, n.Value)
+ c.prog.insts = append(c.prog.insts, inst)
+}
+
+func (c *compiler) compileBinaryExpr(n *ast.BinaryExpr) {
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opBinaryExpr,
+ value: c.toUint8(n, int(n.Op)),
+ })
+ c.compileExpr(n.X)
+ c.compileExpr(n.Y)
+}
+
+func (c *compiler) compileIndexExpr(n *ast.IndexExpr) {
+ c.emitInstOp(opIndexExpr)
+ c.compileExpr(n.X)
+ c.compileExpr(n.Index)
+}
+
+func (c *compiler) compileWildIdent(n *ast.Ident, optional bool) {
+ info := decodeWildName(n.Name)
+ var inst instruction
+ switch {
+ case info.Name == "_" && !info.Seq:
+ inst.op = opNode
+ case info.Name == "_" && info.Seq:
+ inst.op = pickOp(optional, opOptNode, opNodeSeq)
+ case info.Name != "_" && !info.Seq:
+ inst.op = opNamedNode
+ inst.valueIndex = c.internVar(n, info.Name)
+ default:
+ inst.op = pickOp(optional, opNamedOptNode, opNamedNodeSeq)
+ inst.valueIndex = c.internVar(n, info.Name)
+ }
+ c.prog.insts = append(c.prog.insts, inst)
+}
+
+func (c *compiler) compileIdent(n *ast.Ident) {
+ if isWildName(n.Name) {
+ c.compileWildIdent(n, false)
+ return
+ }
+
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opIdent,
+ valueIndex: c.internString(n, n.Name),
+ })
+}
+
+func (c *compiler) compileExprMembers(list []ast.Expr) {
+ isSimple := len(list) <= 255
+ if isSimple {
+ for _, x := range list {
+ if decodeWildNode(x).Seq {
+ isSimple = false
+ break
+ }
+ }
+ }
+
+ if isSimple {
+ c.emitInst(instruction{
+ op: opSimpleArgList,
+ value: uint8(len(list)),
+ })
+ for _, x := range list {
+ c.compileExpr(x)
+ }
+ } else {
+ c.emitInstOp(opArgList)
+ for _, x := range list {
+ c.compileExpr(x)
+ }
+ c.emitInstOp(opEnd)
+ }
+}
+
+func (c *compiler) compileCallExpr(n *ast.CallExpr) {
+ canBeVariadic := func(n *ast.CallExpr) bool {
+ if len(n.Args) == 0 {
+ return false
+ }
+ lastArg, ok := n.Args[len(n.Args)-1].(*ast.Ident)
+ if !ok {
+ return false
+ }
+ return isWildName(lastArg.Name) && decodeWildName(lastArg.Name).Seq
+ }
+
+ op := opNonVariadicCallExpr
+ if n.Ellipsis.IsValid() {
+ op = opVariadicCallExpr
+ } else if canBeVariadic(n) {
+ op = opCallExpr
+ }
+
+ c.emitInstOp(op)
+ c.compileSymbol(n.Fun)
+ c.compileExprMembers(n.Args)
+}
+
+// compileSymbol is mostly like a normal compileExpr, but it's used
+// in places where we can find a type/function symbol.
+//
+// For example, in function call expressions a called function expression
+// can look like `fmt.Sprint`. It will be compiled as a special
+// selector expression that requires `fmt` to be a package as opposed
+// to only check that it's an identifier with "fmt" value.
+func (c *compiler) compileSymbol(sym ast.Expr) {
+ compilePkgSymbol := func(c *compiler, sym ast.Expr) bool {
+ e, ok := sym.(*ast.SelectorExpr)
+ if !ok {
+ return false
+ }
+ ident, ok := e.X.(*ast.Ident)
+ if !ok || isWildName(e.Sel.Name) {
+ return false
+ }
+ pkgPath := c.config.Imports[ident.Name]
+ if pkgPath == "" && stdinfo.Packages[ident.Name] != "" {
+ pkgPath = stdinfo.Packages[ident.Name]
+ }
+ if pkgPath == "" {
+ return false
+ }
+ c.emitInst(instruction{
+ op: opSimpleSelectorExpr,
+ valueIndex: c.internString(e.Sel, e.Sel.String()),
+ })
+ c.emitInst(instruction{
+ op: opPkg,
+ valueIndex: c.internString(ident, pkgPath),
+ })
+ return true
+ }
+
+ if c.config.WithTypes {
+ if compilePkgSymbol(c, sym) {
+ return
+ }
+ }
+
+ c.compileExpr(sym)
+}
+
+func (c *compiler) compileUnaryExpr(n *ast.UnaryExpr) {
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opUnaryExpr,
+ value: c.toUint8(n, int(n.Op)),
+ })
+ c.compileExpr(n.X)
+}
+
+func (c *compiler) compileStarExpr(n *ast.StarExpr) {
+ c.emitInstOp(opStarExpr)
+ c.compileExpr(n.X)
+}
+
+func (c *compiler) compileParenExpr(n *ast.ParenExpr) {
+ c.emitInstOp(opParenExpr)
+ c.compileExpr(n.X)
+}
+
+func (c *compiler) compileSliceExpr(n *ast.SliceExpr) {
+ switch {
+ case n.Low == nil && n.High == nil && !n.Slice3:
+ c.emitInstOp(opSliceExpr)
+ c.compileOptExpr(n.X)
+ case n.Low != nil && n.High == nil && !n.Slice3:
+ c.emitInstOp(opSliceFromExpr)
+ c.compileOptExpr(n.X)
+ c.compileOptExpr(n.Low)
+ case n.Low == nil && n.High != nil && !n.Slice3:
+ c.emitInstOp(opSliceToExpr)
+ c.compileOptExpr(n.X)
+ c.compileOptExpr(n.High)
+ case n.Low != nil && n.High != nil && !n.Slice3:
+ c.emitInstOp(opSliceFromToExpr)
+ c.compileOptExpr(n.X)
+ c.compileOptExpr(n.Low)
+ c.compileOptExpr(n.High)
+ case n.Low == nil && n.Slice3:
+ c.emitInstOp(opSliceToCapExpr)
+ c.compileOptExpr(n.X)
+ c.compileOptExpr(n.High)
+ c.compileOptExpr(n.Max)
+ case n.Low != nil && n.Slice3:
+ c.emitInstOp(opSliceFromToCapExpr)
+ c.compileOptExpr(n.X)
+ c.compileOptExpr(n.Low)
+ c.compileOptExpr(n.High)
+ c.compileOptExpr(n.Max)
+ default:
+ panic(c.errorf(n, "unexpected slice expr"))
+ }
+}
+
+func (c *compiler) compileStructType(n *ast.StructType) {
+ c.emitInstOp(opStructType)
+ c.compileOptFieldList(n.Fields)
+}
+
+func (c *compiler) compileInterfaceType(n *ast.InterfaceType) {
+ c.emitInstOp(opInterfaceType)
+ c.compileOptFieldList(n.Methods)
+}
+
+func (c *compiler) compileFuncType(n *ast.FuncType) {
+ void := n.Results == nil || len(n.Results.List) == 0
+ if void {
+ c.emitInstOp(opVoidFuncType)
+ } else {
+ c.emitInstOp(opFuncType)
+ }
+ c.compileOptFieldList(n.Params)
+ if !void {
+ c.compileOptFieldList(n.Results)
+ }
+}
+
+func (c *compiler) compileArrayType(n *ast.ArrayType) {
+ if n.Len == nil {
+ c.emitInstOp(opSliceType)
+ c.compileExpr(n.Elt)
+ } else {
+ c.emitInstOp(opArrayType)
+ c.compileExpr(n.Len)
+ c.compileExpr(n.Elt)
+ }
+}
+
+func (c *compiler) compileMapType(n *ast.MapType) {
+ c.emitInstOp(opMapType)
+ c.compileExpr(n.Key)
+ c.compileExpr(n.Value)
+}
+
+func (c *compiler) compileChanType(n *ast.ChanType) {
+ c.emitInst(instruction{
+ op: opChanType,
+ value: c.toUint8(n, int(n.Dir)),
+ })
+ c.compileExpr(n.Value)
+}
+
+func (c *compiler) compileCompositeLit(n *ast.CompositeLit) {
+ if n.Type == nil {
+ c.emitInstOp(opCompositeLit)
+ } else {
+ c.emitInstOp(opTypedCompositeLit)
+ c.compileExpr(n.Type)
+ }
+ for _, elt := range n.Elts {
+ c.compileExpr(elt)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileFuncLit(n *ast.FuncLit) {
+ c.emitInstOp(opFuncLit)
+ c.compileFuncType(n.Type)
+ c.compileBlockStmt(n.Body)
+}
+
+func (c *compiler) compileEllipsis(n *ast.Ellipsis) {
+ if n.Elt == nil {
+ c.emitInstOp(opEllipsis)
+ } else {
+ c.emitInstOp(opTypedEllipsis)
+ c.compileExpr(n.Elt)
+ }
+}
+
+func (c *compiler) compileKeyValueExpr(n *ast.KeyValueExpr) {
+ c.emitInstOp(opKeyValueExpr)
+ c.compileExpr(n.Key)
+ c.compileExpr(n.Value)
+}
+
+func (c *compiler) compileSelectorExpr(n *ast.SelectorExpr) {
+ if isWildName(n.Sel.Name) {
+ c.emitInstOp(opSelectorExpr)
+ c.compileWildIdent(n.Sel, false)
+ c.compileExpr(n.X)
+ return
+ }
+
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opSimpleSelectorExpr,
+ valueIndex: c.internString(n.Sel, n.Sel.String()),
+ })
+ c.compileExpr(n.X)
+}
+
+func (c *compiler) compileTypeAssertExpr(n *ast.TypeAssertExpr) {
+ if n.Type != nil {
+ c.emitInstOp(opTypeAssertExpr)
+ c.compileExpr(n.X)
+ c.compileExpr(n.Type)
+ } else {
+ c.emitInstOp(opTypeSwitchAssertExpr)
+ c.compileExpr(n.X)
+ }
+}
+
+func (c *compiler) compileStmt(n ast.Stmt) {
+ switch n := n.(type) {
+ case *ast.AssignStmt:
+ c.compileAssignStmt(n)
+ case *ast.BlockStmt:
+ c.compileBlockStmt(n)
+ case *ast.ExprStmt:
+ c.compileExprStmt(n)
+ case *ast.IfStmt:
+ c.compileIfStmt(n)
+ case *ast.CaseClause:
+ c.compileCaseClause(n)
+ case *ast.SwitchStmt:
+ c.compileSwitchStmt(n)
+ case *ast.TypeSwitchStmt:
+ c.compileTypeSwitchStmt(n)
+ case *ast.SelectStmt:
+ c.compileSelectStmt(n)
+ case *ast.ForStmt:
+ c.compileForStmt(n)
+ case *ast.RangeStmt:
+ c.compileRangeStmt(n)
+ case *ast.IncDecStmt:
+ c.compileIncDecStmt(n)
+ case *ast.EmptyStmt:
+ c.compileEmptyStmt(n)
+ case *ast.ReturnStmt:
+ c.compileReturnStmt(n)
+ case *ast.BranchStmt:
+ c.compileBranchStmt(n)
+ case *ast.LabeledStmt:
+ c.compileLabeledStmt(n)
+ case *ast.GoStmt:
+ c.compileGoStmt(n)
+ case *ast.DeferStmt:
+ c.compileDeferStmt(n)
+ case *ast.SendStmt:
+ c.compileSendStmt(n)
+ case *ast.DeclStmt:
+ c.compileDecl(n.Decl)
+
+ default:
+ panic(c.errorf(n, "compileStmt: unexpected %T", n))
+ }
+}
+
+func (c *compiler) compileAssignStmt(n *ast.AssignStmt) {
+ if len(n.Lhs) == 1 && len(n.Rhs) == 1 {
+ lhsInfo := decodeWildNode(n.Lhs[0])
+ rhsInfo := decodeWildNode(n.Rhs[0])
+ if !lhsInfo.Seq && !rhsInfo.Seq {
+ c.emitInst(instruction{
+ op: opAssignStmt,
+ value: uint8(n.Tok),
+ })
+ c.compileExpr(n.Lhs[0])
+ c.compileExpr(n.Rhs[0])
+ return
+ }
+ }
+
+ c.emitInst(instruction{
+ op: opMultiAssignStmt,
+ value: uint8(n.Tok),
+ })
+ for _, x := range n.Lhs {
+ c.compileExpr(x)
+ }
+ c.emitInstOp(opEnd)
+ for _, x := range n.Rhs {
+ c.compileExpr(x)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileBlockStmt(n *ast.BlockStmt) {
+ c.emitInstOp(opBlockStmt)
+ insideStmtList := c.insideStmtList
+ c.insideStmtList = true
+ for _, elt := range n.List {
+ c.compileStmt(elt)
+ }
+ c.insideStmtList = insideStmtList
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileExprStmt(n *ast.ExprStmt) {
+ if ident, ok := n.X.(*ast.Ident); ok && isWildName(ident.Name) {
+ c.compileIdent(ident)
+ } else {
+ c.emitInstOp(opExprStmt)
+ c.compileExpr(n.X)
+ }
+}
+
+func (c *compiler) compileIfStmt(n *ast.IfStmt) {
+ // Check for the special case: `if $*_ ...` should match all if statements.
+ if ident, ok := n.Cond.(*ast.Ident); ok && n.Init == nil && isWildName(ident.Name) {
+ info := decodeWildName(ident.Name)
+ if info.Seq && info.Name == "_" {
+ // Set Init to Cond, change cond from $*_ to $_.
+ n.Init = &ast.ExprStmt{X: n.Cond}
+ cond := &ast.Ident{Name: encodeWildName(info.Name, false)}
+ n.Cond = cond
+ c.compileIfStmt(n)
+ return
+ }
+ // Named $* is harder and slower.
+ if info.Seq {
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: pickOp(n.Else == nil, opIfNamedOptStmt, opIfNamedOptElseStmt),
+ valueIndex: c.internVar(ident, info.Name),
+ })
+ c.compileStmt(n.Body)
+ if n.Else != nil {
+ c.compileStmt(n.Else)
+ }
+ return
+ }
+ }
+
+ switch {
+ case n.Init == nil && n.Else == nil:
+ c.emitInstOp(opIfStmt)
+ c.compileExpr(n.Cond)
+ c.compileStmt(n.Body)
+ case n.Init != nil && n.Else == nil:
+ c.emitInstOp(opIfInitStmt)
+ c.compileOptStmt(n.Init)
+ c.compileExpr(n.Cond)
+ c.compileStmt(n.Body)
+ case n.Init == nil && n.Else != nil:
+ c.emitInstOp(opIfElseStmt)
+ c.compileExpr(n.Cond)
+ c.compileStmt(n.Body)
+ c.compileStmt(n.Else)
+ case n.Init != nil && n.Else != nil:
+ c.emitInstOp(opIfInitElseStmt)
+ c.compileOptStmt(n.Init)
+ c.compileExpr(n.Cond)
+ c.compileStmt(n.Body)
+ c.compileStmt(n.Else)
+
+ default:
+ panic(c.errorf(n, "unexpected if stmt"))
+ }
+}
+
+func (c *compiler) compileCommClause(n *ast.CommClause) {
+ c.emitInstOp(pickOp(n.Comm == nil, opDefaultCommClause, opCommClause))
+ if n.Comm != nil {
+ c.compileStmt(n.Comm)
+ }
+ for _, x := range n.Body {
+ c.compileStmt(x)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileCaseClause(n *ast.CaseClause) {
+ c.emitInstOp(pickOp(n.List == nil, opDefaultCaseClause, opCaseClause))
+ if n.List != nil {
+ for _, x := range n.List {
+ c.compileExpr(x)
+ }
+ c.emitInstOp(opEnd)
+ }
+ for _, x := range n.Body {
+ c.compileStmt(x)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileSwitchBody(n *ast.BlockStmt) {
+ wildcardCase := func(cc *ast.CaseClause) *ast.Ident {
+ if len(cc.List) != 1 || len(cc.Body) != 1 {
+ return nil
+ }
+ v, ok := cc.List[0].(*ast.Ident)
+ if !ok || !isWildName(v.Name) {
+ return nil
+ }
+ bodyStmt, ok := cc.Body[0].(*ast.ExprStmt)
+ if !ok {
+ return nil
+ }
+ bodyIdent, ok := bodyStmt.X.(*ast.Ident)
+ if !ok || bodyIdent.Name != "gogrep_body" {
+ return nil
+ }
+ return v
+ }
+ for _, cc := range n.List {
+ cc := cc.(*ast.CaseClause)
+ wildcard := wildcardCase(cc)
+ if wildcard == nil {
+ c.compileCaseClause(cc)
+ continue
+ }
+ c.compileWildIdent(wildcard, false)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileSwitchStmt(n *ast.SwitchStmt) {
+ var op operation
+ switch {
+ case n.Init == nil && n.Tag == nil:
+ op = opSwitchStmt
+ case n.Init == nil && n.Tag != nil:
+ op = opSwitchTagStmt
+ case n.Init != nil && n.Tag == nil:
+ op = opSwitchInitStmt
+ default:
+ op = opSwitchInitTagStmt
+ }
+
+ c.emitInstOp(op)
+ if n.Init != nil {
+ c.compileOptStmt(n.Init)
+ }
+ if n.Tag != nil {
+ c.compileOptExpr(n.Tag)
+ }
+ c.compileSwitchBody(n.Body)
+}
+
+func (c *compiler) compileTypeSwitchStmt(n *ast.TypeSwitchStmt) {
+ c.emitInstOp(pickOp(n.Init == nil, opTypeSwitchStmt, opTypeSwitchInitStmt))
+ if n.Init != nil {
+ c.compileOptStmt(n.Init)
+ }
+ c.compileStmt(n.Assign)
+ c.compileSwitchBody(n.Body)
+}
+
+func (c *compiler) compileSelectStmt(n *ast.SelectStmt) {
+ c.emitInstOp(opSelectStmt)
+
+ wildcardCase := func(cc *ast.CommClause) *ast.Ident {
+ if cc.Comm == nil {
+ return nil
+ }
+ vStmt, ok := cc.Comm.(*ast.ExprStmt)
+ if !ok {
+ return nil
+ }
+ v, ok := vStmt.X.(*ast.Ident)
+ if !ok || !isWildName(v.Name) {
+ return nil
+ }
+ bodyStmt, ok := cc.Body[0].(*ast.ExprStmt)
+ if !ok {
+ return nil
+ }
+ bodyIdent, ok := bodyStmt.X.(*ast.Ident)
+ if !ok || bodyIdent.Name != "gogrep_body" {
+ return nil
+ }
+ return v
+ }
+ for _, cc := range n.Body.List {
+ cc := cc.(*ast.CommClause)
+ wildcard := wildcardCase(cc)
+ if wildcard == nil {
+ c.compileCommClause(cc)
+ continue
+ }
+ c.compileWildIdent(wildcard, false)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileForStmt(n *ast.ForStmt) {
+ var op operation
+ switch {
+ case n.Init == nil && n.Cond == nil && n.Post == nil:
+ op = opForStmt
+ case n.Init == nil && n.Cond == nil && n.Post != nil:
+ op = opForPostStmt
+ case n.Init == nil && n.Cond != nil && n.Post == nil:
+ op = opForCondStmt
+ case n.Init == nil && n.Cond != nil && n.Post != nil:
+ op = opForCondPostStmt
+ case n.Init != nil && n.Cond == nil && n.Post == nil:
+ op = opForInitStmt
+ case n.Init != nil && n.Cond == nil && n.Post != nil:
+ op = opForInitPostStmt
+ case n.Init != nil && n.Cond != nil && n.Post == nil:
+ op = opForInitCondStmt
+ default:
+ op = opForInitCondPostStmt
+ }
+
+ c.emitInstOp(op)
+ if n.Init != nil {
+ c.compileOptStmt(n.Init)
+ }
+ if n.Cond != nil {
+ c.compileOptExpr(n.Cond)
+ }
+ if n.Post != nil {
+ c.compileOptStmt(n.Post)
+ }
+ c.compileBlockStmt(n.Body)
+}
+
+func (c *compiler) compileRangeStmt(n *ast.RangeStmt) {
+ switch {
+ case n.Key == nil && n.Value == nil:
+ c.emitInstOp(opRangeStmt)
+ c.compileExpr(n.X)
+ c.compileStmt(n.Body)
+ case n.Key != nil && n.Value == nil:
+ c.emitInst(instruction{
+ op: opRangeKeyStmt,
+ value: c.toUint8(n, int(n.Tok)),
+ })
+ c.compileExpr(n.Key)
+ c.compileExpr(n.X)
+ c.compileStmt(n.Body)
+ case n.Key != nil && n.Value != nil:
+ c.emitInst(instruction{
+ op: opRangeKeyValueStmt,
+ value: c.toUint8(n, int(n.Tok)),
+ })
+ c.compileExpr(n.Key)
+ c.compileExpr(n.Value)
+ c.compileExpr(n.X)
+ c.compileStmt(n.Body)
+ default:
+ panic(c.errorf(n, "unexpected range stmt"))
+ }
+}
+
+func (c *compiler) compileIncDecStmt(n *ast.IncDecStmt) {
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opIncDecStmt,
+ value: c.toUint8(n, int(n.Tok)),
+ })
+ c.compileExpr(n.X)
+}
+
+func (c *compiler) compileEmptyStmt(n *ast.EmptyStmt) {
+ _ = n // unused
+ c.emitInstOp(opEmptyStmt)
+}
+
+func (c *compiler) compileReturnStmt(n *ast.ReturnStmt) {
+ c.emitInstOp(opReturnStmt)
+ for _, x := range n.Results {
+ c.compileExpr(x)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileBranchStmt(n *ast.BranchStmt) {
+ if n.Label != nil {
+ if isWildName(n.Label.Name) {
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opLabeledBranchStmt,
+ value: c.toUint8(n, int(n.Tok)),
+ })
+ c.compileWildIdent(n.Label, false)
+ } else {
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opSimpleLabeledBranchStmt,
+ value: c.toUint8(n, int(n.Tok)),
+ valueIndex: c.internString(n.Label, n.Label.Name),
+ })
+ }
+ return
+ }
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opBranchStmt,
+ value: c.toUint8(n, int(n.Tok)),
+ })
+}
+
+func (c *compiler) compileLabeledStmt(n *ast.LabeledStmt) {
+ if isWildName(n.Label.Name) {
+ c.emitInstOp(opLabeledStmt)
+ c.compileWildIdent(n.Label, false)
+ c.compileStmt(n.Stmt)
+ return
+ }
+
+ c.prog.insts = append(c.prog.insts, instruction{
+ op: opSimpleLabeledStmt,
+ valueIndex: c.internString(n.Label, n.Label.Name),
+ })
+ c.compileStmt(n.Stmt)
+}
+
+func (c *compiler) compileGoStmt(n *ast.GoStmt) {
+ c.emitInstOp(opGoStmt)
+ c.compileExpr(n.Call)
+}
+
+func (c *compiler) compileDeferStmt(n *ast.DeferStmt) {
+ c.emitInstOp(opDeferStmt)
+ c.compileExpr(n.Call)
+}
+
+func (c *compiler) compileSendStmt(n *ast.SendStmt) {
+ c.emitInstOp(opSendStmt)
+ c.compileExpr(n.Chan)
+ c.compileExpr(n.Value)
+}
+
+func (c *compiler) compileDeclSlice(decls declSlice) {
+ c.emitInstOp(opMultiDecl)
+ for _, n := range decls {
+ c.compileDecl(n)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileStmtSlice(stmts stmtSlice) {
+ c.emitInstOp(opMultiStmt)
+ insideStmtList := c.insideStmtList
+ c.insideStmtList = true
+ for _, n := range stmts {
+ c.compileStmt(n)
+ }
+ c.insideStmtList = insideStmtList
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileExprSlice(exprs ExprSlice) {
+ c.emitInstOp(opMultiExpr)
+ for _, n := range exprs {
+ c.compileExpr(n)
+ }
+ c.emitInstOp(opEnd)
+}
+
+func (c *compiler) compileRangeClause(clause *rangeClause) {
+ c.emitInstOp(opRangeClause)
+ c.compileExpr(clause.X)
+}
+
+func (c *compiler) compileRangeHeader(h *rangeHeader) {
+ n := h.Node
+ switch {
+ case n.Key == nil && n.Value == nil:
+ c.emitInstOp(opRangeHeader)
+ c.compileExpr(n.X)
+ case n.Key != nil && n.Value == nil:
+ c.emitInst(instruction{
+ op: opRangeKeyHeader,
+ value: c.toUint8(n, int(n.Tok)),
+ })
+ c.compileExpr(n.Key)
+ c.compileExpr(n.X)
+ case n.Key != nil && n.Value != nil:
+ c.emitInst(instruction{
+ op: opRangeKeyValueHeader,
+ value: c.toUint8(n, int(n.Tok)),
+ })
+ c.compileExpr(n.Key)
+ c.compileExpr(n.Value)
+ c.compileExpr(n.X)
+ default:
+ panic(c.errorf(n, "unexpected range header"))
+ }
+}
+
+func pickOp(cond bool, ifTrue, ifFalse operation) operation {
+ if cond {
+ return ifTrue
+ }
+ return ifFalse
+}
+
+func fitsUint8(v int) bool {
+ return v >= 0 && v <= 0xff
+}
diff --git a/vendor/github.com/quasilyte/gogrep/compile_import.go b/vendor/github.com/quasilyte/gogrep/compile_import.go
new file mode 100644
index 000000000..ab0dd12a7
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/compile_import.go
@@ -0,0 +1,57 @@
+package gogrep
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+func compileImportPattern(config CompileConfig) (*Pattern, PatternInfo, error) {
+ // TODO: figure out how to compile it as a part of a normal pattern compilation?
+ // This is an adhoc solution to a problem.
+
+ readIdent := func(s string) (varname, rest string) {
+ first := true
+ var offset int
+ for _, ch := range s {
+ ok := unicode.IsLetter(ch) ||
+ ch == '_' ||
+ (!first && unicode.IsDigit(ch))
+ if !ok {
+ break
+ }
+ offset += utf8.RuneLen(ch)
+ first = false
+ }
+ return s[:offset], s[offset:]
+ }
+
+ info := newPatternInfo()
+ src := config.Src
+ src = src[len("import $"):]
+ if src == "" {
+ return nil, info, errors.New("expected ident after $, found EOF")
+ }
+ varname, rest := readIdent(src)
+ if strings.TrimSpace(rest) != "" {
+ return nil, info, fmt.Errorf("unexpected %s", rest)
+ }
+ var p program
+ if varname != "_" {
+ info.Vars[src] = struct{}{}
+ p.strings = []string{varname}
+ p.insts = []instruction{
+ {op: opImportDecl},
+ {op: opNamedNodeSeq, valueIndex: 0},
+ {op: opEnd},
+ }
+ } else {
+ p.insts = []instruction{
+ {op: opAnyImportDecl},
+ }
+ }
+ m := matcher{prog: &p, insts: p.insts}
+ return &Pattern{m: &m}, info, nil
+}
diff --git a/vendor/github.com/quasilyte/gogrep/gen_operations.go b/vendor/github.com/quasilyte/gogrep/gen_operations.go
new file mode 100644
index 000000000..8de59980b
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/gen_operations.go
@@ -0,0 +1,357 @@
+//go:build main
+// +build main
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "strings"
+ "text/template"
+)
+
+var opPrototypes = []operationProto{
+ {name: "Node", tag: "Node"},
+ {name: "NamedNode", tag: "Node", valueIndex: "strings | wildcard name"},
+ {name: "NodeSeq"},
+ {name: "NamedNodeSeq", valueIndex: "strings | wildcard name"},
+ {name: "OptNode"},
+ {name: "NamedOptNode", valueIndex: "strings | wildcard name"},
+
+ {name: "FieldNode", tag: "Node"},
+ {name: "NamedFieldNode", tag: "Node", valueIndex: "strings | wildcard name"},
+
+ {name: "MultiStmt", tag: "StmtList", args: "stmts...", example: "f(); g()"},
+ {name: "MultiExpr", tag: "ExprList", args: "exprs...", example: "f(), g()"},
+ {name: "MultiDecl", tag: "DeclList", args: "exprs...", example: "f(), g()"},
+
+ {name: "End"},
+
+ {name: "BasicLit", tag: "BasicLit", valueIndex: "ifaces | parsed literal value"},
+ {name: "StrictIntLit", tag: "BasicLit", valueIndex: "strings | raw literal value"},
+ {name: "StrictFloatLit", tag: "BasicLit", valueIndex: "strings | raw literal value"},
+ {name: "StrictCharLit", tag: "BasicLit", valueIndex: "strings | raw literal value"},
+ {name: "StrictStringLit", tag: "BasicLit", valueIndex: "strings | raw literal value"},
+ {name: "StrictComplexLit", tag: "BasicLit", valueIndex: "strings | raw literal value"},
+
+ {name: "Ident", tag: "Ident", valueIndex: "strings | ident name"},
+ {name: "Pkg", tag: "Ident", valueIndex: "strings | package path"},
+
+ {name: "IndexExpr", tag: "IndexExpr", args: "x expr"},
+
+ {name: "SliceExpr", tag: "SliceExpr", args: "x"},
+ {name: "SliceFromExpr", tag: "SliceExpr", args: "x from", example: "x[from:]"},
+ {name: "SliceToExpr", tag: "SliceExpr", args: "x to", example: "x[:to]"},
+ {name: "SliceFromToExpr", tag: "SliceExpr", args: "x from to", example: "x[from:to]"},
+ {name: "SliceToCapExpr", tag: "SliceExpr", args: "x from cap", example: "x[:from:cap]"},
+ {name: "SliceFromToCapExpr", tag: "SliceExpr", args: "x from to cap", example: "x[from:to:cap]"},
+
+ {name: "FuncLit", tag: "FuncLit", args: "type block"},
+
+ {name: "CompositeLit", tag: "CompositeLit", args: "elts...", example: "{elts...}"},
+ {name: "TypedCompositeLit", tag: "CompositeLit", args: "typ elts...", example: "typ{elts...}"},
+
+ {name: "SimpleSelectorExpr", tag: "SelectorExpr", args: "x", valueIndex: "strings | selector name"},
+ {name: "SelectorExpr", tag: "SelectorExpr", args: "x sel"},
+ {name: "TypeAssertExpr", tag: "TypeAssertExpr", args: "x typ"},
+ {name: "TypeSwitchAssertExpr", tag: "TypeAssertExpr", args: "x"},
+
+ {name: "StructType", tag: "StructType", args: "fields"},
+ {name: "InterfaceType", tag: "StructType", args: "fields"},
+ {name: "VoidFuncType", tag: "FuncType", args: "params"},
+ {name: "FuncType", tag: "FuncType", args: "params results"},
+ {name: "ArrayType", tag: "ArrayType", args: "length elem"},
+ {name: "SliceType", tag: "ArrayType", args: "elem"},
+ {name: "MapType", tag: "MapType", args: "key value"},
+ {name: "ChanType", tag: "ChanType", args: "value", value: "ast.ChanDir | channel direction"},
+ {name: "KeyValueExpr", tag: "KeyValueExpr", args: "key value"},
+
+ {name: "Ellipsis", tag: "Ellipsis"},
+ {name: "TypedEllipsis", tag: "Ellipsis", args: "type"},
+
+ {name: "StarExpr", tag: "StarExpr", args: "x"},
+ {name: "UnaryExpr", tag: "UnaryExpr", args: "x", value: "token.Token | unary operator"},
+ {name: "BinaryExpr", tag: "BinaryExpr", args: "x y", value: "token.Token | binary operator"},
+ {name: "ParenExpr", tag: "ParenExpr", args: "x"},
+
+ {
+ name: "ArgList",
+ args: "exprs...",
+ example: "1, 2, 3",
+ },
+ {
+ name: "SimpleArgList",
+ note: "Like ArgList, but pattern contains no $*",
+ args: "exprs[]",
+ value: "int | slice len",
+ example: "1, 2, 3",
+ },
+
+ {name: "VariadicCallExpr", tag: "CallExpr", args: "fn args", example: "f(1, xs...)"},
+ {name: "NonVariadicCallExpr", tag: "CallExpr", args: "fn args", example: "f(1, xs)"},
+ {name: "CallExpr", tag: "CallExpr", args: "fn args", example: "f(1, xs) or f(1, xs...)"},
+
+ {name: "AssignStmt", tag: "AssignStmt", args: "lhs rhs", value: "token.Token | ':=' or '='", example: "lhs := rhs()"},
+ {name: "MultiAssignStmt", tag: "AssignStmt", args: "lhs... rhs...", value: "token.Token | ':=' or '='", example: "lhs1, lhs2 := rhs()"},
+
+ {name: "BranchStmt", tag: "BranchStmt", args: "x", value: "token.Token | branch kind"},
+ {name: "SimpleLabeledBranchStmt", tag: "BranchStmt", args: "x", valueIndex: "strings | label name", value: "token.Token | branch kind"},
+ {name: "LabeledBranchStmt", tag: "BranchStmt", args: "label x", value: "token.Token | branch kind"},
+ {name: "SimpleLabeledStmt", tag: "LabeledStmt", args: "x", valueIndex: "strings | label name"},
+ {name: "LabeledStmt", tag: "LabeledStmt", args: "label x"},
+
+ {name: "BlockStmt", tag: "BlockStmt", args: "body..."},
+ {name: "ExprStmt", tag: "ExprStmt", args: "x"},
+
+ {name: "GoStmt", tag: "GoStmt", args: "x"},
+ {name: "DeferStmt", tag: "DeferStmt", args: "x"},
+
+ {name: "SendStmt", tag: "SendStmt", args: "ch value"},
+
+ {name: "EmptyStmt", tag: "EmptyStmt"},
+ {name: "IncDecStmt", tag: "IncDecStmt", args: "x", value: "token.Token | '++' or '--'"},
+ {name: "ReturnStmt", tag: "ReturnStmt", args: "results..."},
+
+ {name: "IfStmt", tag: "IfStmt", args: "cond block", example: "if cond {}"},
+ {name: "IfInitStmt", tag: "IfStmt", args: "init cond block", example: "if init; cond {}"},
+ {name: "IfElseStmt", tag: "IfStmt", args: "cond block else", example: "if cond {} else ..."},
+ {name: "IfInitElseStmt", tag: "IfStmt", args: "init cond block else", example: "if init; cond {} else ..."},
+ {name: "IfNamedOptStmt", tag: "IfStmt", args: "block", valueIndex: "strings | wildcard name", example: "if $*x {}"},
+ {name: "IfNamedOptElseStmt", tag: "IfStmt", args: "block else", valueIndex: "strings | wildcard name", example: "if $*x {} else ..."},
+
+ {name: "SwitchStmt", tag: "SwitchStmt", args: "body...", example: "switch {}"},
+ {name: "SwitchTagStmt", tag: "SwitchStmt", args: "tag body...", example: "switch tag {}"},
+ {name: "SwitchInitStmt", tag: "SwitchStmt", args: "init body...", example: "switch init; {}"},
+ {name: "SwitchInitTagStmt", tag: "SwitchStmt", args: "init tag body...", example: "switch init; tag {}"},
+
+ {name: "SelectStmt", tag: "SelectStmt", args: "body..."},
+
+ {name: "TypeSwitchStmt", tag: "TypeSwitchStmt", args: "x block", example: "switch x.(type) {}"},
+ {name: "TypeSwitchInitStmt", tag: "TypeSwitchStmt", args: "init x block", example: "switch init; x.(type) {}"},
+
+ {name: "CaseClause", tag: "CaseClause", args: "values... body..."},
+ {name: "DefaultCaseClause", tag: "CaseClause", args: "body..."},
+
+ {name: "CommClause", tag: "CommClause", args: "comm body..."},
+ {name: "DefaultCommClause", tag: "CommClause", args: "body..."},
+
+ {name: "ForStmt", tag: "ForStmt", args: "blocl", example: "for {}"},
+ {name: "ForPostStmt", tag: "ForStmt", args: "post block", example: "for ; ; post {}"},
+ {name: "ForCondStmt", tag: "ForStmt", args: "cond block", example: "for ; cond; {}"},
+ {name: "ForCondPostStmt", tag: "ForStmt", args: "cond post block", example: "for ; cond; post {}"},
+ {name: "ForInitStmt", tag: "ForStmt", args: "init block", example: "for init; ; {}"},
+ {name: "ForInitPostStmt", tag: "ForStmt", args: "init post block", example: "for init; ; post {}"},
+ {name: "ForInitCondStmt", tag: "ForStmt", args: "init cond block", example: "for init; cond; {}"},
+ {name: "ForInitCondPostStmt", tag: "ForStmt", args: "init cond post block", example: "for init; cond; post {}"},
+
+ {name: "RangeStmt", tag: "RangeStmt", args: "x block", example: "for range x {}"},
+ {name: "RangeKeyStmt", tag: "RangeStmt", args: "key x block", value: "token.Token | ':=' or '='", example: "for key := range x {}"},
+ {name: "RangeKeyValueStmt", tag: "RangeStmt", args: "key value x block", value: "token.Token | ':=' or '='", example: "for key, value := range x {}"},
+
+ {name: "RangeClause", tag: "RangeStmt", args: "x", example: "range x"},
+ {name: "RangeHeader", tag: "RangeStmt", args: "x", example: "for range x"},
+ {name: "RangeKeyHeader", tag: "RangeStmt", args: "key x", value: "token.Token | ':=' or '='", example: "for key := range x"},
+ {name: "RangeKeyValueHeader", tag: "RangeStmt", args: "key value x", value: "token.Token | ':=' or '='", example: "for key, value := range x"},
+
+ {name: "FieldList", args: "fields..."},
+ {name: "UnnamedField", args: "typ", example: "type"},
+ {name: "SimpleField", args: "typ", valueIndex: "strings | field name", example: "name type"},
+ {name: "Field", args: "name typ", example: "$name type"},
+ {name: "MultiField", args: "names... typ", example: "name1, name2 type"},
+
+ {name: "ValueSpec", tag: "ValueSpec", args: "value"},
+ {name: "ValueInitSpec", tag: "ValueSpec", args: "lhs... rhs...", example: "lhs = rhs"},
+ {name: "TypedValueInitSpec", tag: "ValueSpec", args: "lhs... type rhs...", example: "lhs typ = rhs"},
+ {name: "TypedValueSpec", tag: "ValueSpec", args: "lhs... type", example: "lhs typ"},
+
+ {name: "TypeSpec", tag: "TypeSpec", args: "name type", example: "name type"},
+ {name: "TypeAliasSpec", tag: "TypeSpec", args: "name type", example: "name = type"},
+
+ {name: "FuncDecl", tag: "FuncDecl", args: "name type block"},
+ {name: "MethodDecl", tag: "FuncDecl", args: "recv name type block"},
+ {name: "FuncProtoDecl", tag: "FuncDecl", args: "name type"},
+ {name: "MethodProtoDecl", tag: "FuncDecl", args: "recv name type"},
+
+ {name: "DeclStmt", tag: "DeclStmt", args: "decl"},
+ {name: "ConstDecl", tag: "GenDecl", args: "valuespecs..."},
+ {name: "VarDecl", tag: "GenDecl", args: "valuespecs..."},
+ {name: "TypeDecl", tag: "GenDecl", args: "typespecs..."},
+
+ {name: "AnyImportDecl", tag: "GenDecl"},
+ {name: "ImportDecl", tag: "GenDecl", args: "importspecs..."},
+
+ {name: "EmptyPackage", tag: "File", args: "name"},
+}
+
+type operationProto struct {
+ name string
+ value string
+ valueIndex string
+ tag string
+ example string
+ args string
+ note string
+}
+
+type operationInfo struct {
+ Example string
+ Note string
+ Args string
+ Enum uint8
+ TagName string
+ Name string
+ ValueDoc string
+ ValueIndexDoc string
+ ExtraValueKindName string
+ ValueKindName string
+ VariadicMap uint64
+ NumArgs int
+ SliceIndex int
+}
+
+const stackUnchanged = ""
+
+var fileTemplate = template.Must(template.New("operations.go").Parse(`// Code generated "gen_operations.go"; DO NOT EDIT.
+
+package gogrep
+
+import (
+ "github.com/quasilyte/gogrep/nodetag"
+)
+
+//go:generate stringer -type=operation -trimprefix=op
+type operation uint8
+
+const (
+ opInvalid operation = 0
+{{ range .Operations }}
+ // Tag: {{.TagName}}
+ {{- if .Note}}{{print "\n"}}// {{.Note}}{{end}}
+ {{- if .Args}}{{print "\n"}}// Args: {{.Args}}{{end}}
+ {{- if .Example}}{{print "\n"}}// Example: {{.Example}}{{end}}
+ {{- if .ValueDoc}}{{print "\n"}}// Value: {{.ValueDoc}}{{end}}
+ {{- if .ValueIndexDoc}}{{print "\n"}}// ValueIndex: {{.ValueIndexDoc}}{{end}}
+ op{{ .Name }} operation = {{.Enum}}
+{{ end -}}
+)
+
+type operationInfo struct {
+ Tag nodetag.Value
+ NumArgs int
+ ValueKind valueKind
+ ExtraValueKind valueKind
+ VariadicMap bitmap64
+ SliceIndex int
+}
+
+var operationInfoTable = [256]operationInfo{
+ opInvalid: {},
+
+{{ range .Operations -}}
+ op{{.Name}}: {
+ Tag: nodetag.{{.TagName}},
+ NumArgs: {{.NumArgs}},
+ ValueKind: {{.ValueKindName}},
+ ExtraValueKind: {{.ExtraValueKindName}},
+ VariadicMap: {{.VariadicMap}}, // {{printf "%b" .VariadicMap}}
+ SliceIndex: {{.SliceIndex}},
+ },
+{{ end }}
+}
+`))
+
+func main() {
+ operations := make([]operationInfo, len(opPrototypes))
+ for i, proto := range opPrototypes {
+ enum := uint8(i + 1)
+
+ tagName := proto.tag
+ if tagName == "" {
+ tagName = "Unknown"
+ }
+
+ variadicMap := uint64(0)
+ numArgs := 0
+ sliceLenIndex := -1
+ if proto.args != "" {
+ args := strings.Split(proto.args, " ")
+ numArgs = len(args)
+ for i, arg := range args {
+ isVariadic := strings.HasSuffix(arg, "...")
+ if isVariadic {
+ variadicMap |= 1 << i
+ }
+ if strings.HasSuffix(arg, "[]") {
+ sliceLenIndex = i
+ }
+ }
+ }
+
+ extraValueKindName := "emptyValue"
+ if proto.valueIndex != "" {
+ parts := strings.Split(proto.valueIndex, " | ")
+ typ := parts[0]
+ switch typ {
+ case "strings":
+ extraValueKindName = "stringValue"
+ case "ifaces":
+ extraValueKindName = "ifaceValue"
+ default:
+ panic(fmt.Sprintf("%s: unexpected %s type", proto.name, typ))
+ }
+ }
+ valueKindName := "emptyValue"
+ if proto.value != "" {
+ parts := strings.Split(proto.value, " | ")
+ typ := parts[0]
+ switch typ {
+ case "token.Token":
+ valueKindName = "tokenValue"
+ case "ast.ChanDir":
+ valueKindName = "chandirValue"
+ case "int":
+ valueKindName = "intValue"
+ default:
+ panic(fmt.Sprintf("%s: unexpected %s type", proto.name, typ))
+ }
+ }
+
+ operations[i] = operationInfo{
+ Example: proto.example,
+ Note: proto.note,
+ Args: proto.args,
+ Enum: enum,
+ TagName: tagName,
+ Name: proto.name,
+ ValueDoc: proto.value,
+ ValueIndexDoc: proto.valueIndex,
+ NumArgs: numArgs,
+ VariadicMap: variadicMap,
+ ExtraValueKindName: extraValueKindName,
+ ValueKindName: valueKindName,
+ SliceIndex: sliceLenIndex,
+ }
+ }
+
+ var buf bytes.Buffer
+ err := fileTemplate.Execute(&buf, map[string]interface{}{
+ "Operations": operations,
+ })
+ if err != nil {
+ log.Panicf("execute template: %v", err)
+ }
+ writeFile("operations.gen.go", buf.Bytes())
+}
+
+func writeFile(filename string, data []byte) {
+ pretty, err := format.Source(data)
+ if err != nil {
+ log.Panicf("gofmt: %v", err)
+ }
+ if err := ioutil.WriteFile(filename, pretty, 0666); err != nil {
+ log.Panicf("write %s: %v", filename, err)
+ }
+}
diff --git a/vendor/github.com/quasilyte/gogrep/go.mod b/vendor/github.com/quasilyte/gogrep/go.mod
new file mode 100644
index 000000000..3c76dc5e1
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/go.mod
@@ -0,0 +1,8 @@
+module github.com/quasilyte/gogrep
+
+go 1.16
+
+require (
+ github.com/go-toolsmith/astequal v1.0.1
+ github.com/google/go-cmp v0.5.6
+)
diff --git a/vendor/github.com/quasilyte/gogrep/go.sum b/vendor/github.com/quasilyte/gogrep/go.sum
new file mode 100644
index 000000000..25c3bbb3e
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/go.sum
@@ -0,0 +1,8 @@
+github.com/go-toolsmith/astequal v1.0.1 h1:JbSszi42Jiqu36Gnf363HWS9MTEAz67vTQLponh3Moc=
+github.com/go-toolsmith/astequal v1.0.1/go.mod h1:4oGA3EZXTVItV/ipGiOx7NWkY5veFfcsOJVS2YxltLw=
+github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4=
+github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
+github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/vendor/github.com/quasilyte/gogrep/gogrep.go b/vendor/github.com/quasilyte/gogrep/gogrep.go
new file mode 100644
index 000000000..313a9a251
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/gogrep.go
@@ -0,0 +1,180 @@
+package gogrep
+
+import (
+ "errors"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "github.com/quasilyte/gogrep/nodetag"
+)
+
+func IsEmptyNodeSlice(n ast.Node) bool {
+ if list, ok := n.(NodeSlice); ok {
+ return list.Len() == 0
+ }
+ return false
+}
+
+// MatchData describes a successful pattern match.
+type MatchData struct {
+ Node ast.Node
+ Capture []CapturedNode
+}
+
+type CapturedNode struct {
+ Name string
+ Node ast.Node
+}
+
+func (data MatchData) CapturedByName(name string) (ast.Node, bool) {
+ if name == "$$" {
+ return data.Node, true
+ }
+ return findNamed(data.Capture, name)
+}
+
+type PartialNode struct {
+ X ast.Node
+
+ from token.Pos
+ to token.Pos
+}
+
+func (p *PartialNode) Pos() token.Pos { return p.from }
+func (p *PartialNode) End() token.Pos { return p.to }
+
+type MatcherState struct {
+ Types *types.Info
+
+ // CapturePreset is a key-value pairs to use in the next match calls
+ // as predefined variables.
+ // For example, if the pattern is `$x = f()` and CapturePreset contains
+ // a pair with Name=x and value of `obj.x`, then the above mentioned
+ // pattern will only match `obj.x = f()` statements.
+ //
+ // If nil, the default behavior will be used. A first syntax element
+ // matching the matcher var will be captured.
+ CapturePreset []CapturedNode
+
+ // node values recorded by name, excluding "_" (used only by the
+ // actual matching phase)
+ capture []CapturedNode
+
+ pc int
+
+ partial PartialNode
+}
+
+func NewMatcherState() MatcherState {
+ return MatcherState{
+ capture: make([]CapturedNode, 0, 8),
+ }
+}
+
+type Pattern struct {
+ m *matcher
+}
+
+type PatternInfo struct {
+ Vars map[string]struct{}
+}
+
+func (p *Pattern) NodeTag() nodetag.Value {
+ return operationInfoTable[p.m.prog.insts[0].op].Tag
+}
+
+// MatchNode calls cb if n matches a pattern.
+func (p *Pattern) MatchNode(state *MatcherState, n ast.Node, cb func(MatchData)) {
+ p.m.MatchNode(state, n, cb)
+}
+
+// Clone creates a pattern copy.
+func (p *Pattern) Clone() *Pattern {
+ clone := *p
+ clone.m = &matcher{}
+ *clone.m = *p.m
+ return &clone
+}
+
+type CompileConfig struct {
+ Fset *token.FileSet
+
+ // Src is a gogrep pattern expression string.
+ Src string
+
+ // When strict is false, gogrep may consider 0xA and 10 to be identical.
+ // If true, a compiled pattern will require a full syntax match.
+ Strict bool
+
+ // WithTypes controls whether gogrep would have types.Info during the pattern execution.
+ // If set to true, it will compile a pattern to a potentially more precise form, where
+ // fmt.Printf maps to the stdlib function call but not Printf method call on some
+ // random fmt variable.
+ WithTypes bool
+
+ // Imports specifies packages that should be recognized for the type-aware matching.
+ // It maps a package name to a package path.
+ // Only used if WithTypes is true.
+ Imports map[string]string
+}
+
+func Compile(config CompileConfig) (*Pattern, PatternInfo, error) {
+ if strings.HasPrefix(config.Src, "import $") {
+ return compileImportPattern(config)
+ }
+ info := newPatternInfo()
+ n, err := parseExpr(config.Fset, config.Src)
+ if err != nil {
+ return nil, info, err
+ }
+ if n == nil {
+ return nil, info, errors.New("invalid pattern syntax")
+ }
+ var c compiler
+ c.config = config
+ prog, err := c.Compile(n, &info)
+ if err != nil {
+ return nil, info, err
+ }
+ m := newMatcher(prog)
+ return &Pattern{m: m}, info, nil
+}
+
+func Walk(root ast.Node, fn func(n ast.Node) bool) {
+ switch root := root.(type) {
+ case ExprSlice:
+ for _, e := range root {
+ ast.Inspect(e, fn)
+ }
+ case stmtSlice:
+ for _, e := range root {
+ ast.Inspect(e, fn)
+ }
+ case fieldSlice:
+ for _, e := range root {
+ ast.Inspect(e, fn)
+ }
+ case identSlice:
+ for _, e := range root {
+ ast.Inspect(e, fn)
+ }
+ case specSlice:
+ for _, e := range root {
+ ast.Inspect(e, fn)
+ }
+ case declSlice:
+ for _, e := range root {
+ ast.Inspect(e, fn)
+ }
+ default:
+ ast.Inspect(root, fn)
+ }
+}
+
+func newPatternInfo() PatternInfo {
+ return PatternInfo{
+ Vars: make(map[string]struct{}),
+ }
+}
diff --git a/vendor/github.com/quasilyte/gogrep/instructions.go b/vendor/github.com/quasilyte/gogrep/instructions.go
new file mode 100644
index 000000000..9f4f72d88
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/instructions.go
@@ -0,0 +1,116 @@
+package gogrep
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+)
+
+type bitmap64 uint64
+
+func (m bitmap64) IsSet(pos int) bool {
+ return m&(1<<pos) != 0
+}
+
+type valueKind uint8
+
+const (
+ emptyValue valueKind = iota
+ stringValue // Extra values only; value is stored in program.strings
+ ifaceValue // Extra values only; value is stored in program.ifaces
+ tokenValue // token.Token
+ chandirValue // ast.CharDir
+ intValue // int
+)
+
+type program struct {
+ insts []instruction
+ strings []string
+ ifaces []interface{}
+}
+
+func formatProgram(p *program) []string {
+ var parts []string
+ insts := p.insts
+
+ nextInst := func() instruction {
+ inst := insts[0]
+ insts = insts[1:]
+ return inst
+ }
+ peekOp := func() operation {
+ return insts[0].op
+ }
+
+ var walk func(int)
+ walk = func(depth int) {
+ if len(insts) == 0 {
+ return
+ }
+ inst := nextInst()
+
+ part := strings.Repeat(" • ", depth) + formatInstruction(p, inst)
+ parts = append(parts, part)
+
+ info := operationInfoTable[inst.op]
+ for i := 0; i < info.NumArgs; i++ {
+ if i == info.SliceIndex {
+ for j := 0; j < int(inst.value); j++ {
+ walk(depth + 1)
+ }
+ continue
+ }
+ if !info.VariadicMap.IsSet(i) {
+ walk(depth + 1)
+ continue
+ }
+ for {
+ isEnd := peekOp() == opEnd
+ walk(depth + 1)
+ if isEnd {
+ break
+ }
+ }
+ }
+ }
+
+ walk(0)
+ return parts
+}
+
+func formatInstruction(p *program, inst instruction) string {
+ parts := []string{inst.op.String()}
+
+ info := operationInfoTable[inst.op]
+
+ switch info.ValueKind {
+ case chandirValue:
+ dir := ast.ChanDir(inst.value)
+ if dir&ast.SEND != 0 {
+ parts = append(parts, "send")
+ }
+ if dir&ast.RECV != 0 {
+ parts = append(parts, "recv")
+ }
+ case tokenValue:
+ parts = append(parts, token.Token(inst.value).String())
+ case intValue:
+ parts = append(parts, fmt.Sprint(inst.value))
+ }
+
+ switch info.ExtraValueKind {
+ case ifaceValue:
+ parts = append(parts, fmt.Sprintf("%#v", p.ifaces[inst.valueIndex]))
+ case stringValue:
+ parts = append(parts, p.strings[inst.valueIndex])
+ }
+
+ return strings.Join(parts, " ")
+}
+
+type instruction struct {
+ op operation
+ value uint8
+ valueIndex uint8
+}
diff --git a/vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go b/vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go
new file mode 100644
index 000000000..f00d66d46
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/internal/stdinfo/stdinfo.go
@@ -0,0 +1,151 @@
+package stdinfo
+
+var Packages = map[string]string{
+ "adler32": "hash/adler32",
+ "aes": "crypto/aes",
+ "ascii85": "encoding/ascii85",
+ "asn1": "encoding/asn1",
+ "ast": "go/ast",
+ "atomic": "sync/atomic",
+ "base32": "encoding/base32",
+ "base64": "encoding/base64",
+ "big": "math/big",
+ "binary": "encoding/binary",
+ "bits": "math/bits",
+ "bufio": "bufio",
+ "build": "go/build",
+ "bytes": "bytes",
+ "bzip2": "compress/bzip2",
+ "cgi": "net/http/cgi",
+ "cgo": "runtime/cgo",
+ "cipher": "crypto/cipher",
+ "cmplx": "math/cmplx",
+ "color": "image/color",
+ "constant": "go/constant",
+ "constraint": "go/build/constraint",
+ "context": "context",
+ "cookiejar": "net/http/cookiejar",
+ "crc32": "hash/crc32",
+ "crc64": "hash/crc64",
+ "crypto": "crypto",
+ "csv": "encoding/csv",
+ "debug": "runtime/debug",
+ "des": "crypto/des",
+ "doc": "go/doc",
+ "draw": "image/draw",
+ "driver": "database/sql/driver",
+ "dsa": "crypto/dsa",
+ "dwarf": "debug/dwarf",
+ "ecdsa": "crypto/ecdsa",
+ "ed25519": "crypto/ed25519",
+ "elf": "debug/elf",
+ "elliptic": "crypto/elliptic",
+ "embed": "embed",
+ "encoding": "encoding",
+ "errors": "errors",
+ "exec": "os/exec",
+ "expvar": "expvar",
+ "fcgi": "net/http/fcgi",
+ "filepath": "path/filepath",
+ "flag": "flag",
+ "flate": "compress/flate",
+ "fmt": "fmt",
+ "fnv": "hash/fnv",
+ "format": "go/format",
+ "fs": "io/fs",
+ "fstest": "testing/fstest",
+ "gif": "image/gif",
+ "gob": "encoding/gob",
+ "gosym": "debug/gosym",
+ "gzip": "compress/gzip",
+ "hash": "hash",
+ "heap": "container/heap",
+ "hex": "encoding/hex",
+ "hmac": "crypto/hmac",
+ "html": "html",
+ "http": "net/http",
+ "httptest": "net/http/httptest",
+ "httptrace": "net/http/httptrace",
+ "httputil": "net/http/httputil",
+ "image": "image",
+ "importer": "go/importer",
+ "io": "io",
+ "iotest": "testing/iotest",
+ "ioutil": "io/ioutil",
+ "jpeg": "image/jpeg",
+ "json": "encoding/json",
+ "jsonrpc": "net/rpc/jsonrpc",
+ "list": "container/list",
+ "log": "log",
+ "lzw": "compress/lzw",
+ "macho": "debug/macho",
+ "mail": "net/mail",
+ "maphash": "hash/maphash",
+ "math": "math",
+ "md5": "crypto/md5",
+ "metrics": "runtime/metrics",
+ "mime": "mime",
+ "multipart": "mime/multipart",
+ "net": "net",
+ "os": "os",
+ "palette": "image/color/palette",
+ "parse": "text/template/parse",
+ "parser": "go/parser",
+ "path": "path",
+ "pe": "debug/pe",
+ "pem": "encoding/pem",
+ "pkix": "crypto/x509/pkix",
+ "plan9obj": "debug/plan9obj",
+ "plugin": "plugin",
+ "png": "image/png",
+ "pprof": "runtime/pprof",
+ "printer": "go/printer",
+ "quick": "testing/quick",
+ "quotedprintable": "mime/quotedprintable",
+ "race": "runtime/race",
+ "rand": "math/rand",
+ "rc4": "crypto/rc4",
+ "reflect": "reflect",
+ "regexp": "regexp",
+ "ring": "container/ring",
+ "rpc": "net/rpc",
+ "rsa": "crypto/rsa",
+ "runtime": "runtime",
+ "scanner": "text/scanner",
+ "sha1": "crypto/sha1",
+ "sha256": "crypto/sha256",
+ "sha512": "crypto/sha512",
+ "signal": "os/signal",
+ "smtp": "net/smtp",
+ "sort": "sort",
+ "sql": "database/sql",
+ "strconv": "strconv",
+ "strings": "strings",
+ "subtle": "crypto/subtle",
+ "suffixarray": "index/suffixarray",
+ "sync": "sync",
+ "syntax": "regexp/syntax",
+ "syscall": "syscall",
+ "syslog": "log/syslog",
+ "tabwriter": "text/tabwriter",
+ "tar": "archive/tar",
+ "template": "text/template",
+ "testing": "testing",
+ "textproto": "net/textproto",
+ "time": "time",
+ "tls": "crypto/tls",
+ "token": "go/token",
+ "trace": "runtime/trace",
+ "types": "go/types",
+ "tzdata": "time/tzdata",
+ "unicode": "unicode",
+ "unsafe": "unsafe",
+ "url": "net/url",
+ "user": "os/user",
+ "utf16": "unicode/utf16",
+ "utf8": "unicode/utf8",
+ "x509": "crypto/x509",
+ "xml": "encoding/xml",
+ "zip": "archive/zip",
+ "zlib": "compress/zlib",
+}
diff --git a/vendor/github.com/quasilyte/gogrep/match.go b/vendor/github.com/quasilyte/gogrep/match.go
new file mode 100644
index 000000000..d927beff3
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/match.go
@@ -0,0 +1,937 @@
+package gogrep
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strconv"
+
+ "github.com/go-toolsmith/astequal"
+)
+
+type matcher struct {
+ prog *program
+
+ insts []instruction
+}
+
+func newMatcher(prog *program) *matcher {
+ return &matcher{
+ prog: prog,
+ insts: prog.insts,
+ }
+}
+
+func (m *matcher) nextInst(state *MatcherState) instruction {
+ inst := m.insts[state.pc]
+ state.pc++
+ return inst
+}
+
+func (m *matcher) stringValue(inst instruction) string {
+ return m.prog.strings[inst.valueIndex]
+}
+
+func (m *matcher) ifaceValue(inst instruction) interface{} {
+ return m.prog.ifaces[inst.valueIndex]
+}
+
+func (m *matcher) resetCapture(state *MatcherState) {
+ state.capture = state.capture[:0]
+ if state.CapturePreset != nil {
+ state.capture = append(state.capture, state.CapturePreset...)
+ }
+}
+
+func (m *matcher) MatchNode(state *MatcherState, n ast.Node, accept func(MatchData)) {
+ state.pc = 0
+ inst := m.nextInst(state)
+ switch inst.op {
+ case opMultiStmt:
+ switch n := n.(type) {
+ case *ast.BlockStmt:
+ m.walkStmtSlice(state, n.List, accept)
+ case *ast.CaseClause:
+ m.walkStmtSlice(state, n.Body, accept)
+ case *ast.CommClause:
+ m.walkStmtSlice(state, n.Body, accept)
+ }
+ case opMultiExpr:
+ switch n := n.(type) {
+ case *ast.CallExpr:
+ m.walkExprSlice(state, n.Args, accept)
+ case *ast.CompositeLit:
+ m.walkExprSlice(state, n.Elts, accept)
+ case *ast.ReturnStmt:
+ m.walkExprSlice(state, n.Results, accept)
+ }
+ case opMultiDecl:
+ if n, ok := n.(*ast.File); ok {
+ m.walkDeclSlice(state, n.Decls, accept)
+ }
+ case opRangeClause:
+ m.matchRangeClause(state, n, accept)
+ case opRangeHeader:
+ m.matchRangeHeader(state, n, accept)
+ case opRangeKeyHeader:
+ m.matchRangeKeyHeader(state, inst, n, accept)
+ case opRangeKeyValueHeader:
+ m.matchRangeKeyValueHeader(state, inst, n, accept)
+ default:
+ m.resetCapture(state)
+ if m.matchNodeWithInst(state, inst, n) {
+ accept(MatchData{
+ Capture: state.capture,
+ Node: n,
+ })
+ }
+ }
+}
+
+func (m *matcher) walkDeclSlice(state *MatcherState, decls []ast.Decl, accept func(MatchData)) {
+ m.walkNodeSlice(state, declSlice(decls), accept)
+}
+
+func (m *matcher) walkExprSlice(state *MatcherState, exprs []ast.Expr, accept func(MatchData)) {
+ m.walkNodeSlice(state, ExprSlice(exprs), accept)
+}
+
+func (m *matcher) walkStmtSlice(state *MatcherState, stmts []ast.Stmt, accept func(MatchData)) {
+ m.walkNodeSlice(state, stmtSlice(stmts), accept)
+}
+
+func (m *matcher) walkNodeSlice(state *MatcherState, nodes NodeSlice, accept func(MatchData)) {
+ sliceLen := nodes.Len()
+ from := 0
+ for {
+ state.pc = 1 // FIXME: this is a kludge
+ m.resetCapture(state)
+ matched, offset := m.matchNodeList(state, nodes.slice(from, sliceLen), true)
+ if matched == nil {
+ break
+ }
+ accept(MatchData{
+ Capture: state.capture,
+ Node: matched,
+ })
+ from += offset - 1
+ if from >= sliceLen {
+ break
+ }
+ }
+}
+
+func (m *matcher) matchNamed(state *MatcherState, name string, n ast.Node) bool {
+ prev, ok := findNamed(state.capture, name)
+ if !ok {
+ // First occurrence, record value.
+ state.capture = append(state.capture, CapturedNode{Name: name, Node: n})
+ return true
+ }
+
+ return equalNodes(prev, n)
+}
+
+func (m *matcher) matchNamedField(state *MatcherState, name string, n ast.Node) bool {
+ prev, ok := findNamed(state.capture, name)
+ if !ok {
+ // First occurrence, record value.
+ unwrapped := m.unwrapNode(n)
+ state.capture = append(state.capture, CapturedNode{Name: name, Node: unwrapped})
+ return true
+ }
+ n = m.unwrapNode(n)
+ return equalNodes(prev, n)
+}
+
+func (m *matcher) unwrapNode(x ast.Node) ast.Node {
+ switch x := x.(type) {
+ case *ast.Field:
+ if len(x.Names) == 0 {
+ return x.Type
+ }
+ case *ast.FieldList:
+ if x != nil && len(x.List) == 1 && len(x.List[0].Names) == 0 {
+ return x.List[0].Type
+ }
+ }
+ return x
+}
+
+func (m *matcher) matchNodeWithInst(state *MatcherState, inst instruction, n ast.Node) bool {
+ switch inst.op {
+ case opNode:
+ return n != nil
+ case opOptNode:
+ return true
+
+ case opNamedNode:
+ return n != nil && m.matchNamed(state, m.stringValue(inst), n)
+ case opNamedOptNode:
+ return m.matchNamed(state, m.stringValue(inst), n)
+
+ case opFieldNode:
+ n, ok := n.(*ast.FieldList)
+ return ok && n != nil && len(n.List) == 1 && len(n.List[0].Names) == 0
+ case opNamedFieldNode:
+ return n != nil && m.matchNamedField(state, m.stringValue(inst), n)
+
+ case opBasicLit:
+ n, ok := n.(*ast.BasicLit)
+ return ok && m.ifaceValue(inst) == literalValue(n)
+
+ case opStrictIntLit:
+ n, ok := n.(*ast.BasicLit)
+ return ok && n.Kind == token.INT && m.stringValue(inst) == n.Value
+ case opStrictFloatLit:
+ n, ok := n.(*ast.BasicLit)
+ return ok && n.Kind == token.FLOAT && m.stringValue(inst) == n.Value
+ case opStrictCharLit:
+ n, ok := n.(*ast.BasicLit)
+ return ok && n.Kind == token.CHAR && m.stringValue(inst) == n.Value
+ case opStrictStringLit:
+ n, ok := n.(*ast.BasicLit)
+ return ok && n.Kind == token.STRING && m.stringValue(inst) == n.Value
+ case opStrictComplexLit:
+ n, ok := n.(*ast.BasicLit)
+ return ok && n.Kind == token.IMAG && m.stringValue(inst) == n.Value
+
+ case opIdent:
+ n, ok := n.(*ast.Ident)
+ return ok && m.stringValue(inst) == n.Name
+
+ case opPkg:
+ n, ok := n.(*ast.Ident)
+ if !ok {
+ return false
+ }
+ obj := state.Types.ObjectOf(n)
+ if obj == nil {
+ return false
+ }
+ pkgName, ok := obj.(*types.PkgName)
+ return ok && pkgName.Imported().Path() == m.stringValue(inst)
+
+ case opBinaryExpr:
+ n, ok := n.(*ast.BinaryExpr)
+ return ok && n.Op == token.Token(inst.value) &&
+ m.matchNode(state, n.X) && m.matchNode(state, n.Y)
+
+ case opUnaryExpr:
+ n, ok := n.(*ast.UnaryExpr)
+ return ok && n.Op == token.Token(inst.value) && m.matchNode(state, n.X)
+
+ case opStarExpr:
+ n, ok := n.(*ast.StarExpr)
+ return ok && m.matchNode(state, n.X)
+
+ case opVariadicCallExpr:
+ n, ok := n.(*ast.CallExpr)
+ return ok && n.Ellipsis.IsValid() && m.matchNode(state, n.Fun) && m.matchArgList(state, n.Args)
+ case opNonVariadicCallExpr:
+ n, ok := n.(*ast.CallExpr)
+ return ok && !n.Ellipsis.IsValid() && m.matchNode(state, n.Fun) && m.matchArgList(state, n.Args)
+ case opCallExpr:
+ n, ok := n.(*ast.CallExpr)
+ return ok && m.matchNode(state, n.Fun) && m.matchArgList(state, n.Args)
+
+ case opSimpleSelectorExpr:
+ n, ok := n.(*ast.SelectorExpr)
+ return ok && m.stringValue(inst) == n.Sel.Name && m.matchNode(state, n.X)
+ case opSelectorExpr:
+ n, ok := n.(*ast.SelectorExpr)
+ return ok && m.matchNode(state, n.Sel) && m.matchNode(state, n.X)
+
+ case opTypeAssertExpr:
+ n, ok := n.(*ast.TypeAssertExpr)
+ return ok && m.matchNode(state, n.X) && m.matchNode(state, n.Type)
+ case opTypeSwitchAssertExpr:
+ n, ok := n.(*ast.TypeAssertExpr)
+ return ok && n.Type == nil && m.matchNode(state, n.X)
+
+ case opSliceExpr:
+ n, ok := n.(*ast.SliceExpr)
+ return ok && n.Low == nil && n.High == nil && m.matchNode(state, n.X)
+ case opSliceFromExpr:
+ n, ok := n.(*ast.SliceExpr)
+ return ok && n.High == nil && !n.Slice3 &&
+ m.matchNode(state, n.X) && m.matchNode(state, n.Low)
+ case opSliceToExpr:
+ n, ok := n.(*ast.SliceExpr)
+ return ok && n.Low == nil && !n.Slice3 &&
+ m.matchNode(state, n.X) && m.matchNode(state, n.High)
+ case opSliceFromToExpr:
+ n, ok := n.(*ast.SliceExpr)
+ return ok && !n.Slice3 &&
+ m.matchNode(state, n.X) && m.matchNode(state, n.Low) && m.matchNode(state, n.High)
+ case opSliceToCapExpr:
+ n, ok := n.(*ast.SliceExpr)
+ return ok && n.Low == nil &&
+ m.matchNode(state, n.X) && m.matchNode(state, n.High) && m.matchNode(state, n.Max)
+ case opSliceFromToCapExpr:
+ n, ok := n.(*ast.SliceExpr)
+ return ok && m.matchNode(state, n.X) && m.matchNode(state, n.Low) && m.matchNode(state, n.High) && m.matchNode(state, n.Max)
+
+ case opIndexExpr:
+ n, ok := n.(*ast.IndexExpr)
+ return ok && m.matchNode(state, n.X) && m.matchNode(state, n.Index)
+
+ case opKeyValueExpr:
+ n, ok := n.(*ast.KeyValueExpr)
+ return ok && m.matchNode(state, n.Key) && m.matchNode(state, n.Value)
+
+ case opParenExpr:
+ n, ok := n.(*ast.ParenExpr)
+ return ok && m.matchNode(state, n.X)
+
+ case opEllipsis:
+ n, ok := n.(*ast.Ellipsis)
+ return ok && n.Elt == nil
+ case opTypedEllipsis:
+ n, ok := n.(*ast.Ellipsis)
+ return ok && n.Elt != nil && m.matchNode(state, n.Elt)
+
+ case opSliceType:
+ n, ok := n.(*ast.ArrayType)
+ return ok && n.Len == nil && m.matchNode(state, n.Elt)
+ case opArrayType:
+ n, ok := n.(*ast.ArrayType)
+ return ok && n.Len != nil && m.matchNode(state, n.Len) && m.matchNode(state, n.Elt)
+ case opMapType:
+ n, ok := n.(*ast.MapType)
+ return ok && m.matchNode(state, n.Key) && m.matchNode(state, n.Value)
+ case opChanType:
+ n, ok := n.(*ast.ChanType)
+ return ok && ast.ChanDir(inst.value) == n.Dir && m.matchNode(state, n.Value)
+ case opVoidFuncType:
+ n, ok := n.(*ast.FuncType)
+ return ok && n.Results == nil && m.matchNode(state, n.Params)
+ case opFuncType:
+ n, ok := n.(*ast.FuncType)
+ return ok && m.matchNode(state, n.Params) && m.matchNode(state, n.Results)
+ case opStructType:
+ n, ok := n.(*ast.StructType)
+ return ok && m.matchNode(state, n.Fields)
+ case opInterfaceType:
+ n, ok := n.(*ast.InterfaceType)
+ return ok && m.matchNode(state, n.Methods)
+
+ case opCompositeLit:
+ n, ok := n.(*ast.CompositeLit)
+ return ok && n.Type == nil && m.matchExprSlice(state, n.Elts)
+ case opTypedCompositeLit:
+ n, ok := n.(*ast.CompositeLit)
+ return ok && n.Type != nil && m.matchNode(state, n.Type) && m.matchExprSlice(state, n.Elts)
+
+ case opUnnamedField:
+ n, ok := n.(*ast.Field)
+ return ok && len(n.Names) == 0 && m.matchNode(state, n.Type)
+ case opSimpleField:
+ n, ok := n.(*ast.Field)
+ return ok && len(n.Names) == 1 && m.stringValue(inst) == n.Names[0].Name && m.matchNode(state, n.Type)
+ case opField:
+ n, ok := n.(*ast.Field)
+ return ok && len(n.Names) == 1 && m.matchNode(state, n.Names[0]) && m.matchNode(state, n.Type)
+ case opMultiField:
+ n, ok := n.(*ast.Field)
+ return ok && len(n.Names) >= 2 && m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type)
+ case opFieldList:
+ // FieldList could be nil in places like function return types.
+ n, ok := n.(*ast.FieldList)
+ return ok && n != nil && m.matchFieldSlice(state, n.List)
+
+ case opFuncLit:
+ n, ok := n.(*ast.FuncLit)
+ return ok && m.matchNode(state, n.Type) && m.matchNode(state, n.Body)
+
+ case opAssignStmt:
+ n, ok := n.(*ast.AssignStmt)
+ return ok && token.Token(inst.value) == n.Tok &&
+ len(n.Lhs) == 1 && m.matchNode(state, n.Lhs[0]) &&
+ len(n.Rhs) == 1 && m.matchNode(state, n.Rhs[0])
+ case opMultiAssignStmt:
+ n, ok := n.(*ast.AssignStmt)
+ return ok && token.Token(inst.value) == n.Tok &&
+ m.matchExprSlice(state, n.Lhs) && m.matchExprSlice(state, n.Rhs)
+
+ case opExprStmt:
+ n, ok := n.(*ast.ExprStmt)
+ return ok && m.matchNode(state, n.X)
+
+ case opGoStmt:
+ n, ok := n.(*ast.GoStmt)
+ return ok && m.matchNode(state, n.Call)
+ case opDeferStmt:
+ n, ok := n.(*ast.DeferStmt)
+ return ok && m.matchNode(state, n.Call)
+ case opSendStmt:
+ n, ok := n.(*ast.SendStmt)
+ return ok && m.matchNode(state, n.Chan) && m.matchNode(state, n.Value)
+
+ case opBlockStmt:
+ n, ok := n.(*ast.BlockStmt)
+ return ok && m.matchStmtSlice(state, n.List)
+
+ case opIfStmt:
+ n, ok := n.(*ast.IfStmt)
+ return ok && n.Init == nil && n.Else == nil &&
+ m.matchNode(state, n.Cond) && m.matchNode(state, n.Body)
+ case opIfElseStmt:
+ n, ok := n.(*ast.IfStmt)
+ return ok && n.Init == nil && n.Else != nil &&
+ m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) && m.matchNode(state, n.Else)
+ case opIfInitStmt:
+ n, ok := n.(*ast.IfStmt)
+ return ok && n.Else == nil &&
+ m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Body)
+ case opIfInitElseStmt:
+ n, ok := n.(*ast.IfStmt)
+ return ok && n.Else != nil &&
+ m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Body) && m.matchNode(state, n.Else)
+
+ case opIfNamedOptStmt:
+ n, ok := n.(*ast.IfStmt)
+ return ok && n.Else == nil && m.matchNode(state, n.Body) &&
+ m.matchNamed(state, m.stringValue(inst), toStmtSlice(n.Cond, n.Init))
+ case opIfNamedOptElseStmt:
+ n, ok := n.(*ast.IfStmt)
+ return ok && n.Else != nil && m.matchNode(state, n.Body) && m.matchNode(state, n.Else) &&
+ m.matchNamed(state, m.stringValue(inst), toStmtSlice(n.Cond, n.Init))
+
+ case opCaseClause:
+ n, ok := n.(*ast.CaseClause)
+ return ok && n.List != nil && m.matchExprSlice(state, n.List) && m.matchStmtSlice(state, n.Body)
+ case opDefaultCaseClause:
+ n, ok := n.(*ast.CaseClause)
+ return ok && n.List == nil && m.matchStmtSlice(state, n.Body)
+
+ case opSwitchStmt:
+ n, ok := n.(*ast.SwitchStmt)
+ return ok && n.Init == nil && n.Tag == nil && m.matchStmtSlice(state, n.Body.List)
+ case opSwitchTagStmt:
+ n, ok := n.(*ast.SwitchStmt)
+ return ok && n.Init == nil && m.matchNode(state, n.Tag) && m.matchStmtSlice(state, n.Body.List)
+ case opSwitchInitStmt:
+ n, ok := n.(*ast.SwitchStmt)
+ return ok && n.Tag == nil && m.matchNode(state, n.Init) && m.matchStmtSlice(state, n.Body.List)
+ case opSwitchInitTagStmt:
+ n, ok := n.(*ast.SwitchStmt)
+ return ok && m.matchNode(state, n.Init) && m.matchNode(state, n.Tag) && m.matchStmtSlice(state, n.Body.List)
+
+ case opTypeSwitchStmt:
+ n, ok := n.(*ast.TypeSwitchStmt)
+ return ok && n.Init == nil && m.matchNode(state, n.Assign) && m.matchStmtSlice(state, n.Body.List)
+ case opTypeSwitchInitStmt:
+ n, ok := n.(*ast.TypeSwitchStmt)
+ return ok && m.matchNode(state, n.Init) &&
+ m.matchNode(state, n.Assign) && m.matchStmtSlice(state, n.Body.List)
+
+ case opCommClause:
+ n, ok := n.(*ast.CommClause)
+ return ok && n.Comm != nil && m.matchNode(state, n.Comm) && m.matchStmtSlice(state, n.Body)
+ case opDefaultCommClause:
+ n, ok := n.(*ast.CommClause)
+ return ok && n.Comm == nil && m.matchStmtSlice(state, n.Body)
+
+ case opSelectStmt:
+ n, ok := n.(*ast.SelectStmt)
+ return ok && m.matchStmtSlice(state, n.Body.List)
+
+ case opRangeStmt:
+ n, ok := n.(*ast.RangeStmt)
+ return ok && n.Key == nil && n.Value == nil && m.matchNode(state, n.X) && m.matchNode(state, n.Body)
+ case opRangeKeyStmt:
+ n, ok := n.(*ast.RangeStmt)
+ return ok && n.Key != nil && n.Value == nil && token.Token(inst.value) == n.Tok &&
+ m.matchNode(state, n.Key) && m.matchNode(state, n.X) && m.matchNode(state, n.Body)
+ case opRangeKeyValueStmt:
+ n, ok := n.(*ast.RangeStmt)
+ return ok && n.Key != nil && n.Value != nil && token.Token(inst.value) == n.Tok &&
+ m.matchNode(state, n.Key) && m.matchNode(state, n.Value) && m.matchNode(state, n.X) && m.matchNode(state, n.Body)
+
+ case opForStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && n.Init == nil && n.Cond == nil && n.Post == nil &&
+ m.matchNode(state, n.Body)
+ case opForPostStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && n.Init == nil && n.Cond == nil && n.Post != nil &&
+ m.matchNode(state, n.Post) && m.matchNode(state, n.Body)
+ case opForCondStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && n.Init == nil && n.Cond != nil && n.Post == nil &&
+ m.matchNode(state, n.Cond) && m.matchNode(state, n.Body)
+ case opForCondPostStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && n.Init == nil && n.Cond != nil && n.Post != nil &&
+ m.matchNode(state, n.Cond) && m.matchNode(state, n.Post) && m.matchNode(state, n.Body)
+ case opForInitStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && n.Init != nil && n.Cond == nil && n.Post == nil &&
+ m.matchNode(state, n.Init) && m.matchNode(state, n.Body)
+ case opForInitPostStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && n.Init != nil && n.Cond == nil && n.Post != nil &&
+ m.matchNode(state, n.Init) && m.matchNode(state, n.Post) && m.matchNode(state, n.Body)
+ case opForInitCondStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && n.Init != nil && n.Cond != nil && n.Post == nil &&
+ m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Body)
+ case opForInitCondPostStmt:
+ n, ok := n.(*ast.ForStmt)
+ return ok && m.matchNode(state, n.Init) && m.matchNode(state, n.Cond) && m.matchNode(state, n.Post) && m.matchNode(state, n.Body)
+
+ case opIncDecStmt:
+ n, ok := n.(*ast.IncDecStmt)
+ return ok && token.Token(inst.value) == n.Tok && m.matchNode(state, n.X)
+
+ case opReturnStmt:
+ n, ok := n.(*ast.ReturnStmt)
+ return ok && m.matchExprSlice(state, n.Results)
+
+ case opLabeledStmt:
+ n, ok := n.(*ast.LabeledStmt)
+ return ok && m.matchNode(state, n.Label) && m.matchNode(state, n.Stmt)
+ case opSimpleLabeledStmt:
+ n, ok := n.(*ast.LabeledStmt)
+ return ok && m.stringValue(inst) == n.Label.Name && m.matchNode(state, n.Stmt)
+
+ case opLabeledBranchStmt:
+ n, ok := n.(*ast.BranchStmt)
+ return ok && n.Label != nil && token.Token(inst.value) == n.Tok && m.matchNode(state, n.Label)
+ case opSimpleLabeledBranchStmt:
+ n, ok := n.(*ast.BranchStmt)
+ return ok && n.Label != nil && m.stringValue(inst) == n.Label.Name && token.Token(inst.value) == n.Tok
+ case opBranchStmt:
+ n, ok := n.(*ast.BranchStmt)
+ return ok && n.Label == nil && token.Token(inst.value) == n.Tok
+
+ case opEmptyStmt:
+ _, ok := n.(*ast.EmptyStmt)
+ return ok
+
+ case opFuncDecl:
+ n, ok := n.(*ast.FuncDecl)
+ return ok && n.Recv == nil && n.Body != nil &&
+ m.matchNode(state, n.Name) && m.matchNode(state, n.Type) && m.matchNode(state, n.Body)
+ case opFuncProtoDecl:
+ n, ok := n.(*ast.FuncDecl)
+ return ok && n.Recv == nil && n.Body == nil &&
+ m.matchNode(state, n.Name) && m.matchNode(state, n.Type)
+ case opMethodDecl:
+ n, ok := n.(*ast.FuncDecl)
+ return ok && n.Recv != nil && n.Body != nil &&
+ m.matchNode(state, n.Recv) && m.matchNode(state, n.Name) && m.matchNode(state, n.Type) && m.matchNode(state, n.Body)
+ case opMethodProtoDecl:
+ n, ok := n.(*ast.FuncDecl)
+ return ok && n.Recv != nil && n.Body == nil &&
+ m.matchNode(state, n.Recv) && m.matchNode(state, n.Name) && m.matchNode(state, n.Type)
+
+ case opValueSpec:
+ n, ok := n.(*ast.ValueSpec)
+ return ok && len(n.Values) == 0 && n.Type == nil &&
+ len(n.Names) == 1 && m.matchNode(state, n.Names[0])
+ case opValueInitSpec:
+ n, ok := n.(*ast.ValueSpec)
+ return ok && len(n.Values) != 0 && n.Type == nil &&
+ m.matchIdentSlice(state, n.Names) && m.matchExprSlice(state, n.Values)
+ case opTypedValueSpec:
+ n, ok := n.(*ast.ValueSpec)
+ return ok && len(n.Values) == 0 && n.Type != nil &&
+ m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type)
+ case opTypedValueInitSpec:
+ n, ok := n.(*ast.ValueSpec)
+ return ok && len(n.Values) != 0 &&
+ m.matchIdentSlice(state, n.Names) && m.matchNode(state, n.Type) && m.matchExprSlice(state, n.Values)
+
+ case opTypeSpec:
+ n, ok := n.(*ast.TypeSpec)
+ return ok && !n.Assign.IsValid() && m.matchNode(state, n.Name) && m.matchNode(state, n.Type)
+ case opTypeAliasSpec:
+ n, ok := n.(*ast.TypeSpec)
+ return ok && n.Assign.IsValid() && m.matchNode(state, n.Name) && m.matchNode(state, n.Type)
+
+ case opDeclStmt:
+ n, ok := n.(*ast.DeclStmt)
+ return ok && m.matchNode(state, n.Decl)
+
+ case opConstDecl:
+ n, ok := n.(*ast.GenDecl)
+ return ok && n.Tok == token.CONST && m.matchSpecSlice(state, n.Specs)
+ case opVarDecl:
+ n, ok := n.(*ast.GenDecl)
+ return ok && n.Tok == token.VAR && m.matchSpecSlice(state, n.Specs)
+ case opTypeDecl:
+ n, ok := n.(*ast.GenDecl)
+ return ok && n.Tok == token.TYPE && m.matchSpecSlice(state, n.Specs)
+ case opAnyImportDecl:
+ n, ok := n.(*ast.GenDecl)
+ return ok && n.Tok == token.IMPORT
+ case opImportDecl:
+ n, ok := n.(*ast.GenDecl)
+ return ok && n.Tok == token.IMPORT && m.matchSpecSlice(state, n.Specs)
+
+ case opEmptyPackage:
+ n, ok := n.(*ast.File)
+ return ok && len(n.Imports) == 0 && len(n.Decls) == 0 && m.matchNode(state, n.Name)
+
+ default:
+ panic(fmt.Sprintf("unexpected op %s", inst.op))
+ }
+}
+
+func (m *matcher) matchNode(state *MatcherState, n ast.Node) bool {
+ return m.matchNodeWithInst(state, m.nextInst(state), n)
+}
+
+func (m *matcher) matchArgList(state *MatcherState, exprs []ast.Expr) bool {
+ inst := m.nextInst(state)
+ if inst.op != opSimpleArgList {
+ return m.matchExprSlice(state, exprs)
+ }
+ if len(exprs) != int(inst.value) {
+ return false
+ }
+ for _, x := range exprs {
+ if !m.matchNode(state, x) {
+ return false
+ }
+ }
+ return true
+}
+
+func (m *matcher) matchStmtSlice(state *MatcherState, stmts []ast.Stmt) bool {
+ matched, _ := m.matchNodeList(state, stmtSlice(stmts), false)
+ return matched != nil
+}
+
+func (m *matcher) matchExprSlice(state *MatcherState, exprs []ast.Expr) bool {
+ matched, _ := m.matchNodeList(state, ExprSlice(exprs), false)
+ return matched != nil
+}
+
+func (m *matcher) matchFieldSlice(state *MatcherState, fields []*ast.Field) bool {
+ matched, _ := m.matchNodeList(state, fieldSlice(fields), false)
+ return matched != nil
+}
+
+func (m *matcher) matchIdentSlice(state *MatcherState, idents []*ast.Ident) bool {
+ matched, _ := m.matchNodeList(state, identSlice(idents), false)
+ return matched != nil
+}
+
+func (m *matcher) matchSpecSlice(state *MatcherState, specs []ast.Spec) bool {
+ matched, _ := m.matchNodeList(state, specSlice(specs), false)
+ return matched != nil
+}
+
+// matchNodeList matches two lists of nodes. It uses a common algorithm to match
+// wildcard patterns with any number of nodes without recursion.
+func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bool) (matched ast.Node, offset int) {
+ sliceLen := nodes.Len()
+ inst := m.nextInst(state)
+ if inst.op == opEnd {
+ if sliceLen == 0 {
+ return nodes, 0
+ }
+ return nil, -1
+ }
+ pcBase := state.pc
+ pcNext := 0
+ j := 0
+ jNext := 0
+ partialStart, partialEnd := 0, sliceLen
+
+ type restart struct {
+ matches []CapturedNode
+ pc int
+ j int
+ wildStart int
+ wildName string
+ }
+ // We need to stack these because otherwise some edge cases
+ // would not match properly. Since we have various kinds of
+ // wildcards (nodes containing them, $_, and $*_), in some cases
+ // we may have to go back and do multiple restarts to get to the
+ // right starting position.
+ var stack []restart
+ wildName := ""
+ wildStart := 0
+ push := func(next int) {
+ if next > sliceLen {
+ return // would be discarded anyway
+ }
+ pcNext = state.pc - 1
+ jNext = next
+ stack = append(stack, restart{state.capture, pcNext, next, wildStart, wildName})
+ }
+ pop := func() {
+ j = jNext
+ state.pc = pcNext
+ state.capture = stack[len(stack)-1].matches
+ wildName = stack[len(stack)-1].wildName
+ wildStart = stack[len(stack)-1].wildStart
+ stack = stack[:len(stack)-1]
+ pcNext = 0
+ jNext = 0
+ if len(stack) != 0 {
+ pcNext = stack[len(stack)-1].pc
+ jNext = stack[len(stack)-1].j
+ }
+ }
+
+ // wouldMatch returns whether the current wildcard - if any -
+ // matches the nodes we are currently trying it on.
+ wouldMatch := func() bool {
+ switch wildName {
+ case "", "_":
+ return true
+ }
+ return m.matchNamed(state, wildName, nodes.slice(wildStart, j))
+ }
+ for ; inst.op != opEnd || j < sliceLen; inst = m.nextInst(state) {
+ if inst.op != opEnd {
+ if inst.op == opNodeSeq || inst.op == opNamedNodeSeq {
+ // keep track of where this wildcard
+ // started (if name == wildName,
+ // we're trying the same wildcard
+ // matching one more node)
+ name := "_"
+ if inst.op == opNamedNodeSeq {
+ name = m.stringValue(inst)
+ }
+ if name != wildName {
+ wildStart = j
+ wildName = name
+ }
+ // try to match zero or more at j,
+ // restarting at j+1 if it fails
+ push(j + 1)
+ continue
+ }
+ if partial && state.pc == pcBase {
+ // let "b; c" match "a; b; c"
+ // (simulates a $*_ at the beginning)
+ partialStart = j
+ push(j + 1)
+ }
+ if j < sliceLen && wouldMatch() && m.matchNodeWithInst(state, inst, nodes.At(j)) {
+ // ordinary match
+ wildName = ""
+ j++
+ continue
+ }
+ }
+ if partial && inst.op == opEnd && wildName == "" {
+ partialEnd = j
+ break // let "b; c" match "b; c; d"
+ }
+ // mismatch, try to restart
+ if 0 < jNext && jNext <= sliceLen && (state.pc != pcNext || j != jNext) {
+ pop()
+ continue
+ }
+ return nil, -1
+ }
+ if !wouldMatch() {
+ return nil, -1
+ }
+ return nodes.slice(partialStart, partialEnd), partialEnd + 1
+}
+
+func (m *matcher) matchRangeClause(state *MatcherState, n ast.Node, accept func(MatchData)) {
+ rng, ok := n.(*ast.RangeStmt)
+ if !ok {
+ return
+ }
+ m.resetCapture(state)
+ if !m.matchNode(state, rng.X) {
+ return
+ }
+
+ // Now the fun begins: there is no Range pos in RangeStmt, so we need
+ // to make our best guess to find it.
+ // See https://github.com/golang/go/issues/50429
+ //
+ // In gogrep we don't have []byte sources available, and
+ // it would be cumbersome to walk bytes manually to find the "range" keyword.
+ // What we can do is to hope that code is:
+ // 1. Properly gofmt-ed.
+ // 2. There are no free-floating artifacts between TokPos and "range".
+ var from int
+ if rng.TokPos != token.NoPos {
+ // Start from the end of the '=' or ':=' token.
+ from = int(rng.TokPos + 1)
+ if rng.Tok == token.DEFINE {
+ from++ // ':=' is 1 byte longer than '='
+ }
+ // Now suppose we have 'for _, x := range xs {...}'
+ // If this is true, then `xs.Pos.Offset - len(" range ")` would
+ // lead us to the current 'from' value.
+ // It's syntactically correct to have `:=range`, so we don't
+ // unconditionally add a space here.
+ if int(rng.X.Pos())-len(" range ") == from {
+ // This means that there is exactly one space between Tok and "range".
+ // There are some awful cases where this might break, but let's
+ // not think about them too much.
+ from += len(" ")
+ }
+ } else {
+ // `for range xs {...}` form.
+ // There should be at least 1 space between "for" and "range".
+ from = int(rng.For) + len("for ")
+ }
+
+ state.partial.X = rng
+ state.partial.from = token.Pos(from)
+ state.partial.to = rng.X.End()
+
+ accept(MatchData{
+ Capture: state.capture,
+ Node: &state.partial,
+ })
+}
+
+func (m *matcher) matchRangeHeader(state *MatcherState, n ast.Node, accept func(MatchData)) {
+ rng, ok := n.(*ast.RangeStmt)
+ if ok && rng.Key == nil && rng.Value == nil && m.matchNode(state, rng.X) {
+ m.setRangeHeaderPos(state, rng)
+ accept(MatchData{
+ Capture: state.capture,
+ Node: &state.partial,
+ })
+ }
+}
+
+func (m *matcher) matchRangeKeyHeader(state *MatcherState, inst instruction, n ast.Node, accept func(MatchData)) {
+ rng, ok := n.(*ast.RangeStmt)
+ if ok && rng.Key != nil && rng.Value == nil && token.Token(inst.value) == rng.Tok && m.matchNode(state, rng.Key) && m.matchNode(state, rng.X) {
+ m.setRangeHeaderPos(state, rng)
+ accept(MatchData{
+ Capture: state.capture,
+ Node: &state.partial,
+ })
+ }
+}
+
+func (m *matcher) matchRangeKeyValueHeader(state *MatcherState, inst instruction, n ast.Node, accept func(MatchData)) {
+ rng, ok := n.(*ast.RangeStmt)
+ if ok && rng.Key != nil && rng.Value != nil && token.Token(inst.value) == rng.Tok && m.matchNode(state, rng.Key) && m.matchNode(state, rng.Value) && m.matchNode(state, rng.X) {
+ m.setRangeHeaderPos(state, rng)
+ accept(MatchData{
+ Capture: state.capture,
+ Node: &state.partial,
+ })
+ }
+}
+
+func (m *matcher) setRangeHeaderPos(state *MatcherState, rng *ast.RangeStmt) {
+ state.partial.X = rng
+ state.partial.from = rng.Pos()
+ state.partial.to = rng.Body.Pos() - 1
+}
+
+func findNamed(capture []CapturedNode, name string) (ast.Node, bool) {
+ for _, c := range capture {
+ if c.Name == name {
+ return c.Node, true
+ }
+ }
+ return nil, false
+}
+
+func literalValue(lit *ast.BasicLit) interface{} {
+ switch lit.Kind {
+ case token.INT:
+ v, err := strconv.ParseInt(lit.Value, 0, 64)
+ if err == nil {
+ return v
+ }
+ case token.CHAR:
+ s, err := strconv.Unquote(lit.Value)
+ if err != nil {
+ return nil
+ }
+ // Return the first rune.
+ for _, c := range s {
+ return c
+ }
+ case token.STRING:
+ s, err := strconv.Unquote(lit.Value)
+ if err == nil {
+ return s
+ }
+ case token.FLOAT:
+ v, err := strconv.ParseFloat(lit.Value, 64)
+ if err == nil {
+ return v
+ }
+ case token.IMAG:
+ v, err := strconv.ParseComplex(lit.Value, 128)
+ if err == nil {
+ return v
+ }
+ }
+ return nil
+}
+
+func equalNodes(x, y ast.Node) bool {
+ if x == nil || y == nil {
+ return x == y
+ }
+ switch x := x.(type) {
+ case stmtSlice:
+ y, ok := y.(stmtSlice)
+ if !ok || len(x) != len(y) {
+ return false
+ }
+ for i := range x {
+ if !astequal.Stmt(x[i], y[i]) {
+ return false
+ }
+ }
+ return true
+ case ExprSlice:
+ y, ok := y.(ExprSlice)
+ if !ok || len(x) != len(y) {
+ return false
+ }
+ for i := range x {
+ if !astequal.Expr(x[i], y[i]) {
+ return false
+ }
+ }
+ return true
+ case declSlice:
+ y, ok := y.(declSlice)
+ if !ok || len(x) != len(y) {
+ return false
+ }
+ for i := range x {
+ if !astequal.Decl(x[i], y[i]) {
+ return false
+ }
+ }
+ return true
+
+ default:
+ return astequal.Node(x, y)
+ }
+}
+
+func toStmtSlice(nodes ...ast.Node) stmtSlice {
+ var stmts []ast.Stmt
+ for _, node := range nodes {
+ switch x := node.(type) {
+ case nil:
+ case ast.Stmt:
+ stmts = append(stmts, x)
+ case ast.Expr:
+ stmts = append(stmts, &ast.ExprStmt{X: x})
+ default:
+ panic(fmt.Sprintf("unexpected node type: %T", x))
+ }
+ }
+ return stmtSlice(stmts)
+}
diff --git a/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go b/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go
new file mode 100644
index 000000000..a4cc2ff85
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/nodetag/nodetag.go
@@ -0,0 +1,280 @@
+package nodetag
+
+import (
+ "go/ast"
+)
+
+type Value int
+
+const (
+ Unknown Value = iota
+
+ ArrayType
+ AssignStmt
+ BasicLit
+ BinaryExpr
+ BlockStmt
+ BranchStmt
+ CallExpr
+ CaseClause
+ ChanType
+ CommClause
+ CompositeLit
+ DeclStmt
+ DeferStmt
+ Ellipsis
+ EmptyStmt
+ ExprStmt
+ File
+ ForStmt
+ FuncDecl
+ FuncLit
+ FuncType
+ GenDecl
+ GoStmt
+ Ident
+ IfStmt
+ ImportSpec
+ IncDecStmt
+ IndexExpr
+ InterfaceType
+ KeyValueExpr
+ LabeledStmt
+ MapType
+ ParenExpr
+ RangeStmt
+ ReturnStmt
+ SelectStmt
+ SelectorExpr
+ SendStmt
+ SliceExpr
+ StarExpr
+ StructType
+ SwitchStmt
+ TypeAssertExpr
+ TypeSpec
+ TypeSwitchStmt
+ UnaryExpr
+ ValueSpec
+
+ NumBuckets
+
+ StmtList // gogrep stmt list
+ ExprList // gogrep expr list
+ DeclList // gogrep decl list
+
+ Node // ast.Node
+ Expr // ast.Expr
+ Stmt // ast.Stmt
+)
+
+func FromNode(n ast.Node) Value {
+ switch n.(type) {
+ case *ast.ArrayType:
+ return ArrayType
+ case *ast.AssignStmt:
+ return AssignStmt
+ case *ast.BasicLit:
+ return BasicLit
+ case *ast.BinaryExpr:
+ return BinaryExpr
+ case *ast.BlockStmt:
+ return BlockStmt
+ case *ast.BranchStmt:
+ return BranchStmt
+ case *ast.CallExpr:
+ return CallExpr
+ case *ast.CaseClause:
+ return CaseClause
+ case *ast.ChanType:
+ return ChanType
+ case *ast.CommClause:
+ return CommClause
+ case *ast.CompositeLit:
+ return CompositeLit
+ case *ast.DeclStmt:
+ return DeclStmt
+ case *ast.DeferStmt:
+ return DeferStmt
+ case *ast.Ellipsis:
+ return Ellipsis
+ case *ast.EmptyStmt:
+ return EmptyStmt
+ case *ast.ExprStmt:
+ return ExprStmt
+ case *ast.File:
+ return File
+ case *ast.ForStmt:
+ return ForStmt
+ case *ast.FuncDecl:
+ return FuncDecl
+ case *ast.FuncLit:
+ return FuncLit
+ case *ast.FuncType:
+ return FuncType
+ case *ast.GenDecl:
+ return GenDecl
+ case *ast.GoStmt:
+ return GoStmt
+ case *ast.Ident:
+ return Ident
+ case *ast.IfStmt:
+ return IfStmt
+ case *ast.ImportSpec:
+ return ImportSpec
+ case *ast.IncDecStmt:
+ return IncDecStmt
+ case *ast.IndexExpr:
+ return IndexExpr
+ case *ast.InterfaceType:
+ return InterfaceType
+ case *ast.KeyValueExpr:
+ return KeyValueExpr
+ case *ast.LabeledStmt:
+ return LabeledStmt
+ case *ast.MapType:
+ return MapType
+ case *ast.ParenExpr:
+ return ParenExpr
+ case *ast.RangeStmt:
+ return RangeStmt
+ case *ast.ReturnStmt:
+ return ReturnStmt
+ case *ast.SelectStmt:
+ return SelectStmt
+ case *ast.SelectorExpr:
+ return SelectorExpr
+ case *ast.SendStmt:
+ return SendStmt
+ case *ast.SliceExpr:
+ return SliceExpr
+ case *ast.StarExpr:
+ return StarExpr
+ case *ast.StructType:
+ return StructType
+ case *ast.SwitchStmt:
+ return SwitchStmt
+ case *ast.TypeAssertExpr:
+ return TypeAssertExpr
+ case *ast.TypeSpec:
+ return TypeSpec
+ case *ast.TypeSwitchStmt:
+ return TypeSwitchStmt
+ case *ast.UnaryExpr:
+ return UnaryExpr
+ case *ast.ValueSpec:
+ return ValueSpec
+ default:
+ return Unknown
+ }
+}
+
+func FromString(s string) Value {
+ switch s {
+ case "Expr":
+ return Expr
+ case "Stmt":
+ return Stmt
+ case "Node":
+ return Node
+ }
+
+ switch s {
+ case "ArrayType":
+ return ArrayType
+ case "AssignStmt":
+ return AssignStmt
+ case "BasicLit":
+ return BasicLit
+ case "BinaryExpr":
+ return BinaryExpr
+ case "BlockStmt":
+ return BlockStmt
+ case "BranchStmt":
+ return BranchStmt
+ case "CallExpr":
+ return CallExpr
+ case "CaseClause":
+ return CaseClause
+ case "ChanType":
+ return ChanType
+ case "CommClause":
+ return CommClause
+ case "CompositeLit":
+ return CompositeLit
+ case "DeclStmt":
+ return DeclStmt
+ case "DeferStmt":
+ return DeferStmt
+ case "Ellipsis":
+ return Ellipsis
+ case "EmptyStmt":
+ return EmptyStmt
+ case "ExprStmt":
+ return ExprStmt
+ case "File":
+ return File
+ case "ForStmt":
+ return ForStmt
+ case "FuncDecl":
+ return FuncDecl
+ case "FuncLit":
+ return FuncLit
+ case "FuncType":
+ return FuncType
+ case "GenDecl":
+ return GenDecl
+ case "GoStmt":
+ return GoStmt
+ case "Ident":
+ return Ident
+ case "IfStmt":
+ return IfStmt
+ case "ImportSpec":
+ return ImportSpec
+ case "IncDecStmt":
+ return IncDecStmt
+ case "IndexExpr":
+ return IndexExpr
+ case "InterfaceType":
+ return InterfaceType
+ case "KeyValueExpr":
+ return KeyValueExpr
+ case "LabeledStmt":
+ return LabeledStmt
+ case "MapType":
+ return MapType
+ case "ParenExpr":
+ return ParenExpr
+ case "RangeStmt":
+ return RangeStmt
+ case "ReturnStmt":
+ return ReturnStmt
+ case "SelectStmt":
+ return SelectStmt
+ case "SelectorExpr":
+ return SelectorExpr
+ case "SendStmt":
+ return SendStmt
+ case "SliceExpr":
+ return SliceExpr
+ case "StarExpr":
+ return StarExpr
+ case "StructType":
+ return StructType
+ case "SwitchStmt":
+ return SwitchStmt
+ case "TypeAssertExpr":
+ return TypeAssertExpr
+ case "TypeSpec":
+ return TypeSpec
+ case "TypeSwitchStmt":
+ return TypeSwitchStmt
+ case "UnaryExpr":
+ return UnaryExpr
+ case "ValueSpec":
+ return ValueSpec
+ default:
+ return Unknown
+ }
+}
diff --git a/vendor/github.com/quasilyte/gogrep/operation_string.go b/vendor/github.com/quasilyte/gogrep/operation_string.go
new file mode 100644
index 000000000..fa093266e
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/operation_string.go
@@ -0,0 +1,146 @@
+// Code generated by "stringer -type=operation -trimprefix=op"; DO NOT EDIT.
+
+package gogrep
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[opInvalid-0]
+ _ = x[opNode-1]
+ _ = x[opNamedNode-2]
+ _ = x[opNodeSeq-3]
+ _ = x[opNamedNodeSeq-4]
+ _ = x[opOptNode-5]
+ _ = x[opNamedOptNode-6]
+ _ = x[opFieldNode-7]
+ _ = x[opNamedFieldNode-8]
+ _ = x[opMultiStmt-9]
+ _ = x[opMultiExpr-10]
+ _ = x[opMultiDecl-11]
+ _ = x[opEnd-12]
+ _ = x[opBasicLit-13]
+ _ = x[opStrictIntLit-14]
+ _ = x[opStrictFloatLit-15]
+ _ = x[opStrictCharLit-16]
+ _ = x[opStrictStringLit-17]
+ _ = x[opStrictComplexLit-18]
+ _ = x[opIdent-19]
+ _ = x[opPkg-20]
+ _ = x[opIndexExpr-21]
+ _ = x[opSliceExpr-22]
+ _ = x[opSliceFromExpr-23]
+ _ = x[opSliceToExpr-24]
+ _ = x[opSliceFromToExpr-25]
+ _ = x[opSliceToCapExpr-26]
+ _ = x[opSliceFromToCapExpr-27]
+ _ = x[opFuncLit-28]
+ _ = x[opCompositeLit-29]
+ _ = x[opTypedCompositeLit-30]
+ _ = x[opSimpleSelectorExpr-31]
+ _ = x[opSelectorExpr-32]
+ _ = x[opTypeAssertExpr-33]
+ _ = x[opTypeSwitchAssertExpr-34]
+ _ = x[opStructType-35]
+ _ = x[opInterfaceType-36]
+ _ = x[opVoidFuncType-37]
+ _ = x[opFuncType-38]
+ _ = x[opArrayType-39]
+ _ = x[opSliceType-40]
+ _ = x[opMapType-41]
+ _ = x[opChanType-42]
+ _ = x[opKeyValueExpr-43]
+ _ = x[opEllipsis-44]
+ _ = x[opTypedEllipsis-45]
+ _ = x[opStarExpr-46]
+ _ = x[opUnaryExpr-47]
+ _ = x[opBinaryExpr-48]
+ _ = x[opParenExpr-49]
+ _ = x[opArgList-50]
+ _ = x[opSimpleArgList-51]
+ _ = x[opVariadicCallExpr-52]
+ _ = x[opNonVariadicCallExpr-53]
+ _ = x[opCallExpr-54]
+ _ = x[opAssignStmt-55]
+ _ = x[opMultiAssignStmt-56]
+ _ = x[opBranchStmt-57]
+ _ = x[opSimpleLabeledBranchStmt-58]
+ _ = x[opLabeledBranchStmt-59]
+ _ = x[opSimpleLabeledStmt-60]
+ _ = x[opLabeledStmt-61]
+ _ = x[opBlockStmt-62]
+ _ = x[opExprStmt-63]
+ _ = x[opGoStmt-64]
+ _ = x[opDeferStmt-65]
+ _ = x[opSendStmt-66]
+ _ = x[opEmptyStmt-67]
+ _ = x[opIncDecStmt-68]
+ _ = x[opReturnStmt-69]
+ _ = x[opIfStmt-70]
+ _ = x[opIfInitStmt-71]
+ _ = x[opIfElseStmt-72]
+ _ = x[opIfInitElseStmt-73]
+ _ = x[opIfNamedOptStmt-74]
+ _ = x[opIfNamedOptElseStmt-75]
+ _ = x[opSwitchStmt-76]
+ _ = x[opSwitchTagStmt-77]
+ _ = x[opSwitchInitStmt-78]
+ _ = x[opSwitchInitTagStmt-79]
+ _ = x[opSelectStmt-80]
+ _ = x[opTypeSwitchStmt-81]
+ _ = x[opTypeSwitchInitStmt-82]
+ _ = x[opCaseClause-83]
+ _ = x[opDefaultCaseClause-84]
+ _ = x[opCommClause-85]
+ _ = x[opDefaultCommClause-86]
+ _ = x[opForStmt-87]
+ _ = x[opForPostStmt-88]
+ _ = x[opForCondStmt-89]
+ _ = x[opForCondPostStmt-90]
+ _ = x[opForInitStmt-91]
+ _ = x[opForInitPostStmt-92]
+ _ = x[opForInitCondStmt-93]
+ _ = x[opForInitCondPostStmt-94]
+ _ = x[opRangeStmt-95]
+ _ = x[opRangeKeyStmt-96]
+ _ = x[opRangeKeyValueStmt-97]
+ _ = x[opRangeClause-98]
+ _ = x[opRangeHeader-99]
+ _ = x[opRangeKeyHeader-100]
+ _ = x[opRangeKeyValueHeader-101]
+ _ = x[opFieldList-102]
+ _ = x[opUnnamedField-103]
+ _ = x[opSimpleField-104]
+ _ = x[opField-105]
+ _ = x[opMultiField-106]
+ _ = x[opValueSpec-107]
+ _ = x[opValueInitSpec-108]
+ _ = x[opTypedValueInitSpec-109]
+ _ = x[opTypedValueSpec-110]
+ _ = x[opTypeSpec-111]
+ _ = x[opTypeAliasSpec-112]
+ _ = x[opFuncDecl-113]
+ _ = x[opMethodDecl-114]
+ _ = x[opFuncProtoDecl-115]
+ _ = x[opMethodProtoDecl-116]
+ _ = x[opDeclStmt-117]
+ _ = x[opConstDecl-118]
+ _ = x[opVarDecl-119]
+ _ = x[opTypeDecl-120]
+ _ = x[opAnyImportDecl-121]
+ _ = x[opImportDecl-122]
+ _ = x[opEmptyPackage-123]
+}
+
+const _operation_name = "InvalidNodeNamedNodeNodeSeqNamedNodeSeqOptNodeNamedOptNodeFieldNodeNamedFieldNodeMultiStmtMultiExprMultiDeclEndBasicLitStrictIntLitStrictFloatLitStrictCharLitStrictStringLitStrictComplexLitIdentPkgIndexExprSliceExprSliceFromExprSliceToExprSliceFromToExprSliceToCapExprSliceFromToCapExprFuncLitCompositeLitTypedCompositeLitSimpleSelectorExprSelectorExprTypeAssertExprTypeSwitchAssertExprStructTypeInterfaceTypeVoidFuncTypeFuncTypeArrayTypeSliceTypeMapTypeChanTypeKeyValueExprEllipsisTypedEllipsisStarExprUnaryExprBinaryExprParenExprArgListSimpleArgListVariadicCallExprNonVariadicCallExprCallExprAssignStmtMultiAssignStmtBranchStmtSimpleLabeledBranchStmtLabeledBranchStmtSimpleLabeledStmtLabeledStmtBlockStmtExprStmtGoStmtDeferStmtSendStmtEmptyStmtIncDecStmtReturnStmtIfStmtIfInitStmtIfElseStmtIfInitElseStmtIfNamedOptStmtIfNamedOptElseStmtSwitchStmtSwitchTagStmtSwitchInitStmtSwitchInitTagStmtSelectStmtTypeSwitchStmtTypeSwitchInitStmtCaseClauseDefaultCaseClauseCommClauseDefaultCommClauseForStmtForPostStmtForCondStmtForCondPostStmtForInitStmtForInitPostStmtForInitCondStmtForInitCondPostStmtRangeStmtRangeKeyStmtRangeKeyValueStmtRangeClauseRangeHeaderRangeKeyHeaderRangeKeyValueHeaderFieldListUnnamedFieldSimpleFieldFieldMultiFieldValueSpecValueInitSpecTypedValueInitSpecTypedValueSpecTypeSpecTypeAliasSpecFuncDeclMethodDeclFuncProtoDeclMethodProtoDeclDeclStmtConstDeclVarDeclTypeDeclAnyImportDeclImportDeclEmptyPackage"
+
+var _operation_index = [...]uint16{0, 7, 11, 20, 27, 39, 46, 58, 67, 81, 90, 99, 108, 111, 119, 131, 145, 158, 173, 189, 194, 197, 206, 215, 228, 239, 254, 268, 286, 293, 305, 322, 340, 352, 366, 386, 396, 409, 421, 429, 438, 447, 454, 462, 474, 482, 495, 503, 512, 522, 531, 538, 551, 567, 586, 594, 604, 619, 629, 652, 669, 686, 697, 706, 714, 720, 729, 737, 746, 756, 766, 772, 782, 792, 806, 820, 838, 848, 861, 875, 892, 902, 916, 934, 944, 961, 971, 988, 995, 1006, 1017, 1032, 1043, 1058, 1073, 1092, 1101, 1113, 1130, 1141, 1152, 1166, 1185, 1194, 1206, 1217, 1222, 1232, 1241, 1254, 1272, 1286, 1294, 1307, 1315, 1325, 1338, 1353, 1361, 1370, 1377, 1385, 1398, 1408, 1420}
+
+func (i operation) String() string {
+ if i >= operation(len(_operation_index)-1) {
+ return "operation(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _operation_name[_operation_index[i]:_operation_index[i+1]]
+}
diff --git a/vendor/github.com/quasilyte/gogrep/operations.gen.go b/vendor/github.com/quasilyte/gogrep/operations.gen.go
new file mode 100644
index 000000000..8ff1fbeb7
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/operations.gen.go
@@ -0,0 +1,1570 @@
+// Code generated by "gen_operations.go"; DO NOT EDIT.
+
+package gogrep
+
+import (
+ "github.com/quasilyte/gogrep/nodetag"
+)
+
+//go:generate stringer -type=operation -trimprefix=op
+type operation uint8
+
+const (
+ opInvalid operation = 0
+
+ // Tag: Node
+ opNode operation = 1
+
+ // Tag: Node
+ // ValueIndex: strings | wildcard name
+ opNamedNode operation = 2
+
+ // Tag: Unknown
+ opNodeSeq operation = 3
+
+ // Tag: Unknown
+ // ValueIndex: strings | wildcard name
+ opNamedNodeSeq operation = 4
+
+ // Tag: Unknown
+ opOptNode operation = 5
+
+ // Tag: Unknown
+ // ValueIndex: strings | wildcard name
+ opNamedOptNode operation = 6
+
+ // Tag: Node
+ opFieldNode operation = 7
+
+ // Tag: Node
+ // ValueIndex: strings | wildcard name
+ opNamedFieldNode operation = 8
+
+ // Tag: StmtList
+ // Args: stmts...
+ // Example: f(); g()
+ opMultiStmt operation = 9
+
+ // Tag: ExprList
+ // Args: exprs...
+ // Example: f(), g()
+ opMultiExpr operation = 10
+
+ // Tag: DeclList
+ // Args: decls...
+ // Example: f(), g()
+ opMultiDecl operation = 11
+
+ // Tag: Unknown
+ opEnd operation = 12
+
+ // Tag: BasicLit
+ // ValueIndex: ifaces | parsed literal value
+ opBasicLit operation = 13
+
+ // Tag: BasicLit
+ // ValueIndex: strings | raw literal value
+ opStrictIntLit operation = 14
+
+ // Tag: BasicLit
+ // ValueIndex: strings | raw literal value
+ opStrictFloatLit operation = 15
+
+ // Tag: BasicLit
+ // ValueIndex: strings | raw literal value
+ opStrictCharLit operation = 16
+
+ // Tag: BasicLit
+ // ValueIndex: strings | raw literal value
+ opStrictStringLit operation = 17
+
+ // Tag: BasicLit
+ // ValueIndex: strings | raw literal value
+ opStrictComplexLit operation = 18
+
+ // Tag: Ident
+ // ValueIndex: strings | ident name
+ opIdent operation = 19
+
+ // Tag: Ident
+ // ValueIndex: strings | package path
+ opPkg operation = 20
+
+ // Tag: IndexExpr
+ // Args: x expr
+ opIndexExpr operation = 21
+
+ // Tag: SliceExpr
+ // Args: x
+ opSliceExpr operation = 22
+
+ // Tag: SliceExpr
+ // Args: x from
+ // Example: x[from:]
+ opSliceFromExpr operation = 23
+
+ // Tag: SliceExpr
+ // Args: x to
+ // Example: x[:to]
+ opSliceToExpr operation = 24
+
+ // Tag: SliceExpr
+ // Args: x from to
+ // Example: x[from:to]
+ opSliceFromToExpr operation = 25
+
+ // Tag: SliceExpr
+ // Args: x to cap
+ // Example: x[:to:cap]
+ opSliceToCapExpr operation = 26
+
+ // Tag: SliceExpr
+ // Args: x from to cap
+ // Example: x[from:to:cap]
+ opSliceFromToCapExpr operation = 27
+
+ // Tag: FuncLit
+ // Args: type block
+ opFuncLit operation = 28
+
+ // Tag: CompositeLit
+ // Args: elts...
+ // Example: {elts...}
+ opCompositeLit operation = 29
+
+ // Tag: CompositeLit
+ // Args: typ elts...
+ // Example: typ{elts...}
+ opTypedCompositeLit operation = 30
+
+ // Tag: SelectorExpr
+ // Args: x
+ // ValueIndex: strings | selector name
+ opSimpleSelectorExpr operation = 31
+
+ // Tag: SelectorExpr
+ // Args: x sel
+ opSelectorExpr operation = 32
+
+ // Tag: TypeAssertExpr
+ // Args: x typ
+ opTypeAssertExpr operation = 33
+
+ // Tag: TypeAssertExpr
+ // Args: x
+ opTypeSwitchAssertExpr operation = 34
+
+ // Tag: StructType
+ // Args: fields
+ opStructType operation = 35
+
+ // Tag: InterfaceType
+ // Args: fields
+ opInterfaceType operation = 36
+
+ // Tag: FuncType
+ // Args: params
+ opVoidFuncType operation = 37
+
+ // Tag: FuncType
+ // Args: params results
+ opFuncType operation = 38
+
+ // Tag: ArrayType
+ // Args: length elem
+ opArrayType operation = 39
+
+ // Tag: ArrayType
+ // Args: elem
+ opSliceType operation = 40
+
+ // Tag: MapType
+ // Args: key value
+ opMapType operation = 41
+
+ // Tag: ChanType
+ // Args: value
+ // Value: ast.ChanDir | channel direction
+ opChanType operation = 42
+
+ // Tag: KeyValueExpr
+ // Args: key value
+ opKeyValueExpr operation = 43
+
+ // Tag: Ellipsis
+ opEllipsis operation = 44
+
+ // Tag: Ellipsis
+ // Args: type
+ opTypedEllipsis operation = 45
+
+ // Tag: StarExpr
+ // Args: x
+ opStarExpr operation = 46
+
+ // Tag: UnaryExpr
+ // Args: x
+ // Value: token.Token | unary operator
+ opUnaryExpr operation = 47
+
+ // Tag: BinaryExpr
+ // Args: x y
+ // Value: token.Token | binary operator
+ opBinaryExpr operation = 48
+
+ // Tag: ParenExpr
+ // Args: x
+ opParenExpr operation = 49
+
+ // Tag: Unknown
+ // Args: exprs...
+ // Example: 1, 2, 3
+ opArgList operation = 50
+
+ // Tag: Unknown
+ // Like ArgList, but pattern contains no $*
+ // Args: exprs[]
+ // Example: 1, 2, 3
+ // Value: int | slice len
+ opSimpleArgList operation = 51
+
+ // Tag: CallExpr
+ // Args: fn args
+ // Example: f(1, xs...)
+ opVariadicCallExpr operation = 52
+
+ // Tag: CallExpr
+ // Args: fn args
+ // Example: f(1, xs)
+ opNonVariadicCallExpr operation = 53
+
+ // Tag: CallExpr
+ // Args: fn args
+ // Example: f(1, xs) or f(1, xs...)
+ opCallExpr operation = 54
+
+ // Tag: AssignStmt
+ // Args: lhs rhs
+ // Example: lhs := rhs()
+ // Value: token.Token | ':=' or '='
+ opAssignStmt operation = 55
+
+ // Tag: AssignStmt
+ // Args: lhs... rhs...
+ // Example: lhs1, lhs2 := rhs()
+ // Value: token.Token | ':=' or '='
+ opMultiAssignStmt operation = 56
+
+ // Tag: BranchStmt
+ // Args: x
+ // Value: token.Token | branch kind
+ opBranchStmt operation = 57
+
+ // Tag: BranchStmt
+ // Args: x
+ // Value: token.Token | branch kind
+ // ValueIndex: strings | label name
+ opSimpleLabeledBranchStmt operation = 58
+
+ // Tag: BranchStmt
+ // Args: label x
+ // Value: token.Token | branch kind
+ opLabeledBranchStmt operation = 59
+
+ // Tag: LabeledStmt
+ // Args: x
+ // ValueIndex: strings | label name
+ opSimpleLabeledStmt operation = 60
+
+ // Tag: LabeledStmt
+ // Args: label x
+ opLabeledStmt operation = 61
+
+ // Tag: BlockStmt
+ // Args: body...
+ opBlockStmt operation = 62
+
+ // Tag: ExprStmt
+ // Args: x
+ opExprStmt operation = 63
+
+ // Tag: GoStmt
+ // Args: x
+ opGoStmt operation = 64
+
+ // Tag: DeferStmt
+ // Args: x
+ opDeferStmt operation = 65
+
+ // Tag: SendStmt
+ // Args: ch value
+ opSendStmt operation = 66
+
+ // Tag: EmptyStmt
+ opEmptyStmt operation = 67
+
+ // Tag: IncDecStmt
+ // Args: x
+ // Value: token.Token | '++' or '--'
+ opIncDecStmt operation = 68
+
+ // Tag: ReturnStmt
+ // Args: results...
+ opReturnStmt operation = 69
+
+ // Tag: IfStmt
+ // Args: cond block
+ // Example: if cond {}
+ opIfStmt operation = 70
+
+ // Tag: IfStmt
+ // Args: init cond block
+ // Example: if init; cond {}
+ opIfInitStmt operation = 71
+
+ // Tag: IfStmt
+ // Args: cond block else
+ // Example: if cond {} else ...
+ opIfElseStmt operation = 72
+
+ // Tag: IfStmt
+ // Args: init cond block else
+ // Example: if init; cond {} else ...
+ opIfInitElseStmt operation = 73
+
+ // Tag: IfStmt
+ // Args: block
+ // Example: if $*x {}
+ // ValueIndex: strings | wildcard name
+ opIfNamedOptStmt operation = 74
+
+ // Tag: IfStmt
+ // Args: block else
+ // Example: if $*x {} else ...
+ // ValueIndex: strings | wildcard name
+ opIfNamedOptElseStmt operation = 75
+
+ // Tag: SwitchStmt
+ // Args: body...
+ // Example: switch {}
+ opSwitchStmt operation = 76
+
+ // Tag: SwitchStmt
+ // Args: tag body...
+ // Example: switch tag {}
+ opSwitchTagStmt operation = 77
+
+ // Tag: SwitchStmt
+ // Args: init body...
+ // Example: switch init; {}
+ opSwitchInitStmt operation = 78
+
+ // Tag: SwitchStmt
+ // Args: init tag body...
+ // Example: switch init; tag {}
+ opSwitchInitTagStmt operation = 79
+
+ // Tag: SelectStmt
+ // Args: body...
+ opSelectStmt operation = 80
+
+ // Tag: TypeSwitchStmt
+ // Args: x block
+ // Example: switch x.(type) {}
+ opTypeSwitchStmt operation = 81
+
+ // Tag: TypeSwitchStmt
+ // Args: init x block
+ // Example: switch init; x.(type) {}
+ opTypeSwitchInitStmt operation = 82
+
+ // Tag: CaseClause
+ // Args: values... body...
+ opCaseClause operation = 83
+
+ // Tag: CaseClause
+ // Args: body...
+ opDefaultCaseClause operation = 84
+
+ // Tag: CommClause
+ // Args: comm body...
+ opCommClause operation = 85
+
+ // Tag: CommClause
+ // Args: body...
+ opDefaultCommClause operation = 86
+
+ // Tag: ForStmt
+ // Args: block
+ // Example: for {}
+ opForStmt operation = 87
+
+ // Tag: ForStmt
+ // Args: post block
+ // Example: for ; ; post {}
+ opForPostStmt operation = 88
+
+ // Tag: ForStmt
+ // Args: cond block
+ // Example: for ; cond; {}
+ opForCondStmt operation = 89
+
+ // Tag: ForStmt
+ // Args: cond post block
+ // Example: for ; cond; post {}
+ opForCondPostStmt operation = 90
+
+ // Tag: ForStmt
+ // Args: init block
+ // Example: for init; ; {}
+ opForInitStmt operation = 91
+
+ // Tag: ForStmt
+ // Args: init post block
+ // Example: for init; ; post {}
+ opForInitPostStmt operation = 92
+
+ // Tag: ForStmt
+ // Args: init cond block
+ // Example: for init; cond; {}
+ opForInitCondStmt operation = 93
+
+ // Tag: ForStmt
+ // Args: init cond post block
+ // Example: for init; cond; post {}
+ opForInitCondPostStmt operation = 94
+
+ // Tag: RangeStmt
+ // Args: x block
+ // Example: for range x {}
+ opRangeStmt operation = 95
+
+ // Tag: RangeStmt
+ // Args: key x block
+ // Example: for key := range x {}
+ // Value: token.Token | ':=' or '='
+ opRangeKeyStmt operation = 96
+
+ // Tag: RangeStmt
+ // Args: key value x block
+ // Example: for key, value := range x {}
+ // Value: token.Token | ':=' or '='
+ opRangeKeyValueStmt operation = 97
+
+ // Tag: RangeStmt
+ // Args: x
+ // Example: range x
+ opRangeClause operation = 98
+
+ // Tag: RangeStmt
+ // Args: x
+ // Example: for range x
+ opRangeHeader operation = 99
+
+ // Tag: RangeStmt
+ // Args: key x
+ // Example: for key := range x
+ // Value: token.Token | ':=' or '='
+ opRangeKeyHeader operation = 100
+
+ // Tag: RangeStmt
+ // Args: key value x
+ // Example: for key, value := range x
+ // Value: token.Token | ':=' or '='
+ opRangeKeyValueHeader operation = 101
+
+ // Tag: Unknown
+ // Args: fields...
+ opFieldList operation = 102
+
+ // Tag: Unknown
+ // Args: typ
+ // Example: type
+ opUnnamedField operation = 103
+
+ // Tag: Unknown
+ // Args: typ
+ // Example: name type
+ // ValueIndex: strings | field name
+ opSimpleField operation = 104
+
+ // Tag: Unknown
+ // Args: name typ
+ // Example: $name type
+ opField operation = 105
+
+ // Tag: Unknown
+ // Args: names... typ
+ // Example: name1, name2 type
+ opMultiField operation = 106
+
+ // Tag: ValueSpec
+ // Args: value
+ opValueSpec operation = 107
+
+ // Tag: ValueSpec
+ // Args: lhs... rhs...
+ // Example: lhs = rhs
+ opValueInitSpec operation = 108
+
+ // Tag: ValueSpec
+ // Args: lhs... type rhs...
+ // Example: lhs typ = rhs
+ opTypedValueInitSpec operation = 109
+
+ // Tag: ValueSpec
+ // Args: lhs... type
+ // Example: lhs typ
+ opTypedValueSpec operation = 110
+
+ // Tag: TypeSpec
+ // Args: name type
+ // Example: name type
+ opTypeSpec operation = 111
+
+ // Tag: TypeSpec
+ // Args: name type
+ // Example: name = type
+ opTypeAliasSpec operation = 112
+
+ // Tag: FuncDecl
+ // Args: name type block
+ opFuncDecl operation = 113
+
+ // Tag: FuncDecl
+ // Args: recv name type block
+ opMethodDecl operation = 114
+
+ // Tag: FuncDecl
+ // Args: name type
+ opFuncProtoDecl operation = 115
+
+ // Tag: FuncDecl
+ // Args: recv name type
+ opMethodProtoDecl operation = 116
+
+ // Tag: DeclStmt
+ // Args: decl
+ opDeclStmt operation = 117
+
+ // Tag: GenDecl
+ // Args: valuespecs...
+ opConstDecl operation = 118
+
+ // Tag: GenDecl
+ // Args: valuespecs...
+ opVarDecl operation = 119
+
+ // Tag: GenDecl
+ // Args: typespecs...
+ opTypeDecl operation = 120
+
+ // Tag: GenDecl
+ opAnyImportDecl operation = 121
+
+ // Tag: GenDecl
+ // Args: importspecs...
+ opImportDecl operation = 122
+
+ // Tag: File
+ // Args: name
+ opEmptyPackage operation = 123
+)
+
+type operationInfo struct {
+ Tag nodetag.Value
+ NumArgs int
+ ValueKind valueKind
+ ExtraValueKind valueKind
+ VariadicMap bitmap64
+ SliceIndex int
+}
+
+var operationInfoTable = [256]operationInfo{
+ opInvalid: {},
+
+ opNode: {
+ Tag: nodetag.Node,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opNamedNode: {
+ Tag: nodetag.Node,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opNodeSeq: {
+ Tag: nodetag.Unknown,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opNamedNodeSeq: {
+ Tag: nodetag.Unknown,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opOptNode: {
+ Tag: nodetag.Unknown,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opNamedOptNode: {
+ Tag: nodetag.Unknown,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opFieldNode: {
+ Tag: nodetag.Node,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opNamedFieldNode: {
+ Tag: nodetag.Node,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opMultiStmt: {
+ Tag: nodetag.StmtList,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opMultiExpr: {
+ Tag: nodetag.ExprList,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opMultiDecl: {
+ Tag: nodetag.DeclList,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opEnd: {
+ Tag: nodetag.Unknown,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opBasicLit: {
+ Tag: nodetag.BasicLit,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: ifaceValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opStrictIntLit: {
+ Tag: nodetag.BasicLit,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opStrictFloatLit: {
+ Tag: nodetag.BasicLit,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opStrictCharLit: {
+ Tag: nodetag.BasicLit,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opStrictStringLit: {
+ Tag: nodetag.BasicLit,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opStrictComplexLit: {
+ Tag: nodetag.BasicLit,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIdent: {
+ Tag: nodetag.Ident,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opPkg: {
+ Tag: nodetag.Ident,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIndexExpr: {
+ Tag: nodetag.IndexExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSliceExpr: {
+ Tag: nodetag.SliceExpr,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSliceFromExpr: {
+ Tag: nodetag.SliceExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSliceToExpr: {
+ Tag: nodetag.SliceExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSliceFromToExpr: {
+ Tag: nodetag.SliceExpr,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSliceToCapExpr: {
+ Tag: nodetag.SliceExpr,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSliceFromToCapExpr: {
+ Tag: nodetag.SliceExpr,
+ NumArgs: 4,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opFuncLit: {
+ Tag: nodetag.FuncLit,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opCompositeLit: {
+ Tag: nodetag.CompositeLit,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opTypedCompositeLit: {
+ Tag: nodetag.CompositeLit,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 2, // 10
+ SliceIndex: -1,
+ },
+ opSimpleSelectorExpr: {
+ Tag: nodetag.SelectorExpr,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSelectorExpr: {
+ Tag: nodetag.SelectorExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opTypeAssertExpr: {
+ Tag: nodetag.TypeAssertExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opTypeSwitchAssertExpr: {
+ Tag: nodetag.TypeAssertExpr,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opStructType: {
+ Tag: nodetag.StructType,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opInterfaceType: {
+ Tag: nodetag.StructType,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opVoidFuncType: {
+ Tag: nodetag.FuncType,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opFuncType: {
+ Tag: nodetag.FuncType,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opArrayType: {
+ Tag: nodetag.ArrayType,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSliceType: {
+ Tag: nodetag.ArrayType,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opMapType: {
+ Tag: nodetag.MapType,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opChanType: {
+ Tag: nodetag.ChanType,
+ NumArgs: 1,
+ ValueKind: chandirValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opKeyValueExpr: {
+ Tag: nodetag.KeyValueExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opEllipsis: {
+ Tag: nodetag.Ellipsis,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opTypedEllipsis: {
+ Tag: nodetag.Ellipsis,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opStarExpr: {
+ Tag: nodetag.StarExpr,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opUnaryExpr: {
+ Tag: nodetag.UnaryExpr,
+ NumArgs: 1,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opBinaryExpr: {
+ Tag: nodetag.BinaryExpr,
+ NumArgs: 2,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opParenExpr: {
+ Tag: nodetag.ParenExpr,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opArgList: {
+ Tag: nodetag.Unknown,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opSimpleArgList: {
+ Tag: nodetag.Unknown,
+ NumArgs: 1,
+ ValueKind: intValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: 0,
+ },
+ opVariadicCallExpr: {
+ Tag: nodetag.CallExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opNonVariadicCallExpr: {
+ Tag: nodetag.CallExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opCallExpr: {
+ Tag: nodetag.CallExpr,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opAssignStmt: {
+ Tag: nodetag.AssignStmt,
+ NumArgs: 2,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opMultiAssignStmt: {
+ Tag: nodetag.AssignStmt,
+ NumArgs: 2,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 3, // 11
+ SliceIndex: -1,
+ },
+ opBranchStmt: {
+ Tag: nodetag.BranchStmt,
+ NumArgs: 1,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSimpleLabeledBranchStmt: {
+ Tag: nodetag.BranchStmt,
+ NumArgs: 1,
+ ValueKind: tokenValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opLabeledBranchStmt: {
+ Tag: nodetag.BranchStmt,
+ NumArgs: 2,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSimpleLabeledStmt: {
+ Tag: nodetag.LabeledStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opLabeledStmt: {
+ Tag: nodetag.LabeledStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opBlockStmt: {
+ Tag: nodetag.BlockStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opExprStmt: {
+ Tag: nodetag.ExprStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opGoStmt: {
+ Tag: nodetag.GoStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opDeferStmt: {
+ Tag: nodetag.DeferStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSendStmt: {
+ Tag: nodetag.SendStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opEmptyStmt: {
+ Tag: nodetag.EmptyStmt,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIncDecStmt: {
+ Tag: nodetag.IncDecStmt,
+ NumArgs: 1,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opReturnStmt: {
+ Tag: nodetag.ReturnStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opIfStmt: {
+ Tag: nodetag.IfStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIfInitStmt: {
+ Tag: nodetag.IfStmt,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIfElseStmt: {
+ Tag: nodetag.IfStmt,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIfInitElseStmt: {
+ Tag: nodetag.IfStmt,
+ NumArgs: 4,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIfNamedOptStmt: {
+ Tag: nodetag.IfStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opIfNamedOptElseStmt: {
+ Tag: nodetag.IfStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSwitchStmt: {
+ Tag: nodetag.SwitchStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opSwitchTagStmt: {
+ Tag: nodetag.SwitchStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 2, // 10
+ SliceIndex: -1,
+ },
+ opSwitchInitStmt: {
+ Tag: nodetag.SwitchStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 2, // 10
+ SliceIndex: -1,
+ },
+ opSwitchInitTagStmt: {
+ Tag: nodetag.SwitchStmt,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 4, // 100
+ SliceIndex: -1,
+ },
+ opSelectStmt: {
+ Tag: nodetag.SelectStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opTypeSwitchStmt: {
+ Tag: nodetag.TypeSwitchStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opTypeSwitchInitStmt: {
+ Tag: nodetag.TypeSwitchStmt,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opCaseClause: {
+ Tag: nodetag.CaseClause,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 3, // 11
+ SliceIndex: -1,
+ },
+ opDefaultCaseClause: {
+ Tag: nodetag.CaseClause,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opCommClause: {
+ Tag: nodetag.CommClause,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 2, // 10
+ SliceIndex: -1,
+ },
+ opDefaultCommClause: {
+ Tag: nodetag.CommClause,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opForStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opForPostStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opForCondStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opForCondPostStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opForInitStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opForInitPostStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opForInitCondStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opForInitCondPostStmt: {
+ Tag: nodetag.ForStmt,
+ NumArgs: 4,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opRangeStmt: {
+ Tag: nodetag.RangeStmt,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opRangeKeyStmt: {
+ Tag: nodetag.RangeStmt,
+ NumArgs: 3,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opRangeKeyValueStmt: {
+ Tag: nodetag.RangeStmt,
+ NumArgs: 4,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opRangeClause: {
+ Tag: nodetag.RangeStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opRangeHeader: {
+ Tag: nodetag.RangeStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opRangeKeyHeader: {
+ Tag: nodetag.RangeStmt,
+ NumArgs: 2,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opRangeKeyValueHeader: {
+ Tag: nodetag.RangeStmt,
+ NumArgs: 3,
+ ValueKind: tokenValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opFieldList: {
+ Tag: nodetag.Unknown,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opUnnamedField: {
+ Tag: nodetag.Unknown,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opSimpleField: {
+ Tag: nodetag.Unknown,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: stringValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opField: {
+ Tag: nodetag.Unknown,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opMultiField: {
+ Tag: nodetag.Unknown,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opValueSpec: {
+ Tag: nodetag.ValueSpec,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opValueInitSpec: {
+ Tag: nodetag.ValueSpec,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 3, // 11
+ SliceIndex: -1,
+ },
+ opTypedValueInitSpec: {
+ Tag: nodetag.ValueSpec,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 5, // 101
+ SliceIndex: -1,
+ },
+ opTypedValueSpec: {
+ Tag: nodetag.ValueSpec,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opTypeSpec: {
+ Tag: nodetag.TypeSpec,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opTypeAliasSpec: {
+ Tag: nodetag.TypeSpec,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opFuncDecl: {
+ Tag: nodetag.FuncDecl,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opMethodDecl: {
+ Tag: nodetag.FuncDecl,
+ NumArgs: 4,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opFuncProtoDecl: {
+ Tag: nodetag.FuncDecl,
+ NumArgs: 2,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opMethodProtoDecl: {
+ Tag: nodetag.FuncDecl,
+ NumArgs: 3,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opDeclStmt: {
+ Tag: nodetag.DeclStmt,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opConstDecl: {
+ Tag: nodetag.GenDecl,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opVarDecl: {
+ Tag: nodetag.GenDecl,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opTypeDecl: {
+ Tag: nodetag.GenDecl,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opAnyImportDecl: {
+ Tag: nodetag.GenDecl,
+ NumArgs: 0,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+ opImportDecl: {
+ Tag: nodetag.GenDecl,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 1, // 1
+ SliceIndex: -1,
+ },
+ opEmptyPackage: {
+ Tag: nodetag.File,
+ NumArgs: 1,
+ ValueKind: emptyValue,
+ ExtraValueKind: emptyValue,
+ VariadicMap: 0, // 0
+ SliceIndex: -1,
+ },
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/parse.go b/vendor/github.com/quasilyte/gogrep/parse.go
index b46e64393..f70c4a8f4 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/parse.go
+++ b/vendor/github.com/quasilyte/gogrep/parse.go
@@ -10,62 +10,43 @@ import (
"go/parser"
"go/scanner"
"go/token"
- "regexp"
- "strconv"
"strings"
"text/template"
)
-func (m *matcher) transformSource(expr string) (string, []posOffset, error) {
- toks, err := m.tokenize([]byte(expr))
+func transformSource(expr string) (string, []posOffset, error) {
+ toks, err := tokenize([]byte(expr))
if err != nil {
return "", nil, fmt.Errorf("cannot tokenize expr: %v", err)
}
var offs []posOffset
lbuf := lineColBuffer{line: 1, col: 1}
- addOffset := func(length int) {
- lbuf.offs -= length
- offs = append(offs, posOffset{
- atLine: lbuf.line,
- atCol: lbuf.col,
- offset: length,
- })
- }
- if len(toks) > 0 && toks[0].tok == tokAggressive {
- toks = toks[1:]
- m.aggressive = true
- }
lastLit := false
for _, t := range toks {
if lbuf.offs >= t.pos.Offset && lastLit && t.lit != "" {
- lbuf.WriteString(" ")
+ _, _ = lbuf.WriteString(" ")
}
for lbuf.offs < t.pos.Offset {
- lbuf.WriteString(" ")
+ _, _ = lbuf.WriteString(" ")
}
if t.lit == "" {
- lbuf.WriteString(t.tok.String())
+ _, _ = lbuf.WriteString(t.tok.String())
lastLit = false
continue
}
- if isWildName(t.lit) {
- // to correct the position offsets for the extra
- // info attached to ident name strings
- addOffset(len(wildPrefix) - 1)
- }
- lbuf.WriteString(t.lit)
+ _, _ = lbuf.WriteString(t.lit)
lastLit = strings.TrimSpace(t.lit) != ""
}
// trailing newlines can cause issues with commas
return strings.TrimSpace(lbuf.String()), offs, nil
}
-func (m *matcher) parseExpr(expr string) (ast.Node, error) {
- exprStr, offs, err := m.transformSource(expr)
+func parseExpr(fset *token.FileSet, expr string) (ast.Node, error) {
+ exprStr, offs, err := transformSource(expr)
if err != nil {
return nil, err
}
- node, _, err := parseDetectingNode(m.fset, exprStr)
+ node, err := parseDetectingNode(fset, exprStr)
if err != nil {
err = subPosOffsets(err, offs...)
return nil, fmt.Errorf("cannot parse expr: %v", err)
@@ -94,6 +75,9 @@ func (l *lineColBuffer) WriteString(s string) (n int, err error) {
var tmplDecl = template.Must(template.New("").Parse(`` +
`package p; {{ . }}`))
+var tmplBlock = template.Must(template.New("").Parse(`` +
+ `package p; func _() { if true {{ . }} else {} }`))
+
var tmplExprs = template.Must(template.New("").Parse(`` +
`package p; var _ = []interface{}{ {{ . }}, }`))
@@ -144,27 +128,42 @@ func parseType(fset *token.FileSet, src string) (ast.Expr, *ast.File, error) {
// one of: *ast.File, ast.Decl, ast.Expr, ast.Stmt, *ast.ValueSpec.
// It also returns the *ast.File used for the parsing, so that the returned node
// can be easily type-checked.
-func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, error) {
+func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, error) {
file := fset.AddFile("", fset.Base(), len(src))
scan := scanner.Scanner{}
scan.Init(file, []byte(src), nil, 0)
if _, tok, _ := scan.Scan(); tok == token.EOF {
- return nil, nil, fmt.Errorf("empty source code")
+ return nil, fmt.Errorf("empty source code")
}
var mainErr error
- // first try as a whole file
- if f, err := parser.ParseFile(fset, "", src, 0); err == nil && noBadNodes(f) {
- return f, f, nil
+ // Some adhoc patterns first.
+ if strings.HasPrefix(src, "range ") {
+ e, err := parser.ParseExpr(src[len("range "):])
+ if err == nil && noBadNodes(e) {
+ return &rangeClause{X: e}, nil
+ }
+ }
+ if strings.HasPrefix(src, "for ") && !strings.HasSuffix(src, "}") {
+ asStmts := execTmpl(tmplStmts, src+"{}")
+ f, err := parser.ParseFile(fset, "", asStmts, 0)
+ if err == nil && noBadNodes(f) {
+ bl := f.Decls[0].(*ast.FuncDecl).Body
+ if len(bl.List) == 1 {
+ return &rangeHeader{Node: bl.List[0].(*ast.RangeStmt)}, nil
+ }
+ }
}
- // then as a single declaration, or many
- asDecl := execTmpl(tmplDecl, src)
- if f, err := parser.ParseFile(fset, "", asDecl, 0); err == nil && noBadNodes(f) {
- if len(f.Decls) == 1 {
- return f.Decls[0], f, nil
+ // try as a block; otherwise blocks might be mistaken for composite
+ // literals further below
+ asBlock := execTmpl(tmplBlock, src)
+ if f, err := parser.ParseFile(fset, "", asBlock, 0); err == nil && noBadNodes(f) {
+ bl := f.Decls[0].(*ast.FuncDecl).Body
+ if len(bl.List) == 1 {
+ ifs := bl.List[0].(*ast.IfStmt)
+ return ifs.Body, nil
}
- return f, f, nil
}
// then as value expressions
@@ -173,39 +172,57 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, e
vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec)
cl := vs.Values[0].(*ast.CompositeLit)
if len(cl.Elts) == 1 {
- return cl.Elts[0], f, nil
+ return cl.Elts[0], nil
}
- return exprList(cl.Elts), f, nil
+ return ExprSlice(cl.Elts), nil
}
// then try as statements
asStmts := execTmpl(tmplStmts, src)
- if f, err := parser.ParseFile(fset, "", asStmts, 0); err == nil && noBadNodes(f) {
+ f, err := parser.ParseFile(fset, "", asStmts, 0)
+ if err == nil && noBadNodes(f) {
bl := f.Decls[0].(*ast.FuncDecl).Body
if len(bl.List) == 1 {
- return bl.List[0], f, nil
+ return bl.List[0], nil
}
- return stmtList(bl.List), f, nil
- } else {
- // Statements is what covers most cases, so it will give
- // the best overall error message. Show positions
- // relative to where the user's code is put in the
- // template.
- mainErr = subPosOffsets(err, posOffset{1, 1, 22})
+ return stmtSlice(bl.List), nil
+ }
+ // Statements is what covers most cases, so it will give
+ // the best overall error message. Show positions
+ // relative to where the user's code is put in the
+ // template.
+ mainErr = subPosOffsets(err, posOffset{1, 1, 22})
+
+ // try as a single declaration, or many
+ asDecl := execTmpl(tmplDecl, src)
+ if f, err := parser.ParseFile(fset, "", asDecl, 0); err == nil && noBadNodes(f) {
+ if len(f.Decls) == 1 {
+ return f.Decls[0], nil
+ }
+ return declSlice(f.Decls), nil
+ }
+
+ // try as a whole file
+ if f, err := parser.ParseFile(fset, "", src, 0); err == nil && noBadNodes(f) {
+ return f, nil
}
// type expressions not yet picked up, for e.g. chans and interfaces
if typ, f, err := parseType(fset, src); err == nil && noBadNodes(f) {
- return typ, f, nil
+ return typ, nil
}
// value specs
asValSpec := execTmpl(tmplValSpec, src)
if f, err := parser.ParseFile(fset, "", asValSpec, 0); err == nil && noBadNodes(f) {
- vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec)
- return vs, f, nil
+ decl := f.Decls[0].(*ast.GenDecl)
+ if len(decl.Specs) != 0 {
+ vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec)
+ return vs, nil
+ }
}
- return nil, nil, mainErr
+
+ return nil, mainErr
}
type posOffset struct {
@@ -233,11 +250,6 @@ func subPosOffsets(err error, offs ...posOffset) error {
return list
}
-const (
- _ token.Token = -iota
- tokAggressive
-)
-
type fullToken struct {
pos token.Position
tok token.Token
@@ -252,7 +264,7 @@ const (
caseHere
)
-func (m *matcher) tokenize(src []byte) ([]fullToken, error) {
+func tokenize(src []byte) ([]fullToken, error) {
var s scanner.Scanner
fset := token.NewFileSet()
file := fset.AddFile("", fset.Base(), len(src))
@@ -282,9 +294,6 @@ func (m *matcher) tokenize(src []byte) ([]fullToken, error) {
for t := next(); t.tok != token.EOF; t = next() {
switch t.lit {
case "$": // continues below
- case "~":
- toks = append(toks, fullToken{t.pos, tokAggressive, ""})
- continue
case "switch", "select", "case":
if t.lit == "case" {
caseStat = caseNone
@@ -299,7 +308,7 @@ func (m *matcher) tokenize(src []byte) ([]fullToken, error) {
toks = append(toks, t)
continue
}
- wt, err := m.wildcard(t.pos, next)
+ wt, err := tokenizeWildcard(t.pos, next)
if err != nil {
return nil, err
}
@@ -308,145 +317,81 @@ func (m *matcher) tokenize(src []byte) ([]fullToken, error) {
}
toks = append(toks, wt)
if caseStat == caseHere {
- toks = append(toks, fullToken{wt.pos, token.COLON, ""})
- toks = append(toks, fullToken{wt.pos, token.IDENT, "gogrep_body"})
+ toks = append(toks,
+ fullToken{wt.pos, token.COLON, ""},
+ fullToken{wt.pos, token.IDENT, "gogrep_body"})
}
}
return toks, err
}
-func (m *matcher) wildcard(pos token.Position, next func() fullToken) (fullToken, error) {
- wt := fullToken{pos, token.IDENT, wildPrefix}
+type varInfo struct {
+ Name string
+ Seq bool
+}
+
+func tokenizeWildcard(pos token.Position, next func() fullToken) (fullToken, error) {
t := next()
- var info varInfo
+ any := false
if t.tok == token.MUL {
t = next()
- info.any = true
+ any = true
}
+ wildName := encodeWildName(t.lit, any)
+ wt := fullToken{pos, token.IDENT, wildName}
if t.tok != token.IDENT {
return wt, fmt.Errorf("%v: $ must be followed by ident, got %v",
t.pos, t.tok)
}
- id := len(m.vars)
- wt.lit += strconv.Itoa(id)
- info.name = t.lit
- m.vars = append(m.vars, info)
return wt, nil
}
-type typeCheck struct {
- op string // "type", "asgn", "conv"
- expr ast.Expr
-}
+const wildSeparator = "ᐸᐳ"
-type attribute interface{}
+func isWildName(s string) bool {
+ return strings.HasPrefix(s, wildSeparator)
+}
-type typProperty string
+func encodeWildName(name string, any bool) string {
+ suffix := "v"
+ if any {
+ suffix = "a"
+ }
+ return wildSeparator + name + wildSeparator + suffix
+}
-type typUnderlying string
+func decodeWildName(s string) varInfo {
+ s = s[len(wildSeparator):]
+ nameEnd := strings.Index(s, wildSeparator)
+ name := s[:nameEnd+0]
+ s = s[nameEnd:]
+ s = s[len(wildSeparator):]
+ kind := s
+ return varInfo{Name: name, Seq: kind == "a"}
+}
-func (m *matcher) parseAttrs(src string) (attribute, error) {
- toks, err := m.tokenize([]byte(src))
- if err != nil {
- return nil, err
- }
- i := -1
- var t fullToken
- next := func() fullToken {
- if i++; i < len(toks) {
- return toks[i]
- }
- return fullToken{tok: token.EOF, pos: t.pos}
- }
- t = next()
- op := t.lit
- switch op { // the ones that don't take args
- case "comp", "addr":
- if t = next(); t.tok != token.SEMICOLON {
- return nil, fmt.Errorf("%v: wanted EOF, got %v", t.pos, t.tok)
- }
- return typProperty(op), nil
- }
- opPos := t.pos
- if t = next(); t.tok != token.LPAREN {
- return nil, fmt.Errorf("%v: wanted (", t.pos)
- }
- var attr attribute
- switch op {
- case "rx":
- t = next()
- rxStr, err := strconv.Unquote(t.lit)
- if err != nil {
- return nil, fmt.Errorf("%v: %v", t.pos, err)
- }
- if !strings.HasPrefix(rxStr, "^") {
- rxStr = "^" + rxStr
- }
- if !strings.HasSuffix(rxStr, "$") {
- rxStr = rxStr + "$"
- }
- rx, err := regexp.Compile(rxStr)
- if err != nil {
- return nil, fmt.Errorf("%v: %v", t.pos, err)
- }
- attr = rx
- case "type", "asgn", "conv":
- t = next()
- start := t.pos.Offset
- for open := 1; open > 0; t = next() {
- switch t.tok {
- case token.LPAREN:
- open++
- case token.RPAREN:
- open--
- case token.EOF:
- return nil, fmt.Errorf("%v: expected ) to close (", t.pos)
- }
- }
- end := t.pos.Offset - 1
- typeStr := strings.TrimSpace(string(src[start:end]))
- fset := token.NewFileSet()
- typeExpr, _, err := parseType(fset, typeStr)
- if err != nil {
- return nil, err
- }
- attr = typeCheck{op, typeExpr}
- i -= 2 // since we went past RPAREN above
- case "is":
- switch t = next(); t.lit {
- case "basic", "array", "slice", "struct", "interface",
- "pointer", "func", "map", "chan":
- default:
- return nil, fmt.Errorf("%v: unknown type: %q", t.pos,
- t.lit)
+func decodeWildNode(n ast.Node) varInfo {
+ switch n := n.(type) {
+ case *ast.ExprStmt:
+ return decodeWildNode(n.X)
+ case *ast.Ident:
+ if isWildName(n.Name) {
+ return decodeWildName(n.Name)
}
- attr = typUnderlying(t.lit)
- default:
- return nil, fmt.Errorf("%v: unknown op %q", opPos, op)
}
- if t = next(); t.tok != token.RPAREN {
- return nil, fmt.Errorf("%v: wanted ), got %v", t.pos, t.tok)
- }
- if t = next(); t.tok != token.SEMICOLON {
- return nil, fmt.Errorf("%v: wanted EOF, got %v", t.pos, t.tok)
- }
- return attr, nil
+ return varInfo{}
}
-// using a prefix is good enough for now
-const wildPrefix = "gogrep_"
-
-func isWildName(name string) bool {
- return strings.HasPrefix(name, wildPrefix)
+type rangeClause struct {
+ X ast.Expr
}
-func fromWildName(s string) int {
- if !isWildName(s) {
- return -1
- }
- n, err := strconv.Atoi(s[len(wildPrefix):])
- if err != nil {
- return -1
- }
- return n
+type rangeHeader struct {
+ Node *ast.RangeStmt
}
+
+func (*rangeClause) Pos() token.Pos { return 0 }
+func (*rangeClause) End() token.Pos { return 0 }
+
+func (*rangeHeader) Pos() token.Pos { return 0 }
+func (*rangeHeader) End() token.Pos { return 0 }
diff --git a/vendor/github.com/quasilyte/gogrep/slices.go b/vendor/github.com/quasilyte/gogrep/slices.go
new file mode 100644
index 000000000..13775a818
--- /dev/null
+++ b/vendor/github.com/quasilyte/gogrep/slices.go
@@ -0,0 +1,58 @@
+package gogrep
+
+import (
+ "go/ast"
+ "go/token"
+)
+
+type NodeSlice interface {
+ At(i int) ast.Node
+ Len() int
+ slice(from, to int) NodeSlice
+ ast.Node
+}
+
+type (
+ ExprSlice []ast.Expr
+ stmtSlice []ast.Stmt
+ fieldSlice []*ast.Field
+ identSlice []*ast.Ident
+ specSlice []ast.Spec
+ declSlice []ast.Decl
+)
+
+func (l ExprSlice) Len() int { return len(l) }
+func (l ExprSlice) At(i int) ast.Node { return l[i] }
+func (l ExprSlice) slice(i, j int) NodeSlice { return l[i:j] }
+func (l ExprSlice) Pos() token.Pos { return l[0].Pos() }
+func (l ExprSlice) End() token.Pos { return l[len(l)-1].End() }
+
+func (l stmtSlice) Len() int { return len(l) }
+func (l stmtSlice) At(i int) ast.Node { return l[i] }
+func (l stmtSlice) slice(i, j int) NodeSlice { return l[i:j] }
+func (l stmtSlice) Pos() token.Pos { return l[0].Pos() }
+func (l stmtSlice) End() token.Pos { return l[len(l)-1].End() }
+
+func (l fieldSlice) Len() int { return len(l) }
+func (l fieldSlice) At(i int) ast.Node { return l[i] }
+func (l fieldSlice) slice(i, j int) NodeSlice { return l[i:j] }
+func (l fieldSlice) Pos() token.Pos { return l[0].Pos() }
+func (l fieldSlice) End() token.Pos { return l[len(l)-1].End() }
+
+func (l identSlice) Len() int { return len(l) }
+func (l identSlice) At(i int) ast.Node { return l[i] }
+func (l identSlice) slice(i, j int) NodeSlice { return l[i:j] }
+func (l identSlice) Pos() token.Pos { return l[0].Pos() }
+func (l identSlice) End() token.Pos { return l[len(l)-1].End() }
+
+func (l specSlice) Len() int { return len(l) }
+func (l specSlice) At(i int) ast.Node { return l[i] }
+func (l specSlice) slice(i, j int) NodeSlice { return l[i:j] }
+func (l specSlice) Pos() token.Pos { return l[0].Pos() }
+func (l specSlice) End() token.Pos { return l[len(l)-1].End() }
+
+func (l declSlice) Len() int { return len(l) }
+func (l declSlice) At(i int) ast.Node { return l[i] }
+func (l declSlice) slice(i, j int) NodeSlice { return l[i:j] }
+func (l declSlice) Pos() token.Pos { return l[0].Pos() }
+func (l declSlice) End() token.Pos { return l[len(l)-1].End() }
diff --git a/vendor/github.com/quasilyte/stdinfo/LICENSE b/vendor/github.com/quasilyte/stdinfo/LICENSE
new file mode 100644
index 000000000..87a453862
--- /dev/null
+++ b/vendor/github.com/quasilyte/stdinfo/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 Iskander (Alex) Sharipov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/quasilyte/stdinfo/go.mod b/vendor/github.com/quasilyte/stdinfo/go.mod
new file mode 100644
index 000000000..147e97030
--- /dev/null
+++ b/vendor/github.com/quasilyte/stdinfo/go.mod
@@ -0,0 +1,3 @@
+module github.com/quasilyte/stdinfo
+
+go 1.17
diff --git a/vendor/github.com/quasilyte/stdinfo/stdinfo.go b/vendor/github.com/quasilyte/stdinfo/stdinfo.go
new file mode 100644
index 000000000..040f63445
--- /dev/null
+++ b/vendor/github.com/quasilyte/stdinfo/stdinfo.go
@@ -0,0 +1,30 @@
+package stdinfo
+
+type Package struct {
+ // Name is a package name.
+ // For "encoding/json" the package name is "json".
+ Name string
+
+ // Path is a package path, like "encoding/json".
+ Path string
+
+ // Freq is a package import frequency approximation.
+ // A value of -1 means "unknown".
+ Freq int
+}
+
+// PathByName maps a std package name to its package path.
+//
+// For packages with multiple choices, like "template",
+// only the more common one is accessible ("text/template" in this case).
+//
+// This map doesn't contain extremely rare packages either.
+// Use PackageList variable if you want to construct a different mapping.
+//
+// It's exported as map to make it easier to re-use it in libraries
+// without copying.
+var PathByName = generatedPathByName
+
+// PackagesList is a list of std packages information.
+// It's sorted by a package name.
+var PackagesList = generatedPackagesList
diff --git a/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go b/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go
new file mode 100644
index 000000000..ecfff9b6c
--- /dev/null
+++ b/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go
@@ -0,0 +1,274 @@
+// Code generated by "script/gen.go"; DO NOT EDIT.
+
+package stdinfo
+
+var generatedPathByName = map[string]string{
+ "fmt": "fmt", // Freq=15795
+ "testing": "testing", // Freq=12807
+ "context": "context", // Freq=10797
+ "time": "time", // Freq=8900
+ "strings": "strings", // Freq=8852
+ "os": "os", // Freq=5712
+ "bytes": "bytes", // Freq=4129
+ "io": "io", // Freq=3981
+ "http": "net/http", // Freq=3691
+ "sync": "sync", // Freq=3492
+ "errors": "errors", // Freq=3107
+ "strconv": "strconv", // Freq=3076
+ "reflect": "reflect", // Freq=3025
+ "filepath": "path/filepath", // Freq=2843
+ "json": "encoding/json", // Freq=2537
+ "sort": "sort", // Freq=2382
+ "ioutil": "io/ioutil", // Freq=2164
+ "net": "net", // Freq=2025
+ "math": "math", // Freq=1746
+ "url": "net/url", // Freq=1411
+ "regexp": "regexp", // Freq=1320
+ "runtime": "runtime", // Freq=1318
+ "log": "log", // Freq=1149
+ "flag": "flag", // Freq=1002
+ "path": "path", // Freq=993
+ "unsafe": "unsafe", // Freq=992
+ "rand": "math/rand", // Freq=981
+ "syscall": "syscall", // Freq=902
+ "atomic": "sync/atomic", // Freq=804
+ "bufio": "bufio", // Freq=695
+ "httptest": "net/http/httptest", // Freq=676
+ "exec": "os/exec", // Freq=676
+ "binary": "encoding/binary", // Freq=476
+ "tls": "crypto/tls", // Freq=475
+ "token": "go/token", // Freq=471
+ "utf8": "unicode/utf8", // Freq=404
+ "base64": "encoding/base64", // Freq=383
+ "ast": "go/ast", // Freq=373
+ "x509": "crypto/x509", // Freq=357
+ "hex": "encoding/hex", // Freq=340
+ "unicode": "unicode", // Freq=309
+ "types": "go/types", // Freq=309
+ "big": "math/big", // Freq=230
+ "sha256": "crypto/sha256", // Freq=227
+ "template": "text/template", // Freq=211
+ "fs": "io/fs", // Freq=162
+ "parser": "go/parser", // Freq=160
+ "sql": "database/sql", // Freq=157
+ "gzip": "compress/gzip", // Freq=150
+ "signal": "os/signal", // Freq=139
+ "pem": "encoding/pem", // Freq=137
+ "hash": "hash", // Freq=137
+ "crypto": "crypto", // Freq=132
+ "build": "go/build", // Freq=121
+ "debug": "runtime/debug", // Freq=121
+ "bits": "math/bits", // Freq=120
+ "constant": "go/constant", // Freq=120
+ "xml": "encoding/xml", // Freq=118
+ "tabwriter": "text/tabwriter", // Freq=116
+ "md5": "crypto/md5", // Freq=110
+ "rsa": "crypto/rsa", // Freq=103
+ "format": "go/format", // Freq=88
+ "sha1": "crypto/sha1", // Freq=85
+ "driver": "database/sql/driver", // Freq=81
+ "pkix": "crypto/x509/pkix", // Freq=80
+ "heap": "container/heap", // Freq=78
+ "tar": "archive/tar", // Freq=77
+ "ecdsa": "crypto/ecdsa", // Freq=75
+ "cipher": "crypto/cipher", // Freq=74
+ "crc32": "hash/crc32", // Freq=70
+ "gob": "encoding/gob", // Freq=65
+ "elliptic": "crypto/elliptic", // Freq=60
+ "subtle": "crypto/subtle", // Freq=54
+ "zip": "archive/zip", // Freq=54
+ "aes": "crypto/aes", // Freq=53
+ "mime": "mime", // Freq=51
+ "pprof": "runtime/pprof", // Freq=47
+ "textproto": "net/textproto", // Freq=47
+ "image": "image", // Freq=45
+ "fnv": "hash/fnv", // Freq=45
+ "hmac": "crypto/hmac", // Freq=45
+ "httputil": "net/http/httputil", // Freq=44
+ "elf": "debug/elf", // Freq=44
+ "encoding": "encoding", // Freq=41
+ "sha512": "crypto/sha512", // Freq=41
+ "cmplx": "math/cmplx", // Freq=40
+ "color": "image/color", // Freq=38
+ "html": "html", // Freq=37
+ "expvar": "expvar", // Freq=34
+ "embed": "embed", // Freq=32
+ "csv": "encoding/csv", // Freq=31
+ "importer": "go/importer", // Freq=31
+ "multipart": "mime/multipart", // Freq=30
+ "printer": "go/printer", // Freq=27
+ "syslog": "log/syslog", // Freq=27
+ "asn1": "encoding/asn1", // Freq=27
+ "list": "container/list", // Freq=27
+ "scanner": "go/scanner", // Freq=25
+ "ed25519": "crypto/ed25519", // Freq=25
+ "dwarf": "debug/dwarf", // Freq=23
+ "flate": "compress/flate", // Freq=22
+ "zlib": "compress/zlib", // Freq=21
+ "png": "image/png", // Freq=20
+ "trace": "runtime/trace", // Freq=20
+ "httptrace": "net/http/httptrace", // Freq=19
+ "utf16": "unicode/utf16", // Freq=19
+ "rpc": "net/rpc", // Freq=19
+ "macho": "debug/macho", // Freq=16
+ "iotest": "testing/iotest", // Freq=15
+ "dsa": "crypto/dsa", // Freq=13
+ "parse": "text/template/parse", // Freq=13
+ "cookiejar": "net/http/cookiejar", // Freq=12
+ "fstest": "testing/fstest", // Freq=11
+ "jpeg": "image/jpeg", // Freq=11
+}
+
+var generatedPackagesList = []Package{
+ {Name: "adler32", Path: "hash/adler32", Freq: 7},
+ {Name: "aes", Path: "crypto/aes", Freq: 53},
+ {Name: "ascii85", Path: "encoding/ascii85", Freq: -1},
+ {Name: "asn1", Path: "encoding/asn1", Freq: 27},
+ {Name: "ast", Path: "go/ast", Freq: 373},
+ {Name: "atomic", Path: "sync/atomic", Freq: 804},
+ {Name: "base32", Path: "encoding/base32", Freq: 5},
+ {Name: "base64", Path: "encoding/base64", Freq: 383},
+ {Name: "big", Path: "math/big", Freq: 230},
+ {Name: "binary", Path: "encoding/binary", Freq: 476},
+ {Name: "bits", Path: "math/bits", Freq: 120},
+ {Name: "bufio", Path: "bufio", Freq: 695},
+ {Name: "build", Path: "go/build", Freq: 121},
+ {Name: "bytes", Path: "bytes", Freq: 4129},
+ {Name: "bzip2", Path: "compress/bzip2", Freq: 7},
+ {Name: "cgi", Path: "net/http/cgi", Freq: 1},
+ {Name: "cgo", Path: "runtime/cgo", Freq: -1},
+ {Name: "cipher", Path: "crypto/cipher", Freq: 74},
+ {Name: "cmplx", Path: "math/cmplx", Freq: 40},
+ {Name: "color", Path: "image/color", Freq: 38},
+ {Name: "constant", Path: "go/constant", Freq: 120},
+ {Name: "constraint", Path: "go/build/constraint", Freq: 5},
+ {Name: "context", Path: "context", Freq: 10797},
+ {Name: "cookiejar", Path: "net/http/cookiejar", Freq: 12},
+ {Name: "crc32", Path: "hash/crc32", Freq: 70},
+ {Name: "crc64", Path: "hash/crc64", Freq: 3},
+ {Name: "crypto", Path: "crypto", Freq: 132},
+ {Name: "csv", Path: "encoding/csv", Freq: 31},
+ {Name: "debug", Path: "runtime/debug", Freq: 121},
+ {Name: "des", Path: "crypto/des", Freq: 8},
+ {Name: "doc", Path: "go/doc", Freq: 15},
+ {Name: "draw", Path: "image/draw", Freq: 7},
+ {Name: "driver", Path: "database/sql/driver", Freq: 81},
+ {Name: "dsa", Path: "crypto/dsa", Freq: 13},
+ {Name: "dwarf", Path: "debug/dwarf", Freq: 23},
+ {Name: "ecdsa", Path: "crypto/ecdsa", Freq: 75},
+ {Name: "ed25519", Path: "crypto/ed25519", Freq: 25},
+ {Name: "elf", Path: "debug/elf", Freq: 44},
+ {Name: "elliptic", Path: "crypto/elliptic", Freq: 60},
+ {Name: "embed", Path: "embed", Freq: 32},
+ {Name: "encoding", Path: "encoding", Freq: 41},
+ {Name: "errors", Path: "errors", Freq: 3107},
+ {Name: "exec", Path: "os/exec", Freq: 676},
+ {Name: "expvar", Path: "expvar", Freq: 34},
+ {Name: "fcgi", Path: "net/http/fcgi", Freq: 2},
+ {Name: "filepath", Path: "path/filepath", Freq: 2843},
+ {Name: "flag", Path: "flag", Freq: 1002},
+ {Name: "flate", Path: "compress/flate", Freq: 22},
+ {Name: "fmt", Path: "fmt", Freq: 15795},
+ {Name: "fnv", Path: "hash/fnv", Freq: 45},
+ {Name: "format", Path: "go/format", Freq: 88},
+ {Name: "fs", Path: "io/fs", Freq: 162},
+ {Name: "fstest", Path: "testing/fstest", Freq: 11},
+ {Name: "gif", Path: "image/gif", Freq: 5},
+ {Name: "gob", Path: "encoding/gob", Freq: 65},
+ {Name: "gosym", Path: "debug/gosym", Freq: 3},
+ {Name: "gzip", Path: "compress/gzip", Freq: 150},
+ {Name: "hash", Path: "hash", Freq: 137},
+ {Name: "heap", Path: "container/heap", Freq: 78},
+ {Name: "hex", Path: "encoding/hex", Freq: 340},
+ {Name: "hmac", Path: "crypto/hmac", Freq: 45},
+ {Name: "html", Path: "html", Freq: 37},
+ {Name: "http", Path: "net/http", Freq: 3691},
+ {Name: "httptest", Path: "net/http/httptest", Freq: 676},
+ {Name: "httptrace", Path: "net/http/httptrace", Freq: 19},
+ {Name: "httputil", Path: "net/http/httputil", Freq: 44},
+ {Name: "image", Path: "image", Freq: 45},
+ {Name: "importer", Path: "go/importer", Freq: 31},
+ {Name: "io", Path: "io", Freq: 3981},
+ {Name: "iotest", Path: "testing/iotest", Freq: 15},
+ {Name: "ioutil", Path: "io/ioutil", Freq: 2164},
+ {Name: "jpeg", Path: "image/jpeg", Freq: 11},
+ {Name: "json", Path: "encoding/json", Freq: 2537},
+ {Name: "jsonrpc", Path: "net/rpc/jsonrpc", Freq: -1},
+ {Name: "list", Path: "container/list", Freq: 27},
+ {Name: "log", Path: "log", Freq: 1149},
+ {Name: "lzw", Path: "compress/lzw", Freq: 3},
+ {Name: "macho", Path: "debug/macho", Freq: 16},
+ {Name: "mail", Path: "net/mail", Freq: 7},
+ {Name: "maphash", Path: "hash/maphash", Freq: 1},
+ {Name: "math", Path: "math", Freq: 1746},
+ {Name: "md5", Path: "crypto/md5", Freq: 110},
+ {Name: "metrics", Path: "runtime/metrics", Freq: 3},
+ {Name: "mime", Path: "mime", Freq: 51},
+ {Name: "multipart", Path: "mime/multipart", Freq: 30},
+ {Name: "net", Path: "net", Freq: 2025},
+ {Name: "os", Path: "os", Freq: 5712},
+ {Name: "palette", Path: "image/color/palette", Freq: 4},
+ {Name: "parse", Path: "text/template/parse", Freq: 13},
+ {Name: "parser", Path: "go/parser", Freq: 160},
+ {Name: "path", Path: "path", Freq: 993},
+ {Name: "pe", Path: "debug/pe", Freq: 12},
+ {Name: "pem", Path: "encoding/pem", Freq: 137},
+ {Name: "pkix", Path: "crypto/x509/pkix", Freq: 80},
+ {Name: "plan9obj", Path: "debug/plan9obj", Freq: 1},
+ {Name: "plugin", Path: "plugin", Freq: 4},
+ {Name: "png", Path: "image/png", Freq: 20},
+ {Name: "pprof", Path: "runtime/pprof", Freq: 47},
+ {Name: "pprof", Path: "net/http/pprof", Freq: 33},
+ {Name: "printer", Path: "go/printer", Freq: 27},
+ {Name: "quick", Path: "testing/quick", Freq: 51},
+ {Name: "quotedprintable", Path: "mime/quotedprintable", Freq: 2},
+ {Name: "race", Path: "runtime/race", Freq: -1},
+ {Name: "rand", Path: "math/rand", Freq: 981},
+ {Name: "rand", Path: "crypto/rand", Freq: 256},
+ {Name: "rc4", Path: "crypto/rc4", Freq: 3},
+ {Name: "reflect", Path: "reflect", Freq: 3025},
+ {Name: "regexp", Path: "regexp", Freq: 1320},
+ {Name: "ring", Path: "container/ring", Freq: 2},
+ {Name: "rpc", Path: "net/rpc", Freq: 19},
+ {Name: "rsa", Path: "crypto/rsa", Freq: 103},
+ {Name: "runtime", Path: "runtime", Freq: 1318},
+ {Name: "scanner", Path: "text/scanner", Freq: 23},
+ {Name: "scanner", Path: "go/scanner", Freq: 25},
+ {Name: "sha1", Path: "crypto/sha1", Freq: 85},
+ {Name: "sha256", Path: "crypto/sha256", Freq: 227},
+ {Name: "sha512", Path: "crypto/sha512", Freq: 41},
+ {Name: "signal", Path: "os/signal", Freq: 139},
+ {Name: "smtp", Path: "net/smtp", Freq: 6},
+ {Name: "sort", Path: "sort", Freq: 2382},
+ {Name: "sql", Path: "database/sql", Freq: 157},
+ {Name: "strconv", Path: "strconv", Freq: 3076},
+ {Name: "strings", Path: "strings", Freq: 8852},
+ {Name: "subtle", Path: "crypto/subtle", Freq: 54},
+ {Name: "suffixarray", Path: "index/suffixarray", Freq: 2},
+ {Name: "sync", Path: "sync", Freq: 3492},
+ {Name: "syntax", Path: "regexp/syntax", Freq: 11},
+ {Name: "syscall", Path: "syscall", Freq: 902},
+ {Name: "syslog", Path: "log/syslog", Freq: 27},
+ {Name: "tabwriter", Path: "text/tabwriter", Freq: 116},
+ {Name: "tar", Path: "archive/tar", Freq: 77},
+ {Name: "template", Path: "html/template", Freq: 173},
+ {Name: "template", Path: "text/template", Freq: 211},
+ {Name: "testing", Path: "testing", Freq: 12807},
+ {Name: "textproto", Path: "net/textproto", Freq: 47},
+ {Name: "time", Path: "time", Freq: 8900},
+ {Name: "tls", Path: "crypto/tls", Freq: 475},
+ {Name: "token", Path: "go/token", Freq: 471},
+ {Name: "trace", Path: "runtime/trace", Freq: 20},
+ {Name: "types", Path: "go/types", Freq: 309},
+ {Name: "tzdata", Path: "time/tzdata", Freq: 6},
+ {Name: "unicode", Path: "unicode", Freq: 309},
+ {Name: "unsafe", Path: "unsafe", Freq: 992},
+ {Name: "url", Path: "net/url", Freq: 1411},
+ {Name: "user", Path: "os/user", Freq: 51},
+ {Name: "utf16", Path: "unicode/utf16", Freq: 19},
+ {Name: "utf8", Path: "unicode/utf8", Freq: 404},
+ {Name: "x509", Path: "crypto/x509", Freq: 357},
+ {Name: "xml", Path: "encoding/xml", Freq: 118},
+ {Name: "zip", Path: "archive/zip", Freq: 54},
+ {Name: "zlib", Path: "compress/zlib", Freq: 21},
+}