aboutsummaryrefslogtreecommitdiffstats
path: root/vendor/github.com/quasilyte
diff options
context:
space:
mode:
authorDmitry Vyukov <dvyukov@google.com>2021-02-22 20:37:25 +0100
committerDmitry Vyukov <dvyukov@google.com>2021-02-22 21:02:12 +0100
commitfcc6d71be2c3ce7d9305c04fc2e87af554571bac (patch)
treeb01dbb3d1e2988e28ea158d2d543d603ec0b9569 /vendor/github.com/quasilyte
parent8f23c528ad5a943b9ffec5dcaf332fd0f614006e (diff)
go.mod: update golangci-lint to v1.37
Diffstat (limited to 'vendor/github.com/quasilyte')
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/dslgen/dsl_sources.go3
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/dslgen/dslgen.go53
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go30
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go9
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go256
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/bool3.go9
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go19
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/dsl_importer.go40
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go174
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go262
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go118
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go21
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go33
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go116
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go200
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/merge.go24
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go114
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go957
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go703
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go16
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go74
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go42
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go239
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go184
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go63
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go219
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go165
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go60
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go76
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go169
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go34
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go212
-rw-r--r--vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go34
33 files changed, 4129 insertions, 599 deletions
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dslgen/dsl_sources.go b/vendor/github.com/quasilyte/go-ruleguard/dslgen/dsl_sources.go
deleted file mode 100644
index 3ba584d3f..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/dslgen/dsl_sources.go
+++ /dev/null
@@ -1,3 +0,0 @@
-package dslgen
-
-var Fluent = []byte("package fluent\n\n// Matcher is a main API group-level entry point.\n// It's used to define and configure the group rules.\n// It also represents a map of all rule-local variables.\ntype Matcher map[string]Var\n\n// Import loads given package path into a rule group imports table.\n//\n// That table is used during the rules compilation.\n//\n// The table has the following effect on the rules:\n//\t* For type expressions, it's used to resolve the\n//\t full package paths of qualified types, like `foo.Bar`.\n//\t If Import(`a/b/foo`) is called, `foo.Bar` will match\n//\t `a/b/foo.Bar` type during the pattern execution.\nfunc (m Matcher) Import(pkgPath string) {}\n\n// Match specifies a set of patterns that match a rule being defined.\n// Pattern matching succeeds if at least 1 pattern matches.\n//\n// If none of the given patterns matched, rule execution stops.\nfunc (m Matcher) Match(pattern string, alternatives ...string) Matcher {\n\treturn m\n}\n\n// Where applies additional constraint to a match.\n// If a given cond is not satisfied, a match is rejected and\n// rule execution stops.\nfunc (m Matcher) Where(cond bool) Matcher {\n\treturn m\n}\n\n// Report prints a message if associated rule match is successful.\n//\n// A message is a string that can contain interpolated expressions.\n// For every matched variable it's possible to interpolate\n// their printed representation into the message text with $<name>.\n// An entire match can be addressed with $$.\nfunc (m Matcher) Report(message string) Matcher {\n\treturn m\n}\n\n// Suggest assigns a quickfix suggestion for the matched code.\nfunc (m Matcher) Suggest(suggestion string) Matcher {\n\treturn m\n}\n\n// At binds the reported node to a named submatch.\n// If no explicit location is given, the outermost node ($$) is used.\nfunc (m Matcher) At(v Var) Matcher {\n\treturn m\n}\n\n// Var is a pattern variable that describes a named submatch.\ntype Var struct {\n\t// Pure reports whether expr 
matched by var is side-effect-free.\n\tPure bool\n\n\t// Const reports whether expr matched by var is a constant value.\n\tConst bool\n\n\t// Addressable reports whether the corresponding expression is addressable.\n\t// See https://golang.org/ref/spec#Address_operators.\n\tAddressable bool\n\n\t// Type is a type of a matched expr.\n\tType ExprType\n\n\t// Test is a captured node text as in the source code.\n\tText MatchedText\n}\n\n// ExprType describes a type of a matcher expr.\ntype ExprType struct {\n\t// Size represents expression type size in bytes.\n\tSize int\n}\n\n// AssignableTo reports whether a type is assign-compatible with a given type.\n// See https://golang.org/pkg/go/types/#AssignableTo.\nfunc (ExprType) AssignableTo(typ string) bool { return boolResult }\n\n// ConvertibleTo reports whether a type is conversible to a given type.\n// See https://golang.org/pkg/go/types/#ConvertibleTo.\nfunc (ExprType) ConvertibleTo(typ string) bool { return boolResult }\n\n// Implements reports whether a type implements a given interface.\n// See https://golang.org/pkg/go/types/#Implements.\nfunc (ExprType) Implements(typ string) bool { return boolResult }\n\n// Is reports whether a type is identical to a given type.\nfunc (ExprType) Is(typ string) bool { return boolResult }\n\n// MatchedText represents a source text associated with a matched node.\ntype MatchedText string\n\n// Matches reports whether the text matches the given regexp pattern.\nfunc (MatchedText) Matches(pattern string) bool { return boolResult }\n\n\n\nvar boolResult bool\n\n")
diff --git a/vendor/github.com/quasilyte/go-ruleguard/dslgen/dslgen.go b/vendor/github.com/quasilyte/go-ruleguard/dslgen/dslgen.go
deleted file mode 100644
index a2269b2ed..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/dslgen/dslgen.go
+++ /dev/null
@@ -1,53 +0,0 @@
-// +build generate
-
-package main
-
-import (
- "bytes"
- "fmt"
- "io/ioutil"
- "os"
- "path/filepath"
-)
-
-func main() {
- // See #23.
-
- data, err := dirToBytes("../dsl/fluent")
- if err != nil {
- panic(err)
- }
-
- f, err := os.Create("./dsl_sources.go")
- if err != nil {
- panic(err)
- }
- defer f.Close()
-
- fmt.Fprintf(f, `package dslgen
-
-var Fluent = []byte(%q)
-`, string(data))
-}
-
-func dirToBytes(dir string) ([]byte, error) {
- files, err := ioutil.ReadDir(dir)
- if err != nil {
- return nil, err
- }
-
- var buf bytes.Buffer
- for i, f := range files {
- data, err := ioutil.ReadFile(filepath.Join(dir, f.Name()))
- if err != nil {
- return nil, err
- }
- if i != 0 {
- newline := bytes.IndexByte(data, '\n')
- data = data[newline:]
- }
- buf.Write(data)
- buf.WriteByte('\n')
- }
- return buf.Bytes(), nil
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go b/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go
new file mode 100644
index 000000000..50f9cca0b
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go
@@ -0,0 +1,30 @@
+package golist
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+ "os/exec"
+)
+
+// Package is `go list --json` output structure.
+type Package struct {
+ Dir string // directory containing package sources
+ ImportPath string // import path of package in dir
+ GoFiles []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles)
+}
+
+// JSON runs `go list --json` for the specified pkgName and returns the parsed JSON.
+func JSON(pkgPath string) (*Package, error) {
+ out, err := exec.Command("go", "list", "--json", pkgPath).CombinedOutput()
+ if err != nil {
+ return nil, fmt.Errorf("go list error (%v): %s", err, out)
+ }
+
+ var pkg Package
+ if err := json.NewDecoder(bytes.NewReader(out)).Decode(&pkg); err != io.EOF && err != nil {
+ return nil, err
+ }
+ return &pkg, nil
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go
index f366af84f..f62c4aafd 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep/kludge.go
@@ -34,6 +34,15 @@ type MatchData struct {
Values map[string]ast.Node
}
+// Clone creates a pattern copy.
+func (p *Pattern) Clone() *Pattern {
+ clone := *p
+ clone.m = &matcher{}
+ *clone.m = *p.m
+ clone.m.values = make(map[string]ast.Node)
+ return &clone
+}
+
// MatchNode calls cb if n matches a pattern.
func (p *Pattern) MatchNode(n ast.Node, cb func(MatchData)) {
p.m.values = map[string]ast.Node{}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go b/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go
new file mode 100644
index 000000000..028a5f141
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go
@@ -0,0 +1,256 @@
+package xtypes
+
+import (
+ "go/types"
+)
+
+// Implements reports whether type v implements iface.
+//
+// Unlike types.Implements(), it permits X and Y named types
+// to be considered identical even if their addresses are different.
+func Implements(v types.Type, iface *types.Interface) bool {
+ if iface.Empty() {
+ return true
+ }
+
+ if v, _ := v.Underlying().(*types.Interface); v != nil {
+ for i := 0; i < iface.NumMethods(); i++ {
+ m := iface.Method(i)
+ obj, _, _ := types.LookupFieldOrMethod(v, false, m.Pkg(), m.Name())
+ switch {
+ case obj == nil:
+ return false
+ case !Identical(obj.Type(), m.Type()):
+ return false
+ }
+ }
+ return true
+ }
+
+ // A concrete type v implements iface if it implements all methods of iface.
+ for i := 0; i < iface.NumMethods(); i++ {
+ m := iface.Method(i)
+
+ obj, _, _ := types.LookupFieldOrMethod(v, false, m.Pkg(), m.Name())
+ if obj == nil {
+ return false
+ }
+
+ f, ok := obj.(*types.Func)
+ if !ok {
+ return false
+ }
+
+ if !Identical(f.Type(), m.Type()) {
+ return false
+ }
+ }
+
+ return true
+}
+
+// Identical reports whether x and y are identical types.
+//
+// Unlike types.Identical(), it permits X and Y named types
+// to be considered identical even if their addresses are different.
+func Identical(x, y types.Type) bool {
+ return typeIdentical(x, y, nil)
+}
+
+func typeIdentical(x, y types.Type, p *ifacePair) bool {
+ if x == y {
+ return true
+ }
+
+ switch x := x.(type) {
+ case nil:
+ return false
+
+ case *types.Basic:
+ // Basic types are singletons except for the rune and byte
+ // aliases, thus we cannot solely rely on the x == y check
+ // above. See also comment in TypeName.IsAlias.
+ if y, ok := y.(*types.Basic); ok {
+ return x.Kind() == y.Kind()
+ }
+
+ case *types.Array:
+ // Two array types are identical if they have identical element types
+ // and the same array length.
+ if y, ok := y.(*types.Array); ok {
+ // If one or both array lengths are unknown (< 0) due to some error,
+ // assume they are the same to avoid spurious follow-on errors.
+ return (x.Len() < 0 || y.Len() < 0 || x.Len() == y.Len()) && typeIdentical(x.Elem(), y.Elem(), p)
+ }
+
+ case *types.Slice:
+ // Two slice types are identical if they have identical element types.
+ if y, ok := y.(*types.Slice); ok {
+ return typeIdentical(x.Elem(), y.Elem(), p)
+ }
+
+ case *types.Struct:
+ // Two struct types are identical if they have the same sequence of fields,
+ // and if corresponding fields have the same names, and identical types,
+ // and identical tags. Two embedded fields are considered to have the same
+ // name. Lower-case field names from different packages are always different.
+ if y, ok := y.(*types.Struct); ok {
+ if x.NumFields() == y.NumFields() {
+ for i := 0; i < x.NumFields(); i++ {
+ f := x.Field(i)
+ g := y.Field(i)
+ if f.Embedded() != g.Embedded() || !sameID(f, g.Pkg(), g.Name()) || !typeIdentical(f.Type(), g.Type(), p) {
+ return false
+ }
+ }
+ return true
+ }
+ }
+
+ case *types.Pointer:
+ // Two pointer types are identical if they have identical base types.
+ if y, ok := y.(*types.Pointer); ok {
+ return typeIdentical(x.Elem(), y.Elem(), p)
+ }
+
+ case *types.Tuple:
+ // Two tuples types are identical if they have the same number of elements
+ // and corresponding elements have identical types.
+ if y, ok := y.(*types.Tuple); ok {
+ if x.Len() == y.Len() {
+ if x != nil {
+ for i := 0; i < x.Len(); i++ {
+ v := x.At(i)
+ w := y.At(i)
+ if !typeIdentical(v.Type(), w.Type(), p) {
+ return false
+ }
+ }
+ }
+ return true
+ }
+ }
+
+ case *types.Signature:
+ // Two function types are identical if they have the same number of parameters
+ // and result values, corresponding parameter and result types are identical,
+ // and either both functions are variadic or neither is. Parameter and result
+ // names are not required to match.
+ if y, ok := y.(*types.Signature); ok {
+ return x.Variadic() == y.Variadic() &&
+ typeIdentical(x.Params(), y.Params(), p) &&
+ typeIdentical(x.Results(), y.Results(), p)
+ }
+
+ case *types.Interface:
+ // Two interface types are identical if they have the same set of methods with
+ // the same names and identical function types. Lower-case method names from
+ // different packages are always different. The order of the methods is irrelevant.
+ if y, ok := y.(*types.Interface); ok {
+ if x.NumMethods() != y.NumMethods() {
+ return false
+ }
+ // Interface types are the only types where cycles can occur
+ // that are not "terminated" via named types; and such cycles
+ // can only be created via method parameter types that are
+ // anonymous interfaces (directly or indirectly) embedding
+ // the current interface. Example:
+ //
+ // type T interface {
+ // m() interface{T}
+ // }
+ //
+ // If two such (differently named) interfaces are compared,
+ // endless recursion occurs if the cycle is not detected.
+ //
+ // If x and y were compared before, they must be equal
+ // (if they were not, the recursion would have stopped);
+ // search the ifacePair stack for the same pair.
+ //
+ // This is a quadratic algorithm, but in practice these stacks
+ // are extremely short (bounded by the nesting depth of interface
+ // type declarations that recur via parameter types, an extremely
+ // rare occurrence). An alternative implementation might use a
+ // "visited" map, but that is probably less efficient overall.
+ q := &ifacePair{x, y, p}
+ for p != nil {
+ if p.identical(q) {
+ return true // same pair was compared before
+ }
+ p = p.prev
+ }
+ for i := 0; i < x.NumMethods(); i++ {
+ f := x.Method(i)
+ g := y.Method(i)
+ if f.Id() != g.Id() || !typeIdentical(f.Type(), g.Type(), q) {
+ return false
+ }
+ }
+ return true
+ }
+
+ case *types.Map:
+ // Two map types are identical if they have identical key and value types.
+ if y, ok := y.(*types.Map); ok {
+ return typeIdentical(x.Key(), y.Key(), p) && typeIdentical(x.Elem(), y.Elem(), p)
+ }
+
+ case *types.Chan:
+ // Two channel types are identical if they have identical value types
+ // and the same direction.
+ if y, ok := y.(*types.Chan); ok {
+ return x.Dir() == y.Dir() && typeIdentical(x.Elem(), y.Elem(), p)
+ }
+
+ case *types.Named:
+ // Two named types are identical if their type names originate
+ // in the same type declaration.
+ y, ok := y.(*types.Named)
+ if !ok {
+ return false
+ }
+ if x.Obj() == y.Obj() {
+ return true
+ }
+ return sameID(x.Obj(), y.Obj().Pkg(), y.Obj().Name())
+
+ default:
+ panic("unreachable")
+ }
+
+ return false
+}
+
+// An ifacePair is a node in a stack of interface type pairs compared for identity.
+type ifacePair struct {
+ x *types.Interface
+ y *types.Interface
+ prev *ifacePair
+}
+
+func (p *ifacePair) identical(q *ifacePair) bool {
+ return (p.x == q.x && p.y == q.y) ||
+ (p.x == q.y && p.y == q.x)
+}
+
+func sameID(obj types.Object, pkg *types.Package, name string) bool {
+ // spec:
+ // "Two identifiers are different if they are spelled differently,
+ // or if they appear in different packages and are not exported.
+ // Otherwise, they are the same."
+ if name != obj.Name() {
+ return false
+ }
+ // obj.Name == name
+ if obj.Exported() {
+ return true
+ }
+ // not exported, so packages must be the same (pkg == nil for
+ // fields in Universe scope; this can only happen for types
+ // introduced via Eval)
+ if pkg == nil || obj.Pkg() == nil {
+ return pkg == obj.Pkg()
+ }
+ // pkg != nil && obj.pkg != nil
+ return pkg.Path() == obj.Pkg().Path()
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bool3.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bool3.go
deleted file mode 100644
index 6e9550c1a..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bool3.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package ruleguard
-
-type bool3 int
-
-const (
- bool3unset bool3 = iota
- bool3false
- bool3true
-)
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go
new file mode 100644
index 000000000..950e3c410
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go
@@ -0,0 +1,19 @@
+package ruleguard
+
+import (
+ "path/filepath"
+
+ "github.com/quasilyte/go-ruleguard/internal/golist"
+)
+
+func findBundleFiles(pkgPath string) ([]string, error) {
+ pkg, err := golist.JSON(pkgPath)
+ if err != nil {
+ return nil, err
+ }
+ files := make([]string, 0, len(pkg.GoFiles))
+ for _, f := range pkg.GoFiles {
+ files = append(files, filepath.Join(pkg.Dir, f))
+ }
+ return files, nil
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/dsl_importer.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/dsl_importer.go
deleted file mode 100644
index c566578d3..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/dsl_importer.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package ruleguard
-
-import (
- "go/ast"
- "go/importer"
- "go/parser"
- "go/token"
- "go/types"
-
- "github.com/quasilyte/go-ruleguard/dslgen"
-)
-
-type dslImporter struct {
- fallback types.Importer
-}
-
-func newDSLImporter() *dslImporter {
- return &dslImporter{fallback: importer.Default()}
-}
-
-func (i *dslImporter) Import(path string) (*types.Package, error) {
- switch path {
- case "github.com/quasilyte/go-ruleguard/dsl/fluent":
- return i.importDSL(path, dslgen.Fluent)
-
- default:
- return i.fallback.Import(path)
- }
-}
-
-func (i *dslImporter) importDSL(path string, src []byte) (*types.Package, error) {
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, "dsl.go", src, 0)
- if err != nil {
- return nil, err
- }
- var typecheker types.Config
- var info types.Info
- return typecheker.Check(path, fset, []*ast.File{f}, &info)
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go
new file mode 100644
index 000000000..f8d1e390a
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go
@@ -0,0 +1,174 @@
+package ruleguard
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/types"
+ "io"
+ "strings"
+ "sync"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+ "github.com/quasilyte/go-ruleguard/ruleguard/typematch"
+)
+
+type engine struct {
+ state *engineState
+
+ ruleSet *goRuleSet
+}
+
+func newEngine() *engine {
+ return &engine{
+ state: newEngineState(),
+ }
+}
+
+func (e *engine) Load(ctx *ParseContext, filename string, r io.Reader) error {
+ config := rulesParserConfig{
+ state: e.state,
+ ctx: ctx,
+ importer: newGoImporter(e.state, goImporterConfig{
+ fset: ctx.Fset,
+ debugImports: ctx.DebugImports,
+ debugPrint: ctx.DebugPrint,
+ }),
+ itab: typematch.NewImportsTab(stdlibPackages),
+ }
+ p := newRulesParser(config)
+ rset, err := p.ParseFile(filename, r)
+ if err != nil {
+ return err
+ }
+
+ if e.ruleSet == nil {
+ e.ruleSet = rset
+ } else {
+ combinedRuleSet, err := mergeRuleSets([]*goRuleSet{e.ruleSet, rset})
+ if err != nil {
+ return err
+ }
+ e.ruleSet = combinedRuleSet
+ }
+
+ return nil
+}
+
+func (e *engine) Run(ctx *RunContext, f *ast.File) error {
+ if e.ruleSet == nil {
+ return errors.New("used Run() with an empty rule set; forgot to call Load() first?")
+ }
+ rset := cloneRuleSet(e.ruleSet)
+ return newRulesRunner(ctx, e.state, rset).run(f)
+}
+
+// engineState is a shared state inside the engine.
+type engineState struct {
+ env *quasigo.Env
+
+ typeByFQNMu sync.RWMutex
+ typeByFQN map[string]types.Type
+
+ pkgCacheMu sync.RWMutex
+ // pkgCache contains all imported packages, from any importer.
+ pkgCache map[string]*types.Package
+}
+
+func newEngineState() *engineState {
+ env := quasigo.NewEnv()
+ state := &engineState{
+ env: env,
+ pkgCache: make(map[string]*types.Package),
+ typeByFQN: map[string]types.Type{
+ // Predeclared types.
+ `error`: types.Universe.Lookup("error").Type(),
+ `bool`: types.Typ[types.Bool],
+ `int`: types.Typ[types.Int],
+ `int8`: types.Typ[types.Int8],
+ `int16`: types.Typ[types.Int16],
+ `int32`: types.Typ[types.Int32],
+ `int64`: types.Typ[types.Int64],
+ `uint`: types.Typ[types.Uint],
+ `uint8`: types.Typ[types.Uint8],
+ `uint16`: types.Typ[types.Uint16],
+ `uint32`: types.Typ[types.Uint32],
+ `uint64`: types.Typ[types.Uint64],
+ `uintptr`: types.Typ[types.Uintptr],
+ `string`: types.Typ[types.String],
+ `float32`: types.Typ[types.Float32],
+ `float64`: types.Typ[types.Float64],
+ `complex64`: types.Typ[types.Complex64],
+ `complex128`: types.Typ[types.Complex128],
+ // Predeclared aliases (provided for convenience).
+ `byte`: types.Typ[types.Uint8],
+ `rune`: types.Typ[types.Int32],
+ },
+ }
+ initEnv(state, env)
+ return state
+}
+
+func (state *engineState) GetCachedPackage(pkgPath string) *types.Package {
+ state.pkgCacheMu.RLock()
+ pkg := state.pkgCache[pkgPath]
+ state.pkgCacheMu.RUnlock()
+ return pkg
+}
+
+func (state *engineState) AddCachedPackage(pkgPath string, pkg *types.Package) {
+ state.pkgCacheMu.Lock()
+ state.pkgCache[pkgPath] = pkg
+ state.pkgCacheMu.Unlock()
+}
+
+func (state *engineState) FindType(importer *goImporter, currentPkg *types.Package, fqn string) (types.Type, error) {
+ // TODO(quasilyte): we can pre-populate the cache during the Load() phase.
+ // If we inspect the AST of a user function, all constant FQN can be preloaded.
+ // It could be a good thing as Load() is not expected to be executed in
+ // concurrent environment, so write-locking is not a big deal there.
+
+ state.typeByFQNMu.RLock()
+ cachedType, ok := state.typeByFQN[fqn]
+ state.typeByFQNMu.RUnlock()
+ if ok {
+ return cachedType, nil
+ }
+
+ // Code below is under a write critical section.
+ state.typeByFQNMu.Lock()
+ defer state.typeByFQNMu.Unlock()
+
+ typ, err := state.findTypeNoCache(importer, currentPkg, fqn)
+ if err != nil {
+ return nil, err
+ }
+ state.typeByFQN[fqn] = typ
+ return typ, nil
+}
+
+func (state *engineState) findTypeNoCache(importer *goImporter, currentPkg *types.Package, fqn string) (types.Type, error) {
+ pos := strings.LastIndexByte(fqn, '.')
+ if pos == -1 {
+ return nil, fmt.Errorf("%s is not a valid FQN", fqn)
+ }
+ pkgPath := fqn[:pos]
+ objectName := fqn[pos+1:]
+ var pkg *types.Package
+ if directDep := findDependency(currentPkg, pkgPath); directDep != nil {
+ pkg = directDep
+ } else {
+ loadedPkg, err := importer.Import(pkgPath)
+ if err != nil {
+ return nil, err
+ }
+ pkg = loadedPkg
+ }
+ obj := pkg.Scope().Lookup(objectName)
+ if obj == nil {
+ return nil, fmt.Errorf("%s is not found in %s", objectName, pkgPath)
+ }
+ typ := obj.Type()
+ state.typeByFQN[fqn] = typ
+ return typ, nil
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go
new file mode 100644
index 000000000..3816405a3
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go
@@ -0,0 +1,262 @@
+package ruleguard
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "regexp"
+
+ "github.com/quasilyte/go-ruleguard/internal/xtypes"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+ "github.com/quasilyte/go-ruleguard/ruleguard/typematch"
+)
+
+const filterSuccess = matchFilterResult("")
+
+func filterFailure(reason string) matchFilterResult {
+ return matchFilterResult(reason)
+}
+
+func makeNotFilter(src string, x matchFilter) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if x.fn(params).Matched() {
+ return matchFilterResult(src)
+ }
+ return ""
+ }
+}
+
+func makeAndFilter(lhs, rhs matchFilter) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if lhsResult := lhs.fn(params); !lhsResult.Matched() {
+ return lhsResult
+ }
+ return rhs.fn(params)
+ }
+}
+
+func makeOrFilter(lhs, rhs matchFilter) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if lhsResult := lhs.fn(params); lhsResult.Matched() {
+ return filterSuccess
+ }
+ return rhs.fn(params)
+ }
+}
+
+func makeFileImportsFilter(src, pkgPath string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ _, imported := params.imports[pkgPath]
+ if imported {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeFilePkgPathMatchesFilter(src string, re *regexp.Regexp) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ pkgPath := params.ctx.Pkg.Path()
+ if re.MatchString(pkgPath) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeFileNameMatchesFilter(src string, re *regexp.Regexp) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if re.MatchString(filepath.Base(params.filename)) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makePureFilter(src, varname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ n := params.subExpr(varname)
+ if isPure(params.ctx.Types, n) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeConstFilter(src, varname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ n := params.subExpr(varname)
+ if isConstant(params.ctx.Types, n) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeAddressableFilter(src, varname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ n := params.subExpr(varname)
+ if isAddressable(params.ctx.Types, n) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeCustomVarFilter(src, varname string, fn *quasigo.Func) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ // TODO(quasilyte): what if bytecode function panics due to the programming error?
+ // We should probably catch the panic here, print trace and return "false"
+ // from the filter (or even propagate that panic to let it crash).
+ params.varname = varname
+ result := quasigo.Call(params.env, fn, params)
+ if result.Value().(bool) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if xtypes.Implements(typ, iface) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Pattern) filterFunc {
+ if underlying {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname)).Underlying()
+ if pat.MatchIdentical(typ) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+ }
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if pat.MatchIdentical(typ) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeConvertibleToFilter(src, varname string, dstType types.Type) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if types.ConvertibleTo(typ, dstType) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeAssignableToFilter(src, varname string, dstType types.Type) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ if types.AssignableTo(typ, dstType) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ typ := params.typeofNode(params.subExpr(varname))
+ lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ))
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeValueIntConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname))
+ if lhsValue == nil {
+ return filterFailure(src) // The value is unknown
+ }
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeValueIntFilter(src, varname string, op token.Token, rhsVarname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname))
+ if lhsValue == nil {
+ return filterFailure(src)
+ }
+ rhsValue := intValueOf(params.ctx.Types, params.subExpr(rhsVarname))
+ if rhsValue == nil {
+ return filterFailure(src)
+ }
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTextConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ s := params.nodeText(params.subExpr(varname))
+ lhsValue := constant.MakeString(string(s))
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTextFilter(src, varname string, op token.Token, rhsVarname string) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ s1 := params.nodeText(params.subExpr(varname))
+ lhsValue := constant.MakeString(string(s1))
+ s2 := params.nodeText(params.values[rhsVarname])
+ rhsValue := constant.MakeString(string(s2))
+ if constant.Compare(lhsValue, op, rhsValue) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeTextMatchesFilter(src, varname string, re *regexp.Regexp) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ if re.Match(params.nodeText(params.subExpr(varname))) {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
+
+func makeNodeIsFilter(src, varname string, cat nodeCategory) filterFunc {
+ return func(params *filterParams) matchFilterResult {
+ n := params.subExpr(varname)
+ var matched bool
+ switch cat {
+ case nodeExpr:
+ _, matched = n.(ast.Expr)
+ case nodeStmt:
+ _, matched = n.(ast.Stmt)
+ default:
+ matched = (cat == categorizeNode(n))
+ }
+ if matched {
+ return filterSuccess
+ }
+ return filterFailure(src)
+ }
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
index 1192d8492..5357ad67f 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go
@@ -1,11 +1,21 @@
package ruleguard
import (
+ "fmt"
+ "go/ast"
+ "go/token"
"go/types"
"github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
)
+type goRuleSet struct {
+ universal *scopedGoRuleSet
+
+ groups map[string]token.Position // To handle redefinitions
+}
+
type scopedGoRuleSet struct {
uncategorized []goRule
categorizedNum int
@@ -13,24 +23,110 @@ type scopedGoRuleSet struct {
}
type goRule struct {
+ group string
filename string
- severity string
+ line int
pat *gogrep.Pattern
msg string
location string
suggestion string
- filters map[string]submatchFilter
+ filter matchFilter
+}
+
+type matchFilterResult string
+
+func (s matchFilterResult) Matched() bool { return s == "" }
+
+func (s matchFilterResult) RejectReason() string { return string(s) }
+
+type filterFunc func(*filterParams) matchFilterResult
+
+type matchFilter struct {
+ src string
+ fn func(*filterParams) matchFilterResult
+}
+
+type filterParams struct {
+ ctx *RunContext
+ filename string
+ imports map[string]struct{}
+ env *quasigo.EvalEnv
+
+ importer *goImporter
+
+ values map[string]ast.Node
+
+ nodeText func(n ast.Node) []byte
+
+ // varname is set only for custom filters before bytecode function is called.
+ varname string
+}
+
+func (params *filterParams) subExpr(name string) ast.Expr {
+ switch n := params.values[name].(type) {
+ case ast.Expr:
+ return n
+ case *ast.ExprStmt:
+ return n.X
+ default:
+ return nil
+ }
+}
+
+func (params *filterParams) typeofNode(n ast.Node) types.Type {
+ if e, ok := n.(ast.Expr); ok {
+ if typ := params.ctx.Types.TypeOf(e); typ != nil {
+ return typ
+ }
+ }
+
+ return types.Typ[types.Invalid]
+}
+
+func cloneRuleSet(rset *goRuleSet) *goRuleSet {
+ out, err := mergeRuleSets([]*goRuleSet{rset})
+ if err != nil {
+ panic(err) // Should never happen
+ }
+ return out
+}
+
+func mergeRuleSets(toMerge []*goRuleSet) (*goRuleSet, error) {
+ out := &goRuleSet{
+ universal: &scopedGoRuleSet{},
+ groups: make(map[string]token.Position),
+ }
+
+ for _, x := range toMerge {
+ out.universal = appendScopedRuleSet(out.universal, x.universal)
+ for group, pos := range x.groups {
+ if prevPos, ok := out.groups[group]; ok {
+ newRef := fmt.Sprintf("%s:%d", pos.Filename, pos.Line)
+ oldRef := fmt.Sprintf("%s:%d", prevPos.Filename, prevPos.Line)
+ return nil, fmt.Errorf("%s: redefenition of %s(), previously defined at %s", newRef, group, oldRef)
+ }
+ out.groups[group] = pos
+ }
+ }
+
+ return out, nil
}
-type submatchFilter struct {
- typePred func(typeQuery) bool
- textPred func(string) bool
- pure bool3
- constant bool3
- addressable bool3
+func appendScopedRuleSet(dst, src *scopedGoRuleSet) *scopedGoRuleSet {
+ dst.uncategorized = append(dst.uncategorized, cloneRuleSlice(src.uncategorized)...)
+ for cat, rules := range src.rulesByCategory {
+ dst.rulesByCategory[cat] = append(dst.rulesByCategory[cat], cloneRuleSlice(rules)...)
+ dst.categorizedNum += len(rules)
+ }
+ return dst
}
-type typeQuery struct {
- x types.Type
- ctx *Context
+func cloneRuleSlice(slice []goRule) []goRule {
+ out := make([]goRule, len(slice))
+ for i, rule := range slice {
+ clone := rule
+ clone.pat = rule.pat.Clone()
+ out[i] = clone
+ }
+ return out
}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go
new file mode 100644
index 000000000..6cc4d9056
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go
@@ -0,0 +1,21 @@
+package goutil
+
+import (
+ "go/ast"
+ "go/printer"
+ "go/token"
+ "strings"
+)
+
+// SprintNode returns the textual representation of n.
+// If fset is nil, a freshly created file set will be used.
+func SprintNode(fset *token.FileSet, n ast.Node) string {
+ if fset == nil {
+ fset = token.NewFileSet()
+ }
+ var buf strings.Builder
+ if err := printer.Fprint(&buf, fset, n); err != nil {
+ return ""
+ }
+ return buf.String()
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go
new file mode 100644
index 000000000..8705707ac
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go
@@ -0,0 +1,33 @@
+package goutil
+
+import (
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+func ResolveFunc(info *types.Info, callable ast.Expr) (ast.Expr, *types.Func) {
+ switch callable := astutil.Unparen(callable).(type) {
+ case *ast.Ident:
+ sig, ok := info.ObjectOf(callable).(*types.Func)
+ if !ok {
+ return nil, nil
+ }
+ return nil, sig
+
+ case *ast.SelectorExpr:
+ sig, ok := info.ObjectOf(callable.Sel).(*types.Func)
+ if !ok {
+ return nil, nil
+ }
+ isMethod := sig.Type().(*types.Signature).Recv() != nil
+ if _, ok := callable.X.(*ast.Ident); ok && !isMethod {
+ return nil, sig
+ }
+ return callable.X, sig
+
+ default:
+ return nil, nil
+ }
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go
new file mode 100644
index 000000000..06a0bbf9f
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go
@@ -0,0 +1,116 @@
+package ruleguard
+
+import (
+ "fmt"
+ "go/ast"
+ "go/importer"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "runtime"
+
+ "github.com/quasilyte/go-ruleguard/internal/golist"
+)
+
+// goImporter is a `types.Importer` that tries to load a package no matter what.
+// It iterates through multiple import strategies and accepts whatever succeeds first.
+type goImporter struct {
+ // TODO(quasilyte): share importers with gogrep?
+
+ state *engineState
+
+ defaultImporter types.Importer
+ srcImporter types.Importer
+
+ fset *token.FileSet
+
+ debugImports bool
+ debugPrint func(string)
+}
+
+type goImporterConfig struct {
+ fset *token.FileSet
+ debugImports bool
+ debugPrint func(string)
+}
+
+func newGoImporter(state *engineState, config goImporterConfig) *goImporter {
+ return &goImporter{
+ state: state,
+ fset: config.fset,
+ debugImports: config.debugImports,
+ debugPrint: config.debugPrint,
+ defaultImporter: importer.Default(),
+ srcImporter: importer.ForCompiler(config.fset, "source", nil),
+ }
+}
+
+func (imp *goImporter) Import(path string) (*types.Package, error) {
+ if pkg := imp.state.GetCachedPackage(path); pkg != nil {
+ if imp.debugImports {
+ imp.debugPrint(fmt.Sprintf(`imported "%s" from importer cache`, path))
+ }
+ return pkg, nil
+ }
+
+ pkg, err1 := imp.srcImporter.Import(path)
+ if err1 == nil {
+ imp.state.AddCachedPackage(path, pkg)
+ if imp.debugImports {
+ imp.debugPrint(fmt.Sprintf(`imported "%s" from source importer`, path))
+ }
+ return pkg, nil
+ }
+
+ pkg, err2 := imp.defaultImporter.Import(path)
+ if err2 == nil {
+ imp.state.AddCachedPackage(path, pkg)
+ if imp.debugImports {
+ imp.debugPrint(fmt.Sprintf(`imported "%s" from %s importer`, path, runtime.Compiler))
+ }
+ return pkg, nil
+ }
+
+ // Fallback to `go list` as a last resort.
+ pkg, err3 := imp.golistImport(path)
+ if err3 == nil {
+ imp.state.AddCachedPackage(path, pkg)
+ if imp.debugImports {
+ imp.debugPrint(fmt.Sprintf(`imported "%s" from golist importer`, path))
+ }
+ return pkg, nil
+ }
+
+ if imp.debugImports {
+ imp.debugPrint(fmt.Sprintf(`failed to import "%s":`, path))
+ imp.debugPrint(fmt.Sprintf(" source importer: %v", err1))
+ imp.debugPrint(fmt.Sprintf(" %s importer: %v", runtime.Compiler, err2))
+ imp.debugPrint(fmt.Sprintf(" golist importer: %v", err3))
+ }
+
+ return nil, err2
+}
+
+func (imp *goImporter) golistImport(path string) (*types.Package, error) {
+ golistPkg, err := golist.JSON(path)
+ if err != nil {
+ return nil, err
+ }
+
+ files := make([]*ast.File, 0, len(golistPkg.GoFiles))
+ for _, filename := range golistPkg.GoFiles {
+ fullname := filepath.Join(golistPkg.Dir, filename)
+ f, err := parser.ParseFile(imp.fset, fullname, nil, 0)
+ if err != nil {
+ return nil, err
+ }
+ files = append(files, f)
+ }
+
+ // TODO: do we want to assign imp as importer for this nested typechecker?
+ // Otherwise it won't be able to resolve imports.
+ var typecheker types.Config
+ var info types.Info
+ return typecheker.Check(path, imp.fset, files, &info)
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go
new file mode 100644
index 000000000..ddd56cbe1
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go
@@ -0,0 +1,200 @@
+package ruleguard
+
+import (
+ "go/types"
+
+ "github.com/quasilyte/go-ruleguard/internal/xtypes"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
+)
+
+// This file implements `dsl/*` packages as native functions in quasigo.
+//
+// Every function and method defined in any `dsl/*` package should have
+// associated Go function that implements it.
+//
+// In quasigo, it's impossible to have a pointer to an interface and
+// non-pointer struct type. All interface type methods have FQN without `*` prefix
+// while all struct type methods always begin with `*`.
+//
+// Fields are readonly.
+// Field access is compiled into a method call that has a name identical to the field.
+// For example, `foo.Bar` field access will be compiled as `foo.Bar()`.
+// This may change in the future; benchmarks are needed to figure out
+// what is more efficient: reflect-based field access or a function call.
+//
+// To keep this code organized, every type and package functions are represented
+// as structs with methods. Then we bind a method value to quasigo symbol.
+// The naming scheme is `dsl{$name}Package` for packages and `dsl{$pkg}{$name}` for types.
+
+func initEnv(state *engineState, env *quasigo.Env) {
+ nativeTypes := map[string]quasigoNative{
+ `*github.com/quasilyte/go-ruleguard/dsl.VarFilterContext`: dslVarFilterContext{state: state},
+ `github.com/quasilyte/go-ruleguard/dsl/types.Type`: dslTypesType{},
+ `*github.com/quasilyte/go-ruleguard/dsl/types.Interface`: dslTypesInterface{},
+ `*github.com/quasilyte/go-ruleguard/dsl/types.Pointer`: dslTypesPointer{},
+ }
+
+ for qualifier, typ := range nativeTypes {
+ for methodName, fn := range typ.funcs() {
+ env.AddNativeMethod(qualifier, methodName, fn)
+ }
+ }
+
+ nativePackages := map[string]quasigoNative{
+ `github.com/quasilyte/go-ruleguard/dsl/types`: dslTypesPackage{},
+ }
+
+ for qualifier, pkg := range nativePackages {
+ for funcName, fn := range pkg.funcs() {
+ env.AddNativeMethod(qualifier, funcName, fn)
+ }
+ }
+}
+
+type quasigoNative interface {
+ funcs() map[string]func(*quasigo.ValueStack)
+}
+
+type dslTypesType struct{}
+
+func (native dslTypesType) funcs() map[string]func(*quasigo.ValueStack) {
+ return map[string]func(*quasigo.ValueStack){
+ "Underlying": native.Underlying,
+ "String": native.String,
+ }
+}
+
+func (dslTypesType) Underlying(stack *quasigo.ValueStack) {
+ stack.Push(stack.Pop().(types.Type).Underlying())
+}
+
+func (dslTypesType) String(stack *quasigo.ValueStack) {
+ stack.Push(stack.Pop().(types.Type).String())
+}
+
+type dslTypesInterface struct{}
+
+func (native dslTypesInterface) funcs() map[string]func(*quasigo.ValueStack) {
+ return map[string]func(*quasigo.ValueStack){
+ "Underlying": native.Underlying,
+ "String": native.String,
+ }
+}
+
+func (dslTypesInterface) Underlying(stack *quasigo.ValueStack) {
+ stack.Push(stack.Pop().(*types.Interface).Underlying())
+}
+
+func (dslTypesInterface) String(stack *quasigo.ValueStack) {
+ stack.Push(stack.Pop().(*types.Interface).String())
+}
+
+type dslTypesPointer struct{}
+
+func (native dslTypesPointer) funcs() map[string]func(*quasigo.ValueStack) {
+ return map[string]func(*quasigo.ValueStack){
+ "Underlying": native.Underlying,
+ "String": native.String,
+ "Elem": native.Elem,
+ }
+}
+
+func (dslTypesPointer) Underlying(stack *quasigo.ValueStack) {
+ stack.Push(stack.Pop().(*types.Pointer).Underlying())
+}
+
+func (dslTypesPointer) String(stack *quasigo.ValueStack) {
+ stack.Push(stack.Pop().(*types.Pointer).String())
+}
+
+func (dslTypesPointer) Elem(stack *quasigo.ValueStack) {
+ stack.Push(stack.Pop().(*types.Pointer).Elem())
+}
+
+type dslTypesPackage struct{}
+
+func (native dslTypesPackage) funcs() map[string]func(*quasigo.ValueStack) {
+ return map[string]func(*quasigo.ValueStack){
+ "Implements": native.Implements,
+ "Identical": native.Identical,
+ "NewPointer": native.NewPointer,
+ "AsPointer": native.AsPointer,
+ "AsInterface": native.AsInterface,
+ }
+}
+
+func (dslTypesPackage) Implements(stack *quasigo.ValueStack) {
+ iface := stack.Pop().(*types.Interface)
+ typ := stack.Pop().(types.Type)
+ stack.Push(xtypes.Implements(typ, iface))
+}
+
+func (dslTypesPackage) Identical(stack *quasigo.ValueStack) {
+ y := stack.Pop().(types.Type)
+ x := stack.Pop().(types.Type)
+ stack.Push(xtypes.Identical(x, y))
+}
+
+func (dslTypesPackage) NewPointer(stack *quasigo.ValueStack) {
+ typ := stack.Pop().(types.Type)
+ stack.Push(types.NewPointer(typ))
+}
+
+func (dslTypesPackage) AsPointer(stack *quasigo.ValueStack) {
+ typ, _ := stack.Pop().(types.Type).(*types.Pointer)
+ stack.Push(typ)
+}
+
+func (dslTypesPackage) AsInterface(stack *quasigo.ValueStack) {
+ typ, _ := stack.Pop().(types.Type).(*types.Interface)
+ stack.Push(typ)
+}
+
+type dslVarFilterContext struct {
+ state *engineState
+}
+
+func (native dslVarFilterContext) funcs() map[string]func(*quasigo.ValueStack) {
+ return map[string]func(*quasigo.ValueStack){
+ "Type": native.Type,
+ "SizeOf": native.SizeOf,
+ "GetType": native.GetType,
+ "GetInterface": native.GetInterface,
+ }
+}
+
+func (dslVarFilterContext) Type(stack *quasigo.ValueStack) {
+ params := stack.Pop().(*filterParams)
+ typ := params.typeofNode(params.subExpr(params.varname))
+ stack.Push(typ)
+}
+
+func (native dslVarFilterContext) SizeOf(stack *quasigo.ValueStack) {
+ typ := stack.Pop().(types.Type)
+ params := stack.Pop().(*filterParams)
+ stack.PushInt(int(params.ctx.Sizes.Sizeof(typ)))
+}
+
+func (native dslVarFilterContext) GetType(stack *quasigo.ValueStack) {
+ fqn := stack.Pop().(string)
+ params := stack.Pop().(*filterParams)
+ typ, err := native.state.FindType(params.importer, params.ctx.Pkg, fqn)
+ if err != nil {
+ panic(err)
+ }
+ stack.Push(typ)
+}
+
+func (native dslVarFilterContext) GetInterface(stack *quasigo.ValueStack) {
+ fqn := stack.Pop().(string)
+ params := stack.Pop().(*filterParams)
+ typ, err := native.state.FindType(params.importer, params.ctx.Pkg, fqn)
+ if err != nil {
+ panic(err)
+ }
+ if ifaceType, ok := typ.Underlying().(*types.Interface); ok {
+ stack.Push(ifaceType)
+ return
+ }
+ stack.Push((*types.Interface)(nil)) // Not found or not an interface
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/merge.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/merge.go
deleted file mode 100644
index e494930ab..000000000
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/merge.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package ruleguard
-
-func mergeRuleSets(toMerge []*GoRuleSet) *GoRuleSet {
- out := &GoRuleSet{
- local: &scopedGoRuleSet{},
- universal: &scopedGoRuleSet{},
- }
-
- for _, x := range toMerge {
- out.local = appendScopedRuleSet(out.local, x.local)
- out.universal = appendScopedRuleSet(out.universal, x.universal)
- }
-
- return out
-}
-
-func appendScopedRuleSet(dst, src *scopedGoRuleSet) *scopedGoRuleSet {
- dst.uncategorized = append(dst.uncategorized, src.uncategorized...)
- for cat, rules := range src.rulesByCategory {
- dst.rulesByCategory[cat] = append(dst.rulesByCategory[cat], rules...)
- dst.categorizedNum += len(rules)
- }
- return dst
-}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go
index 859ed39a4..57d849b1a 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/node_category.go
@@ -57,6 +57,13 @@ const (
nodeValueSpec
nodeCategoriesCount
+
+ // Categories below are not used inside scopedRuleSet yet
+ // as categorizeNode will never produce them during the parsing.
+ // They're required for Node.Is().
+
+ nodeExpr // ast.Expr
+ nodeStmt // ast.Stmt
)
func categorizeNode(n ast.Node) nodeCategory {
@@ -157,3 +164,110 @@ func categorizeNode(n ast.Node) nodeCategory {
return nodeUnknown
}
}
+
+func categorizeNodeString(s string) nodeCategory {
+ switch s {
+ case "Expr":
+ return nodeExpr
+ case "Stmt":
+ return nodeStmt
+ }
+
+ // Below is a switch from categorizeNode.
+ switch s {
+ case "ArrayType":
+ return nodeArrayType
+ case "AssignStmt":
+ return nodeAssignStmt
+ case "BasicLit":
+ return nodeBasicLit
+ case "BinaryExpr":
+ return nodeBinaryExpr
+ case "BlockStmt":
+ return nodeBlockStmt
+ case "BranchStmt":
+ return nodeBranchStmt
+ case "CallExpr":
+ return nodeCallExpr
+ case "CaseClause":
+ return nodeCaseClause
+ case "ChanType":
+ return nodeChanType
+ case "CommClause":
+ return nodeCommClause
+ case "CompositeLit":
+ return nodeCompositeLit
+ case "DeclStmt":
+ return nodeDeclStmt
+ case "DeferStmt":
+ return nodeDeferStmt
+ case "Ellipsis":
+ return nodeEllipsis
+ case "EmptyStmt":
+ return nodeEmptyStmt
+ case "ExprStmt":
+ return nodeExprStmt
+ case "ForStmt":
+ return nodeForStmt
+ case "FuncDecl":
+ return nodeFuncDecl
+ case "FuncLit":
+ return nodeFuncLit
+ case "FuncType":
+ return nodeFuncType
+ case "GenDecl":
+ return nodeGenDecl
+ case "GoStmt":
+ return nodeGoStmt
+ case "Ident":
+ return nodeIdent
+ case "IfStmt":
+ return nodeIfStmt
+ case "ImportSpec":
+ return nodeImportSpec
+ case "IncDecStmt":
+ return nodeIncDecStmt
+ case "IndexExpr":
+ return nodeIndexExpr
+ case "InterfaceType":
+ return nodeInterfaceType
+ case "KeyValueExpr":
+ return nodeKeyValueExpr
+ case "LabeledStmt":
+ return nodeLabeledStmt
+ case "MapType":
+ return nodeMapType
+ case "ParenExpr":
+ return nodeParenExpr
+ case "RangeStmt":
+ return nodeRangeStmt
+ case "ReturnStmt":
+ return nodeReturnStmt
+ case "SelectStmt":
+ return nodeSelectStmt
+ case "SelectorExpr":
+ return nodeSelectorExpr
+ case "SendStmt":
+ return nodeSendStmt
+ case "SliceExpr":
+ return nodeSliceExpr
+ case "StarExpr":
+ return nodeStarExpr
+ case "StructType":
+ return nodeStructType
+ case "SwitchStmt":
+ return nodeSwitchStmt
+ case "TypeAssertExpr":
+ return nodeTypeAssertExpr
+ case "TypeSpec":
+ return nodeTypeSpec
+ case "TypeSwitchStmt":
+ return nodeTypeSwitchStmt
+ case "UnaryExpr":
+ return nodeUnaryExpr
+ case "ValueSpec":
+ return nodeValueSpec
+ default:
+ return nodeUnknown
+ }
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go
index 98fcd20df..89d2dc437 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go
@@ -1,240 +1,310 @@
package ruleguard
import (
+ "bytes"
"fmt"
"go/ast"
- "go/constant"
- "go/importer"
"go/parser"
"go/token"
"go/types"
"io"
+ "io/ioutil"
"path"
"regexp"
"strconv"
"github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep"
+ "github.com/quasilyte/go-ruleguard/ruleguard/goutil"
+ "github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
"github.com/quasilyte/go-ruleguard/ruleguard/typematch"
)
+// TODO(quasilyte): use source code byte slicing instead of SprintNode?
+
+type parseError string
+
+func (e parseError) Error() string { return string(e) }
+
type rulesParser struct {
+ state *engineState
+ ctx *ParseContext
+
+ prefix string // For imported packages, a prefix that is added to a rule group name
+ importedPkg string // Package path; only for imported packages
+
filename string
- fset *token.FileSet
- res *GoRuleSet
+ group string
+ res *goRuleSet
+ pkg *types.Package
types *types.Info
- itab *typematch.ImportsTab
- dslImporter types.Importer
- stdImporter types.Importer // TODO(quasilyte): share importer with gogrep?
- srcImporter types.Importer
+ importer *goImporter
+
+ itab *typematch.ImportsTab
+
+ imported []*goRuleSet
+
+ dslPkgname string // The local name of the "ruleguard/dsl" package (usually it's just "dsl")
+}
+
+type rulesParserConfig struct {
+ state *engineState
+
+ ctx *ParseContext
+
+ importer *goImporter
+
+ prefix string
+ importedPkg string
+
+ itab *typematch.ImportsTab
}
-func newRulesParser() *rulesParser {
- var stdlib = map[string]string{
- "adler32": "hash/adler32",
- "aes": "crypto/aes",
- "ascii85": "encoding/ascii85",
- "asn1": "encoding/asn1",
- "ast": "go/ast",
- "atomic": "sync/atomic",
- "base32": "encoding/base32",
- "base64": "encoding/base64",
- "big": "math/big",
- "binary": "encoding/binary",
- "bits": "math/bits",
- "bufio": "bufio",
- "build": "go/build",
- "bytes": "bytes",
- "bzip2": "compress/bzip2",
- "cgi": "net/http/cgi",
- "cgo": "runtime/cgo",
- "cipher": "crypto/cipher",
- "cmplx": "math/cmplx",
- "color": "image/color",
- "constant": "go/constant",
- "context": "context",
- "cookiejar": "net/http/cookiejar",
- "crc32": "hash/crc32",
- "crc64": "hash/crc64",
- "crypto": "crypto",
- "csv": "encoding/csv",
- "debug": "runtime/debug",
- "des": "crypto/des",
- "doc": "go/doc",
- "draw": "image/draw",
- "driver": "database/sql/driver",
- "dsa": "crypto/dsa",
- "dwarf": "debug/dwarf",
- "ecdsa": "crypto/ecdsa",
- "ed25519": "crypto/ed25519",
- "elf": "debug/elf",
- "elliptic": "crypto/elliptic",
- "encoding": "encoding",
- "errors": "errors",
- "exec": "os/exec",
- "expvar": "expvar",
- "fcgi": "net/http/fcgi",
- "filepath": "path/filepath",
- "flag": "flag",
- "flate": "compress/flate",
- "fmt": "fmt",
- "fnv": "hash/fnv",
- "format": "go/format",
- "gif": "image/gif",
- "gob": "encoding/gob",
- "gosym": "debug/gosym",
- "gzip": "compress/gzip",
- "hash": "hash",
- "heap": "container/heap",
- "hex": "encoding/hex",
- "hmac": "crypto/hmac",
- "html": "html",
- "http": "net/http",
- "httptest": "net/http/httptest",
- "httptrace": "net/http/httptrace",
- "httputil": "net/http/httputil",
- "image": "image",
- "importer": "go/importer",
- "io": "io",
- "iotest": "testing/iotest",
- "ioutil": "io/ioutil",
- "jpeg": "image/jpeg",
- "json": "encoding/json",
- "jsonrpc": "net/rpc/jsonrpc",
- "list": "container/list",
- "log": "log",
- "lzw": "compress/lzw",
- "macho": "debug/macho",
- "mail": "net/mail",
- "math": "math",
- "md5": "crypto/md5",
- "mime": "mime",
- "multipart": "mime/multipart",
- "net": "net",
- "os": "os",
- "palette": "image/color/palette",
- "parse": "text/template/parse",
- "parser": "go/parser",
- "path": "path",
- "pe": "debug/pe",
- "pem": "encoding/pem",
- "pkix": "crypto/x509/pkix",
- "plan9obj": "debug/plan9obj",
- "plugin": "plugin",
- "png": "image/png",
- "pprof": "runtime/pprof",
- "printer": "go/printer",
- "quick": "testing/quick",
- "quotedprintable": "mime/quotedprintable",
- "race": "runtime/race",
- "rand": "math/rand",
- "rc4": "crypto/rc4",
- "reflect": "reflect",
- "regexp": "regexp",
- "ring": "container/ring",
- "rpc": "net/rpc",
- "rsa": "crypto/rsa",
- "runtime": "runtime",
- "scanner": "text/scanner",
- "sha1": "crypto/sha1",
- "sha256": "crypto/sha256",
- "sha512": "crypto/sha512",
- "signal": "os/signal",
- "smtp": "net/smtp",
- "sort": "sort",
- "sql": "database/sql",
- "strconv": "strconv",
- "strings": "strings",
- "subtle": "crypto/subtle",
- "suffixarray": "index/suffixarray",
- "sync": "sync",
- "syntax": "regexp/syntax",
- "syscall": "syscall",
- "syslog": "log/syslog",
- "tabwriter": "text/tabwriter",
- "tar": "archive/tar",
- "template": "text/template",
- "testing": "testing",
- "textproto": "net/textproto",
- "time": "time",
- "tls": "crypto/tls",
- "token": "go/token",
- "trace": "runtime/trace",
- "types": "go/types",
- "unicode": "unicode",
- "unsafe": "unsafe",
- "url": "net/url",
- "user": "os/user",
- "utf16": "unicode/utf16",
- "utf8": "unicode/utf8",
- "x509": "crypto/x509",
- "xml": "encoding/xml",
- "zip": "archive/zip",
- "zlib": "compress/zlib",
- }
-
- // TODO(quasilyte): do we need to pass the fileset here?
- fset := token.NewFileSet()
+func newRulesParser(config rulesParserConfig) *rulesParser {
return &rulesParser{
- itab: typematch.NewImportsTab(stdlib),
- stdImporter: importer.Default(),
- srcImporter: importer.ForCompiler(fset, "source", nil),
- dslImporter: newDSLImporter(),
+ state: config.state,
+ ctx: config.ctx,
+ importer: config.importer,
+ prefix: config.prefix,
+ importedPkg: config.importedPkg,
+ itab: config.itab,
}
}
-func (p *rulesParser) ParseFile(filename string, fset *token.FileSet, r io.Reader) (*GoRuleSet, error) {
+func (p *rulesParser) ParseFile(filename string, r io.Reader) (*goRuleSet, error) {
+ p.dslPkgname = "dsl"
p.filename = filename
- p.fset = fset
- p.res = &GoRuleSet{
- local: &scopedGoRuleSet{},
+ p.res = &goRuleSet{
universal: &scopedGoRuleSet{},
+ groups: make(map[string]token.Position),
}
parserFlags := parser.Mode(0)
- f, err := parser.ParseFile(fset, filename, r, parserFlags)
+ f, err := parser.ParseFile(p.ctx.Fset, filename, r, parserFlags)
if err != nil {
- return nil, fmt.Errorf("parser error: %v", err)
+ return nil, fmt.Errorf("parse file error: %v", err)
+ }
+
+ for _, imp := range f.Imports {
+ importPath, err := strconv.Unquote(imp.Path.Value)
+ if err != nil {
+ return nil, p.errorf(imp, "unquote %s import path: %v", imp.Path.Value, err)
+ }
+ if importPath == "github.com/quasilyte/go-ruleguard/dsl" {
+ if imp.Name != nil {
+ p.dslPkgname = imp.Name.Name
+ }
+ }
}
if f.Name.Name != "gorules" {
return nil, fmt.Errorf("expected a gorules package name, found %s", f.Name.Name)
}
- typechecker := types.Config{Importer: p.dslImporter}
- p.types = &types.Info{Types: map[ast.Expr]types.TypeAndValue{}}
- _, err = typechecker.Check("gorules", fset, []*ast.File{f}, p.types)
+ typechecker := types.Config{Importer: p.importer}
+ p.types = &types.Info{
+ Types: map[ast.Expr]types.TypeAndValue{},
+ Uses: map[*ast.Ident]types.Object{},
+ Defs: map[*ast.Ident]types.Object{},
+ }
+ pkg, err := typechecker.Check("gorules", p.ctx.Fset, []*ast.File{f}, p.types)
if err != nil {
return nil, fmt.Errorf("typechecker error: %v", err)
}
+ p.pkg = pkg
+ var matcherFuncs []*ast.FuncDecl
+ var userFuncs []*ast.FuncDecl
for _, decl := range f.Decls {
decl, ok := decl.(*ast.FuncDecl)
if !ok {
continue
}
+ if decl.Name.String() == "init" {
+ if err := p.parseInitFunc(decl); err != nil {
+ return nil, err
+ }
+ continue
+ }
+
+ if p.isMatcherFunc(decl) {
+ matcherFuncs = append(matcherFuncs, decl)
+ } else {
+ userFuncs = append(userFuncs, decl)
+ }
+ }
+
+ for _, decl := range userFuncs {
+ if err := p.parseUserFunc(decl); err != nil {
+ return nil, err
+ }
+ }
+ for _, decl := range matcherFuncs {
if err := p.parseRuleGroup(decl); err != nil {
return nil, err
}
}
+ if len(p.imported) != 0 {
+ toMerge := []*goRuleSet{p.res}
+ toMerge = append(toMerge, p.imported...)
+ merged, err := mergeRuleSets(toMerge)
+ if err != nil {
+ return nil, err
+ }
+ p.res = merged
+ }
+
return p.res, nil
}
-func (p *rulesParser) parseRuleGroup(f *ast.FuncDecl) error {
+func (p *rulesParser) parseUserFunc(f *ast.FuncDecl) error {
+ ctx := &quasigo.CompileContext{
+ Env: p.state.env,
+ Types: p.types,
+ Fset: p.ctx.Fset,
+ }
+ compiled, err := quasigo.Compile(ctx, f)
+ if err != nil {
+ return err
+ }
+ if p.ctx.DebugFilter == f.Name.String() {
+ p.ctx.DebugPrint(quasigo.Disasm(p.state.env, compiled))
+ }
+ ctx.Env.AddFunc(p.pkg.Path(), f.Name.String(), compiled)
+ return nil
+}
+
+func (p *rulesParser) parseInitFunc(f *ast.FuncDecl) error {
+ type bundleImport struct {
+ node ast.Node
+ prefix string
+ pkgPath string
+ }
+
+ var imported []bundleImport
+
+ for _, stmt := range f.Body.List {
+ exprStmt, ok := stmt.(*ast.ExprStmt)
+ if !ok {
+ return p.errorf(stmt, "unsupported statement")
+ }
+ call, ok := exprStmt.X.(*ast.CallExpr)
+ if !ok {
+ return p.errorf(stmt, "unsupported expr")
+ }
+ fn, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return p.errorf(stmt, "unsupported call")
+ }
+ pkg, ok := fn.X.(*ast.Ident)
+ if !ok || pkg.Name != p.dslPkgname {
+ return p.errorf(stmt, "unsupported call")
+ }
+
+ switch fn.Sel.Name {
+ case "ImportRules":
+ if p.importedPkg != "" {
+ return p.errorf(call, "imports from imported packages are not supported yet")
+ }
+ prefix := p.parseStringArg(call.Args[0])
+ bundleSelector, ok := call.Args[1].(*ast.SelectorExpr)
+ if !ok {
+ return p.errorf(call.Args[1], "expected a `pkgname.Bundle` argument")
+ }
+ bundleObj := p.types.ObjectOf(bundleSelector.Sel)
+ imported = append(imported, bundleImport{
+ node: stmt,
+ prefix: prefix,
+ pkgPath: bundleObj.Pkg().Path(),
+ })
+
+ default:
+ return p.errorf(stmt, "unsupported %s call", fn.Sel.Name)
+ }
+ }
+
+ for _, imp := range imported {
+ files, err := findBundleFiles(imp.pkgPath)
+ if err != nil {
+ return p.errorf(imp.node, "import lookup error: %v", err)
+ }
+ for _, filename := range files {
+ rset, err := p.importRules(imp.prefix, imp.pkgPath, filename)
+ if err != nil {
+ return p.errorf(imp.node, "import parsing error: %v", err)
+ }
+ p.imported = append(p.imported, rset)
+ }
+ }
+
+ return nil
+}
+
+func (p *rulesParser) importRules(prefix, pkgPath, filename string) (*goRuleSet, error) {
+ data, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ config := rulesParserConfig{
+ ctx: p.ctx,
+ importer: p.importer,
+ prefix: prefix,
+ importedPkg: pkgPath,
+ itab: p.itab,
+ }
+ rset, err := newRulesParser(config).ParseFile(filename, bytes.NewReader(data))
+ if err != nil {
+ return nil, fmt.Errorf("%s: %v", p.importedPkg, err)
+ }
+ return rset, nil
+}
+
+func (p *rulesParser) isMatcherFunc(f *ast.FuncDecl) bool {
+ typ := p.types.ObjectOf(f.Name).Type().(*types.Signature)
+ return typ.Results().Len() == 0 &&
+ typ.Params().Len() == 1 &&
+ typ.Params().At(0).Type().String() == "github.com/quasilyte/go-ruleguard/dsl.Matcher"
+}
+
+func (p *rulesParser) parseRuleGroup(f *ast.FuncDecl) (err error) {
+ defer func() {
+ rv := recover()
+ if rv == nil {
+ return
+ }
+ if parseErr, ok := rv.(parseError); ok {
+ err = parseErr
+ return
+ }
+ panic(rv) // not our panic
+ }()
+
+ if f.Name.String() == "_" {
+ return p.errorf(f.Name, "`_` is not a valid rule group function name")
+ }
if f.Body == nil {
return p.errorf(f, "unexpected empty function body")
}
- if f.Type.Results != nil {
- return p.errorf(f.Type.Results, "rule group function should not return anything")
- }
params := f.Type.Params.List
- if len(params) != 1 || len(params[0].Names) != 1 {
- return p.errorf(f.Type.Params, "rule group function should accept exactly 1 Matcher param")
- }
- // TODO(quasilyte): do an actual matcher param type check?
matcher := params[0].Names[0].Name
+ p.group = f.Name.Name
+ if p.prefix != "" {
+ p.group = p.prefix + "/" + f.Name.Name
+ }
+
+ if p.ctx.GroupFilter != nil && !p.ctx.GroupFilter(p.group) {
+ return nil // Skip this group
+ }
+ if _, ok := p.res.groups[p.group]; ok {
+ panic(fmt.Sprintf("duplicated function %s after the typecheck", p.group)) // Should never happen
+ }
+ p.res.groups[p.group] = token.Position{
+ Filename: p.filename,
+ Line: p.ctx.Fset.Position(f.Name.Pos()).Line,
+ }
+
p.itab.EnterScope()
defer p.itab.LeaveScope()
@@ -244,11 +314,11 @@ func (p *rulesParser) parseRuleGroup(f *ast.FuncDecl) error {
}
stmtExpr, ok := stmt.(*ast.ExprStmt)
if !ok {
- return p.errorf(stmt, "expected a %s method call, found %s", matcher, sprintNode(p.fset, stmt))
+ return p.errorf(stmt, "expected a %s method call, found %s", matcher, goutil.SprintNode(p.ctx.Fset, stmt))
}
call, ok := stmtExpr.X.(*ast.CallExpr)
if !ok {
- return p.errorf(stmt, "expected a %s method call, found %s", matcher, sprintNode(p.fset, stmt))
+ return p.errorf(stmt, "expected a %s method call, found %s", matcher, goutil.SprintNode(p.ctx.Fset, stmt))
}
if err := p.parseCall(matcher, call); err != nil {
return err
@@ -300,14 +370,29 @@ func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error {
}
switch chain.Sel.Name {
case "Match":
+ if matchArgs != nil {
+ return p.errorf(chain.Sel, "Match() can't be repeated")
+ }
matchArgs = &call.Args
case "Where":
+ if whereArgs != nil {
+ return p.errorf(chain.Sel, "Where() can't be repeated")
+ }
whereArgs = &call.Args
case "Suggest":
+ if suggestArgs != nil {
+ return p.errorf(chain.Sel, "Suggest() can't be repeated")
+ }
suggestArgs = &call.Args
case "Report":
+ if reportArgs != nil {
+ return p.errorf(chain.Sel, "Report() can't be repeated")
+ }
reportArgs = &call.Args
case "At":
+ if atArgs != nil {
+ return p.errorf(chain.Sel, "At() can't be repeated")
+ }
atArgs = &call.Args
default:
return p.errorf(chain.Sel, "unexpected %s method", chain.Sel.Name)
@@ -319,10 +404,10 @@ func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error {
}
dst := p.res.universal
- filters := map[string]submatchFilter{}
proto := goRule{
filename: p.filename,
- filters: filters,
+ line: p.ctx.Fset.Position(origCall.Pos()).Line,
+ group: p.group,
}
var alternatives []string
@@ -330,25 +415,15 @@ func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error {
return p.errorf(origCall, "missing Match() call")
}
for _, arg := range *matchArgs {
- alt, ok := p.toStringValue(arg)
- if !ok {
- return p.errorf(arg, "expected a string literal argument")
- }
- alternatives = append(alternatives, alt)
+ alternatives = append(alternatives, p.parseStringArg(arg))
}
if whereArgs != nil {
- if err := p.walkFilter(filters, (*whereArgs)[0], false); err != nil {
- return err
- }
+ proto.filter = p.parseFilter((*whereArgs)[0])
}
if suggestArgs != nil {
- s, ok := p.toStringValue((*suggestArgs)[0])
- if !ok {
- return p.errorf((*suggestArgs)[0], "expected string literal argument")
- }
- proto.suggestion = s
+ proto.suggestion = p.parseStringArg((*suggestArgs)[0])
}
if reportArgs == nil {
@@ -357,11 +432,7 @@ func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error {
}
proto.msg = "suggestion: " + proto.suggestion
} else {
- message, ok := p.toStringValue((*reportArgs)[0])
- if !ok {
- return p.errorf((*reportArgs)[0], "expected string literal argument")
- }
- proto.msg = message
+ proto.msg = p.parseStringArg((*reportArgs)[0])
}
if atArgs != nil {
@@ -378,9 +449,9 @@ func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error {
for i, alt := range alternatives {
rule := proto
- pat, err := gogrep.Parse(p.fset, alt)
+ pat, err := gogrep.Parse(p.ctx.Fset, alt)
if err != nil {
- return p.errorf((*matchArgs)[i], "gogrep parse: %v", err)
+ return p.errorf((*matchArgs)[i], "parse match pattern: %v", err)
}
rule.pat = pat
cat := categorizeNode(pat.Expr)
@@ -395,206 +466,224 @@ func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error {
return nil
}
-func (p *rulesParser) walkFilter(dst map[string]submatchFilter, e ast.Expr, negate bool) error {
- typeAnd := func(x, y func(typeQuery) bool) func(typeQuery) bool {
- if x == nil {
- return y
- }
- return func(q typeQuery) bool {
- return x(q) && y(q)
- }
+func (p *rulesParser) parseFilter(root ast.Expr) matchFilter {
+ return p.parseFilterExpr(root)
+}
+
+func (p *rulesParser) errorf(n ast.Node, format string, args ...interface{}) parseError {
+ loc := p.ctx.Fset.Position(n.Pos())
+ message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...))
+ return parseError(message)
+}
+
+func (p *rulesParser) parseStringArg(e ast.Expr) string {
+ s, ok := p.toStringValue(e)
+ if !ok {
+ panic(p.errorf(e, "expected a string literal argument"))
}
- textAnd := func(x, y func(string) bool) func(string) bool {
- if x == nil {
- return y
- }
- return func(s string) bool {
- return x(s) && y(s)
- }
+ return s
+}
+
+func (p *rulesParser) parseRegexpArg(e ast.Expr) *regexp.Regexp {
+ patternString, ok := p.toStringValue(e)
+ if !ok {
+ panic(p.errorf(e, "expected a regexp pattern argument"))
+ }
+ re, err := regexp.Compile(patternString)
+ if err != nil {
+ panic(p.errorf(e, err.Error()))
}
+ return re
+}
+
+func (p *rulesParser) parseTypeStringArg(e ast.Expr) types.Type {
+ typeString, ok := p.toStringValue(e)
+ if !ok {
+ panic(p.errorf(e, "expected a type string argument"))
+ }
+ typ, err := typeFromString(typeString)
+ if err != nil {
+ panic(p.errorf(e, "parse type expr: %v", err))
+ }
+ if typ == nil {
+ panic(p.errorf(e, "can't convert %s into a type constraint yet", typeString))
+ }
+ return typ
+}
+
+func (p *rulesParser) parseFilterExpr(e ast.Expr) matchFilter {
+ result := matchFilter{src: goutil.SprintNode(p.ctx.Fset, e)}
switch e := e.(type) {
+ case *ast.ParenExpr:
+ return p.parseFilterExpr(e.X)
+
case *ast.UnaryExpr:
+ x := p.parseFilterExpr(e.X)
if e.Op == token.NOT {
- return p.walkFilter(dst, e.X, !negate)
+ result.fn = makeNotFilter(result.src, x)
+ return result
}
+ panic(p.errorf(e, "unsupported unary op: %s", result.src))
+
case *ast.BinaryExpr:
switch e.Op {
case token.LAND:
- err := p.walkFilter(dst, e.X, negate)
- if err != nil {
- return err
- }
- return p.walkFilter(dst, e.Y, negate)
+ result.fn = makeAndFilter(p.parseFilterExpr(e.X), p.parseFilterExpr(e.Y))
+ return result
+ case token.LOR:
+ result.fn = makeOrFilter(p.parseFilterExpr(e.X), p.parseFilterExpr(e.Y))
+ return result
case token.GEQ, token.LEQ, token.LSS, token.GTR, token.EQL, token.NEQ:
operand := p.toFilterOperand(e.X)
- y := p.types.Types[e.Y].Value
- expectedResult := !negate
- if operand.path == "Type.Size" && y != nil {
- filter := dst[operand.varName]
- filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool {
- x := constant.MakeInt64(q.ctx.Sizes.Sizeof(q.x))
- return expectedResult == constant.Compare(x, e.Op, y)
- })
- dst[operand.varName] = filter
- return nil
+ rhs := p.toFilterOperand(e.Y)
+ rhsValue := p.types.Types[e.Y].Value
+ if operand.path == "Type.Size" && rhsValue != nil {
+ result.fn = makeTypeSizeConstFilter(result.src, operand.varName, e.Op, rhsValue)
+ return result
}
- if operand.path == "Text" && y != nil {
- filter := dst[operand.varName]
- filter.textPred = textAnd(filter.textPred, func(s string) bool {
- x := constant.MakeString(s)
- return expectedResult == constant.Compare(x, e.Op, y)
- })
- dst[operand.varName] = filter
- return nil
+ if operand.path == "Value.Int" && rhsValue != nil {
+ result.fn = makeValueIntConstFilter(result.src, operand.varName, e.Op, rhsValue)
+ return result
+ }
+ if operand.path == "Value.Int" && rhs.path == "Value.Int" && rhs.varName != "" {
+ result.fn = makeValueIntFilter(result.src, operand.varName, e.Op, rhs.varName)
+ return result
+ }
+ if operand.path == "Text" && rhsValue != nil {
+ result.fn = makeTextConstFilter(result.src, operand.varName, e.Op, rhsValue)
+ return result
+ }
+ if operand.path == "Text" && rhs.path == "Text" && rhs.varName != "" {
+ result.fn = makeTextFilter(result.src, operand.varName, e.Op, rhs.varName)
+ return result
}
}
+ panic(p.errorf(e, "unsupported binary op: %s", result.src))
}
- // TODO(quasilyte): refactor and extend.
operand := p.toFilterOperand(e)
args := operand.args
- filter := dst[operand.varName]
switch operand.path {
default:
- return p.errorf(e, "%s is not a valid filter expression", sprintNode(p.fset, e))
+ panic(p.errorf(e, "unsupported expr: %s", result.src))
+
+ case "File.Imports":
+ pkgPath := p.parseStringArg(args[0])
+ result.fn = makeFileImportsFilter(result.src, pkgPath)
+
+ case "File.PkgPath.Matches":
+ re := p.parseRegexpArg(args[0])
+ result.fn = makeFilePkgPathMatchesFilter(result.src, re)
+
+ case "File.Name.Matches":
+ re := p.parseRegexpArg(args[0])
+ result.fn = makeFileNameMatchesFilter(result.src, re)
+
case "Pure":
- if negate {
- filter.pure = bool3false
- } else {
- filter.pure = bool3true
- }
- dst[operand.varName] = filter
+ result.fn = makePureFilter(result.src, operand.varName)
+
case "Const":
- if negate {
- filter.constant = bool3false
- } else {
- filter.constant = bool3true
- }
- dst[operand.varName] = filter
+ result.fn = makeConstFilter(result.src, operand.varName)
+
case "Addressable":
- if negate {
- filter.addressable = bool3false
- } else {
- filter.addressable = bool3true
+ result.fn = makeAddressableFilter(result.src, operand.varName)
+
+ case "Filter":
+ expr, fn := goutil.ResolveFunc(p.types, args[0])
+ if expr != nil {
+ panic(p.errorf(expr, "expected a simple function name, found expression"))
}
- dst[operand.varName] = filter
- case "Text.Matches":
- patternString, ok := p.toStringValue(args[0])
- if !ok {
- return p.errorf(args[0], "expected a string literal argument")
+ sig := fn.Type().(*types.Signature)
+ userFn := p.state.env.GetFunc(fn.Pkg().Path(), fn.Name())
+ if userFn == nil {
+ panic(p.errorf(args[0], "can't find a compiled version of %s", sig.String()))
}
- re, err := regexp.Compile(patternString)
- if err != nil {
- return p.errorf(args[0], "parse regexp: %v", err)
- }
- wantMatched := !negate
- filter.textPred = textAnd(filter.textPred, func(s string) bool {
- return wantMatched == re.MatchString(s)
- })
- dst[operand.varName] = filter
- case "Type.Is":
+ result.fn = makeCustomVarFilter(result.src, operand.varName, userFn)
+
+ case "Type.Is", "Type.Underlying.Is":
typeString, ok := p.toStringValue(args[0])
if !ok {
- return p.errorf(args[0], "expected a string literal argument")
+ panic(p.errorf(args[0], "expected a string literal argument"))
}
ctx := typematch.Context{Itab: p.itab}
pat, err := typematch.Parse(&ctx, typeString)
if err != nil {
- return p.errorf(args[0], "parse type expr: %v", err)
+ panic(p.errorf(args[0], "parse type expr: %v", err))
}
- wantIdentical := !negate
- filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool {
- return wantIdentical == pat.MatchIdentical(q.x)
- })
- dst[operand.varName] = filter
+ underlying := operand.path == "Type.Underlying.Is"
+ result.fn = makeTypeIsFilter(result.src, operand.varName, underlying, pat)
+
case "Type.ConvertibleTo":
- typeString, ok := p.toStringValue(args[0])
- if !ok {
- return p.errorf(args[0], "expected a string literal argument")
- }
- y, err := typeFromString(typeString)
- if err != nil {
- return p.errorf(args[0], "parse type expr: %v", err)
- }
- if y == nil {
- return p.errorf(args[0], "can't convert %s into a type constraint yet", typeString)
- }
- wantConvertible := !negate
- filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool {
- return wantConvertible == types.ConvertibleTo(q.x, y)
- })
- dst[operand.varName] = filter
+ dstType := p.parseTypeStringArg(args[0])
+ result.fn = makeTypeConvertibleToFilter(result.src, operand.varName, dstType)
+
case "Type.AssignableTo":
- typeString, ok := p.toStringValue(args[0])
- if !ok {
- return p.errorf(args[0], "expected a string literal argument")
- }
- y, err := typeFromString(typeString)
- if err != nil {
- return p.errorf(args[0], "parse type expr: %v", err)
- }
- if y == nil {
- return p.errorf(args[0], "can't convert %s into a type constraint yet", typeString)
- }
- wantAssignable := !negate
- filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool {
- return wantAssignable == types.AssignableTo(q.x, y)
- })
- dst[operand.varName] = filter
+ dstType := p.parseTypeStringArg(args[0])
+ result.fn = makeTypeAssignableToFilter(result.src, operand.varName, dstType)
+
case "Type.Implements":
typeString, ok := p.toStringValue(args[0])
if !ok {
- return p.errorf(args[0], "expected a string literal argument")
+ panic(p.errorf(args[0], "expected a string literal argument"))
}
n, err := parser.ParseExpr(typeString)
if err != nil {
- return p.errorf(args[0], "parse type expr: %v", err)
- }
- e, ok := n.(*ast.SelectorExpr)
- if !ok {
- return p.errorf(args[0], "only qualified names are supported")
+ panic(p.errorf(args[0], "parse type expr: %v", err))
}
- pkgName, ok := e.X.(*ast.Ident)
- if !ok {
- return p.errorf(e.X, "invalid package name")
- }
- pkgPath, ok := p.itab.Lookup(pkgName.Name)
- if !ok {
- return p.errorf(e.X, "package %s is not imported", pkgName.Name)
- }
- pkg, err := p.stdImporter.Import(pkgPath)
- if err != nil {
- pkg, err = p.srcImporter.Import(pkgPath)
+ var iface *types.Interface
+ switch n := n.(type) {
+ case *ast.Ident:
+ if n.Name != `error` {
+ panic(p.errorf(n, "only `error` unqualified type is recognized"))
+ }
+ iface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+ case *ast.SelectorExpr:
+ pkgName, ok := n.X.(*ast.Ident)
+ if !ok {
+ panic(p.errorf(n.X, "invalid package name"))
+ }
+ pkgPath, ok := p.itab.Lookup(pkgName.Name)
+ if !ok {
+ panic(p.errorf(n.X, "package %s is not imported", pkgName.Name))
+ }
+ pkg, err := p.importer.Import(pkgPath)
if err != nil {
- return p.errorf(e, "can't load %s: %v", pkgPath, err)
+ panic(p.errorf(n, "can't load %s: %v", pkgPath, err))
}
+ obj := pkg.Scope().Lookup(n.Sel.Name)
+ if obj == nil {
+ panic(p.errorf(n, "%s is not found in %s", n.Sel.Name, pkgPath))
+ }
+ iface, ok = obj.Type().Underlying().(*types.Interface)
+ if !ok {
+ panic(p.errorf(n, "%s is not an interface type", n.Sel.Name))
+ }
+ default:
+ panic(p.errorf(args[0], "only qualified names (and `error`) are supported"))
}
- obj := pkg.Scope().Lookup(e.Sel.Name)
- if obj == nil {
- return p.errorf(e, "%s is not found in %s", e.Sel.Name, pkgPath)
- }
- iface, ok := obj.Type().Underlying().(*types.Interface)
+ result.fn = makeTypeImplementsFilter(result.src, operand.varName, iface)
+
+ case "Text.Matches":
+ re := p.parseRegexpArg(args[0])
+ result.fn = makeTextMatchesFilter(result.src, operand.varName, re)
+
+ case "Node.Is":
+ typeString, ok := p.toStringValue(args[0])
if !ok {
- return p.errorf(e, "%s is not an interface type", e.Sel.Name)
+ panic(p.errorf(args[0], "expected a string literal argument"))
}
- wantImplemented := !negate
- filter.typePred = typeAnd(filter.typePred, func(q typeQuery) bool {
- return wantImplemented == types.Implements(q.x, iface)
- })
- dst[operand.varName] = filter
+ cat := categorizeNodeString(typeString)
+ if cat == nodeUnknown {
+ panic(p.errorf(args[0], "%s is not a valid go/ast type name", typeString))
+ }
+ result.fn = makeNodeIsFilter(result.src, operand.varName, cat)
}
- return nil
-}
-
-func (p *rulesParser) toIntValue(x ast.Node) (int64, bool) {
- lit, ok := x.(*ast.BasicLit)
- if !ok || lit.Kind != token.INT {
- return 0, false
+ if result.fn == nil {
+ panic("bug: nil func for the filter") // Should never happen
}
- v, err := strconv.ParseInt(lit.Value, 10, 64)
- return v, err == nil
+ return result
}
func (p *rulesParser) toStringValue(x ast.Node) (string, bool) {
@@ -603,7 +692,11 @@ func (p *rulesParser) toStringValue(x ast.Node) (string, bool) {
if x.Kind != token.STRING {
return "", false
}
- return unquoteNode(x), true
+ s, err := strconv.Unquote(x.Value)
+ if err != nil {
+ return "", false
+ }
+ return s, true
case ast.Expr:
typ, ok := p.types.Types[x]
if !ok || typ.Type.String() != "string" {
@@ -625,6 +718,10 @@ func (p *rulesParser) toFilterOperand(e ast.Expr) filterOperand {
}
var path string
for {
+ if call, ok := e.(*ast.CallExpr); ok {
+ e = call.Fun
+ continue
+ }
selector, ok := e.(*ast.SelectorExpr)
if !ok {
break
@@ -636,6 +733,9 @@ func (p *rulesParser) toFilterOperand(e ast.Expr) filterOperand {
}
e = selector.X
}
+
+ o.path = path
+
indexing, ok := e.(*ast.IndexExpr)
if !ok {
return o
@@ -644,21 +744,11 @@ func (p *rulesParser) toFilterOperand(e ast.Expr) filterOperand {
if !ok {
return o
}
- indexString, ok := p.toStringValue(indexing.Index)
- if !ok {
- return o
- }
-
o.mapName = mapIdent.Name
+ indexString, _ := p.toStringValue(indexing.Index)
o.varName = indexString
- o.path = path
- return o
-}
-func (p *rulesParser) errorf(n ast.Node, format string, args ...interface{}) error {
- loc := p.fset.Position(n.Pos())
- return fmt.Errorf("%s:%d: %s",
- loc.Filename, loc.Line, fmt.Sprintf(format, args...))
+ return o
}
type filterOperand struct {
@@ -667,3 +757,146 @@ type filterOperand struct {
path string
args []ast.Expr
}
+
+var stdlibPackages = map[string]string{
+ "adler32": "hash/adler32",
+ "aes": "crypto/aes",
+ "ascii85": "encoding/ascii85",
+ "asn1": "encoding/asn1",
+ "ast": "go/ast",
+ "atomic": "sync/atomic",
+ "base32": "encoding/base32",
+ "base64": "encoding/base64",
+ "big": "math/big",
+ "binary": "encoding/binary",
+ "bits": "math/bits",
+ "bufio": "bufio",
+ "build": "go/build",
+ "bytes": "bytes",
+ "bzip2": "compress/bzip2",
+ "cgi": "net/http/cgi",
+ "cgo": "runtime/cgo",
+ "cipher": "crypto/cipher",
+ "cmplx": "math/cmplx",
+ "color": "image/color",
+ "constant": "go/constant",
+ "context": "context",
+ "cookiejar": "net/http/cookiejar",
+ "crc32": "hash/crc32",
+ "crc64": "hash/crc64",
+ "crypto": "crypto",
+ "csv": "encoding/csv",
+ "debug": "runtime/debug",
+ "des": "crypto/des",
+ "doc": "go/doc",
+ "draw": "image/draw",
+ "driver": "database/sql/driver",
+ "dsa": "crypto/dsa",
+ "dwarf": "debug/dwarf",
+ "ecdsa": "crypto/ecdsa",
+ "ed25519": "crypto/ed25519",
+ "elf": "debug/elf",
+ "elliptic": "crypto/elliptic",
+ "encoding": "encoding",
+ "errors": "errors",
+ "exec": "os/exec",
+ "expvar": "expvar",
+ "fcgi": "net/http/fcgi",
+ "filepath": "path/filepath",
+ "flag": "flag",
+ "flate": "compress/flate",
+ "fmt": "fmt",
+ "fnv": "hash/fnv",
+ "format": "go/format",
+ "gif": "image/gif",
+ "gob": "encoding/gob",
+ "gosym": "debug/gosym",
+ "gzip": "compress/gzip",
+ "hash": "hash",
+ "heap": "container/heap",
+ "hex": "encoding/hex",
+ "hmac": "crypto/hmac",
+ "html": "html",
+ "http": "net/http",
+ "httptest": "net/http/httptest",
+ "httptrace": "net/http/httptrace",
+ "httputil": "net/http/httputil",
+ "image": "image",
+ "importer": "go/importer",
+ "io": "io",
+ "iotest": "testing/iotest",
+ "ioutil": "io/ioutil",
+ "jpeg": "image/jpeg",
+ "json": "encoding/json",
+ "jsonrpc": "net/rpc/jsonrpc",
+ "list": "container/list",
+ "log": "log",
+ "lzw": "compress/lzw",
+ "macho": "debug/macho",
+ "mail": "net/mail",
+ "math": "math",
+ "md5": "crypto/md5",
+ "mime": "mime",
+ "multipart": "mime/multipart",
+ "net": "net",
+ "os": "os",
+ "palette": "image/color/palette",
+ "parse": "text/template/parse",
+ "parser": "go/parser",
+ "path": "path",
+ "pe": "debug/pe",
+ "pem": "encoding/pem",
+ "pkix": "crypto/x509/pkix",
+ "plan9obj": "debug/plan9obj",
+ "plugin": "plugin",
+ "png": "image/png",
+ "pprof": "runtime/pprof",
+ "printer": "go/printer",
+ "quick": "testing/quick",
+ "quotedprintable": "mime/quotedprintable",
+ "race": "runtime/race",
+ "rand": "math/rand",
+ "rc4": "crypto/rc4",
+ "reflect": "reflect",
+ "regexp": "regexp",
+ "ring": "container/ring",
+ "rpc": "net/rpc",
+ "rsa": "crypto/rsa",
+ "runtime": "runtime",
+ "scanner": "text/scanner",
+ "sha1": "crypto/sha1",
+ "sha256": "crypto/sha256",
+ "sha512": "crypto/sha512",
+ "signal": "os/signal",
+ "smtp": "net/smtp",
+ "sort": "sort",
+ "sql": "database/sql",
+ "strconv": "strconv",
+ "strings": "strings",
+ "subtle": "crypto/subtle",
+ "suffixarray": "index/suffixarray",
+ "sync": "sync",
+ "syntax": "regexp/syntax",
+ "syscall": "syscall",
+ "syslog": "log/syslog",
+ "tabwriter": "text/tabwriter",
+ "tar": "archive/tar",
+ "template": "text/template",
+ "testing": "testing",
+ "textproto": "net/textproto",
+ "time": "time",
+ "tls": "crypto/tls",
+ "token": "go/token",
+ "trace": "runtime/trace",
+ "types": "go/types",
+ "unicode": "unicode",
+ "unsafe": "unsafe",
+ "url": "net/url",
+ "user": "os/user",
+ "utf16": "unicode/utf16",
+ "utf8": "unicode/utf8",
+ "x509": "crypto/x509",
+ "xml": "encoding/xml",
+ "zip": "archive/zip",
+ "zlib": "compress/zlib",
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
new file mode 100644
index 000000000..fa28732d5
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
@@ -0,0 +1,703 @@
+package quasigo
+
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+
+ "github.com/quasilyte/go-ruleguard/ruleguard/goutil"
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+func compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) {
+ defer func() {
+ rv := recover()
+ if rv == nil {
+ return
+ }
+ if compileErr, ok := rv.(compileError); ok {
+ err = compileErr
+ return
+ }
+ panic(rv) // not our panic
+ }()
+
+ return compileFunc(ctx, fn), nil
+}
+
+func compileFunc(ctx *CompileContext, fn *ast.FuncDecl) *Func {
+ fnType := ctx.Types.ObjectOf(fn.Name).Type().(*types.Signature)
+ if fnType.Results().Len() != 1 {
+ panic(compileError("only functions with a single non-void results are supported"))
+ }
+
+ cl := compiler{
+ ctx: ctx,
+ retType: fnType.Results().At(0).Type(),
+ constantsPool: make(map[interface{}]int),
+ intConstantsPool: make(map[int]int),
+ locals: make(map[string]int),
+ }
+ return cl.compileFunc(fnType, fn)
+}
+
+type compiler struct {
+ ctx *CompileContext
+
+ retType types.Type
+
+ lastOp opcode
+
+ locals map[string]int
+ constantsPool map[interface{}]int
+ intConstantsPool map[int]int
+ params map[string]int
+
+ code []byte
+ constants []interface{}
+ intConstants []int
+
+ breakTarget *label
+ continueTarget *label
+
+ labels []*label
+}
+
+type label struct {
+ targetPos int
+ sources []int
+}
+
+type compileError string
+
+func (e compileError) Error() string { return string(e) }
+
+func (cl *compiler) compileFunc(fnType *types.Signature, fn *ast.FuncDecl) *Func {
+ if !cl.isSupportedType(cl.retType) {
+ panic(cl.errorUnsupportedType(fn.Name, cl.retType, "function result"))
+ }
+
+ dbg := funcDebugInfo{
+ paramNames: make([]string, fnType.Params().Len()),
+ }
+
+ cl.params = make(map[string]int, fnType.Params().Len())
+ for i := 0; i < fnType.Params().Len(); i++ {
+ p := fnType.Params().At(i)
+ paramName := p.Name()
+ paramType := p.Type()
+ cl.params[paramName] = i
+ dbg.paramNames[i] = paramName
+ if !cl.isSupportedType(paramType) {
+ panic(cl.errorUnsupportedType(fn.Name, paramType, paramName+" param"))
+ }
+ }
+
+ cl.compileStmt(fn.Body)
+ compiled := &Func{
+ code: cl.code,
+ constants: cl.constants,
+ intConstants: cl.intConstants,
+ }
+ if len(cl.locals) != 0 {
+ dbg.localNames = make([]string, len(cl.locals))
+ for localName, localIndex := range cl.locals {
+ dbg.localNames[localIndex] = localName
+ }
+ }
+ cl.ctx.Env.debug.funcs[compiled] = dbg
+ cl.linkJumps()
+ return compiled
+}
+
+func (cl *compiler) compileStmt(stmt ast.Stmt) {
+ switch stmt := stmt.(type) {
+ case *ast.ReturnStmt:
+ cl.compileReturnStmt(stmt)
+
+ case *ast.AssignStmt:
+ cl.compileAssignStmt(stmt)
+
+ case *ast.IncDecStmt:
+ cl.compileIncDecStmt(stmt)
+
+ case *ast.IfStmt:
+ cl.compileIfStmt(stmt)
+
+ case *ast.ForStmt:
+ cl.compileForStmt(stmt)
+
+ case *ast.BranchStmt:
+ cl.compileBranchStmt(stmt)
+
+ case *ast.BlockStmt:
+ for i := range stmt.List {
+ cl.compileStmt(stmt.List[i])
+ }
+
+ default:
+ panic(cl.errorf(stmt, "can't compile %T yet", stmt))
+ }
+}
+
+func (cl *compiler) compileIncDecStmt(stmt *ast.IncDecStmt) {
+ varname, ok := stmt.X.(*ast.Ident)
+ if !ok {
+ panic(cl.errorf(stmt.X, "can assign only to simple variables"))
+ }
+ id := cl.getLocal(varname, varname.String())
+ if stmt.Tok == token.INC {
+ cl.emit8(opIncLocal, id)
+ } else {
+ cl.emit8(opDecLocal, id)
+ }
+}
+
+func (cl *compiler) compileBranchStmt(branch *ast.BranchStmt) {
+ if branch.Label != nil {
+ panic(cl.errorf(branch.Label, "can't compile %s with a label", branch.Tok))
+ }
+
+ switch branch.Tok {
+ case token.BREAK:
+ cl.emitJump(opJump, cl.breakTarget)
+ default:
+ panic(cl.errorf(branch, "can't compile %s yet", branch.Tok))
+ }
+}
+
+func (cl *compiler) compileForStmt(stmt *ast.ForStmt) {
+ labelBreak := cl.newLabel()
+ labelContinue := cl.newLabel()
+ prevBreakTarget := cl.breakTarget
+ prevContinueTarget := cl.continueTarget
+ cl.breakTarget = labelBreak
+ cl.continueTarget = labelContinue
+
+ switch {
+ case stmt.Cond != nil && stmt.Init != nil && stmt.Post != nil:
+ // Will be implemented later; probably when the max number of locals will be lifted.
+ panic(cl.errorf(stmt, "can't compile C-style for loops yet"))
+
+ case stmt.Cond != nil && stmt.Init == nil && stmt.Post == nil:
+ // `for <cond> { ... }`
+ labelBody := cl.newLabel()
+ cl.emitJump(opJump, labelContinue)
+ cl.bindLabel(labelBody)
+ cl.compileStmt(stmt.Body)
+ cl.bindLabel(labelContinue)
+ cl.compileExpr(stmt.Cond)
+ cl.emitJump(opJumpTrue, labelBody)
+ cl.bindLabel(labelBreak)
+
+ default:
+ // `for { ... }`
+ cl.bindLabel(labelContinue)
+ cl.compileStmt(stmt.Body)
+ cl.emitJump(opJump, labelContinue)
+ cl.bindLabel(labelBreak)
+ }
+
+ cl.breakTarget = prevBreakTarget
+ cl.continueTarget = prevContinueTarget
+}
+
+func (cl *compiler) compileIfStmt(stmt *ast.IfStmt) {
+ if stmt.Else == nil {
+ labelEnd := cl.newLabel()
+ cl.compileExpr(stmt.Cond)
+ cl.emitJump(opJumpFalse, labelEnd)
+ cl.compileStmt(stmt.Body)
+ cl.bindLabel(labelEnd)
+ return
+ }
+
+ labelEnd := cl.newLabel()
+ labelElse := cl.newLabel()
+ cl.compileExpr(stmt.Cond)
+ cl.emitJump(opJumpFalse, labelElse)
+ cl.compileStmt(stmt.Body)
+ if !cl.isUncondJump(cl.lastOp) {
+ cl.emitJump(opJump, labelEnd)
+ }
+ cl.bindLabel(labelElse)
+ cl.compileStmt(stmt.Else)
+ cl.bindLabel(labelEnd)
+}
+
+func (cl *compiler) compileAssignStmt(assign *ast.AssignStmt) {
+ if len(assign.Lhs) != 1 {
+ panic(cl.errorf(assign, "only single left operand is allowed in assignments"))
+ }
+ if len(assign.Rhs) != 1 {
+ panic(cl.errorf(assign, "only single right operand is allowed in assignments"))
+ }
+ lhs := assign.Lhs[0]
+ rhs := assign.Rhs[0]
+ varname, ok := lhs.(*ast.Ident)
+ if !ok {
+ panic(cl.errorf(lhs, "can assign only to simple variables"))
+ }
+
+ cl.compileExpr(rhs)
+
+ typ := cl.ctx.Types.TypeOf(varname)
+ if assign.Tok == token.DEFINE {
+ if _, ok := cl.locals[varname.String()]; ok {
+ panic(cl.errorf(lhs, "%s variable shadowing is not allowed", varname))
+ }
+ if !cl.isSupportedType(typ) {
+ panic(cl.errorUnsupportedType(varname, typ, varname.String()+" local variable"))
+ }
+ if len(cl.locals) == maxFuncLocals {
+ panic(cl.errorf(lhs, "can't define %s: too many locals", varname))
+ }
+ id := len(cl.locals)
+ cl.locals[varname.String()] = id
+ cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id)
+ } else {
+ id := cl.getLocal(varname, varname.String())
+ cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id)
+ }
+}
+
+func (cl *compiler) getLocal(v ast.Expr, varname string) int {
+ id, ok := cl.locals[varname]
+ if !ok {
+ if _, ok := cl.params[varname]; ok {
+ panic(cl.errorf(v, "can't assign to %s, params are readonly", varname))
+ }
+ panic(cl.errorf(v, "%s is not a writeable local variable", varname))
+ }
+ return id
+}
+
+func (cl *compiler) compileReturnStmt(ret *ast.ReturnStmt) {
+ if ret.Results == nil {
+ panic(cl.errorf(ret, "'naked' return statements are not allowed"))
+ }
+
+ switch {
+ case identName(ret.Results[0]) == "true":
+ cl.emit(opReturnTrue)
+ case identName(ret.Results[0]) == "false":
+ cl.emit(opReturnFalse)
+ default:
+ cl.compileExpr(ret.Results[0])
+ typ := cl.ctx.Types.TypeOf(ret.Results[0])
+ cl.emit(pickOp(typeIsInt(typ), opReturnIntTop, opReturnTop))
+ }
+}
+
+func (cl *compiler) compileExpr(e ast.Expr) {
+ cv := cl.ctx.Types.Types[e].Value
+ if cv != nil {
+ cl.compileConstantValue(e, cv)
+ return
+ }
+
+ switch e := e.(type) {
+ case *ast.ParenExpr:
+ cl.compileExpr(e.X)
+
+ case *ast.Ident:
+ cl.compileIdent(e)
+
+ case *ast.SelectorExpr:
+ cl.compileSelectorExpr(e)
+
+ case *ast.UnaryExpr:
+ switch e.Op {
+ case token.NOT:
+ cl.compileUnaryOp(opNot, e)
+ default:
+ panic(cl.errorf(e, "can't compile unary %s yet", e.Op))
+ }
+
+ case *ast.SliceExpr:
+ cl.compileSliceExpr(e)
+
+ case *ast.BinaryExpr:
+ cl.compileBinaryExpr(e)
+
+ case *ast.CallExpr:
+ cl.compileCallExpr(e)
+
+ default:
+ panic(cl.errorf(e, "can't compile %T yet", e))
+ }
+}
+
+func (cl *compiler) compileSelectorExpr(e *ast.SelectorExpr) {
+ typ := cl.ctx.Types.TypeOf(e.X)
+ key := funcKey{
+ name: e.Sel.String(),
+ qualifier: typ.String(),
+ }
+
+ if funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]; ok {
+ cl.compileExpr(e.X)
+ cl.emit16(opCallNative, int(funcID))
+ return
+ }
+
+ panic(cl.errorf(e, "can't compile %s field access", e.Sel))
+}
+
+func (cl *compiler) compileBinaryExpr(e *ast.BinaryExpr) {
+ typ := cl.ctx.Types.TypeOf(e.X)
+
+ switch e.Op {
+ case token.LOR:
+ cl.compileOr(e)
+ case token.LAND:
+ cl.compileAnd(e)
+
+ case token.NEQ:
+ switch {
+ case identName(e.X) == "nil":
+ cl.compileExpr(e.Y)
+ cl.emit(opIsNotNil)
+ case identName(e.Y) == "nil":
+ cl.compileExpr(e.X)
+ cl.emit(opIsNotNil)
+ case typeIsString(typ):
+ cl.compileBinaryOp(opNotEqString, e)
+ case typeIsInt(typ):
+ cl.compileBinaryOp(opNotEqInt, e)
+ default:
+ panic(cl.errorf(e, "!= is not implemented for %s operands", typ))
+ }
+ case token.EQL:
+ switch {
+ case identName(e.X) == "nil":
+ cl.compileExpr(e.Y)
+ cl.emit(opIsNil)
+ case identName(e.Y) == "nil":
+ cl.compileExpr(e.X)
+ cl.emit(opIsNil)
+ case typeIsString(cl.ctx.Types.TypeOf(e.X)):
+ cl.compileBinaryOp(opEqString, e)
+ case typeIsInt(cl.ctx.Types.TypeOf(e.X)):
+ cl.compileBinaryOp(opEqInt, e)
+ default:
+ panic(cl.errorf(e, "== is not implemented for %s operands", typ))
+ }
+
+ case token.GTR:
+ cl.compileIntBinaryOp(e, opGtInt, typ)
+ case token.GEQ:
+ cl.compileIntBinaryOp(e, opGtEqInt, typ)
+ case token.LSS:
+ cl.compileIntBinaryOp(e, opLtInt, typ)
+ case token.LEQ:
+ cl.compileIntBinaryOp(e, opLtEqInt, typ)
+
+ case token.ADD:
+ switch {
+ case typeIsString(typ):
+ cl.compileBinaryOp(opConcat, e)
+ case typeIsInt(typ):
+ cl.compileBinaryOp(opAdd, e)
+ default:
+ panic(cl.errorf(e, "+ is not implemented for %s operands", typ))
+ }
+
+ case token.SUB:
+ cl.compileIntBinaryOp(e, opSub, typ)
+
+ default:
+ panic(cl.errorf(e, "can't compile binary %s yet", e.Op))
+ }
+}
+
+func (cl *compiler) compileIntBinaryOp(e *ast.BinaryExpr, op opcode, typ types.Type) {
+ switch {
+ case typeIsInt(typ):
+ cl.compileBinaryOp(op, e)
+ default:
+ panic(cl.errorf(e, "%s is not implemented for %s operands", e.Op, typ))
+ }
+}
+
+func (cl *compiler) compileSliceExpr(slice *ast.SliceExpr) {
+ if slice.Slice3 {
+ panic(cl.errorf(slice, "can't compile 3-index slicing"))
+ }
+
+ // No need to do slicing, its no-op `s[:]`.
+ if slice.Low == nil && slice.High == nil {
+ cl.compileExpr(slice.X)
+ return
+ }
+
+ sliceOp := opStringSlice
+ sliceFromOp := opStringSliceFrom
+ sliceToOp := opStringSliceTo
+
+ if !typeIsString(cl.ctx.Types.TypeOf(slice.X)) {
+ panic(cl.errorf(slice.X, "can't compile slicing of something that is not a string"))
+ }
+
+ switch {
+ case slice.Low == nil && slice.High != nil:
+ cl.compileExpr(slice.X)
+ cl.compileExpr(slice.High)
+ cl.emit(sliceToOp)
+ case slice.Low != nil && slice.High == nil:
+ cl.compileExpr(slice.X)
+ cl.compileExpr(slice.Low)
+ cl.emit(sliceFromOp)
+ default:
+ cl.compileExpr(slice.X)
+ cl.compileExpr(slice.Low)
+ cl.compileExpr(slice.High)
+ cl.emit(sliceOp)
+ }
+}
+
+func (cl *compiler) compileBuiltinCall(fn *ast.Ident, call *ast.CallExpr) {
+ switch fn.Name {
+ case `len`:
+ s := call.Args[0]
+ cl.compileExpr(s)
+ if !typeIsString(cl.ctx.Types.TypeOf(s)) {
+ panic(cl.errorf(s, "can't compile len() with non-string argument yet"))
+ }
+ cl.emit(opStringLen)
+ default:
+ panic(cl.errorf(fn, "can't compile %s() builtin function call yet", fn))
+ }
+}
+
+// compileCallExpr emits code for a function or method call.
+// Builtin calls are dispatched to compileBuiltinCall; everything else
+// must resolve to a native function registered in the Env
+// (see Env.AddNativeFunc / Env.AddNativeMethod).
+func (cl *compiler) compileCallExpr(call *ast.CallExpr) {
+	if id, ok := astutil.Unparen(call.Fun).(*ast.Ident); ok {
+		_, isBuiltin := cl.ctx.Types.ObjectOf(id).(*types.Builtin)
+		if isBuiltin {
+			cl.compileBuiltinCall(id, call)
+			return
+		}
+	}
+
+	expr, fn := goutil.ResolveFunc(cl.ctx.Types, call.Fun)
+	if fn == nil {
+		panic(cl.errorf(call.Fun, "can't resolve the called function"))
+	}
+
+	// TODO: just use Func.FullName as a key?
+	// Methods are keyed by their receiver type; package-level
+	// functions by their package path (mirrors funcKey docs in env.go).
+	key := funcKey{name: fn.Name()}
+	sig := fn.Type().(*types.Signature)
+	if sig.Recv() != nil {
+		key.qualifier = sig.Recv().Type().String()
+	} else {
+		key.qualifier = fn.Pkg().Path()
+	}
+
+	if funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]; ok {
+		// For method calls, expr is the receiver expression;
+		// it's pushed before the explicit arguments.
+		if expr != nil {
+			cl.compileExpr(expr)
+		}
+		for _, arg := range call.Args {
+			cl.compileExpr(arg)
+		}
+		cl.emit16(opCallNative, int(funcID))
+		return
+	}
+
+	panic(cl.errorf(call.Fun, "can't compile a call to %s func", key))
+}
+
+// compileUnaryOp compiles the operand and then emits op.
+func (cl *compiler) compileUnaryOp(op opcode, e *ast.UnaryExpr) {
+	cl.compileExpr(e.X)
+	cl.emit(op)
+}
+
+// compileBinaryOp compiles both operands (left first) and then emits op.
+func (cl *compiler) compileBinaryOp(op opcode, e *ast.BinaryExpr) {
+	cl.compileExpr(e.X)
+	cl.compileExpr(e.Y)
+	cl.emit(op)
+}
+
+// compileOr emits short-circuiting code for `x || y`: y is evaluated
+// only when x is false. Dup keeps a copy of x so that it remains on
+// the stack as the result when JumpTrue (which pops the condition)
+// takes the jump.
+func (cl *compiler) compileOr(e *ast.BinaryExpr) {
+	labelEnd := cl.newLabel()
+	cl.compileExpr(e.X)
+	cl.emit(opDup)
+	cl.emitJump(opJumpTrue, labelEnd)
+	cl.compileExpr(e.Y)
+	cl.bindLabel(labelEnd)
+}
+
+// compileAnd emits short-circuiting code for `x && y`: y is evaluated
+// only when x is true (same Dup trick as compileOr).
+func (cl *compiler) compileAnd(e *ast.BinaryExpr) {
+	labelEnd := cl.newLabel()
+	cl.compileExpr(e.X)
+	cl.emit(opDup)
+	cl.emitJump(opJumpFalse, labelEnd)
+	cl.compileExpr(e.Y)
+	cl.bindLabel(labelEnd)
+}
+
+// compileIdent emits code that pushes the value of an identifier.
+// Constants are inlined; otherwise the ident is looked up among the
+// function params and then among the locals. Int-typed values use the
+// dedicated int stack, hence the opPushInt* variants.
+func (cl *compiler) compileIdent(ident *ast.Ident) {
+	tv := cl.ctx.Types.Types[ident]
+	cv := tv.Value
+	if cv != nil {
+		cl.compileConstantValue(ident, cv)
+		return
+	}
+	if paramIndex, ok := cl.params[ident.String()]; ok {
+		cl.emit8(pickOp(typeIsInt(tv.Type), opPushIntParam, opPushParam), paramIndex)
+		return
+	}
+	if localIndex, ok := cl.locals[ident.String()]; ok {
+		cl.emit8(pickOp(typeIsInt(tv.Type), opPushIntLocal, opPushLocal), localIndex)
+		return
+	}
+
+	panic(cl.errorf(ident, "can't compile a %s (type %s) variable read", ident.String(), tv.Type))
+}
+
+// compileConstantValue emits code that pushes the given constant.
+// Bools use dedicated opcodes; string and int constants are interned
+// into the function constant pools and referenced by pool id.
+// Complex and float constants are not supported yet.
+func (cl *compiler) compileConstantValue(source ast.Expr, cv constant.Value) {
+	switch cv.Kind() {
+	case constant.Bool:
+		v := constant.BoolVal(cv)
+		if v {
+			cl.emit(opPushTrue)
+		} else {
+			cl.emit(opPushFalse)
+		}
+
+	case constant.String:
+		v := constant.StringVal(cv)
+		id := cl.internConstant(v)
+		cl.emit8(opPushConst, id)
+
+	case constant.Int:
+		// Constants that don't fit int64 are rejected.
+		v, exact := constant.Int64Val(cv)
+		if !exact {
+			panic(cl.errorf(source, "non-exact int value"))
+		}
+		id := cl.internIntConstant(int(v))
+		cl.emit8(opPushIntConst, id)
+
+	case constant.Complex:
+		panic(cl.errorf(source, "can't compile complex number constants yet"))
+
+	case constant.Float:
+		panic(cl.errorf(source, "can't compile float constants yet"))
+
+	default:
+		panic(cl.errorf(source, "unexpected constant %v", cv))
+	}
+}
+
+// internIntConstant returns the pool index for v, adding it to the
+// int constants pool on first use (constants are deduplicated).
+func (cl *compiler) internIntConstant(v int) int {
+	if id, ok := cl.intConstantsPool[v]; ok {
+		return id
+	}
+	id := len(cl.intConstants)
+	cl.intConstants = append(cl.intConstants, v)
+	cl.intConstantsPool[v] = id
+	return id
+}
+
+// internConstant returns the pool index for v, adding it to the
+// general (boxed) constants pool on first use. Int constants must go
+// through internIntConstant instead; interning one here is a
+// compiler bug, hence the panic.
+func (cl *compiler) internConstant(v interface{}) int {
+	if _, ok := v.(int); ok {
+		panic("compiler error: int constant interned as interface{}")
+	}
+	if id, ok := cl.constantsPool[v]; ok {
+		return id
+	}
+	id := len(cl.constants)
+	cl.constants = append(cl.constants, v)
+	cl.constantsPool[v] = id
+	return id
+}
+
+// linkJumps back-patches every jump instruction with the pc-relative
+// offset to its label target. Offsets are 16-bit values stored right
+// after the jump opcode (see emitJump).
+func (cl *compiler) linkJumps() {
+	for _, l := range cl.labels {
+		for _, jumpPos := range l.sources {
+			offset := l.targetPos - jumpPos
+			patchPos := jumpPos + 1
+			put16(cl.code, patchPos, offset)
+		}
+	}
+}
+
+// newLabel allocates an unbound label; bind it later with bindLabel.
+func (cl *compiler) newLabel() *label {
+	l := &label{}
+	cl.labels = append(cl.labels, l)
+	return l
+}
+
+// bindLabel sets the label target to the current code position.
+func (cl *compiler) bindLabel(l *label) {
+	l.targetPos = len(cl.code)
+}
+
+// emit appends a zero-operand instruction and records it as lastOp.
+func (cl *compiler) emit(op opcode) {
+	cl.lastOp = op
+	cl.code = append(cl.code, byte(op))
+}
+
+// emitJump appends a jump instruction with a 2-byte placeholder
+// operand; the real offset is filled in later by linkJumps.
+func (cl *compiler) emitJump(op opcode, l *label) {
+	l.sources = append(l.sources, len(cl.code))
+	cl.emit(op)
+	cl.code = append(cl.code, 0, 0)
+}
+
+// emit8 appends an instruction with a single byte operand.
+func (cl *compiler) emit8(op opcode, arg8 int) {
+	cl.emit(op)
+	cl.code = append(cl.code, byte(arg8))
+}
+
+// emit16 appends an instruction with a 16-bit little-endian operand.
+func (cl *compiler) emit16(op opcode, arg16 int) {
+	cl.emit(op)
+	buf := make([]byte, 2)
+	put16(buf, 0, arg16)
+	cl.code = append(cl.code, buf...)
+}
+
+// errorUnsupportedType builds a compile error for an unsupported type;
+// where describes the context (e.g. a param or result position).
+func (cl *compiler) errorUnsupportedType(e ast.Node, typ types.Type, where string) compileError {
+	return cl.errorf(e, "%s type: %s is not supported, try something simpler", where, typ)
+}
+
+// errorf builds a compile error prefixed with the node's file:line position.
+func (cl *compiler) errorf(n ast.Node, format string, args ...interface{}) compileError {
+	loc := cl.ctx.Fset.Position(n.Pos())
+	message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...))
+	return compileError(message)
+}
+
+// isUncondJump reports whether op unconditionally transfers control
+// (a plain jump or any of the return opcodes).
+func (cl *compiler) isUncondJump(op opcode) bool {
+	switch op {
+	case opJump, opReturnFalse, opReturnTrue, opReturnTop, opReturnIntTop:
+		return true
+	default:
+		return false
+	}
+}
+
+// isSupportedType reports whether values of typ can appear in
+// compiled code. The check is over the underlying type.
+func (cl *compiler) isSupportedType(typ types.Type) bool {
+	switch typ := typ.Underlying().(type) {
+	case *types.Pointer:
+		// 1. Pointers to structs are supported.
+		_, isStruct := typ.Elem().Underlying().(*types.Struct)
+		return isStruct
+
+	case *types.Basic:
+		// 2. Some of the basic types are supported.
+		// TODO: support byte/uint8 and maybe float64.
+		switch typ.Kind() {
+		case types.Bool, types.Int, types.String:
+			return true
+		default:
+			return false
+		}
+
+	case *types.Interface:
+		// 3. Interfaces are supported.
+		return true
+
+	default:
+		return false
+	}
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go
new file mode 100644
index 000000000..e42bbb76a
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go
@@ -0,0 +1,16 @@
+package quasigo
+
+// debugInfo carries optional per-function metadata used only for
+// debugging and disassembly (see disasm).
+type debugInfo struct {
+	funcs map[*Func]funcDebugInfo
+}
+
+// funcDebugInfo stores the source-level names of a function's
+// params and locals, indexed by their slot number.
+type funcDebugInfo struct {
+	paramNames []string
+	localNames []string
+}
+
+// newDebugInfo returns an empty debugInfo.
+func newDebugInfo() *debugInfo {
+	return &debugInfo{
+		funcs: make(map[*Func]funcDebugInfo),
+	}
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go
new file mode 100644
index 000000000..192cf0710
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go
@@ -0,0 +1,74 @@
+package quasigo
+
+import (
+ "fmt"
+ "strings"
+)
+
+// TODO(quasilyte): generate extra opcode info so we can simplify disasm function?
+
+// disasm renders the fn bytecode as human-readable text.
+// The output format is unstable and meant for debugging only
+// (see the exported Disasm wrapper).
+func disasm(env *Env, fn *Func) string {
+	var out strings.Builder
+
+	// Without debug info we can't resolve param/local names.
+	dbg, ok := env.debug.funcs[fn]
+	if !ok {
+		return "<unknown>\n"
+	}
+
+	// Pass 1: assign an Ln label to every jump target.
+	code := fn.code
+	labels := map[int]string{}
+	walkBytecode(code, func(pc int, op opcode) {
+		switch op {
+		case opJumpTrue, opJumpFalse, opJump:
+			offset := decode16(code, pc+1)
+			targetPC := pc + offset
+			if _, ok := labels[targetPC]; !ok {
+				labels[targetPC] = fmt.Sprintf("L%d", len(labels))
+			}
+		}
+	})
+
+	// Pass 2: print every instruction, annotating operands with
+	// native function names, param/local names, constant values
+	// and jump labels where available.
+	walkBytecode(code, func(pc int, op opcode) {
+		if l := labels[pc]; l != "" {
+			fmt.Fprintf(&out, "%s:\n", l)
+		}
+		var arg interface{}
+		var comment string
+		switch op {
+		case opCallNative:
+			id := decode16(code, pc+1)
+			arg = id
+			comment = env.nativeFuncs[id].name
+		case opPushParam, opPushIntParam:
+			index := int(code[pc+1])
+			arg = index
+			comment = dbg.paramNames[index]
+		case opSetLocal, opSetIntLocal, opPushLocal, opPushIntLocal, opIncLocal, opDecLocal:
+			index := int(code[pc+1])
+			arg = index
+			comment = dbg.localNames[index]
+		case opPushConst:
+			arg = int(code[pc+1])
+			comment = fmt.Sprintf("value=%#v", fn.constants[code[pc+1]])
+		case opPushIntConst:
+			arg = int(code[pc+1])
+			comment = fmt.Sprintf("value=%#v", fn.intConstants[code[pc+1]])
+		case opJumpTrue, opJumpFalse, opJump:
+			offset := decode16(code, pc+1)
+			targetPC := pc + offset
+			arg = offset
+			comment = labels[targetPC]
+		}
+
+		if comment != "" {
+			comment = " # " + comment
+		}
+		if arg == nil {
+			fmt.Fprintf(&out, " %s%s\n", op, comment)
+		} else {
+			fmt.Fprintf(&out, " %s %#v%s\n", op, arg, comment)
+		}
+	})
+
+	return out.String()
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go
new file mode 100644
index 000000000..0e2a450b1
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go
@@ -0,0 +1,42 @@
+package quasigo
+
+// funcKey identifies a native or user function inside an Env.
+// For methods, qualifier is the receiver type string; for
+// package-level functions it's the package path (see compileCallExpr).
+type funcKey struct {
+	qualifier string
+	name      string
+}
+
+// String returns the fully qualified function name.
+func (k funcKey) String() string {
+	if k.qualifier != "" {
+		return k.qualifier + "." + k.name
+	}
+	return k.name
+}
+
+// nativeFunc is a Go-implemented function callable from compiled code.
+type nativeFunc struct {
+	mappedFunc func(*ValueStack)
+	name       string // Needed for the readable disasm
+}
+
+// newEnv returns an empty Env with initialized lookup tables.
+func newEnv() *Env {
+	return &Env{
+		nameToNativeFuncID: make(map[funcKey]uint16),
+		nameToFuncID:       make(map[funcKey]uint16),
+
+		debug: newDebugInfo(),
+	}
+}
+
+// addNativeFunc registers a Go-implemented function under key;
+// its id is its position in the nativeFuncs slice.
+func (env *Env) addNativeFunc(key funcKey, f func(*ValueStack)) {
+	id := len(env.nativeFuncs)
+	env.nativeFuncs = append(env.nativeFuncs, nativeFunc{
+		mappedFunc: f,
+		name:       key.String(),
+	})
+	env.nameToNativeFuncID[key] = uint16(id)
+}
+
+// addFunc registers a compiled (user) function under key.
+func (env *Env) addFunc(key funcKey, f *Func) {
+	id := len(env.userFuncs)
+	env.userFuncs = append(env.userFuncs, f)
+	env.nameToFuncID[key] = uint16(id)
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go
new file mode 100644
index 000000000..afc000ea3
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go
@@ -0,0 +1,239 @@
+package quasigo
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// maxFuncLocals caps the number of local variable slots per call
+// (both the object and the int local arrays in eval).
+const maxFuncLocals = 8
+
+// pop2 removes the two top stack elements and returns them.
+//
+// Note that it returns the popped elements in the reverse order
+// to make it easier to map the order in which they were pushed.
+func (s *ValueStack) pop2() (second, top interface{}) {
+	x := s.objects[len(s.objects)-2]
+	y := s.objects[len(s.objects)-1]
+	s.objects = s.objects[:len(s.objects)-2]
+	return x, y
+}
+
+// popInt2 is like pop2, but for the separate int stack.
+func (s *ValueStack) popInt2() (second, top int) {
+	x := s.ints[len(s.ints)-2]
+	y := s.ints[len(s.ints)-1]
+	s.ints = s.ints[:len(s.ints)-2]
+	return x, y
+}
+
+// top returns top of the stack without popping it.
+func (s *ValueStack) top() interface{} { return s.objects[len(s.objects)-1] }
+
+// topInt returns the top of the int stack without popping it.
+func (s *ValueStack) topInt() int { return s.ints[len(s.ints)-1] }
+
+// dup copies the top stack element.
+// Identical to s.Push(s.Top()), but more concise.
+func (s *ValueStack) dup() { s.objects = append(s.objects, s.objects[len(s.objects)-1]) }
+
+// discard drops the top stack element.
+// Identical to s.Pop() without using the result.
+func (s *ValueStack) discard() { s.objects = s.objects[:len(s.objects)-1] }
+
+// eval runs fn's bytecode until one of the return opcodes is reached.
+// Call arguments come in as args, indexed by opPushParam/opPushIntParam
+// operands. Locals live in fixed-size per-call arrays (capped by
+// maxFuncLocals). Int values use a separate int stack (see ValueStack),
+// which is why most operations come in op/opInt pairs.
+func eval(env *EvalEnv, fn *Func, args []interface{}) CallResult {
+	pc := 0
+	code := fn.code
+	stack := env.stack
+	var locals [maxFuncLocals]interface{}
+	var intLocals [maxFuncLocals]int
+
+	for {
+		switch op := opcode(code[pc]); op {
+		case opPushParam:
+			index := code[pc+1]
+			stack.Push(args[index])
+			pc += 2
+		case opPushIntParam:
+			index := code[pc+1]
+			stack.PushInt(args[index].(int))
+			pc += 2
+
+		case opPushLocal:
+			index := code[pc+1]
+			stack.Push(locals[index])
+			pc += 2
+		case opPushIntLocal:
+			index := code[pc+1]
+			stack.PushInt(intLocals[index])
+			pc += 2
+
+		case opSetLocal:
+			index := code[pc+1]
+			locals[index] = stack.Pop()
+			pc += 2
+		case opSetIntLocal:
+			index := code[pc+1]
+			intLocals[index] = stack.PopInt()
+			pc += 2
+
+		case opIncLocal:
+			index := code[pc+1]
+			intLocals[index]++
+			pc += 2
+		case opDecLocal:
+			index := code[pc+1]
+			intLocals[index]--
+			pc += 2
+
+		case opPop:
+			stack.discard()
+			pc++
+		case opDup:
+			stack.dup()
+			pc++
+
+		case opPushConst:
+			id := code[pc+1]
+			stack.Push(fn.constants[id])
+			pc += 2
+		case opPushIntConst:
+			id := code[pc+1]
+			stack.PushInt(fn.intConstants[id])
+			pc += 2
+
+		case opPushTrue:
+			stack.Push(true)
+			pc++
+		case opPushFalse:
+			stack.Push(false)
+			pc++
+
+		case opReturnTrue:
+			return CallResult{value: true}
+		case opReturnFalse:
+			return CallResult{value: false}
+		case opReturnTop:
+			return CallResult{value: stack.top()}
+		case opReturnIntTop:
+			// Int results are returned unboxed via scalarValue
+			// (see CallResult.IntValue).
+			return CallResult{scalarValue: uint64(stack.topInt())}
+
+		case opCallNative:
+			id := decode16(code, pc+1)
+			fn := env.nativeFuncs[id].mappedFunc
+			fn(stack)
+			pc += 3
+
+		// Jump offsets are pc-relative (patched in by linkJumps).
+		case opJump:
+			offset := decode16(code, pc+1)
+			pc += offset
+
+		case opJumpFalse:
+			if !stack.Pop().(bool) {
+				offset := decode16(code, pc+1)
+				pc += offset
+			} else {
+				pc += 3
+			}
+		case opJumpTrue:
+			if stack.Pop().(bool) {
+				offset := decode16(code, pc+1)
+				pc += offset
+			} else {
+				pc += 3
+			}
+
+		case opNot:
+			stack.Push(!stack.Pop().(bool))
+			pc++
+
+		case opConcat:
+			x, y := stack.pop2()
+			stack.Push(x.(string) + y.(string))
+			pc++
+
+		case opAdd:
+			x, y := stack.popInt2()
+			stack.PushInt(x + y)
+			pc++
+
+		case opSub:
+			x, y := stack.popInt2()
+			stack.PushInt(x - y)
+			pc++
+
+		case opEqInt:
+			x, y := stack.popInt2()
+			stack.Push(x == y)
+			pc++
+
+		case opNotEqInt:
+			x, y := stack.popInt2()
+			stack.Push(x != y)
+			pc++
+
+		case opGtInt:
+			x, y := stack.popInt2()
+			stack.Push(x > y)
+			pc++
+
+		case opGtEqInt:
+			x, y := stack.popInt2()
+			stack.Push(x >= y)
+			pc++
+
+		case opLtInt:
+			x, y := stack.popInt2()
+			stack.Push(x < y)
+			pc++
+
+		case opLtEqInt:
+			x, y := stack.popInt2()
+			stack.Push(x <= y)
+			pc++
+
+		case opEqString:
+			x, y := stack.pop2()
+			stack.Push(x.(string) == y.(string))
+			pc++
+
+		case opNotEqString:
+			x, y := stack.pop2()
+			stack.Push(x.(string) != y.(string))
+			pc++
+
+		case opIsNil:
+			// Covers both an untyped nil and a typed nil pointer
+			// boxed into an interface value.
+			x := stack.Pop()
+			stack.Push(x == nil || reflect.ValueOf(x).IsNil())
+			pc++
+
+		case opIsNotNil:
+			x := stack.Pop()
+			stack.Push(x != nil && !reflect.ValueOf(x).IsNil())
+			pc++
+
+		case opStringSlice:
+			to := stack.PopInt()
+			from := stack.PopInt()
+			s := stack.Pop().(string)
+			stack.Push(s[from:to])
+			pc++
+
+		case opStringSliceFrom:
+			from := stack.PopInt()
+			s := stack.Pop().(string)
+			stack.Push(s[from:])
+			pc++
+
+		case opStringSliceTo:
+			to := stack.PopInt()
+			s := stack.Pop().(string)
+			stack.Push(s[:to])
+			pc++
+
+		case opStringLen:
+			stack.PushInt(len(stack.Pop().(string)))
+			pc++
+
+		default:
+			panic(fmt.Sprintf("malformed bytecode: unexpected %s found", op))
+		}
+	}
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go
new file mode 100644
index 000000000..fde48b7cd
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go
@@ -0,0 +1,184 @@
+// +build main
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "strings"
+ "text/template"
+)
+
+var opcodePrototypes = []opcodeProto{
+ {"Pop", "op", "(value) -> ()"},
+ {"Dup", "op", "(x) -> (x x)"},
+
+ {"PushParam", "op index:u8", "() -> (value)"},
+ {"PushIntParam", "op index:u8", "() -> (value:int)"},
+ {"PushLocal", "op index:u8", "() -> (value)"},
+ {"PushIntLocal", "op index:u8", "() -> (value:int)"},
+ {"PushFalse", "op", "() -> (false)"},
+ {"PushTrue", "op", "() -> (true)"},
+ {"PushConst", "op constid:u8", "() -> (const)"},
+ {"PushIntConst", "op constid:u8", "() -> (const:int)"},
+
+ {"SetLocal", "op index:u8", "(value) -> ()"},
+ {"SetIntLocal", "op index:u8", "(value:int) -> ()"},
+ {"IncLocal", "op index:u8", stackUnchanged},
+ {"DecLocal", "op index:u8", stackUnchanged},
+
+ {"ReturnTop", "op", "(value) -> (value)"},
+ {"ReturnIntTop", "op", "(value) -> (value)"},
+ {"ReturnFalse", "op", stackUnchanged},
+ {"ReturnTrue", "op", stackUnchanged},
+
+ {"Jump", "op offset:i16", stackUnchanged},
+ {"JumpFalse", "op offset:i16", "(cond:bool) -> ()"},
+ {"JumpTrue", "op offset:i16", "(cond:bool) -> ()"},
+
+ {"CallNative", "op funcid:u16", "(args...) -> (results...)"},
+
+ {"IsNil", "op", "(value) -> (result:bool)"},
+ {"IsNotNil", "op", "(value) -> (result:bool)"},
+
+ {"Not", "op", "(value:bool) -> (result:bool)"},
+
+ {"EqInt", "op", "(x:int y:int) -> (result:bool)"},
+ {"NotEqInt", "op", "(x:int y:int) -> (result:bool)"},
+ {"GtInt", "op", "(x:int y:int) -> (result:bool)"},
+ {"GtEqInt", "op", "(x:int y:int) -> (result:bool)"},
+ {"LtInt", "op", "(x:int y:int) -> (result:bool)"},
+ {"LtEqInt", "op", "(x:int y:int) -> (result:bool)"},
+
+ {"EqString", "op", "(x:string y:string) -> (result:bool)"},
+ {"NotEqString", "op", "(x:string y:string) -> (result:bool)"},
+
+ {"Concat", "op", "(x:string y:string) -> (result:string)"},
+ {"Add", "op", "(x:int y:int) -> (result:int)"},
+ {"Sub", "op", "(x:int y:int) -> (result:int)"},
+
+ {"StringSlice", "op", "(s:string from:int to:int) -> (result:string)"},
+ {"StringSliceFrom", "op", "(s:string from:int) -> (result:string)"},
+ {"StringSliceTo", "op", "(s:string to:int) -> (result:string)"},
+ {"StringLen", "op", "(s:string) -> (result:int)"},
+}
+
+type opcodeProto struct {
+ name string
+ enc string
+ stack string
+}
+
+type encodingInfo struct {
+ width int
+ parts int
+}
+
+type opcodeInfo struct {
+ Opcode byte
+ Name string
+ Enc string
+ EncString string
+ Stack string
+ Width int
+}
+
+const stackUnchanged = ""
+
+var fileTemplate = template.Must(template.New("opcodes.go").Parse(`// Code generated "gen_opcodes.go"; DO NOT EDIT.
+
+package quasigo
+
+//go:generate stringer -type=opcode -trimprefix=op
+type opcode byte
+
+const (
+ opInvalid opcode = 0
+{{ range .Opcodes }}
+ // Encoding: {{.EncString}}
+ // Stack effect: {{ if .Stack}}{{.Stack}}{{else}}unchanged{{end}}
+ op{{ .Name }} opcode = {{.Opcode}}
+{{ end -}}
+)
+
+type opcodeInfo struct {
+ width int
+}
+
+var opcodeInfoTable = [256]opcodeInfo{
+ opInvalid: {width: 1},
+
+{{ range .Opcodes -}}
+ op{{.Name}}: {width: {{.Width}}},
+{{ end }}
+}
+`))
+
+// main renders opcodes.gen.go from the opcodePrototypes table.
+// Opcode numbers are assigned sequentially starting at 1
+// (0 is reserved for opInvalid).
+func main() {
+	opcodes := make([]opcodeInfo, len(opcodePrototypes))
+	for i, proto := range opcodePrototypes {
+		opcode := byte(i + 1)
+		encInfo := decodeEnc(proto.enc)
+		var encString string
+		if encInfo.parts == 1 {
+			encString = fmt.Sprintf("0x%02x (width=%d)", opcode, encInfo.width)
+		} else {
+			encString = fmt.Sprintf("0x%02x %s (width=%d)",
+				opcode, strings.TrimPrefix(proto.enc, "op "), encInfo.width)
+		}
+
+		opcodes[i] = opcodeInfo{
+			Opcode:    opcode,
+			Name:      proto.name,
+			Enc:       proto.enc,
+			EncString: encString,
+			Stack:     proto.stack,
+			Width:     encInfo.width,
+		}
+	}
+
+	var buf bytes.Buffer
+	err := fileTemplate.Execute(&buf, map[string]interface{}{
+		"Opcodes": opcodes,
+	})
+	if err != nil {
+		log.Panicf("execute template: %v", err)
+	}
+	writeFile("opcodes.gen.go", buf.Bytes())
+}
+
+// decodeEnc computes the instruction byte width and the number of
+// encoding parts from a prototype encoding string like "op index:u8".
+// The leading "op" part counts as one byte; an argument without an
+// explicit type suffix is assumed to be u8.
+func decodeEnc(enc string) encodingInfo {
+	fields := strings.Fields(enc)
+	width := 0
+	for _, f := range fields {
+		parts := strings.Split(f, ":")
+		var typ string
+		if len(parts) == 2 {
+			typ = parts[1]
+		} else {
+			typ = "u8"
+		}
+		switch typ {
+		case "i8", "u8":
+			width++
+		case "i16", "u16":
+			width += 2
+		default:
+			panic(fmt.Sprintf("unknown op argument type: %s", typ))
+		}
+	}
+	return encodingInfo{width: width, parts: len(fields)}
+}
+
+// writeFile gofmt-formats data and writes it to filename,
+// panicking on any failure (acceptable for a code generator).
+func writeFile(filename string, data []byte) {
+	pretty, err := format.Source(data)
+	if err != nil {
+		log.Panicf("gofmt: %v", err)
+	}
+	if err := ioutil.WriteFile(filename, pretty, 0666); err != nil {
+		log.Panicf("write %s: %v", filename, err)
+	}
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go
new file mode 100644
index 000000000..27dfc1f67
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go
@@ -0,0 +1,63 @@
+// Code generated by "stringer -type=opcode -trimprefix=op"; DO NOT EDIT.
+
+package quasigo
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[opInvalid-0]
+ _ = x[opPop-1]
+ _ = x[opDup-2]
+ _ = x[opPushParam-3]
+ _ = x[opPushIntParam-4]
+ _ = x[opPushLocal-5]
+ _ = x[opPushIntLocal-6]
+ _ = x[opPushFalse-7]
+ _ = x[opPushTrue-8]
+ _ = x[opPushConst-9]
+ _ = x[opPushIntConst-10]
+ _ = x[opSetLocal-11]
+ _ = x[opSetIntLocal-12]
+ _ = x[opIncLocal-13]
+ _ = x[opDecLocal-14]
+ _ = x[opReturnTop-15]
+ _ = x[opReturnIntTop-16]
+ _ = x[opReturnFalse-17]
+ _ = x[opReturnTrue-18]
+ _ = x[opJump-19]
+ _ = x[opJumpFalse-20]
+ _ = x[opJumpTrue-21]
+ _ = x[opCallNative-22]
+ _ = x[opIsNil-23]
+ _ = x[opIsNotNil-24]
+ _ = x[opNot-25]
+ _ = x[opEqInt-26]
+ _ = x[opNotEqInt-27]
+ _ = x[opGtInt-28]
+ _ = x[opGtEqInt-29]
+ _ = x[opLtInt-30]
+ _ = x[opLtEqInt-31]
+ _ = x[opEqString-32]
+ _ = x[opNotEqString-33]
+ _ = x[opConcat-34]
+ _ = x[opAdd-35]
+ _ = x[opSub-36]
+ _ = x[opStringSlice-37]
+ _ = x[opStringSliceFrom-38]
+ _ = x[opStringSliceTo-39]
+ _ = x[opStringLen-40]
+}
+
+const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueJumpJumpFalseJumpTrueCallNativeIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen"
+
+var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 101, 112, 120, 128, 137, 149, 160, 170, 174, 183, 191, 201, 206, 214, 217, 222, 230, 235, 242, 247, 254, 262, 273, 279, 282, 285, 296, 311, 324, 333}
+
+func (i opcode) String() string {
+ if i >= opcode(len(_opcode_index)-1) {
+ return "opcode(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _opcode_name[_opcode_index[i]:_opcode_index[i+1]]
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go
new file mode 100644
index 000000000..268b42a1e
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go
@@ -0,0 +1,219 @@
+// Code generated "gen_opcodes.go"; DO NOT EDIT.
+
+package quasigo
+
+//go:generate stringer -type=opcode -trimprefix=op
+type opcode byte
+
+const (
+ opInvalid opcode = 0
+
+ // Encoding: 0x01 (width=1)
+ // Stack effect: (value) -> ()
+ opPop opcode = 1
+
+ // Encoding: 0x02 (width=1)
+ // Stack effect: (x) -> (x x)
+ opDup opcode = 2
+
+ // Encoding: 0x03 index:u8 (width=2)
+ // Stack effect: () -> (value)
+ opPushParam opcode = 3
+
+ // Encoding: 0x04 index:u8 (width=2)
+ // Stack effect: () -> (value:int)
+ opPushIntParam opcode = 4
+
+ // Encoding: 0x05 index:u8 (width=2)
+ // Stack effect: () -> (value)
+ opPushLocal opcode = 5
+
+ // Encoding: 0x06 index:u8 (width=2)
+ // Stack effect: () -> (value:int)
+ opPushIntLocal opcode = 6
+
+ // Encoding: 0x07 (width=1)
+ // Stack effect: () -> (false)
+ opPushFalse opcode = 7
+
+ // Encoding: 0x08 (width=1)
+ // Stack effect: () -> (true)
+ opPushTrue opcode = 8
+
+ // Encoding: 0x09 constid:u8 (width=2)
+ // Stack effect: () -> (const)
+ opPushConst opcode = 9
+
+ // Encoding: 0x0a constid:u8 (width=2)
+ // Stack effect: () -> (const:int)
+ opPushIntConst opcode = 10
+
+ // Encoding: 0x0b index:u8 (width=2)
+ // Stack effect: (value) -> ()
+ opSetLocal opcode = 11
+
+ // Encoding: 0x0c index:u8 (width=2)
+ // Stack effect: (value:int) -> ()
+ opSetIntLocal opcode = 12
+
+ // Encoding: 0x0d index:u8 (width=2)
+ // Stack effect: unchanged
+ opIncLocal opcode = 13
+
+ // Encoding: 0x0e index:u8 (width=2)
+ // Stack effect: unchanged
+ opDecLocal opcode = 14
+
+ // Encoding: 0x0f (width=1)
+ // Stack effect: (value) -> (value)
+ opReturnTop opcode = 15
+
+ // Encoding: 0x10 (width=1)
+ // Stack effect: (value) -> (value)
+ opReturnIntTop opcode = 16
+
+ // Encoding: 0x11 (width=1)
+ // Stack effect: unchanged
+ opReturnFalse opcode = 17
+
+ // Encoding: 0x12 (width=1)
+ // Stack effect: unchanged
+ opReturnTrue opcode = 18
+
+ // Encoding: 0x13 offset:i16 (width=3)
+ // Stack effect: unchanged
+ opJump opcode = 19
+
+ // Encoding: 0x14 offset:i16 (width=3)
+ // Stack effect: (cond:bool) -> ()
+ opJumpFalse opcode = 20
+
+ // Encoding: 0x15 offset:i16 (width=3)
+ // Stack effect: (cond:bool) -> ()
+ opJumpTrue opcode = 21
+
+ // Encoding: 0x16 funcid:u16 (width=3)
+ // Stack effect: (args...) -> (results...)
+ opCallNative opcode = 22
+
+ // Encoding: 0x17 (width=1)
+ // Stack effect: (value) -> (result:bool)
+ opIsNil opcode = 23
+
+ // Encoding: 0x18 (width=1)
+ // Stack effect: (value) -> (result:bool)
+ opIsNotNil opcode = 24
+
+ // Encoding: 0x19 (width=1)
+ // Stack effect: (value:bool) -> (result:bool)
+ opNot opcode = 25
+
+ // Encoding: 0x1a (width=1)
+ // Stack effect: (x:int y:int) -> (result:bool)
+ opEqInt opcode = 26
+
+ // Encoding: 0x1b (width=1)
+ // Stack effect: (x:int y:int) -> (result:bool)
+ opNotEqInt opcode = 27
+
+ // Encoding: 0x1c (width=1)
+ // Stack effect: (x:int y:int) -> (result:bool)
+ opGtInt opcode = 28
+
+ // Encoding: 0x1d (width=1)
+ // Stack effect: (x:int y:int) -> (result:bool)
+ opGtEqInt opcode = 29
+
+ // Encoding: 0x1e (width=1)
+ // Stack effect: (x:int y:int) -> (result:bool)
+ opLtInt opcode = 30
+
+ // Encoding: 0x1f (width=1)
+ // Stack effect: (x:int y:int) -> (result:bool)
+ opLtEqInt opcode = 31
+
+ // Encoding: 0x20 (width=1)
+ // Stack effect: (x:string y:string) -> (result:bool)
+ opEqString opcode = 32
+
+ // Encoding: 0x21 (width=1)
+ // Stack effect: (x:string y:string) -> (result:bool)
+ opNotEqString opcode = 33
+
+ // Encoding: 0x22 (width=1)
+ // Stack effect: (x:string y:string) -> (result:string)
+ opConcat opcode = 34
+
+ // Encoding: 0x23 (width=1)
+ // Stack effect: (x:int y:int) -> (result:int)
+ opAdd opcode = 35
+
+ // Encoding: 0x24 (width=1)
+ // Stack effect: (x:int y:int) -> (result:int)
+ opSub opcode = 36
+
+ // Encoding: 0x25 (width=1)
+ // Stack effect: (s:string from:int to:int) -> (result:string)
+ opStringSlice opcode = 37
+
+ // Encoding: 0x26 (width=1)
+ // Stack effect: (s:string from:int) -> (result:string)
+ opStringSliceFrom opcode = 38
+
+ // Encoding: 0x27 (width=1)
+ // Stack effect: (s:string to:int) -> (result:string)
+ opStringSliceTo opcode = 39
+
+ // Encoding: 0x28 (width=1)
+ // Stack effect: (s:string) -> (result:int)
+ opStringLen opcode = 40
+)
+
+type opcodeInfo struct {
+ width int
+}
+
+var opcodeInfoTable = [256]opcodeInfo{
+ opInvalid: {width: 1},
+
+ opPop: {width: 1},
+ opDup: {width: 1},
+ opPushParam: {width: 2},
+ opPushIntParam: {width: 2},
+ opPushLocal: {width: 2},
+ opPushIntLocal: {width: 2},
+ opPushFalse: {width: 1},
+ opPushTrue: {width: 1},
+ opPushConst: {width: 2},
+ opPushIntConst: {width: 2},
+ opSetLocal: {width: 2},
+ opSetIntLocal: {width: 2},
+ opIncLocal: {width: 2},
+ opDecLocal: {width: 2},
+ opReturnTop: {width: 1},
+ opReturnIntTop: {width: 1},
+ opReturnFalse: {width: 1},
+ opReturnTrue: {width: 1},
+ opJump: {width: 3},
+ opJumpFalse: {width: 3},
+ opJumpTrue: {width: 3},
+ opCallNative: {width: 3},
+ opIsNil: {width: 1},
+ opIsNotNil: {width: 1},
+ opNot: {width: 1},
+ opEqInt: {width: 1},
+ opNotEqInt: {width: 1},
+ opGtInt: {width: 1},
+ opGtEqInt: {width: 1},
+ opLtInt: {width: 1},
+ opLtEqInt: {width: 1},
+ opEqString: {width: 1},
+ opNotEqString: {width: 1},
+ opConcat: {width: 1},
+ opAdd: {width: 1},
+ opSub: {width: 1},
+ opStringSlice: {width: 1},
+ opStringSliceFrom: {width: 1},
+ opStringSliceTo: {width: 1},
+ opStringLen: {width: 1},
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go
new file mode 100644
index 000000000..7d457538d
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go
@@ -0,0 +1,165 @@
+// Package quasigo implements a Go subset compiler and interpreter.
+//
+// The implementation details are not part of the contract of this package.
+package quasigo
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// TODO(quasilyte): document what is thread-safe and what not.
+// TODO(quasilyte): add a readme.
+
+// Env is used to hold both compilation and evaluation data.
+type Env struct {
+ // TODO(quasilyte): store both native and user func ids in one map?
+
+ nativeFuncs []nativeFunc
+ nameToNativeFuncID map[funcKey]uint16
+
+ userFuncs []*Func
+ nameToFuncID map[funcKey]uint16
+
+ // debug contains all information that is only needed
+ // for better debugging and compiled code introspection.
+ // Right now it's always enabled, but we may allow stripping it later.
+ debug *debugInfo
+}
+
+// EvalEnv is a goroutine-local handle for Env.
+// To get one, use Env.GetEvalEnv() method.
+type EvalEnv struct {
+ nativeFuncs []nativeFunc
+ userFuncs []*Func
+
+ stack *ValueStack
+}
+
+// NewEnv creates a new empty environment.
+func NewEnv() *Env {
+ return newEnv()
+}
+
+// GetEvalEnv creates a new goroutine-local handle of env.
+func (env *Env) GetEvalEnv() *EvalEnv {
+ return &EvalEnv{
+ nativeFuncs: env.nativeFuncs,
+ userFuncs: env.userFuncs,
+ stack: &ValueStack{
+ objects: make([]interface{}, 0, 32),
+ ints: make([]int, 0, 16),
+ },
+ }
+}
+
+// AddNativeMethod binds `$typeName.$methodName` symbol with f.
+// A typeName should be fully qualified, like `github.com/user/pkgname.TypeName`.
+// If the method is defined only on the pointer type, the typeName should start with `*`.
+func (env *Env) AddNativeMethod(typeName, methodName string, f func(*ValueStack)) {
+	env.addNativeFunc(funcKey{qualifier: typeName, name: methodName}, f)
+}
+
+// AddNativeFunc binds `$pkgPath.$funcName` symbol with f.
+// A pkgPath should be a full package path in which funcName is defined.
+func (env *Env) AddNativeFunc(pkgPath, funcName string, f func(*ValueStack)) {
+	env.addNativeFunc(funcKey{qualifier: pkgPath, name: funcName}, f)
+}
+
+// AddFunc binds `$pkgPath.$funcName` symbol with f.
+func (env *Env) AddFunc(pkgPath, funcName string, f *Func) {
+	env.addFunc(funcKey{qualifier: pkgPath, name: funcName}, f)
+}
+
+// GetFunc finds previously bound function searching for the `$pkgPath.$funcName` symbol.
+//
+// NOTE(review): an unknown key yields id 0 and therefore returns the
+// first registered function (or panics if none were added); only
+// query names previously registered with AddFunc.
+func (env *Env) GetFunc(pkgPath, funcName string) *Func {
+	id := env.nameToFuncID[funcKey{qualifier: pkgPath, name: funcName}]
+	return env.userFuncs[id]
+}
+
+// CompileContext is used to provide necessary data to the compiler.
+type CompileContext struct {
+ // Env is shared environment that should be used for all functions
+ // being compiled; then it should be used to execute these functions.
+ Env *Env
+
+ Types *types.Info
+ Fset *token.FileSet
+}
+
+// Compile prepares an executable version of fn.
+func Compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) {
+ return compile(ctx, fn)
+}
+
+// Call invokes a given function with provided arguments.
+// Both shared value stacks are reset before evaluation, so any
+// leftovers from a previous Call on the same EvalEnv are discarded.
+func Call(env *EvalEnv, fn *Func, args ...interface{}) CallResult {
+	env.stack.objects = env.stack.objects[:0]
+	env.stack.ints = env.stack.ints[:0]
+	return eval(env, fn, args)
+}
+
+// CallResult is a return value of Call function.
+// For most functions, Value() should be called to get the actual result.
+// For int-typed functions, IntValue() should be used instead.
+type CallResult struct {
+ value interface{}
+ scalarValue uint64
+}
+
+// Value unboxes an actual call return value.
+// For int results, use IntValue().
+func (res CallResult) Value() interface{} { return res.value }
+
+// IntValue unboxes an actual call return value.
+func (res CallResult) IntValue() int { return int(res.scalarValue) }
+
+// Disasm returns the compiled function disassembly text.
+// This output is not guaranteed to be stable between versions
+// and should be used only for debugging purposes.
+func Disasm(env *Env, fn *Func) string {
+ return disasm(env, fn)
+}
+
+// Func is a compiled function that is ready to be executed.
+type Func struct {
+ code []byte
+
+ constants []interface{}
+ intConstants []int
+}
+
+// ValueStack is used to manipulate runtime values during the evaluation.
+// Function arguments are pushed to the stack.
+// Function results are returned via stack as well.
+//
+// For the sake of efficiency, it stores different types separately.
+// If int was pushed with PushInt(), it should be retrieved by PopInt().
+// It's a bad idea to do a Push() and then PopInt() and vice-versa.
+type ValueStack struct {
+ objects []interface{}
+ ints []int
+}
+
+// Pop removes the top stack element and returns it.
+// Important: for int-typed values, use PopInt.
+func (s *ValueStack) Pop() interface{} {
+ x := s.objects[len(s.objects)-1]
+ s.objects = s.objects[:len(s.objects)-1]
+ return x
+}
+
+// PopInt removes the top stack element and returns it.
+func (s *ValueStack) PopInt() int {
+ x := s.ints[len(s.ints)-1]
+ s.ints = s.ints[:len(s.ints)-1]
+ return x
+}
+
+// Push adds x to the stack.
+// Important: for int-typed values, use PushInt.
+func (s *ValueStack) Push(x interface{}) { s.objects = append(s.objects, x) }
+
+// PushInt adds x to the stack.
+func (s *ValueStack) PushInt(x int) { s.ints = append(s.ints, x) }
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go
new file mode 100644
index 000000000..a5c3676a4
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go
@@ -0,0 +1,60 @@
+package quasigo
+
+import (
+ "encoding/binary"
+ "go/ast"
+ "go/types"
+)
+
+func pickOp(cond bool, ifTrue, otherwise opcode) opcode {
+ if cond {
+ return ifTrue
+ }
+ return otherwise
+}
+
+func put16(code []byte, pos, value int) {
+ binary.LittleEndian.PutUint16(code[pos:], uint16(value))
+}
+
+func decode16(code []byte, pos int) int {
+ return int(int16(binary.LittleEndian.Uint16(code[pos:])))
+}
+
+func typeIsInt(typ types.Type) bool {
+ basic, ok := typ.Underlying().(*types.Basic)
+ if !ok {
+ return false
+ }
+ switch basic.Kind() {
+ case types.Int, types.UntypedInt:
+ return true
+ default:
+ return false
+ }
+}
+
+func typeIsString(typ types.Type) bool {
+ basic, ok := typ.Underlying().(*types.Basic)
+ if !ok {
+ return false
+ }
+ return basic.Info()&types.IsString != 0
+}
+
+func walkBytecode(code []byte, fn func(pc int, op opcode)) {
+ pc := 0
+ for pc < len(code) {
+ op := opcode(code[pc])
+ fn(pc, op)
+ pc += opcodeInfoTable[op].width
+ }
+}
+
+func identName(n ast.Expr) string {
+ id, ok := n.(*ast.Ident)
+ if ok {
+ return id.Name
+ }
+ return ""
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go
index f6032c862..ba23861a2 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go
@@ -7,7 +7,61 @@ import (
"io"
)
-type Context struct {
+// Engine is the main ruleguard package API object.
+//
+// First, load some ruleguard files with Load() to build a rule set.
+// Then use Run() to execute the rules.
+//
+// It's advised to have only 1 engine per application as it does a lot of caching.
+// The Run() method is synchronized, so it can be used concurrently.
+//
+// An Engine must be created with NewEngine() function.
+type Engine struct {
+ impl *engine
+}
+
+// NewEngine creates an engine with empty rule set.
+func NewEngine() *Engine {
+ return &Engine{impl: newEngine()}
+}
+
+// Load reads a ruleguard file from r and adds it to the engine rule set.
+//
+// Load() is not thread-safe, especially if used concurrently with Run() method.
+// It's advised to Load() all ruleguard files under a critical section (like sync.Once)
+// and then use Run() to execute all of them.
+func (e *Engine) Load(ctx *ParseContext, filename string, r io.Reader) error {
+ return e.impl.Load(ctx, filename, r)
+}
+
+// Run executes all loaded rules on a given file.
+// Matched rules invoke `RunContext.Report()` method.
+//
+// Run() is thread-safe, unless used in parallel with Load(),
+// which modifies the engine state.
+func (e *Engine) Run(ctx *RunContext, f *ast.File) error {
+ return e.impl.Run(ctx, f)
+}
+
+type ParseContext struct {
+ DebugFilter string
+ DebugImports bool
+ DebugPrint func(string)
+
+ // GroupFilter is called for every rule group being parsed.
+ // If function returns false, that group will not be included
+ // in the resulting rules set.
+ // Nil filter accepts all rule groups.
+ GroupFilter func(string) bool
+
+ Fset *token.FileSet
+}
+
+type RunContext struct {
+ Debug string
+ DebugImports bool
+ DebugPrint func(string)
+
Types *types.Info
Sizes types.Sizes
Fset *token.FileSet
@@ -21,25 +75,13 @@ type Suggestion struct {
Replacement []byte
}
-func ParseRules(filename string, fset *token.FileSet, r io.Reader) (*GoRuleSet, error) {
- p := newRulesParser()
- return p.ParseFile(filename, fset, r)
-}
-
-func RunRules(ctx *Context, f *ast.File, rules *GoRuleSet) error {
- return newRulesRunner(ctx, rules).run(f)
-}
-
type GoRuleInfo struct {
// Filename is a file that defined this rule.
Filename string
-}
-type GoRuleSet struct {
- universal *scopedGoRuleSet
- local *scopedGoRuleSet
-}
+ // Line is a line inside a file that defined this rule.
+ Line int
-func MergeRuleSets(toMerge []*GoRuleSet) *GoRuleSet {
- return mergeRuleSets(toMerge)
+ // Group is a function name that contained this rule.
+ Group string
}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go
index 971e92aed..2048ce3e7 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go
@@ -2,34 +2,62 @@ package ruleguard
import (
"bytes"
+ "fmt"
"go/ast"
"go/printer"
"io/ioutil"
+ "path/filepath"
+ "sort"
+ "strconv"
"strings"
"github.com/quasilyte/go-ruleguard/internal/mvdan.cc/gogrep"
+ "github.com/quasilyte/go-ruleguard/ruleguard/goutil"
)
type rulesRunner struct {
- ctx *Context
- rules *GoRuleSet
+ state *engineState
+
+ ctx *RunContext
+ rules *goRuleSet
+
+ importer *goImporter
filename string
src []byte
+
+ // A slice that is used to do a nodes keys sorting in renderMessage().
+ sortScratch []string
+
+ filterParams filterParams
}
-func newRulesRunner(ctx *Context, rules *GoRuleSet) *rulesRunner {
- return &rulesRunner{
- ctx: ctx,
- rules: rules,
+func newRulesRunner(ctx *RunContext, state *engineState, rules *goRuleSet) *rulesRunner {
+ importer := newGoImporter(state, goImporterConfig{
+ fset: ctx.Fset,
+ debugImports: ctx.DebugImports,
+ debugPrint: ctx.DebugPrint,
+ })
+ rr := &rulesRunner{
+ ctx: ctx,
+ importer: importer,
+ rules: rules,
+ filterParams: filterParams{
+ env: state.env.GetEvalEnv(),
+ importer: importer,
+ ctx: ctx,
+ },
+ sortScratch: make([]string, 0, 8),
}
+ rr.filterParams.nodeText = rr.nodeText
+ return rr
}
func (rr *rulesRunner) nodeText(n ast.Node) []byte {
from := rr.ctx.Fset.Position(n.Pos()).Offset
to := rr.ctx.Fset.Position(n.End()).Offset
src := rr.fileBytes()
- if (from >= 0 && int(from) < len(src)) && (to >= 0 && int(to) < len(src)) {
+ if (from >= 0 && from < len(src)) && (to >= 0 && to < len(src)) {
return src[from:to]
}
// Fallback to the printer.
@@ -61,6 +89,8 @@ func (rr *rulesRunner) run(f *ast.File) error {
// TODO(quasilyte): run local rules as well.
rr.filename = rr.ctx.Fset.Position(f.Pos()).Filename
+ rr.filterParams.filename = rr.filename
+ rr.collectImports(f)
for _, rule := range rr.rules.universal.uncategorized {
rule.pat.Match(f, func(m gogrep.MatchData) {
@@ -87,65 +117,61 @@ func (rr *rulesRunner) run(f *ast.File) error {
return nil
}
-func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool {
+func (rr *rulesRunner) reject(rule goRule, reason string, m gogrep.MatchData) {
+ if rule.group != rr.ctx.Debug {
+ return // This rule is not being debugged
+ }
+
+ pos := rr.ctx.Fset.Position(m.Node.Pos())
+ rr.ctx.DebugPrint(fmt.Sprintf("%s:%d: [%s:%d] rejected by %s",
+ pos.Filename, pos.Line, filepath.Base(rule.filename), rule.line, reason))
+
+ type namedNode struct {
+ name string
+ node ast.Node
+ }
+ values := make([]namedNode, 0, len(m.Values))
for name, node := range m.Values {
- expr, ok := node.(ast.Expr)
- if !ok {
- continue
- }
- filter, ok := rule.filters[name]
- if !ok {
+ values = append(values, namedNode{name: name, node: node})
+ }
+ sort.Slice(values, func(i, j int) bool {
+ return values[i].name < values[j].name
+ })
+
+ for _, v := range values {
+ name := v.name
+ node := v.node
+ var expr ast.Expr
+ switch node := node.(type) {
+ case ast.Expr:
+ expr = node
+ case *ast.ExprStmt:
+ expr = node.X
+ default:
continue
}
- if filter.typePred != nil {
- typ := rr.ctx.Types.TypeOf(expr)
- q := typeQuery{x: typ, ctx: rr.ctx}
- if !filter.typePred(q) {
- return false
- }
- }
- if filter.textPred != nil {
- if !filter.textPred(string(rr.nodeText(expr))) {
- return false
- }
- }
- switch filter.addressable {
- case bool3true:
- if !isAddressable(rr.ctx.Types, expr) {
- return false
- }
- case bool3false:
- if isAddressable(rr.ctx.Types, expr) {
- return false
- }
- }
- switch filter.pure {
- case bool3true:
- if !isPure(rr.ctx.Types, expr) {
- return false
- }
- case bool3false:
- if isPure(rr.ctx.Types, expr) {
- return false
- }
- }
- switch filter.constant {
- case bool3true:
- if !isConstant(rr.ctx.Types, expr) {
- return false
- }
- case bool3false:
- if isConstant(rr.ctx.Types, expr) {
- return false
- }
+
+ typ := rr.ctx.Types.TypeOf(expr)
+ typeString := "<unknown>"
+ if typ != nil {
+ typeString = typ.String()
}
+ s := strings.ReplaceAll(goutil.SprintNode(rr.ctx.Fset, expr), "\n", `\n`)
+ rr.ctx.DebugPrint(fmt.Sprintf(" $%s %s: %s", name, typeString, s))
}
+}
- prefix := ""
- if rule.severity != "" {
- prefix = rule.severity + ": "
+func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool {
+ if rule.filter.fn != nil {
+ rr.filterParams.values = m.Values
+ filterResult := rule.filter.fn(&rr.filterParams)
+ if !filterResult.Matched() {
+ rr.reject(rule, filterResult.RejectReason(), m)
+ return false
+ }
}
- message := prefix + rr.renderMessage(rule.msg, m.Node, m.Values, true)
+
+ message := rr.renderMessage(rule.msg, m.Node, m.Values, true)
node := m.Node
if rule.location != "" {
node = m.Values[rule.location]
@@ -159,12 +185,25 @@ func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool {
}
}
info := GoRuleInfo{
+ Group: rule.group,
Filename: rule.filename,
+ Line: rule.line,
}
rr.ctx.Report(info, node, message, suggestion)
return true
}
+func (rr *rulesRunner) collectImports(f *ast.File) {
+ rr.filterParams.imports = make(map[string]struct{}, len(f.Imports))
+ for _, spec := range f.Imports {
+ s, err := strconv.Unquote(spec.Path.Value)
+ if err != nil {
+ continue
+ }
+ rr.filterParams.imports[s] = struct{}{}
+ }
+}
+
func (rr *rulesRunner) renderMessage(msg string, n ast.Node, nodes map[string]ast.Node, truncate bool) string {
var buf strings.Builder
if strings.Contains(msg, "$$") {
@@ -174,7 +213,17 @@ func (rr *rulesRunner) renderMessage(msg string, n ast.Node, nodes map[string]as
if len(nodes) == 0 {
return msg
}
- for name, n := range nodes {
+
+ rr.sortScratch = rr.sortScratch[:0]
+ for name := range nodes {
+ rr.sortScratch = append(rr.sortScratch, name)
+ }
+ sort.Slice(rr.sortScratch, func(i, j int) bool {
+ return len(rr.sortScratch[i]) > len(rr.sortScratch[j])
+ })
+
+ for _, name := range rr.sortScratch {
+ n := nodes[name]
key := "$" + name
if !strings.Contains(msg, key) {
continue
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go
new file mode 100644
index 000000000..1d739819d
--- /dev/null
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go
@@ -0,0 +1,34 @@
+// Code generated by "stringer -type=patternOp"; DO NOT EDIT.
+
+package typematch
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[opBuiltinType-0]
+ _ = x[opPointer-1]
+ _ = x[opVar-2]
+ _ = x[opVarSeq-3]
+ _ = x[opSlice-4]
+ _ = x[opArray-5]
+ _ = x[opMap-6]
+ _ = x[opChan-7]
+ _ = x[opFunc-8]
+ _ = x[opStructNoSeq-9]
+ _ = x[opStruct-10]
+ _ = x[opNamed-11]
+}
+
+const _patternOp_name = "opBuiltinTypeopPointeropVaropVarSeqopSliceopArrayopMapopChanopFuncopStructNoSeqopStructopNamed"
+
+var _patternOp_index = [...]uint8{0, 13, 22, 27, 35, 42, 49, 54, 60, 66, 79, 87, 94}
+
+func (i patternOp) String() string {
+ if i < 0 || i >= patternOp(len(_patternOp_index)-1) {
+ return "patternOp(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _patternOp_name[_patternOp_index[i]:_patternOp_index[i+1]]
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go
index 5e14880cd..19391ecd4 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go
@@ -8,18 +8,25 @@ import (
"go/types"
"strconv"
"strings"
+
+ "github.com/quasilyte/go-ruleguard/internal/xtypes"
)
+//go:generate stringer -type=patternOp
type patternOp int
const (
opBuiltinType patternOp = iota
opPointer
opVar
+ opVarSeq
opSlice
opArray
opMap
opChan
+ opFunc
+ opStructNoSeq
+ opStruct
opNamed
)
@@ -36,6 +43,17 @@ type pattern struct {
subs []*pattern
}
+func (pat pattern) String() string {
+ if len(pat.subs) == 0 {
+ return fmt.Sprintf("<%s %#v>", pat.op, pat.value)
+ }
+ parts := make([]string, len(pat.subs))
+ for i, sub := range pat.subs {
+ parts[i] = sub.String()
+ }
+ return fmt.Sprintf("<%s %#v (%s)>", pat.op, pat.value, strings.Join(parts, ", "))
+}
+
type ImportsTab struct {
imports []map[string]string
}
@@ -70,8 +88,14 @@ type Context struct {
Itab *ImportsTab
}
+const (
+ varPrefix = `ᐸvarᐳ`
+ varSeqPrefix = `ᐸvar_seqᐳ`
+)
+
func Parse(ctx *Context, s string) (*Pattern, error) {
- noDollars := strings.ReplaceAll(s, "$", "__")
+ noDollars := strings.ReplaceAll(s, "$*", varSeqPrefix)
+ noDollars = strings.ReplaceAll(noDollars, "$", varPrefix)
n, err := parser.ParseExpr(noDollars)
if err != nil {
return nil, err
@@ -125,10 +149,17 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
if ok {
return &pattern{op: opBuiltinType, value: basic}
}
- if strings.HasPrefix(e.Name, "__") {
- name := strings.TrimPrefix(e.Name, "__")
+ if strings.HasPrefix(e.Name, varPrefix) {
+ name := strings.TrimPrefix(e.Name, varPrefix)
return &pattern{op: opVar, value: name}
}
+ if strings.HasPrefix(e.Name, varSeqPrefix) {
+ name := strings.TrimPrefix(e.Name, varSeqPrefix)
+ // Only unnamed seq are supported right now.
+ if name == "_" {
+ return &pattern{op: opVarSeq, value: name}
+ }
+ }
case *ast.SelectorExpr:
pkg, ok := e.X.(*ast.Ident)
@@ -159,8 +190,8 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
subs: []*pattern{elem},
}
}
- if id, ok := e.Len.(*ast.Ident); ok && strings.HasPrefix(id.Name, "__") {
- name := strings.TrimPrefix(id.Name, "__")
+ if id, ok := e.Len.(*ast.Ident); ok && strings.HasPrefix(id.Name, varPrefix) {
+ name := strings.TrimPrefix(id.Name, varPrefix)
return &pattern{
op: opArray,
value: name,
@@ -220,6 +251,64 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
case *ast.ParenExpr:
return parseExpr(ctx, e.X)
+ case *ast.FuncType:
+ var params []*pattern
+ var results []*pattern
+ if e.Params != nil {
+ for _, field := range e.Params.List {
+ p := parseExpr(ctx, field.Type)
+ if p == nil {
+ return nil
+ }
+ if len(field.Names) != 0 {
+ return nil
+ }
+ params = append(params, p)
+ }
+ }
+ if e.Results != nil {
+ for _, field := range e.Results.List {
+ p := parseExpr(ctx, field.Type)
+ if p == nil {
+ return nil
+ }
+ if len(field.Names) != 0 {
+ return nil
+ }
+ results = append(results, p)
+ }
+ }
+ return &pattern{
+ op: opFunc,
+ value: len(params),
+ subs: append(params, results...),
+ }
+
+ case *ast.StructType:
+ hasSeq := false
+ members := make([]*pattern, 0, len(e.Fields.List))
+ for _, field := range e.Fields.List {
+ p := parseExpr(ctx, field.Type)
+ if p == nil {
+ return nil
+ }
+ if len(field.Names) != 0 {
+ return nil
+ }
+ if p.op == opVarSeq {
+ hasSeq = true
+ }
+ members = append(members, p)
+ }
+ op := opStructNoSeq
+ if hasSeq {
+ op = opStruct
+ }
+ return &pattern{
+ op: op,
+ subs: members,
+ }
+
case *ast.InterfaceType:
if len(e.Methods.List) == 0 {
return &pattern{op: opBuiltinType, value: efaceType}
@@ -229,6 +318,7 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern {
return nil
}
+// MatchIdentical returns true if the go typ matches pattern p.
func (p *Pattern) MatchIdentical(typ types.Type) bool {
p.reset()
return p.matchIdentical(p.root, typ)
@@ -243,6 +333,54 @@ func (p *Pattern) reset() {
}
}
+func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool {
+ // TODO: do backtracking.
+
+ numFields := f.NumFields()
+ fieldsMatched := 0
+
+ if len(subs) == 0 && numFields != 0 {
+ return false
+ }
+
+ matchAny := false
+
+ i := 0
+ for i < len(subs) {
+ pat := subs[i]
+
+ if pat.op == opVarSeq {
+ matchAny = true
+ }
+
+ fieldsLeft := numFields - fieldsMatched
+ if matchAny {
+ switch {
+ // "Nothing left to match" stop condition.
+ case fieldsLeft == 0:
+ matchAny = false
+ i++
+ // Lookahead for non-greedy matching.
+ case i+1 < len(subs) && p.matchIdentical(subs[i+1], f.Field(fieldsMatched).Type()):
+ matchAny = false
+ i += 2
+ fieldsMatched++
+ default:
+ fieldsMatched++
+ }
+ continue
+ }
+
+ if fieldsLeft == 0 || !p.matchIdentical(pat, f.Field(fieldsMatched).Type()) {
+ return false
+ }
+ i++
+ fieldsMatched++
+ }
+
+ return numFields == fieldsMatched
+}
+
func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
switch sub.op {
case opVar:
@@ -258,10 +396,10 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
if y == nil {
return typ == nil
}
- return types.Identical(typ, y)
+ return xtypes.Identical(typ, y)
case opBuiltinType:
- return types.Identical(typ, sub.value.(types.Type))
+ return xtypes.Identical(typ, sub.value.(types.Type))
case opPointer:
typ, ok := typ.(*types.Pointer)
@@ -332,9 +470,67 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool {
}
pkgPath := sub.value.([2]string)[0]
typeName := sub.value.([2]string)[1]
- return obj.Pkg().Path() == pkgPath && typeName == obj.Name()
+ // obj.Pkg().Path() may be in a vendor directory.
+ path := strings.SplitAfter(obj.Pkg().Path(), "/vendor/")
+ return path[len(path)-1] == pkgPath && typeName == obj.Name()
+
+ case opFunc:
+ typ, ok := typ.(*types.Signature)
+ if !ok {
+ return false
+ }
+ numParams := sub.value.(int)
+ params := sub.subs[:numParams]
+ results := sub.subs[numParams:]
+ if typ.Params().Len() != len(params) {
+ return false
+ }
+ if typ.Results().Len() != len(results) {
+ return false
+ }
+ for i := 0; i < typ.Params().Len(); i++ {
+ if !p.matchIdentical(params[i], typ.Params().At(i).Type()) {
+ return false
+ }
+ }
+ for i := 0; i < typ.Results().Len(); i++ {
+ if !p.matchIdentical(results[i], typ.Results().At(i).Type()) {
+ return false
+ }
+ }
+ return true
+
+ case opStructNoSeq:
+ typ, ok := typ.(*types.Struct)
+ if !ok {
+ return false
+ }
+ if typ.NumFields() != len(sub.subs) {
+ return false
+ }
+ for i, member := range sub.subs {
+ if !p.matchIdentical(member, typ.Field(i).Type()) {
+ return false
+ }
+ }
+ return true
+
+ case opStruct:
+ typ, ok := typ.(*types.Struct)
+ if !ok {
+ return false
+ }
+ if !p.matchIdenticalFielder(sub.subs, typ) {
+ return false
+ }
+ return true
default:
return false
}
}
+
+type fielder interface {
+ Field(i int) *types.Var
+ NumFields() int
+}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
index c17dc2431..16fd7d68a 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go
@@ -2,27 +2,26 @@ package ruleguard
import (
"go/ast"
+ "go/constant"
"go/parser"
- "go/printer"
"go/token"
"go/types"
"strconv"
"strings"
)
-func unquoteNode(lit *ast.BasicLit) string {
- return lit.Value[1 : len(lit.Value)-1]
-}
-
-func sprintNode(fset *token.FileSet, n ast.Node) string {
- if fset == nil {
- fset = token.NewFileSet()
+func findDependency(pkg *types.Package, path string) *types.Package {
+ if pkg.Path() == path {
+ return pkg
}
- var buf strings.Builder
- if err := printer.Fprint(&buf, fset, n); err != nil {
- return ""
+ // It looks like indirect dependencies are always incomplete?
+ // If it's true, then we don't have to recurse here.
+ for _, imported := range pkg.Imports() {
+ if dep := findDependency(imported, path); dep != nil && dep.Complete() {
+ return dep
+ }
}
- return buf.String()
+ return nil
}
var basicTypeByName = map[string]types.Type{
@@ -110,6 +109,17 @@ func typeFromNode(e ast.Expr) types.Type {
return nil
}
+func intValueOf(info *types.Info, expr ast.Expr) constant.Value {
+ tv := info.Types[expr]
+ if tv.Value == nil {
+ return nil
+ }
+ if tv.Value.Kind() != constant.Int {
+ return nil
+ }
+ return tv.Value
+}
+
// isPure reports whether expr is a softly safe expression and contains
// no significant side-effects. As opposed to strictly safe expressions,
// soft safe expressions permit some forms of side-effects, like