aboutsummaryrefslogtreecommitdiffstats
path: root/prog/minimization.go
diff options
context:
space:
mode:
authorDmitry Vyukov <dvyukov@google.com>2018-05-04 18:03:46 +0200
committerDmitry Vyukov <dvyukov@google.com>2018-05-04 18:03:46 +0200
commit2c7e14a847318974490ab59460f0834ea2ee0d24 (patch)
treee9ac237ccfeaa3a7d508a07fc37ea7b7d28697df /prog/minimization.go
parent08141db61a7a947b701d06aa5c90cd825c55e350 (diff)
gometalinter: enable cyclomatic complexity checking
Refactor some functions to be simpler. Update #538
Diffstat (limited to 'prog/minimization.go')
-rw-r--r--prog/minimization.go281
1 file changed, 145 insertions, 136 deletions
diff --git a/prog/minimization.go b/prog/minimization.go
index 8f2cbae61..71e0f5c63 100644
--- a/prog/minimization.go
+++ b/prog/minimization.go
@@ -30,6 +30,37 @@ func Minimize(p0 *Prog, callIndex0 int, crash bool, pred0 func(*Prog, int) bool)
}
// Try to remove all calls except the last one one-by-one.
+ p0, callIndex0 = removeCalls(p0, callIndex0, crash, pred)
+
+ // Try to minimize individual args.
+ for i := 0; i < len(p0.Calls); i++ {
+ ctx := &minimizeArgsCtx{
+ p0: &p0,
+ callIndex0: callIndex0,
+ crash: crash,
+ pred: pred,
+ triedPaths: make(map[string]bool),
+ }
+ again:
+ p := p0.Clone()
+ call := p.Calls[i]
+ for j, arg := range call.Args {
+ if ctx.do(p, call, arg, fmt.Sprintf("%v", j)) {
+ goto again
+ }
+ }
+ }
+
+ if callIndex0 != -1 {
+ if callIndex0 < 0 || callIndex0 >= len(p0.Calls) || name0 != p0.Calls[callIndex0].Meta.Name {
+ panic(fmt.Sprintf("bad call index after minimization: ncalls=%v index=%v call=%v/%v",
+ len(p0.Calls), callIndex0, name0, p0.Calls[callIndex0].Meta.Name))
+ }
+ }
+ return p0, callIndex0
+}
+
+func removeCalls(p0 *Prog, callIndex0 int, crash bool, pred func(*Prog, int) bool) (*Prog, int) {
for i := len(p0.Calls) - 1; i >= 0; i-- {
if i == callIndex0 {
continue
@@ -46,156 +77,134 @@ func Minimize(p0 *Prog, callIndex0 int, crash bool, pred0 func(*Prog, int) bool)
p0 = p
callIndex0 = callIndex
}
+ return p0, callIndex0
+}
- var triedPaths map[string]bool
+type minimizeArgsCtx struct {
+ p0 **Prog
+ callIndex0 int
+ crash bool
+ pred func(*Prog, int) bool
+ triedPaths map[string]bool
+}
- var rec func(p *Prog, call *Call, arg Arg, path string) bool
- rec = func(p *Prog, call *Call, arg Arg, path string) bool {
- path += fmt.Sprintf("-%v", arg.Type().FieldName())
- switch typ := arg.Type().(type) {
- case *StructType:
- a := arg.(*GroupArg)
- for _, innerArg := range a.Inner {
- if rec(p, call, innerArg, path) {
- return true
- }
- }
- case *UnionType:
- a := arg.(*UnionArg)
- if rec(p, call, a.Option, path) {
+func (ctx *minimizeArgsCtx) do(p *Prog, call *Call, arg Arg, path string) bool {
+ path += fmt.Sprintf("-%v", arg.Type().FieldName())
+ switch typ := arg.Type().(type) {
+ case *StructType:
+ a := arg.(*GroupArg)
+ for _, innerArg := range a.Inner {
+ if ctx.do(p, call, innerArg, path) {
return true
}
- case *PtrType:
- // TODO: try to remove optional ptrs
- a, ok := arg.(*PointerArg)
- if !ok {
- // Can also be *ConstArg.
- return false
- }
- if a.Res != nil {
- return rec(p, call, a.Res, path)
- }
- case *ArrayType:
- a := arg.(*GroupArg)
- for i, innerArg := range a.Inner {
- innerPath := fmt.Sprintf("%v-%v", path, i)
- if !triedPaths[innerPath] && !crash {
- if (typ.Kind == ArrayRangeLen && len(a.Inner) > int(typ.RangeBegin)) ||
- (typ.Kind == ArrayRandLen) {
- copy(a.Inner[i:], a.Inner[i+1:])
- a.Inner = a.Inner[:len(a.Inner)-1]
- removeArg(innerArg)
- p.Target.assignSizesCall(call)
-
- if pred(p, callIndex0) {
- p0 = p
- } else {
- triedPaths[innerPath] = true
- }
+ }
+ case *UnionType:
+ a := arg.(*UnionArg)
+ if ctx.do(p, call, a.Option, path) {
+ return true
+ }
+ case *PtrType:
+ // TODO: try to remove optional ptrs
+ a, ok := arg.(*PointerArg)
+ if !ok {
+ // Can also be *ConstArg.
+ return false
+ }
+ if a.Res != nil {
+ return ctx.do(p, call, a.Res, path)
+ }
+ case *ArrayType:
+ a := arg.(*GroupArg)
+ for i, innerArg := range a.Inner {
+ innerPath := fmt.Sprintf("%v-%v", path, i)
+ if !ctx.triedPaths[innerPath] && !ctx.crash {
+ if (typ.Kind == ArrayRangeLen && len(a.Inner) > int(typ.RangeBegin)) ||
+ (typ.Kind == ArrayRandLen) {
+ copy(a.Inner[i:], a.Inner[i+1:])
+ a.Inner = a.Inner[:len(a.Inner)-1]
+ removeArg(innerArg)
+ p.Target.assignSizesCall(call)
- return true
+ if ctx.pred(p, ctx.callIndex0) {
+ *ctx.p0 = p
+ } else {
+ ctx.triedPaths[innerPath] = true
}
- }
- if rec(p, call, innerArg, innerPath) {
return true
}
}
- case *IntType, *FlagsType, *ProcType:
- // TODO: try to reset bits in ints
- // TODO: try to set separate flags
- if crash {
- return false
- }
- if triedPaths[path] {
- return false
- }
- triedPaths[path] = true
- a := arg.(*ConstArg)
- if a.Val == typ.Default() {
- return false
- }
- v0 := a.Val
- a.Val = typ.Default()
- if pred(p, callIndex0) {
- p0 = p
- return true
- }
- a.Val = v0
- case *ResourceType:
- if crash {
- return false
- }
- if triedPaths[path] {
- return false
- }
- triedPaths[path] = true
- a := arg.(*ResultArg)
- if a.Res == nil {
- return false
- }
- r0 := a.Res
- a.Res = nil
- a.Val = typ.Default()
- if pred(p, callIndex0) {
- p0 = p
+ if ctx.do(p, call, innerArg, innerPath) {
return true
}
- a.Res = r0
- a.Val = 0
- case *BufferType:
- // TODO: try to set individual bytes to 0
- if triedPaths[path] {
- return false
- }
- triedPaths[path] = true
- if typ.Kind != BufferBlobRand && typ.Kind != BufferBlobRange ||
- typ.Dir() == DirOut {
- return false
- }
- a := arg.(*DataArg)
- minLen := int(typ.RangeBegin)
- for step := len(a.Data()) - minLen; len(a.Data()) > minLen && step > 0; {
- if len(a.Data())-step >= minLen {
- a.data = a.Data()[:len(a.Data())-step]
- p.Target.assignSizesCall(call)
- if pred(p, callIndex0) {
- continue
- }
- a.data = a.Data()[:len(a.Data())+step]
- p.Target.assignSizesCall(call)
- }
- step /= 2
- if crash {
- break
- }
- }
- p0 = p
- case *VmaType, *LenType, *CsumType, *ConstType:
+ }
+ case *IntType, *FlagsType, *ProcType:
+ // TODO: try to reset bits in ints
+ // TODO: try to set separate flags
+ if ctx.crash || ctx.triedPaths[path] {
return false
- default:
- panic(fmt.Sprintf("unknown arg type '%+v'", typ))
}
- return false
- }
-
- // Try to minimize individual args.
- for i := 0; i < len(p0.Calls); i++ {
- triedPaths = make(map[string]bool)
- again:
- p := p0.Clone()
- call := p.Calls[i]
- for j, arg := range call.Args {
- if rec(p, call, arg, fmt.Sprintf("%v", j)) {
- goto again
- }
+ ctx.triedPaths[path] = true
+ a := arg.(*ConstArg)
+ if a.Val == typ.Default() {
+ return false
}
- }
-
- if callIndex0 != -1 {
- if callIndex0 < 0 || callIndex0 >= len(p0.Calls) || name0 != p0.Calls[callIndex0].Meta.Name {
- panic(fmt.Sprintf("bad call index after minimization: ncalls=%v index=%v call=%v/%v",
- len(p0.Calls), callIndex0, name0, p0.Calls[callIndex0].Meta.Name))
+ v0 := a.Val
+ a.Val = typ.Default()
+ if ctx.pred(p, ctx.callIndex0) {
+ *ctx.p0 = p
+ return true
+ }
+ a.Val = v0
+ case *ResourceType:
+ if ctx.crash || ctx.triedPaths[path] {
+ return false
+ }
+ ctx.triedPaths[path] = true
+ a := arg.(*ResultArg)
+ if a.Res == nil {
+ return false
}
+ r0 := a.Res
+ a.Res = nil
+ a.Val = typ.Default()
+ if ctx.pred(p, ctx.callIndex0) {
+ *ctx.p0 = p
+ return true
+ }
+ a.Res = r0
+ a.Val = 0
+ case *BufferType:
+ // TODO: try to set individual bytes to 0
+ if ctx.triedPaths[path] {
+ return false
+ }
+ ctx.triedPaths[path] = true
+ if typ.Kind != BufferBlobRand && typ.Kind != BufferBlobRange ||
+ typ.Dir() == DirOut {
+ return false
+ }
+ a := arg.(*DataArg)
+ minLen := int(typ.RangeBegin)
+ for step := len(a.Data()) - minLen; len(a.Data()) > minLen && step > 0; {
+ if len(a.Data())-step >= minLen {
+ a.data = a.Data()[:len(a.Data())-step]
+ p.Target.assignSizesCall(call)
+ if ctx.pred(p, ctx.callIndex0) {
+ continue
+ }
+ a.data = a.Data()[:len(a.Data())+step]
+ p.Target.assignSizesCall(call)
+ }
+ step /= 2
+ if ctx.crash {
+ break
+ }
+ }
+ *ctx.p0 = p
+ case *VmaType, *LenType, *CsumType, *ConstType:
+ return false
+ default:
+ panic(fmt.Sprintf("unknown arg type '%+v'", typ))
}
- return p0, callIndex0
+ return false
}