diff options
| author | Dmitry Vyukov <dvyukov@google.com> | 2018-07-31 16:06:21 +0200 |
|---|---|---|
| committer | Dmitry Vyukov <dvyukov@google.com> | 2018-07-31 16:06:21 +0200 |
| commit | 0e9b376bc31a086d9cb563233831a33c3372eb3f (patch) | |
| tree | 1b6e899dcaf9d3fd68afa9a8ce483d6c467c7abd /prog/minimization.go | |
| parent | 531d157044641c94da30e8e7f7e1066281aec90d (diff) | |
prog: refactor Minimize
Reduce cyclomatic complexity of argument minimization
by moving type-specific logic into separate functions.
Fix a few bugs along the way.
Update #538
Diffstat (limited to 'prog/minimization.go')
| -rw-r--r-- | prog/minimization.go | 259 |
1 file changed, 144 insertions, 115 deletions
diff --git a/prog/minimization.go b/prog/minimization.go index 71e0f5c63..1943ef4ea 100644 --- a/prog/minimization.go +++ b/prog/minimization.go @@ -12,14 +12,16 @@ import ( // whether it is equal to the original program or not. If it is equivalent then // the simplification attempt is committed and the process continues. func Minimize(p0 *Prog, callIndex0 int, crash bool, pred0 func(*Prog, int) bool) (*Prog, int) { - pred := pred0 - if debug { - pred = func(p *Prog, callIndex int) bool { + pred := func(p *Prog, callIndex int) bool { + for _, call := range p.Calls { + p.Target.SanitizeCall(call) + } + if debug { if err := p.validate(); err != nil { panic(err) } - return pred0(p, callIndex) } + return pred0(p, callIndex) } name0 := "" if callIndex0 != -1 { @@ -35,6 +37,7 @@ func Minimize(p0 *Prog, callIndex0 int, crash bool, pred0 func(*Prog, int) bool) // Try to minimize individual args. for i := 0; i < len(p0.Calls); i++ { ctx := &minimizeArgsCtx{ + target: p0.Target, p0: &p0, callIndex0: callIndex0, crash: crash, @@ -42,10 +45,10 @@ func Minimize(p0 *Prog, callIndex0 int, crash bool, pred0 func(*Prog, int) bool) triedPaths: make(map[string]bool), } again: - p := p0.Clone() - call := p.Calls[i] - for j, arg := range call.Args { - if ctx.do(p, call, arg, fmt.Sprintf("%v", j)) { + ctx.p = p0.Clone() + ctx.call = ctx.p.Calls[i] + for j, arg := range ctx.call.Args { + if ctx.do(arg, fmt.Sprintf("%v", j)) { goto again } } @@ -81,130 +84,156 @@ func removeCalls(p0 *Prog, callIndex0 int, crash bool, pred func(*Prog, int) boo } type minimizeArgsCtx struct { + target *Target p0 **Prog + p *Prog + call *Call callIndex0 int crash bool pred func(*Prog, int) bool triedPaths map[string]bool } -func (ctx *minimizeArgsCtx) do(p *Prog, call *Call, arg Arg, path string) bool { +func (ctx *minimizeArgsCtx) do(arg Arg, path string) bool { path += fmt.Sprintf("-%v", arg.Type().FieldName()) - switch typ := arg.Type().(type) { - case *StructType: - a := arg.(*GroupArg) - for _, innerArg 
:= range a.Inner { - if ctx.do(p, call, innerArg, path) { - return true - } - } - case *UnionType: - a := arg.(*UnionArg) - if ctx.do(p, call, a.Option, path) { + if ctx.triedPaths[path] { + return false + } + if arg.Type().minimize(ctx, arg, path) { + return true + } + ctx.triedPaths[path] = true + return false +} + +func (typ *TypeCommon) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + return false +} + +func (typ *StructType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + a := arg.(*GroupArg) + for _, innerArg := range a.Inner { + if ctx.do(innerArg, path) { return true } - case *PtrType: - // TODO: try to remove optional ptrs - a, ok := arg.(*PointerArg) - if !ok { - // Can also be *ConstArg. - return false - } - if a.Res != nil { - return ctx.do(p, call, a.Res, path) - } - case *ArrayType: - a := arg.(*GroupArg) - for i, innerArg := range a.Inner { - innerPath := fmt.Sprintf("%v-%v", path, i) - if !ctx.triedPaths[innerPath] && !ctx.crash { - if (typ.Kind == ArrayRangeLen && len(a.Inner) > int(typ.RangeBegin)) || - (typ.Kind == ArrayRandLen) { - copy(a.Inner[i:], a.Inner[i+1:]) - a.Inner = a.Inner[:len(a.Inner)-1] - removeArg(innerArg) - p.Target.assignSizesCall(call) - - if ctx.pred(p, ctx.callIndex0) { - *ctx.p0 = p - } else { - ctx.triedPaths[innerPath] = true - } - return true - } - } - if ctx.do(p, call, innerArg, innerPath) { - return true + } + return false +} + +func (typ *UnionType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + return ctx.do(arg.(*UnionArg).Option, path) +} + +func (typ *PtrType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + // TODO: try to remove optional ptrs + a := arg.(*PointerArg) + if a.Res == nil { + return false + } + return ctx.do(a.Res, path) +} + +func (typ *ArrayType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + a := arg.(*GroupArg) + for i := len(a.Inner) - 1; i >= 0; i-- { + elem := a.Inner[i] + elemPath := fmt.Sprintf("%v-%v", path, i) + // Try 
to remove individual elements one-by-one. + if !ctx.crash && !ctx.triedPaths[elemPath] && + (typ.Kind == ArrayRandLen || + typ.Kind == ArrayRangeLen && uint64(len(a.Inner)) > typ.RangeBegin) { + ctx.triedPaths[elemPath] = true + copy(a.Inner[i:], a.Inner[i+1:]) + a.Inner = a.Inner[:len(a.Inner)-1] + removeArg(elem) + ctx.target.assignSizesCall(ctx.call) + if ctx.pred(ctx.p, ctx.callIndex0) { + *ctx.p0 = ctx.p } - } - case *IntType, *FlagsType, *ProcType: - // TODO: try to reset bits in ints - // TODO: try to set separate flags - if ctx.crash || ctx.triedPaths[path] { - return false - } - ctx.triedPaths[path] = true - a := arg.(*ConstArg) - if a.Val == typ.Default() { - return false - } - v0 := a.Val - a.Val = typ.Default() - if ctx.pred(p, ctx.callIndex0) { - *ctx.p0 = p return true } - a.Val = v0 - case *ResourceType: - if ctx.crash || ctx.triedPaths[path] { - return false - } - ctx.triedPaths[path] = true - a := arg.(*ResultArg) - if a.Res == nil { - return false - } - r0 := a.Res - a.Res = nil - a.Val = typ.Default() - if ctx.pred(p, ctx.callIndex0) { - *ctx.p0 = p + if ctx.do(elem, elemPath) { return true } - a.Res = r0 - a.Val = 0 - case *BufferType: - // TODO: try to set individual bytes to 0 - if ctx.triedPaths[path] { - return false - } - ctx.triedPaths[path] = true - if typ.Kind != BufferBlobRand && typ.Kind != BufferBlobRange || - typ.Dir() == DirOut { - return false - } - a := arg.(*DataArg) - minLen := int(typ.RangeBegin) - for step := len(a.Data()) - minLen; len(a.Data()) > minLen && step > 0; { - if len(a.Data())-step >= minLen { - a.data = a.Data()[:len(a.Data())-step] - p.Target.assignSizesCall(call) - if ctx.pred(p, ctx.callIndex0) { - continue - } - a.data = a.Data()[:len(a.Data())+step] - p.Target.assignSizesCall(call) - } - step /= 2 - if ctx.crash { - break + } + return false +} + +func (typ *IntType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + return minimizeInt(ctx, arg, path) +} + +func (typ *FlagsType) minimize(ctx 
*minimizeArgsCtx, arg Arg, path string) bool { + return minimizeInt(ctx, arg, path) +} + +func (typ *ProcType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + return minimizeInt(ctx, arg, path) +} + +func minimizeInt(ctx *minimizeArgsCtx, arg Arg, path string) bool { + // TODO: try to reset bits in ints + // TODO: try to set separate flags + if ctx.crash { + return false + } + a := arg.(*ConstArg) + def := arg.Type().Default() + if a.Val == def { + return false + } + v0 := a.Val + a.Val = def + if ctx.pred(ctx.p, ctx.callIndex0) { + *ctx.p0 = ctx.p + } else { + a.Val = v0 + } + return false +} + +func (typ *ResourceType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + if ctx.crash { + return false + } + a := arg.(*ResultArg) + if a.Res == nil { + return false + } + r0 := a.Res + delete(a.Res.uses, a) + a.Res, a.Val = nil, typ.Default() + if ctx.pred(ctx.p, ctx.callIndex0) { + *ctx.p0 = ctx.p + } else { + a.Res, a.Val = r0, 0 + a.Res.uses[a] = true + } + return false +} + +func (typ *BufferType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool { + // TODO: try to set individual bytes to 0 + if typ.Kind != BufferBlobRand && typ.Kind != BufferBlobRange || typ.Dir() == DirOut { + return false + } + a := arg.(*DataArg) + minLen := int(typ.RangeBegin) + for step := len(a.Data()) - minLen; len(a.Data()) > minLen && step > 0; { + if len(a.Data())-step >= minLen { + a.data = a.Data()[:len(a.Data())-step] + ctx.target.assignSizesCall(ctx.call) + if ctx.pred(ctx.p, ctx.callIndex0) { + continue } + a.data = a.Data()[:len(a.Data())+step] + ctx.target.assignSizesCall(ctx.call) + } + step /= 2 + if ctx.crash { + break } - *ctx.p0 = p - case *VmaType, *LenType, *CsumType, *ConstType: - return false - default: - panic(fmt.Sprintf("unknown arg type '%+v'", typ)) } + *ctx.p0 = ctx.p return false } |
