diff options
| author | Dmitry Vyukov <dvyukov@google.com> | 2020-04-26 14:14:14 +0200 |
|---|---|---|
| committer | Dmitry Vyukov <dvyukov@google.com> | 2020-05-01 13:31:17 +0200 |
| commit | e54e9781a4e043b3140b0c908ba4f4e469fd317e (patch) | |
| tree | 16e6387d78a8577c5f3d9fb8d05a51752da6338e /prog/mutation.go | |
| parent | 3f4dbb2f6fff9479d6c250e224bc3cb7f5cd66ed (diff) | |
prog: remove Dir from Type
Having Dir in Type is handy, but forces us to duplicate lots of types.
E.g. if a struct is referenced as both in and out, then we need to
have 2 copies and 2 copies of structs/types it includes.
It also prevents us from having the struct type as struct identity
(because we can have up to 3 of them).
Revert to the old way we used to do it: propagate Dir as we walk
syscall arguments. This moves lots of dir passing from pkg/compiler
to prog package.
Now Arg contains the dir, so once we build the tree, we can use dirs
as before.
Reduces size of sys/linux/gen/amd64.go from 6058336 to 5661150 (-6.6%).
Update #1580
Diffstat (limited to 'prog/mutation.go')
| -rw-r--r-- | prog/mutation.go | 28 |
1 files changed, 14 insertions, 14 deletions
diff --git a/prog/mutation.go b/prog/mutation.go index 7203a86ec..7087b4d86 100644 --- a/prog/mutation.go +++ b/prog/mutation.go @@ -99,7 +99,7 @@ func (ctx *mutator) squashAny() bool { var blobs []*DataArg var bases []*PointerArg ForeachSubArg(ptr, func(arg Arg, ctx *ArgCtx) { - if data, ok := arg.(*DataArg); ok && arg.Type().Dir() != DirOut { + if data, ok := arg.(*DataArg); ok && arg.Dir() != DirOut { blobs = append(blobs, data) bases = append(bases, ctx.Base) } @@ -119,7 +119,7 @@ func (ctx *mutator) squashAny() bool { // Update base pointer if size has increased. if baseSize < base.Res.Size() { s := analyze(ctx.ct, ctx.corpus, p, p.Calls[0]) - newArg := r.allocAddr(s, base.Type(), base.Res.Size(), base.Res) + newArg := r.allocAddr(s, base.Type(), base.Dir(), base.Res.Size(), base.Res) *base = *newArg } return true @@ -252,7 +252,7 @@ func (target *Target) mutateArg(r *randGen, s *state, arg Arg, ctx ArgCtx, updat } // Update base pointer if size has increased. if base := ctx.Base; base != nil && baseSize < base.Res.Size() { - newArg := r.allocAddr(s, base.Type(), base.Res.Size(), base.Res) + newArg := r.allocAddr(s, base.Type(), base.Dir(), base.Res.Size(), base.Res) replaceArg(base, newArg) } return calls, true @@ -260,7 +260,7 @@ func (target *Target) mutateArg(r *randGen, s *state, arg Arg, ctx ArgCtx, updat func regenerate(r *randGen, s *state, arg Arg) (calls []*Call, retry, preserve bool) { var newArg Arg - newArg, calls = r.generateArg(s, arg.Type()) + newArg, calls = r.generateArg(s, arg.Type(), arg.Dir()) replaceArg(arg, newArg) return } @@ -346,7 +346,7 @@ func (t *BufferType) mutate(r *randGen, s *state, arg Arg, ctx ArgCtx) (calls [] minLen, maxLen = t.RangeBegin, t.RangeEnd } a := arg.(*DataArg) - if t.Dir() == DirOut { + if a.Dir() == DirOut { mutateBufferSize(r, a, minLen, maxLen) return } @@ -412,7 +412,7 @@ func (t *ArrayType) mutate(r *randGen, s *state, arg Arg, ctx ArgCtx) (calls []* } if count > uint64(len(a.Inner)) { for count > 
uint64(len(a.Inner)) { - newArg, newCalls := r.generateArg(s, t.Type) + newArg, newCalls := r.generateArg(s, t.Type, a.Dir()) a.Inner = append(a.Inner, newArg) calls = append(calls, newCalls...) for _, c := range newCalls { @@ -433,11 +433,11 @@ func (t *PtrType) mutate(r *randGen, s *state, arg Arg, ctx ArgCtx) (calls []*Ca if r.oneOf(1000) { removeArg(a.Res) index := r.rand(len(r.target.SpecialPointers)) - newArg := MakeSpecialPointerArg(t, index) + newArg := MakeSpecialPointerArg(t, a.Dir(), index) replaceArg(arg, newArg) return } - newArg := r.allocAddr(s, t, a.Res.Size(), a.Res) + newArg := r.allocAddr(s, t, a.Dir(), a.Res.Size(), a.Res) replaceArg(arg, newArg) return } @@ -448,7 +448,7 @@ func (t *StructType) mutate(r *randGen, s *state, arg Arg, ctx ArgCtx) (calls [] panic("bad arg returned by mutationArgs: StructType") } var newArg Arg - newArg, calls = gen(&Gen{r, s}, t, arg) + newArg, calls = gen(&Gen{r, s}, t, arg.Dir(), arg) a := arg.(*GroupArg) for i, f := range newArg.(*GroupArg).Inner { replaceArg(a.Inner[i], f) @@ -459,7 +459,7 @@ func (t *StructType) mutate(r *randGen, s *state, arg Arg, ctx ArgCtx) (calls [] func (t *UnionType) mutate(r *randGen, s *state, arg Arg, ctx ArgCtx) (calls []*Call, retry, preserve bool) { if gen := r.target.SpecialTypes[t.Name()]; gen != nil { var newArg Arg - newArg, calls = gen(&Gen{r, s}, t, arg) + newArg, calls = gen(&Gen{r, s}, t, arg.Dir(), arg) replaceArg(arg, newArg) return } @@ -481,8 +481,8 @@ func (t *UnionType) mutate(r *randGen, s *state, arg Arg, ctx ArgCtx) (calls []* optType := t.Fields[newIdx] removeArg(a.Option) var newOpt Arg - newOpt, calls = r.generateArg(s, optType) - replaceArg(arg, MakeUnionArg(t, newOpt)) + newOpt, calls = r.generateArg(s, optType, a.Dir()) + replaceArg(arg, MakeUnionArg(t, a.Dir(), newOpt)) return } @@ -522,7 +522,7 @@ func (ma *mutationArgs) collectArg(arg Arg, ctx *ArgCtx) { _, isArrayTyp := typ.(*ArrayType) _, isBufferTyp := typ.(*BufferType) - if !isBufferTyp && !isArrayTyp 
&& typ.Dir() == DirOut || !typ.Varlen() && typ.Size() == 0 { + if !isBufferTyp && !isArrayTyp && arg.Dir() == DirOut || !typ.Varlen() && typ.Size() == 0 { return } @@ -645,7 +645,7 @@ func (t *LenType) getMutationPrio(target *Target, arg Arg, ignoreSpecial bool) ( } func (t *BufferType) getMutationPrio(target *Target, arg Arg, ignoreSpecial bool) (prio float64, stopRecursion bool) { - if t.Dir() == DirOut && !t.Varlen() { + if arg.Dir() == DirOut && !t.Varlen() { return dontMutate, false } if t.Kind == BufferString && len(t.Values) == 1 { |
