aboutsummaryrefslogtreecommitdiffstats
path: root/prog
diff options
context:
space:
mode:
authorDmitry Vyukov <dvyukov@google.com>2018-01-24 19:28:36 +0100
committerDmitry Vyukov <dvyukov@google.com>2018-01-27 17:08:43 +0100
commit08146b1a84f975e2cc1007242b4202dc5cc0e5c5 (patch)
treead9f57cfbed4b9008223359d0f765a2b6a27a209 /prog
parent5d7477249ba074bbdc9ffbf80314397dbe90e886 (diff)
sys/linux: extend netfilter descriptions
Diffstat (limited to 'prog')
-rw-r--r--prog/analysis.go2
-rw-r--r--prog/export_test.go1
-rw-r--r--prog/minimization.go242
-rw-r--r--prog/mutation.go738
-rw-r--r--prog/prog.go22
-rw-r--r--prog/prog_test.go28
-rw-r--r--prog/rand.go19
-rw-r--r--prog/target.go43
8 files changed, 594 insertions, 501 deletions
diff --git a/prog/analysis.go b/prog/analysis.go
index fcb00350b..629ae1ddf 100644
--- a/prog/analysis.go
+++ b/prog/analysis.go
@@ -104,7 +104,7 @@ func foreachSubargImpl(arg Arg, parent *[]Arg, f func(arg, base Arg, parent *[]A
rec(arg, nil, parent)
}
-func foreachSubarg(arg Arg, f func(arg, base Arg, parent *[]Arg)) {
+func ForeachSubarg(arg Arg, f func(arg, base Arg, parent *[]Arg)) {
foreachSubargImpl(arg, nil, f)
}
diff --git a/prog/export_test.go b/prog/export_test.go
index 404faf071..10dc6dbe5 100644
--- a/prog/export_test.go
+++ b/prog/export_test.go
@@ -60,6 +60,7 @@ func testEachTargetRandom(t *testing.T, fn func(t *testing.T, target *Target, rs
target := target
rs := rand.NewSource(rs0.Int63())
t.Run(fmt.Sprintf("%v/%v", target.OS, target.Arch), func(t *testing.T) {
+ t.Parallel()
fn(t, target, rs, iters)
})
}
diff --git a/prog/minimization.go b/prog/minimization.go
new file mode 100644
index 000000000..459e6265d
--- /dev/null
+++ b/prog/minimization.go
@@ -0,0 +1,242 @@
+// Copyright 2018 syzkaller project authors. All rights reserved.
+// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
+
+package prog
+
+import (
+ "fmt"
+)
+
+// Minimize minimizes program p into an equivalent program using the equivalence
+// predicate pred. It iteratively generates simpler programs and asks pred
+// whether it is equal to the original program or not. If it is equivalent then
+// the simplification attempt is committed and the process continues.
+func Minimize(p0 *Prog, callIndex0 int, pred0 func(*Prog, int) bool, crash bool) (*Prog, int) {
+ pred := pred0
+ if debug {
+ pred = func(p *Prog, callIndex int) bool {
+ if err := p.validate(); err != nil {
+ panic(err)
+ }
+ return pred0(p, callIndex)
+ }
+ }
+ name0 := ""
+ if callIndex0 != -1 {
+ if callIndex0 < 0 || callIndex0 >= len(p0.Calls) {
+ panic("bad call index")
+ }
+ name0 = p0.Calls[callIndex0].Meta.Name
+ }
+
+ // Try to glue all mmap's together.
+ s := analyze(nil, p0, nil)
+ hi := -1
+ lo := -1
+ for i := 0; i < maxPages; i++ {
+ if s.pages[i] {
+ hi = i
+ if lo == -1 {
+ lo = i
+ }
+ }
+ }
+ if hi != -1 {
+ p := p0.Clone()
+ callIndex := callIndex0
+ // Remove all mmaps.
+ for i := 0; i < len(p.Calls); i++ {
+ c := p.Calls[i]
+ if i != callIndex && c.Meta == p.Target.MmapSyscall {
+ p.removeCall(i)
+ if i < callIndex {
+ callIndex--
+ }
+ i--
+ }
+ }
+ // Prepend uber-mmap.
+ mmap := p0.Target.MakeMmap(uint64(lo), uint64(hi-lo)+1)
+ p.Calls = append([]*Call{mmap}, p.Calls...)
+ if callIndex != -1 {
+ callIndex++
+ }
+ if pred(p, callIndex) {
+ p0 = p
+ callIndex0 = callIndex
+ }
+ }
+
+ // Try to remove all calls except the last one one-by-one.
+ for i := len(p0.Calls) - 1; i >= 0; i-- {
+ if i == callIndex0 {
+ continue
+ }
+ callIndex := callIndex0
+ if i < callIndex {
+ callIndex--
+ }
+ p := p0.Clone()
+ p.removeCall(i)
+ if !pred(p, callIndex) {
+ continue
+ }
+ p0 = p
+ callIndex0 = callIndex
+ }
+
+ var triedPaths map[string]bool
+
+ var rec func(p *Prog, call *Call, arg Arg, path string) bool
+ rec = func(p *Prog, call *Call, arg Arg, path string) bool {
+ path += fmt.Sprintf("-%v", arg.Type().FieldName())
+ switch typ := arg.Type().(type) {
+ case *StructType:
+ a := arg.(*GroupArg)
+ for _, innerArg := range a.Inner {
+ if rec(p, call, innerArg, path) {
+ return true
+ }
+ }
+ case *UnionType:
+ a := arg.(*UnionArg)
+ if rec(p, call, a.Option, path) {
+ return true
+ }
+ case *PtrType:
+ // TODO: try to remove optional ptrs
+ a, ok := arg.(*PointerArg)
+ if !ok {
+ // Can also be *ConstArg.
+ return false
+ }
+ if a.Res != nil {
+ return rec(p, call, a.Res, path)
+ }
+ case *ArrayType:
+ a := arg.(*GroupArg)
+ for i, innerArg := range a.Inner {
+ innerPath := fmt.Sprintf("%v-%v", path, i)
+ if !triedPaths[innerPath] && !crash {
+ if (typ.Kind == ArrayRangeLen && len(a.Inner) > int(typ.RangeBegin)) ||
+ (typ.Kind == ArrayRandLen) {
+ copy(a.Inner[i:], a.Inner[i+1:])
+ a.Inner = a.Inner[:len(a.Inner)-1]
+ removeArg(innerArg)
+ p.Target.assignSizesCall(call)
+
+ if pred(p, callIndex0) {
+ p0 = p
+ } else {
+ triedPaths[innerPath] = true
+ }
+
+ return true
+ }
+ }
+ if rec(p, call, innerArg, innerPath) {
+ return true
+ }
+ }
+ case *IntType, *FlagsType, *ProcType:
+ // TODO: try to reset bits in ints
+ // TODO: try to set separate flags
+ if crash {
+ return false
+ }
+ if triedPaths[path] {
+ return false
+ }
+ triedPaths[path] = true
+ a := arg.(*ConstArg)
+ if a.Val == typ.Default() {
+ return false
+ }
+ v0 := a.Val
+ a.Val = typ.Default()
+ if pred(p, callIndex0) {
+ p0 = p
+ return true
+ } else {
+ a.Val = v0
+ }
+ case *ResourceType:
+ if crash {
+ return false
+ }
+ if triedPaths[path] {
+ return false
+ }
+ triedPaths[path] = true
+ a := arg.(*ResultArg)
+ if a.Res == nil {
+ return false
+ }
+ r0 := a.Res
+ a.Res = nil
+ a.Val = typ.Default()
+ if pred(p, callIndex0) {
+ p0 = p
+ return true
+ } else {
+ a.Res = r0
+ a.Val = 0
+ }
+ case *BufferType:
+ // TODO: try to set individual bytes to 0
+ if triedPaths[path] {
+ return false
+ }
+ triedPaths[path] = true
+ if typ.Kind != BufferBlobRand && typ.Kind != BufferBlobRange ||
+ typ.Dir() == DirOut {
+ return false
+ }
+ a := arg.(*DataArg)
+ minLen := int(typ.RangeBegin)
+ for step := len(a.Data()) - minLen; len(a.Data()) > minLen && step > 0; {
+ if len(a.Data())-step >= minLen {
+ a.data = a.Data()[:len(a.Data())-step]
+ p.Target.assignSizesCall(call)
+ if pred(p, callIndex0) {
+ continue
+ }
+ a.data = a.Data()[:len(a.Data())+step]
+ p.Target.assignSizesCall(call)
+ }
+ step /= 2
+ if crash {
+ break
+ }
+ }
+ p0 = p
+ case *VmaType, *LenType, *CsumType, *ConstType:
+ // TODO: try to remove offset from vma
+ return false
+ default:
+ panic(fmt.Sprintf("unknown arg type '%+v'", typ))
+ }
+ return false
+ }
+
+ // Try to minimize individual args.
+ for i := 0; i < len(p0.Calls); i++ {
+ triedPaths = make(map[string]bool)
+ again:
+ p := p0.Clone()
+ call := p.Calls[i]
+ for j, arg := range call.Args {
+ if rec(p, call, arg, fmt.Sprintf("%v", j)) {
+ goto again
+ }
+ }
+ }
+
+ if callIndex0 != -1 {
+ if callIndex0 < 0 || callIndex0 >= len(p0.Calls) || name0 != p0.Calls[callIndex0].Meta.Name {
+ panic(fmt.Sprintf("bad call index after minimization: ncalls=%v index=%v call=%v/%v",
+ len(p0.Calls), callIndex0, name0, p0.Calls[callIndex0].Meta.Name))
+ }
+ }
+ return p0, callIndex0
+}
diff --git a/prog/mutation.go b/prog/mutation.go
index 910f2597f..0aa06979e 100644
--- a/prog/mutation.go
+++ b/prog/mutation.go
@@ -15,6 +15,7 @@ func (p *Prog) Mutate(rs rand.Source, ncalls int, ct *ChoiceTable, corpus []*Pro
r := newRand(p.Target, rs)
retry := false
+outer:
for stop := false; !stop || retry; stop = r.oneOf(3) {
retry = false
switch {
@@ -63,185 +64,26 @@ func (p *Prog) Mutate(rs rand.Source, ncalls int, ct *ChoiceTable, corpus []*Pro
}
s := analyze(ct, p, c)
updateSizes := true
- for stop := false; !stop; stop = r.oneOf(3) {
+ retryArg := false
+ for stop := false; !stop || retryArg; stop = r.oneOf(3) {
+ retryArg = false
args, bases, parents := p.Target.mutationArgs(c)
if len(args) == 0 {
retry = true
- continue
+ continue outer
}
idx := r.Intn(len(args))
arg, base, parent := args[idx], bases[idx], parents[idx]
- var baseSize uint64
- if base != nil {
- b, ok := base.(*PointerArg)
- if !ok || b.Res == nil {
- panic("bad base arg")
- }
- baseSize = b.Res.Size()
- }
- switch t := arg.Type().(type) {
- case *IntType, *FlagsType:
- a := arg.(*ConstArg)
- if r.bin() {
- arg1, calls1 := r.generateArg(s, arg.Type())
- p.replaceArg(c, arg, arg1, calls1)
- } else {
- switch {
- case r.nOutOf(1, 3):
- a.Val += uint64(r.Intn(4)) + 1
- case r.nOutOf(1, 2):
- a.Val -= uint64(r.Intn(4)) + 1
- default:
- a.Val ^= 1 << uint64(r.Intn(64))
- }
- }
- case *LenType:
- if !r.mutateSize(arg.(*ConstArg), *parent) {
- retry = true
- continue
- }
- updateSizes = false
- case *ResourceType, *VmaType, *ProcType:
- arg1, calls1 := r.generateArg(s, arg.Type())
- p.replaceArg(c, arg, arg1, calls1)
- case *BufferType:
- a := arg.(*DataArg)
- switch t.Kind {
- case BufferBlobRand, BufferBlobRange:
- data := append([]byte{}, a.Data()...)
- minLen, maxLen := uint64(0), maxBlobLen
- if t.Kind == BufferBlobRange {
- minLen, maxLen = t.RangeBegin, t.RangeEnd
- }
- a.data = mutateData(r, data, minLen, maxLen)
- case BufferString:
- data := append([]byte{}, a.Data()...)
- if r.bin() {
- minLen, maxLen := uint64(0), maxBlobLen
- if t.TypeSize != 0 {
- minLen, maxLen = t.TypeSize, t.TypeSize
- }
- a.data = mutateData(r, data, minLen, maxLen)
- } else {
- a.data = r.randString(s, t)
- }
- case BufferFilename:
- a.data = []byte(r.filename(s))
- case BufferText:
- data := append([]byte{}, a.Data()...)
- a.data = r.mutateText(t.Text, data)
- default:
- panic("unknown buffer kind")
- }
- case *ArrayType:
- a := arg.(*GroupArg)
- count := uint64(0)
- switch t.Kind {
- case ArrayRandLen:
- for count == uint64(len(a.Inner)) {
- count = r.randArrayLen()
- }
- case ArrayRangeLen:
- if t.RangeBegin == t.RangeEnd {
- panic("trying to mutate fixed length array")
- }
- for count == uint64(len(a.Inner)) {
- count = r.randRange(t.RangeBegin, t.RangeEnd)
- }
- }
- if count > uint64(len(a.Inner)) {
- var calls []*Call
- for count > uint64(len(a.Inner)) {
- arg1, calls1 := r.generateArg(s, t.Type)
- a.Inner = append(a.Inner, arg1)
- for _, c1 := range calls1 {
- calls = append(calls, c1)
- s.analyze(c1)
- }
- }
- for _, c1 := range calls {
- p.Target.SanitizeCall(c1)
- }
- p.Target.SanitizeCall(c)
- p.insertBefore(c, calls)
- } else if count < uint64(len(a.Inner)) {
- for _, arg := range a.Inner[count:] {
- p.removeArg(c, arg)
- }
- a.Inner = a.Inner[:count]
- }
- // TODO: swap elements of the array
- case *PtrType:
- a, ok := arg.(*PointerArg)
- if !ok {
- break
- }
- // TODO: we don't know size for out args
- size := uint64(1)
- if a.Res != nil {
- size = a.Res.Size()
- }
- arg1, calls1 := r.addr(s, t, size, a.Res)
- p.replaceArg(c, arg, arg1, calls1)
- case *StructType:
- gen := p.Target.SpecialStructs[t.Name()]
- if gen == nil {
- panic("bad arg returned by mutationArgs: StructType")
- }
- arg1, calls1 := gen(&Gen{r, s}, t, arg.(*GroupArg))
- for i, f := range arg1.(*GroupArg).Inner {
- p.replaceArg(c, arg.(*GroupArg).Inner[i], f, calls1)
- calls1 = nil
- }
- case *UnionType:
- a := arg.(*UnionArg)
- current := -1
- for i, option := range t.Fields {
- if a.Option.Type().FieldName() == option.FieldName() {
- current = i
- break
- }
- }
- if current == -1 {
- panic("can't find current option in union")
- }
- newIdx := r.Intn(len(t.Fields) - 1)
- if newIdx >= current {
- newIdx++
- }
- optType := t.Fields[newIdx]
- p.removeArg(c, a.Option)
- opt, calls := r.generateArg(s, optType)
- arg1 := MakeUnionArg(t, opt)
- p.replaceArg(c, arg, arg1, calls)
- case *CsumType:
- panic("bad arg returned by mutationArgs: CsumType")
- case *ConstType:
- panic("bad arg returned by mutationArgs: ConstType")
- default:
- panic(fmt.Sprintf("bad arg returned by mutationArgs: %#v, type=%#v", arg, arg.Type()))
- }
-
- // Update base pointer if size has increased.
- if base != nil {
- b := base.(*PointerArg)
- if baseSize < b.Res.Size() {
- arg1, calls1 := r.addr(s, b.Type(), b.Res.Size(), b.Res)
- for _, c1 := range calls1 {
- p.Target.SanitizeCall(c1)
- }
- p.insertBefore(c, calls1)
- a1 := arg1.(*PointerArg)
- b.PageIndex = a1.PageIndex
- b.PageOffset = a1.PageOffset
- b.PagesNum = a1.PagesNum
- }
+ calls, ok := p.Target.mutateArg(r, s, arg, base, parent, &updateSizes)
+ if !ok {
+ retryArg = true
+ continue
}
-
- // Update all len fields.
+ p.insertBefore(c, calls)
if updateSizes {
p.Target.assignSizesCall(c)
}
+ p.Target.SanitizeCall(c)
}
default:
// Remove a random call.
@@ -264,345 +106,248 @@ func (p *Prog) Mutate(rs rand.Source, ncalls int, ct *ChoiceTable, corpus []*Pro
}
}
-// Minimize minimizes program p into an equivalent program using the equivalence
-// predicate pred. It iteratively generates simpler programs and asks pred
-// whether it is equal to the orginal program or not. If it is equivalent then
-// the simplification attempt is committed and the process continues.
-func Minimize(p0 *Prog, callIndex0 int, pred0 func(*Prog, int) bool, crash bool) (*Prog, int) {
- pred := pred0
- if debug {
- pred = func(p *Prog, callIndex int) bool {
- if err := p.validate(); err != nil {
- panic(err)
- }
- return pred0(p, callIndex)
- }
- }
- name0 := ""
- if callIndex0 != -1 {
- if callIndex0 < 0 || callIndex0 >= len(p0.Calls) {
- panic("bad call index")
+func (target *Target) mutateArg(r *randGen, s *state, arg, base Arg, parent *[]Arg, updateSizes *bool) (calls []*Call, ok bool) {
+ var baseSize uint64
+ if base != nil {
+ b, ok := base.(*PointerArg)
+ if !ok || b.Res == nil {
+ panic("bad base arg")
}
- name0 = p0.Calls[callIndex0].Meta.Name
+ baseSize = b.Res.Size()
}
-
- // Try to glue all mmap's together.
- s := analyze(nil, p0, nil)
- hi := -1
- lo := -1
- for i := 0; i < maxPages; i++ {
- if s.pages[i] {
- hi = i
- if lo == -1 {
- lo = i
+ switch t := arg.Type().(type) {
+ case *IntType, *FlagsType:
+ a := arg.(*ConstArg)
+ if r.bin() {
+ var newArg Arg
+ newArg, calls = r.generateArg(s, arg.Type())
+ replaceArg(arg, newArg)
+ } else {
+ switch {
+ case r.nOutOf(1, 3):
+ a.Val += uint64(r.Intn(4)) + 1
+ case r.nOutOf(1, 2):
+ a.Val -= uint64(r.Intn(4)) + 1
+ default:
+ a.Val ^= 1 << uint64(r.Intn(64))
}
}
- }
- if hi != -1 {
- p := p0.Clone()
- callIndex := callIndex0
- // Remove all mmaps.
- for i := 0; i < len(p.Calls); i++ {
- c := p.Calls[i]
- if i != callIndex && c.Meta == p.Target.MmapSyscall {
- p.removeCall(i)
- if i < callIndex {
- callIndex--
+ case *LenType:
+ if !r.mutateSize(arg.(*ConstArg), *parent) {
+ return nil, false
+ }
+ *updateSizes = false
+ case *ResourceType, *VmaType, *ProcType:
+ var newArg Arg
+ newArg, calls = r.generateArg(s, arg.Type())
+ replaceArg(arg, newArg)
+ case *BufferType:
+ a := arg.(*DataArg)
+ switch t.Kind {
+ case BufferBlobRand, BufferBlobRange:
+ data := append([]byte{}, a.Data()...)
+ minLen, maxLen := uint64(0), maxBlobLen
+ if t.Kind == BufferBlobRange {
+ minLen, maxLen = t.RangeBegin, t.RangeEnd
+ }
+ a.data = mutateData(r, data, minLen, maxLen)
+ case BufferString:
+ data := append([]byte{}, a.Data()...)
+ if r.bin() {
+ minLen, maxLen := uint64(0), maxBlobLen
+ if t.TypeSize != 0 {
+ minLen, maxLen = t.TypeSize, t.TypeSize
}
- i--
+ a.data = mutateData(r, data, minLen, maxLen)
+ } else {
+ a.data = r.randString(s, t)
+ }
+ case BufferFilename:
+ a.data = []byte(r.filename(s))
+ case BufferText:
+ data := append([]byte{}, a.Data()...)
+ a.data = r.mutateText(t.Text, data)
+ default:
+ panic("unknown buffer kind")
+ }
+ case *ArrayType:
+ a := arg.(*GroupArg)
+ count := uint64(0)
+ switch t.Kind {
+ case ArrayRandLen:
+ for count == uint64(len(a.Inner)) {
+ count = r.randArrayLen()
+ }
+ case ArrayRangeLen:
+ if t.RangeBegin == t.RangeEnd {
+ panic("trying to mutate fixed length array")
+ }
+ for count == uint64(len(a.Inner)) {
+ count = r.randRange(t.RangeBegin, t.RangeEnd)
}
}
- // Prepend uber-mmap.
- mmap := p0.Target.MakeMmap(uint64(lo), uint64(hi-lo)+1)
- p.Calls = append([]*Call{mmap}, p.Calls...)
- if callIndex != -1 {
- callIndex++
+ if count > uint64(len(a.Inner)) {
+ for count > uint64(len(a.Inner)) {
+ newArg, newCalls := r.generateArg(s, t.Type)
+ a.Inner = append(a.Inner, newArg)
+ calls = append(calls, newCalls...)
+ for _, c := range newCalls {
+ s.analyze(c)
+ }
+ }
+ } else if count < uint64(len(a.Inner)) {
+ for _, arg := range a.Inner[count:] {
+ removeArg(arg)
+ }
+ a.Inner = a.Inner[:count]
}
- if pred(p, callIndex) {
- p0 = p
- callIndex0 = callIndex
+ // TODO: swap elements of the array
+ case *PtrType:
+ a, ok := arg.(*PointerArg)
+ if !ok {
+ break
}
- }
-
- // Try to remove all calls except the last one one-by-one.
- for i := len(p0.Calls) - 1; i >= 0; i-- {
- if i == callIndex0 {
- continue
+ // TODO: we don't know size for out args
+ size := uint64(1)
+ if a.Res != nil {
+ size = a.Res.Size()
}
- callIndex := callIndex0
- if i < callIndex {
- callIndex--
+ var newArg Arg
+ newArg, calls = r.addr(s, t, size, a.Res)
+ replaceArg(arg, newArg)
+ case *StructType:
+ gen := target.SpecialTypes[t.Name()]
+ if gen == nil {
+ panic("bad arg returned by mutationArgs: StructType")
}
- p := p0.Clone()
- p.removeCall(i)
- if !pred(p, callIndex) {
- continue
+ var newArg Arg
+ newArg, calls = gen(&Gen{r, s}, t, arg)
+ for i, f := range newArg.(*GroupArg).Inner {
+ replaceArg(arg.(*GroupArg).Inner[i], f)
}
- p0 = p
- callIndex0 = callIndex
- }
-
- var triedPaths map[string]bool
-
- var rec func(p *Prog, call *Call, arg Arg, path string) bool
- rec = func(p *Prog, call *Call, arg Arg, path string) bool {
- path += fmt.Sprintf("-%v", arg.Type().FieldName())
- switch typ := arg.Type().(type) {
- case *StructType:
- a := arg.(*GroupArg)
- for _, innerArg := range a.Inner {
- if rec(p, call, innerArg, path) {
- return true
- }
- }
- case *UnionType:
+ case *UnionType:
+ if gen := target.SpecialTypes[t.Name()]; gen != nil {
+ var newArg Arg
+ newArg, calls = gen(&Gen{r, s}, t, arg)
+ replaceArg(arg, newArg)
+ } else {
a := arg.(*UnionArg)
- if rec(p, call, a.Option, path) {
- return true
- }
- case *PtrType:
- // TODO: try to remove optional ptrs
- a, ok := arg.(*PointerArg)
- if !ok {
- // Can also be *ConstArg.
- return false
- }
- if a.Res != nil {
- return rec(p, call, a.Res, path)
- }
- case *ArrayType:
- a := arg.(*GroupArg)
- for i, innerArg := range a.Inner {
- innerPath := fmt.Sprintf("%v-%v", path, i)
- if !triedPaths[innerPath] && !crash {
- if (typ.Kind == ArrayRangeLen && len(a.Inner) > int(typ.RangeBegin)) ||
- (typ.Kind == ArrayRandLen) {
- copy(a.Inner[i:], a.Inner[i+1:])
- a.Inner = a.Inner[:len(a.Inner)-1]
- p.removeArg(call, innerArg)
- p.Target.assignSizesCall(call)
-
- if pred(p, callIndex0) {
- p0 = p
- } else {
- triedPaths[innerPath] = true
- }
-
- return true
- }
- }
- if rec(p, call, innerArg, innerPath) {
- return true
- }
- }
- case *IntType, *FlagsType, *ProcType:
- // TODO: try to reset bits in ints
- // TODO: try to set separate flags
- if crash {
- return false
- }
- if triedPaths[path] {
- return false
- }
- triedPaths[path] = true
- a := arg.(*ConstArg)
- if a.Val == typ.Default() {
- return false
- }
- v0 := a.Val
- a.Val = typ.Default()
- if pred(p, callIndex0) {
- p0 = p
- return true
- } else {
- a.Val = v0
- }
- case *ResourceType:
- if crash {
- return false
- }
- if triedPaths[path] {
- return false
- }
- triedPaths[path] = true
- a := arg.(*ResultArg)
- if a.Res == nil {
- return false
- }
- r0 := a.Res
- a.Res = nil
- a.Val = typ.Default()
- if pred(p, callIndex0) {
- p0 = p
- return true
- } else {
- a.Res = r0
- a.Val = 0
- }
- case *BufferType:
- // TODO: try to set individual bytes to 0
- if triedPaths[path] {
- return false
- }
- triedPaths[path] = true
- if typ.Kind != BufferBlobRand && typ.Kind != BufferBlobRange ||
- typ.Dir() == DirOut {
- return false
- }
- a := arg.(*DataArg)
- minLen := int(typ.RangeBegin)
- for step := len(a.Data()) - minLen; len(a.Data()) > minLen && step > 0; {
- if len(a.Data())-step >= minLen {
- a.data = a.Data()[:len(a.Data())-step]
- p.Target.assignSizesCall(call)
- if pred(p, callIndex0) {
- continue
- }
- a.data = a.Data()[:len(a.Data())+step]
- p.Target.assignSizesCall(call)
- }
- step /= 2
- if crash {
+ current := -1
+ for i, option := range t.Fields {
+ if a.Option.Type().FieldName() == option.FieldName() {
+ current = i
break
}
}
- p0 = p
- case *VmaType, *LenType, *CsumType, *ConstType:
- // TODO: try to remove offset from vma
- return false
- default:
- panic(fmt.Sprintf("unknown arg type '%+v'", typ))
- }
- return false
- }
-
- // Try to minimize individual args.
- for i := 0; i < len(p0.Calls); i++ {
- triedPaths = make(map[string]bool)
- again:
- p := p0.Clone()
- call := p.Calls[i]
- for j, arg := range call.Args {
- if rec(p, call, arg, fmt.Sprintf("%v", j)) {
- goto again
+ if current == -1 {
+ panic("can't find current option in union")
}
+ newIdx := r.Intn(len(t.Fields) - 1)
+ if newIdx >= current {
+ newIdx++
+ }
+ optType := t.Fields[newIdx]
+ removeArg(a.Option)
+ var newOpt Arg
+ newOpt, calls = r.generateArg(s, optType)
+ replaceArg(arg, MakeUnionArg(t, newOpt))
}
+ case *CsumType:
+ panic("bad arg returned by mutationArgs: CsumType")
+ case *ConstType:
+ panic("bad arg returned by mutationArgs: ConstType")
+ default:
+ panic(fmt.Sprintf("bad arg returned by mutationArgs: %#v, type=%#v", arg, arg.Type()))
}
- if callIndex0 != -1 {
- if callIndex0 < 0 || callIndex0 >= len(p0.Calls) || name0 != p0.Calls[callIndex0].Meta.Name {
- panic(fmt.Sprintf("bad call index after minimization: ncalls=%v index=%v call=%v/%v",
- len(p0.Calls), callIndex0, name0, p0.Calls[callIndex0].Meta.Name))
+ // Update base pointer if size has increased.
+ if base != nil {
+ b := base.(*PointerArg)
+ if baseSize < b.Res.Size() {
+ newArg, newCalls := r.addr(s, b.Type(), b.Res.Size(), b.Res)
+ calls = append(calls, newCalls...)
+ a1 := newArg.(*PointerArg)
+ b.PageIndex = a1.PageIndex
+ b.PageOffset = a1.PageOffset
+ b.PagesNum = a1.PagesNum
}
}
- return p0, callIndex0
+ for _, c := range calls {
+ target.SanitizeCall(c)
+ }
+ return calls, true
}
-func (p *Prog) TrimAfter(idx int) {
- if idx < 0 || idx >= len(p.Calls) {
- panic("trimming non-existing call")
- }
- for i := len(p.Calls) - 1; i > idx; i-- {
- c := p.Calls[i]
- foreachArg(c, func(arg, _ Arg, _ *[]Arg) {
- if a, ok := arg.(*ResultArg); ok && a.Res != nil {
- if used, ok := a.Res.(ArgUsed); ok {
- delete(*used.Used(), arg)
- }
- }
- })
- }
- p.Calls = p.Calls[:idx+1]
+func (target *Target) mutationSubargs(arg0 Arg) (args, bases []Arg, parents []*[]Arg) {
+ ForeachSubarg(arg0, func(arg, base Arg, parent *[]Arg) {
+ if target.needMutateArg(arg, base, parent) {
+ args = append(args, arg)
+ bases = append(bases, base)
+ parents = append(parents, parent)
+ }
+ })
+ return
}
func (target *Target) mutationArgs(c *Call) (args, bases []Arg, parents []*[]Arg) {
foreachArg(c, func(arg, base Arg, parent *[]Arg) {
- switch typ := arg.Type().(type) {
- case *StructType:
- if target.SpecialStructs[typ.Name()] == nil {
- // For structs only individual fields are updated.
- return
- }
- // These special structs are mutated as a whole.
- case *UnionType:
- if len(typ.Fields) == 1 {
- return
- }
- case *ArrayType:
- // Don't mutate fixed-size arrays.
- if typ.Kind == ArrayRangeLen && typ.RangeBegin == typ.RangeEnd {
- return
- }
- case *CsumType:
- // Checksum is updated when the checksummed data changes.
- return
- case *ConstType:
- // Well, this is const.
- return
- case *BufferType:
- if typ.Kind == BufferString && len(typ.Values) == 1 {
- return // string const
- }
+ if target.needMutateArg(arg, base, parent) {
+ args = append(args, arg)
+ bases = append(bases, base)
+ parents = append(parents, parent)
}
- typ := arg.Type()
- if typ.Dir() == DirOut || !typ.Varlen() && typ.Size() == 0 {
- return
- }
- if base != nil {
- if _, ok := base.Type().(*StructType); ok &&
- target.SpecialStructs[base.Type().Name()] != nil {
- // These special structs are mutated as a whole.
- return
- }
- }
- args = append(args, arg)
- bases = append(bases, base)
- parents = append(parents, parent)
})
return
}
-func swap16(v uint16) uint16 {
- v0 := byte(v >> 0)
- v1 := byte(v >> 8)
- v = 0
- v |= uint16(v1) << 0
- v |= uint16(v0) << 8
- return v
-}
-
-func swap32(v uint32) uint32 {
- v0 := byte(v >> 0)
- v1 := byte(v >> 8)
- v2 := byte(v >> 16)
- v3 := byte(v >> 24)
- v = 0
- v |= uint32(v3) << 0
- v |= uint32(v2) << 8
- v |= uint32(v1) << 16
- v |= uint32(v0) << 24
- return v
-}
-
-func swap64(v uint64) uint64 {
- v0 := byte(v >> 0)
- v1 := byte(v >> 8)
- v2 := byte(v >> 16)
- v3 := byte(v >> 24)
- v4 := byte(v >> 32)
- v5 := byte(v >> 40)
- v6 := byte(v >> 48)
- v7 := byte(v >> 56)
- v = 0
- v |= uint64(v7) << 0
- v |= uint64(v6) << 8
- v |= uint64(v5) << 16
- v |= uint64(v4) << 24
- v |= uint64(v3) << 32
- v |= uint64(v2) << 40
- v |= uint64(v1) << 48
- v |= uint64(v0) << 56
- return v
+func (target *Target) needMutateArg(arg, base Arg, parent *[]Arg) bool {
+ switch typ := arg.Type().(type) {
+ case *StructType:
+ if target.SpecialTypes[typ.Name()] == nil {
+ // For structs only individual fields are updated.
+ return false
+ }
+ // These special structs are mutated as a whole.
+ case *UnionType:
+ if target.SpecialTypes[typ.Name()] == nil && len(typ.Fields) == 1 {
+ return false
+ }
+ case *ArrayType:
+ // Don't mutate fixed-size arrays.
+ if typ.Kind == ArrayRangeLen && typ.RangeBegin == typ.RangeEnd {
+ return false
+ }
+ case *CsumType:
+ // Checksum is updated when the checksummed data changes.
+ return false
+ case *ConstType:
+ // Well, this is const.
+ return false
+ case *BufferType:
+ if typ.Kind == BufferString && len(typ.Values) == 1 {
+ return false // string const
+ }
+ }
+ typ := arg.Type()
+ if typ.Dir() == DirOut || !typ.Varlen() && typ.Size() == 0 {
+ return false
+ }
+ if base != nil {
+ // TODO(dvyukov): need to check parent as well.
+ // Say, timespec can be part of another struct and base
+ // will point to that other struct, not timespec.
+ // Strictly saying, we need to check parents all way up,
+ // or better bail out from recursion when we reach
+ // a special struct.
+ _, isStruct := base.Type().(*StructType)
+ _, isUnion := base.Type().(*UnionType)
+ if (isStruct || isUnion) &&
+ target.SpecialTypes[base.Type().Name()] != nil {
+ // These special structs/unions are mutated as a whole.
+ return false
+ }
+ }
+ return true
}
func mutateData(r *randGen, data []byte, minLen, maxLen uint64) []byte {
@@ -779,3 +524,46 @@ loop:
}
return data
}
+
+func swap16(v uint16) uint16 {
+ v0 := byte(v >> 0)
+ v1 := byte(v >> 8)
+ v = 0
+ v |= uint16(v1) << 0
+ v |= uint16(v0) << 8
+ return v
+}
+
+func swap32(v uint32) uint32 {
+ v0 := byte(v >> 0)
+ v1 := byte(v >> 8)
+ v2 := byte(v >> 16)
+ v3 := byte(v >> 24)
+ v = 0
+ v |= uint32(v3) << 0
+ v |= uint32(v2) << 8
+ v |= uint32(v1) << 16
+ v |= uint32(v0) << 24
+ return v
+}
+
+func swap64(v uint64) uint64 {
+ v0 := byte(v >> 0)
+ v1 := byte(v >> 8)
+ v2 := byte(v >> 16)
+ v3 := byte(v >> 24)
+ v4 := byte(v >> 32)
+ v5 := byte(v >> 40)
+ v6 := byte(v >> 48)
+ v7 := byte(v >> 56)
+ v = 0
+ v |= uint64(v7) << 0
+ v |= uint64(v6) << 8
+ v |= uint64(v5) << 16
+ v |= uint64(v4) << 24
+ v |= uint64(v3) << 32
+ v |= uint64(v2) << 40
+ v |= uint64(v1) << 48
+ v |= uint64(v0) << 56
+ return v
+}
diff --git a/prog/prog.go b/prog/prog.go
index 7fb6cf2ff..ce3058ba0 100644
--- a/prog/prog.go
+++ b/prog/prog.go
@@ -345,15 +345,8 @@ func (p *Prog) insertBefore(c *Call, calls []*Call) {
p.Calls = newCalls
}
-// replaceArg replaces arg with arg1 in call c in program p, and inserts calls before arg call.
-func (p *Prog) replaceArg(c *Call, arg, arg1 Arg, calls []*Call) {
- if debug {
- p.replaceArgCheck(c, arg, arg1, calls)
- }
- for _, c := range calls {
- p.Target.SanitizeCall(c)
- }
- p.insertBefore(c, calls)
+// replaceArg replaces arg with arg1 in a program.
+func replaceArg(arg, arg1 Arg) {
switch a := arg.(type) {
case *ConstArg:
*a = *arg1.(*ConstArg)
@@ -368,7 +361,6 @@ func (p *Prog) replaceArg(c *Call, arg, arg1 Arg, calls []*Call) {
default:
panic(fmt.Sprintf("replaceArg: bad arg kind %#v", arg))
}
- p.Target.SanitizeCall(c)
}
func replaceResultArg(arg, arg1 *ResultArg) {
@@ -425,9 +417,9 @@ func (p *Prog) replaceArgCheck(c *Call, arg, arg1 Arg, calls []*Call) {
}
}
-// removeArg removes all references to/from arg0 of call c from p.
-func (p *Prog) removeArg(c *Call, arg0 Arg) {
- foreachSubarg(arg0, func(arg, _ Arg, _ *[]Arg) {
+// removeArg removes all references to/from arg0 from a program.
+func removeArg(arg0 Arg) {
+ ForeachSubarg(arg0, func(arg, _ Arg, _ *[]Arg) {
if a, ok := arg.(*ResultArg); ok && a.Res != nil {
if !(*a.Res.(ArgUsed).Used())[arg] {
panic("broken tree")
@@ -451,9 +443,9 @@ func (p *Prog) removeArg(c *Call, arg0 Arg) {
func (p *Prog) removeCall(idx int) {
c := p.Calls[idx]
for _, arg := range c.Args {
- p.removeArg(c, arg)
+ removeArg(arg)
}
- p.removeArg(c, c.Ret)
+ removeArg(c.Ret)
copy(p.Calls[idx:], p.Calls[idx+1:])
p.Calls = p.Calls[:len(p.Calls)-1]
}
diff --git a/prog/prog_test.go b/prog/prog_test.go
index a1bd4b83c..5c7cc4664 100644
--- a/prog/prog_test.go
+++ b/prog/prog_test.go
@@ -175,3 +175,31 @@ func testCrossArchProg(t *testing.T, p *Prog, crossTargets []*Target) {
crossTarget.OS, crossTarget.Arch, err, serialized)
}
}
+
+func TestSpecialStructs(t *testing.T) {
+ testEachTargetRandom(t, func(t *testing.T, target *Target, rs rand.Source, iters int) {
+ for special, gen := range target.SpecialTypes {
+ t.Run(special, func(t *testing.T) {
+ var typ Type
+ for i := 0; i < len(target.Syscalls) && typ == nil; i++ {
+ ForeachType(target.Syscalls[i], func(t Type) {
+ if s, ok := t.(*StructType); ok && s.Name() == special {
+ typ = s
+ }
+ if s, ok := t.(*UnionType); ok && s.Name() == special {
+ typ = s
+ }
+ })
+ }
+ if typ == nil {
+ t.Fatal("can't find struct description")
+ }
+ g := &Gen{newRand(target, rs), newState(target, nil)}
+ for i := 0; i < iters/len(target.SpecialTypes); i++ {
+ arg, _ := gen(g, typ, nil)
+ gen(g, typ, arg)
+ }
+ })
+ }
+ })
+}
diff --git a/prog/rand.go b/prog/rand.go
index d403a4f96..a67ca7d98 100644
--- a/prog/rand.go
+++ b/prog/rand.go
@@ -517,6 +517,10 @@ func (r *randGen) generateArgs(s *state, types []Type) ([]Arg, []*Call) {
}
func (r *randGen) generateArg(s *state, typ Type) (arg Arg, calls []*Call) {
+ return r.generateArgImpl(s, typ, false)
+}
+
+func (r *randGen) generateArgImpl(s *state, typ Type, ignoreSpecial bool) (arg Arg, calls []*Call) {
if typ.Dir() == DirOut {
// No need to generate something interesting for output scalar arguments.
// But we still need to generate the argument itself so that it can be referenced
@@ -666,19 +670,28 @@ func (r *randGen) generateArg(s *state, typ Type) (arg Arg, calls []*Call) {
}
return MakeGroupArg(a, inner), calls
case *StructType:
- if gen := r.target.SpecialStructs[a.Name()]; gen != nil && a.Dir() != DirOut {
- arg, calls = gen(&Gen{r, s}, a, nil)
- return
+ if !ignoreSpecial {
+ if gen := r.target.SpecialTypes[a.Name()]; gen != nil && a.Dir() != DirOut {
+ arg, calls = gen(&Gen{r, s}, a, nil)
+ return
+ }
}
args, calls := r.generateArgs(s, a.Fields)
group := MakeGroupArg(a, args)
return group, calls
case *UnionType:
+ if !ignoreSpecial {
+ if gen := r.target.SpecialTypes[a.Name()]; gen != nil && a.Dir() != DirOut {
+ arg, calls = gen(&Gen{r, s}, a, nil)
+ return
+ }
+ }
optType := a.Fields[r.Intn(len(a.Fields))]
opt, calls := r.generateArg(s, optType)
return MakeUnionArg(a, opt), calls
case *PtrType:
inner, calls := r.generateArg(s, a.Type)
+ // TODO(dvyukov): remove knowledge about iocb from prog.
if a.Type.Name() == "iocb" && len(s.resources["iocbptr"]) != 0 {
// It is weird, but these are actually identified by kernel by address.
// So try to reuse a previously used address.
diff --git a/prog/target.go b/prog/target.go
index 29fb100a6..323a5d136 100644
--- a/prog/target.go
+++ b/prog/target.go
@@ -39,14 +39,14 @@ type Target struct {
// SanitizeCall neutralizes harmful calls.
SanitizeCall func(c *Call)
- // SpecialStructs allows target to do custom generation/mutation for some struct types.
- // Map key is struct name for which custom generation/mutation is required.
+ // SpecialTypes allows target to do custom generation/mutation for some structs and unions.
+ // Map key is struct/union name for which custom generation/mutation is required.
// Map value is custom generation/mutation function that will be called
- // for the corresponding structs. g is helper object that allows generate random numbers,
- // allocate memory, etc. typ is the struct type. old is the old value of the struct
- // for mutation, or nil for generation. The function returns a new value of the struct,
+ // for the corresponding type. g is a helper object that allows generating random numbers,
+ // allocate memory, etc. typ is the struct/union type. old is the old value of the struct/union
+ // for mutation, or nil for generation. The function returns a new value of the struct/union,
// and optionally any calls that need to be inserted before the arg reference.
- SpecialStructs map[string]func(g *Gen, typ *StructType, old *GroupArg) (Arg, []*Call)
+ SpecialTypes map[string]func(g *Gen, typ Type, old Arg) (Arg, []*Call)
// Special strings that can matter for the target.
// Used as fallback when string type does not have own dictionary.
@@ -175,7 +175,36 @@ func (g *Gen) Alloc(ptrType Type, data Arg) (Arg, []*Call) {
}
func (g *Gen) GenerateArg(typ Type, pcalls *[]*Call) Arg {
- arg, calls := g.r.generateArg(g.s, typ)
+ return g.generateArg(typ, pcalls, false)
+}
+
+func (g *Gen) GenerateSpecialArg(typ Type, pcalls *[]*Call) Arg {
+ return g.generateArg(typ, pcalls, true)
+}
+
+func (g *Gen) generateArg(typ Type, pcalls *[]*Call, ignoreSpecial bool) Arg {
+ arg, calls := g.r.generateArgImpl(g.s, typ, ignoreSpecial)
*pcalls = append(*pcalls, calls...)
+ g.r.target.assignSizesArray([]Arg{arg})
return arg
}
+
+func (g *Gen) MutateArg(arg0 Arg) (calls []*Call) {
+ updateSizes := true
+ for stop := false; !stop; stop = g.r.oneOf(3) {
+ args, bases, parents := g.r.target.mutationSubargs(arg0)
+ if len(args) == 0 {
+ // TODO(dvyukov): probably need to return this condition
+ // and updateSizes to caller so that Mutate can act accordingly.
+ return
+ }
+ idx := g.r.Intn(len(args))
+ arg, base, parent := args[idx], bases[idx], parents[idx]
+ newCalls, ok := g.r.target.mutateArg(g.r, g.s, arg, base, parent, &updateSizes)
+ if !ok {
+ continue
+ }
+ calls = append(calls, newCalls...)
+ }
+ return calls
+}