diff options
| author | Dmitry Vyukov <dvyukov@google.com> | 2024-07-01 14:26:07 +0200 |
|---|---|---|
| committer | Dmitry Vyukov <dvyukov@google.com> | 2024-07-22 08:35:47 +0000 |
| commit | df655b64ffc2879b80e652329fb7a11508e50310 (patch) | |
| tree | a721bbe875f7e9bc53cf2a297ce2ce7bd06bd204 /pkg | |
| parent | fb8445ca9a36aa91aed98a02092147cb88d49d9f (diff) | |
prog: restrict hints to at most 10 attempts per single kernel PC
We are getting too many generated candidates, the fuzzer may not keep up
with them at all (hints jobs keep growing infinitely). If a hint indeed came
from the input w/o transformation, then we should guess it on the first
attempt (or at least after a few attempts). If it did not come from the input,
or came with a non-trivial transformation, then any number of attempts won't
help. So limit the total number of attempts (until the next restart).
Diffstat (limited to 'pkg')
| -rw-r--r-- | pkg/flatrpc/flatrpc.fbs | 4 | ||||
| -rw-r--r-- | pkg/flatrpc/flatrpc.go | 43 | ||||
| -rw-r--r-- | pkg/flatrpc/flatrpc.h | 39 | ||||
| -rw-r--r-- | pkg/fuzzer/fuzzer.go | 9 | ||||
| -rw-r--r-- | pkg/fuzzer/job.go | 4 | ||||
| -rw-r--r-- | pkg/rpcserver/runner.go | 9 | ||||
| -rw-r--r-- | pkg/runtest/run_test.go | 80 |
7 files changed, 122 insertions, 66 deletions
diff --git a/pkg/flatrpc/flatrpc.fbs b/pkg/flatrpc/flatrpc.fbs index 121b289e9..98fb2f5da 100644 --- a/pkg/flatrpc/flatrpc.fbs +++ b/pkg/flatrpc/flatrpc.fbs @@ -208,8 +208,12 @@ table CallInfoRaw { } struct ComparisonRaw { + pc :uint64; op1 :uint64; op2 :uint64; + // If is_const is set, op2 was a source code const (could not come from the input), + // otherwise both operands were dynamic and could come from the input. + is_const :bool; } table ProgInfoRaw { diff --git a/pkg/flatrpc/flatrpc.go b/pkg/flatrpc/flatrpc.go index 79c0d6cf2..aa8970ba9 100644 --- a/pkg/flatrpc/flatrpc.go +++ b/pkg/flatrpc/flatrpc.go @@ -2594,7 +2594,7 @@ func (rcv *CallInfoRaw) Comps(obj *ComparisonRaw, j int) bool { o := flatbuffers.UOffsetT(rcv._tab.Offset(12)) if o != 0 { x := rcv._tab.Vector(o) - x += flatbuffers.UOffsetT(j) * 16 + x += flatbuffers.UOffsetT(j) * 32 obj.Init(rcv._tab.Bytes, x) return true } @@ -2634,26 +2634,30 @@ func CallInfoRawAddComps(builder *flatbuffers.Builder, comps flatbuffers.UOffset builder.PrependUOffsetTSlot(4, flatbuffers.UOffsetT(comps), 0) } func CallInfoRawStartCompsVector(builder *flatbuffers.Builder, numElems int) flatbuffers.UOffsetT { - return builder.StartVector(16, numElems, 8) + return builder.StartVector(32, numElems, 8) } func CallInfoRawEnd(builder *flatbuffers.Builder) flatbuffers.UOffsetT { return builder.EndObject() } type ComparisonRawT struct { - Op1 uint64 `json:"op1"` - Op2 uint64 `json:"op2"` + Pc uint64 `json:"pc"` + Op1 uint64 `json:"op1"` + Op2 uint64 `json:"op2"` + IsConst bool `json:"is_const"` } func (t *ComparisonRawT) Pack(builder *flatbuffers.Builder) flatbuffers.UOffsetT { if t == nil { return 0 } - return CreateComparisonRaw(builder, t.Op1, t.Op2) + return CreateComparisonRaw(builder, t.Pc, t.Op1, t.Op2, t.IsConst) } func (rcv *ComparisonRaw) UnPackTo(t *ComparisonRawT) { + t.Pc = rcv.Pc() t.Op1 = rcv.Op1() t.Op2 = rcv.Op2() + t.IsConst = rcv.IsConst() } func (rcv *ComparisonRaw) UnPack() *ComparisonRawT { @@ -2678,24 
+2682,41 @@ func (rcv *ComparisonRaw) Table() flatbuffers.Table { return rcv._tab.Table } -func (rcv *ComparisonRaw) Op1() uint64 { +func (rcv *ComparisonRaw) Pc() uint64 { return rcv._tab.GetUint64(rcv._tab.Pos + flatbuffers.UOffsetT(0)) } -func (rcv *ComparisonRaw) MutateOp1(n uint64) bool { +func (rcv *ComparisonRaw) MutatePc(n uint64) bool { return rcv._tab.MutateUint64(rcv._tab.Pos+flatbuffers.UOffsetT(0), n) } -func (rcv *ComparisonRaw) Op2() uint64 { +func (rcv *ComparisonRaw) Op1() uint64 { return rcv._tab.GetUint64(rcv._tab.Pos + flatbuffers.UOffsetT(8)) } -func (rcv *ComparisonRaw) MutateOp2(n uint64) bool { +func (rcv *ComparisonRaw) MutateOp1(n uint64) bool { return rcv._tab.MutateUint64(rcv._tab.Pos+flatbuffers.UOffsetT(8), n) } -func CreateComparisonRaw(builder *flatbuffers.Builder, op1 uint64, op2 uint64) flatbuffers.UOffsetT { - builder.Prep(8, 16) +func (rcv *ComparisonRaw) Op2() uint64 { + return rcv._tab.GetUint64(rcv._tab.Pos + flatbuffers.UOffsetT(16)) +} +func (rcv *ComparisonRaw) MutateOp2(n uint64) bool { + return rcv._tab.MutateUint64(rcv._tab.Pos+flatbuffers.UOffsetT(16), n) +} + +func (rcv *ComparisonRaw) IsConst() bool { + return rcv._tab.GetBool(rcv._tab.Pos + flatbuffers.UOffsetT(24)) +} +func (rcv *ComparisonRaw) MutateIsConst(n bool) bool { + return rcv._tab.MutateBool(rcv._tab.Pos+flatbuffers.UOffsetT(24), n) +} + +func CreateComparisonRaw(builder *flatbuffers.Builder, pc uint64, op1 uint64, op2 uint64, isConst bool) flatbuffers.UOffsetT { + builder.Prep(8, 32) + builder.Pad(7) + builder.PrependBool(isConst) builder.PrependUint64(op2) builder.PrependUint64(op1) + builder.PrependUint64(pc) return builder.Offset() } diff --git a/pkg/flatrpc/flatrpc.h b/pkg/flatrpc/flatrpc.h index 7ce247d2e..8be575885 100644 --- a/pkg/flatrpc/flatrpc.h +++ b/pkg/flatrpc/flatrpc.h @@ -675,17 +675,39 @@ FLATBUFFERS_STRUCT_END(ExecOptsRaw, 24); FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(8) ComparisonRaw FLATBUFFERS_FINAL_CLASS { private: + uint64_t pc_; uint64_t 
op1_; uint64_t op2_; + uint8_t is_const_; + int8_t padding0__; int16_t padding1__; int32_t padding2__; public: ComparisonRaw() - : op1_(0), - op2_(0) { - } - ComparisonRaw(uint64_t _op1, uint64_t _op2) - : op1_(flatbuffers::EndianScalar(_op1)), - op2_(flatbuffers::EndianScalar(_op2)) { + : pc_(0), + op1_(0), + op2_(0), + is_const_(0), + padding0__(0), + padding1__(0), + padding2__(0) { + (void)padding0__; + (void)padding1__; + (void)padding2__; + } + ComparisonRaw(uint64_t _pc, uint64_t _op1, uint64_t _op2, bool _is_const) + : pc_(flatbuffers::EndianScalar(_pc)), + op1_(flatbuffers::EndianScalar(_op1)), + op2_(flatbuffers::EndianScalar(_op2)), + is_const_(flatbuffers::EndianScalar(static_cast<uint8_t>(_is_const))), + padding0__(0), + padding1__(0), + padding2__(0) { + (void)padding0__; + (void)padding1__; + (void)padding2__; + } + uint64_t pc() const { + return flatbuffers::EndianScalar(pc_); } uint64_t op1() const { return flatbuffers::EndianScalar(op1_); @@ -693,8 +715,11 @@ FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(8) ComparisonRaw FLATBUFFERS_FINAL_CLASS { uint64_t op2() const { return flatbuffers::EndianScalar(op2_); } + bool is_const() const { + return flatbuffers::EndianScalar(is_const_) != 0; + } }; -FLATBUFFERS_STRUCT_END(ComparisonRaw, 16); +FLATBUFFERS_STRUCT_END(ComparisonRaw, 32); struct ConnectRequestRawT : public flatbuffers::NativeTable { typedef ConnectRequestRaw TableType; diff --git a/pkg/fuzzer/fuzzer.go b/pkg/fuzzer/fuzzer.go index a2b2ef475..7ac8cba3e 100644 --- a/pkg/fuzzer/fuzzer.go +++ b/pkg/fuzzer/fuzzer.go @@ -24,10 +24,11 @@ type Fuzzer struct { Config *Config Cover *Cover - ctx context.Context - mu sync.Mutex - rnd *rand.Rand - target *prog.Target + ctx context.Context + mu sync.Mutex + rnd *rand.Rand + target *prog.Target + hintsLimiter prog.HintsLimiter ct *prog.ChoiceTable ctProgs int diff --git a/pkg/fuzzer/job.go b/pkg/fuzzer/job.go index 93d1cc354..99ff3c433 100644 --- a/pkg/fuzzer/job.go +++ b/pkg/fuzzer/job.go @@ -456,7 +456,7 @@ func 
(job *hintsJob) run(fuzzer *Fuzzer) { } got := make(prog.CompMap) for _, cmp := range result.Info.Calls[job.call].Comps { - got.AddComp(cmp.Op1, cmp.Op2) + got.Add(cmp.Pc, cmp.Op1, cmp.Op2, cmp.IsConst) } if i == 0 { comps = got @@ -465,6 +465,8 @@ func (job *hintsJob) run(fuzzer *Fuzzer) { } } + fuzzer.hintsLimiter.Limit(comps) + // Then mutate the initial program for every match between // a syscall argument and a comparison operand. // Execute each of such mutants to check if it gives new coverage. diff --git a/pkg/rpcserver/runner.go b/pkg/rpcserver/runner.go index 691a5b5d5..21b270421 100644 --- a/pkg/rpcserver/runner.go +++ b/pkg/rpcserver/runner.go @@ -420,6 +420,15 @@ func (runner *Runner) convertCallInfo(call *flatrpc.CallInfo) { call.Cover = runner.canonicalizer.Canonicalize(call.Cover) call.Signal = runner.canonicalizer.Canonicalize(call.Signal) + call.Comps = slices.DeleteFunc(call.Comps, func(cmp *flatrpc.Comparison) bool { + converted := runner.canonicalizer.Canonicalize([]uint64{cmp.Pc}) + if len(converted) == 0 { + return true + } + cmp.Pc = converted[0] + return false + }) + // Check signal belongs to kernel addresses. // Mismatching addresses can mean either corrupted VM memory, or that the fuzzer somehow // managed to inject output signal. If we see any bogus signal, drop whole signal diff --git a/pkg/runtest/run_test.go b/pkg/runtest/run_test.go index 1458a1c9a..3da87dd6c 100644 --- a/pkg/runtest/run_test.go +++ b/pkg/runtest/run_test.go @@ -151,7 +151,7 @@ type CoverTest struct { Flags flatrpc.ExecFlag Cover []uint64 Signal []uint64 - Comps [][2]uint64 + Comps []*flatrpc.Comparison } type Comparison struct { @@ -252,60 +252,58 @@ func testCover(t *testing.T, target *prog.Target) { Is64Bit: true, Input: makeComps( // A normal 8-byte comparison must be returned in the output as is. 
- Comparison{CmpSize8 | CmpConst, 0x1111111111111111, 0x2222222222222222, 0}, + Comparison{CmpSize8 | CmpConst, 0x1111111111111111, 0x2222222222222222, 1}, // Duplicate must be removed. - Comparison{CmpSize8 | CmpConst, 0x1111111111111111, 0x2222222222222222, 0}, + Comparison{CmpSize8 | CmpConst, 0x1111111111111111, 0x2222222222222222, 1}, // Non-const comparisons must be duplicated both ways. - Comparison{CmpSize8, 0x30, 0x31, 0}, + Comparison{CmpSize8, 0x30, 0x31, 1}, // Test sign-extension for smaller argument types. - Comparison{CmpSize1 | CmpConst, 0xa3, 0x77, 0}, - Comparison{CmpSize1 | CmpConst, 0xff10, 0xffe1, 0}, - Comparison{CmpSize2 | CmpConst, 0xabcd, 0x4321, 0}, - Comparison{CmpSize4 | CmpConst, 0xabcd1234, 0x4321, 0}, + Comparison{CmpSize1 | CmpConst, 0xa3, 0x77, 1}, + Comparison{CmpSize1 | CmpConst, 0xff10, 0xffe1, 1}, + Comparison{CmpSize2 | CmpConst, 0xabcd, 0x4321, 1}, + Comparison{CmpSize4 | CmpConst, 0xabcd1234, 0x4321, 1}, // Comparison with const 0 must be removed. - Comparison{CmpSize8 | CmpConst, 0, 0x2222222222222222, 0}, - Comparison{CmpSize8, 0, 0x3333, 0}, + Comparison{CmpSize8 | CmpConst, 0, 0x2222222222222222, 1}, + Comparison{CmpSize8, 0, 0x3333, 1}, // Comparison of equal values must be removed. - Comparison{CmpSize8, 0, 0, 0}, - Comparison{CmpSize8, 0x1111, 0x1111, 0}, + Comparison{CmpSize8, 0, 0, 1}, + Comparison{CmpSize8, 0x1111, 0x1111, 1}, // Comparisons of kernel addresses must be removed. - Comparison{CmpSize8 | CmpConst, 0xda1a0000, 0xda1a1000, 0}, - Comparison{CmpSize8, 0xda1a0000, 0, 0}, - Comparison{CmpSize8, 0, 0xda1a0010, 0}, - Comparison{CmpSize8 | CmpConst, 0xc0dec0dec0de0000, 0xc0dec0dec0de1000, 0}, + Comparison{CmpSize8 | CmpConst, 0xda1a0000, 0xda1a1000, 1}, + Comparison{CmpSize8, 0xda1a0000, 0, 1}, + Comparison{CmpSize8, 0, 0xda1a0010, 1}, + Comparison{CmpSize8 | CmpConst, 0xc0dec0dec0de0000, 0xc0dec0dec0de1000, 1}, // But not with something that's not a kernel address. 
- Comparison{CmpSize8 | CmpConst, 0xda1a0010, 0xabcd, 0}, + Comparison{CmpSize8 | CmpConst, 0xda1a0010, 0xabcd, 1}, ), Flags: flatrpc.ExecFlagCollectComps, - Comps: [][2]uint64{ - {0x2222222222222222, 0x1111111111111111}, - {0x30, 0x31}, - {0x31, 0x30}, - {0x77, 0xffffffffffffffa3}, - {0xffffffffffffffe1, 0x10}, - {0x4321, 0xffffffffffffabcd}, - {0x4321, 0xffffffffabcd1234}, - {0x3333, 0}, - {0, 0x3333}, - {0xabcd, 0xda1a0010}, + Comps: []*flatrpc.Comparison{ + {Pc: 1, Op1: 0x2222222222222222, Op2: 0x1111111111111111, IsConst: true}, + {Pc: 1, Op1: 0x31, Op2: 0x30, IsConst: false}, + {Pc: 1, Op1: 0x77, Op2: 0xffffffffffffffa3, IsConst: true}, + {Pc: 1, Op1: 0xffffffffffffffe1, Op2: 0x10, IsConst: true}, + {Pc: 1, Op1: 0x4321, Op2: 0xffffffffffffabcd, IsConst: true}, + {Pc: 1, Op1: 0x4321, Op2: 0xffffffffabcd1234, IsConst: true}, + {Pc: 1, Op1: 0x3333, Op2: 0, IsConst: false}, + {Pc: 1, Op1: 0xabcd, Op2: 0xda1a0010, IsConst: true}, }, }, // 32-bit comparisons must be the same, so test only a subset. 
{ Is64Bit: false, Input: makeComps( - Comparison{CmpSize8 | CmpConst, 0x1111111111111111, 0x2222222222222222, 0}, - Comparison{CmpSize2 | CmpConst, 0xabcd, 0x4321, 0}, - Comparison{CmpSize4 | CmpConst, 0xda1a0000, 0xda1a1000, 0}, - Comparison{CmpSize8 | CmpConst, 0xc0dec0dec0de0000, 0xc0dec0dec0de1000, 0}, - Comparison{CmpSize4 | CmpConst, 0xc0de0000, 0xc0de1000, 0}, - Comparison{CmpSize4 | CmpConst, 0xc0de0011, 0xc0de1022, 0}, + Comparison{CmpSize8 | CmpConst, 0x1111111111111111, 0x2222222222222222, 1}, + Comparison{CmpSize2 | CmpConst, 0xabcd, 0x4321, 2}, + Comparison{CmpSize4 | CmpConst, 0xda1a0000, 0xda1a1000, 1}, + Comparison{CmpSize8 | CmpConst, 0xc0dec0dec0de0000, 0xc0dec0dec0de1000, 3}, + Comparison{CmpSize4 | CmpConst, 0xc0de0000, 0xc0de1000, 1}, + Comparison{CmpSize4 | CmpConst, 0xc0de0011, 0xc0de1022, 1}, ), Flags: flatrpc.ExecFlagCollectComps, - Comps: [][2]uint64{ - {0x2222222222222222, 0x1111111111111111}, - {0x4321, 0xffffffffffffabcd}, - {0xc0dec0dec0de1000, 0xc0dec0dec0de0000}, + Comps: []*flatrpc.Comparison{ + {Pc: 1, Op1: 0x2222222222222222, Op2: 0x1111111111111111, IsConst: true}, + {Pc: 2, Op1: 0x4321, Op2: 0xffffffffffffabcd, IsConst: true}, + {Pc: 3, Op1: 0xc0dec0dec0de1000, Op2: 0xc0dec0dec0de0000, IsConst: true}, }, }, // Test max signal. @@ -405,10 +403,6 @@ func testCover1(t *testing.T, ctx context.Context, target *prog.Target, test Cov t.Fatalf("program execution failed: status=%v err=%v\n%s", res.Status, res.Err, res.Output) } call := res.Info.Calls[0] - var comps [][2]uint64 - for _, cmp := range call.Comps { - comps = append(comps, [2]uint64{cmp.Op1, cmp.Op2}) - } if test.Cover == nil { test.Cover = []uint64{} } @@ -418,7 +412,7 @@ func testCover1(t *testing.T, ctx context.Context, target *prog.Target, test Cov assert.Equal(t, test.Cover, call.Cover) assert.Equal(t, test.Signal, call.Signal) // Comparisons are reordered and order does not matter, so compare without order. 
- assert.ElementsMatch(t, test.Comps, comps) + assert.ElementsMatch(t, test.Comps, call.Comps) } func makeCover64(pcs ...uint64) []byte { |
