aboutsummaryrefslogtreecommitdiffstats
path: root/pkg/flatrpc
diff options
context:
space:
mode:
authorDmitry Vyukov <dvyukov@google.com>2024-07-01 14:26:07 +0200
committerDmitry Vyukov <dvyukov@google.com>2024-07-22 08:35:47 +0000
commitdf655b64ffc2879b80e652329fb7a11508e50310 (patch)
treea721bbe875f7e9bc53cf2a297ce2ce7bd06bd204 /pkg/flatrpc
parentfb8445ca9a36aa91aed98a02092147cb88d49d9f (diff)
prog: restricts hints to at most 10 attempts per single kernel PC
We are getting too many generated candidates, the fuzzer may not keep up with them at all (hint jobs keep growing infinitely). If a hint indeed came from the input w/o transformation, then we should guess it on the first attempt (or at least after a few attempts). If it did not come from the input, or came with a non-trivial transformation, then any number of attempts won't help. So limit the total number of attempts (until the next restart).
Diffstat (limited to 'pkg/flatrpc')
-rw-r--r--pkg/flatrpc/flatrpc.fbs4
-rw-r--r--pkg/flatrpc/flatrpc.go43
-rw-r--r--pkg/flatrpc/flatrpc.h39
3 files changed, 68 insertions(+), 18 deletions(-)
diff --git a/pkg/flatrpc/flatrpc.fbs b/pkg/flatrpc/flatrpc.fbs
index 121b289e9..98fb2f5da 100644
--- a/pkg/flatrpc/flatrpc.fbs
+++ b/pkg/flatrpc/flatrpc.fbs
@@ -208,8 +208,12 @@ table CallInfoRaw {
}
struct ComparisonRaw {
+ pc :uint64;
op1 :uint64;
op2 :uint64;
+ // If is_const is set, op2 was a source code const (could not come from the input),
+ // otherwise both operands were dynamic and could come from the input.
+ is_const :bool;
}
table ProgInfoRaw {
diff --git a/pkg/flatrpc/flatrpc.go b/pkg/flatrpc/flatrpc.go
index 79c0d6cf2..aa8970ba9 100644
--- a/pkg/flatrpc/flatrpc.go
+++ b/pkg/flatrpc/flatrpc.go
@@ -2594,7 +2594,7 @@ func (rcv *CallInfoRaw) Comps(obj *ComparisonRaw, j int) bool {
o := flatbuffers.UOffsetT(rcv._tab.Offset(12))
if o != 0 {
x := rcv._tab.Vector(o)
- x += flatbuffers.UOffsetT(j) * 16
+ x += flatbuffers.UOffsetT(j) * 32
obj.Init(rcv._tab.Bytes, x)
return true
}
@@ -2634,26 +2634,30 @@ func CallInfoRawAddComps(builder *flatbuffers.Builder, comps flatbuffers.UOffset
builder.PrependUOffsetTSlot(4, flatbuffers.UOffsetT(comps), 0)
}
func CallInfoRawStartCompsVector(builder *flatbuffers.Builder, numElems int) flatbuffers.UOffsetT {
- return builder.StartVector(16, numElems, 8)
+ return builder.StartVector(32, numElems, 8)
}
func CallInfoRawEnd(builder *flatbuffers.Builder) flatbuffers.UOffsetT {
return builder.EndObject()
}
type ComparisonRawT struct {
- Op1 uint64 `json:"op1"`
- Op2 uint64 `json:"op2"`
+ Pc uint64 `json:"pc"`
+ Op1 uint64 `json:"op1"`
+ Op2 uint64 `json:"op2"`
+ IsConst bool `json:"is_const"`
}
func (t *ComparisonRawT) Pack(builder *flatbuffers.Builder) flatbuffers.UOffsetT {
if t == nil {
return 0
}
- return CreateComparisonRaw(builder, t.Op1, t.Op2)
+ return CreateComparisonRaw(builder, t.Pc, t.Op1, t.Op2, t.IsConst)
}
func (rcv *ComparisonRaw) UnPackTo(t *ComparisonRawT) {
+ t.Pc = rcv.Pc()
t.Op1 = rcv.Op1()
t.Op2 = rcv.Op2()
+ t.IsConst = rcv.IsConst()
}
func (rcv *ComparisonRaw) UnPack() *ComparisonRawT {
@@ -2678,24 +2682,41 @@ func (rcv *ComparisonRaw) Table() flatbuffers.Table {
return rcv._tab.Table
}
-func (rcv *ComparisonRaw) Op1() uint64 {
+func (rcv *ComparisonRaw) Pc() uint64 {
return rcv._tab.GetUint64(rcv._tab.Pos + flatbuffers.UOffsetT(0))
}
-func (rcv *ComparisonRaw) MutateOp1(n uint64) bool {
+func (rcv *ComparisonRaw) MutatePc(n uint64) bool {
return rcv._tab.MutateUint64(rcv._tab.Pos+flatbuffers.UOffsetT(0), n)
}
-func (rcv *ComparisonRaw) Op2() uint64 {
+func (rcv *ComparisonRaw) Op1() uint64 {
return rcv._tab.GetUint64(rcv._tab.Pos + flatbuffers.UOffsetT(8))
}
-func (rcv *ComparisonRaw) MutateOp2(n uint64) bool {
+func (rcv *ComparisonRaw) MutateOp1(n uint64) bool {
return rcv._tab.MutateUint64(rcv._tab.Pos+flatbuffers.UOffsetT(8), n)
}
-func CreateComparisonRaw(builder *flatbuffers.Builder, op1 uint64, op2 uint64) flatbuffers.UOffsetT {
- builder.Prep(8, 16)
+func (rcv *ComparisonRaw) Op2() uint64 {
+ return rcv._tab.GetUint64(rcv._tab.Pos + flatbuffers.UOffsetT(16))
+}
+func (rcv *ComparisonRaw) MutateOp2(n uint64) bool {
+ return rcv._tab.MutateUint64(rcv._tab.Pos+flatbuffers.UOffsetT(16), n)
+}
+
+func (rcv *ComparisonRaw) IsConst() bool {
+ return rcv._tab.GetBool(rcv._tab.Pos + flatbuffers.UOffsetT(24))
+}
+func (rcv *ComparisonRaw) MutateIsConst(n bool) bool {
+ return rcv._tab.MutateBool(rcv._tab.Pos+flatbuffers.UOffsetT(24), n)
+}
+
+func CreateComparisonRaw(builder *flatbuffers.Builder, pc uint64, op1 uint64, op2 uint64, isConst bool) flatbuffers.UOffsetT {
+ builder.Prep(8, 32)
+ builder.Pad(7)
+ builder.PrependBool(isConst)
builder.PrependUint64(op2)
builder.PrependUint64(op1)
+ builder.PrependUint64(pc)
return builder.Offset()
}
diff --git a/pkg/flatrpc/flatrpc.h b/pkg/flatrpc/flatrpc.h
index 7ce247d2e..8be575885 100644
--- a/pkg/flatrpc/flatrpc.h
+++ b/pkg/flatrpc/flatrpc.h
@@ -675,17 +675,39 @@ FLATBUFFERS_STRUCT_END(ExecOptsRaw, 24);
FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(8) ComparisonRaw FLATBUFFERS_FINAL_CLASS {
private:
+ uint64_t pc_;
uint64_t op1_;
uint64_t op2_;
+ uint8_t is_const_;
+ int8_t padding0__; int16_t padding1__; int32_t padding2__;
public:
ComparisonRaw()
- : op1_(0),
- op2_(0) {
- }
- ComparisonRaw(uint64_t _op1, uint64_t _op2)
- : op1_(flatbuffers::EndianScalar(_op1)),
- op2_(flatbuffers::EndianScalar(_op2)) {
+ : pc_(0),
+ op1_(0),
+ op2_(0),
+ is_const_(0),
+ padding0__(0),
+ padding1__(0),
+ padding2__(0) {
+ (void)padding0__;
+ (void)padding1__;
+ (void)padding2__;
+ }
+ ComparisonRaw(uint64_t _pc, uint64_t _op1, uint64_t _op2, bool _is_const)
+ : pc_(flatbuffers::EndianScalar(_pc)),
+ op1_(flatbuffers::EndianScalar(_op1)),
+ op2_(flatbuffers::EndianScalar(_op2)),
+ is_const_(flatbuffers::EndianScalar(static_cast<uint8_t>(_is_const))),
+ padding0__(0),
+ padding1__(0),
+ padding2__(0) {
+ (void)padding0__;
+ (void)padding1__;
+ (void)padding2__;
+ }
+ uint64_t pc() const {
+ return flatbuffers::EndianScalar(pc_);
}
uint64_t op1() const {
return flatbuffers::EndianScalar(op1_);
@@ -693,8 +715,11 @@ FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(8) ComparisonRaw FLATBUFFERS_FINAL_CLASS {
uint64_t op2() const {
return flatbuffers::EndianScalar(op2_);
}
+ bool is_const() const {
+ return flatbuffers::EndianScalar(is_const_) != 0;
+ }
};
-FLATBUFFERS_STRUCT_END(ComparisonRaw, 16);
+FLATBUFFERS_STRUCT_END(ComparisonRaw, 32);
struct ConnectRequestRawT : public flatbuffers::NativeTable {
typedef ConnectRequestRaw TableType;