diff options
| author | Andrew Turner <andrew@fubar.geek.nz> | 2019-06-03 13:19:51 +0000 |
|---|---|---|
| committer | Dmitry Vyukov <dvyukov@google.com> | 2019-06-04 16:24:36 +0200 |
| commit | bfb4a51e30c8c04658a2675333b9b89a9d327c4a (patch) | |
| tree | 7a0bd32ab317f8652e12bed660c84eb97984c693 /executor/executor.cc | |
| parent | ad87cdf3c743711389a67930d722e89c127ab1e5 (diff) | |
executor: Protect the coverage buffer
Add functions to protect and unprotect the coverage buffer. The buffer is
protected from being written to while tracing. When the trace data is
sorted we need to make it read/write, but we can return it to read-only
after this has completed.
Leave the first page as read/write, as we need to clear the length field.
Diffstat (limited to 'executor/executor.cc')
| -rw-r--r-- | executor/executor.cc | 6 |
1 files changed, 6 insertions, 0 deletions
diff --git a/executor/executor.cc b/executor/executor.cc
index df38de3b9..29ba33867 100644
--- a/executor/executor.cc
+++ b/executor/executor.cc
@@ -387,8 +387,10 @@ int main(int argc, char** argv)
 	for (int i = 0; i < kMaxThreads; i++) {
 		threads[i].cov.fd = kCoverFd + i;
 		cover_open(&threads[i].cov, false);
+		cover_protect(&threads[i].cov);
 	}
 	cover_open(&extra_cov, true);
+	cover_protect(&extra_cov);
 	if (flag_extra_cover) {
 		// Don't enable comps because we don't use them in the fuzzer yet.
 		cover_enable(&extra_cov, false, true);
@@ -846,8 +848,10 @@ void write_coverage_signal(cover_t* cov, uint32* signal_count_pos, uint32* cover
 	uint32 cover_size = cov->size;
 	if (flag_dedup_cover) {
 		cover_data_t* end = cover_data + cover_size;
+		cover_unprotect(cov);
 		std::sort(cover_data, end);
 		cover_size = std::unique(cover_data, end) - cover_data;
+		cover_protect(cov);
 	}
 	// Truncate PCs to uint32 assuming that they fit into 32-bits.
 	// True for x86_64 and arm64 without KASLR.
@@ -932,8 +936,10 @@ void write_call_output(thread_t* th, bool finished)
 	kcov_comparison_t* end = start + ncomps;
 	if ((char*)end > th->cov.data_end)
 		fail("too many comparisons %u", ncomps);
+	cover_unprotect(&th->cov);
 	std::sort(start, end);
 	ncomps = std::unique(start, end) - start;
+	cover_protect(&th->cov);
 	uint32 comps_size = 0;
 	for (uint32 i = 0; i < ncomps; ++i) {
 		if (start[i].ignore())
