// Copyright 2018 syzkaller project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.

// NOTE(review): this file appears to have had its embedded HTML markup stripped
// during extraction: several interpreted string literals are split across line
// breaks (which is not valid Go), several fmt.Sprintf calls carry more arguments
// than format verbs, strings.NewReplacer maps characters to themselves, and the
// templates below contain orphaned actions. All such tokens are kept byte-identical
// here; restore the markup from the canonical source before building.

package cover

import (
	"bufio"
	"bytes"
	"encoding/csv"
	"fmt"
	"html"
	"html/template"
	"io"
	"io/ioutil"
	"math"
	"net/http"
	"path/filepath"
	"sort"
	"strconv"
	"strings"

	"github.com/google/syzkaller/pkg/cover/backend"
	"github.com/google/syzkaller/pkg/mgrconfig"
	"github.com/google/syzkaller/sys/targets"
)

// fixUpPCs post-processes program PC lists before report generation:
// when coverFilter is non-nil it keeps only PCs present in the filter,
// and on arm64 it subtracts the 0x18 .plt offset from kernel-image PCs.
func fixUpPCs(target string, progs []Prog, coverFilter map[uint32]uint32) []Prog {
	if coverFilter != nil {
		// NOTE(review): prog is a per-iteration copy of the slice element, so
		// "prog.PCs = nPCs" mutates the copy, not progs[i]; the same pattern is
		// used in the arm64 loop below. Unless Prog.PCs is deliberately shared,
		// the rewritten lists appear to be discarded — confirm against the Prog
		// definition and callers.
		for _, prog := range progs {
			var nPCs []uint64
			for _, pc := range prog.PCs {
				if coverFilter[uint32(pc)] != 0 {
					nPCs = append(nPCs, pc)
				}
			}
			prog.PCs = nPCs
		}
	}
	// On arm64 as PLT is enabled by default, .text section is loaded after .plt section,
	// so there is 0x18 bytes offset from module load address for .text section
	// we need to remove the 0x18 bytes offset in order to correct module symbol address
	if target == targets.ARM64 {
		for _, prog := range progs {
			var nPCs []uint64
			for _, pc := range prog.PCs {
				// TODO: avoid to hardcode the address
				if pc < 0xffffffd010000000 {
					pc -= 0x18
				}
				nPCs = append(nPCs, pc)
			}
			prog.PCs = nPCs
		}
	}
	return progs
}

// DoHTML renders the full HTML coverage report for progs into w.
// It builds a directory tree of per-file coverage (templateDir/templateFile),
// embeds annotated source for every file with coverage, and fails only if
// not a single source file could be opened.
func (rg *ReportGenerator) DoHTML(w io.Writer, progs []Prog, coverFilter map[uint32]uint32) error {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	files, err := rg.prepareFileMap(progs)
	if err != nil {
		return err
	}
	d := &templateData{
		Root:     new(templateDir),
		RawCover: rg.rawCoverEnabled,
	}
	// Per-line program counts are only meaningful when we have actual programs
	// (more than one prog, or a single prog with non-empty data).
	haveProgs := len(progs) > 1 || progs[0].Data != ""
	fileOpenErr := fmt.Errorf("failed to open/locate any source file")
	for fname, file := range files {
		// Walk fname component by component, materializing intermediate
		// templateDir nodes under d.Root as needed.
		pos := d.Root
		path := ""
		for {
			if path != "" {
				path += "/"
			}
			sep := strings.IndexByte(fname, filepath.Separator)
			if sep == -1 {
				path += fname
				break
			}
			dir := fname[:sep]
			path += dir
			if pos.Dirs == nil {
				pos.Dirs = make(map[string]*templateDir)
			}
			if pos.Dirs[dir] == nil {
				pos.Dirs[dir] = &templateDir{
					templateBase: templateBase{
						Path: path,
						Name: dir,
					},
				}
			}
			pos = pos.Dirs[dir]
			fname = fname[sep+1:]
		}
		f := &templateFile{
			templateBase: templateBase{
				Path:    path,
				Name:    fname,
				Total:   file.totalPCs,
				Covered: file.coveredPCs,
			},
			HasFunctions: len(file.functions) != 0,
		}
		pos.Files = append(pos.Files, f)
		if file.coveredPCs == 0 {
			continue
		}
		addFunctionCoverage(file, d)
		contents := ""
		lines, err := parseFile(file.filename)
		if err == nil {
			contents = fileContents(file, lines, haveProgs)
			fileOpenErr = nil
		} else {
			// We ignore individual errors of opening/locating source files
			// because there is a number of reasons when/why it can happen.
			// We fail only if we can't open/locate any single source file.
			// syz-ci can mess state of source files (https://github.com/google/syzkaller/issues/1770),
			// or bazel lies about location of auto-generated files,
			// or a user can update source files with git pull/checkout.
			contents = html.EscapeString(err.Error())
			if fileOpenErr != nil {
				fileOpenErr = err
			}
		}
		d.Contents = append(d.Contents, template.HTML(contents))
		f.Index = len(d.Contents) - 1
	}
	if fileOpenErr != nil {
		return fileOpenErr
	}
	for _, prog := range progs {
		d.Progs = append(d.Progs, templateProg{
			Sig:     prog.Sig,
			Content: template.HTML(html.EscapeString(prog.Data)),
		})
	}
	processDir(d.Root)
	return coverTemplate.Execute(w, d)
}

// DoRawCoverFiles writes a CSV-like plain-text dump of all symbolized frames,
// sorted by PC, with per-frame module offsets.
func (rg *ReportGenerator) DoRawCoverFiles(w http.ResponseWriter, progs []Prog, coverFilter map[uint32]uint32) error {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	if err := rg.lazySymbolize(progs); err != nil {
		return err
	}
	sort.Slice(rg.Frames, func(i, j int) bool {
		return rg.Frames[i].PC < rg.Frames[j].PC
	})
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	buf := bufio.NewWriter(w)
	// NOTE(review): the header names six columns (…,StartLine,EndLine) but each
	// row below emits only five values and five verbs — an EndLine field/verb
	// may have been lost; confirm against the canonical source.
	fmt.Fprintf(buf, "PC,Module,Offset,Filename,StartLine,EndLine\n")
	for _, frame := range rg.Frames {
		offset := frame.PC - frame.Module.Addr
		fmt.Fprintf(buf, "0x%x,%v,0x%x,%v,%v\n", frame.PC, frame.Module.Name, offset, frame.Name, frame.StartLine)
	}
	buf.Flush()
	return nil
}

// DoRawCover writes the sorted, de-duplicated list of covered PCs as plain text,
// one hex PC per line. With raw cover enabled and exactly one prog, the prog's
// PC order is preserved verbatim.
func (rg *ReportGenerator) DoRawCover(w http.ResponseWriter, progs []Prog, coverFilter map[uint32]uint32) {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	var pcs []uint64
	if len(progs) == 1 && rg.rawCoverEnabled {
		pcs = append([]uint64{}, progs[0].PCs...)
	} else {
		uniquePCs := make(map[uint64]bool)
		for _, prog := range progs {
			for _, pc := range prog.PCs {
				if uniquePCs[pc] {
					continue
				}
				uniquePCs[pc] = true
				pcs = append(pcs, pc)
			}
		}
		sort.Slice(pcs, func(i, j int) bool {
			return pcs[i] < pcs[j]
		})
	}
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	buf := bufio.NewWriter(w)
	for _, pc := range pcs {
		fmt.Fprintf(buf, "0x%x\n", pc)
	}
	buf.Flush()
}

// DoFilterPCs writes the sorted, de-duplicated subset of covered PCs that are
// present in coverFilter, one hex PC per line.
func (rg *ReportGenerator) DoFilterPCs(w http.ResponseWriter, progs []Prog, coverFilter map[uint32]uint32) {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	var pcs []uint64
	uniquePCs := make(map[uint64]bool)
	for _, prog := range progs {
		for _, pc := range prog.PCs {
			if uniquePCs[pc] {
				continue
			}
			uniquePCs[pc] = true
			if coverFilter[uint32(pc)] != 0 {
				pcs = append(pcs, pc)
			}
		}
	}
	sort.Slice(pcs, func(i, j int) bool {
		return pcs[i] < pcs[j]
	})
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	buf := bufio.NewWriter(w)
	for _, pc := range pcs {
		fmt.Fprintf(buf, "0x%x\n", pc)
	}
	buf.Flush()
}

// fileStats aggregates per-file coverage counters used by the CSV and
// table reports.
type fileStats struct {
	Name                       string
	Module                     string
	CoveredLines               int
	TotalLines                 int
	CoveredPCs                 int
	TotalPCs                   int
	TotalFunctions             int
	CoveredFunctions           int
	CoveredPCsInFunctions      int
	TotalPCsInCoveredFunctions int
	TotalPCsInFunctions        int
}

// csvFilesHeader is the column order emitted by DoCSVFiles.
var csvFilesHeader = []string{
	"Filename",
	"CoveredLines",
	"TotalLines",
	"CoveredPCs",
	"TotalPCs",
	"TotalFunctions",
	"CoveredPCsInFunctions",
	"TotalPCsInFunctions",
	"TotalPCsInCoveredFunctions",
}

// convertToStats collapses the per-file coverage map into a flat []fileStats.
// Files whose source cannot be opened are skipped (with a message to stdout).
func (rg *ReportGenerator) convertToStats(progs []Prog) ([]fileStats, error) {
	files, err := rg.prepareFileMap(progs)
	if err != nil {
		return nil, err
	}
	var data []fileStats
	for fname, file := range files {
		lines, err := parseFile(file.filename)
		if err != nil {
			fmt.Printf("failed to open/locate %s\n", file.filename)
			continue
		}
		totalFuncs := len(file.functions)
		var coveredInFunc int
		var pcsInFunc int
		var pcsInCoveredFunc int
		var coveredFunc int
		for _, function := range file.functions {
			coveredInFunc += function.covered
			if function.covered != 0 {
				pcsInCoveredFunc += function.pcs
				coveredFunc++
			}
			pcsInFunc += function.pcs
		}
		totalLines := len(lines)
		var coveredLines int
		for _, line := range file.lines {
			if len(line.progCount) != 0 {
				coveredLines++
			}
		}
		data = append(data, fileStats{
			Name:                       fname,
			Module:                     file.module,
			CoveredLines:               coveredLines,
			TotalLines:                 totalLines,
			CoveredPCs:                 file.coveredPCs,
			TotalPCs:                   file.totalPCs,
			TotalFunctions:             totalFuncs,
			CoveredFunctions:           coveredFunc,
			CoveredPCsInFunctions:      coveredInFunc,
			TotalPCsInFunctions:        pcsInFunc,
			TotalPCsInCoveredFunctions: pcsInCoveredFunc,
		})
	}
	return data, nil
}

// DoCSVFiles writes per-file coverage statistics as CSV, sorted by filename,
// using csvFilesHeader as the column order.
func (rg *ReportGenerator) DoCSVFiles(w io.Writer, progs []Prog, coverFilter map[uint32]uint32) error {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	data, err := rg.convertToStats(progs)
	if err != nil {
		return err
	}
	sort.SliceStable(data, func(i, j int) bool {
		return data[i].Name < data[j].Name
	})
	writer := csv.NewWriter(w)
	defer writer.Flush()
	if err := writer.Write(csvFilesHeader); err != nil {
		return err
	}
	var d [][]string
	for _, dt := range data {
		d = append(d, []string{
			dt.Name,
			strconv.Itoa(dt.CoveredLines),
			strconv.Itoa(dt.TotalLines),
			strconv.Itoa(dt.CoveredPCs),
			strconv.Itoa(dt.TotalPCs),
			strconv.Itoa(dt.TotalFunctions),
			strconv.Itoa(dt.CoveredPCsInFunctions),
			strconv.Itoa(dt.TotalPCsInFunctions),
			strconv.Itoa(dt.TotalPCsInCoveredFunctions),
		})
	}
	return writer.WriteAll(d)
}

// groupCoverByFilePrefixes aggregates file stats into per-subsystem summary
// rows keyed by subsystem name; a file is attributed to a subsystem when its
// name has any of the subsystem's path prefixes. Values are preformatted
// "covered / total / percent" strings for the table template.
func groupCoverByFilePrefixes(datas []fileStats, subsystems []mgrconfig.Subsystem) map[string]map[string]string {
	d := make(map[string]map[string]string)
	for _, subsystem := range subsystems {
		var coveredLines int
		var totalLines int
		var coveredPCsInFile int
		var totalPCsInFile int
		var totalFuncs int
		var coveredFuncs int
		var coveredPCsInFuncs int
		var pcsInCoveredFuncs int
		var pcsInFuncs int
		var percentLines float64
		var percentPCsInFile float64
		var percentPCsInFunc float64
		var percentPCsInCoveredFunc float64
		var percentCoveredFunc float64
		for _, path := range subsystem.Paths {
			for _, data := range datas {
				if !strings.HasPrefix(data.Name, path) {
					continue
				}
				coveredLines += data.CoveredLines
				totalLines += data.TotalLines
				coveredPCsInFile += data.CoveredPCs
				totalPCsInFile += data.TotalPCs
				totalFuncs += data.TotalFunctions
				coveredFuncs += data.CoveredFunctions
				coveredPCsInFuncs += data.CoveredPCsInFunctions
				pcsInFuncs += data.TotalPCsInFunctions
				pcsInCoveredFuncs += data.TotalPCsInCoveredFunctions
			}
		}
		// Guard every ratio against a zero denominator (percent stays 0.0).
		if totalLines != 0 {
			percentLines = 100.0 * float64(coveredLines) / float64(totalLines)
		}
		if totalPCsInFile != 0 {
			percentPCsInFile = 100.0 * float64(coveredPCsInFile) / float64(totalPCsInFile)
		}
		if pcsInFuncs != 0 {
			percentPCsInFunc = 100.0 * float64(coveredPCsInFuncs) / float64(pcsInFuncs)
		}
		if pcsInCoveredFuncs != 0 {
			percentPCsInCoveredFunc = 100.0 * float64(coveredPCsInFuncs) / float64(pcsInCoveredFuncs)
		}
		if totalFuncs != 0 {
			percentCoveredFunc = 100.0 * float64(coveredFuncs) / float64(totalFuncs)
		}
		d[subsystem.Name] = map[string]string{
			"name":              subsystem.Name,
			"lines":             fmt.Sprintf("%v / %v / %.2f%%", coveredLines, totalLines, percentLines),
			"PCsInFiles":        fmt.Sprintf("%v / %v / %.2f%%", coveredPCsInFile, totalPCsInFile, percentPCsInFile),
			"Funcs":             fmt.Sprintf("%v / %v / %.2f%%", coveredFuncs, totalFuncs, percentCoveredFunc),
			"PCsInFuncs":        fmt.Sprintf("%v / %v / %.2f%%", coveredPCsInFuncs, pcsInFuncs, percentPCsInFunc),
			"PCsInCoveredFuncs": fmt.Sprintf("%v / %v / %.2f%%", coveredPCsInFuncs, pcsInCoveredFuncs, percentPCsInCoveredFunc),
		}
	}
	return d
}

// DoHTMLTable renders the per-subsystem coverage summary table.
func (rg *ReportGenerator) DoHTMLTable(w io.Writer, progs []Prog, coverFilter map[uint32]uint32) error {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	data, err := rg.convertToStats(progs)
	if err != nil {
		return err
	}
	d := groupCoverByFilePrefixes(data, rg.subsystem)
	return coverTableTemplate.Execute(w, d)
}

// groupCoverByModule aggregates file stats into per-kernel-module summary rows
// keyed by module name, mirroring groupCoverByFilePrefixes.
func groupCoverByModule(datas []fileStats) map[string]map[string]string {
	d := make(map[string]map[string]string)
	coveredLines := make(map[string]int)
	totalLines := make(map[string]int)
	coveredPCsInFile := make(map[string]int)
	totalPCsInFile := make(map[string]int)
	totalFuncs := make(map[string]int)
	coveredFuncs := make(map[string]int)
	coveredPCsInFuncs := make(map[string]int)
	pcsInCoveredFuncs := make(map[string]int)
	pcsInFuncs := make(map[string]int)
	percentLines := make(map[string]float64)
	percentPCsInFile := make(map[string]float64)
	percentPCsInFunc := make(map[string]float64)
	percentPCsInCoveredFunc := make(map[string]float64)
	percentCoveredFunc := make(map[string]float64)
	for _, data := range datas {
		coveredLines[data.Module] += data.CoveredLines
		totalLines[data.Module] += data.TotalLines
		coveredPCsInFile[data.Module] += data.CoveredPCs
		totalPCsInFile[data.Module] += data.TotalPCs
		totalFuncs[data.Module] += data.TotalFunctions
		coveredFuncs[data.Module] += data.CoveredFunctions
		coveredPCsInFuncs[data.Module] += data.CoveredPCsInFunctions
		pcsInFuncs[data.Module] += data.TotalPCsInFunctions
		pcsInCoveredFuncs[data.Module] += data.TotalPCsInCoveredFunctions
	}
	for m := range totalLines {
		if totalLines[m] != 0 {
			percentLines[m] = 100.0 * float64(coveredLines[m]) / float64(totalLines[m])
		}
		if totalPCsInFile[m] != 0 {
			percentPCsInFile[m] = 100.0 * float64(coveredPCsInFile[m]) / float64(totalPCsInFile[m])
		}
		if pcsInFuncs[m] != 0 {
			percentPCsInFunc[m] = 100.0 * float64(coveredPCsInFuncs[m]) / float64(pcsInFuncs[m])
		}
		if pcsInCoveredFuncs[m] != 0 {
			percentPCsInCoveredFunc[m] = 100.0 * float64(coveredPCsInFuncs[m]) / float64(pcsInCoveredFuncs[m])
		}
		if totalFuncs[m] != 0 {
			percentCoveredFunc[m] = 100.0 * float64(coveredFuncs[m]) / float64(totalFuncs[m])
		}
		lines := fmt.Sprintf("%v / %v / %.2f%%", coveredLines[m], totalLines[m], percentLines[m])
		pcsInFiles := fmt.Sprintf("%v / %v / %.2f%%", coveredPCsInFile[m], totalPCsInFile[m], percentPCsInFile[m])
		funcs := fmt.Sprintf("%v / %v / %.2f%%", coveredFuncs[m], totalFuncs[m], percentCoveredFunc[m])
		// The := below deliberately shadows the pcsInFuncs map with the
		// formatted string for this module; the RHS still reads the outer map.
		pcsInFuncs := fmt.Sprintf("%v / %v / %.2f%%", coveredPCsInFuncs[m], pcsInFuncs[m], percentPCsInFunc[m])
		covedFuncs := fmt.Sprintf("%v / %v / %.2f%%", coveredPCsInFuncs[m], pcsInCoveredFuncs[m], percentPCsInCoveredFunc[m])
		d[m] = map[string]string{
			"name":              m,
			"lines":             lines,
			"PCsInFiles":        pcsInFiles,
			"Funcs":             funcs,
			"PCsInFuncs":        pcsInFuncs,
			"PCsInCoveredFuncs": covedFuncs,
		}
	}
	return d
}

// DoModuleCover renders the per-module coverage summary table.
func (rg *ReportGenerator) DoModuleCover(w io.Writer, progs []Prog, coverFilter map[uint32]uint32) error {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	data, err := rg.convertToStats(progs)
	if err != nil {
		return err
	}
	d := groupCoverByModule(data)
	return coverTableTemplate.Execute(w, d)
}

// csvHeader is the column order emitted by DoCSV.
var csvHeader = []string{
	"Filename",
	"Function",
	"Covered PCs",
	"Total PCs",
}

// DoCSV writes per-function coverage as CSV, sorted by filename then function.
func (rg *ReportGenerator) DoCSV(w io.Writer, progs []Prog, coverFilter map[uint32]uint32) error {
	progs = fixUpPCs(rg.target.Arch, progs, coverFilter)
	files, err := rg.prepareFileMap(progs)
	if err != nil {
		return err
	}
	var data [][]string
	for fname, file := range files {
		for _, function := range file.functions {
			data = append(data, []string{
				fname,
				function.name,
				strconv.Itoa(function.covered),
				strconv.Itoa(function.pcs),
			})
		}
	}
	sort.Slice(data, func(i, j int) bool {
		if data[i][0] != data[j][0] {
			return data[i][0] < data[j][0]
		}
		return data[i][1] < data[j][1]
	})
	writer := csv.NewWriter(w)
	defer writer.Flush()
	if err := writer.Write(csvHeader); err != nil {
		return err
	}
	return writer.WriteAll(data)
}

// fileContents renders one source file as annotated HTML: per-line program
// counts (when haveProgs), line numbers, and the source text with covered /
// uncovered / both spans marked per lineCover chunk.
// NOTE(review): the HTML fragments this function emits were stripped from the
// string literals below (several literals are broken across lines and several
// Sprintf calls have surplus arguments); the tokens are preserved as-is.
func fileContents(file *file, lines [][]byte, haveProgs bool) string {
	var buf bytes.Buffer
	lineCover := perLineCoverage(file.covered, file.uncovered)
	// NOTE(review): originally this presumably escaped <, >, & to HTML entities
	// and expanded tabs; the replacement strings have been lost.
	htmlReplacer := strings.NewReplacer(">", ">", "<", "<", "&", "&", "\t", " ")
	buf.WriteString("
")
	for i := range lines {
		if haveProgs {
			prog, count := "", " "
			// file.lines is keyed by 1-based line number.
			if line := file.lines[i+1]; len(line.progCount) != 0 {
				prog = fmt.Sprintf("onclick='onProgClick(%v, this)'", line.progIndex)
				count = fmt.Sprintf("% 5v", len(line.progCount))
				buf.WriteString(fmt.Sprintf("%v ", prog, count))
			}
			buf.WriteByte('\n')
		}
	}
	buf.WriteString("")
	for i := range lines {
		buf.WriteString(fmt.Sprintf("%d\n", i+1))
	}
	buf.WriteString("")
	for i, ln := range lines {
		start := 0
		// Sentinel chunk covers the tail of the line past the last real chunk.
		cover := append(lineCover[i+1], lineCoverChunk{End: backend.LineEnd})
		for _, cov := range cover {
			end := cov.End - 1
			if end > len(ln) {
				end = len(ln)
			}
			if end == start {
				continue
			}
			chunk := htmlReplacer.Replace(string(ln[start:end]))
			start = end
			class := ""
			if cov.Covered && cov.Uncovered {
				class = "both"
			} else if cov.Covered {
				class = "covered"
			} else if cov.Uncovered {
				class = "uncovered"
			} else {
				// Chunk with no coverage info: emit unmarked.
				buf.WriteString(chunk)
				continue
			}
			buf.WriteString(fmt.Sprintf("%v", class, chunk))
		}
		buf.WriteByte('\n')
	}
	buf.WriteString("
")
	return buf.String()
}

// lineCoverChunk describes one half-open column range [prevEnd, End) of a
// source line and whether any PC in it was covered and/or uncovered.
type lineCoverChunk struct {
	End       int
	Covered   bool
	Uncovered bool
}

// perLineCoverage merges covered and uncovered PC ranges into per-line chunk
// lists keyed by 1-based line number.
func perLineCoverage(covered, uncovered []backend.Range) map[int][]lineCoverChunk {
	lines := make(map[int][]lineCoverChunk)
	for _, r := range covered {
		mergeRange(lines, r, true)
	}
	for _, r := range uncovered {
		mergeRange(lines, r, false)
	}
	return lines
}

// mergeRange folds a single (possibly multi-line) source range into the
// per-line chunk map, sanitizing inverted line/column info first.
func mergeRange(lines map[int][]lineCoverChunk, r backend.Range, covered bool) {
	// Don't panic on broken debug info, it is frequently broken.
	if r.EndLine < r.StartLine {
		r.EndLine = r.StartLine
	}
	if r.EndLine == r.StartLine && r.EndCol <= r.StartCol {
		r.EndCol = backend.LineEnd
	}
	for line := r.StartLine; line <= r.EndLine; line++ {
		start := 0
		if line == r.StartLine {
			start = r.StartCol
		}
		end := backend.LineEnd
		if line == r.EndLine {
			end = r.EndCol
		}
		ln := lines[line]
		if ln == nil {
			// Seed an unmarked chunk spanning the whole line.
			ln = append(ln, lineCoverChunk{End: backend.LineEnd})
		}
		lines[line] = mergeLine(ln, start, end, covered)
	}
}

// mergeLine marks columns [start, end) of one line as covered or uncovered,
// splitting existing chunks at the boundaries as needed. chunks stay sorted
// and contiguous: each chunk spans [previous.End, chunk.End).
func mergeLine(chunks []lineCoverChunk, start, end int, covered bool) []lineCoverChunk {
	var res []lineCoverChunk
	chunkStart := 0
	for _, chunk := range chunks {
		if chunkStart >= end || chunk.End <= start {
			// No overlap with [start, end) — keep unchanged.
			res = append(res, chunk)
		} else if covered && chunk.Covered || !covered && chunk.Uncovered {
			// Already carries the flag being set — keep unchanged.
			res = append(res, chunk)
		} else if chunkStart >= start && chunk.End <= end {
			// Fully inside the range — set the flag on the whole chunk.
			if covered {
				chunk.Covered = true
			} else {
				chunk.Uncovered = true
			}
			res = append(res, chunk)
		} else {
			// Partial overlap — split into up to three pieces.
			if chunkStart < start {
				res = append(res, lineCoverChunk{start, chunk.Covered, chunk.Uncovered})
			}
			mid := end
			if mid > chunk.End {
				mid = chunk.End
			}
			res = append(res, lineCoverChunk{mid, chunk.Covered || covered, chunk.Uncovered || !covered})
			if chunk.End > end {
				res = append(res, lineCoverChunk{chunk.End, chunk.Covered, chunk.Uncovered})
			}
		}
		chunkStart = chunk.End
	}
	return res
}

// addFunctionCoverage appends one HTML fragment per function of file to
// data.Functions, listing the function name and its coverage percentage
// (or "---" when uncovered). The surrounding markup in the literals below
// has been stripped; tokens kept as-is.
func addFunctionCoverage(file *file, data *templateData) {
	var buf bytes.Buffer
	for _, function := range file.functions {
		percentage := ""
		if function.covered > 0 {
			percentage = fmt.Sprintf("%v%%", percent(function.covered, function.pcs))
		} else {
			percentage = "---"
		}
		buf.WriteString(fmt.Sprintf("%v", function.name))
		buf.WriteString(fmt.Sprintf("%v", percentage))
		buf.WriteString(fmt.Sprintf("of %v", strconv.Itoa(function.pcs)))
		buf.WriteString("
\n")
	}
	data.Functions = append(data.Functions, template.HTML(buf.String()))
}

// processDir finalizes the directory tree: collapses single-child directory
// chains into one node ("a/b/c"), sorts files, accumulates totals bottom-up,
// computes percentages, and prunes entirely-uncovered subtrees.
func processDir(dir *templateDir) {
	for len(dir.Dirs) == 1 && len(dir.Files) == 0 {
		// Single-entry map: the loop runs exactly once per collapse step.
		for _, child := range dir.Dirs {
			dir.Name += "/" + child.Name
			dir.Files = child.Files
			dir.Dirs = child.Dirs
		}
	}
	sort.Slice(dir.Files, func(i, j int) bool {
		return dir.Files[i].Name < dir.Files[j].Name
	})
	for _, f := range dir.Files {
		dir.Total += f.Total
		dir.Covered += f.Covered
		f.Percent = percent(f.Covered, f.Total)
	}
	for _, child := range dir.Dirs {
		processDir(child)
		dir.Total += child.Total
		dir.Covered += child.Covered
	}
	dir.Percent = percent(dir.Covered, dir.Total)
	if dir.Covered == 0 {
		dir.Dirs = nil
		dir.Files = nil
	}
}

// percent returns ceil(covered/total*100), capped at 99 when coverage is
// not actually complete, so "100%" always means fully covered.
func percent(covered, total int) int {
	f := math.Ceil(float64(covered) / float64(total) * 100)
	if f == 100 && covered < total {
		f = 99
	}
	return int(f)
}

// parseFile reads fn and splits it into lines without trailing '\n';
// a final unterminated line is kept.
func parseFile(fn string) ([][]byte, error) {
	data, err := ioutil.ReadFile(fn)
	if err != nil {
		return nil, err
	}
	var lines [][]byte
	for {
		idx := bytes.IndexByte(data, '\n')
		if idx == -1 {
			break
		}
		lines = append(lines, data[:idx])
		data = data[idx+1:]
	}
	if len(data) != 0 {
		lines = append(lines, data)
	}
	return lines, nil
}

// templateData is the root object passed to coverTemplate.
type templateData struct {
	Root      *templateDir
	Contents  []template.HTML
	Progs     []templateProg
	Functions []template.HTML
	RawCover  bool
}

// templateProg is one program entry (signature + escaped program text).
type templateProg struct {
	Sig     string
	Content template.HTML
}

// templateBase holds fields common to files and directories in the tree.
type templateBase struct {
	Name    string
	Path    string
	Total   int
	Covered int
	Percent int
}

// templateDir is a directory node of the coverage tree.
type templateDir struct {
	templateBase
	Dirs  map[string]*templateDir
	Files []*templateFile
}

// templateFile is a leaf node; Index points into templateData.Contents.
type templateFile struct {
	templateBase
	Index        int
	HasFunctions bool
}

// coverTemplate renders the main HTML report.
// NOTE(review): only the template actions survive here — the HTML element
// markup of this template was stripped during extraction; the raw string is
// preserved byte-for-byte.
var coverTemplate = template.Must(template.New("").Parse(`
{{range $i, $f := .Contents}}
{{$f}}
{{end}} {{$base := .}} {{range $i, $p := .Progs}}
{{if $base.RawCover}}[raw coverage]
{{end}} {{$p.Content}}
{{end}} {{range $i, $p := .Functions}}
{{$p}}
{{end}}
{{define "dir"}} {{range $dir := .Dirs}}
  • {{$dir.Name}} {{if $dir.Covered}}{{$dir.Percent}}%{{else}}---{{end}} of {{$dir.Total}}
  • {{end}} {{range $file := .Files}}
  • {{if $file.Covered}} {{$file.Name}} {{$file.Percent}}% of {{$file.Total}} {{else}} {{$file.Name}}--- of {{$file.Total}} {{end}}
  • {{end}} {{end}} `))

// coverTableTemplate renders the per-subsystem / per-module summary tables.
// NOTE(review): the {{end}} on the first content line closes the {{range}}
// before $p is used below — the enclosing table markup (and likely the real
// action ordering) was stripped; preserved byte-for-byte.
var coverTableTemplate = template.Must(template.New("coverTable").Parse(`
    {{range $i, $p := .}} {{end}}
    Name Covered / Total Lines / % Covered / Total PCs in File / % Covered / Total PCs in Function / % Covered / Total PCs in Covered Function / % Covered / Total Functions / %
    {{$p.name}} {{$p.lines}} {{$p.PCsInFiles}} {{$p.PCsInFuncs}} {{$p.PCsInCoveredFuncs}} {{$p.Funcs}}
    `))