diff --git a/app/cli/report.go b/app/cli/report.go
index 7aeb493..2cd0203 100644
--- a/app/cli/report.go
+++ b/app/cli/report.go
@@ -1,10 +1,12 @@
 package cli
 
 import (
-	"bufio"
+	"bytes"
 	"encoding"
 	"fmt"
+	"math/rand"
 	"path/filepath"
+	"sort"
 	"strconv"
 	"strings"
 
@@ -12,6 +14,7 @@ import (
 	gitignore "github.com/sabhiram/go-gitignore"
 
 	actx "go.hackfix.me/fcov/app/context"
+	aerrors "go.hackfix.me/fcov/app/errors"
 	"go.hackfix.me/fcov/parse"
 	"go.hackfix.me/fcov/report"
 	"go.hackfix.me/fcov/types"
@@ -19,7 +22,8 @@
 
 // Report is the fcov report command.
 type Report struct {
-	Files            []string `arg:"" help:"One or more coverage files."` // not using 'existingfile' modifier since it makes it difficult to test with an in-memory FS
+	Paths            []string `arg:"" help:"One or more paths to coverage files or directories."` // not using 'existingfile' modifier since it makes it difficult to test with an in-memory FS
+	Merge            bool     `help:"If true, coverage from all paths will be merged into a single report. Otherwise, a separate report will be generated for each path. "`
 	Filter           []string `help:"Glob patterns applied on file paths to filter files from the coverage calculation and output. \n Example: '*,!*pkg*' would exclude all files except those that contain 'pkg'. " placeholder:""`
 	FilterOutput     []string `help:"Glob patterns applied on file paths to filter files from the output, but *not* from the coverage calculation. " placeholder:""`
 	FilterOutputFile string   `help:"Path to a file that contains newline-separated file paths to include in the output.\nIf specified, it overrides --filter-output. " placeholder:""`
@@ -100,14 +104,13 @@
 // TODO: This currently assumes Go coverage processing. Either correctly infer so,
 // or add a CLI flag to use Go mode reporting.
 func (s *Report) Run(appCtx *actx.Context) error {
-	cov := types.NewCoverage()
 	filterCov := gitignore.CompileIgnoreLines(s.Filter...)
 
 	filterOutLines := s.FilterOutput
 	if s.FilterOutputFile != "" {
 		file, err := appCtx.FS.Open(s.FilterOutputFile)
 		if err != nil {
-			return fmt.Errorf("failed opening filter output file: %w", err)
+			return aerrors.NewRuntimeError("failed opening filter output file", err, "")
 		}
 		defer file.Close()
 
@@ -118,86 +121,118 @@ func (s *Report) Run(appCtx *actx.Context) error {
 		filterOutLines, err = createOutputFilterFromFile(file)
 		if err != nil {
-			return fmt.Errorf("failed reading filter output file: %w", err)
+			return aerrors.NewRuntimeError("failed reading filter output file", err, "")
 		}
 	}
 
 	filterOut := gitignore.CompileIgnoreLines(filterOutLines...)
 
-	for _, fpath := range s.Files {
-		file, err := appCtx.FS.Open(fpath)
+	var covFiles []string
+	for _, path := range s.Paths {
+		info, err := appCtx.FS.Stat(path)
+		if err != nil {
+			if vfs.IsNotExist(err) {
+				return err
+			}
+			return aerrors.NewRuntimeError(fmt.Sprintf("failed getting information on %s", path), err, "")
+		}
+
+		if info.IsDir() {
+			covDirs, err := findCoverageDirectories(appCtx.FS, path)
+			if err != nil {
+				return aerrors.NewRuntimeError(fmt.Sprintf("failed reading directory %s", path), err, "")
+			}
+
+			if len(covDirs) == 0 {
+				appCtx.Logger.Warn(fmt.Sprintf("No coverage directories found in %s", path))
+			}
+
+			genCovFiles, err := generateTextCoverage(covDirs)
+			if err != nil {
+				return err
+			}
+			covFiles = append(covFiles, genCovFiles...)
+		} else {
+			covFiles = append(covFiles, path)
+		}
+	}
+
+	covs := map[string]*types.Coverage{}
+
+	// Random key used to distinguish whether the coverage report should be merged or not.
+	// A hackish way of doing this, but the alternative would require more code.
+	mergeKey := fmt.Sprintf("%x\x00", rand.Int31())
+
+	for _, covFile := range covFiles {
+		file, err := appCtx.FS.Open(covFile)
 		if err != nil {
 			return err
 		}
 		defer file.Close()
 
-		if err = parse.Go(file, cov, filterCov); err != nil {
+		covKey := mergeKey
+		if !s.Merge {
+			covKey = strings.TrimSuffix(filepath.Base(covFile), filepath.Ext(covFile))
+		}
+
+		covs[covKey] = types.NewCoverage()
+		if err := parse.Go(file, covs[covKey], filterCov); err != nil {
 			return err
 		}
 	}
 
-	sum := report.Create(cov)
+	reports := make(map[string]*report.Report, len(covs))
+	for name, cov := range covs {
+		reports[name] = report.Create(cov)
+	}
 
-	renders := make(map[report.Format]string)
+	renders := make(map[string]map[report.Format]string)
 	for _, out := range s.Output {
 		var (
 			render string
 			ok     bool
 		)
-		if render, ok = renders[out.Format]; !ok {
-			render = sum.Render(
-				out.Format, s.NestFiles, filterOut, s.Thresholds.Lower,
-				s.Thresholds.Upper, s.TrimPackagePrefix)
-			renders[out.Format] = render
+		for name, r := range reports {
+			if _, ok = renders[name]; !ok {
+				renders[name] = make(map[report.Format]string)
+			}
+			render = r.Render(
+				out.Format, s.NestFiles, filterOut, s.TrimPackagePrefix,
+			)
+			renders[name][out.Format] = render
+		}
+
+		names := make([]string, 0, len(renders))
+		for n := range renders {
+			names = append(names, n)
+		}
+		sort.Strings(names)
+
+		var output bytes.Buffer
+		for j, name := range names {
+			if name != mergeKey {
+				header := reports[name].RenderHeader(
+					out.Format, name, j > 0, s.Thresholds.Lower, s.Thresholds.Upper,
+				)
+				if _, err := fmt.Fprintln(&output, header); err != nil {
+					return err
+				}
+			}
+			if _, err := fmt.Fprintln(&output, renders[name][out.Format]); err != nil {
+				return err
+			}
 		}
 
 		if out.Filename == "" {
-			if _, err := fmt.Fprintln(appCtx.Stdout, render); err != nil {
+			if _, err := fmt.Fprintln(appCtx.Stdout, output.String()); err != nil {
 				return err
 			}
 			continue
 		}
 
-		if err := vfs.WriteFile(appCtx.FS, out.Filename, []byte(render), 0o644); err != nil {
+		if err := vfs.WriteFile(appCtx.FS, out.Filename, output.Bytes(), 0o644); err != nil {
 			return err
 		}
 	}
 
 	return nil
 }
-
-func createOutputFilterFromFile(file vfs.File) ([]string, error) {
-	scanner := bufio.NewScanner(file)
-	filter := []string{"*"} // exclude everything
-
-	var (
-		line, pkg string
-		packages  = make(map[string]bool)
-		files     []string
-	)
-	// First pass to split the packages being tested from files. Since we can't
-	// reliably determine which .go file is tested, we include the entire package
-	// in that case.
-	for scanner.Scan() {
-		line = scanner.Text()
-		pkg = filepath.Dir(line)
-		if strings.HasSuffix(line, "_test.go") {
-			packages[pkg] = true
-		} else if strings.HasSuffix(line, ".go") {
-			files = append(files, line)
-		}
-	}
-
-	// Second pass to assemble the filter
-	for _, f := range files {
-		pkg = filepath.Dir(f)
-		if !packages[pkg] {
-			filter = append(filter, fmt.Sprintf("!%s", f))
-		}
-	}
-
-	for pkg := range packages {
-		filter = append(filter, fmt.Sprintf("!%s/", pkg))
-	}
-
-	return filter, scanner.Err()
-}
diff --git a/app/cli/report_utils.go b/app/cli/report_utils.go
new file mode 100644
index 0000000..50add63
--- /dev/null
+++ b/app/cli/report_utils.go
@@ -0,0 +1,122 @@
+package cli
+
+import (
+	"bufio"
+	"fmt"
+	"os/exec"
+	"path/filepath"
+	"strings"
+
+	"github.com/mandelsoft/vfs/pkg/vfs"
+
+	aerrors "go.hackfix.me/fcov/app/errors"
+)
+
+func createOutputFilterFromFile(file vfs.File) ([]string, error) {
+	scanner := bufio.NewScanner(file)
+	filter := []string{"*"} // exclude everything
+
+	var (
+		line, pkg string
+		packages  = make(map[string]bool)
+		files     []string
+	)
+	// First pass to split the packages being tested from files. Since we can't
+	// reliably determine which .go file is tested, we include the entire package
+	// in that case.
+	for scanner.Scan() {
+		line = scanner.Text()
+		pkg = filepath.Dir(line)
+		if strings.HasSuffix(line, "_test.go") {
+			packages[pkg] = true
+		} else if strings.HasSuffix(line, ".go") {
+			files = append(files, line)
+		}
+	}
+
+	// Second pass to assemble the filter
+	for _, f := range files {
+		pkg = filepath.Dir(f)
+		if !packages[pkg] {
+			filter = append(filter, fmt.Sprintf("!%s", f))
+		}
+	}
+
+	for pkg := range packages {
+		filter = append(filter, fmt.Sprintf("!%s/", pkg))
+	}
+
+	return filter, scanner.Err()
+}
+
+func findCoverageDirectories(fs vfs.FileSystem, root string) ([]string, error) {
+	entries, err := vfs.ReadDir(fs, root)
+	if err != nil {
+		return nil, err
+	}
+
+	var covDirs []string
+
+	for _, entry := range entries {
+		if !entry.IsDir() {
+			continue
+		}
+
+		dirPath := filepath.Join(root, entry.Name())
+		files, err := vfs.ReadDir(fs, dirPath)
+		if err != nil {
+			return nil, err
+		}
+
+		hasCovMeta := false
+		hasCovCounters := false
+
+		for _, file := range files {
+			name := file.Name()
+			if strings.HasPrefix(name, "covmeta.") {
+				hasCovMeta = true
+			}
+			if strings.HasPrefix(name, "covcounters.") {
+				hasCovCounters = true
+			}
+			if hasCovMeta && hasCovCounters {
+				absPath, err := filepath.Abs(dirPath)
+				if err != nil {
+					return nil, err
+				}
+				covDirs = append(covDirs, absPath)
+				break
+			}
+		}
+	}
+
+	return covDirs, nil
+}
+
+// Generate legacy text coverage from directories containing binary coverage
+// files generated by Go >=1.20.
+// See https://dustinspecker.com/posts/go-combined-unit-integration-code-coverage/
+// It returns paths to the generated text coverage files.
+// In the future, fcov could work with the binary files directly, but for now we
+// need to generate the legacy text format.
+func generateTextCoverage(dirs []string) (covFiles []string, err error) {
+	var output []byte
+	for _, absPath := range dirs {
+		outDir := filepath.Dir(absPath)
+		dirName := filepath.Base(absPath)
+		outFile := filepath.Join(outDir, dirName+".txt")
+
+		cmd := exec.Command("go", "tool", "covdata", "textfmt",
+			"-i="+absPath,
+			"-o="+outFile)
+
+		if output, err = cmd.CombinedOutput(); err != nil {
+			return nil, aerrors.NewRuntimeError(
+				fmt.Sprintf("failed to generate text coverage for %s", absPath), err, string(output))
+		}
+
+		covFiles = append(covFiles, outFile)
+	}
+
+	return covFiles, nil
+}
diff --git a/report/render.go b/report/render.go
index fb9078f..0c49d10 100644
--- a/report/render.go
+++ b/report/render.go
@@ -25,19 +25,16 @@ const (
 const pkgMarker = '\x00'
 
 // Render the report as a string in the provided format, applying the provided
-// filter, and style adjustments.
-// The lower and upper coverage thresholds are used by formats like Markdown to
-// apply different colors depending on their values. trimPackagePrefix will
-// remove the matching prefix from the absolute file path.
-func (s *Report) Render(
-	ft Format, nestFiles bool, filter *gitignore.GitIgnore,
-	lowerThreshold, upperThreshold float64, trimPackagePrefix string,
+// filter, and style adjustments. trimPackagePrefix will remove the matching
+// prefix from the absolute file path.
+func (r *Report) Render(
+	ft Format, nestFiles bool, filter *gitignore.GitIgnore, trimPackagePrefix string,
 ) string {
-	if len(s.Packages) == 0 {
+	if len(r.Packages) == 0 {
 		return ""
 	}
 
-	sum := s.preRender(filter, nestFiles, trimPackagePrefix)
+	sum := r.preRender(filter, nestFiles, trimPackagePrefix)
 
 	buf := &strings.Builder{}
 	table := tablewriter.NewWriter(buf)
@@ -59,9 +56,6 @@ func (s *Report) Render(
 		table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false})
 		table.SetCenterSeparator("|")
 
-		buf.Write([]byte(fmt.Sprintf("![Total Coverage](%s)\n\n",
-			generateBadgeURL(s.Coverage*100, lowerThreshold, upperThreshold))))
-
 		if len(sum) == 0 {
 			break
 		}
@@ -85,30 +79,56 @@ func (s *Report) Render(
 	table.AppendBulk(data)
 	table.Render()
 
-	if ft == Text {
-		buf.Write([]byte(fmt.Sprintf("\nTotal Coverage: %.2f%%", s.Coverage*100)))
-	}
-
 	out := buf.String()
 	// tablewriter appends an extra newline at the end that I can't seem to
 	// disable, so remove it.
 	out, _ = strings.CutSuffix(out, "\n")
 
+	if ft == Markdown {
+		// Wrap the report in a collapsible element.
+		out = fmt.Sprintf("<details>\n\n%s\n\n</details>", out)
+	}
+
 	return out
 }
 
+func (r *Report) RenderHeader(
+	ft Format, text string, spacer bool, lowerThreshold, upperThreshold float64,
+) string {
+	var s string
+	if spacer {
+		switch ft {
+		case Markdown:
+			s = "\n<br />\n\n"
+		case Text:
+			s = "\n\n"
+		}
+	}
+
+	tc := renderTotalCoverage(ft, r.Coverage*100, lowerThreshold, upperThreshold)
+
+	switch ft {
+	case Markdown:
+		return fmt.Sprintf("%s### %s %s\n", s, text, tc)
+	case Text:
+		return fmt.Sprintf("%s%s %s\n", s, text, tc)
+	}
+
+	return ""
+}
+
 // preRender sorts and flattens the report, applying any filters, and
 // optionally trimming the file paths as needed.
-func (s *Report) preRender(filter *gitignore.GitIgnore, nestFiles bool, trimPackagePrefix string) [][]string {
-	pkgNames := make([]string, 0, len(s.Packages))
-	for pkgName := range s.Packages {
+func (r *Report) preRender(filter *gitignore.GitIgnore, nestFiles bool, trimPackagePrefix string) [][]string {
+	pkgNames := make([]string, 0, len(r.Packages))
+	for pkgName := range r.Packages {
 		pkgNames = append(pkgNames, pkgName)
 	}
 	sort.Strings(pkgNames)
 
 	sum := make([][]string, 0)
 	for _, pkgName := range pkgNames {
-		pkgSum := s.Packages[pkgName]
+		pkgSum := r.Packages[pkgName]
 
 		fnames := make([]string, 0, len(pkgSum.Files))
 		for fname := range pkgSum.Files {
@@ -220,13 +240,19 @@
 	}
 }
 
-func generateBadgeURL(cov float64, lowerThreshold, upperThreshold float64) string {
-	color := "success"
-	if cov < lowerThreshold {
-		color = "critical"
-	} else if cov < upperThreshold {
-		color = "yellow"
+func renderTotalCoverage(
+	f Format, cov float64, lowerThreshold, upperThreshold float64,
+) string {
+	if f == Markdown {
+		color := "success"
+		if cov < lowerThreshold {
+			color = "critical"
+		} else if cov < upperThreshold {
+			color = "yellow"
+		}
+
+		return fmt.Sprintf("![Total Coverage](https://img.shields.io/badge/%.2f%%25-%s?style=flat)", cov, color)
 	}
 
-	return fmt.Sprintf("https://img.shields.io/badge/Total%%20Coverage-%.2f%%25-%s?style=flat", cov, color)
+	return fmt.Sprintf("%.2f%%", cov)
 }