From 34d8a152baf83f91c419a3c91eb28d66f17cab93 Mon Sep 17 00:00:00 2001 From: jonathanpopham Date: Tue, 10 Feb 2026 21:09:21 -0500 Subject: [PATCH 1/2] Add impact analysis, test coverage, and circular dependency endpoints MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Calls all three new analysis endpoints in parallel alongside the existing supermodel graph endpoint. Analysis results are used to enrich the generated entity pages with: - Impact analysis: blast radius, risk score, affected entry points - Test coverage: tested/untested status per function, coverage % per file - Circular dependencies: cycle membership, severity, breaking suggestions Adds new pills (impact level, test coverage, dependency health), new body sections on entity pages, and three new taxonomies for browsing by impact level, test coverage status, and dependency health. Analysis endpoints fail gracefully with warnings — the site still generates normally if any of the new endpoints are unavailable. Resolves #1 --- main.go | 428 ++++++++++++++++++++++++++++++++++++++++-- templates/_styles.css | 1 + templates/entity.html | 24 +++ 3 files changed, 438 insertions(+), 15 deletions(-) diff --git a/main.go b/main.go index 838bed5..e20c197 100644 --- a/main.go +++ b/main.go @@ -15,10 +15,16 @@ import ( "path/filepath" "strconv" "strings" + "sync" "time" ) -const apiBaseURL = "https://api.supermodeltools.com/v1/graphs/supermodel" +const apiBase = "https://api.supermodeltools.com" +const supermodelEndpoint = apiBase + "/v1/graphs/supermodel" +const impactEndpoint = apiBase + "/v1/analysis/impact" +const testCoverageEndpoint = apiBase + "/v1/analysis/test-coverage-map" +const circularDepsEndpoint = apiBase + "/v1/analysis/circular-dependencies" + const pollTimeout = 15 * time.Minute const defaultPollInterval = 10 * time.Second const maxFileSize = 10 * 1024 * 1024 // 10MB @@ -97,6 +103,15 @@ data: - name: "Domain" header: "Domain" type: "unordered_list" + - name: "Impact Analysis" + header: "Impact Analysis" + type: "unordered_list" + - name: "Test Coverage" + header: "Test Coverage" + type: "unordered_list" + - name: "Circular Dependencies" + header: "Circular Dependencies" + type: "unordered_list" - name: "faqs" header: "FAQs" type: "faq" @@ -158,6 +173,30 @@ taxonomies: min_entities: 1 index_description: "Browse by tag" + - name: "test_coverage" + label: "Test Coverage" + label_singular: "Coverage" + field: "test_coverage" + multi_value: false + min_entities: 1 + index_description: "Browse by test coverage status" + + - name: "impact_level" + label: "Impact Level" + label_singular: "Impact Level" + field: "impact_level" + multi_value: false + min_entities: 1 + index_description: "Browse by change impact level" + + - name: "dependency_health" + label: "Dependency Health" + label_singular: "Dependency Health" + field: "dependency_health" + multi_value: false + min_entities: 1 + index_description: "Browse by dependency health status" + pagination: per_page: 48 url_pattern: "/{taxonomy}/{entry}/{page}" @@ -299,17 +338,68 @@ func main() { fmt.Printf("Archive created: %s (%.2f MB)\n", zipPath, float64(info.Size())/(1024*1024)) logGroupEnd() - // Step 4 & 5: Call Supermodel API and poll - logGroup("Calling Supermodel API") - graphJSON, err := callSupermodelAPI(apiKey, zipPath) - if err != nil { - fatal("API call failed: %v", err) + // Step 4 & 5: Call Supermodel APIs in parallel + logGroup("Calling Supermodel APIs") + + type apiResult struct { + name string + data []byte + err error + } + + 
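+	// Fan-in: each endpoint goroutine sends exactly one apiResult, and the
+	// channel is buffered to match the endpoint count so no sender blocks.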
results := make(chan apiResult, 4) + var wg sync.WaitGroup + + // Launch all 4 endpoints concurrently + for _, ep := range []struct{ name, url string }{ + {"supermodel", supermodelEndpoint}, + {"impact", impactEndpoint}, + {"test-coverage", testCoverageEndpoint}, + {"circular-deps", circularDepsEndpoint}, + } { + wg.Add(1) + go func(name, url string) { + defer wg.Done() + fmt.Printf("Calling %s endpoint...\n", name) + data, err := callEndpoint(url, apiKey, zipPath) + results <- apiResult{name: name, data: data, err: err} + }(ep.name, ep.url) + } + + go func() { + wg.Wait() + close(results) + }() + + var graphJSON []byte + var impactJSON []byte + var testCoverageJSON []byte + var circularDepsJSON []byte + + for r := range results { + if r.err != nil { + if r.name == "supermodel" { + fatal("Supermodel API failed: %v", r.err) + } + fmt.Printf("::warning::%s endpoint failed: %v\n", r.name, r.err) + continue + } + fmt.Printf("%s: received %d bytes\n", r.name, len(r.data)) + switch r.name { + case "supermodel": + graphJSON = r.data + case "impact": + impactJSON = r.data + case "test-coverage": + testCoverageJSON = r.data + case "circular-deps": + circularDepsJSON = r.data + } } - fmt.Printf("Graph data received (%d bytes)\n", len(graphJSON)) logGroupEnd() - // Step 6: Save graph JSON - logGroup("Saving graph data") + // Step 6: Save JSON results + logGroup("Saving analysis data") tmpDir, err := os.MkdirTemp("", "arch-docs-*") if err != nil { fatal("Failed to create temp dir: %v", err) @@ -321,6 +411,22 @@ func main() { fatal("Failed to write graph JSON: %v", err) } fmt.Printf("Graph saved to %s\n", graphPath) + + if impactJSON != nil { + p := filepath.Join(tmpDir, "impact.json") + os.WriteFile(p, impactJSON, 0644) + fmt.Printf("Impact analysis saved to %s\n", p) + } + if testCoverageJSON != nil { + p := filepath.Join(tmpDir, "test-coverage.json") + os.WriteFile(p, testCoverageJSON, 0644) + fmt.Printf("Test coverage saved to %s\n", p) + } + if circularDepsJSON != nil { + p := filepath.Join(tmpDir, "circular-deps.json") + os.WriteFile(p, circularDepsJSON, 0644) + fmt.Printf("Circular dependencies saved to %s\n", p) + } logGroupEnd() // Step 7: Run graph2md @@ -349,6 +455,14 @@ func main() { fmt.Printf("Generated %d markdown files\n", entityCount) logGroupEnd() + // Step 7b: Enrich markdown with analysis data + if impactJSON != nil || testCoverageJSON != nil || circularDepsJSON != nil { + logGroup("Enriching entities with analysis data") + enriched := enrichMarkdown(contentDir, impactJSON, testCoverageJSON, circularDepsJSON) + fmt.Printf("Enriched %d entity files with analysis data\n", enriched) + logGroupEnd() + } + // Step 8: Generate pssg.yaml and run pssg build logGroup("Building static site") @@ -565,12 +679,12 @@ func createRepoZip(workspaceDir string) (string, error) { return tmpFile.Name(), nil } -// callSupermodelAPI sends the zip to the Supermodel API and polls for completion. -func callSupermodelAPI(apiKey, zipPath string) ([]byte, error) { +// callEndpoint sends the zip to a Supermodel API endpoint and polls for completion. 
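+// All four analysis endpoints share this flow: one multipart upload, then
+// repeated re-POSTs with the same idempotency key until the job completes
+// or pollTimeout elapses.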
+func callEndpoint(endpointURL, apiKey, zipPath string) ([]byte, error) { idempotencyKey := generateUUID() // Initial POST - respBody, resp, err := postWithZip(apiKey, zipPath, idempotencyKey) + respBody, resp, err := postWithZip(endpointURL, apiKey, zipPath, idempotencyKey) if err != nil { return nil, fmt.Errorf("initial request: %w", err) } @@ -595,7 +709,7 @@ func callSupermodelAPI(apiKey, zipPath string) ([]byte, error) { fmt.Printf("Status: %s (job: %s), polling in %s...\n", apiResp.Status, apiResp.JobID, interval) time.Sleep(interval) - respBody, resp, err = postWithZip(apiKey, zipPath, idempotencyKey) + respBody, resp, err = postWithZip(endpointURL, apiKey, zipPath, idempotencyKey) if err != nil { fmt.Printf("::warning::Poll request failed: %v, retrying...\n", err) continue @@ -618,13 +732,13 @@ func callSupermodelAPI(apiKey, zipPath string) ([]byte, error) { } // postWithZip sends a multipart POST request with the zip file. -func postWithZip(apiKey, zipPath, idempotencyKey string) ([]byte, *http.Response, error) { +func postWithZip(endpointURL, apiKey, zipPath, idempotencyKey string) ([]byte, *http.Response, error) { body, contentType, err := createMultipartBody(zipPath) if err != nil { return nil, nil, err } - req, err := http.NewRequest("POST", apiBaseURL, body) + req, err := http.NewRequest("POST", endpointURL, body) if err != nil { return nil, nil, err } @@ -798,6 +912,290 @@ func rewritePathPrefix(dir, prefix string) error { }) } +// --- Analysis response types --- + +type ImpactResponse struct { + Impacts []ImpactEntry `json:"impacts"` +} + +type ImpactEntry struct { + Target struct { + File string `json:"file"` + Name string `json:"name"` + Type string `json:"type"` + } `json:"target"` + BlastRadius struct { + DirectDependents int `json:"directDependents"` + TransitiveDependents int `json:"transitiveDependents"` + AffectedFiles int `json:"affectedFiles"` + RiskScore float64 `json:"riskScore"` + } `json:"blastRadius"` + AffectedFunctions []struct { + File string `json:"file"` + Name string `json:"name"` + Distance int `json:"distance"` + Relationship string `json:"relationship"` + } `json:"affectedFunctions"` + EntryPointsAffected []struct { + File string `json:"file"` + Name string `json:"name"` + } `json:"entryPointsAffected"` +} + +type TestCoverageResponse struct { + Metadata struct { + CoveragePercentage float64 `json:"coveragePercentage"` + TestedFunctions int `json:"testedFunctions"` + UntestedFunctions int `json:"untestedFunctions"` + } `json:"metadata"` + UntestedFunctions []struct { + File string `json:"file"` + Name string `json:"name"` + Line int `json:"line"` + Type string `json:"type"` + Confidence string `json:"confidence"` + Reason string `json:"reason"` + } `json:"untestedFunctions"` + TestedFunctions []struct { + File string `json:"file"` + Name string `json:"name"` + Line int `json:"line"` + TestFiles []string `json:"testFiles"` + } `json:"testedFunctions"` + CoverageByFile []struct { + File string `json:"file"` + CoveragePercentage float64 `json:"coveragePercentage"` + } `json:"coverageByFile"` +} + +type CircularDepsResponse struct { + Cycles []struct { + ID string `json:"id"` + Files []string `json:"files"` + Edges []struct { + Source string `json:"source"` + Target string `json:"target"` + ImportedSymbols []string `json:"importedSymbols"` + } `json:"edges"` + Severity string `json:"severity"` + BreakingSuggestion string `json:"breakingSuggestion"` + } `json:"cycles"` + Summary struct { + TotalCycles int `json:"totalCycles"` + HighSeverityCount int 
`json:"highSeverityCount"`
+	} `json:"summary"`
+}
+
+// enrichMarkdown reads analysis JSON and injects data into generated markdown frontmatter.
+func enrichMarkdown(contentDir string, impactJSON, testCoverageJSON, circularDepsJSON []byte) int {
+	// Parse analysis data
+	impactByFile := map[string]ImpactEntry{}
+	if impactJSON != nil {
+		var impact ImpactResponse
+		if err := json.Unmarshal(impactJSON, &impact); err == nil {
+			for _, entry := range impact.Impacts {
+				impactByFile[entry.Target.File] = entry
+			}
+		}
+	}
+
+	testedFuncs := map[string][]string{} // "file:name" -> test files
+	untestedFuncs := map[string]string{} // "file:name" -> reason
+	coverageByFile := map[string]float64{}
+	if testCoverageJSON != nil {
+		var coverage TestCoverageResponse
+		if err := json.Unmarshal(testCoverageJSON, &coverage); err == nil {
+			for _, f := range coverage.TestedFunctions {
+				key := f.File + ":" + f.Name
+				testedFuncs[key] = f.TestFiles
+			}
+			for _, f := range coverage.UntestedFunctions {
+				key := f.File + ":" + f.Name
+				untestedFuncs[key] = f.Reason
+			}
+			for _, f := range coverage.CoverageByFile {
+				coverageByFile[f.File] = f.CoveragePercentage
+			}
+		}
+	}
+
+	filesInCycles := map[string][]string{}  // file -> cycle IDs
+	cycleDetails := map[string]string{}     // cycle ID -> severity
+	cycleSuggestions := map[string]string{} // cycle ID -> suggestion
+	if circularDepsJSON != nil {
+		var circular CircularDepsResponse
+		if err := json.Unmarshal(circularDepsJSON, &circular); err == nil {
+			for _, cycle := range circular.Cycles {
+				cycleDetails[cycle.ID] = cycle.Severity
+				cycleSuggestions[cycle.ID] = cycle.BreakingSuggestion
+				for _, f := range cycle.Files {
+					filesInCycles[f] = append(filesInCycles[f], cycle.ID)
+				}
+			}
+		}
+	}
+
+	enriched := 0
+	filepath.Walk(contentDir, func(path string, info os.FileInfo, err error) error {
+		if err != nil || info.IsDir() || !strings.HasSuffix(path, ".md") {
+			return nil
+		}
+
+		data, err := os.ReadFile(path)
+		if err != nil {
+			return nil
+		}
+
+		content := string(data)
+
+		// Find frontmatter boundaries
+		if !strings.HasPrefix(content, "---\n") {
+			return nil
+		}
+		endIdx := strings.Index(content[4:], "\n---\n")
+		if endIdx < 0 {
+			return nil
+		}
+		endIdx += 4
+
+		frontmatter := content[:endIdx]
+		body := content[endIdx+5:] // skip "\n---\n"
+
+		// Extract identifying fields from the frontmatter
+		filePath := extractFrontmatterValue(frontmatter, "file_path")
+		funcName := extractFrontmatterValue(frontmatter, "function_name")
+		nodeType := extractFrontmatterValue(frontmatter, "node_type")
+
+		var additions []string
+		var bodySections []string
+		modified := false
+
+		// Impact analysis enrichment
+		if filePath != "" {
+			if impact, ok := impactByFile[filePath]; ok {
+				level := "Low"
+				switch {
+				case impact.BlastRadius.RiskScore > 100:
+					level = "Critical"
+				case impact.BlastRadius.RiskScore >= 30:
+					level = "High"
+				case impact.BlastRadius.RiskScore >= 10:
+					level = "Medium"
+				}
+				additions = append(additions,
+					fmt.Sprintf("impact_level: \"%s\"", level),
+					fmt.Sprintf("impact_risk_score: %.1f", impact.BlastRadius.RiskScore),
+					fmt.Sprintf("impact_direct_dependents: %d", impact.BlastRadius.DirectDependents),
+					fmt.Sprintf("impact_transitive_dependents: %d", impact.BlastRadius.TransitiveDependents),
+					fmt.Sprintf("impact_affected_files: %d", impact.BlastRadius.AffectedFiles),
+				)
+
+				var impactLines []string
+				impactLines = append(impactLines, fmt.Sprintf("- Risk Score: %.1f (%s)", impact.BlastRadius.RiskScore, level))
+				impactLines = append(impactLines, fmt.Sprintf("- Direct
Dependents: %d", impact.BlastRadius.DirectDependents)) + impactLines = append(impactLines, fmt.Sprintf("- Transitive Dependents: %d", impact.BlastRadius.TransitiveDependents)) + impactLines = append(impactLines, fmt.Sprintf("- Affected Files: %d", impact.BlastRadius.AffectedFiles)) + if len(impact.EntryPointsAffected) > 0 { + impactLines = append(impactLines, fmt.Sprintf("- Entry Points Affected: %d", len(impact.EntryPointsAffected))) + for _, ep := range impact.EntryPointsAffected { + if len(impactLines) > 10 { + break + } + impactLines = append(impactLines, fmt.Sprintf(" - %s (%s)", ep.Name, ep.File)) + } + } + bodySections = append(bodySections, "## Impact Analysis\n\n"+strings.Join(impactLines, "\n")) + modified = true + } + } + + // Test coverage enrichment + if funcName != "" && filePath != "" && (nodeType == "Function" || nodeType == "Method") { + key := filePath + ":" + funcName + if testFiles, ok := testedFuncs[key]; ok { + additions = append(additions, `test_coverage: "Tested"`) + var lines []string + lines = append(lines, fmt.Sprintf("- Status: Tested by %d test file(s)", len(testFiles))) + for _, tf := range testFiles { + lines = append(lines, fmt.Sprintf(" - %s", tf)) + } + bodySections = append(bodySections, "## Test Coverage\n\n"+strings.Join(lines, "\n")) + modified = true + } else if reason, ok := untestedFuncs[key]; ok { + additions = append(additions, `test_coverage: "Untested"`) + bodySections = append(bodySections, "## Test Coverage\n\n- Status: Untested\n- Reason: "+reason) + modified = true + } + } + if nodeType == "File" && filePath != "" { + if cov, ok := coverageByFile[filePath]; ok { + covStatus := "Tested" + if cov == 0 { + covStatus = "Untested" + } + additions = append(additions, + fmt.Sprintf("test_coverage: \"%s\"", covStatus), + fmt.Sprintf("test_coverage_pct: %.1f", cov), + ) + bodySections = append(bodySections, fmt.Sprintf("## Test Coverage\n\n- File Coverage: %.1f%%", cov)) + modified = true + } + } + + // Circular dependency enrichment + if filePath != "" { + if cycleIDs, ok := filesInCycles[filePath]; ok { + additions = append(additions, `dependency_health: "In Cycle"`) + var lines []string + for _, id := range cycleIDs { + sev := cycleDetails[id] + lines = append(lines, fmt.Sprintf("- %s (severity: %s)", id, sev)) + if suggestion := cycleSuggestions[id]; suggestion != "" { + lines = append(lines, fmt.Sprintf(" - Suggestion: %s", suggestion)) + } + } + bodySections = append(bodySections, "## Circular Dependencies\n\n"+strings.Join(lines, "\n")) + modified = true + } else if nodeType == "File" { + additions = append(additions, `dependency_health: "Clean"`) + modified = true + } + } + + if !modified { + return nil + } + + // Rebuild file: insert new frontmatter fields before closing --- + newFrontmatter := frontmatter + "\n" + strings.Join(additions, "\n") + newBody := body + if len(bodySections) > 0 { + newBody = strings.Join(bodySections, "\n\n") + "\n\n" + body + } + + newContent := newFrontmatter + "\n---\n" + newBody + os.WriteFile(path, []byte(newContent), info.Mode()) + enriched++ + return nil + }) + + return enriched +} + +// extractFrontmatterValue extracts a simple string value from YAML frontmatter. 
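+// Matching is a simple per-line "key:" prefix scan with surrounding quotes
+// trimmed; nested mappings and multi-line values are not handled.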
+func extractFrontmatterValue(frontmatter, key string) string { + for _, line := range strings.Split(frontmatter, "\n") { + line = strings.TrimSpace(line) + prefix := key + ":" + if strings.HasPrefix(line, prefix) { + val := strings.TrimSpace(line[len(prefix):]) + val = strings.Trim(val, `"'`) + return val + } + } + return "" +} + // countFiles counts files with the given extension in a directory tree. func countFiles(dir, ext string) int { count := 0 diff --git a/templates/_styles.css b/templates/_styles.css index 8a2b80e..19ab4ef 100644 --- a/templates/_styles.css +++ b/templates/_styles.css @@ -182,6 +182,7 @@ a.pill:hover { .pill-green { border-color: var(--green); color: var(--green); } .pill-orange { border-color: var(--orange); color: var(--orange); } .pill-blue { border-color: var(--blue); color: var(--blue); } +.pill-red { border-color: var(--red); color: var(--red); } /* Sections */ .entity-section { diff --git a/templates/entity.html b/templates/entity.html index d6a868d..657b671 100644 --- a/templates/entity.html +++ b/templates/entity.html @@ -36,6 +36,9 @@

     <h1>{{.Entity.GetString "title"}}</h1>
     {{if .Entity.GetInt "function_count"}}<span class="pill">{{.Entity.GetInt "function_count"}} functions</span>{{end}}
     {{if .Entity.GetInt "class_count"}}<span class="pill">{{.Entity.GetInt "class_count"}} classes</span>{{end}}
     {{if .Entity.GetInt "file_count"}}<span class="pill">{{.Entity.GetInt "file_count"}} files</span>{{end}}
+    {{if .Entity.GetString "impact_level"}}{{$il := .Entity.GetString "impact_level"}}<span class="pill {{if eq $il "Low"}}pill-green{{else if eq $il "Medium"}}pill-orange{{else}}pill-red{{end}}">Impact: {{$il}}</span>{{end}}
+    {{if .Entity.GetString "test_coverage"}}{{$tc := .Entity.GetString "test_coverage"}}<span class="pill {{if eq $tc "Tested"}}pill-green{{else}}pill-red{{end}}">{{$tc}}</span>{{end}}
+    {{if .Entity.GetString "dependency_health"}}{{$dh := .Entity.GetString "dependency_health"}}<span class="pill {{if eq $dh "Clean"}}pill-green{{else}}pill-red{{end}}">{{$dh}}</span>{{end}}
     {{if .Entity.GetString "summary"}}
@@ -204,6 +207,27 @@

     <h2>Source</h2>
     {{.}}
   </div>
   {{end}}
 
+  {{with index $sections "Impact Analysis"}}
+  <div class="entity-section">
+    <h2>Impact Analysis</h2>
+    {{.}}
+  </div>
+  {{end}}
+
+  {{with index $sections "Test Coverage"}}
+  <div class="entity-section">
+    <h2>Test Coverage</h2>
+    {{.}}
+  </div>
+  {{end}}
+
+  {{with index $sections "Circular Dependencies"}}
+  <div class="entity-section">
+    <h2>Circular Dependencies</h2>
+    {{.}}
+  </div>
+  {{end}}
+
   {{with .Entity.GetFAQs}}
   <div class="entity-section">
     <h2>Frequently Asked Questions</h2>
     {{.}}
   </div>
From e0ce76a413977c9671dc772988d22eb47cfc7f6f Mon Sep 17 00:00:00 2001 From: jonathanpopham Date: Tue, 10 Feb 2026 21:31:25 -0500 Subject: [PATCH 2/2] Add coverage bar visualization for test coverage data Renders per-file coverage as styled progress bars with percentage, file path, and tested/total ratio. File entities show function-level breakdown with check/x indicators. Directory/Module entities show sorted child file bars. --- main.go | 103 ++++++++++++++++++++++++++++++++++++++++-- templates/_styles.css | 11 +++++ 2 files changed, 111 insertions(+), 3 deletions(-) diff --git a/main.go b/main.go index e20c197..829d5fd 100644 --- a/main.go +++ b/main.go @@ -13,6 +13,7 @@ import ( "os" "os/exec" "path/filepath" + "sort" "strconv" "strings" "sync" @@ -999,19 +1000,28 @@ func enrichMarkdown(contentDir string, impactJSON, testCoverageJSON, circularDep } } - testedFuncs := map[string][]string{} // "file:name" -> test files - untestedFuncs := map[string]string{} // "file:name" -> reason + testedFuncs := map[string][]string{} // "file:name" -> test files + untestedFuncs := map[string]string{} // "file:name" -> reason coverageByFile := map[string]float64{} + testedCountByFile := map[string]int{} + totalCountByFile := map[string]int{} + testedNamesInFile := map[string][]string{} + untestedNamesInFile := map[string][]string{} if testCoverageJSON != nil { var coverage TestCoverageResponse if err := json.Unmarshal(testCoverageJSON, &coverage); err == nil { for _, f := range coverage.TestedFunctions { key := f.File + ":" + f.Name testedFuncs[key] = f.TestFiles + testedCountByFile[f.File]++ + totalCountByFile[f.File]++ + testedNamesInFile[f.File] = append(testedNamesInFile[f.File], f.Name) } for _, f := range coverage.UntestedFunctions { key := f.File + ":" + f.Name untestedFuncs[key] = f.Reason + totalCountByFile[f.File]++ + untestedNamesInFile[f.File] = append(untestedNamesInFile[f.File], f.Name) } for _, f := range coverage.CoverageByFile { coverageByFile[f.File] = f.CoveragePercentage @@ -1133,11 +1143,70 @@ func enrichMarkdown(contentDir string, impactJSON, testCoverageJSON, circularDep if cov == 0 { covStatus = "Untested" } + tc := testedCountByFile[filePath] + tot := totalCountByFile[filePath] additions = append(additions, fmt.Sprintf("test_coverage: \"%s\"", covStatus), fmt.Sprintf("test_coverage_pct: %.1f", cov), ) - bodySections = append(bodySections, fmt.Sprintf("## Test Coverage\n\n- File Coverage: %.1f%%", cov)) + var covLines []string + covLines = append(covLines, coverageBarHTML(filePath, cov, tc, tot)) + for _, name := range testedNamesInFile[filePath] { + covLines = append(covLines, fmt.Sprintf(` %s`, name)) + } + for _, name := range untestedNamesInFile[filePath] { + covLines = append(covLines, fmt.Sprintf(` %s`, name)) + } + bodySections = append(bodySections, "## Test Coverage\n\n"+joinCoverageItems(covLines)) + modified = true + } + } + + // Directory/Module/Package entities: show child file coverage bars + if (nodeType == "Directory" || nodeType == "Module" || nodeType == "Package" || nodeType == "Namespace") && filePath != "" && len(coverageByFile) > 0 { + prefix := filePath + if !strings.HasSuffix(prefix, "/") { + prefix += "/" + } + type fileCov struct { + file string + pct float64 + tested int + total int + } + var childFiles []fileCov + for f, pct := range coverageByFile { + if strings.HasPrefix(f, prefix) { + childFiles = append(childFiles, fileCov{f, pct, testedCountByFile[f], totalCountByFile[f]}) + } + } + if len(childFiles) > 0 { + sort.Slice(childFiles, func(i, j int) 
bool { + return childFiles[i].pct < childFiles[j].pct + }) + var covLines []string + for _, cf := range childFiles { + covLines = append(covLines, coverageBarHTML(cf.file, cf.pct, cf.tested, cf.total)) + } + bodySections = append(bodySections, "## Test Coverage\n\n"+joinCoverageItems(covLines)) + totalTested := 0 + totalAll := 0 + for _, cf := range childFiles { + totalTested += cf.tested + totalAll += cf.total + } + overallPct := 0.0 + if totalAll > 0 { + overallPct = float64(totalTested) / float64(totalAll) * 100 + } + covStatus := "Tested" + if overallPct == 0 { + covStatus = "Untested" + } + additions = append(additions, + fmt.Sprintf("test_coverage: \"%s\"", covStatus), + fmt.Sprintf("test_coverage_pct: %.1f", overallPct), + ) modified = true } } @@ -1196,6 +1265,34 @@ func extractFrontmatterValue(frontmatter, key string) string { return "" } +// coverageBarHTML generates an HTML coverage bar row. +func coverageBarHTML(label string, pct float64, tested, total int) string { + color := "var(--red)" + if pct >= 80 { + color = "var(--green)" + } else if pct > 0 { + color = "var(--orange)" + } + return fmt.Sprintf( + `
`+ + `
`+ + `%.1f%%`+ + `%s`+ + `(%d/%d)`+ + `
`, + pct, color, pct, label, tested, total, + ) +} + +// joinCoverageItems wraps HTML strings as markdown list items. +func joinCoverageItems(items []string) string { + var lines []string + for _, item := range items { + lines = append(lines, "- "+item) + } + return strings.Join(lines, "\n") +} + // countFiles counts files with the given extension in a directory tree. func countFiles(dir, ext string) int { count := 0 diff --git a/templates/_styles.css b/templates/_styles.css index 19ab4ef..f1be1db 100644 --- a/templates/_styles.css +++ b/templates/_styles.css @@ -184,6 +184,17 @@ a.pill:hover { .pill-blue { border-color: var(--blue); color: var(--blue); } .pill-red { border-color: var(--red); color: var(--red); } +/* Coverage bars */ +.cov-row { display: flex; align-items: center; gap: 8px; } +.cov-bar { width: 120px; height: 10px; background: rgba(255,255,255,0.06); border-radius: 3px; overflow: hidden; flex-shrink: 0; } +.cov-fill { height: 100%; border-radius: 3px; } +.cov-pct { width: 44px; text-align: right; flex-shrink: 0; font-variant-numeric: tabular-nums; color: var(--text); } +.cov-label { color: var(--text-muted); flex: 1; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; } +.cov-ratio { color: var(--text-muted); opacity: 0.6; flex-shrink: 0; } +.cov-func { display: flex; align-items: center; gap: 6px; } +.cov-check { color: var(--green); } +.cov-x { color: var(--red); } + /* Sections */ .entity-section { margin-bottom: 32px;
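
A table-driven test would pin down coverageBarHTML's color bands (0% red,
anything below 80% orange, 80% and up green). A minimal sketch, assuming the
markup above; the test file name is hypothetical and not part of this series:

// coverage_bar_test.go (hypothetical)
package main

import (
	"strings"
	"testing"
)

// TestCoverageBarColors asserts the fill color picked for each band.
func TestCoverageBarColors(t *testing.T) {
	cases := []struct {
		pct  float64
		want string
	}{
		{0, "var(--red)"},       // zero coverage renders red
		{45.5, "var(--orange)"}, // partial coverage renders orange
		{80, "var(--green)"},    // the >= 80 threshold renders green
	}
	for _, c := range cases {
		got := coverageBarHTML("pkg/file.go", c.pct, 1, 2)
		if !strings.Contains(got, c.want) {
			t.Errorf("pct=%.1f: want %s in %q", c.pct, c.want, got)
		}
	}
}

Since the helper returns a flat HTML string, a substring check on the style
attribute is enough; no DOM parsing is needed.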