From 4c7e46ef5f1c73ce61b1e45f74b8edd451d95571 Mon Sep 17 00:00:00 2001 From: Vishal Gowda Date: Wed, 18 Feb 2026 12:33:47 +0000 Subject: [PATCH 1/2] feat: add schema complexity analyzer with TUI, graph views, and DOT export Adds `openapi spec analyze` command that examines schema references to identify cycles, compute complexity metrics, assess codegen difficulty tiers, and generate refactoring suggestions. Includes interactive TUI with 4 tabs (Summary, Schemas, Cycles, Graph), navigable graph views (DAG overview, SCC gallery, ego graph), bordered schema detail cards, and multiple output formats (tui, json, text, dot). Co-Authored-By: Claude Opus 4.6 --- AGENTS.md | 2 + cmd/openapi/commands/openapi/analyze.go | 111 ++++ cmd/openapi/commands/openapi/root.go | 1 + cmd/openapi/internal/analyze/analyze_test.go | 247 ++++++++ cmd/openapi/internal/analyze/codegen.go | 275 +++++++++ cmd/openapi/internal/analyze/cycles.go | 336 ++++++++++ cmd/openapi/internal/analyze/graph.go | 579 ++++++++++++++++++ cmd/openapi/internal/analyze/mermaid.go | 441 +++++++++++++ cmd/openapi/internal/analyze/mermaid_test.go | 113 ++++ cmd/openapi/internal/analyze/metrics.go | 144 +++++ cmd/openapi/internal/analyze/output.go | 275 +++++++++ cmd/openapi/internal/analyze/report.go | 106 ++++ cmd/openapi/internal/analyze/suggestions.go | 228 +++++++ .../analyze/testdata/cyclic.openapi.yaml | 186 ++++++ .../internal/analyze/tui/graph_view.go | 249 ++++++++ cmd/openapi/internal/analyze/tui/keys.go | 20 + cmd/openapi/internal/analyze/tui/model.go | 488 +++++++++++++++ .../internal/analyze/tui/schema_card.go | 212 +++++++ cmd/openapi/internal/analyze/tui/styles.go | 154 +++++ cmd/openapi/internal/analyze/tui/views.go | 437 +++++++++++++ 20 files changed, 4604 insertions(+) create mode 100644 cmd/openapi/commands/openapi/analyze.go create mode 100644 cmd/openapi/internal/analyze/analyze_test.go create mode 100644 cmd/openapi/internal/analyze/codegen.go create mode 100644 
cmd/openapi/internal/analyze/cycles.go create mode 100644 cmd/openapi/internal/analyze/graph.go create mode 100644 cmd/openapi/internal/analyze/mermaid.go create mode 100644 cmd/openapi/internal/analyze/mermaid_test.go create mode 100644 cmd/openapi/internal/analyze/metrics.go create mode 100644 cmd/openapi/internal/analyze/output.go create mode 100644 cmd/openapi/internal/analyze/report.go create mode 100644 cmd/openapi/internal/analyze/suggestions.go create mode 100644 cmd/openapi/internal/analyze/testdata/cyclic.openapi.yaml create mode 100644 cmd/openapi/internal/analyze/tui/graph_view.go create mode 100644 cmd/openapi/internal/analyze/tui/keys.go create mode 100644 cmd/openapi/internal/analyze/tui/model.go create mode 100644 cmd/openapi/internal/analyze/tui/schema_card.go create mode 100644 cmd/openapi/internal/analyze/tui/styles.go create mode 100644 cmd/openapi/internal/analyze/tui/views.go diff --git a/AGENTS.md b/AGENTS.md index 085cee3d..933b86c2 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -493,3 +493,5 @@ Test cases should cover: 4. **Performance**: Parallel execution speeds up test runs 5. **Debugging**: testify assertions and clear structure provide helpful failure messages 6. **Reliability**: Using `t.Context()` ensures proper test lifecycle management + +IMPORTANT: when user requests to "use granary", run `granary` command before performing any other task. 
diff --git a/cmd/openapi/commands/openapi/analyze.go b/cmd/openapi/commands/openapi/analyze.go new file mode 100644 index 00000000..cb96b426 --- /dev/null +++ b/cmd/openapi/commands/openapi/analyze.go @@ -0,0 +1,111 @@ +package openapi + +import ( + "errors" + "fmt" + "os" + + tea "github.com/charmbracelet/bubbletea" + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze/tui" + "github.com/spf13/cobra" +) + +var analyzeCmd = &cobra.Command{ + Use: "analyze <file>", + Short: "Analyze schema complexity, cyclicality, and codegen difficulty", + Long: `Analyze an OpenAPI specification to understand schema complexity. + +This command examines schema references to identify: +- Cycles and strongly connected components (SCCs) +- Per-schema complexity metrics (fan-in, fan-out, nesting) +- Code generation difficulty tiers (green/yellow/red) +- Actionable refactoring suggestions + +Output formats: + tui - Interactive terminal UI with progressive disclosure (default) + json - Machine-readable JSON report for CI/CD pipelines + text - Human-readable text summary + dot - Graphviz DOT format for graph visualization + +Stdin is supported — pipe data or use '-': + cat spec.yaml | openapi spec analyze + cat spec.yaml | openapi spec analyze - --format json`, + Args: stdinOrFileArgs(1, 1), + RunE: runAnalyze, +} + +func init() { + analyzeCmd.Flags().StringP("format", "f", "tui", "output format: tui, json, text, dot") + analyzeCmd.Flags().StringP("output", "o", "", "write output to file instead of stdout") +} + +func runAnalyze(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + inputFile := inputFileFromArgs(args) + format, _ := cmd.Flags().GetString("format") + outputFile, _ := cmd.Flags().GetString("output") + + // Load the document + doc, err := loadOpenAPIDocument(ctx, inputFile) + if err != nil { + return err + } + + // Run analysis + report := analyze.Analyze(ctx, doc) + + switch format { + case 
"tui": + if outputFile != "" { + return errors.New("--output is not compatible with --format tui") + } + m := tui.NewModel(report) + p := tea.NewProgram(m, tea.WithAltScreen()) + if _, err := p.Run(); err != nil { + return fmt.Errorf("error running analyzer TUI: %w", err) + } + return nil + + case "json": + w := os.Stdout + if outputFile != "" { + f, err := os.Create(outputFile) + if err != nil { + return fmt.Errorf("failed to create output file: %w", err) + } + defer f.Close() + w = f + } + return analyze.WriteJSON(w, report) + + case "text": + w := os.Stdout + if outputFile != "" { + f, err := os.Create(outputFile) + if err != nil { + return fmt.Errorf("failed to create output file: %w", err) + } + defer f.Close() + w = f + } + analyze.WriteText(w, report) + return nil + + case "dot": + w := os.Stdout + if outputFile != "" { + f, err := os.Create(outputFile) + if err != nil { + return fmt.Errorf("failed to create output file: %w", err) + } + defer f.Close() + w = f + } + analyze.WriteDOT(w, report) + return nil + + default: + return fmt.Errorf("unknown format: %s (expected tui, json, text, or dot)", format) + } +} diff --git a/cmd/openapi/commands/openapi/root.go b/cmd/openapi/commands/openapi/root.go index 5f4c614f..420049dc 100644 --- a/cmd/openapi/commands/openapi/root.go +++ b/cmd/openapi/commands/openapi/root.go @@ -17,5 +17,6 @@ func Apply(rootCmd *cobra.Command) { rootCmd.AddCommand(optimizeCmd) rootCmd.AddCommand(localizeCmd) rootCmd.AddCommand(exploreCmd) + rootCmd.AddCommand(analyzeCmd) rootCmd.AddCommand(snipCmd) } diff --git a/cmd/openapi/internal/analyze/analyze_test.go b/cmd/openapi/internal/analyze/analyze_test.go new file mode 100644 index 00000000..1ca483e7 --- /dev/null +++ b/cmd/openapi/internal/analyze/analyze_test.go @@ -0,0 +1,247 @@ +package analyze_test + +import ( + "bytes" + "context" + "os" + "testing" + + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" + "github.com/speakeasy-api/openapi/openapi" + 
"github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func loadDoc(t *testing.T, path string) *openapi.OpenAPI { + t.Helper() + f, err := os.Open(path) + require.NoError(t, err) + defer f.Close() + + doc, _, err := openapi.Unmarshal(context.Background(), f, openapi.WithSkipValidation()) + require.NoError(t, err) + require.NotNil(t, doc) + return doc +} + +func TestBuildGraph_SimpleSpec(t *testing.T) { + doc := loadDoc(t, "../../../../openapi/testdata/test.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + + assert.NotEmpty(t, g.Nodes) + assert.Greater(t, len(g.Nodes), 0) + + // User references UserPreferences + userEdges := g.OutEdges["User"] + assert.NotEmpty(t, userEdges, "User should have outgoing edges") + + found := false + for _, e := range userEdges { + if e.To == "UserPreferences" { + found = true + assert.Equal(t, analyze.EdgeProperty, e.Kind) + } + } + assert.True(t, found, "User should reference UserPreferences") +} + +func TestBuildGraph_CyclicSpec(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + + assert.Equal(t, 10, len(g.Nodes)) + assert.Greater(t, len(g.Edges), 0) + + // TreeNode has self-references + treeEdges := g.OutEdges["TreeNode"] + selfRefCount := 0 + for _, e := range treeEdges { + if e.To == "TreeNode" { + selfRefCount++ + } + } + assert.GreaterOrEqual(t, selfRefCount, 2, "TreeNode should have at least 2 self-references (parent + children)") +} + +func TestAnalyzeCycles_DetectsSCCs(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + + assert.Greater(t, len(ca.SCCs), 0, "Should detect SCCs") + assert.Greater(t, len(ca.Cycles), 0, "Should detect cycles") + assert.NotEmpty(t, ca.NodesInCycles, "Should have nodes in cycles") + + // Person <-> Company is a required-only cycle + hasRequiredCycle := false + for _, c := range 
ca.Cycles { + if c.HasRequiredOnlyPath { + hasRequiredCycle = true + break + } + } + assert.True(t, hasRequiredCycle, "Should detect required-only cycle (Person <-> Company)") +} + +func TestAnalyzeCycles_NoCyclesInSimpleSpec(t *testing.T) { + doc := loadDoc(t, "../../../../openapi/testdata/test.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + + assert.Empty(t, ca.SCCs, "Simple spec should have no non-trivial SCCs") + assert.Empty(t, ca.Cycles, "Simple spec should have no cycles") +} + +func TestComputeMetrics(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + metrics := analyze.ComputeMetrics(g, ca) + + // Person has high fan-in (referenced by Company, Dog, Department, Event.data.anyOf) + personMetrics := metrics["Person"] + require.NotNil(t, personMetrics) + assert.Equal(t, 4, personMetrics.FanIn) + assert.True(t, personMetrics.InSCC) + + // BigSchema has high property count + bigMetrics := metrics["BigSchema"] + require.NotNil(t, bigMetrics) + assert.Equal(t, 31, bigMetrics.PropertyCount) + assert.Equal(t, 31, bigMetrics.DeepPropertyCount) // all at root level + assert.Equal(t, 0, bigMetrics.NestingDepth) + assert.Equal(t, 0, bigMetrics.CompositionDepth) + assert.Equal(t, 0, bigMetrics.UnionSiteCount) + + // Animal has 1 union site (oneOf, width 2, no discriminator) + animalMetrics := metrics["Animal"] + require.NotNil(t, animalMetrics) + assert.Equal(t, 1, animalMetrics.UnionSiteCount) + assert.Equal(t, 2, animalMetrics.MaxUnionWidth) + assert.Equal(t, 2, animalMetrics.VariantProduct) + assert.Equal(t, 1, animalMetrics.CompositionDepth) + + // Event has 1 union site (anyOf at data property, width 2) + eventMetrics := metrics["Event"] + require.NotNil(t, eventMetrics) + assert.Equal(t, 1, eventMetrics.UnionSiteCount) + assert.Equal(t, 2, eventMetrics.MaxUnionWidth) + assert.Equal(t, 1, 
eventMetrics.CompositionDepth) +} + +func TestAssessCodegen(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + metrics := analyze.ComputeMetrics(g, ca) + report := analyze.AssessCodegen(g, ca, metrics) + + // Person and Company should be red (required cycle) + assert.Equal(t, analyze.CodegenRed, report.PerSchema["Person"].Tier) + assert.Equal(t, analyze.CodegenRed, report.PerSchema["Company"].Tier) + + // Animal should be yellow (oneOf without discriminator) + assert.Equal(t, analyze.CodegenYellow, report.PerSchema["Animal"].Tier) + + // Event has anyOf inside its inline `data` property — now detected as red + assert.Equal(t, analyze.CodegenRed, report.PerSchema["Event"].Tier) + + // BigSchema should be yellow (high property count) + assert.Equal(t, analyze.CodegenYellow, report.PerSchema["BigSchema"].Tier) + + assert.Greater(t, report.RedCount, 0) + assert.Greater(t, report.YellowCount, 0) + assert.Greater(t, report.GreenCount, 0) +} + +func TestGenerateSuggestions(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + report := analyze.Analyze(context.Background(), doc) + + assert.NotEmpty(t, report.Suggestions, "Should generate suggestions") + + // Should have cycle break suggestions + hasCutEdge := false + hasDiscriminator := false + hasPropertyReduction := false + for _, s := range report.Suggestions { + switch s.Type { + case analyze.SuggestionCutEdge: + hasCutEdge = true + case analyze.SuggestionAddDiscriminator: + hasDiscriminator = true + case analyze.SuggestionReducePropertyCount: + hasPropertyReduction = true + } + } + assert.True(t, hasCutEdge, "Should suggest cutting edges to break cycles") + assert.True(t, hasDiscriminator, "Should suggest adding discriminator to Animal") + assert.True(t, hasPropertyReduction, "Should suggest splitting BigSchema") +} + +func TestAnalyze_FullPipeline(t *testing.T) { + doc := loadDoc(t, 
"testdata/cyclic.openapi.yaml") + report := analyze.Analyze(context.Background(), doc) + + assert.Equal(t, "Cyclic Schema Test", report.DocumentTitle) + assert.Equal(t, "1.0.0", report.DocumentVersion) + assert.Equal(t, 10, report.TotalSchemas) + assert.Greater(t, report.TotalEdges, 0) + assert.Greater(t, report.SCCCount, 0) + assert.Greater(t, report.SchemasInCyclesPct, 0.0) + assert.Less(t, report.CompatibilityScore, 100.0) + assert.NotEmpty(t, report.TopFanIn) + assert.NotEmpty(t, report.TopComplex) +} + +func TestWriteJSON(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + report := analyze.Analyze(context.Background(), doc) + + var buf bytes.Buffer + err := analyze.WriteJSON(&buf, report) + require.NoError(t, err) + + output := buf.String() + assert.Contains(t, output, "Cyclic Schema Test") + assert.Contains(t, output, `"sccCount": 3`) + assert.Contains(t, output, `"codegenTier"`) + assert.Contains(t, output, `"complexityScore"`) + assert.Contains(t, output, `"rank"`) + assert.Contains(t, output, `"deepPropertyCount"`) +} + +func TestWriteDOT(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + report := analyze.Analyze(context.Background(), doc) + + var buf bytes.Buffer + analyze.WriteDOT(&buf, report) + + output := buf.String() + assert.Contains(t, output, "digraph schemas") + assert.Contains(t, output, "Person") + assert.Contains(t, output, "Company") + assert.Contains(t, output, "->") + assert.Contains(t, output, "fillcolor") + // Red tier schemas should have red fill + assert.Contains(t, output, "#f8d7da") + // Green tier schemas should have green fill + assert.Contains(t, output, "#d4edda") +} + +func TestWriteText(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + report := analyze.Analyze(context.Background(), doc) + + var buf bytes.Buffer + analyze.WriteText(&buf, report) + + output := buf.String() + assert.Contains(t, output, "Schema Complexity Report") + assert.Contains(t, output, "CYCLE 
HEALTH") + assert.Contains(t, output, "CODEGEN COMPATIBILITY") + assert.Contains(t, output, "MOST COMPLEX") + assert.Contains(t, output, "score=") + assert.Contains(t, output, "SUGGESTIONS") +} diff --git a/cmd/openapi/internal/analyze/codegen.go b/cmd/openapi/internal/analyze/codegen.go new file mode 100644 index 00000000..2ea7b4f9 --- /dev/null +++ b/cmd/openapi/internal/analyze/codegen.go @@ -0,0 +1,275 @@ +package analyze + +import ( + "fmt" + "slices" +) + +// CodegenTier represents the difficulty level for code generation. +type CodegenTier int + +const ( + // CodegenGreen means the schema is straightforward to generate code for. + CodegenGreen CodegenTier = iota + // CodegenYellow means the schema has moderate complexity that may challenge some generators. + CodegenYellow + // CodegenRed means the schema has significant challenges for code generation. + CodegenRed +) + +func (t CodegenTier) String() string { + switch t { + case CodegenGreen: + return "green" + case CodegenYellow: + return "yellow" + case CodegenRed: + return "red" + } + return "unknown" +} + +// CodegenSignal describes a specific code generation challenge. +type CodegenSignal struct { + // ID is a short identifier for the signal. + ID string + // Description is a human-readable explanation. + Description string + // Severity is the impact level. + Severity CodegenTier + // AffectedSchemas lists the schema IDs affected (if applicable). + AffectedSchemas []string +} + +// CodegenDifficulty holds the codegen assessment for a single schema. +type CodegenDifficulty struct { + SchemaID string + Tier CodegenTier + Signals []*CodegenSignal +} + +// CodegenReport is the aggregate codegen assessment for the entire document. +type CodegenReport struct { + // PerSchema maps schema ID to its difficulty assessment. + PerSchema map[string]*CodegenDifficulty + // GreenCount is the number of green-tier schemas. + GreenCount int + // YellowCount is the number of yellow-tier schemas. 
+ YellowCount int + // RedCount is the number of red-tier schemas. + RedCount int + // CompatibilityScore is the percentage of schemas that are green-tier (0-100). + CompatibilityScore float64 + // TopSignals are the most impactful signals across all schemas. + TopSignals []*CodegenSignal +} + +// AssessCodegen evaluates code generation difficulty for all schemas. +func AssessCodegen(g *Graph, cycles *CycleAnalysis, metrics map[string]*SchemaMetrics) *CodegenReport { + report := &CodegenReport{ + PerSchema: make(map[string]*CodegenDifficulty, len(g.Nodes)), + } + + signalCounts := make(map[string]int) + + for id, node := range g.Nodes { + m := metrics[id] + d := &CodegenDifficulty{ + SchemaID: id, + Tier: CodegenGreen, + } + + // Required cycle membership + if m != nil && m.InSCC { + for _, cycle := range cycles.Cycles { + if cycle.HasRequiredOnlyPath && slices.Contains(cycle.Path, id) { + d.addSignal("required-cycle", "Part of a cycle where all edges are required — many languages need pointer/boxing types", CodegenRed) + signalCounts["required-cycle"]++ + break + } + } + if len(d.Signals) == 0 { + d.addSignal("optional-cycle", "Part of a cycle but has optional/nullable break points", CodegenYellow) + signalCounts["optional-cycle"]++ + } + } + + // Aggregate union site info for deduplicated signals + var undiscriminatedOneOfs, anyOfSites, largeUnions []UnionSite + for _, site := range node.UnionSites { + if site.Kind == "oneOf" && !site.HasDiscriminator { + undiscriminatedOneOfs = append(undiscriminatedOneOfs, site) + } + if site.Kind == "anyOf" { + anyOfSites = append(anyOfSites, site) + } + if site.Width > 5 { + largeUnions = append(largeUnions, site) + } + } + + if len(undiscriminatedOneOfs) == 1 { + site := undiscriminatedOneOfs[0] + d.addSignal("oneOf-no-discriminator", + fmt.Sprintf("oneOf at %s without discriminator — codegen must trial-deserialize (%d variants)", site.Path, site.Width), + CodegenYellow) + signalCounts["oneOf-no-discriminator"]++ + } else if 
len(undiscriminatedOneOfs) > 1 { + d.addSignal("oneOf-no-discriminator", + fmt.Sprintf("%d oneOf sites without discriminator — codegen must trial-deserialize", len(undiscriminatedOneOfs)), + CodegenYellow) + signalCounts["oneOf-no-discriminator"]++ + } + + if len(anyOfSites) == 1 { + site := anyOfSites[0] + d.addSignal("anyOf", + fmt.Sprintf("anyOf at %s with %d potentially overlapping shapes — hard to generate correct types", site.Path, site.Width), + CodegenRed) + signalCounts["anyOf"]++ + } else if len(anyOfSites) > 1 { + d.addSignal("anyOf", + fmt.Sprintf("%d anyOf sites with potentially overlapping shapes — hard to generate correct types", len(anyOfSites)), + CodegenRed) + signalCounts["anyOf"]++ + } + + if len(largeUnions) == 1 { + site := largeUnions[0] + d.addSignal("large-union", + fmt.Sprintf("%s at %s has %d variants — large type unions are expensive to generate", site.Kind, site.Path, site.Width), + CodegenYellow) + signalCounts["large-union"]++ + } else if len(largeUnions) > 1 { + maxW := 0 + for _, site := range largeUnions { + if site.Width > maxW { + maxW = site.Width + } + } + d.addSignal("large-union", + fmt.Sprintf("%d union sites with >5 variants (largest: %d)", len(largeUnions), maxW), + CodegenYellow) + signalCounts["large-union"]++ + } + + // Combinatorial explosion across multiple independent union sites + if len(node.UnionSites) > 1 { + vp := 1 + for _, site := range node.UnionSites { + vp *= site.Width + } + if vp > 20 { + severity := CodegenYellow + if vp > 100 { + severity = CodegenRed + } + d.addSignal("combinatorial-explosion", + fmt.Sprintf("%d union sites produce %d variant combinations", len(node.UnionSites), vp), + severity) + signalCounts["combinatorial-explosion"]++ + } + } + + // Mixed types + if len(node.Types) > 1 { + hasNull := slices.Contains(node.Types, "null") + nonNullTypes := 0 + for _, t := range node.Types { + if t != "null" { + nonNullTypes++ + } + } + if nonNullTypes > 1 { + d.addSignal("mixed-types", "Multiple 
non-null types — type unions not expressible in many languages", CodegenRed) + signalCounts["mixed-types"]++ + } else if hasNull && nonNullTypes == 1 { + // Just nullable — this is fine for most languages + } + } + + // additionalProperties with named properties + hasAdditionalProps := false + for _, e := range g.OutEdges[id] { + if e.Kind == EdgeAdditionalProperties { + hasAdditionalProps = true + break + } + } + if hasAdditionalProps && node.PropertyCount > 0 { + d.addSignal("mixed-map-struct", "additionalProperties combined with named properties — awkward map+struct hybrid", CodegenYellow) + signalCounts["mixed-map-struct"]++ + } + + // Deep allOf chains + allOfEdges := 0 + for _, e := range g.OutEdges[id] { + if e.Kind == EdgeAllOf { + allOfEdges++ + } + } + if allOfEdges > 2 { + d.addSignal("deep-allOf", "Deep allOf composition — may cause inheritance complexity or flattening issues", CodegenYellow) + signalCounts["deep-allOf"]++ + } + + // Very high property count + if node.PropertyCount > 30 { + d.addSignal("high-property-count", "Schema has many properties (>30) — may indicate it should be split", CodegenYellow) + signalCounts["high-property-count"]++ + } + + report.PerSchema[id] = d + } + + // Aggregate counts + for _, d := range report.PerSchema { + switch d.Tier { + case CodegenGreen: + report.GreenCount++ + case CodegenYellow: + report.YellowCount++ + case CodegenRed: + report.RedCount++ + } + } + + total := len(report.PerSchema) + if total > 0 { + report.CompatibilityScore = float64(report.GreenCount) / float64(total) * 100 + } + + // Build top signals + for signalID, count := range signalCounts { + report.TopSignals = append(report.TopSignals, &CodegenSignal{ + ID: signalID, + Description: signalID, // will be overwritten below + AffectedSchemas: func() []string { + var schemas []string + for id, d := range report.PerSchema { + for _, s := range d.Signals { + if s.ID == signalID { + schemas = append(schemas, id) + break + } + } + } + return schemas + 
}(), + }) + _ = count + } + + return report +} + +func (d *CodegenDifficulty) addSignal(id, description string, severity CodegenTier) { + d.Signals = append(d.Signals, &CodegenSignal{ + ID: id, + Description: description, + Severity: severity, + }) + if severity > d.Tier { + d.Tier = severity + } +} diff --git a/cmd/openapi/internal/analyze/cycles.go b/cmd/openapi/internal/analyze/cycles.go new file mode 100644 index 00000000..156c9c61 --- /dev/null +++ b/cmd/openapi/internal/analyze/cycles.go @@ -0,0 +1,336 @@ +package analyze + +import "sort" + +// SCC represents a strongly connected component — a set of schemas +// that are all mutually reachable through references. +type SCC struct { + // NodeIDs is the set of schema IDs in this component. + NodeIDs []string + // Size is len(NodeIDs). + Size int + // IsTrivial is true if the SCC has only one node and no self-loop. + IsTrivial bool +} + +// Cycle represents a specific cycle path through the schema graph. +type Cycle struct { + // Path is the ordered list of schema IDs forming the cycle (last connects back to first). + Path []string + // Edges is the list of edges forming the cycle, parallel to Path. + Edges []*Edge + // Length is len(Path). + Length int + // HasRequiredOnlyPath is true if every edge in the cycle is required (no optional break point). + HasRequiredOnlyPath bool + // BreakPoints are edges that could be made optional/nullable to break the cycle. + BreakPoints []*Edge +} + +// CycleAnalysis holds the results of cycle and SCC analysis on a schema graph. +type CycleAnalysis struct { + // SCCs is all non-trivial strongly connected components (size > 1 or self-loop). + SCCs []*SCC + // LargestSCCSize is the size of the largest SCC. + LargestSCCSize int + // Cycles is the enumerated list of distinct cycles. + Cycles []*Cycle + // NodesInCycles is the set of node IDs that participate in at least one cycle. + NodesInCycles map[string]bool + // DAGCondensation is the condensed DAG after collapsing SCCs. 
+ DAGCondensation *CondensedDAG +} + +// CondensedDAG represents the graph after collapsing each SCC into a single node. +type CondensedDAG struct { + // Nodes maps SCC index to the SCC. + Nodes []*SCC + // NodeToSCC maps schema ID to SCC index. + NodeToSCC map[string]int + // Edges are the edges between SCCs (deduplicated). + Edges [][2]int // [from SCC index, to SCC index] + // Depth is the longest path in the DAG (number of layers). + Depth int + // Layers groups SCCs by their topological layer (0 = no dependencies). + Layers [][]int +} + +// AnalyzeCycles performs SCC detection, cycle enumeration, and DAG condensation. +func AnalyzeCycles(g *Graph) *CycleAnalysis { + result := &CycleAnalysis{ + NodesInCycles: make(map[string]bool), + } + + // Step 1: Find SCCs using Tarjan's algorithm + sccs := tarjanSCC(g) + for _, scc := range sccs { + if !scc.IsTrivial { + result.SCCs = append(result.SCCs, scc) + if scc.Size > result.LargestSCCSize { + result.LargestSCCSize = scc.Size + } + for _, id := range scc.NodeIDs { + result.NodesInCycles[id] = true + } + } + } + + // Step 2: Enumerate cycles (bounded DFS within each SCC) + result.Cycles = enumerateCycles(g, result.SCCs) + + // Step 3: Build condensed DAG + result.DAGCondensation = buildCondensedDAG(g, sccs) + + return result +} + +// tarjanSCC implements Tarjan's algorithm for finding strongly connected components. 
+func tarjanSCC(g *Graph) []*SCC { + var ( + index int + stack []string + onStack = make(map[string]bool) + indices = make(map[string]int) + lowlink = make(map[string]int) + result []*SCC + ) + + var strongConnect func(v string) + strongConnect = func(v string) { + indices[v] = index + lowlink[v] = index + index++ + stack = append(stack, v) + onStack[v] = true + + for _, e := range g.OutEdges[v] { + if _, visited := indices[e.To]; !visited { + strongConnect(e.To) + if lowlink[e.To] < lowlink[v] { + lowlink[v] = lowlink[e.To] + } + } else if onStack[e.To] { + if indices[e.To] < lowlink[v] { + lowlink[v] = indices[e.To] + } + } + } + + if lowlink[v] == indices[v] { + scc := &SCC{} + for { + w := stack[len(stack)-1] + stack = stack[:len(stack)-1] + onStack[w] = false + scc.NodeIDs = append(scc.NodeIDs, w) + if w == v { + break + } + } + scc.Size = len(scc.NodeIDs) + scc.IsTrivial = scc.Size == 1 && !hasSelfLoop(g, scc.NodeIDs[0]) + sort.Strings(scc.NodeIDs) // deterministic ordering + result = append(result, scc) + } + } + + // Visit all nodes (sorted for determinism) + nodeIDs := make([]string, 0, len(g.Nodes)) + for id := range g.Nodes { + nodeIDs = append(nodeIDs, id) + } + sort.Strings(nodeIDs) + + for _, id := range nodeIDs { + if _, visited := indices[id]; !visited { + strongConnect(id) + } + } + + return result +} + +func hasSelfLoop(g *Graph, nodeID string) bool { + for _, e := range g.OutEdges[nodeID] { + if e.To == nodeID { + return true + } + } + return false +} + +// enumerateCycles uses bounded DFS within each SCC to find distinct cycles. +// Limited to maxCyclesPerSCC to avoid combinatorial explosion. 
+func enumerateCycles(g *Graph, sccs []*SCC) []*Cycle { + const maxCyclesPerSCC = 50 + + var allCycles []*Cycle + for _, scc := range sccs { + sccSet := make(map[string]bool, scc.Size) + for _, id := range scc.NodeIDs { + sccSet[id] = true + } + + cycles := findCyclesInSCC(g, scc.NodeIDs[0], sccSet, maxCyclesPerSCC) + allCycles = append(allCycles, cycles...) + } + + return allCycles +} + +func findCyclesInSCC(g *Graph, startNode string, sccSet map[string]bool, maxCycles int) []*Cycle { + var cycles []*Cycle + visited := make(map[string]bool) + path := []string{} + pathEdges := []*Edge{} + + var dfs func(node string) bool + dfs = func(node string) bool { + if len(cycles) >= maxCycles { + return true + } + + visited[node] = true + path = append(path, node) + + for _, e := range g.OutEdges[node] { + if !sccSet[e.To] { + continue + } + + if e.To == startNode && len(path) > 1 { + // Found a cycle back to start + cyclePath := make([]string, len(path)) + copy(cyclePath, path) + cycleEdges := make([]*Edge, len(pathEdges)) + copy(cycleEdges, pathEdges) + cycleEdges = append(cycleEdges, e) + + cycle := &Cycle{ + Path: cyclePath, + Edges: cycleEdges, + Length: len(cyclePath), + } + classifyCycle(cycle) + cycles = append(cycles, cycle) + + if len(cycles) >= maxCycles { + return true + } + continue + } + + if !visited[e.To] { + pathEdges = append(pathEdges, e) + if dfs(e.To) { + return true + } + pathEdges = pathEdges[:len(pathEdges)-1] + } + } + + path = path[:len(path)-1] + visited[node] = false + return false + } + + dfs(startNode) + return cycles +} + +func classifyCycle(c *Cycle) { + allRequired := true + for _, e := range c.Edges { + if !e.IsRequired { + allRequired = false + c.BreakPoints = append(c.BreakPoints, e) + } else if e.IsNullable || e.IsArray { + c.BreakPoints = append(c.BreakPoints, e) + } + } + c.HasRequiredOnlyPath = allRequired +} + +// buildCondensedDAG collapses SCCs into single nodes and computes the DAG structure. 
+func buildCondensedDAG(g *Graph, sccs []*SCC) *CondensedDAG { + dag := &CondensedDAG{ + Nodes: sccs, + NodeToSCC: make(map[string]int), + } + + for i, scc := range sccs { + for _, id := range scc.NodeIDs { + dag.NodeToSCC[id] = i + } + } + + // Build edges between SCCs + edgeSet := make(map[[2]int]bool) + for _, e := range g.Edges { + fromSCC, ok1 := dag.NodeToSCC[e.From] + toSCC, ok2 := dag.NodeToSCC[e.To] + if !ok1 || !ok2 || fromSCC == toSCC { + continue + } + key := [2]int{fromSCC, toSCC} + if !edgeSet[key] { + edgeSet[key] = true + dag.Edges = append(dag.Edges, key) + } + } + + // Compute topological layers + dag.Layers = topologicalLayers(len(sccs), dag.Edges) + dag.Depth = len(dag.Layers) + + return dag +} + +// topologicalLayers assigns each node to a layer based on longest incoming path. +func topologicalLayers(nodeCount int, edges [][2]int) [][]int { + inDegree := make([]int, nodeCount) + adj := make([][]int, nodeCount) + for i := range adj { + adj[i] = []int{} + } + + for _, e := range edges { + adj[e[0]] = append(adj[e[0]], e[1]) + inDegree[e[1]]++ + } + + // Kahn's algorithm with layer tracking + var queue []int + layer := make([]int, nodeCount) + for i := 0; i < nodeCount; i++ { + if inDegree[i] == 0 { + queue = append(queue, i) + layer[i] = 0 + } + } + + maxLayer := 0 + for len(queue) > 0 { + node := queue[0] + queue = queue[1:] + for _, next := range adj[node] { + if layer[node]+1 > layer[next] { + layer[next] = layer[node] + 1 + } + inDegree[next]-- + if inDegree[next] == 0 { + queue = append(queue, next) + if layer[next] > maxLayer { + maxLayer = layer[next] + } + } + } + } + + layers := make([][]int, maxLayer+1) + for i := 0; i < nodeCount; i++ { + layers[layer[i]] = append(layers[layer[i]], i) + } + + return layers +} diff --git a/cmd/openapi/internal/analyze/graph.go b/cmd/openapi/internal/analyze/graph.go new file mode 100644 index 00000000..0113d5c5 --- /dev/null +++ b/cmd/openapi/internal/analyze/graph.go @@ -0,0 +1,579 @@ +// Package 
// Package analyze provides schema complexity analysis for OpenAPI documents.
// It extracts a directed graph of schema references and computes metrics
// useful for schema maintainers and code generation teams.
package analyze

import (
    "context"
    "fmt"
    "slices"

    "github.com/speakeasy-api/openapi/jsonschema/oas3"
    "github.com/speakeasy-api/openapi/openapi"
)

// EdgeKind describes how one schema references another.
type EdgeKind string

// Edge kinds mirror the JSON Schema keywords through which a $ref can occur.
const (
    EdgeProperty             EdgeKind = "property"
    EdgeItems                EdgeKind = "items"
    EdgeAllOf                EdgeKind = "allOf"
    EdgeOneOf                EdgeKind = "oneOf"
    EdgeAnyOf                EdgeKind = "anyOf"
    EdgeAdditionalProperties EdgeKind = "additionalProperties"
    EdgeNot                  EdgeKind = "not"
    EdgePrefixItems          EdgeKind = "prefixItems"
    EdgeIf                   EdgeKind = "if"
    EdgeThen                 EdgeKind = "then"
    EdgeElse                 EdgeKind = "else"
)

// Node represents a schema in the dependency graph.
type Node struct {
    // ID is the unique identifier for this schema, typically the JSON pointer or component name.
    ID string
    // Name is the short display name (component name or last segment of JSON pointer).
    Name string
    // IsComponent is true if this schema lives in #/components/schemas/.
    IsComponent bool
    // JSONPointer is the full JSON pointer path to this schema.
    JSONPointer string
    // PropertyCount is the number of properties defined on this schema.
    PropertyCount int
    // RequiredCount is the number of required properties.
    RequiredCount int
    // Types is the list of types this schema declares.
    Types []string
    // HasDiscriminator is true if this schema has a discriminator defined.
    HasDiscriminator bool
    // CompositionFields tracks which composition keywords are used (allOf, oneOf, anyOf).
    CompositionFields []string
    // IsNullable is true if null is in the type list or nullable is true.
    IsNullable bool
    // DeepPropertyCount is the total properties across all inline sub-schemas.
    DeepPropertyCount int
    // NestingDepth is the maximum depth of inline object nesting (not counting $refs).
    NestingDepth int
    // CompositionDepth is the maximum depth of allOf/oneOf/anyOf nesting.
    CompositionDepth int
    // UnionSites lists all oneOf/anyOf occurrences found in the schema tree.
    UnionSites []UnionSite
}

// UnionSite represents a single oneOf/anyOf occurrence within a schema tree.
type UnionSite struct {
    // Kind is "oneOf" or "anyOf".
    Kind string
    // Width is the number of alternatives.
    Width int
    // HasDiscriminator is true if this site has a discriminator.
    HasDiscriminator bool
    // Path describes where in the tree this site was found (e.g., "data", "root").
    Path string
}

// Edge represents a reference from one schema to another.
type Edge struct {
    // From is the ID of the source schema.
    From string
    // To is the ID of the target schema.
    To string
    // Kind describes how the reference is made (property, items, allOf, etc.).
    Kind EdgeKind
    // FieldName is set when Kind is EdgeProperty — the property name.
    FieldName string
    // Index is set for allOf/oneOf/anyOf — the array index.
    Index int
    // IsRequired is true if this is a required property edge.
    IsRequired bool
    // IsNullable is true if the referencing schema allows null.
    IsNullable bool
    // IsArray is true if this edge goes through items (array wrapper).
    IsArray bool
}

// Graph is a directed graph of schema references extracted from an OpenAPI document.
// Parallel edges between the same pair of nodes are kept (one per reference site);
// FanIn/FanOut deduplicate when counting distinct neighbors.
type Graph struct {
    Nodes map[string]*Node
    Edges []*Edge

    // Adjacency lists for fast traversal.
    OutEdges map[string][]*Edge // edges from a node
    InEdges  map[string][]*Edge // edges to a node
}

// NewGraph creates an empty graph.
func NewGraph() *Graph {
    return &Graph{
        Nodes:    make(map[string]*Node),
        OutEdges: make(map[string][]*Edge),
        InEdges:  make(map[string][]*Edge),
    }
}

// addNode registers n, ignoring duplicate IDs (first registration wins).
func (g *Graph) addNode(n *Node) {
    if _, exists := g.Nodes[n.ID]; exists {
        return
    }
    g.Nodes[n.ID] = n
}

// addEdge appends e to the edge list and both adjacency indexes.
func (g *Graph) addEdge(e *Edge) {
    g.Edges = append(g.Edges, e)
    g.OutEdges[e.From] = append(g.OutEdges[e.From], e)
    g.InEdges[e.To] = append(g.InEdges[e.To], e)
}

// FanOut returns the number of distinct schemas this node references.
// Parallel edges to the same target count once.
func (g *Graph) FanOut(nodeID string) int {
    seen := make(map[string]bool)
    for _, e := range g.OutEdges[nodeID] {
        seen[e.To] = true
    }
    return len(seen)
}

// FanIn returns the number of distinct schemas that reference this node.
// Parallel edges from the same source count once.
func (g *Graph) FanIn(nodeID string) int {
    seen := make(map[string]bool)
    for _, e := range g.InEdges[nodeID] {
        seen[e.From] = true
    }
    return len(seen)
}

// BuildGraph extracts a schema reference graph from an OpenAPI document.
// It walks all component schemas and discovers their references to other schemas.
// Only entries under #/components/schemas become nodes; refs to anything else
// are ignored by resolveRefTarget. ctx is currently unused — presumably
// reserved for future resolution work (TODO confirm).
func BuildGraph(ctx context.Context, doc *openapi.OpenAPI) *Graph {
    g := NewGraph()

    if doc == nil || doc.Components == nil || doc.Components.Schemas == nil {
        return g
    }

    // Phase 1: Register all component schemas as nodes.
    for name, jsonSchema := range doc.Components.Schemas.All() {
        schema := jsonSchema.GetSchema()
        if schema == nil {
            continue
        }

        node := buildNodeFromSchema(name, schema, true)
        g.addNode(node)
    }

    // Phase 2: Walk each component schema and discover edges.
    for name, jsonSchema := range doc.Components.Schemas.All() {
        schema := jsonSchema.GetSchema()
        if schema == nil {
            continue
        }

        extractEdges(g, name, schema)
    }

    return g
}

// buildNodeFromSchema computes per-node shallow stats (property counts, types,
// discriminator, nullability) plus the deep tree stats from analyzeSchemaTree.
func buildNodeFromSchema(name string, schema *oas3.Schema, isComponent bool) *Node {
    n := &Node{
        ID:          name,
        Name:        name,
        IsComponent: isComponent,
    }

    if isComponent {
        n.JSONPointer = fmt.Sprintf("#/components/schemas/%s", name)
    }

    if schema.Properties != nil {
        n.PropertyCount = schema.Properties.Len()
    }

    n.RequiredCount = len(schema.Required)
    n.Types = schemaTypeValues(schema)
    n.HasDiscriminator = schema.Discriminator != nil
    // Nullability covers both the 3.1 form (type includes "null") and the
    // 3.0 form (nullable: true).
    n.IsNullable = slices.Contains(n.Types, "null") || (schema.Nullable != nil && *schema.Nullable)

    // Analyze the full schema tree for composition, nesting, union sites
    stats := analyzeSchemaTree(schema)
    n.DeepPropertyCount = stats.deepPropertyCount
    n.NestingDepth = stats.nestingDepth
    n.CompositionDepth = stats.compositionDepth
    n.UnionSites = stats.unionSites
    // Fixed iteration order keeps CompositionFields deterministic.
    for _, field := range []string{"allOf", "oneOf", "anyOf"} {
        if stats.compositionFields[field] {
            n.CompositionFields = append(n.CompositionFields, field)
        }
    }

    return n
}

// schemaTreeStats accumulates complexity metrics from walking a schema tree.
type schemaTreeStats struct {
    compositionFields map[string]bool // which of allOf/oneOf/anyOf appear anywhere in the tree
    deepPropertyCount int             // total properties across all inline sub-schemas
    nestingDepth      int             // max inline object nesting depth
    compositionDepth  int             // max allOf/oneOf/anyOf nesting depth
    unionSites        []UnionSite     // every oneOf/anyOf occurrence found
}

// analyzeSchemaTree walks an entire schema tree (recursing into inline sub-schemas)
// and computes deep property counts, nesting depth, composition depth, and union sites.
func analyzeSchemaTree(schema *oas3.Schema) *schemaTreeStats {
    stats := &schemaTreeStats{
        compositionFields: make(map[string]bool),
    }
    stats.walk(schema, 0, 0, "", make(map[*oas3.Schema]bool))
    return stats
}

// walk visits one inline schema node. objectDepth and compDepth carry the
// current object-nesting and composition-nesting depths; path is a dotted
// breadcrumb used to label union sites. seen guards against revisiting shared
// or cyclic inline schema pointers. Sub-schemas that are $refs are never
// descended into here — references are handled separately by extractEdges.
func (s *schemaTreeStats) walk(schema *oas3.Schema, objectDepth, compDepth int, path string, seen map[*oas3.Schema]bool) {
    if schema == nil || seen[schema] {
        return
    }
    seen[schema] = true

    if objectDepth > s.nestingDepth {
        s.nestingDepth = objectDepth
    }
    if compDepth > s.compositionDepth {
        s.compositionDepth = compDepth
    }

    // Count properties at this level
    if schema.Properties != nil {
        s.deepPropertyCount += schema.Properties.Len()
        for propName, propSchema := range schema.Properties.All() {
            sub := propSchema.GetSchema()
            if sub != nil && sub.Ref == nil {
                childPath := propName
                if path != "" {
                    childPath = path + "." + propName
                }
                // Depth increases only when the child is itself an object
                // with properties; scalar/array wrappers do not add nesting.
                nextDepth := objectDepth
                if sub.Properties != nil && sub.Properties.Len() > 0 {
                    nextDepth = objectDepth + 1
                }
                s.walk(sub, nextDepth, compDepth, childPath, seen)
            }
        }
    }

    // allOf
    if len(schema.AllOf) > 0 {
        s.compositionFields["allOf"] = true
        // Bump composition depth here as well, so it is counted even when
        // every branch is a $ref (the walk below skips ref branches).
        if compDepth+1 > s.compositionDepth {
            s.compositionDepth = compDepth + 1
        }
        for _, sub := range schema.AllOf {
            if sub.GetSchema() != nil && sub.GetSchema().Ref == nil {
                s.walk(sub.GetSchema(), objectDepth, compDepth+1, path, seen)
            }
        }
    }

    // oneOf: record a union site (width, discriminator, location) and recurse.
    if len(schema.OneOf) > 0 {
        s.compositionFields["oneOf"] = true
        if compDepth+1 > s.compositionDepth {
            s.compositionDepth = compDepth + 1
        }
        site := UnionSite{
            Kind:             "oneOf",
            Width:            len(schema.OneOf),
            HasDiscriminator: schema.Discriminator != nil,
            Path:             path,
        }
        if site.Path == "" {
            site.Path = "root"
        }
        s.unionSites = append(s.unionSites, site)
        for _, sub := range schema.OneOf {
            if sub.GetSchema() != nil && sub.GetSchema().Ref == nil {
                s.walk(sub.GetSchema(), objectDepth, compDepth+1, path, seen)
            }
        }
    }

    // anyOf: same treatment as oneOf.
    if len(schema.AnyOf) > 0 {
        s.compositionFields["anyOf"] = true
        if compDepth+1 > s.compositionDepth {
            s.compositionDepth = compDepth + 1
        }
        site := UnionSite{
            Kind:             "anyOf",
            Width:            len(schema.AnyOf),
            HasDiscriminator: schema.Discriminator != nil,
            Path:             path,
        }
        if site.Path == "" {
            site.Path = "root"
        }
        s.unionSites = append(s.unionSites, site)
        for _, sub := range schema.AnyOf {
            if sub.GetSchema() != nil && sub.GetSchema().Ref == nil {
                s.walk(sub.GetSchema(), objectDepth, compDepth+1, path, seen)
            }
        }
    }

    // Items
    if schema.Items != nil && schema.Items.GetSchema() != nil && schema.Items.GetSchema().Ref == nil {
        itemPath := path + "[]"
        if path == "" {
            itemPath = "[]"
        }
        s.walk(schema.Items.GetSchema(), objectDepth, compDepth, itemPath, seen)
    }

    // AdditionalProperties
    if schema.AdditionalProperties != nil && schema.AdditionalProperties.GetSchema() != nil && schema.AdditionalProperties.GetSchema().Ref == nil {
        apPath := path + "{}"
        if path == "" {
            apPath = "{}"
        }
        s.walk(schema.AdditionalProperties.GetSchema(), objectDepth, compDepth, apPath, seen)
    }
}

// extractEdges discovers all outgoing edges from a schema to other component schemas.
// It recursively descends into inline sub-schemas (properties, composition branches)
// to find $ref targets at any depth.
+func extractEdges(g *Graph, sourceID string, schema *oas3.Schema) { + seen := make(map[*oas3.Schema]bool) + extractEdgesRecursive(g, sourceID, schema, seen) +} + +func extractEdgesRecursive(g *Graph, sourceID string, schema *oas3.Schema, seen map[*oas3.Schema]bool) { + if schema == nil || seen[schema] { + return + } + seen[schema] = true + + // Properties + if schema.Properties != nil { + for propName, propSchema := range schema.Properties.All() { + if target := resolveRefTarget(propSchema); target != "" { + isRequired := slices.Contains(schema.Required, propName) + propSchemaObj := propSchema.GetSchema() + isNullable := propSchemaObj != nil && (slices.Contains(schemaTypeValues(propSchemaObj), "null") || (propSchemaObj.Nullable != nil && *propSchemaObj.Nullable)) + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeProperty, + FieldName: propName, + IsRequired: isRequired, + IsNullable: isNullable, + }) + } else if propSchema.GetSchema() != nil { + // Inline schema — recurse into it to find nested $refs + extractEdgesRecursive(g, sourceID, propSchema.GetSchema(), seen) + } + // Also check if property is an array with $ref items + if propSchema.GetSchema() != nil && propSchema.GetSchema().Items != nil { + if target := resolveRefTarget(propSchema.GetSchema().Items); target != "" { + isRequired := slices.Contains(schema.Required, propName) + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeItems, + FieldName: propName, + IsRequired: isRequired, + IsArray: true, + }) + } else if propSchema.GetSchema().Items.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, propSchema.GetSchema().Items.GetSchema(), seen) + } + } + } + } + + // Items (top-level array schema) + if schema.Items != nil { + if target := resolveRefTarget(schema.Items); target != "" { + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeItems, + IsArray: true, + }) + } else if schema.Items.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, 
schema.Items.GetSchema(), seen) + } + } + + // allOf + for i, sub := range schema.AllOf { + if target := resolveRefTarget(sub); target != "" { + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeAllOf, + Index: i, + }) + } else if sub.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, sub.GetSchema(), seen) + } + } + + // oneOf + for i, sub := range schema.OneOf { + if target := resolveRefTarget(sub); target != "" { + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeOneOf, + Index: i, + }) + } else if sub.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, sub.GetSchema(), seen) + } + } + + // anyOf + for i, sub := range schema.AnyOf { + if target := resolveRefTarget(sub); target != "" { + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeAnyOf, + Index: i, + }) + } else if sub.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, sub.GetSchema(), seen) + } + } + + // additionalProperties + if schema.AdditionalProperties != nil { + if target := resolveRefTarget(schema.AdditionalProperties); target != "" { + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeAdditionalProperties, + }) + } else if schema.AdditionalProperties.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, schema.AdditionalProperties.GetSchema(), seen) + } + } + + // not + if schema.Not != nil { + if target := resolveRefTarget(schema.Not); target != "" { + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgeNot, + }) + } else if schema.Not.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, schema.Not.GetSchema(), seen) + } + } + + // prefixItems + for i, sub := range schema.PrefixItems { + if target := resolveRefTarget(sub); target != "" { + g.addEdge(&Edge{ + From: sourceID, + To: target, + Kind: EdgePrefixItems, + Index: i, + }) + } else if sub.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, sub.GetSchema(), seen) + } + } + + // if/then/else + if schema.If != nil { + if target := 
resolveRefTarget(schema.If); target != "" { + g.addEdge(&Edge{From: sourceID, To: target, Kind: EdgeIf}) + } else if schema.If.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, schema.If.GetSchema(), seen) + } + } + if schema.Then != nil { + if target := resolveRefTarget(schema.Then); target != "" { + g.addEdge(&Edge{From: sourceID, To: target, Kind: EdgeThen}) + } else if schema.Then.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, schema.Then.GetSchema(), seen) + } + } + if schema.Else != nil { + if target := resolveRefTarget(schema.Else); target != "" { + g.addEdge(&Edge{From: sourceID, To: target, Kind: EdgeElse}) + } else if schema.Else.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, schema.Else.GetSchema(), seen) + } + } + + // DependentSchemas + if schema.DependentSchemas != nil { + for _, depSchema := range schema.DependentSchemas.All() { + if target := resolveRefTarget(depSchema); target != "" { + g.addEdge(&Edge{From: sourceID, To: target, Kind: EdgeProperty}) + } else if depSchema.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, depSchema.GetSchema(), seen) + } + } + } + + // PatternProperties + if schema.PatternProperties != nil { + for _, ppSchema := range schema.PatternProperties.All() { + if target := resolveRefTarget(ppSchema); target != "" { + g.addEdge(&Edge{From: sourceID, To: target, Kind: EdgeAdditionalProperties}) + } else if ppSchema.GetSchema() != nil { + extractEdgesRecursive(g, sourceID, ppSchema.GetSchema(), seen) + } + } + } +} + +// schemaTypeValues extracts the type values from a schema's Type field. +// Type is an EitherValue that is either []SchemaType (array) or SchemaType (single). 
+func schemaTypeValues(schema *oas3.Schema) []string { + if schema.Type == nil { + return nil + } + // Left = []SchemaType (array of types) + if schema.Type.IsLeft() { + left := schema.Type.LeftValue() + result := make([]string, len(left)) + for i, t := range left { + result[i] = string(t) + } + return result + } + // Right = SchemaType (single type) + if schema.Type.IsRight() { + right := schema.Type.RightValue() + if right != "" { + return []string{string(right)} + } + } + return nil +} + +// resolveRefTarget extracts the component schema name from a $ref if it points +// to #/components/schemas/. Returns empty string for inline schemas or external refs. +func resolveRefTarget(jsonSchema *oas3.JSONSchema[oas3.Referenceable]) string { + if jsonSchema == nil { + return "" + } + schema := jsonSchema.GetSchema() + if schema == nil || schema.Ref == nil { + return "" + } + + ref := schema.Ref.String() + const prefix = "#/components/schemas/" + if len(ref) > len(prefix) && ref[:len(prefix)] == prefix { + return ref[len(prefix):] + } + return "" +} diff --git a/cmd/openapi/internal/analyze/mermaid.go b/cmd/openapi/internal/analyze/mermaid.go new file mode 100644 index 00000000..708cff8f --- /dev/null +++ b/cmd/openapi/internal/analyze/mermaid.go @@ -0,0 +1,441 @@ +package analyze + +import ( + "fmt" + "sort" + "strings" +) + +// SCCToMermaid renders one SCC as a Mermaid flowchart string. +// The returned string can be displayed or piped to mermaid-rendering tools. 
func SCCToMermaid(g *Graph, cycles *CycleAnalysis, sccIndex int) string {
    // Out-of-range indexes (including negative) yield an empty diagram.
    if sccIndex < 0 || sccIndex >= len(cycles.SCCs) {
        return ""
    }
    scc := cycles.SCCs[sccIndex]
    memberSet := make(map[string]bool, scc.Size)
    for _, id := range scc.NodeIDs {
        memberSet[id] = true
    }

    var sb strings.Builder
    sb.WriteString("graph LR\n")

    // Nodes
    for _, id := range scc.NodeIDs {
        sb.WriteString(fmt.Sprintf(" %s[%s]\n", mermaidSafeID(id), id))
    }

    // Edges within SCC. Output order follows g.Edges insertion order;
    // parallel edges each get their own line.
    for _, e := range g.Edges {
        if memberSet[e.From] && memberSet[e.To] {
            label := mermaidEdgeLabel(e)
            if label != "" {
                sb.WriteString(fmt.Sprintf(" %s -->|%s| %s\n", mermaidSafeID(e.From), label, mermaidSafeID(e.To)))
            } else {
                sb.WriteString(fmt.Sprintf(" %s --> %s\n", mermaidSafeID(e.From), mermaidSafeID(e.To)))
            }
        }
    }

    return sb.String()
}

// EgoGraphToMermaid renders a BFS neighborhood subgraph around a node.
// The neighborhood follows edges in BOTH directions up to `hops` away;
// the center node is drawn with double-circle notation.
func EgoGraphToMermaid(g *Graph, nodeID string, hops int) string {
    if _, ok := g.Nodes[nodeID]; !ok {
        return ""
    }

    // BFS to collect nodes within hop radius
    visited := map[string]int{nodeID: 0}
    queue := []string{nodeID}
    for len(queue) > 0 {
        current := queue[0]
        queue = queue[1:]
        dist := visited[current]
        if dist >= hops {
            continue
        }
        // outgoing
        for _, e := range g.OutEdges[current] {
            if _, seen := visited[e.To]; !seen {
                visited[e.To] = dist + 1
                queue = append(queue, e.To)
            }
        }
        // incoming
        for _, e := range g.InEdges[current] {
            if _, seen := visited[e.From]; !seen {
                visited[e.From] = dist + 1
                queue = append(queue, e.From)
            }
        }
    }

    var sb strings.Builder
    sb.WriteString("graph LR\n")

    // Sort node IDs for deterministic output
    nodeIDs := make([]string, 0, len(visited))
    for id := range visited {
        nodeIDs = append(nodeIDs, id)
    }
    sort.Strings(nodeIDs)

    for _, id := range nodeIDs {
        if id == nodeID {
            sb.WriteString(fmt.Sprintf(" %s((%s))\n", mermaidSafeID(id), id)) // double circle for center
        } else {
            sb.WriteString(fmt.Sprintf(" %s[%s]\n", mermaidSafeID(id), id))
        }
    }

    // Edges between visited nodes
    for _, e := range g.Edges {
        if _, ok := visited[e.From]; !ok {
            continue
        }
        if _, ok := visited[e.To]; !ok {
            continue
        }
        label := mermaidEdgeLabel(e)
        if label != "" {
            sb.WriteString(fmt.Sprintf(" %s -->|%s| %s\n", mermaidSafeID(e.From), label, mermaidSafeID(e.To)))
        } else {
            sb.WriteString(fmt.Sprintf(" %s --> %s\n", mermaidSafeID(e.From), mermaidSafeID(e.To)))
        }
    }

    return sb.String()
}

// DAGOverviewToMermaid renders the condensed DAG as a Mermaid flowchart.
// SCCs are collapsed to single nodes labeled with member names.
// maxNodes <= 0 means unlimited; otherwise only the first maxNodes SCCs
// (and edges among them) are shown.
func DAGOverviewToMermaid(g *Graph, cycles *CycleAnalysis, maxNodes int) string {
    dag := cycles.DAGCondensation
    if dag == nil {
        return ""
    }

    var sb strings.Builder
    sb.WriteString("graph TD\n")

    shown := 0
    for i, scc := range dag.Nodes {
        if maxNodes > 0 && shown >= maxNodes {
            break
        }
        id := fmt.Sprintf("scc%d", i)
        if scc.IsTrivial {
            sb.WriteString(fmt.Sprintf(" %s[%s]\n", id, scc.NodeIDs[0]))
        } else {
            label := strings.Join(scc.NodeIDs, ", ")
            // NOTE(review): byte-based truncation — could split a multibyte
            // rune if schema names are non-ASCII; confirm names are ASCII.
            if len(label) > 40 {
                label = label[:37] + "..."
            }
            // Hexagon shape distinguishes collapsed multi-schema SCCs.
            sb.WriteString(fmt.Sprintf(" %s{{\"%s\"}}\n", id, label))
        }
        shown++
    }

    for _, e := range dag.Edges {
        // Skip edges touching SCCs beyond the display cutoff.
        if maxNodes > 0 && (e[0] >= maxNodes || e[1] >= maxNodes) {
            continue
        }
        sb.WriteString(fmt.Sprintf(" scc%d --> scc%d\n", e[0], e[1]))
    }

    return sb.String()
}

// RenderASCIIGraph renders a set of nodes and edges as ASCII art.
// It uses the condensed DAG layers for layout.
func RenderASCIIGraph(dag *CondensedDAG, width int) string {
    if dag == nil || len(dag.Nodes) == 0 {
        return " (no schemas)"
    }

    var sb strings.Builder
    maxBoxWidth := 0

    // Pre-compute labels and find max width
    type nodeLabel struct {
        sccIdx int
        label  string
        isSCC  bool // non-trivial SCC → rendered with a double border
    }
    var allLabels [][]nodeLabel
    for _, layer := range dag.Layers {
        var layerLabels []nodeLabel
        for _, sccIdx := range layer {
            // Defensive bound check against malformed layer data.
            if sccIdx >= len(dag.Nodes) {
                continue
            }
            scc := dag.Nodes[sccIdx]
            var label string
            isSCC := !scc.IsTrivial
            if scc.IsTrivial {
                label = scc.NodeIDs[0]
            } else {
                label = strings.Join(scc.NodeIDs, ", ")
                // NOTE(review): byte-based truncation could split a multibyte
                // rune for non-ASCII schema names — confirm names are ASCII.
                if len(label) > 35 {
                    label = label[:32] + "..."
                }
                label = fmt.Sprintf("SCC: %s", label)
            }
            if len(label)+4 > maxBoxWidth {
                maxBoxWidth = len(label) + 4
            }
            layerLabels = append(layerLabels, nodeLabel{sccIdx: sccIdx, label: label, isSCC: isSCC})
        }
        allLabels = append(allLabels, layerLabels)
    }

    // Clamp the box width to the terminal width, with a sane minimum.
    if maxBoxWidth > width-4 {
        maxBoxWidth = width - 4
    }
    if maxBoxWidth < 10 {
        maxBoxWidth = 10
    }

    // Build edge lookup: from SCC index -> list of to SCC indices
    edgeLookup := make(map[int][]int)
    for _, e := range dag.Edges {
        edgeLookup[e[0]] = append(edgeLookup[e[0]], e[1])
    }

    for layerIdx, layerLabels := range allLabels {
        for _, nl := range layerLabels {
            boxW := len(nl.label) + 4
            if boxW > maxBoxWidth {
                boxW = maxBoxWidth
            }

            if nl.isSCC {
                // Double-border for SCC
                sb.WriteString(fmt.Sprintf(" ╔%s╗\n", strings.Repeat("═", boxW-2)))
                content := nl.label
                if len(content) > boxW-4 {
                    content = content[:boxW-7] + "..."
                }
                sb.WriteString(fmt.Sprintf(" ║ %-*s ║\n", boxW-4, content))
                sb.WriteString(fmt.Sprintf(" ╚%s╝\n", strings.Repeat("═", boxW-2)))
            } else {
                // Single border for regular node
                sb.WriteString(fmt.Sprintf(" ┌%s┐\n", strings.Repeat("─", boxW-2)))
                content := nl.label
                if len(content) > boxW-4 {
                    content = content[:boxW-7] + "..."
                }
                sb.WriteString(fmt.Sprintf(" │ %-*s │\n", boxW-4, content))
                sb.WriteString(fmt.Sprintf(" └%s┘\n", strings.Repeat("─", boxW-2)))
            }

            // Draw edges to next layers
            targets := edgeLookup[nl.sccIdx]
            if len(targets) > 0 {
                sort.Ints(targets)
                var targetNames []string
                for _, t := range targets {
                    if t < len(dag.Nodes) {
                        targetNames = append(targetNames, sccLabel(dag.Nodes[t]))
                    }
                }
                sb.WriteString(fmt.Sprintf(" ╰─→ %s\n", strings.Join(targetNames, ", ")))
            }
        }

        // Arrow separator between consecutive layers.
        if layerIdx < len(allLabels)-1 {
            sb.WriteString(" │\n")
            sb.WriteString(" ▼\n")
        }
    }

    return sb.String()
}

// RenderASCIIEgoGraph renders a BFS ego graph as ASCII art.
// The neighborhood is collected over both edge directions up to `hops`.
// width is currently unused by the layout — TODO confirm it is reserved
// for future wrapping.
func RenderASCIIEgoGraph(g *Graph, centerID string, hops int, width int) string {
    if _, ok := g.Nodes[centerID]; !ok {
        return fmt.Sprintf(" Schema %q not found", centerID)
    }

    // BFS to collect nodes within hop radius
    visited := map[string]int{centerID: 0}
    queue := []string{centerID}
    for len(queue) > 0 {
        current := queue[0]
        queue = queue[1:]
        dist := visited[current]
        if dist >= hops {
            continue
        }
        for _, e := range g.OutEdges[current] {
            if _, seen := visited[e.To]; !seen {
                visited[e.To] = dist + 1
                queue = append(queue, e.To)
            }
        }
        for _, e := range g.InEdges[current] {
            if _, seen := visited[e.From]; !seen {
                visited[e.From] = dist + 1
                queue = append(queue, e.From)
            }
        }
    }

    // Group by distance
    byDist := make(map[int][]string)
    maxDist := 0
    for id, dist := range visited {
        byDist[dist] = append(byDist[dist], id)
        if dist > maxDist {
            maxDist = dist
        }
    }

    var sb strings.Builder

    // Center node
    sb.WriteString(fmt.Sprintf(" ╔%s╗\n", strings.Repeat("═", len(centerID)+2)))
    sb.WriteString(fmt.Sprintf(" ║ %s ║\n", centerID))
    sb.WriteString(fmt.Sprintf(" ╚%s╝\n", strings.Repeat("═", len(centerID)+2)))

    // Outgoing edges from center with labels
    type edgeInfo struct {
        target string
        label  string
    }
    var outEdges []edgeInfo
    for _, e := range g.OutEdges[centerID] {
        if _, ok := visited[e.To]; ok {
            label := string(e.Kind)
            if e.FieldName != "" {
                label += ":" + e.FieldName
            }
            var flags []string
            if e.IsRequired {
                flags = append(flags, "req")
            }
            if e.IsNullable {
                flags = append(flags, "null")
            }
            if e.IsArray {
                flags = append(flags, "[]")
            }
            if len(flags) > 0 {
                label += " [" + strings.Join(flags, ",") + "]"
            }
            outEdges = append(outEdges, edgeInfo{target: e.To, label: label})
        }
    }
    if len(outEdges) > 0 {
        sb.WriteString(" │ references\n")
        sb.WriteString(" ▼\n")
        for _, ei := range outEdges {
            sb.WriteString(fmt.Sprintf(" ├─→ %-20s %s\n", ei.target, ei.label))
        }
    }

    // Incoming edges to center with labels.
    // NOTE(review): unlike the outgoing list, incoming edges do not show a
    // "null" flag — confirm the asymmetry is intentional.
    var inEdges []edgeInfo
    for _, e := range g.InEdges[centerID] {
        if _, ok := visited[e.From]; ok {
            label := string(e.Kind)
            if e.FieldName != "" {
                label += ":" + e.FieldName
            }
            var flags []string
            if e.IsRequired {
                flags = append(flags, "req")
            }
            if e.IsArray {
                flags = append(flags, "[]")
            }
            if len(flags) > 0 {
                label += " [" + strings.Join(flags, ",") + "]"
            }
            inEdges = append(inEdges, edgeInfo{target: e.From, label: label})
        }
    }
    if len(inEdges) > 0 {
        sb.WriteString(" │ referenced by\n")
        sb.WriteString(" ▲\n")
        for _, ei := range inEdges {
            sb.WriteString(fmt.Sprintf(" ├── %-20s %s\n", ei.target, ei.label))
        }
    }

    // Distant neighbors (hops > 1)
    for dist := 2; dist <= maxDist; dist++ {
        nodes := byDist[dist]
        if len(nodes) == 0 {
            continue
        }
        sort.Strings(nodes)
        sb.WriteString(fmt.Sprintf("\n %d hops: %s\n", dist, strings.Join(nodes, ", ")))
    }

    return sb.String()
}

// RenderASCIISCC renders an SCC with its internal edges as ASCII.
+func RenderASCIISCC(g *Graph, cycles *CycleAnalysis, sccIndex int) string { + if sccIndex < 0 || sccIndex >= len(cycles.SCCs) { + return "" + } + scc := cycles.SCCs[sccIndex] + memberSet := make(map[string]bool, scc.Size) + for _, id := range scc.NodeIDs { + memberSet[id] = true + } + + var sb strings.Builder + sb.WriteString(fmt.Sprintf(" SCC #%d (%d schemas)\n", sccIndex+1, scc.Size)) + sb.WriteString(fmt.Sprintf(" %s\n\n", strings.Repeat("─", 30))) + + for _, id := range scc.NodeIDs { + sb.WriteString(fmt.Sprintf(" [%s]\n", id)) + // Show edges to other SCC members + for _, e := range g.OutEdges[id] { + if memberSet[e.To] { + label := string(e.Kind) + if e.FieldName != "" { + label += ":" + e.FieldName + } + if e.IsRequired { + label += " (req)" + } + sb.WriteString(fmt.Sprintf(" └─→ %s via %s\n", e.To, label)) + } + } + } + + return sb.String() +} + +func mermaidSafeID(id string) string { + // Replace characters that aren't safe in mermaid IDs + r := strings.NewReplacer("-", "_", ".", "_", " ", "_") + return r.Replace(id) +} + +func mermaidEdgeLabel(e *Edge) string { + var parts []string + parts = append(parts, string(e.Kind)) + if e.FieldName != "" { + parts = append(parts, e.FieldName) + } + return strings.Join(parts, ":") +} + +func sccLabel(scc *SCC) string { + if scc.IsTrivial { + return scc.NodeIDs[0] + } + if len(scc.NodeIDs) <= 3 { + return "{" + strings.Join(scc.NodeIDs, ", ") + "}" + } + return fmt.Sprintf("{%s +%d}", scc.NodeIDs[0], scc.Size-1) +} diff --git a/cmd/openapi/internal/analyze/mermaid_test.go b/cmd/openapi/internal/analyze/mermaid_test.go new file mode 100644 index 00000000..d0a884ca --- /dev/null +++ b/cmd/openapi/internal/analyze/mermaid_test.go @@ -0,0 +1,113 @@ +package analyze_test + +import ( + "context" + "testing" + + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSCCToMermaid(t *testing.T) { + doc := loadDoc(t, 
"testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + + require.NotEmpty(t, ca.SCCs) + + out := analyze.SCCToMermaid(g, ca, 0) + assert.Contains(t, out, "graph LR") + // Should contain at least one node from the SCC + assert.Contains(t, out, ca.SCCs[0].NodeIDs[0]) + // Should contain edges (-->) + assert.Contains(t, out, "-->") +} + +func TestSCCToMermaid_OutOfBounds(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + + assert.Empty(t, analyze.SCCToMermaid(g, ca, -1)) + assert.Empty(t, analyze.SCCToMermaid(g, ca, 999)) +} + +func TestEgoGraphToMermaid(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + + out := analyze.EgoGraphToMermaid(g, "Person", 1) + assert.Contains(t, out, "graph LR") + assert.Contains(t, out, "Person") + // Person is the center — should use double-circle notation + assert.Contains(t, out, "((Person))") + // Should include neighbors + assert.Contains(t, out, "Company") +} + +func TestEgoGraphToMermaid_UnknownNode(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + + assert.Empty(t, analyze.EgoGraphToMermaid(g, "NonExistent", 1)) +} + +func TestDAGOverviewToMermaid(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + + out := analyze.DAGOverviewToMermaid(g, ca, 0) + assert.Contains(t, out, "graph TD") + // Should contain SCC nodes + assert.Contains(t, out, "scc") +} + +func TestRenderASCIIGraph(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + + out := analyze.RenderASCIIGraph(ca.DAGCondensation, 80) + assert.NotEmpty(t, out) + // 
Should contain box drawing characters + assert.Contains(t, out, "┌") + assert.Contains(t, out, "│") +} + +func TestRenderASCIIGraph_NilDAG(t *testing.T) { + out := analyze.RenderASCIIGraph(nil, 80) + assert.Contains(t, out, "no schemas") +} + +func TestRenderASCIIEgoGraph(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + + out := analyze.RenderASCIIEgoGraph(g, "Person", 2, 80) + assert.Contains(t, out, "Person") + assert.Contains(t, out, "Company") + // Should have box drawing for center node + assert.Contains(t, out, "╔") +} + +func TestRenderASCIIEgoGraph_UnknownNode(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + + out := analyze.RenderASCIIEgoGraph(g, "NonExistent", 2, 80) + assert.Contains(t, out, "not found") +} + +func TestRenderASCIISCC(t *testing.T) { + doc := loadDoc(t, "testdata/cyclic.openapi.yaml") + g := analyze.BuildGraph(context.Background(), doc) + ca := analyze.AnalyzeCycles(g) + + require.NotEmpty(t, ca.SCCs) + + out := analyze.RenderASCIISCC(g, ca, 0) + assert.Contains(t, out, "SCC #1") + assert.Contains(t, out, ca.SCCs[0].NodeIDs[0]) +} diff --git a/cmd/openapi/internal/analyze/metrics.go b/cmd/openapi/internal/analyze/metrics.go new file mode 100644 index 00000000..db85f3ac --- /dev/null +++ b/cmd/openapi/internal/analyze/metrics.go @@ -0,0 +1,144 @@ +package analyze + +import "sort" + +// SchemaMetrics holds computed complexity metrics for a single schema. +type SchemaMetrics struct { + // NodeID is the schema identifier. + NodeID string + // FanIn is the number of distinct schemas referencing this one. + FanIn int + // FanOut is the number of distinct schemas this one references. + FanOut int + // PropertyCount is the number of properties defined. + PropertyCount int + // RequiredCount is the number of required properties. 
+ RequiredCount int + // NestingDepth is the maximum depth of inline object nesting (not counting $refs). + NestingDepth int + // CompositionDepth is the depth of allOf/anyOf/oneOf nesting. + CompositionDepth int + // HasDiscriminator is true if a discriminator is defined. + HasDiscriminator bool + // CycleMembership is the number of cycles this schema participates in. + CycleMembership int + // InSCC is true if this schema is part of a non-trivial SCC. + InSCC bool + // Types is the list of declared types. + Types []string + // DeepPropertyCount is total properties across all inline sub-schemas. + DeepPropertyCount int + // MaxUnionWidth is the largest oneOf/anyOf width in the schema tree. + MaxUnionWidth int + // VariantProduct is the product of all union widths (cross-product explosion measure). + VariantProduct int + // UnionSiteCount is the number of oneOf/anyOf sites in the tree. + UnionSiteCount int +} + +// ComputeMetrics calculates per-schema complexity metrics using the graph and cycle analysis. 
+func ComputeMetrics(g *Graph, cycles *CycleAnalysis) map[string]*SchemaMetrics { + metrics := make(map[string]*SchemaMetrics, len(g.Nodes)) + + for id, node := range g.Nodes { + m := &SchemaMetrics{ + NodeID: id, + FanIn: g.FanIn(id), + FanOut: g.FanOut(id), + PropertyCount: node.PropertyCount, + RequiredCount: node.RequiredCount, + HasDiscriminator: node.HasDiscriminator, + InSCC: cycles.NodesInCycles[id], + Types: node.Types, + } + + // Count cycle membership + for _, cycle := range cycles.Cycles { + for _, cid := range cycle.Path { + if cid == id { + m.CycleMembership++ + break + } + } + } + + m.NestingDepth = node.NestingDepth + m.CompositionDepth = node.CompositionDepth + m.DeepPropertyCount = node.DeepPropertyCount + m.UnionSiteCount = len(node.UnionSites) + + // Compute union metrics + if len(node.UnionSites) > 0 { + m.VariantProduct = 1 + for _, site := range node.UnionSites { + if site.Width > m.MaxUnionWidth { + m.MaxUnionWidth = site.Width + } + m.VariantProduct *= site.Width + } + } + + metrics[id] = m + } + + return metrics +} + +// TopSchemasByFanIn returns the top N schemas sorted by fan-in (most referenced first). +func TopSchemasByFanIn(metrics map[string]*SchemaMetrics, n int) []*SchemaMetrics { + return topSchemasBy(metrics, n, func(a, b *SchemaMetrics) bool { + return a.FanIn > b.FanIn + }) +} + +// TopSchemasByFanOut returns the top N schemas sorted by fan-out (most dependencies first). +func TopSchemasByFanOut(metrics map[string]*SchemaMetrics, n int) []*SchemaMetrics { + return topSchemasBy(metrics, n, func(a, b *SchemaMetrics) bool { + return a.FanOut > b.FanOut + }) +} + +// TopSchemasByComplexity returns the top N schemas by a composite complexity score. 
+func TopSchemasByComplexity(metrics map[string]*SchemaMetrics, n int) []*SchemaMetrics { + return topSchemasBy(metrics, n, func(a, b *SchemaMetrics) bool { + return a.ComplexityScore() > b.ComplexityScore() + }) +} + +// ComplexityScore returns a composite complexity score for this schema. +func (m *SchemaMetrics) ComplexityScore() int { + score := m.FanIn + m.FanOut + m.DeepPropertyCount + m.CompositionDepth*3 + m.NestingDepth*2 + if m.InSCC { + score += 10 + } + score += m.CycleMembership * 5 + if m.VariantProduct > 1 { + // Log-scale contribution of cross-product explosion + vp := m.VariantProduct + logContrib := 0 + for vp > 1 { + logContrib++ + vp /= 2 + } + score += logContrib * 5 + } + // Multi-site bonus: independent unions multiply codegen difficulty + if m.UnionSiteCount > 1 { + score += m.UnionSiteCount * 3 + } + return score +} + +func topSchemasBy(metrics map[string]*SchemaMetrics, n int, less func(a, b *SchemaMetrics) bool) []*SchemaMetrics { + all := make([]*SchemaMetrics, 0, len(metrics)) + for _, m := range metrics { + all = append(all, m) + } + sort.Slice(all, func(i, j int) bool { + return less(all[i], all[j]) + }) + if n > len(all) { + n = len(all) + } + return all[:n] +} diff --git a/cmd/openapi/internal/analyze/output.go b/cmd/openapi/internal/analyze/output.go new file mode 100644 index 00000000..b729f940 --- /dev/null +++ b/cmd/openapi/internal/analyze/output.go @@ -0,0 +1,275 @@ +package analyze + +import ( + "encoding/json" + "fmt" + "io" + "strings" +) + +// JSONReport is the JSON-serializable form of the analysis report. 
+type JSONReport struct { + Document struct { + Title string `json:"title"` + Version string `json:"version"` + OpenAPI string `json:"openapi"` + } `json:"document"` + + Summary struct { + TotalSchemas int `json:"totalSchemas"` + TotalEdges int `json:"totalEdges"` + SCCCount int `json:"sccCount"` + LargestSCCSize int `json:"largestSCCSize"` + CycleCount int `json:"cycleCount"` + SchemasInCyclesPct float64 `json:"schemasInCyclesPct"` + RequiredOnlyCycles int `json:"requiredOnlyCycles"` + DAGDepth int `json:"dagDepth"` + CompatibilityScore float64 `json:"compatibilityScore"` + GreenCount int `json:"greenSchemas"` + YellowCount int `json:"yellowSchemas"` + RedCount int `json:"redSchemas"` + } `json:"summary"` + + Schemas []JSONSchemaEntry `json:"schemas"` + Cycles []JSONCycleEntry `json:"cycles"` + Suggestions []JSONSuggestion `json:"suggestions"` +} + +// JSONSchemaEntry is the JSON form of per-schema analysis. +type JSONSchemaEntry struct { + ID string `json:"id"` + Types []string `json:"types,omitempty"` + PropertyCount int `json:"propertyCount"` + DeepPropertyCount int `json:"deepPropertyCount,omitempty"` + FanIn int `json:"fanIn"` + FanOut int `json:"fanOut"` + NestingDepth int `json:"nestingDepth,omitempty"` + CompositionDepth int `json:"compositionDepth,omitempty"` + MaxUnionWidth int `json:"maxUnionWidth,omitempty"` + VariantProduct int `json:"variantProduct,omitempty"` + InSCC bool `json:"inSCC"` + CycleCount int `json:"cycleCount"` + ComplexityScore int `json:"complexityScore"` + Rank int `json:"rank"` + CodegenTier string `json:"codegenTier"` + Signals []string `json:"signals,omitempty"` +} + +// JSONCycleEntry is the JSON form of a cycle. +type JSONCycleEntry struct { + Path []string `json:"path"` + Length int `json:"length"` + RequiredOnly bool `json:"requiredOnly"` + BreakPointCount int `json:"breakPointCount"` +} + +// JSONSuggestion is the JSON form of a refactoring suggestion. 
+type JSONSuggestion struct { + Type string `json:"type"` + Title string `json:"title"` + Description string `json:"description"` + Schemas []string `json:"affectedSchemas"` + Impact int `json:"impact"` +} + +// WriteJSON writes the report as JSON to the given writer. +func WriteJSON(w io.Writer, r *Report) error { + jr := JSONReport{} + jr.Document.Title = r.DocumentTitle + jr.Document.Version = r.DocumentVersion + jr.Document.OpenAPI = r.OpenAPIVersion + + jr.Summary.TotalSchemas = r.TotalSchemas + jr.Summary.TotalEdges = r.TotalEdges + jr.Summary.SCCCount = r.SCCCount + jr.Summary.LargestSCCSize = r.LargestSCCSize + jr.Summary.CycleCount = len(r.Cycles.Cycles) + jr.Summary.SchemasInCyclesPct = r.SchemasInCyclesPct + jr.Summary.RequiredOnlyCycles = r.RequiredOnlyCycles + jr.Summary.DAGDepth = r.DAGDepth + jr.Summary.CompatibilityScore = r.CompatibilityScore + jr.Summary.GreenCount = r.Codegen.GreenCount + jr.Summary.YellowCount = r.Codegen.YellowCount + jr.Summary.RedCount = r.Codegen.RedCount + + // Schemas sorted by complexity + ranked := TopSchemasByComplexity(r.Metrics, len(r.Metrics)) + for rank, sm := range ranked { + entry := JSONSchemaEntry{ + ID: sm.NodeID, + Types: sm.Types, + PropertyCount: sm.PropertyCount, + DeepPropertyCount: sm.DeepPropertyCount, + FanIn: sm.FanIn, + FanOut: sm.FanOut, + NestingDepth: sm.NestingDepth, + CompositionDepth: sm.CompositionDepth, + MaxUnionWidth: sm.MaxUnionWidth, + VariantProduct: sm.VariantProduct, + InSCC: sm.InSCC, + CycleCount: sm.CycleMembership, + ComplexityScore: sm.ComplexityScore(), + Rank: rank + 1, + } + if d, ok := r.Codegen.PerSchema[sm.NodeID]; ok { + entry.CodegenTier = d.Tier.String() + for _, s := range d.Signals { + entry.Signals = append(entry.Signals, s.ID) + } + } + jr.Schemas = append(jr.Schemas, entry) + } + + for _, c := range r.Cycles.Cycles { + jr.Cycles = append(jr.Cycles, JSONCycleEntry{ + Path: c.Path, + Length: c.Length, + RequiredOnly: c.HasRequiredOnlyPath, + BreakPointCount: 
len(c.BreakPoints), + }) + } + + for _, sg := range r.Suggestions { + jr.Suggestions = append(jr.Suggestions, JSONSuggestion{ + Type: string(sg.Type), + Title: sg.Title, + Description: sg.Description, + Schemas: sg.AffectedSchemas, + Impact: sg.Impact, + }) + } + + enc := json.NewEncoder(w) + enc.SetIndent("", " ") + return enc.Encode(jr) +} + +// WriteDOT writes the schema reference graph in Graphviz DOT format. +func WriteDOT(w io.Writer, r *Report) { + fmt.Fprintf(w, "digraph schemas {\n") + fmt.Fprintf(w, " rankdir=LR;\n") + fmt.Fprintf(w, " node [shape=box, style=filled, fontname=\"Helvetica\"];\n\n") + + // Nodes colored by tier + ranked := TopSchemasByComplexity(r.Metrics, len(r.Metrics)) + for _, sm := range ranked { + color := "#d4edda" // green default + fontColor := "#155724" + if d, ok := r.Codegen.PerSchema[sm.NodeID]; ok { + switch d.Tier { + case CodegenYellow: + color = "#fff3cd" + fontColor = "#856404" + case CodegenRed: + color = "#f8d7da" + fontColor = "#721c24" + } + } + + label := fmt.Sprintf("%s\\nscore=%d fan-in=%d", sm.NodeID, sm.ComplexityScore(), sm.FanIn) + fmt.Fprintf(w, " %q [label=%q, fillcolor=%q, fontcolor=%q];\n", + sm.NodeID, label, color, fontColor) + } + + fmt.Fprintln(w) + + // Edges + for _, e := range r.Graph.Edges { + attrs := fmt.Sprintf("label=%q", string(e.Kind)) + if e.FieldName != "" { + attrs = fmt.Sprintf("label=%q", string(e.Kind)+":"+e.FieldName) + } + if e.IsRequired { + attrs += ", style=bold, color=red" + } + if e.IsArray { + attrs += ", style=dashed" + } + fmt.Fprintf(w, " %q -> %q [%s];\n", e.From, e.To, attrs) + } + + fmt.Fprintf(w, "}\n") +} + +// WriteText writes a human-readable text summary to the given writer. 
+func WriteText(w io.Writer, r *Report) { + fmt.Fprintf(w, "Schema Complexity Report: %s v%s (OpenAPI %s)\n", r.DocumentTitle, r.DocumentVersion, r.OpenAPIVersion) + fmt.Fprintf(w, "%s\n\n", strings.Repeat("=", 60)) + + // Overview + fmt.Fprintf(w, "OVERVIEW\n") + fmt.Fprintf(w, " Schemas: %d Refs: %d\n\n", r.TotalSchemas, r.TotalEdges) + + // Cycle health + fmt.Fprintf(w, "CYCLE HEALTH\n") + fmt.Fprintf(w, " SCCs: %d Largest: %d Cycles: %d\n", r.SCCCount, r.LargestSCCSize, len(r.Cycles.Cycles)) + fmt.Fprintf(w, " Schemas in cycles: %.0f%% Required-only cycles: %d\n", r.SchemasInCyclesPct, r.RequiredOnlyCycles) + fmt.Fprintf(w, " DAG depth: %d\n\n", r.DAGDepth) + + // Codegen + fmt.Fprintf(w, "CODEGEN COMPATIBILITY\n") + fmt.Fprintf(w, " Score: %.0f%% Green: %d Yellow: %d Red: %d\n\n", + r.CompatibilityScore, r.Codegen.GreenCount, r.Codegen.YellowCount, r.Codegen.RedCount) + + // Top fan-in + if len(r.TopFanIn) > 0 { + fmt.Fprintf(w, "HIGHEST FAN-IN\n") + for i, sm := range r.TopFanIn { + if sm.FanIn == 0 { + break + } + fmt.Fprintf(w, " %d. %-30s %d refs\n", i+1, sm.NodeID, sm.FanIn) + } + fmt.Fprintln(w) + } + + // Most complex + if len(r.TopComplex) > 0 { + fmt.Fprintf(w, "MOST COMPLEX\n") + for i, sm := range r.TopComplex { + score := sm.ComplexityScore() + if score == 0 { + break + } + fmt.Fprintf(w, " %d. 
%-30s score=%-4d (fan-in=%d fan-out=%d props=%d depth=%d unions=%d)\n", + i+1, sm.NodeID, score, sm.FanIn, sm.FanOut, sm.DeepPropertyCount, sm.CompositionDepth, sm.UnionSiteCount) + } + fmt.Fprintln(w) + } + + // Red/yellow schemas + var reds, yellows []string + for id, d := range r.Codegen.PerSchema { + switch d.Tier { + case CodegenRed: + reds = append(reds, id) + case CodegenYellow: + yellows = append(yellows, id) + } + } + if len(reds) > 0 { + fmt.Fprintf(w, "RED TIER SCHEMAS (%d)\n", len(reds)) + for _, id := range reds { + d := r.Codegen.PerSchema[id] + var sigs []string + for _, s := range d.Signals { + sigs = append(sigs, s.ID) + } + fmt.Fprintf(w, " - %-30s [%s]\n", id, strings.Join(sigs, ", ")) + } + fmt.Fprintln(w) + } + + // Suggestions + if len(r.Suggestions) > 0 { + limit := 10 + if len(r.Suggestions) < limit { + limit = len(r.Suggestions) + } + fmt.Fprintf(w, "SUGGESTIONS\n") + for i := 0; i < limit; i++ { + sg := r.Suggestions[i] + fmt.Fprintf(w, " %d. [%s] %s (impact: %d)\n", i+1, sg.Type, sg.Title, sg.Impact) + } + } +} diff --git a/cmd/openapi/internal/analyze/report.go b/cmd/openapi/internal/analyze/report.go new file mode 100644 index 00000000..30942800 --- /dev/null +++ b/cmd/openapi/internal/analyze/report.go @@ -0,0 +1,106 @@ +package analyze + +import ( + "context" + + "github.com/speakeasy-api/openapi/openapi" +) + +// Report is the top-level analysis result tying all analysis together. +type Report struct { + // DocumentTitle is the title from the OpenAPI info object. + DocumentTitle string + // DocumentVersion is the version from the OpenAPI info object. + DocumentVersion string + // OpenAPIVersion is the OpenAPI spec version (e.g., "3.1.0"). + OpenAPIVersion string + + // Schema counts + TotalSchemas int + TotalEdges int + ComponentCount int + InlineCount int + + // Graph is the extracted schema reference graph. + Graph *Graph + // Cycles is the cycle and SCC analysis. 
+ Cycles *CycleAnalysis + // Metrics is per-schema complexity metrics. + Metrics map[string]*SchemaMetrics + // Codegen is the code generation difficulty assessment. + Codegen *CodegenReport + // Suggestions is the list of actionable refactoring suggestions. + Suggestions []*Suggestion + + // Summary statistics + SCCCount int + LargestSCCSize int + SchemasInCyclesPct float64 + RequiredOnlyCycles int + CompatibilityScore float64 + DAGDepth int + TopFanIn []*SchemaMetrics + TopFanOut []*SchemaMetrics + TopComplex []*SchemaMetrics +} + +// Analyze runs the full analysis pipeline on an OpenAPI document and returns a Report. +func Analyze(ctx context.Context, doc *openapi.OpenAPI) *Report { + r := &Report{} + + // Document metadata + if doc.Info.Title != "" { + r.DocumentTitle = doc.Info.Title + } + if doc.Info.Version != "" { + r.DocumentVersion = doc.Info.Version + } + r.OpenAPIVersion = doc.OpenAPI + + // Step 1: Build graph + r.Graph = BuildGraph(ctx, doc) + r.TotalEdges = len(r.Graph.Edges) + + // Count schemas + for _, n := range r.Graph.Nodes { + r.TotalSchemas++ + if n.IsComponent { + r.ComponentCount++ + } else { + r.InlineCount++ + } + } + + // Step 2: Cycle analysis + r.Cycles = AnalyzeCycles(r.Graph) + r.SCCCount = len(r.Cycles.SCCs) + r.LargestSCCSize = r.Cycles.LargestSCCSize + if r.TotalSchemas > 0 { + r.SchemasInCyclesPct = float64(len(r.Cycles.NodesInCycles)) / float64(r.TotalSchemas) * 100 + } + for _, c := range r.Cycles.Cycles { + if c.HasRequiredOnlyPath { + r.RequiredOnlyCycles++ + } + } + if r.Cycles.DAGCondensation != nil { + r.DAGDepth = r.Cycles.DAGCondensation.Depth + } + + // Step 3: Metrics + r.Metrics = ComputeMetrics(r.Graph, r.Cycles) + + // Step 4: Codegen assessment + r.Codegen = AssessCodegen(r.Graph, r.Cycles, r.Metrics) + r.CompatibilityScore = r.Codegen.CompatibilityScore + + // Step 5: Suggestions + r.Suggestions = GenerateSuggestions(r.Graph, r.Cycles, r.Metrics, r.Codegen) + + // Step 6: Top-N rankings + r.TopFanIn = 
TopSchemasByFanIn(r.Metrics, 5) + r.TopFanOut = TopSchemasByFanOut(r.Metrics, 5) + r.TopComplex = TopSchemasByComplexity(r.Metrics, 5) + + return r +} diff --git a/cmd/openapi/internal/analyze/suggestions.go b/cmd/openapi/internal/analyze/suggestions.go new file mode 100644 index 00000000..4e99b06d --- /dev/null +++ b/cmd/openapi/internal/analyze/suggestions.go @@ -0,0 +1,228 @@ +package analyze + +import "sort" + +// SuggestionType categorizes the kind of refactoring suggestion. +type SuggestionType string + +const ( + SuggestionCutEdge SuggestionType = "cut-edge" + SuggestionAddDiscriminator SuggestionType = "add-discriminator" + SuggestionSplitSCC SuggestionType = "split-scc" + SuggestionReducePropertyCount SuggestionType = "reduce-property-count" +) + +// Suggestion is an actionable refactoring recommendation. +type Suggestion struct { + // Type categorizes the suggestion. + Type SuggestionType + // Title is a short human-readable title. + Title string + // Description explains what to do and why. + Description string + // AffectedSchemas lists the schema IDs involved. + AffectedSchemas []string + // Impact estimates how many issues this would resolve (e.g., cycles broken). + Impact int + // Edge is the specific edge to cut (for cut-edge suggestions). + Edge *Edge +} + +// GenerateSuggestions produces actionable refactoring suggestions based on the analysis. +func GenerateSuggestions(g *Graph, cycles *CycleAnalysis, metrics map[string]*SchemaMetrics, codegen *CodegenReport) []*Suggestion { + var suggestions []*Suggestion + + suggestions = append(suggestions, suggestCycleBreaks(g, cycles)...) + suggestions = append(suggestions, suggestMissingDiscriminators(g, codegen)...) + suggestions = append(suggestions, suggestSCCSplits(g, cycles)...) + suggestions = append(suggestions, suggestPropertyReduction(metrics)...) 
+ + // Sort by impact (highest first) + sort.Slice(suggestions, func(i, j int) bool { + return suggestions[i].Impact > suggestions[j].Impact + }) + + return suggestions +} + +// suggestCycleBreaks finds the minimum set of edges whose removal would break the most cycles. +// Uses a greedy approximation: pick the edge that appears in the most cycles, remove it, repeat. +func suggestCycleBreaks(g *Graph, cycles *CycleAnalysis) []*Suggestion { + if len(cycles.Cycles) == 0 { + return nil + } + + var suggestions []*Suggestion + + // Count how many cycles each edge participates in + type edgeKey struct{ from, to string } + edgeCycleCounts := make(map[edgeKey]int) + edgeMap := make(map[edgeKey]*Edge) + + for _, cycle := range cycles.Cycles { + for _, e := range cycle.Edges { + key := edgeKey{e.From, e.To} + edgeCycleCounts[key]++ + edgeMap[key] = e + } + } + + // Greedily pick edges that break the most cycles + remaining := make(map[int]bool) + for i := range cycles.Cycles { + remaining[i] = true + } + + for len(remaining) > 0 { + // Find the edge in remaining cycles with the highest count + best := edgeKey{} + bestCount := 0 + + counts := make(map[edgeKey]int) + for i := range remaining { + for _, e := range cycles.Cycles[i].Edges { + key := edgeKey{e.From, e.To} + counts[key]++ + if counts[key] > bestCount { + bestCount = counts[key] + best = key + } + } + } + + if bestCount == 0 { + break + } + + edge := edgeMap[best] + qualifier := "optional/nullable" + if edge.IsRequired && !edge.IsNullable && !edge.IsArray { + qualifier = "optional or nullable" + } + + suggestions = append(suggestions, &Suggestion{ + Type: SuggestionCutEdge, + Title: "Make " + edge.From + " → " + edge.To + " " + qualifier, + Description: describeEdgeCut(edge, bestCount), + AffectedSchemas: []string{edge.From, edge.To}, + Impact: bestCount, + Edge: edge, + }) + + // Remove cycles that contained this edge + for i := range remaining { + for _, e := range cycles.Cycles[i].Edges { + if e.From == best.from 
&& e.To == best.to {
+				delete(remaining, i)
+				break
+			}
+		}
+	}
+
+	return suggestions
+}
+
+func describeEdgeCut(e *Edge, cyclesBroken int) string {
+	desc := "Making the "
+	switch e.Kind {
+	case EdgeProperty:
+		desc += "property '" + e.FieldName + "'"
+	case EdgeItems:
+		if e.FieldName != "" {
+			desc += "items of '" + e.FieldName + "'"
+		} else {
+			desc += "items"
+		}
+	default:
+		desc += string(e.Kind)
+	}
+	desc += " reference from " + e.From + " to " + e.To
+	if e.IsRequired {
+		desc += " optional (currently required)"
+	} else {
+		desc += " nullable"
+	}
+	desc += " would break "
+	if cyclesBroken == 1 {
+		desc += "1 cycle"
+	} else {
+		desc += itoa(cyclesBroken) + " cycles" // itoa handles multi-digit counts; string(rune('0'+n)) is wrong for n >= 10
+	}
+	return desc
+}
+
+func suggestMissingDiscriminators(g *Graph, codegen *CodegenReport) []*Suggestion {
+	var suggestions []*Suggestion
+
+	for id, d := range codegen.PerSchema {
+		for _, s := range d.Signals {
+			if s.ID == "oneOf-no-discriminator" {
+				suggestions = append(suggestions, &Suggestion{
+					Type:            SuggestionAddDiscriminator,
+					Title:           "Add discriminator to " + id,
+					Description:     "Schema " + id + " uses oneOf without a discriminator. 
Adding a discriminator property enables code generators to produce efficient deserialization without trial-and-error.", + AffectedSchemas: []string{id}, + Impact: 1, + }) + } + } + } + + return suggestions +} + +func suggestSCCSplits(g *Graph, cycles *CycleAnalysis) []*Suggestion { + var suggestions []*Suggestion + + for _, scc := range cycles.SCCs { + if scc.Size <= 2 { + continue // Small SCCs are already apparent from cycle suggestions + } + + // Find the edge whose removal would split this SCC + // (the edge between the two nodes with the fewest other connections within the SCC) + sccSet := make(map[string]bool, scc.Size) + for _, id := range scc.NodeIDs { + sccSet[id] = true + } + + suggestions = append(suggestions, &Suggestion{ + Type: SuggestionSplitSCC, + Title: "Consider splitting tightly-coupled group", + Description: "A group of " + itoa(scc.Size) + " schemas are all mutually reachable. This tight coupling may indicate an opportunity to extract a simpler interface or break the group into independent layers.", + AffectedSchemas: scc.NodeIDs, + Impact: scc.Size, + }) + } + + return suggestions +} + +func suggestPropertyReduction(metrics map[string]*SchemaMetrics) []*Suggestion { + var suggestions []*Suggestion + + for id, m := range metrics { + if m.PropertyCount > 30 { + suggestions = append(suggestions, &Suggestion{ + Type: SuggestionReducePropertyCount, + Title: "Split " + id + " into smaller schemas", + Description: "Schema " + id + " has " + itoa(m.PropertyCount) + " properties. 
Consider grouping related properties into sub-schemas for better organization and reusability.", + AffectedSchemas: []string{id}, + Impact: 1, + }) + } + } + + return suggestions +} + +func itoa(n int) string { + if n < 0 { + return "-" + itoa(-n) + } + if n < 10 { + return string(rune('0' + n)) + } + return itoa(n/10) + string(rune('0'+n%10)) +} diff --git a/cmd/openapi/internal/analyze/testdata/cyclic.openapi.yaml b/cmd/openapi/internal/analyze/testdata/cyclic.openapi.yaml new file mode 100644 index 00000000..acdef06f --- /dev/null +++ b/cmd/openapi/internal/analyze/testdata/cyclic.openapi.yaml @@ -0,0 +1,186 @@ +openapi: "3.1.0" +info: + title: Cyclic Schema Test + version: "1.0.0" +paths: + /tree: + get: + operationId: getTree + responses: + "200": + description: OK + content: + application/json: + schema: + $ref: "#/components/schemas/TreeNode" +components: + schemas: + TreeNode: + type: object + properties: + value: + type: string + children: + type: array + items: + $ref: "#/components/schemas/TreeNode" + parent: + $ref: "#/components/schemas/TreeNode" + required: + - value + + Person: + type: object + properties: + name: + type: string + employer: + $ref: "#/components/schemas/Company" + required: + - name + - employer + + Company: + type: object + properties: + name: + type: string + ceo: + $ref: "#/components/schemas/Person" + employees: + type: array + items: + $ref: "#/components/schemas/Person" + required: + - name + - ceo + + Animal: + oneOf: + - $ref: "#/components/schemas/Dog" + - $ref: "#/components/schemas/Cat" + + Dog: + type: object + properties: + breed: + type: string + owner: + $ref: "#/components/schemas/Person" + + Cat: + type: object + properties: + color: + type: string + lives: + type: integer + + Organization: + type: object + properties: + name: + type: string + parent: + $ref: "#/components/schemas/Organization" + subsidiaries: + type: array + items: + $ref: "#/components/schemas/Organization" + departments: + type: array + items: + 
$ref: "#/components/schemas/Department" + required: + - name + + Department: + type: object + properties: + name: + type: string + head: + $ref: "#/components/schemas/Person" + org: + $ref: "#/components/schemas/Organization" + required: + - name + - org + + Event: + type: + - object + - "null" + properties: + name: + type: string + data: + anyOf: + - $ref: "#/components/schemas/Person" + - $ref: "#/components/schemas/Company" + + BigSchema: + type: object + properties: + field1: + type: string + field2: + type: string + field3: + type: string + field4: + type: string + field5: + type: string + field6: + type: string + field7: + type: string + field8: + type: string + field9: + type: string + field10: + type: string + field11: + type: string + field12: + type: string + field13: + type: string + field14: + type: string + field15: + type: string + field16: + type: string + field17: + type: string + field18: + type: string + field19: + type: string + field20: + type: string + field21: + type: string + field22: + type: string + field23: + type: string + field24: + type: string + field25: + type: string + field26: + type: string + field27: + type: string + field28: + type: string + field29: + type: string + field30: + type: string + field31: + type: string diff --git a/cmd/openapi/internal/analyze/tui/graph_view.go b/cmd/openapi/internal/analyze/tui/graph_view.go new file mode 100644 index 00000000..6974f3e4 --- /dev/null +++ b/cmd/openapi/internal/analyze/tui/graph_view.go @@ -0,0 +1,249 @@ +package tui + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/lipgloss" + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" +) + +// Graph mode constants. +const ( + GraphModeDAG = 0 + GraphModeSCC = 1 + GraphModeEgo = 2 + graphModeCount = 3 +) + +var graphModeNames = []string{"DAG overview", "SCC gallery", "Ego graph"} + +// renderGraphView dispatches to the appropriate graph rendering mode. 
+func (m Model) renderGraphView() string { + var sb strings.Builder + + // Mode indicator bar + sb.WriteString(m.renderGraphModeBar()) + sb.WriteString("\n") + + switch m.graphMode { + case GraphModeDAG: + sb.WriteString(m.renderGraphDAG()) + case GraphModeSCC: + sb.WriteString(m.renderGraphSCCGallery()) + case GraphModeEgo: + sb.WriteString(m.renderGraphEgo()) + } + + // Selectable node list + sb.WriteString(m.renderGraphNodeList()) + + return sb.String() +} + +func (m Model) renderGraphModeBar() string { + var tabs []string + for i, name := range graphModeNames { + label := name + switch i { + case GraphModeSCC: + sccCount := len(m.report.Cycles.SCCs) + if sccCount > 0 { + label = fmt.Sprintf("%s (%d/%d)", name, m.graphSCCIdx+1, sccCount) + } else { + label = fmt.Sprintf("%s (none)", name) + } + case GraphModeEgo: + if m.graphEgoNode != "" { + label = fmt.Sprintf("%s: %s (%d hops)", name, m.graphEgoNode, m.graphEgoHops) + } + } + + if i == m.graphMode { + tabs = append(tabs, GraphModeActive.Render(label)) + } else { + tabs = append(tabs, GraphModeInactive.Render(label)) + } + } + + return " " + strings.Join(tabs, " ") + "\n" +} + +func (m Model) renderGraphDAG() string { + dag := m.report.Cycles.DAGCondensation + if dag == nil || len(dag.Nodes) == 0 { + return StatLabel.Render(" No schemas to display.") + "\n" + } + + var sb strings.Builder + sb.WriteString(StatLabel.Render(fmt.Sprintf(" %d nodes, %d edges, %d layers", + len(dag.Nodes), len(dag.Edges), dag.Depth)) + "\n\n") + + sb.WriteString(analyze.RenderASCIIGraph(dag, m.width)) + sb.WriteString("\n") + + return sb.String() +} + +func (m Model) renderGraphSCCGallery() string { + sccs := m.report.Cycles.SCCs + if len(sccs) == 0 { + return GreenBadge.Render(" No SCCs found — graph is acyclic!") + "\n" + } + + idx := m.graphSCCIdx + if idx >= len(sccs) { + idx = len(sccs) - 1 + } + + var sb strings.Builder + + // SCC ASCII rendering + ascii := analyze.RenderASCIISCC(m.report.Graph, m.report.Cycles, idx) + 
sb.WriteString(ascii) + + // Cycles in this SCC + scc := sccs[idx] + memberSet := make(map[string]bool, scc.Size) + for _, id := range scc.NodeIDs { + memberSet[id] = true + } + + sb.WriteString("\n") + sb.WriteString(StatLabel.Render(" Cycles through this SCC:") + "\n") + cycleCount := 0 + for _, c := range m.report.Cycles.Cycles { + if len(c.Path) > 0 && memberSet[c.Path[0]] { + cycleCount++ + severity := GreenBadge.Render("optional") + if c.HasRequiredOnlyPath { + severity = RedBadge.Render("required-only") + } + path := strings.Join(c.Path, " -> ") + " -> " + c.Path[0] + if len(path) > m.width-20 { + path = path[:m.width-23] + "..." + } + sb.WriteString(fmt.Sprintf(" %s %s\n", severity, path)) + } + } + if cycleCount == 0 { + sb.WriteString(StatLabel.Render(" (self-loop only)") + "\n") + } + sb.WriteString("\n") + + return sb.String() +} + +func (m Model) renderGraphEgo() string { + if m.graphEgoNode == "" { + return StatLabel.Render(" Select a schema and press Enter to view its ego graph.\n Or navigate nodes below with j/k and Enter.") + "\n" + } + + var sb strings.Builder + ascii := analyze.RenderASCIIEgoGraph(m.report.Graph, m.graphEgoNode, m.graphEgoHops, m.width) + sb.WriteString(ascii) + sb.WriteString("\n") + + return sb.String() +} + +// renderGraphNodeList renders a selectable list of schemas below the graph art. 
+func (m Model) renderGraphNodeList() string { + if len(m.graphItems) == 0 { + return "" + } + + var sb strings.Builder + sb.WriteString(" " + strings.Repeat("─", min(m.width-4, 60)) + "\n") + + label := "Schemas" + switch m.graphMode { + case GraphModeSCC: + label = "SCC members" + case GraphModeEgo: + label = "Neighborhood" + } + sb.WriteString(StatLabel.Render(fmt.Sprintf(" %s (%d) j/k:navigate enter:focus", label, len(m.graphItems))) + "\n") + + // Show a window of items around the cursor + maxVisible := m.contentHeight() / 2 + if maxVisible < 5 { + maxVisible = 5 + } + start := m.graphCursor - maxVisible/2 + if start < 0 { + start = 0 + } + end := start + maxVisible + if end > len(m.graphItems) { + end = len(m.graphItems) + start = end - maxVisible + if start < 0 { + start = 0 + } + } + + if start > 0 { + sb.WriteString(ScrollIndicatorStyle.Render(" ... more above") + "\n") + } + + for i := start; i < end; i++ { + id := m.graphItems[i] + sm := m.report.Metrics[id] + d := m.report.Codegen.PerSchema[id] + + prefix := " " + style := NormalRow + if i == m.graphCursor { + prefix = "> " + style = SelectedRow + } + + // Tier badge + tier := "" + if d != nil { + switch d.Tier { + case analyze.CodegenGreen: + tier = GreenBadge.Render("G") + case analyze.CodegenYellow: + tier = YellowBadge.Render("Y") + case analyze.CodegenRed: + tier = RedBadge.Render("R") + } + } + + // Score and fan info + info := "" + if sm != nil { + info = fmt.Sprintf("score=%-3d in=%d out=%d", sm.ComplexityScore(), sm.FanIn, sm.FanOut) + } + + // Highlight center node in ego mode + marker := "" + if m.graphMode == GraphModeEgo && id == m.graphEgoNode { + marker = StatHighlight.Render(" *") + } + + line := fmt.Sprintf("%s %s %-24s %s%s", prefix, tier, truncate(id, 24), info, marker) + sb.WriteString(style.Render(line) + "\n") + } + + if end < len(m.graphItems) { + sb.WriteString(ScrollIndicatorStyle.Render(" ... more below") + "\n") + } + + return sb.String() +} + +// Graph mode indicator styles. 
+var ( + GraphModeActive = lipgloss.NewStyle(). + Padding(0, 1). + Background(lipgloss.Color(colorBlue)). + Foreground(lipgloss.Color(colorWhite)). + Bold(true) + + GraphModeInactive = lipgloss.NewStyle(). + Padding(0, 1). + Foreground(lipgloss.Color(colorGray)) +) diff --git a/cmd/openapi/internal/analyze/tui/keys.go b/cmd/openapi/internal/analyze/tui/keys.go new file mode 100644 index 00000000..5cdd71ea --- /dev/null +++ b/cmd/openapi/internal/analyze/tui/keys.go @@ -0,0 +1,20 @@ +package tui + +// Tab identifiers +type Tab int + +const ( + TabSummary Tab = iota + TabSchemas + TabCycles + TabGraph +) + +var tabNames = []string{"Summary", "Schemas", "Cycles", "Graph"} + +func (t Tab) String() string { + if int(t) < len(tabNames) { + return tabNames[t] + } + return "Unknown" +} diff --git a/cmd/openapi/internal/analyze/tui/model.go b/cmd/openapi/internal/analyze/tui/model.go new file mode 100644 index 00000000..fde68694 --- /dev/null +++ b/cmd/openapi/internal/analyze/tui/model.go @@ -0,0 +1,488 @@ +package tui + +import ( + "sort" + "strings" + "time" + + tea "github.com/charmbracelet/bubbletea" + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" +) + +const ( + keySequenceThreshold = 500 * time.Millisecond + scrollHalfScreenLines = 21 + headerApproxLines = 3 + footerApproxLines = 2 + layoutBuffer = 2 +) + +// Model is the top-level bubbletea model for the schema complexity analyzer TUI. 
+type Model struct { + report *analyze.Report + + // UI state + activeTab Tab + cursor int + scrollOffset int + width int + height int + showHelp bool + expanded map[int]bool // expanded items in list views + + // Schema list state + schemaFilter string // "" = all, "red", "yellow" + schemaSort string // "name", "fan-in", "fan-out", "tier", "complexity" + schemaItems []string + + // Cycle list state + cycleSelected int + + // Graph view state + graphMode int // 0=DAG overview, 1=SCC gallery, 2=ego graph + graphSCCIdx int // current SCC index for gallery mode + graphEgoNode string // node ID for ego graph + graphEgoHops int // hop radius (default 2) + graphCache map[string]string // cache rendered ASCII art + graphItems []string // selectable node list for current graph mode + graphCursor int // cursor within graphItems + + // Key sequence handling + lastKey string + lastKeyAt time.Time + + quitting bool +} + +// NewModel creates a new TUI model from an analysis report. +func NewModel(report *analyze.Report) Model { + m := Model{ + report: report, + width: 80, + height: 24, + expanded: make(map[int]bool), + graphEgoHops: 2, + graphCache: make(map[string]string), + } + m.rebuildSchemaItems() + m.rebuildGraphItems() + return m +} + +func (m *Model) rebuildGraphItems() { + m.graphItems = nil + m.graphCursor = 0 + + switch m.graphMode { + case GraphModeDAG: + // List all schemas grouped by DAG layer + dag := m.report.Cycles.DAGCondensation + if dag == nil { + break + } + for _, layer := range dag.Layers { + for _, sccIdx := range layer { + if sccIdx >= len(dag.Nodes) { + continue + } + scc := dag.Nodes[sccIdx] + for _, id := range scc.NodeIDs { + m.graphItems = append(m.graphItems, id) + } + } + } + + case GraphModeSCC: + // List nodes in the current SCC + sccs := m.report.Cycles.SCCs + if len(sccs) == 0 { + break + } + idx := m.graphSCCIdx + if idx >= len(sccs) { + idx = len(sccs) - 1 + } + m.graphItems = append(m.graphItems, sccs[idx].NodeIDs...) 
+ + case GraphModeEgo: + if m.graphEgoNode == "" { + break + } + g := m.report.Graph + // BFS neighborhood + visited := map[string]int{m.graphEgoNode: 0} + queue := []string{m.graphEgoNode} + for len(queue) > 0 { + current := queue[0] + queue = queue[1:] + dist := visited[current] + if dist >= m.graphEgoHops { + continue + } + for _, e := range g.OutEdges[current] { + if _, seen := visited[e.To]; !seen { + visited[e.To] = dist + 1 + queue = append(queue, e.To) + } + } + for _, e := range g.InEdges[current] { + if _, seen := visited[e.From]; !seen { + visited[e.From] = dist + 1 + queue = append(queue, e.From) + } + } + } + // Center first, then neighbors sorted + m.graphItems = append(m.graphItems, m.graphEgoNode) + var neighbors []string + for id := range visited { + if id != m.graphEgoNode { + neighbors = append(neighbors, id) + } + } + sort.Strings(neighbors) + m.graphItems = append(m.graphItems, neighbors...) + } +} + +func (m *Model) rebuildSchemaItems() { + m.schemaItems = nil + ranked := analyze.TopSchemasByComplexity(m.report.Metrics, len(m.report.Metrics)) + for _, sm := range ranked { + if m.schemaFilter != "" { + d := m.report.Codegen.PerSchema[sm.NodeID] + if d == nil { + continue + } + switch m.schemaFilter { + case "red": + if d.Tier != analyze.CodegenRed { + continue + } + case "yellow": + if d.Tier != analyze.CodegenYellow && d.Tier != analyze.CodegenRed { + continue + } + } + } + m.schemaItems = append(m.schemaItems, sm.NodeID) + } +} + +func (m Model) Init() tea.Cmd { + return nil +} + +func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.width = msg.Width + m.height = msg.Height + + case tea.KeyMsg: + if m.showHelp { + switch msg.String() { + case "q", "esc", "?": + m.showHelp = false + } + return m, nil + } + + switch msg.String() { + case "q", "ctrl+c": + m.quitting = true + return m, tea.Quit + + case "?": + m.showHelp = true + + case "tab", "l": + m.activeTab = (m.activeTab + 1) % 
Tab(len(tabNames)) + m.cursor = 0 + m.scrollOffset = 0 + m.expanded = make(map[int]bool) + + case "shift+tab", "h": + if m.activeTab == 0 { + m.activeTab = Tab(len(tabNames) - 1) + } else { + m.activeTab-- + } + m.cursor = 0 + m.scrollOffset = 0 + m.expanded = make(map[int]bool) + + case "up", "k": + if m.activeTab == TabGraph { + if m.graphCursor > 0 { + m.graphCursor-- + } + } else if m.cursor > 0 { + m.cursor-- + m.ensureCursorVisible() + } + + case "down", "j": + if m.activeTab == TabGraph { + if m.graphCursor < len(m.graphItems)-1 { + m.graphCursor++ + } + } else { + maxItem := m.maxCursorForTab() + if m.cursor < maxItem { + m.cursor++ + m.ensureCursorVisible() + } + } + + case "enter", " ": + if m.activeTab == TabSchemas && m.cursor < len(m.schemaItems) { + m.expanded[m.cursor] = !m.expanded[m.cursor] + if m.expanded[m.cursor] { + // Snap scroll so the expanded item starts at the top + m.scrollOffset = m.cursor + } else { + m.ensureCursorVisible() + } + // Also set ego graph node for quick switching to graph tab + m.graphEgoNode = m.schemaItems[m.cursor] + m.graphCache = make(map[string]string) + } else if m.activeTab == TabGraph && m.graphCursor < len(m.graphItems) { + selected := m.graphItems[m.graphCursor] + // Navigate into ego graph for selected node + m.graphEgoNode = selected + m.graphMode = GraphModeEgo + m.graphCache = make(map[string]string) + m.rebuildGraphItems() + } else { + m.expanded[m.cursor] = !m.expanded[m.cursor] + if m.expanded[m.cursor] { + m.scrollOffset = m.cursor + } else { + m.ensureCursorVisible() + } + } + + case "m": + if m.activeTab == TabGraph { + m.graphMode = (m.graphMode + 1) % graphModeCount + m.graphCache = make(map[string]string) + m.rebuildGraphItems() + } + + case "n": + if m.activeTab == TabGraph && m.graphMode == GraphModeSCC { + if m.graphSCCIdx < len(m.report.Cycles.SCCs)-1 { + m.graphSCCIdx++ + m.graphCache = make(map[string]string) + m.rebuildGraphItems() + } + } + + case "p": + if m.activeTab == TabGraph && 
m.graphMode == GraphModeSCC { + if m.graphSCCIdx > 0 { + m.graphSCCIdx-- + m.graphCache = make(map[string]string) + m.rebuildGraphItems() + } + } + + case "+", "=": + if m.activeTab == TabGraph && m.graphMode == GraphModeEgo { + if m.graphEgoHops < 5 { + m.graphEgoHops++ + m.graphCache = make(map[string]string) + m.rebuildGraphItems() + } + } + + case "-": + if m.activeTab == TabGraph && m.graphMode == GraphModeEgo { + if m.graphEgoHops > 1 { + m.graphEgoHops-- + m.graphCache = make(map[string]string) + m.rebuildGraphItems() + } + } + + case "ctrl+d": + maxItem := m.maxCursorForTab() + newPos := m.cursor + scrollHalfScreenLines + if newPos > maxItem { + m.cursor = maxItem + } else { + m.cursor = newPos + } + m.ensureCursorVisible() + + case "ctrl+u": + newPos := m.cursor - scrollHalfScreenLines + if newPos < 0 { + m.cursor = 0 + } else { + m.cursor = newPos + } + m.ensureCursorVisible() + + case "G": + m.cursor = m.maxCursorForTab() + m.ensureCursorVisible() + + case "g": + now := time.Now() + if m.lastKey == "g" && now.Sub(m.lastKeyAt) < keySequenceThreshold { + m.cursor = 0 + m.scrollOffset = 0 + m.lastKey = "" + m.lastKeyAt = time.Time{} + } else { + m.lastKey = "g" + m.lastKeyAt = now + } + + case "f": + if m.activeTab == TabSchemas { + switch m.schemaFilter { + case "": + m.schemaFilter = "yellow" + case "yellow": + m.schemaFilter = "red" + case "red": + m.schemaFilter = "" + } + m.rebuildSchemaItems() + m.cursor = 0 + m.scrollOffset = 0 + } + + case "1": + m.activeTab = TabSummary + m.cursor = 0 + m.scrollOffset = 0 + case "2": + m.activeTab = TabSchemas + m.cursor = 0 + m.scrollOffset = 0 + case "3": + m.activeTab = TabCycles + m.cursor = 0 + m.scrollOffset = 0 + case "4": + m.activeTab = TabGraph + m.cursor = 0 + m.scrollOffset = 0 + m.graphCursor = 0 + } + } + + return m, nil +} + +func (m Model) View() string { + if m.showHelp { + return m.renderHelp() + } + + var s strings.Builder + + header := m.renderTabBar() + s.WriteString(header) + + var content 
string + switch m.activeTab { + case TabSummary: + content = m.renderSummary() + case TabSchemas: + content = m.renderSchemaList() + case TabCycles: + content = m.renderCycleList() + case TabGraph: + content = m.renderGraphView() + } + + s.WriteString(content) + + footer := m.renderFooter() + headerLines := strings.Count(header, "\n") + contentLines := strings.Count(content, "\n") + footerLines := strings.Count(footer, "\n") + remaining := m.height - headerLines - contentLines - footerLines - 1 + if remaining > 0 { + s.WriteString(strings.Repeat("\n", remaining)) + } + s.WriteString(footer) + + return s.String() +} + +func (m Model) maxCursorForTab() int { + switch m.activeTab { + case TabSchemas: + if len(m.schemaItems) == 0 { + return 0 + } + return len(m.schemaItems) - 1 + case TabCycles: + if len(m.report.Cycles.Cycles) == 0 { + return 0 + } + return len(m.report.Cycles.Cycles) - 1 + default: + return 0 + } +} + +func (m Model) contentHeight() int { + return max(1, m.height-headerApproxLines-footerApproxLines-layoutBuffer) +} + +func (m *Model) ensureCursorVisible() { + contentH := m.contentHeight() + + if m.cursor == 0 { + m.scrollOffset = 0 + return + } + + if m.cursor < m.scrollOffset { + m.scrollOffset = m.cursor + return + } + + linesUsed := 0 + for i := m.scrollOffset; i <= m.cursor; i++ { + linesUsed += m.itemHeight(i) + } + + if linesUsed > contentH { + for newOffset := m.scrollOffset + 1; newOffset <= m.cursor; newOffset++ { + test := 0 + for i := newOffset; i <= m.cursor; i++ { + test += m.itemHeight(i) + } + if test <= contentH { + m.scrollOffset = newOffset + break + } + } + } +} + +func (m Model) itemHeight(index int) int { + if !m.expanded[index] { + return 1 + } + // Estimate card height based on content + h := 12 // base: title, tier, types, props, fan, complexity, border + if m.activeTab == TabSchemas && index < len(m.schemaItems) { + id := m.schemaItems[index] + if d, ok := m.report.Codegen.PerSchema[id]; ok && len(d.Signals) > 0 { + h += 
len(d.Signals) + 1 + } + if edges := m.report.Graph.OutEdges[id]; len(edges) > 0 { + h += len(edges) + 1 + } + } + return h +} diff --git a/cmd/openapi/internal/analyze/tui/schema_card.go b/cmd/openapi/internal/analyze/tui/schema_card.go new file mode 100644 index 00000000..876bd4e0 --- /dev/null +++ b/cmd/openapi/internal/analyze/tui/schema_card.go @@ -0,0 +1,212 @@ +package tui + +import ( + "fmt" + "sort" + "strings" + + "github.com/charmbracelet/lipgloss" + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" +) + +// renderSchemaCard renders a bordered detail card for a schema. +func (m Model) renderSchemaCard(nodeID string) string { + sm := m.report.Metrics[nodeID] + d := m.report.Codegen.PerSchema[nodeID] + node := m.report.Graph.Nodes[nodeID] + if sm == nil || node == nil { + return "" + } + + // Determine card width + cardWidth := m.width - 8 + if cardWidth > 64 { + cardWidth = 64 + } + if cardWidth < 30 { + cardWidth = 30 + } + innerWidth := cardWidth - 4 // padding inside border + + var content strings.Builder + + // Pick border color based on tier + borderColor := colorGreen + tierEmoji := "🟢" + tierName := "green" + if d != nil { + tierName = strings.ToLower(d.Tier.String()) + switch d.Tier { + case analyze.CodegenYellow: + borderColor = colorYellow + tierEmoji = "🟡" + case analyze.CodegenRed: + borderColor = colorRed + tierEmoji = "🔴" + } + } + + titleStyle := lipgloss.NewStyle(). + Bold(true). 
+ Foreground(lipgloss.Color(borderColor)) + + // Title line inside the card + content.WriteString(titleStyle.Render(nodeID) + "\n") + + // Tier + score + rank + ranked := analyze.TopSchemasByComplexity(m.report.Metrics, len(m.report.Metrics)) + rank := 0 + for i, r := range ranked { + if r.NodeID == nodeID { + rank = i + 1 + break + } + } + content.WriteString(fmt.Sprintf("%s %s %s %s %s %s\n", + StatLabel.Render("Tier:"), StatValue.Render(tierEmoji+" "+tierName), + StatLabel.Render("Score:"), StatValue.Render(fmt.Sprintf("%d", sm.ComplexityScore())), + StatLabel.Render("Rank:"), StatValue.Render(fmt.Sprintf("#%d", rank)))) + content.WriteString("\n") + + // Types + nullable + types := strings.Join(node.Types, ", ") + if types == "" { + types = "(none)" + } + typeLine := fmt.Sprintf("%s %s", StatLabel.Render("Types:"), StatValue.Render(types)) + if node.IsNullable { + typeLine += " " + YellowBadge.Render("nullable") + } + content.WriteString(typeLine + "\n") + + // Properties: N required / M total (deep: D) + propLine := fmt.Sprintf("%s %s", + StatLabel.Render("Properties:"), + StatValue.Render(fmt.Sprintf("%d required / %d total", sm.RequiredCount, sm.PropertyCount))) + if sm.DeepPropertyCount != sm.PropertyCount { + propLine += StatLabel.Render(fmt.Sprintf(" (deep: %d)", sm.DeepPropertyCount)) + } + content.WriteString(propLine + "\n") + + // Fan-in / Fan-out + content.WriteString(fmt.Sprintf("%s %s %s %s\n", + StatLabel.Render("Fan-in:"), StatValue.Render(fmt.Sprintf("%d", sm.FanIn)), + StatLabel.Render("Fan-out:"), StatValue.Render(fmt.Sprintf("%d", sm.FanOut)))) + content.WriteString("\n") + + // Complexity section + content.WriteString(StatValue.Render("Complexity") + "\n") + content.WriteString(fmt.Sprintf(" %s %s %s %s %s %s\n", + StatLabel.Render("Nesting:"), StatValue.Render(fmt.Sprintf("%d", sm.NestingDepth)), + StatLabel.Render("Composition:"), StatValue.Render(fmt.Sprintf("%d", sm.CompositionDepth)), + StatLabel.Render("Unions:"), 
StatValue.Render(fmt.Sprintf("%d", sm.UnionSiteCount)))) + if sm.UnionSiteCount > 0 { + variantLine := fmt.Sprintf(" %s %s", + StatLabel.Render("Max width:"), StatValue.Render(fmt.Sprintf("%d", sm.MaxUnionWidth))) + if sm.VariantProduct > 1 { + variantLine += fmt.Sprintf(" %s %s", + StatLabel.Render("Variant product:"), + StatWarning.Render(fmt.Sprintf("%d", sm.VariantProduct))) + } + if sm.HasDiscriminator { + variantLine += " " + GreenBadge.Render("discriminated") + } + content.WriteString(variantLine + "\n") + } + + // Composition keywords + if len(node.CompositionFields) > 0 { + content.WriteString(fmt.Sprintf(" %s %s\n", + StatLabel.Render("Keywords:"), + StatValue.Render(strings.Join(node.CompositionFields, ", ")))) + } + content.WriteString("\n") + + // Cycle membership + if sm.InSCC { + cycleLine := fmt.Sprintf("%s %s", + StatLabel.Render("Cycles:"), + StatWarning.Render(fmt.Sprintf("member of %d cycle(s)", sm.CycleMembership))) + content.WriteString(cycleLine + "\n\n") + } + + // Signals with full descriptions + if d != nil && len(d.Signals) > 0 { + content.WriteString(StatValue.Render("Signals") + "\n") + for _, sig := range d.Signals { + icon := " " + switch sig.Severity { + case analyze.CodegenRed: + icon = RedBadge.Render("!") + case analyze.CodegenYellow: + icon = YellowBadge.Render("~") + } + desc := sig.Description + if innerWidth > 6 && len(desc) > innerWidth-6 { + desc = desc[:innerWidth-9] + "..." 
+ } + content.WriteString(fmt.Sprintf(" %s %s\n", icon, StatLabel.Render(desc))) + } + content.WriteString("\n") + } + + // Outgoing edges with detail + outEdges := m.report.Graph.OutEdges[nodeID] + if len(outEdges) > 0 { + content.WriteString(StatValue.Render("References (out)") + "\n") + for _, e := range outEdges { + var parts []string + parts = append(parts, StatValue.Render(e.To)) + kindStr := string(e.Kind) + if e.FieldName != "" { + kindStr += ":" + e.FieldName + } + parts = append(parts, StatLabel.Render("via "+kindStr)) + + var flags []string + if e.IsRequired { + flags = append(flags, RequiredEdge.Render("req")) + } + if e.IsNullable { + flags = append(flags, GreenBadge.Render("nullable")) + } + if e.IsArray { + flags = append(flags, ArrayEdge.Render("array")) + } + if len(flags) > 0 { + parts = append(parts, "["+strings.Join(flags, " ")+"]") + } + + content.WriteString(" " + strings.Join(parts, " ") + "\n") + } + content.WriteString("\n") + } + + // Incoming edges + inEdges := m.report.Graph.InEdges[nodeID] + if len(inEdges) > 0 { + refs := make(map[string]bool) + for _, e := range inEdges { + refs[e.From] = true + } + refList := make([]string, 0, len(refs)) + for r := range refs { + refList = append(refList, r) + } + sort.Strings(refList) + refStr := strings.Join(refList, ", ") + if innerWidth > 16 && len(refStr) > innerWidth-16 { + refStr = refStr[:innerWidth-19] + "..." + } + content.WriteString(fmt.Sprintf("%s %s\n", + StatLabel.Render("Referenced by:"), StatValue.Render(refStr))) + } + + cardStyle := lipgloss.NewStyle(). + Border(lipgloss.RoundedBorder()). + BorderForeground(lipgloss.Color(borderColor)). + Padding(0, 1). 
+ Width(cardWidth) + + return " " + strings.ReplaceAll(cardStyle.Render(content.String()), "\n", "\n ") + "\n" +} diff --git a/cmd/openapi/internal/analyze/tui/styles.go b/cmd/openapi/internal/analyze/tui/styles.go new file mode 100644 index 00000000..c4ec73fc --- /dev/null +++ b/cmd/openapi/internal/analyze/tui/styles.go @@ -0,0 +1,154 @@ +package tui + +import "github.com/charmbracelet/lipgloss" + +const ( + colorGreen = "#10B981" + colorBlue = "#3B82F6" + colorYellow = "#F59E0B" + colorRed = "#EF4444" + colorPurple = "#8B5CF6" + colorGray = "#6B7280" + colorThemePurple = "#7C3AED" + colorBackground = "#374151" + colorDetailGray = "#9CA3AF" + colorFooterText = "#000000" + colorWhite = "#FFFFFF" + colorCyan = "#06B6D4" + colorOrange = "#F97316" +) + +var ( + TitleStyle = lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color(colorThemePurple)) + + SubtitleStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorDetailGray)) + + // Tier badge styles + GreenBadge = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorGreen)). + Bold(true) + + YellowBadge = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorYellow)). + Bold(true) + + RedBadge = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorRed)). + Bold(true) + + // Stat styles + StatLabel = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorDetailGray)) + + StatValue = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorWhite)). + Bold(true) + + StatHighlight = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorCyan)). + Bold(true) + + StatWarning = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorOrange)). + Bold(true) + + // Tab styles + ActiveTab = lipgloss.NewStyle(). + Padding(0, 1). + Background(lipgloss.Color(colorThemePurple)). + Foreground(lipgloss.Color(colorWhite)). + Bold(true) + + InactiveTab = lipgloss.NewStyle(). + Padding(0, 1). + Foreground(lipgloss.Color(colorGray)) + + // List styles + SelectedRow = lipgloss.NewStyle(). 
+ Background(lipgloss.Color(colorBackground)) + + NormalRow = lipgloss.NewStyle() + + // Detail section + DetailStyle = lipgloss.NewStyle(). + PaddingLeft(2). + Foreground(lipgloss.Color(colorDetailGray)) + + DetailHeader = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorWhite)). + Bold(true). + MarginTop(1) + + // Suggestion styles + SuggestionStyle = lipgloss.NewStyle(). + PaddingLeft(2). + Foreground(lipgloss.Color(colorCyan)) + + // Cycle edge styles + RequiredEdge = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorRed)). + Bold(true) + + OptionalEdge = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorGreen)) + + ArrayEdge = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorBlue)) + + // Footer + FooterStyle = lipgloss.NewStyle(). + Background(lipgloss.Color(colorGray)). + Foreground(lipgloss.Color(colorFooterText)). + Padding(0, 1) + + HelpKeyStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorBlue)). + Bold(true) + + HelpTextStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorWhite)) + + HelpModalStyle = lipgloss.NewStyle(). + Border(lipgloss.RoundedBorder()). + BorderForeground(lipgloss.Color(colorThemePurple)). + Padding(1, 2). + Width(50) + + HelpTitleStyle = lipgloss.NewStyle(). + Bold(true). + Foreground(lipgloss.Color(colorThemePurple)). + Align(lipgloss.Center). + Width(46) + + // Box styles for dashboard cards + CardStyle = lipgloss.NewStyle(). + Border(lipgloss.RoundedBorder()). + BorderForeground(lipgloss.Color(colorGray)). + Padding(0, 1) + + CardTitleStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorThemePurple)). + Bold(true) + + ScrollIndicatorStyle = lipgloss.NewStyle(). + Foreground(lipgloss.Color(colorGray)) + + // Card border styles by tier + CardBorderGreen = lipgloss.NewStyle(). + Border(lipgloss.RoundedBorder()). + BorderForeground(lipgloss.Color(colorGreen)). + Padding(0, 1) + + CardBorderYellow = lipgloss.NewStyle(). + Border(lipgloss.RoundedBorder()). 
+ BorderForeground(lipgloss.Color(colorYellow)). + Padding(0, 1) + + CardBorderRed = lipgloss.NewStyle(). + Border(lipgloss.RoundedBorder()). + BorderForeground(lipgloss.Color(colorRed)). + Padding(0, 1) +) diff --git a/cmd/openapi/internal/analyze/tui/views.go b/cmd/openapi/internal/analyze/tui/views.go new file mode 100644 index 00000000..78c60964 --- /dev/null +++ b/cmd/openapi/internal/analyze/tui/views.go @@ -0,0 +1,437 @@ +package tui + +import ( + "fmt" + "strings" + + "github.com/charmbracelet/lipgloss" + "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" +) + +func (m Model) renderTabBar() string { + var tabs []string + for i, name := range tabNames { + if Tab(i) == m.activeTab { + tabs = append(tabs, ActiveTab.Render(name)) + } else { + tabs = append(tabs, InactiveTab.Render(name)) + } + } + + title := TitleStyle.Render("Schema Analyzer") + docInfo := SubtitleStyle.Render(fmt.Sprintf(" %s v%s", m.report.DocumentTitle, m.report.DocumentVersion)) + + return title + docInfo + "\n" + strings.Join(tabs, " ") + "\n\n" +} + +func (m Model) renderSummary() string { + r := m.report + var s strings.Builder + + // Overview card + s.WriteString(CardTitleStyle.Render("Overview") + "\n") + s.WriteString(fmt.Sprintf(" %s %s %s %s %s %s\n", + StatLabel.Render("Schemas:"), StatValue.Render(fmt.Sprintf("%d", r.TotalSchemas)), + StatLabel.Render("Refs:"), StatValue.Render(fmt.Sprintf("%d", r.TotalEdges)), + StatLabel.Render("OpenAPI:"), StatValue.Render(r.OpenAPIVersion), + )) + s.WriteString("\n") + + // Cycle health + s.WriteString(CardTitleStyle.Render("Cycle Health") + "\n") + sccLabel := fmt.Sprintf("%d", r.SCCCount) + if r.SCCCount == 0 { + sccLabel = GreenBadge.Render("0 (none)") + } else { + sccLabel = StatWarning.Render(sccLabel) + } + s.WriteString(fmt.Sprintf(" %s %s %s %s %s %s\n", + StatLabel.Render("SCCs:"), sccLabel, + StatLabel.Render("Largest:"), formatSCCSize(r.LargestSCCSize), + StatLabel.Render("Cycles:"), 
formatCycleCount(len(r.Cycles.Cycles)), + )) + cyclesPctLabel := fmt.Sprintf("%.0f%%", r.SchemasInCyclesPct) + if r.SchemasInCyclesPct == 0 { + cyclesPctLabel = GreenBadge.Render("0%") + } else if r.SchemasInCyclesPct > 30 { + cyclesPctLabel = RedBadge.Render(cyclesPctLabel) + } else { + cyclesPctLabel = YellowBadge.Render(cyclesPctLabel) + } + reqCyclesLabel := fmt.Sprintf("%d", r.RequiredOnlyCycles) + if r.RequiredOnlyCycles > 0 { + reqCyclesLabel = RedBadge.Render(reqCyclesLabel) + } else { + reqCyclesLabel = GreenBadge.Render("0") + } + s.WriteString(fmt.Sprintf(" %s %s %s %s %s %d\n", + StatLabel.Render("In cycles:"), cyclesPctLabel, + StatLabel.Render("Required-only:"), reqCyclesLabel, + StatLabel.Render("DAG depth:"), r.DAGDepth, + )) + s.WriteString("\n") + + // Codegen compatibility + s.WriteString(CardTitleStyle.Render("Codegen Compatibility") + "\n") + scoreStr := fmt.Sprintf("%.0f%%", r.CompatibilityScore) + if r.CompatibilityScore >= 80 { + scoreStr = GreenBadge.Render(scoreStr) + } else if r.CompatibilityScore >= 50 { + scoreStr = YellowBadge.Render(scoreStr) + } else { + scoreStr = RedBadge.Render(scoreStr) + } + s.WriteString(fmt.Sprintf(" %s %s %s %s %s %s %s %s\n", + StatLabel.Render("Score:"), scoreStr, + StatLabel.Render("Green:"), GreenBadge.Render(fmt.Sprintf("%d", r.Codegen.GreenCount)), + StatLabel.Render("Yellow:"), YellowBadge.Render(fmt.Sprintf("%d", r.Codegen.YellowCount)), + StatLabel.Render("Red:"), RedBadge.Render(fmt.Sprintf("%d", r.Codegen.RedCount)), + )) + s.WriteString("\n") + + // Compatibility bar + s.WriteString(" " + renderBar(r.Codegen.GreenCount, r.Codegen.YellowCount, r.Codegen.RedCount, m.width-6) + "\n\n") + + // Top schemas by fan-in + if len(r.TopFanIn) > 0 { + s.WriteString(CardTitleStyle.Render("Highest Fan-In (most referenced)") + "\n") + for i, sm := range r.TopFanIn { + if sm.FanIn == 0 { + break + } + s.WriteString(fmt.Sprintf(" %d. 
%-30s %s\n", i+1, sm.NodeID, StatHighlight.Render(fmt.Sprintf("%d refs", sm.FanIn)))) + } + s.WriteString("\n") + } + + // Top schemas by complexity + if len(r.TopComplex) > 0 { + s.WriteString(CardTitleStyle.Render("Most Complex Schemas") + "\n") + for i, sm := range r.TopComplex { + score := sm.ComplexityScore() + if score == 0 { + break + } + tier := tierBadge(m.report.Codegen.PerSchema[sm.NodeID]) + detail := fmt.Sprintf("fan-in:%d fan-out:%d props:%d", sm.FanIn, sm.FanOut, sm.DeepPropertyCount) + if sm.VariantProduct > 1 { + detail += fmt.Sprintf(" variants:%d", sm.VariantProduct) + } + s.WriteString(fmt.Sprintf(" %d. %-28s %s %s %s\n", + i+1, sm.NodeID, tier, StatHighlight.Render(fmt.Sprintf("score=%d", score)), detail)) + } + s.WriteString("\n") + } + + // Top suggestions + if len(r.Suggestions) > 0 { + limit := 5 + if len(r.Suggestions) < limit { + limit = len(r.Suggestions) + } + s.WriteString(CardTitleStyle.Render("Top Suggestions") + "\n") + for i := 0; i < limit; i++ { + sg := r.Suggestions[i] + s.WriteString(SuggestionStyle.Render(fmt.Sprintf(" %d. 
%s (impact: %d)", i+1, sg.Title, sg.Impact)) + "\n") + } + } + + return s.String() +} + +func (m Model) renderSchemaList() string { + var s strings.Builder + + // Filter indicator + filterLabel := "all" + switch m.schemaFilter { + case "yellow": + filterLabel = YellowBadge.Render("yellow+red") + case "red": + filterLabel = RedBadge.Render("red only") + } + s.WriteString(StatLabel.Render(fmt.Sprintf(" Filter: %s (%d schemas) [f] to cycle filter", filterLabel, len(m.schemaItems))) + "\n\n") + + // Header — pad before styling so ANSI escapes don't break alignment + s.WriteString(fmt.Sprintf(" %s %s %s %s %s %s\n", + StatLabel.Render(fmt.Sprintf("%-30s", "Schema")), + StatLabel.Render(fmt.Sprintf("%5s", "Score")), + StatLabel.Render(fmt.Sprintf("%6s", "Fan-In")), + StatLabel.Render(fmt.Sprintf("%7s", "Fan-Out")), + StatLabel.Render(fmt.Sprintf("%5s", "Props")), + StatLabel.Render("Tier"), + )) + s.WriteString(" " + strings.Repeat("-", min(m.width-4, 70)) + "\n") + + contentH := m.contentHeight() - 4 // header rows + linesRendered := 0 + + for i := m.scrollOffset; i < len(m.schemaItems) && linesRendered < contentH; i++ { + id := m.schemaItems[i] + sm := m.report.Metrics[id] + d := m.report.Codegen.PerSchema[id] + + prefix := " " + rowStyle := NormalRow + if i == m.cursor { + prefix = "> " + rowStyle = SelectedRow + } + + tier := tierBadge(d) + line := fmt.Sprintf("%s%-30s %5d %6d %7d %5d %s", + prefix, truncate(id, 30), sm.ComplexityScore(), sm.FanIn, sm.FanOut, sm.PropertyCount, tier) + s.WriteString(rowStyle.Render(line) + "\n") + linesRendered++ + + // Expanded detail — bordered card + if m.expanded[i] { + card := m.renderSchemaCard(id) + s.WriteString(card) + linesRendered += strings.Count(card, "\n") + } + } + + if m.scrollOffset > 0 { + s.WriteString(ScrollIndicatorStyle.Render(" ... more above") + "\n") + } + if m.scrollOffset+contentH < len(m.schemaItems) { + s.WriteString(ScrollIndicatorStyle.Render(" ... 
more below") + "\n") + } + + return s.String() +} + + +func (m Model) renderCycleList() string { + var s strings.Builder + + cycles := m.report.Cycles.Cycles + if len(cycles) == 0 { + s.WriteString(GreenBadge.Render(" No cycles detected!") + "\n") + return s.String() + } + + s.WriteString(StatLabel.Render(fmt.Sprintf(" %d cycles found (%d required-only)", len(cycles), m.report.RequiredOnlyCycles)) + "\n\n") + + contentH := m.contentHeight() - 2 + linesRendered := 0 + + for i := m.scrollOffset; i < len(cycles) && linesRendered < contentH; i++ { + c := cycles[i] + + prefix := " " + rowStyle := NormalRow + if i == m.cursor { + prefix = "> " + rowStyle = SelectedRow + } + + severity := GreenBadge.Render("optional") + if c.HasRequiredOnlyPath { + severity = RedBadge.Render("required-only") + } else if len(c.BreakPoints) == 0 { + severity = YellowBadge.Render("no-break-point") + } + + line := fmt.Sprintf("%sCycle %d len:%d %s %s", + prefix, i+1, c.Length, severity, formatCyclePath(c)) + s.WriteString(rowStyle.Render(line) + "\n") + linesRendered++ + + if m.expanded[i] { + s.WriteString(m.renderCycleDetail(c)) + linesRendered += c.Length + 3 + } + } + + return s.String() +} + +func (m Model) renderCycleDetail(c *analyze.Cycle) string { + var s strings.Builder + + s.WriteString(DetailStyle.Render(" Path:") + "\n") + for j, nodeID := range c.Path { + var edge *analyze.Edge + if j < len(c.Edges) { + edge = c.Edges[j] + } + + nodeStr := " " + nodeID + if edge != nil { + edgeLabel := formatEdgeLabel(edge) + isBreak := false + for _, bp := range c.BreakPoints { + if bp == edge { + isBreak = true + break + } + } + if isBreak { + nodeStr += " " + GreenBadge.Render("--"+edgeLabel+"--> ") + GreenBadge.Render("[cut here]") + } else { + nodeStr += " " + RequiredEdge.Render("--"+edgeLabel+"-->") + } + } else { + // Last node connects back to first + nodeStr += " " + StatLabel.Render("(back to "+c.Path[0]+")") + } + + s.WriteString(DetailStyle.Render(nodeStr) + "\n") + } + + if 
len(c.BreakPoints) > 0 { + s.WriteString(SuggestionStyle.Render(fmt.Sprintf(" Suggestion: %d break point(s) available", len(c.BreakPoints))) + "\n") + } else if c.HasRequiredOnlyPath { + s.WriteString(RedBadge.Render(" Warning: No natural break points — all edges required") + "\n") + } + s.WriteString("\n") + + return s.String() +} + + +func (m Model) renderFooter() string { + var parts []string + parts = append(parts, "q:quit") + parts = append(parts, "tab:next view") + parts = append(parts, "1-4:jump to view") + parts = append(parts, "?:help") + + switch m.activeTab { + case TabSchemas: + parts = append(parts, "f:filter") + parts = append(parts, "enter:expand") + case TabCycles: + parts = append(parts, "enter:expand") + case TabGraph: + parts = append(parts, "j/k:navigate") + parts = append(parts, "enter:focus") + parts = append(parts, "m:mode") + if m.graphMode == GraphModeSCC { + parts = append(parts, "n/p:SCC") + } + if m.graphMode == GraphModeEgo { + parts = append(parts, "+/-:hops") + } + } + + return FooterStyle.Width(m.width).Render(strings.Join(parts, " ")) + "\n" +} + +func (m Model) renderHelp() string { + var s strings.Builder + + s.WriteString(HelpTitleStyle.Render("Schema Analyzer Help") + "\n\n") + + helpItems := []struct{ key, desc string }{ + {"q / Ctrl-C", "Quit"}, + {"Tab / l", "Next tab"}, + {"Shift-Tab / h", "Previous tab"}, + {"1-4", "Jump to tab"}, + {"j / Down", "Move down"}, + {"k / Up", "Move up"}, + {"gg", "Jump to top"}, + {"G", "Jump to bottom"}, + {"Ctrl-D", "Scroll down half page"}, + {"Ctrl-U", "Scroll up half page"}, + {"Enter / Space", "Expand/collapse or focus node"}, + {"f", "Cycle filter (schemas tab)"}, + {"m", "Cycle graph mode (graph tab)"}, + {"n / p", "Next/prev SCC (SCC gallery)"}, + {"+ / -", "Increase/decrease ego hops"}, + {"Enter", "Focus node → ego graph (graph)"}, + {"?", "Toggle help"}, + } + + for _, item := range helpItems { + s.WriteString(fmt.Sprintf(" %s %s\n", + HelpKeyStyle.Render(fmt.Sprintf("%-14s", 
item.key)), + HelpTextStyle.Render(item.desc))) + } + + return HelpModalStyle.Render(s.String()) +} + +// --- Helpers --- + +func tierBadge(d *analyze.CodegenDifficulty) string { + if d == nil { + return StatLabel.Render("-") + } + switch d.Tier { + case analyze.CodegenGreen: + return GreenBadge.Render("GREEN") + case analyze.CodegenYellow: + return YellowBadge.Render("YELLOW") + case analyze.CodegenRed: + return RedBadge.Render("RED") + } + return "-" +} + +func formatEdgeLabel(e *analyze.Edge) string { + label := string(e.Kind) + if e.FieldName != "" { + label += ":" + e.FieldName + } + if e.IsRequired { + label += " [req]" + } + if e.IsArray { + label += " []" + } + return label +} + +func formatCyclePath(c *analyze.Cycle) string { + if len(c.Path) == 0 { + return "" + } + path := strings.Join(c.Path, " -> ") + path += " -> " + c.Path[0] + if len(path) > 60 { + path = path[:57] + "..." + } + return path +} + +func formatSCCSize(size int) string { + if size == 0 { + return GreenBadge.Render("0") + } + if size > 5 { + return RedBadge.Render(fmt.Sprintf("%d", size)) + } + return YellowBadge.Render(fmt.Sprintf("%d", size)) +} + +func formatCycleCount(count int) string { + if count == 0 { + return GreenBadge.Render("0") + } + return StatWarning.Render(fmt.Sprintf("%d", count)) +} + +func renderBar(green, yellow, red, width int) string { + total := green + yellow + red + if total == 0 || width <= 0 { + return "" + } + + gw := green * width / total + yw := yellow * width / total + rw := width - gw - yw + + return lipgloss.NewStyle().Foreground(lipgloss.Color(colorGreen)).Render(strings.Repeat("█", gw)) + + lipgloss.NewStyle().Foreground(lipgloss.Color(colorYellow)).Render(strings.Repeat("█", yw)) + + lipgloss.NewStyle().Foreground(lipgloss.Color(colorRed)).Render(strings.Repeat("█", rw)) +} + +func truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen-3] + "..." 
+} From a5340b26d36f9a89606aaa3d4cd77fc82e790df7 Mon Sep 17 00:00:00 2001 From: Vishal Gowda Date: Wed, 18 Feb 2026 15:46:43 +0000 Subject: [PATCH 2/2] fix: improve analyzer TUI usability and output consistency Address 10-point UX review: auto-fallback to text for non-TTY, wire up schema sorting (s key), deterministic tie-breaking, complexity score breakdown, signal descriptions in JSON/text, Suggestions tab, mermaid export format, and visual polish (emoji removal, footer contrast, dead style cleanup). Co-Authored-By: Claude Opus 4.6 --- cmd/openapi/commands/openapi/analyze.go | 73 +++++++------- cmd/openapi/go.mod | 3 +- cmd/openapi/go.sum | 6 +- cmd/openapi/internal/analyze/metrics.go | 71 +++++++++++++- cmd/openapi/internal/analyze/output.go | 72 ++++++++++---- cmd/openapi/internal/analyze/suggestions.go | 2 +- cmd/openapi/internal/analyze/tui/keys.go | 3 +- cmd/openapi/internal/analyze/tui/model.go | 98 +++++++++++++++---- .../internal/analyze/tui/schema_card.go | 27 +++-- cmd/openapi/internal/analyze/tui/styles.go | 32 +----- cmd/openapi/internal/analyze/tui/views.go | 71 +++++++++++++- 11 files changed, 327 insertions(+), 131 deletions(-) diff --git a/cmd/openapi/commands/openapi/analyze.go b/cmd/openapi/commands/openapi/analyze.go index cb96b426..48ebc061 100644 --- a/cmd/openapi/commands/openapi/analyze.go +++ b/cmd/openapi/commands/openapi/analyze.go @@ -1,14 +1,15 @@ package openapi import ( - "errors" "fmt" + "io" "os" tea "github.com/charmbracelet/bubbletea" "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze" "github.com/speakeasy-api/openapi/cmd/openapi/internal/analyze/tui" "github.com/spf13/cobra" + "golang.org/x/term" ) var analyzeCmd = &cobra.Command{ @@ -23,10 +24,13 @@ This command examines schema references to identify: - Actionable refactoring suggestions Output formats: - tui - Interactive terminal UI with progressive disclosure (default) - json - Machine-readable JSON report for CI/CD pipelines - text - Human-readable text summary 
- dot - Graphviz DOT format for graph visualization + tui - Interactive terminal UI with progressive disclosure (default) + json - Machine-readable JSON report for CI/CD pipelines + text - Human-readable text summary + dot - Graphviz DOT format for graph visualization + mermaid - Mermaid diagram syntax + +The TUI format auto-falls back to text when stdout is not a terminal. Stdin is supported — pipe data or use '-': cat spec.yaml | openapi spec analyze @@ -36,7 +40,7 @@ Stdin is supported — pipe data or use '-': } func init() { - analyzeCmd.Flags().StringP("format", "f", "tui", "output format: tui, json, text, dot") + analyzeCmd.Flags().StringP("format", "f", "tui", "output format: tui, json, text, dot, mermaid") analyzeCmd.Flags().StringP("output", "o", "", "write output to file instead of stdout") } @@ -55,11 +59,29 @@ func runAnalyze(cmd *cobra.Command, args []string) error { // Run analysis report := analyze.Analyze(ctx, doc) + // Auto-fallback: if format is TUI but stdout is not a terminal, use text + if format == "tui" && outputFile == "" && !term.IsTerminal(int(os.Stdout.Fd())) { + format = "text" + } + + // TUI is incompatible with --output; suggest text instead + if format == "tui" && outputFile != "" { + return fmt.Errorf("--output is not compatible with --format tui; use --format text, json, or dot instead") + } + + // Open output writer + var w io.Writer = os.Stdout + if outputFile != "" { + f, err := os.Create(outputFile) + if err != nil { + return fmt.Errorf("failed to create output file: %w", err) + } + defer f.Close() + w = f + } + switch format { case "tui": - if outputFile != "" { - return errors.New("--output is not compatible with --format tui") - } m := tui.NewModel(report) p := tea.NewProgram(m, tea.WithAltScreen()) if _, err := p.Run(); err != nil { @@ -68,44 +90,21 @@ func runAnalyze(cmd *cobra.Command, args []string) error { return nil case "json": - w := os.Stdout - if outputFile != "" { - f, err := os.Create(outputFile) - if err != nil { 
- return fmt.Errorf("failed to create output file: %w", err) - } - defer f.Close() - w = f - } return analyze.WriteJSON(w, report) case "text": - w := os.Stdout - if outputFile != "" { - f, err := os.Create(outputFile) - if err != nil { - return fmt.Errorf("failed to create output file: %w", err) - } - defer f.Close() - w = f - } analyze.WriteText(w, report) return nil case "dot": - w := os.Stdout - if outputFile != "" { - f, err := os.Create(outputFile) - if err != nil { - return fmt.Errorf("failed to create output file: %w", err) - } - defer f.Close() - w = f - } analyze.WriteDOT(w, report) return nil + case "mermaid": + analyze.WriteMermaid(w, report) + return nil + default: - return fmt.Errorf("unknown format: %s (expected tui, json, text, or dot)", format) + return fmt.Errorf("unknown format: %s (expected tui, json, text, dot, or mermaid)", format) } } diff --git a/cmd/openapi/go.mod b/cmd/openapi/go.mod index 3c0f3a22..4c8ab35c 100644 --- a/cmd/openapi/go.mod +++ b/cmd/openapi/go.mod @@ -10,6 +10,7 @@ require ( github.com/speakeasy-api/openapi/openapi/linter/customrules v0.0.0-20260217225223-7d484a30828f github.com/spf13/cobra v1.10.1 github.com/stretchr/testify v1.11.1 + golang.org/x/term v0.40.0 gopkg.in/yaml.v3 v3.0.1 ) @@ -44,6 +45,6 @@ require ( github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect golang.org/x/sync v0.19.0 // indirect - golang.org/x/sys v0.36.0 // indirect + golang.org/x/sys v0.41.0 // indirect golang.org/x/text v0.34.0 // indirect ) diff --git a/cmd/openapi/go.sum b/cmd/openapi/go.sum index 0cc1083f..438e30e2 100644 --- a/cmd/openapi/go.sum +++ b/cmd/openapi/go.sum @@ -113,8 +113,10 @@ golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= -golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= +golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= diff --git a/cmd/openapi/internal/analyze/metrics.go b/cmd/openapi/internal/analyze/metrics.go index db85f3ac..185a2b55 100644 --- a/cmd/openapi/internal/analyze/metrics.go +++ b/cmd/openapi/internal/analyze/metrics.go @@ -105,6 +105,28 @@ func TopSchemasByComplexity(metrics map[string]*SchemaMetrics, n int) []*SchemaM }) } +// TopSchemasByName returns all schemas sorted alphabetically by name. +func TopSchemasByName(metrics map[string]*SchemaMetrics, n int) []*SchemaMetrics { + return topSchemasBy(metrics, n, func(a, b *SchemaMetrics) bool { + return a.NodeID < b.NodeID + }) +} + +// TopSchemasByTier returns schemas sorted by codegen tier (red first, then yellow, then green). 
+func TopSchemasByTier(metrics map[string]*SchemaMetrics, codegen *CodegenReport, n int) []*SchemaMetrics { + return topSchemasBy(metrics, n, func(a, b *SchemaMetrics) bool { + tierA := CodegenGreen + if d, ok := codegen.PerSchema[a.NodeID]; ok { + tierA = d.Tier + } + tierB := CodegenGreen + if d, ok := codegen.PerSchema[b.NodeID]; ok { + tierB = d.Tier + } + return tierA > tierB // red (2) > yellow (1) > green (0) + }) +} + // ComplexityScore returns a composite complexity score for this schema. func (m *SchemaMetrics) ComplexityScore() int { score := m.FanIn + m.FanOut + m.DeepPropertyCount + m.CompositionDepth*3 + m.NestingDepth*2 @@ -129,13 +151,58 @@ func (m *SchemaMetrics) ComplexityScore() int { return score } +// ComplexityBreakdown returns a map of component names to their contribution to the complexity score. +func (m *SchemaMetrics) ComplexityBreakdown() []ScoreComponent { + var components []ScoreComponent + add := func(name string, value int) { + if value > 0 { + components = append(components, ScoreComponent{Name: name, Value: value}) + } + } + add("fan-in", m.FanIn) + add("fan-out", m.FanOut) + add("properties", m.DeepPropertyCount) + add("composition", m.CompositionDepth*3) + add("nesting", m.NestingDepth*2) + if m.InSCC { + add("in-SCC", 10) + } + add("cycle-membership", m.CycleMembership*5) + if m.VariantProduct > 1 { + vp := m.VariantProduct + logContrib := 0 + for vp > 1 { + logContrib++ + vp /= 2 + } + add("variant-explosion", logContrib*5) + } + if m.UnionSiteCount > 1 { + add("multi-union", m.UnionSiteCount*3) + } + return components +} + +// ScoreComponent is a named contribution to the complexity score. 
+type ScoreComponent struct { + Name string + Value int +} + func topSchemasBy(metrics map[string]*SchemaMetrics, n int, less func(a, b *SchemaMetrics) bool) []*SchemaMetrics { all := make([]*SchemaMetrics, 0, len(metrics)) for _, m := range metrics { all = append(all, m) } - sort.Slice(all, func(i, j int) bool { - return less(all[i], all[j]) + sort.SliceStable(all, func(i, j int) bool { + if less(all[i], all[j]) { + return true + } + if less(all[j], all[i]) { + return false + } + // Deterministic tie-break by name + return all[i].NodeID < all[j].NodeID }) if n > len(all) { n = len(all) diff --git a/cmd/openapi/internal/analyze/output.go b/cmd/openapi/internal/analyze/output.go index b729f940..ba3c254d 100644 --- a/cmd/openapi/internal/analyze/output.go +++ b/cmd/openapi/internal/analyze/output.go @@ -37,22 +37,29 @@ type JSONReport struct { // JSONSchemaEntry is the JSON form of per-schema analysis. type JSONSchemaEntry struct { - ID string `json:"id"` - Types []string `json:"types,omitempty"` - PropertyCount int `json:"propertyCount"` - DeepPropertyCount int `json:"deepPropertyCount,omitempty"` - FanIn int `json:"fanIn"` - FanOut int `json:"fanOut"` - NestingDepth int `json:"nestingDepth,omitempty"` - CompositionDepth int `json:"compositionDepth,omitempty"` - MaxUnionWidth int `json:"maxUnionWidth,omitempty"` - VariantProduct int `json:"variantProduct,omitempty"` - InSCC bool `json:"inSCC"` - CycleCount int `json:"cycleCount"` - ComplexityScore int `json:"complexityScore"` - Rank int `json:"rank"` - CodegenTier string `json:"codegenTier"` - Signals []string `json:"signals,omitempty"` + ID string `json:"id"` + Types []string `json:"types,omitempty"` + PropertyCount int `json:"propertyCount"` + DeepPropertyCount int `json:"deepPropertyCount,omitempty"` + FanIn int `json:"fanIn"` + FanOut int `json:"fanOut"` + NestingDepth int `json:"nestingDepth,omitempty"` + CompositionDepth int `json:"compositionDepth,omitempty"` + MaxUnionWidth int 
`json:"maxUnionWidth,omitempty"` + VariantProduct int `json:"variantProduct,omitempty"` + InSCC bool `json:"inSCC"` + CycleCount int `json:"cycleCount"` + ComplexityScore int `json:"complexityScore"` + Rank int `json:"rank"` + CodegenTier string `json:"codegenTier"` + Signals []JSONSignalEntry `json:"signals,omitempty"` +} + +// JSONSignalEntry is the JSON form of a codegen signal. +type JSONSignalEntry struct { + ID string `json:"id"` + Description string `json:"description"` + Severity string `json:"severity"` } // JSONCycleEntry is the JSON form of a cycle. @@ -114,7 +121,11 @@ func WriteJSON(w io.Writer, r *Report) error { if d, ok := r.Codegen.PerSchema[sm.NodeID]; ok { entry.CodegenTier = d.Tier.String() for _, s := range d.Signals { - entry.Signals = append(entry.Signals, s.ID) + entry.Signals = append(entry.Signals, JSONSignalEntry{ + ID: s.ID, + Description: s.Description, + Severity: s.Severity.String(), + }) } } jr.Schemas = append(jr.Schemas, entry) @@ -191,6 +202,17 @@ func WriteDOT(w io.Writer, r *Report) { fmt.Fprintf(w, "}\n") } +// WriteMermaid writes the schema reference graph as a Mermaid diagram. +func WriteMermaid(w io.Writer, r *Report) { + fmt.Fprintln(w, DAGOverviewToMermaid(r.Graph, r.Cycles, 0)) + + // Also output individual SCCs if present + for i := range r.Cycles.SCCs { + fmt.Fprintf(w, "\n%% SCC #%d\n", i+1) + fmt.Fprintln(w, SCCToMermaid(r.Graph, r.Cycles, i)) + } +} + // WriteText writes a human-readable text summary to the given writer. 
func WriteText(w io.Writer, r *Report) { fmt.Fprintf(w, "Schema Complexity Report: %s v%s (OpenAPI %s)\n", r.DocumentTitle, r.DocumentVersion, r.OpenAPIVersion) @@ -251,11 +273,21 @@ func WriteText(w io.Writer, r *Report) { fmt.Fprintf(w, "RED TIER SCHEMAS (%d)\n", len(reds)) for _, id := range reds { d := r.Codegen.PerSchema[id] - var sigs []string + fmt.Fprintf(w, " - %s\n", id) + for _, s := range d.Signals { + fmt.Fprintf(w, " [%s] %s (%s)\n", s.Severity, s.Description, s.ID) + } + } + fmt.Fprintln(w) + } + if len(yellows) > 0 { + fmt.Fprintf(w, "YELLOW TIER SCHEMAS (%d)\n", len(yellows)) + for _, id := range yellows { + d := r.Codegen.PerSchema[id] + fmt.Fprintf(w, " - %s\n", id) for _, s := range d.Signals { - sigs = append(sigs, s.ID) + fmt.Fprintf(w, " [%s] %s (%s)\n", s.Severity, s.Description, s.ID) } - fmt.Fprintf(w, " - %-30s [%s]\n", id, strings.Join(sigs, ", ")) } fmt.Fprintln(w) } diff --git a/cmd/openapi/internal/analyze/suggestions.go b/cmd/openapi/internal/analyze/suggestions.go index 4e99b06d..6ca7fb07 100644 --- a/cmd/openapi/internal/analyze/suggestions.go +++ b/cmd/openapi/internal/analyze/suggestions.go @@ -147,7 +147,7 @@ func describeEdgeCut(e *Edge, cyclesBroken int) string { if cyclesBroken == 1 { desc += "1 cycle" } else { - desc += string(rune('0'+cyclesBroken)) + " cycles" + desc += itoa(cyclesBroken) + " cycles" } return desc } diff --git a/cmd/openapi/internal/analyze/tui/keys.go b/cmd/openapi/internal/analyze/tui/keys.go index 5cdd71ea..664b9312 100644 --- a/cmd/openapi/internal/analyze/tui/keys.go +++ b/cmd/openapi/internal/analyze/tui/keys.go @@ -8,9 +8,10 @@ const ( TabSchemas TabCycles TabGraph + TabSuggestions ) -var tabNames = []string{"Summary", "Schemas", "Cycles", "Graph"} +var tabNames = []string{"Summary", "Schemas", "Cycles", "Graph", "Suggestions"} func (t Tab) String() string { if int(t) < len(tabNames) { diff --git a/cmd/openapi/internal/analyze/tui/model.go b/cmd/openapi/internal/analyze/tui/model.go index 
fde68694..6f3ae49b 100644 --- a/cmd/openapi/internal/analyze/tui/model.go +++ b/cmd/openapi/internal/analyze/tui/model.go @@ -17,6 +17,8 @@ const ( layoutBuffer = 2 ) +var schemaSortModes = []string{"complexity", "name", "fan-in", "fan-out", "tier"} + // Model is the top-level bubbletea model for the schema complexity analyzer TUI. type Model struct { report *analyze.Report @@ -31,9 +33,10 @@ type Model struct { expanded map[int]bool // expanded items in list views // Schema list state - schemaFilter string // "" = all, "red", "yellow" - schemaSort string // "name", "fan-in", "fan-out", "tier", "complexity" - schemaItems []string + schemaFilter string // "" = all, "red", "yellow" + schemaSortMode int // index into schemaSortModes + schemaItems []string + schemaRanks map[string]int // cached complexity ranks // Cycle list state cycleSelected int @@ -56,11 +59,20 @@ type Model struct { // NewModel creates a new TUI model from an analysis report. func NewModel(report *analyze.Report) Model { + // Pre-compute complexity ranks + ranked := analyze.TopSchemasByComplexity(report.Metrics, len(report.Metrics)) + ranks := make(map[string]int, len(ranked)) + for i, r := range ranked { + ranks[r.NodeID] = i + 1 + } + m := Model{ - report: report, - width: 80, - height: 24, - expanded: make(map[int]bool), + report: report, + width: 80, + height: 24, + expanded: make(map[int]bool), + schemaRanks: ranks, + graphEgoHops: 2, graphCache: make(map[string]string), } @@ -69,6 +81,13 @@ func NewModel(report *analyze.Report) Model { return m } +func (m Model) schemaRank(nodeID string) int { + if r, ok := m.schemaRanks[nodeID]; ok { + return r + } + return 0 +} + func (m *Model) rebuildGraphItems() { m.graphItems = nil m.graphCursor = 0 @@ -147,7 +166,22 @@ func (m *Model) rebuildGraphItems() { func (m *Model) rebuildSchemaItems() { m.schemaItems = nil - ranked := analyze.TopSchemasByComplexity(m.report.Metrics, len(m.report.Metrics)) + + var ranked []*analyze.SchemaMetrics + sortMode := 
schemaSortModes[m.schemaSortMode] + switch sortMode { + case "name": + ranked = analyze.TopSchemasByName(m.report.Metrics, len(m.report.Metrics)) + case "fan-in": + ranked = analyze.TopSchemasByFanIn(m.report.Metrics, len(m.report.Metrics)) + case "fan-out": + ranked = analyze.TopSchemasByFanOut(m.report.Metrics, len(m.report.Metrics)) + case "tier": + ranked = analyze.TopSchemasByTier(m.report.Metrics, m.report.Codegen, len(m.report.Metrics)) + default: + ranked = analyze.TopSchemasByComplexity(m.report.Metrics, len(m.report.Metrics)) + } + for _, sm := range ranked { if m.schemaFilter != "" { d := m.report.Codegen.PerSchema[sm.NodeID] @@ -341,6 +375,14 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.lastKeyAt = now } + case "s": + if m.activeTab == TabSchemas { + m.schemaSortMode = (m.schemaSortMode + 1) % len(schemaSortModes) + m.rebuildSchemaItems() + m.cursor = 0 + m.scrollOffset = 0 + } + case "f": if m.activeTab == TabSchemas { switch m.schemaFilter { @@ -373,6 +415,10 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { m.cursor = 0 m.scrollOffset = 0 m.graphCursor = 0 + case "5": + m.activeTab = TabSuggestions + m.cursor = 0 + m.scrollOffset = 0 } } @@ -399,6 +445,8 @@ func (m Model) View() string { content = m.renderCycleList() case TabGraph: content = m.renderGraphView() + case TabSuggestions: + content = m.renderSuggestionList() } s.WriteString(content) @@ -428,6 +476,11 @@ func (m Model) maxCursorForTab() int { return 0 } return len(m.report.Cycles.Cycles) - 1 + case TabSuggestions: + if len(m.report.Suggestions) == 0 { + return 0 + } + return len(m.report.Suggestions) - 1 default: return 0 } @@ -473,16 +526,27 @@ func (m Model) itemHeight(index int) int { if !m.expanded[index] { return 1 } - // Estimate card height based on content - h := 12 // base: title, tier, types, props, fan, complexity, border - if m.activeTab == TabSchemas && index < len(m.schemaItems) { - id := m.schemaItems[index] - if d, ok := 
m.report.Codegen.PerSchema[id]; ok && len(d.Signals) > 0 { - h += len(d.Signals) + 1 + + switch m.activeTab { + case TabSchemas: + // Estimate card height based on content + h := 12 // base: title, tier, types, props, fan, complexity, border + if index < len(m.schemaItems) { + id := m.schemaItems[index] + if d, ok := m.report.Codegen.PerSchema[id]; ok && len(d.Signals) > 0 { + h += len(d.Signals) + 1 + } + if edges := m.report.Graph.OutEdges[id]; len(edges) > 0 { + h += len(edges) + 1 + } } - if edges := m.report.Graph.OutEdges[id]; len(edges) > 0 { - h += len(edges) + 1 + return h + case TabSuggestions: + return 4 // description + schemas + blank + case TabCycles: + if index < len(m.report.Cycles.Cycles) { + return m.report.Cycles.Cycles[index].Length + 3 } } - return h + return 6 } diff --git a/cmd/openapi/internal/analyze/tui/schema_card.go b/cmd/openapi/internal/analyze/tui/schema_card.go index 876bd4e0..52c63c47 100644 --- a/cmd/openapi/internal/analyze/tui/schema_card.go +++ b/cmd/openapi/internal/analyze/tui/schema_card.go @@ -32,17 +32,12 @@ func (m Model) renderSchemaCard(nodeID string) string { // Pick border color based on tier borderColor := colorGreen - tierEmoji := "🟢" - tierName := "green" if d != nil { - tierName = strings.ToLower(d.Tier.String()) switch d.Tier { case analyze.CodegenYellow: borderColor = colorYellow - tierEmoji = "🟡" case analyze.CodegenRed: borderColor = colorRed - tierEmoji = "🔴" } } @@ -54,18 +49,22 @@ func (m Model) renderSchemaCard(nodeID string) string { content.WriteString(titleStyle.Render(nodeID) + "\n") // Tier + score + rank - ranked := analyze.TopSchemasByComplexity(m.report.Metrics, len(m.report.Metrics)) - rank := 0 - for i, r := range ranked { - if r.NodeID == nodeID { - rank = i + 1 - break - } - } + rank := m.schemaRank(nodeID) + tier := tierBadge(d) content.WriteString(fmt.Sprintf("%s %s %s %s %s %s\n", - StatLabel.Render("Tier:"), StatValue.Render(tierEmoji+" "+tierName), + StatLabel.Render("Tier:"), tier, 
StatLabel.Render("Score:"), StatValue.Render(fmt.Sprintf("%d", sm.ComplexityScore())), StatLabel.Render("Rank:"), StatValue.Render(fmt.Sprintf("#%d", rank)))) + + // Score breakdown + breakdown := sm.ComplexityBreakdown() + if len(breakdown) > 0 { + var parts []string + for _, c := range breakdown { + parts = append(parts, fmt.Sprintf("%s=%d", c.Name, c.Value)) + } + content.WriteString(StatLabel.Render(" "+strings.Join(parts, " + ")) + "\n") + } content.WriteString("\n") // Types + nullable diff --git a/cmd/openapi/internal/analyze/tui/styles.go b/cmd/openapi/internal/analyze/tui/styles.go index c4ec73fc..a6713fcd 100644 --- a/cmd/openapi/internal/analyze/tui/styles.go +++ b/cmd/openapi/internal/analyze/tui/styles.go @@ -12,7 +12,7 @@ const ( colorThemePurple = "#7C3AED" colorBackground = "#374151" colorDetailGray = "#9CA3AF" - colorFooterText = "#000000" + colorFooterText = "#E5E7EB" colorWhite = "#FFFFFF" colorCyan = "#06B6D4" colorOrange = "#F97316" @@ -77,11 +77,6 @@ var ( PaddingLeft(2). Foreground(lipgloss.Color(colorDetailGray)) - DetailHeader = lipgloss.NewStyle(). - Foreground(lipgloss.Color(colorWhite)). - Bold(true). - MarginTop(1) - // Suggestion styles SuggestionStyle = lipgloss.NewStyle(). PaddingLeft(2). @@ -92,9 +87,6 @@ var ( Foreground(lipgloss.Color(colorRed)). Bold(true) - OptionalEdge = lipgloss.NewStyle(). - Foreground(lipgloss.Color(colorGreen)) - ArrayEdge = lipgloss.NewStyle(). Foreground(lipgloss.Color(colorBlue)) @@ -123,32 +115,10 @@ var ( Align(lipgloss.Center). Width(46) - // Box styles for dashboard cards - CardStyle = lipgloss.NewStyle(). - Border(lipgloss.RoundedBorder()). - BorderForeground(lipgloss.Color(colorGray)). - Padding(0, 1) - CardTitleStyle = lipgloss.NewStyle(). Foreground(lipgloss.Color(colorThemePurple)). Bold(true) ScrollIndicatorStyle = lipgloss.NewStyle(). Foreground(lipgloss.Color(colorGray)) - - // Card border styles by tier - CardBorderGreen = lipgloss.NewStyle(). - Border(lipgloss.RoundedBorder()). 
- BorderForeground(lipgloss.Color(colorGreen)). - Padding(0, 1) - - CardBorderYellow = lipgloss.NewStyle(). - Border(lipgloss.RoundedBorder()). - BorderForeground(lipgloss.Color(colorYellow)). - Padding(0, 1) - - CardBorderRed = lipgloss.NewStyle(). - Border(lipgloss.RoundedBorder()). - BorderForeground(lipgloss.Color(colorRed)). - Padding(0, 1) ) diff --git a/cmd/openapi/internal/analyze/tui/views.go b/cmd/openapi/internal/analyze/tui/views.go index 78c60964..c74055ac 100644 --- a/cmd/openapi/internal/analyze/tui/views.go +++ b/cmd/openapi/internal/analyze/tui/views.go @@ -150,7 +150,8 @@ func (m Model) renderSchemaList() string { case "red": filterLabel = RedBadge.Render("red only") } - s.WriteString(StatLabel.Render(fmt.Sprintf(" Filter: %s (%d schemas) [f] to cycle filter", filterLabel, len(m.schemaItems))) + "\n\n") + sortLabel := schemaSortModes[m.schemaSortMode] + s.WriteString(StatLabel.Render(fmt.Sprintf(" Filter: %s Sort: %s (%d schemas)", filterLabel, sortLabel, len(m.schemaItems))) + "\n\n") // Header — pad before styling so ANSI escapes don't break alignment s.WriteString(fmt.Sprintf(" %s %s %s %s %s %s\n", @@ -292,19 +293,78 @@ func (m Model) renderCycleDetail(c *analyze.Cycle) string { } +func (m Model) renderSuggestionList() string { + var s strings.Builder + + suggestions := m.report.Suggestions + if len(suggestions) == 0 { + s.WriteString(GreenBadge.Render(" No suggestions — the schema graph looks good!") + "\n") + return s.String() + } + + s.WriteString(StatLabel.Render(fmt.Sprintf(" %d suggestions (sorted by impact)", len(suggestions))) + "\n\n") + + contentH := m.contentHeight() - 2 + linesRendered := 0 + + for i := m.scrollOffset; i < len(suggestions) && linesRendered < contentH; i++ { + sg := suggestions[i] + + prefix := " " + rowStyle := NormalRow + if i == m.cursor { + prefix = "> " + rowStyle = SelectedRow + } + + typeLabel := StatLabel.Render("[" + string(sg.Type) + "]") + line := fmt.Sprintf("%s%s %s %s", + prefix, typeLabel, sg.Title, 
StatHighlight.Render(fmt.Sprintf("impact=%d", sg.Impact))) + s.WriteString(rowStyle.Render(line) + "\n") + linesRendered++ + + if m.expanded[i] { + // Description + s.WriteString(DetailStyle.Render(" "+sg.Description) + "\n") + linesRendered++ + + // Affected schemas + if len(sg.AffectedSchemas) > 0 { + schemas := strings.Join(sg.AffectedSchemas, ", ") + s.WriteString(StatLabel.Render(" Schemas: ") + StatValue.Render(schemas) + "\n") + linesRendered++ + } + s.WriteString("\n") + linesRendered++ + } + } + + if m.scrollOffset > 0 { + s.WriteString(ScrollIndicatorStyle.Render(" ... more above") + "\n") + } + if m.scrollOffset+contentH < len(suggestions) { + s.WriteString(ScrollIndicatorStyle.Render(" ... more below") + "\n") + } + + return s.String() +} + func (m Model) renderFooter() string { var parts []string parts = append(parts, "q:quit") parts = append(parts, "tab:next view") - parts = append(parts, "1-4:jump to view") + parts = append(parts, "1-5:jump to view") parts = append(parts, "?:help") switch m.activeTab { case TabSchemas: - parts = append(parts, "f:filter") + parts = append(parts, "f:filter tier") + parts = append(parts, "s:sort") parts = append(parts, "enter:expand") case TabCycles: parts = append(parts, "enter:expand") + case TabSuggestions: + parts = append(parts, "enter:expand") case TabGraph: parts = append(parts, "j/k:navigate") parts = append(parts, "enter:focus") @@ -329,7 +389,7 @@ func (m Model) renderHelp() string { {"q / Ctrl-C", "Quit"}, {"Tab / l", "Next tab"}, {"Shift-Tab / h", "Previous tab"}, - {"1-4", "Jump to tab"}, + {"1-5", "Jump to tab"}, {"j / Down", "Move down"}, {"k / Up", "Move up"}, {"gg", "Jump to top"}, @@ -337,7 +397,8 @@ func (m Model) renderHelp() string { {"Ctrl-D", "Scroll down half page"}, {"Ctrl-U", "Scroll up half page"}, {"Enter / Space", "Expand/collapse or focus node"}, - {"f", "Cycle filter (schemas tab)"}, + {"f", "Cycle tier filter (schemas tab)"}, + {"s", "Cycle sort mode (schemas tab)"}, {"m", "Cycle graph 
mode (graph tab)"}, {"n / p", "Next/prev SCC (SCC gallery)"}, {"+ / -", "Increase/decrease ego hops"},