From ab3ae8bcdd6394834b86005ec362646ecdbae8af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Mon, 23 Feb 2026 20:33:26 +0000 Subject: [PATCH 01/46] feat: add architecture agent for data-driven design tooling --- docs/design/023-M11-architecture-agent.md | 1090 +++++++++++++++++++++ 1 file changed, 1090 insertions(+) create mode 100644 docs/design/023-M11-architecture-agent.md diff --git a/docs/design/023-M11-architecture-agent.md b/docs/design/023-M11-architecture-agent.md new file mode 100644 index 00000000..d4a39f88 --- /dev/null +++ b/docs/design/023-M11-architecture-agent.md @@ -0,0 +1,1090 @@ +# Architecture Agent โ€” Data-Driven Design Tooling + +**Version:** 0.1 +**Status:** ๐Ÿ“‹ Proposed +**Last Updated:** February 22, 2026 +**Milestone:** M11 โ€” Architecture Agent +**Depends On:** [009-M4-mcp-integration](009-M4-mcp-integration.md) + +--- + +## Summary + +Add a **conversational architecture agent** to AimDB's MCP server. A developer +describes a system in natural language; the agent reasons over it using AimDB's +buffer semantics; a running instance is derived as a direct consequence โ€” with no +manual graph editing, no boilerplate, and no configuration the user has to set up +themselves. + +The Mermaid diagram, the DSL, and the running instance are all **outputs** of +a conversation โ€” not inputs to one. + +**Core principle:** The graph is a **projection** โ€” a read-only artifact that +reflects what was decided in conversation. The agent is the only thing that +writes to it. + +--- + +## Motivation + +Setting up an AimDB instance today requires writing Rust: defining `RecordKey` +enums, choosing buffer types, wiring connectors, configuring producers and +consumers. This is correct for production code, but it creates a barrier during +the ideation phase โ€” the point where a developer is still figuring out *what* +the data architecture should be. + +The architecture agent removes that barrier. 
It lets a developer think in terms +of data flows ("I have 50 sensors reporting temperature every 100ms, a dashboard +shows live readings, anomalies trigger alerts") and have the agent resolve those +flows into concrete AimDB primitives. The developer confirms each decision; the +agent emits the code. + +--- + +## Core Principles + +**Ideation first.** No user should ever touch a graph editor. The Mermaid +diagram, the DSL, and the running instance are all outputs of a conversation โ€” +not inputs to one. + +**Skills travel with the server.** Architecture agent capabilities are served by +the AimDB MCP server, not configured by the user. Connecting to the server is +the entire setup. + +**The graph is a projection.** Mermaid is a read-only rendering of structured +agent state. It is useful for inspection and communication, never for authoring. + +**Proposals, not silent commits.** Every record addition, buffer type choice, and +connector assignment is a proposal the human explicitly confirms. The graph is a +record of human intent, not agent inference. + +**Ambiguity is resolved by questions, not assumptions.** When intent maps to +multiple valid architectures, the agent asks exactly the right question to +resolve it โ€” grounded in AimDB's buffer semantics, not generic clarification. + +--- + +## Architecture Overview + +``` +[ Human: natural language ideation ] + | +[ Copilot + AimDB MCP: architecture agent ] + | +[ Structured decision state: .aimdb/state.toml ] + | +[ aimdb-codegen library (shared) ]----------. + | | + | used by | used by + v v +[ aimdb-cli: `aimdb generate` ] [ aimdb-mcp: on resolve_proposal ] + | + v +[ .aimdb/architecture.mermaid ] [ src/generated_schema.rs ] + | + [ cargo watch -> running instance ] + | + [ AimX / MCP: live feedback ] + ^ + ^---------------------------' +[ Agent surfaces conflicts and metrics inline ] +``` + +Every layer derives from the one above it. The human only ever interacts with +the top layer. 
+ +The **codegen library** (`aimdb-codegen`) is a pure Rust crate with no runtime +dependencies. It reads `state.toml` and emits Mermaid and Rust source files. +Both the CLI and the MCP server consume it โ€” the CLI for standalone use +(`cargo watch`, CI, manual invocation) and the MCP server for inline generation +when a proposal is confirmed. + +--- + +## Data Model Primitives + +AimDB's three buffer types are the semantic vocabulary the agent reasons in. +Every architectural question resolves to one of these. These map directly to +`BufferCfg` in `aimdb-core/src/buffer/cfg.rs`. + +| Buffer | `BufferCfg` variant | Pattern | When the agent chooses it | +|--------|-------------------|---------|--------------------------| +| **SpmcRing** | `SpmcRing { capacity }` | High-frequency stream, multiple independent consumers | Telemetry, interaction events, logs โ€” data where every value matters and multiple systems read independently | +| **SingleLatest** | `SingleLatest` | Current state, newest value only | Configuration, experiment variants, UI state โ€” data where only the present value is relevant | +| **Mailbox** | `Mailbox` | Command channel, latest wins, single slot | Device control, OTA commands, actuation โ€” data where the latest instruction supersedes all prior ones | + +These are not IoT-specific. They are universal primitives that underlie all +data-driven systems. + +--- + +## Tooling Components + +### 1. AimDB MCP Server โ€” Extended + +The existing MCP server (`tools/aimdb-mcp`) currently exposes 10 tools, a +dynamic set of resources (one static `aimdb://instances` plus per-instance +record resources), and 2 prompts for runtime introspection. The architecture agent +extends it with design-time capabilities. + +#### New Tools + +Agent-callable functions for reading and mutating the architecture state. 
+ +| Tool | Description | +|------|-------------| +| `get_architecture` | Current architecture graph as structured data from `state.toml` | +| `propose_change` | Add a record, modify a buffer type, add a connector โ€” any mutation is a proposal pending human confirmation | +| `resolve_proposal` | Human acceptance (`confirm`), rejection (`reject`), or redirect (`revise`) โ€” a single tool with an explicit status enum, not separate confirm/reject tools | +| `remove_record` | Propose removal of an existing record (cascades through Mermaid and codegen) | +| `rename_record` | Propose renaming a record (updates all references in `state.toml`, Mermaid, and codegen) | +| `validate_against_instance` | Check proposed `state.toml` against live instance via AimX | +| `get_buffer_metrics` | Live metrics for a given record key (delegates to existing `record_info` tool) | + +**Why `resolve_proposal` instead of `confirm_proposal` / `reject_proposal`:** +A single tool with an explicit status enum (`confirm | reject | revise`) +prevents the agent from having to decide which tool to call based on parsing +ambiguous chat text like "sure, but...". The status is always unambiguous. 
```rust
#[derive(Debug, Serialize, Deserialize)]
pub struct ResolveProposalParams {
    pub proposal_id: String,
    pub resolution: ProposalResolution,
    /// Optional redirect message when resolution is `revise`
    pub redirect: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ProposalResolution {
    Confirm,
    Reject,
    Revise,
}
```

#### New Resources

| URI | Description |
|-----|-------------|
| `aimdb://architecture` | Current architecture as Mermaid (generated from `state.toml`) |
| `aimdb://architecture/state` | Raw `state.toml` as structured data |
| `aimdb://architecture/conflicts` | Schema conflicts between `state.toml` and running instance |
| `aimdb://architecture/decisions` | Decision rationale log from `state.toml` |

These complement the existing resources (`discover`, `connections`,
`records/{socket}`, etc.) — they do not replace them.

#### New Prompts

| Prompt | Description |
|--------|-------------|
| `architecture_agent` | Full system prompt encoding AimDB design skills: buffer type semantics, question patterns, proposal format, confirmation protocol. Versioned alongside AimDB. |
| `resolve_buffer_type` | Disambiguation question patterns organised by data shape. Used by the agent to ask exactly the right question when intent is ambiguous. |
| `propose_record` | Structured template for record proposals. Ensures consistent format the human can quickly scan. |
| `breaking_change_review` | Safety protocol for schema evolution — what to check when a buffer type change or record removal could break the running instance. |
| `onboarding` | Guided first architecture session — walks the user through describing their system. |

The `architecture_agent` prompt is the core artifact. It encodes everything the
agent needs to reason about AimDB architectures. 
It is served by the MCP server +โ€” when it improves, all connected users benefit without any local configuration +change. + +--- + +### 2. Architecture Agent โ€” Behaviour Spec + +#### Startup + +On session start, the agent: + +1. Reads `aimdb://architecture/state` to load existing decisions +2. Reads `aimdb://architecture` to understand the current topology +3. Reads `aimdb://architecture/conflicts` to surface any drift between state + and running instance +4. Briefly summarises the current architecture if one exists, or asks where to + begin if not + +#### Ideation Loop + +``` +Human describes intent (any form, any specificity) + | +Agent identifies ambiguities that affect buffer type or topology + | +Agent asks one targeted question (never multiple at once) + | +Human responds + | +Agent proposes a concrete record or topology change via propose_change + | +Human confirms, rejects, or redirects via resolve_proposal + | +On confirmation: agent updates state.toml, Mermaid regenerates, codegen emits +``` + +#### Ambiguity Resolution Patterns + +The agent has a specific question for each common ambiguity. These questions are +not generic โ€” each one resolves directly to a buffer type, topology, or data +model decision. + +| Ambiguity | Agent asks | Resolves to | +|-----------|-----------|-------------| +| Read frequency unclear | "Does the consumer need every value, or just the latest at any given moment?" | `SpmcRing` vs `SingleLatest` | +| Consumer count unclear | "Is there one consumer for this data, or multiple independent ones?" | Topology (fan-out) | +| Direction unclear | "Does this flow from device to cloud, cloud to device, or both?" | `link_to` vs `link_from` | +| Cardinality unclear | "Is this one record shared across all devices, or one per device? Which specific devices?" | Concrete `key_variants` list | +| Buffering depth unclear | "If two values arrive before the consumer processes them, should the consumer see both, or only the latest?" 
| `SpmcRing` (both) vs `SingleLatest` / `Mailbox` (latest only) | +| Command vs state unclear | "Is this data passive state the consumer reads on demand, or an actionable command the consumer must process?" | `SingleLatest` (state) vs `Mailbox` (command) | +| Data model unclear | "Do you have a datasheet, API spec, or protocol doc I can look at for the field definitions?" | Value struct fields and types | + +#### Data Model Derivation + +The agent doesn't guess value types โ€” it derives them from source material the +user provides: + +- **Datasheets** โ€” e.g. "SHT31 sensor" -> agent extracts calibrated temperature + (f64 Celsius), humidity (f64 percent), reads the register map to understand + which fields are meaningful at the application level +- **API documentation** โ€” e.g. "firmware update REST API" -> agent extracts the + response schema fields (version, checksum, release date) +- **Protocol specs** โ€” e.g. "KNX DPT 9.001" -> agent maps the data point type + to a Rust field type +- **Conversation** โ€” when no formal spec exists, the agent asks targeted + questions: "What fields does this reading contain? What units?" + +The agent proposes the struct fields as part of the record proposal. The human +confirms the data model alongside the buffer type and topology. + +#### Proposal Format + +Every proposal follows a consistent structure the human can quickly scan: + +``` +Proposing: TemperatureReading + + Buffer: SpmcRing(256) + Reason: Anomaly detector needs a sample window; + dashboard consumes independently + + Value struct (derived from SHT31 datasheet): + celsius: f64 โ€” Temperature in degrees Celsius + humidity_percent: f64 โ€” Relative humidity 0-100 + timestamp: u64 โ€” Unix timestamp in milliseconds + + Key prefix: "sensors.temp." + Variants: indoor, outdoor, garage + + Connector: mqtt link_to "mqtt://sensors/temp/{variant}" + + Produces: sensor_task + Consumes: dashboard, anomaly_detector + +Add this record? 
[confirm / reject / revise] +``` + +--- + +### 3. Structured Decision State + +The agent maintains a structured state file at `.aimdb/state.toml`. This is the +memory layer โ€” it persists across sessions and is the source of truth for both +the Mermaid projection and the Rust codegen. + +#### Schema + +```toml +# .aimdb/state.toml + +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +# --- Records --- + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor", "outdoor", "garage"] +producers = ["sensor_task"] +consumers = ["dashboard", "anomaly_detector"] + + # Value schema โ€” the agent derives this from datasheets, APIs, or conversation. + # Codegen generates a Rust struct with these fields. + [[records.fields]] + name = "celsius" + type = "f64" + description = "Temperature in degrees Celsius" + + [[records.fields]] + name = "humidity_percent" + type = "f64" + description = "Relative humidity 0-100" + + [[records.fields]] + name = "timestamp" + type = "u64" + description = "Unix timestamp in milliseconds" + + [[records.connectors]] + protocol = "mqtt" + direction = "outbound" # link_to + url = "mqtt://sensors/temp/{variant}" + +[[records]] +name = "FirmwareVersion" +buffer = "SingleLatest" +key_prefix = "device.firmware." +key_variants = ["gateway-01", "gateway-02", "sensor-hub-01"] +producers = ["cloud_ota_service"] +consumers = ["device_update_task"] + + [[records.fields]] + name = "version" + type = "String" + description = "Semantic version string (e.g. 
2.1.0)" + + [[records.fields]] + name = "release_date" + type = "String" + description = "ISO 8601 date of the firmware release" + + [[records.fields]] + name = "checksum" + type = "String" + description = "SHA-256 checksum of the firmware binary" + + [[records.connectors]] + protocol = "mqtt" + direction = "inbound" # link_from + url = "mqtt://config/firmware/{variant}" + +[[records]] +name = "OtaCommand" +buffer = "Mailbox" +key_prefix = "device.ota." +key_variants = ["gateway-01", "gateway-02", "sensor-hub-01"] +producers = ["cloud_ota_service"] +consumers = ["device_update_task"] + + [[records.fields]] + name = "action" + type = "String" + description = "Command action: 'update', 'rollback', 'reboot'" + + [[records.fields]] + name = "target_version" + type = "String" + description = "Version to update to (for 'update' action)" + + [[records.connectors]] + protocol = "mqtt" + direction = "inbound" + url = "mqtt://ota/cmd/{variant}" + +# --- Decision Log --- + +[[decisions]] +record = "TemperatureReading" +field = "buffer" +chosen = "SpmcRing" +alternative = "SingleLatest" +reason = "Anomaly detector needs a sample window; dashboard consumes independently" +timestamp = "2026-02-22T14:20:00Z" + +[[decisions]] +record = "FirmwareVersion" +field = "buffer" +chosen = "SingleLatest" +alternative = "Mailbox" +reason = "Target version is state, not a command โ€” only current target matters, not history of assignments" +timestamp = "2026-02-22T14:33:00Z" + +[[decisions]] +record = "TemperatureReading" +field = "fields" +chosen = "celsius + humidity_percent + timestamp" +alternative = "raw ADC value" +reason = "User provided SHT31 datasheet โ€” sensor outputs calibrated temperature and humidity, not raw ADC" +timestamp = "2026-02-22T14:15:00Z" +``` + +**Key design choice: all key variants are concrete and resolved by the agent.** + +There is no `key_strategy` distinction. 
The agent's job during ideation is to +resolve the full set of variants โ€” whether that's three rooms, fifty sensors, +or three devices. "One per device" is not a codegen concern; it is a +conversation the agent has with the human: "Which devices? What are their IDs?" + +If the user says "one per device" without listing them, the agent asks: "Which +devices should I include? I need the concrete IDs for code generation." The +agent may also derive device lists from a fleet manifest, config file, or API +response the user provides. + +**The `fields` table defines the value struct.** The agent derives this from +datasheets, API documentation, protocol specs, or conversation. Each record has +a typed struct โ€” codegen generates the Rust `struct` with `Serialize` / +`Deserialize` derives. The agent decides which fields to include and what types +to use based on the source material. + +**The `decisions` table** is the architectural rationale layer. The agent writes +to it on every confirmed proposal โ€” including field-level decisions like "used +calibrated Celsius from the SHT31 datasheet rather than raw ADC values." It is +human-readable, diffable, and queryable by the agent to avoid re-litigating +settled decisions. + +--- + +### 4. Mermaid Projection + +The Mermaid file is generated, never authored. It lives at +`.aimdb/architecture.mermaid` and is regenerated by the codegen step on every +confirmed decision. + +#### Conventions + +Node shapes encode buffer type. Arrow styles encode data flow vs connector +metadata. + +```mermaid +flowchart LR + + %% Node shapes encode buffer type + %% (["Name\nSpmcRing . capacity"]) = ring buffer (stadium) + %% ("Name\nSingleLatest") = state (rounded rect) + %% {"Name\nMailbox"} = command (diamond) + + TEMP(["TemperatureReading\nSpmcRing . 
256"]) + FW_VER("FirmwareVersion\nSingleLatest") + OTA_CMD{"OtaCommand\nMailbox"} + + %% Solid arrows = data flow (produce/consume) + sensor_task -->|produce| TEMP + TEMP -->|consume| dashboard + TEMP -->|consume| anomaly_detector + + cloud_ota_service -->|produce| FW_VER + cloud_ota_service -->|produce| OTA_CMD + FW_VER -->|consume| device_update_task + OTA_CMD -->|consume| device_update_task + + %% Dashed arrows = connector metadata (link_to / link_from) + TEMP -.->|"link_to mqtt://sensors/temp/{variant}"| MQTT + MQTT -.->|"link_from mqtt://config/firmware/{variant}"| FW_VER + MQTT -.->|"link_from mqtt://ota/cmd/{variant}"| OTA_CMD +``` + +These conventions are defined in `.aimdb/CONVENTIONS.md` and referenced by the +`architecture_agent` prompt. + +#### Properties + +- Renders natively in GitHub markdown โ€” no plugin, no server +- Read-only โ€” never edited by the human +- Diffable โ€” git history of `.aimdb/architecture.mermaid` is the history of + architectural decisions +- Derivable โ€” can always be regenerated from `state.toml` + +--- + +### 5. Codegen Pipeline + +#### Crate Structure + +Codegen lives in a shared library crate (`aimdb-codegen`) consumed by both the +CLI and the MCP server: + +``` +aimdb/ + aimdb-codegen/ # New crate: pure codegen library + src/ + lib.rs # Public API: generate_mermaid(), generate_rust() + state.rs # state.toml parser and schema types + mermaid.rs # state.toml -> Mermaid renderer + rust.rs # state.toml -> Rust source renderer + tools/ + aimdb-cli/ # Existing CLI โ€” gains `aimdb generate` subcommand + src/commands/generate.rs # New: invokes aimdb-codegen library + aimdb-mcp/ # Existing MCP server โ€” calls aimdb-codegen on confirm +``` + +**`aimdb-codegen` has no runtime dependencies.** It reads `state.toml` (via +`toml` + `serde`) and emits strings. No AimX connection, no async, no tokio. +This keeps it fast and testable. + +**Two consumers:** + +- **`aimdb generate`** (CLI) โ€” The primary interface for developers and CI. 
  Can be wired into `cargo watch -s "aimdb generate"`. Also usable standalone
  for one-shot generation, dry-run validation, or pre-commit hooks.
- **`aimdb-mcp`** (MCP server) — Calls `aimdb_codegen::generate_mermaid()` and
  `aimdb_codegen::generate_rust()` directly (library call, no subprocess) when
  `resolve_proposal` confirms a change. The MCP server writes the files and
  reports the result inline.

#### CLI Integration

The `aimdb generate` subcommand extends the existing CLI alongside `instance`,
`record`, `graph`, and `watch`:

```
aimdb generate [OPTIONS]

Options:
  --state <PATH>    Path to state.toml [default: .aimdb/state.toml]
  --mermaid <PATH>  Output path for Mermaid [default: .aimdb/architecture.mermaid]
  --rust <PATH>     Output path for Rust [default: src/generated_schema.rs]
  --check           Validate state.toml without writing files (for CI)
  --dry-run         Print generated output to stdout instead of writing files
```

`cargo watch` integration:

```bash
cargo watch -s "aimdb generate && cargo check"
```

#### Emit Targets

Two emit targets from `state.toml`:

**Mermaid** — written to `.aimdb/architecture.mermaid`

**AimDB Rust** — written to `src/generated_schema.rs`

The generated code uses the **actual AimDB API** — `#[derive(RecordKey)]` with
`#[key = "..."]`, `#[key_prefix = "..."]`, and `#[link_address = "..."]`
attributes; `AimDbBuilder` with `.configure()`, `BufferCfg`, `.link_to()`, and
`.link_from()`:

```rust
// Generated by aimdb-codegen — do not edit manually
// Source: .aimdb/state.toml — edit via architecture agent

use aimdb_core::buffer::BufferCfg;
use aimdb_derive::RecordKey;
use serde::{Serialize, Deserialize};

// --- TemperatureReading ---
// Buffer: SpmcRing(256)
// Decision: "Anomaly detector needs a sample window;
//            dashboard consumes independently"
// Fields derived from: SHT31 datasheet

/// Temperature and humidity reading from SHT31 sensor. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TemperatureReadingValue { + /// Temperature in degrees Celsius + pub celsius: f64, + /// Relative humidity 0-100 + pub humidity_percent: f64, + /// Unix timestamp in milliseconds + pub timestamp: u64, +} + +#[derive(RecordKey, Clone, Copy, PartialEq, Eq)] +#[key_prefix = "sensors.temp."] +pub enum TemperatureReadingKey { + #[key = "indoor"] + #[link_address = "mqtt://sensors/temp/indoor"] + Indoor, + + #[key = "outdoor"] + #[link_address = "mqtt://sensors/temp/outdoor"] + Outdoor, + + #[key = "garage"] + #[link_address = "mqtt://sensors/temp/garage"] + Garage, +} + +// --- FirmwareVersion --- +// Buffer: SingleLatest +// Decision: "Target version is state, not a command โ€” +// only current target matters" + +/// Firmware version state for a device. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FirmwareVersionValue { + /// Semantic version string (e.g. 2.1.0) + pub version: String, + /// ISO 8601 date of the firmware release + pub release_date: String, + /// SHA-256 checksum of the firmware binary + pub checksum: String, +} + +#[derive(RecordKey, Clone, Copy, PartialEq, Eq)] +#[key_prefix = "device.firmware."] +pub enum FirmwareVersionKey { + #[key = "gateway-01"] + #[link_address = "mqtt://config/firmware/gateway-01"] + Gateway01, + + #[key = "gateway-02"] + #[link_address = "mqtt://config/firmware/gateway-02"] + Gateway02, + + #[key = "sensor-hub-01"] + #[link_address = "mqtt://config/firmware/sensor-hub-01"] + SensorHub01, +} + +// --- OtaCommand --- +// Buffer: Mailbox + +/// OTA command sent from cloud to a specific device. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OtaCommandValue { + /// Command action: 'update', 'rollback', 'reboot' + pub action: String, + /// Version to update to (for 'update' action) + pub target_version: String, +} + +#[derive(RecordKey, Clone, Copy, PartialEq, Eq)] +#[key_prefix = "device.ota."] +pub enum OtaCommandKey { + #[key = "gateway-01"] + #[link_address = "mqtt://ota/cmd/gateway-01"] + Gateway01, + + #[key = "gateway-02"] + #[link_address = "mqtt://ota/cmd/gateway-02"] + Gateway02, + + #[key = "sensor-hub-01"] + #[link_address = "mqtt://ota/cmd/sensor-hub-01"] + SensorHub01, +} +``` + +A separate `configure_schema` function wires the records into an `AimDbBuilder`: + +```rust +use aimdb_core::builder::AimDbBuilder; +use aimdb_core::buffer::BufferCfg; +use aimdb_executor::Spawn; + +/// Register all architecture-agent-defined records on the builder. +/// +/// This function is generated from .aimdb/state.toml. It configures +/// buffer types and connectors. Producers and consumers (source/tap +/// closures) must be provided by application code โ€” they are not +/// generated because they contain business logic. 
pub fn configure_schema<R: Spawn>(
    builder: &mut AimDbBuilder<R>,
) {
    // TemperatureReading — SpmcRing(256), outbound MQTT
    for key in [
        TemperatureReadingKey::Indoor,
        TemperatureReadingKey::Outdoor,
        TemperatureReadingKey::Garage,
    ] {
        builder.configure::<TemperatureReadingValue>(key, |reg| {
            reg.buffer_cfg(BufferCfg::SpmcRing { capacity: 256 });
            if let Some(addr) = key.link_address() {
                reg.link_to(addr)
                // Serializer must be provided by application code
                ;
            }
        });
    }

    // FirmwareVersion — SingleLatest, inbound MQTT
    for key in [
        FirmwareVersionKey::Gateway01,
        FirmwareVersionKey::Gateway02,
        FirmwareVersionKey::SensorHub01,
    ] {
        builder.configure::<FirmwareVersionValue>(key, |reg| {
            reg.buffer_cfg(BufferCfg::SingleLatest);
            if let Some(addr) = key.link_address() {
                reg.link_from(addr)
                // Deserializer must be provided by application code
                ;
            }
        });
    }

    // OtaCommand — Mailbox, inbound MQTT
    for key in [
        OtaCommandKey::Gateway01,
        OtaCommandKey::Gateway02,
        OtaCommandKey::SensorHub01,
    ] {
        builder.configure::<OtaCommandValue>(key, |reg| {
            reg.buffer_cfg(BufferCfg::Mailbox);
            if let Some(addr) = key.link_address() {
                reg.link_from(addr)
                ;
            }
        });
    }
}
```

**What codegen generates per record:**

1. A **value struct** (`TemperatureReadingValue`) with `Serialize` /
   `Deserialize` derives — fields and types come from `records.fields` in
   `state.toml`
2. A **key enum** (`TemperatureReadingKey`) with `#[derive(RecordKey)]` — one
   variant per entry in `key_variants`
3. A **registration block** in `configure_schema` — wires buffer type,
   connectors, and the correct value type `T`

**What codegen does NOT generate:** Producer closures (`source`/`source_raw`),
consumer closures (`tap`/`tap_raw`), serializers, deserializers, and transform
pipelines. These contain business logic that belongs in application code. The
generated `configure_schema` provides the skeleton; the developer fills in the
behaviour.

`cargo watch` picks up the change. 
The running instance updates. The AimDB MCP +server reflects the new state. The agent reads it back and surfaces any +conflicts inline. + +--- + +### 6. Conflict Detection + +The `aimdb://architecture/conflicts` resource compares `state.toml` against the +running instance (via AimX `record.list`). Conflicts are categorised: + +| Conflict type | Description | Severity | +|---------------|-------------|----------| +| **missing_in_instance** | Record exists in `state.toml` but not in running instance | Warning โ€” codegen may not have run yet | +| **missing_in_state** | Record exists in running instance but not in `state.toml` | Info โ€” manually registered record, not agent-managed | +| **buffer_mismatch** | `state.toml` says `SpmcRing` but instance reports `single_latest` | Error โ€” likely a stale build | +| **capacity_mismatch** | `state.toml` says capacity 256 but instance reports 1024 | Warning โ€” may be intentional override | +| **connector_mismatch** | Connector URL in `state.toml` doesn't match instance metadata | Warning | + +The agent surfaces conflicts on startup and before confirming any proposal that +touches an affected record. It does **not** attempt automatic remediation โ€” +conflicts are reported inline and the human decides what to do. + +--- + +### 7. Record Deletion and Renaming + +The ideation loop covers adding records and changing buffer types. It must also +handle removal and renaming. + +#### Deletion + +When the user says "remove TemperatureReading": + +1. Agent proposes deletion via `remove_record` tool +2. Proposal includes: affected consumers, affected connectors, and a warning if + the record exists in the running instance +3. On confirmation: record is removed from `state.toml`, Mermaid regenerates, + `src/generated_schema.rs` regenerates (the enum and its `configure_schema` + block are removed) +4. 
**Application code that references the deleted type will fail to compile.** + This is intentional โ€” the compiler catches dangling references. The agent + notes this in the proposal. + +#### Renaming + +When the user says "rename TemperatureReading to SensorReading": + +1. Agent proposes rename via `rename_record` tool +2. On confirmation: all references in `state.toml` are updated (including in + `decisions` entries), Mermaid regenerates, codegen regenerates with new + enum name +3. **Application code that references the old type will fail to compile.** + Same rationale as deletion โ€” the compiler is the migration tool. + +Both operations add a `decisions` entry recording what was removed/renamed and +why. + +--- + +### 8. VS Code Integration + +The VS Code surface is thin by design. It does not add a graph editor. It does +not add a canvas. It adds three things: + +**Status bar indicator** โ€” shows whether the running instance is in sync with +`state.toml`. Green = in sync. Orange = codegen pending. Red = schema conflict +detected. This is a minimal VS Code extension that reads +`aimdb://architecture/conflicts` on a timer. + +**Mermaid preview** โ€” VS Code's built-in Mermaid support (or the Markdown +Preview Mermaid extension) renders `.aimdb/architecture.mermaid` automatically. +No custom panel needed. + +**MCP connection** โ€” the user adds one entry to their VS Code MCP config: + +```json +{ + "mcpServers": { + "aimdb": { + "command": "aimdb-mcp", + "args": ["--connect", "localhost:7777"] + } + } +} +``` + +Everything else โ€” the agent skills, the prompt, the tools, the resources โ€” +arrives from the server. The user configuration is one JSON block. That is the +entire setup. + +--- + +## User Experience + +### First Session + +``` +User connects MCP server -> Copilot loads architecture_agent prompt + +Agent: No architecture found. Tell me about the system you're building โ€” + what data exists, where it comes from, and where it needs to go. 
+ +User: I have 3 SHT31 sensors (indoor, outdoor, garage) reporting every 100ms. + A dashboard shows live readings. Anomalies trigger cloud alerts. + +Agent: I know the SHT31 โ€” it outputs calibrated temperature and relative + humidity. One question: + Does the dashboard need every reading, or just the current + value per sensor? + +User: Just the current value is fine. + +Agent: Proposing: TemperatureReading + + Buffer: SingleLatest + Reason: Dashboard reads current value only โ€” no + need to buffer historical samples + + Value struct (from SHT31 datasheet): + celsius: f64 โ€” Temperature in degrees Celsius + humidity_percent: f64 โ€” Relative humidity 0-100 + timestamp: u64 โ€” Unix timestamp in milliseconds + + Key prefix: "sensors.temp." + Variants: indoor, outdoor, garage + + Connector: mqtt link_to "mqtt://sensors/temp/{variant}" + Consumers: dashboard, anomaly_detector + + Add this record? [confirm / reject / revise] + +User: Yes, but I think we might need history for the anomaly detection. + +Agent: Good catch. If anomaly detection needs a window of readings + rather than just the latest, SpmcRing is the right choice โ€” + it buffers the last N values for independent consumption. + + Revised proposal: TemperatureReading + + Buffer: SpmcRing(256) + Reason: Anomaly detector needs a sample window; + dashboard consumes independently + + Value struct and key variants unchanged. This means the dashboard + will always have the latest reading as well. + + Add this record? [confirm / reject / revise] + +User: Yes. + +-> state.toml updated (record + fields + decision log) +-> .aimdb/architecture.mermaid regenerated +-> src/generated_schema.rs regenerated + (TemperatureReadingValue struct + TemperatureReadingKey enum + + configure_schema block) +-> cargo watch triggers rebuild +-> Running instance reflects new record +-> MCP metrics endpoint live for TemperatureReading +``` + +### Ongoing Sessions + +The agent loads prior state on startup. 
Existing decisions are not re-litigated +unless the human explicitly revisits them. The `decisions` table gives the agent +the rationale for prior choices, so it can explain them and reason about whether +a new requirement changes them. + +--- + +## Implementation + +### Phase 1 โ€” Prompt Engineering (no code required) + +Define the agent's reasoning capabilities as MCP prompts. Validate through +manual ideation sessions before building any infrastructure. + +| Task | Output | +|------|--------| +| Define `.aimdb/CONVENTIONS.md` โ€” Mermaid node shapes and arrow types | Static file | +| Write `architecture_agent` system prompt | MCP prompt | +| Write `resolve_buffer_type` disambiguation question patterns | MCP prompt | +| Write `propose_record` proposal template | MCP prompt | +| Write `breaking_change_review` safety protocol | MCP prompt | +| Write `onboarding` guided session prompt | MCP prompt | +| **Validate**: run ideation sessions manually using Claude or Copilot with the prompts as context. Identify gaps before building anything. | Session transcripts | + +**Exit criteria:** Three end-to-end ideation sessions produce correct buffer +type selections and Mermaid output without the agent making incorrect +assumptions. + +### Phase 2 โ€” `aimdb-codegen` Library + +Create the shared codegen crate. This is a pure library with no async or +runtime dependencies โ€” just `toml`, `serde`, and string formatting. 
+
+| Task | Scope |
+|------|-------|
+| Create `aimdb-codegen` crate with `state.toml` parser (`state.rs`) | Schema types + TOML deserialization (including `records.fields`) |
+| Implement `generate_mermaid(state: &ArchitectureState) -> String` | ~150 lines |
+| Implement `generate_rust(state: &ArchitectureState) -> String` | ~400 lines (value structs + key enums + `configure_schema`) |
+| Add `validate(state: &ArchitectureState) -> Vec<ValidationError>` | Key uniqueness, field type validity, buffer config, connector URLs |
+| Integration test: generated Rust compiles against `aimdb-core` 0.5.0 API | Compile-time test |
+
+The codegen generates three things per record: a **value struct** (from
+`records.fields`) with `Serialize` / `Deserialize` derives, a **key enum**
+(from `key_variants`) using `#[derive(RecordKey)]` with `#[key = "..."]`,
+`#[key_prefix = "..."]`, and `#[link_address = "..."]` attributes, and a
+**registration block** in `configure_schema` using `AimDbBuilder::configure()`
+with the correct `BufferCfg`, `link_to()`, and `link_from()`.
+
+**Supported field types in `records.fields`:** `f64`, `f32`, `u8`, `u16`,
+`u32`, `u64`, `i8`, `i16`, `i32`, `i64`, `bool`, `String`. The agent maps
+source material (datasheet registers, API response fields, protocol data point
+types) to these Rust primitives.
+
+**Key validation:** The generated `RecordKey` enums must pass the derive macro's
+compile-time checks — unit variants only, all variants have `#[key = "..."]`,
+no duplicate keys.
+
+### Phase 3 — CLI `generate` Subcommand
+
+Add `aimdb generate` to the existing CLI. This is the primary developer-facing
+interface for codegen. 
+ +| Task | Scope | +|------|-------| +| Add `aimdb-codegen` dependency to `aimdb-cli/Cargo.toml` | Dependency | +| Implement `commands/generate.rs` โ€” read `state.toml`, call codegen, write files | ~80 lines | +| Add `--check` mode for CI validation (exit 1 if files would change) | Flag | +| Add `--dry-run` mode (print to stdout) | Flag | +| Document `cargo watch -s "aimdb generate && cargo check"` pattern | README | + +This phase can ship independently of the MCP server extension โ€” developers +get value from `aimdb generate` even without the architecture agent. + +### Phase 4 โ€” MCP Server Extension + +| Task | Scope | +|------|-------| +| Add `aimdb-codegen` dependency to `aimdb-mcp/Cargo.toml` | Dependency | +| Add `propose_change`, `resolve_proposal`, `remove_record`, `rename_record` tools | `tools/aimdb-mcp/src/tools/architecture.rs` | +| Wire `resolve_proposal` confirm path to call `aimdb_codegen::generate_*()` | Library call (no subprocess) | +| Add `validate_against_instance` tool using existing AimX `record.list` | `tools/aimdb-mcp/src/tools/architecture.rs` | +| Add `aimdb://architecture/*` resources | `tools/aimdb-mcp/src/resources/architecture.rs` | +| Add all 5 new prompts to MCP prompts manifest | `tools/aimdb-mcp/src/prompts/` | +| Implement conflict detection (compare `state.toml` vs AimX `record.list`) | `tools/aimdb-mcp/src/architecture/conflicts.rs` | + +### Phase 5 โ€” VS Code Polish + +| Task | Scope | +|------|-------| +| Status bar extension (sync indicator: green/orange/red) | Minimal VS Code extension | +| Confirm Mermaid preview works via existing VS Code extensions | Documentation | +| Document the single MCP config block as the complete setup step | README update | + +--- + +## Resolved Decisions + +1. **State file location** โ€” **`.aimdb/state.toml`**. The hidden directory keeps + the repo root clean. Discoverability is addressed by documenting the path in + the README and surfacing it in agent startup messages. + +2. 
**Confirmation UX** โ€” **Inline chat confirmation via `resolve_proposal` + tool.** A single tool with a `confirm | reject | revise` status enum. Keeps + the entire ideation flow in one surface with no UI context switches. A VS + Code quick-pick was considered but adds no value for this pattern. + +3. **Breaking change handling** โ€” **Warn, do not block, do not propose + migrations automatically.** When a proposed change is incompatible with the + running instance, the agent surfaces the conflict inline and halts the + proposal. Migration paths are only explored if the user explicitly asks. + +4. **Multi-instance support** โ€” **Single instance only.** The MCP server + connects to exactly one running AimDB instance. Fleet and per-device keying + patterns are out of scope for this version. + +5. **Prompt injection strategy** โ€” **Slash command ships first.** The + `architecture_agent` prompt is loaded explicitly (e.g. via `/aimdb`). + Automatic injection on session start via MCP Prompts is deferred until VS + Code Copilot's support for that pattern matures. + +6. **All key variants are concrete** โ€” There is no `key_strategy` distinction. + The agent resolves the full set of concrete variants during conversation. + "One per device" is not a codegen abstraction โ€” it is a question the agent + asks: "Which devices? What are their IDs?" The agent may derive device + lists from fleet manifests, config files, or API responses. Codegen always + receives a flat list of string variants and generates one enum variant per + entry. + +7. **Record deletion/renaming** โ€” **Supported. Compiler catches dangling + references.** When a record is removed or renamed, the generated code + changes. Application code that references the old type fails to compile. + The agent notes this in the proposal. No automatic migration of application + code. + +8. 
**Codegen language and location** — **Rust, in a shared `aimdb-codegen`
+   crate.** Keeps the toolchain unified, enables both `aimdb generate` (CLI)
+   and MCP server to use the same library. The CLI already depends on
+   `aimdb-core` and `aimdb-client`, so adding a codegen crate is consistent
+   with the existing workspace structure. Phase 3 (CLI) can ship independently
+   of Phase 4 (MCP), giving developers value earlier.
+
+9. **Value types are agent-defined structs** — The agent derives the value
+   struct from source material: datasheets, API docs, protocol specs, or
+   conversation. `state.toml` stores the full field list (`records.fields`)
+   with name, Rust type, and description. Codegen generates a Rust struct with
+   `Serialize` / `Deserialize` derives and uses it as the `T` in
+   `builder.configure::<T>()`. No placeholder types, no generic parameters —
+   the struct is fully specified.
+
+---
+
+## Success Criteria
+
+The tooling is working when:
+
+- A developer can describe a multi-record system in plain language and have a
+  compiling, correctly-typed AimDB schema in under 10 minutes
+- The `.aimdb/architecture.mermaid` file accurately reflects every architectural
+  decision with no manual maintenance
+- A new team member can understand the system's data architecture by reading the
+  Mermaid graph and the `decisions` table, without reading any application code
+- Changing a buffer type or adding a connector is a one-sentence conversation,
+  not a code change
+- Generated code compiles against the actual `aimdb-core` 0.5.0 API without
+  modification
+
+---
+
+## References
+
+- [009-M4-mcp-integration](009-M4-mcp-integration.md) — Existing MCP server
+  design
+- [003-M1_buffer_usage_guide](003-M1_buffer_usage_guide.md) — Buffer type
+  selection guide
+- [016-M6-record-key-trait](016-M6-record-key-trait.md) — `RecordKey` trait and
+  derive macro
+- [018-M7-dynamic-mqtt-topics](018-M7-dynamic-mqtt-topics.md) — Dynamic
+  connector routing
+- 
[021-M9-graph-introspection](021-M9-graph-introspection.md) โ€” Dependency + graph API From 7d7e2e7f0d90181de1a005b7abdae89909438cce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Mon, 23 Feb 2026 20:35:59 +0000 Subject: [PATCH 02/46] chore: update embassy subproject to latest commit --- _external/embassy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_external/embassy b/_external/embassy index 29640b94..d9105e12 160000 --- a/_external/embassy +++ b/_external/embassy @@ -1 +1 @@ -Subproject commit 29640b944410cf6903a42c06e4373ff07c13aede +Subproject commit d9105e122e73b806cc9d4071a19806f423a4a2fa From 84bbe5297841d891beb1cd3ae99d0e918cd5b08c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 24 Feb 2026 20:46:26 +0000 Subject: [PATCH 03/46] feat: add generate command for architecture artefacts --- tools/aimdb-cli/Cargo.toml | 1 + tools/aimdb-cli/src/commands/generate.rs | 161 +++++++++++++++++++++++ tools/aimdb-cli/src/commands/mod.rs | 1 + tools/aimdb-cli/src/main.rs | 8 +- 4 files changed, 170 insertions(+), 1 deletion(-) create mode 100644 tools/aimdb-cli/src/commands/generate.rs diff --git a/tools/aimdb-cli/Cargo.toml b/tools/aimdb-cli/Cargo.toml index 84062c18..627d2118 100644 --- a/tools/aimdb-cli/Cargo.toml +++ b/tools/aimdb-cli/Cargo.toml @@ -17,6 +17,7 @@ path = "src/main.rs" [dependencies] # AimDB client library aimdb-client = { version = "0.5.0", path = "../../aimdb-client" } +aimdb-codegen = { version = "0.5.0", path = "../../aimdb-codegen" } # Core dependencies - reuse protocol types from aimdb-core aimdb-core = { version = "0.5.0", path = "../../aimdb-core", features = [ diff --git a/tools/aimdb-cli/src/commands/generate.rs b/tools/aimdb-cli/src/commands/generate.rs new file mode 100644 index 00000000..9377248e --- /dev/null +++ b/tools/aimdb-cli/src/commands/generate.rs @@ -0,0 +1,161 @@ +//! `aimdb generate` subcommand +//! +//! Reads `.aimdb/state.toml` and emits: +//! 
- `.aimdb/architecture.mermaid` โ€” Mermaid diagram +//! - `src/generated_schema.rs` โ€” compilable Rust schema +//! +//! # Usage +//! +//! ```text +//! aimdb generate # generate both artefacts +//! aimdb generate --check # validate only (CI) +//! aimdb generate --dry-run # print to stdout, don't write +//! aimdb generate --state path/state.toml # custom state path +//! ``` +//! +//! Integrate with cargo-watch: +//! ```text +//! cargo watch -s "aimdb generate && cargo check" +//! ``` + +use crate::error::CliResult; +use aimdb_codegen::{generate_mermaid, generate_rust, validate, ArchitectureState, Severity}; +use anyhow::Context; +use clap::Args; +use colored::Colorize; +use std::path::{Path, PathBuf}; + +/// Generate Mermaid diagram and Rust schema from `.aimdb/state.toml` +#[derive(Debug, Args)] +pub struct GenerateCommand { + /// Path to state.toml + #[arg(long, default_value = ".aimdb/state.toml")] + pub state: PathBuf, + + /// Output path for Mermaid diagram + #[arg(long, default_value = ".aimdb/architecture.mermaid")] + pub mermaid: PathBuf, + + /// Output path for generated Rust source + #[arg(long, default_value = "src/generated_schema.rs")] + pub rust: PathBuf, + + /// Validate state.toml without writing files (exit 1 if errors found) + #[arg(long)] + pub check: bool, + + /// Print generated output to stdout instead of writing files + #[arg(long)] + pub dry_run: bool, +} + +impl GenerateCommand { + pub async fn execute(self) -> CliResult<()> { + // โ”€โ”€ Read state.toml โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let state_path = &self.state; + if !state_path.exists() { + return Err(anyhow::anyhow!( + "state file not found: {}\n\ + Hint: start an architecture session with the AimDB architecture agent,\n\ + or create .aimdb/state.toml manually.", + state_path.display() + ) + .into()); + } + + let toml_src = std::fs::read_to_string(state_path) + 
.with_context(|| format!("reading {}", state_path.display()))?; + + let state = ArchitectureState::from_toml(&toml_src) + .with_context(|| format!("parsing {}", state_path.display()))?; + + // โ”€โ”€ Validate โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let errors = validate(&state); + let has_errors = print_validation_results(&errors, state_path); + + if has_errors { + return Err( + anyhow::anyhow!("validation failed โ€” fix the errors above and retry").into(), + ); + } + + if self.check { + println!( + "{} {} validated successfully", + "โœ“".green(), + state_path.display() + ); + return Ok(()); + } + + // โ”€โ”€ Generate โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let mermaid_src = generate_mermaid(&state); + let rust_src = generate_rust(&state); + + if self.dry_run { + println!("{} {}", "โ”€โ”€ Mermaid".dimmed(), self.mermaid.display()); + println!("{mermaid_src}"); + println!("{} {}", "โ”€โ”€ Rust".dimmed(), self.rust.display()); + println!("{rust_src}"); + return Ok(()); + } + + // โ”€โ”€ Write files โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + write_if_changed(&self.mermaid, &mermaid_src, "Mermaid")?; + write_if_changed(&self.rust, &rust_src, "Rust")?; + + println!( + "{} {} record(s) processed", + "โœ“".green(), + state.records.len() + ); + + Ok(()) + } +} + +// โ”€โ”€ Helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Print validation results and return `true` if any errors were found. 
+fn print_validation_results(errors: &[aimdb_codegen::ValidationError], state_path: &Path) -> bool {
+    let mut has_errors = false;
+    for e in errors {
+        match e.severity {
+            Severity::Error => {
+                eprintln!("{} [{}] {}", "✗".red(), e.location, e.message);
+                has_errors = true;
+            }
+            Severity::Warning => {
+                // Warnings are surfaced but never fail the run
+                eprintln!("{} [{}] {}", "!".yellow(), e.location, e.message);
+            }
+        }
+    }
+    if !errors.is_empty() {
+        eprintln!("  in: {}", state_path.display());
+    }
+    has_errors
+}
+
+/// Write `contents` to `path`, creating parent directories as needed.
+/// Prints a status line, tagged with `label` (e.g. "Mermaid" or "Rust"),
+/// indicating whether the file was written or left unchanged.
+fn write_if_changed(path: &Path, contents: &str, label: &str) -> CliResult<()> {
+    // Create parent directory if needed
+    if let Some(parent) = path.parent() {
+        std::fs::create_dir_all(parent)
+            .with_context(|| format!("creating directory: {}", parent.display()))?;
+    }
+
+    // Read existing to detect changes; a missing or non-UTF-8 file counts as changed
+    let existing = std::fs::read_to_string(path).ok();
+    let changed = existing.as_deref() != Some(contents);
+
+    if changed {
+        std::fs::write(path, contents).with_context(|| format!("writing {}", path.display()))?;
+        // `label` disambiguates the two artefacts in the status output
+        // (previously suppressed with `let _ = label;` — dead parameter).
+        println!("  {} {} written ({label})", "→".cyan(), path.display());
+    } else {
+        println!("  {} {} unchanged ({label})", "·".dimmed(), path.display());
+    }
+
+    Ok(())
+}
diff --git a/tools/aimdb-cli/src/commands/mod.rs b/tools/aimdb-cli/src/commands/mod.rs
index e2802506..39b196ea 100644
--- a/tools/aimdb-cli/src/commands/mod.rs
+++ b/tools/aimdb-cli/src/commands/mod.rs
@@ -2,6 +2,7 @@
 //!
 //! This module contains the implementations for all CLI commands. 
+pub mod generate; pub mod graph; pub mod instance; pub mod record; diff --git a/tools/aimdb-cli/src/main.rs b/tools/aimdb-cli/src/main.rs index 4f98586a..fa718d3e 100644 --- a/tools/aimdb-cli/src/main.rs +++ b/tools/aimdb-cli/src/main.rs @@ -5,7 +5,8 @@ use clap::{Parser, Subcommand}; use commands::{ - graph::GraphCommand, instance::InstanceCommand, record::RecordCommand, watch::WatchCommand, + generate::GenerateCommand, graph::GraphCommand, instance::InstanceCommand, + record::RecordCommand, watch::WatchCommand, }; mod commands; @@ -42,6 +43,10 @@ enum Command { /// Watch a record in real-time #[command(name = "watch")] Watch(WatchCommand), + + /// Generate architecture artefacts from state.toml + #[command(name = "generate")] + Generate(GenerateCommand), } #[tokio::main] @@ -53,6 +58,7 @@ async fn main() { Command::Record(cmd) => cmd.execute().await, Command::Graph(cmd) => cmd.execute().await, Command::Watch(cmd) => cmd.execute().await, + Command::Generate(cmd) => cmd.execute().await, }; if let Err(e) = result { From 2a21fc9ad7fad5fcd181a2ca78781281c7bd6408 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 24 Feb 2026 20:49:41 +0000 Subject: [PATCH 04/46] feat: add Rust source code generator for AimDB architecture state - Implemented `generate_rust` function to convert `ArchitectureState` into Rust source code. - Added support for generating value structs and key enums for records. - Introduced `configure_schema` function to register records with `AimDbBuilder`. - Created utility functions for handling imports, field definitions, and key variants. - Added tests to validate generated output against expected structures. feat: define architecture state types and TOML parser - Created `ArchitectureState`, `RecordDef`, and related structs to represent the state. - Implemented TOML parsing and serialization for architecture state. - Added support for metadata, buffer types, fields, and connectors. 
- Included tests to ensure correct parsing and serialization of TOML data. feat: implement architecture state validation - Added `validate` function to check for structural and semantic errors in `ArchitectureState`. - Defined `ValidationError` and `Severity` types for error reporting. - Implemented checks for record names, field types, connector definitions, and more. - Included tests to verify validation logic and error handling. --- Cargo.lock | 118 ++++++- Cargo.toml | 1 + Makefile | 39 ++- aimdb-codegen/Cargo.toml | 22 ++ aimdb-codegen/src/lib.rs | 62 ++++ aimdb-codegen/src/mermaid.rs | 286 +++++++++++++++++ aimdb-codegen/src/rust.rs | 569 ++++++++++++++++++++++++++++++++++ aimdb-codegen/src/state.rs | 305 ++++++++++++++++++ aimdb-codegen/src/validate.rs | 409 ++++++++++++++++++++++++ 9 files changed, 1796 insertions(+), 15 deletions(-) create mode 100644 aimdb-codegen/Cargo.toml create mode 100644 aimdb-codegen/src/lib.rs create mode 100644 aimdb-codegen/src/mermaid.rs create mode 100644 aimdb-codegen/src/rust.rs create mode 100644 aimdb-codegen/src/state.rs create mode 100644 aimdb-codegen/src/validate.rs diff --git a/Cargo.lock b/Cargo.lock index 263f3222..39d8fd5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -28,6 +28,7 @@ name = "aimdb-cli" version = "0.5.0" dependencies = [ "aimdb-client", + "aimdb-codegen", "aimdb-core", "anyhow", "chrono", @@ -57,6 +58,18 @@ dependencies = [ "tokio-test", ] +[[package]] +name = "aimdb-codegen" +version = "0.5.0" +dependencies = [ + "prettyplease", + "proc-macro2", + "quote", + "serde", + "syn 2.0.108", + "toml", +] + [[package]] name = "aimdb-core" version = "0.5.0" @@ -164,9 +177,11 @@ name = "aimdb-mcp" version = "0.5.0" dependencies = [ "aimdb-client", + "aimdb-codegen", "aimdb-core", "anyhow", "chrono", + "fs2", "once_cell", "serde", "serde_json", @@ -1242,6 +1257,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs2" +version = "0.4.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "fs_extra" version = "1.3.0" @@ -2092,6 +2117,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.108", +] + [[package]] name = "proc-macro-error-attr2" version = "2.0.0" @@ -2558,6 +2593,15 @@ dependencies = [ "serde_core", ] +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -2703,7 +2747,7 @@ dependencies = [ [[package]] name = "stm32-metapac" version = "19.0.0" -source = "git+https://github.com/embassy-rs/stm32-data-generated?tag=stm32-data-69c2fa7d0b8afd40fc2172d8af2311008428193d#9a192510e3e113e949ee12001ff60f0ff7a149ea" +source = "git+https://github.com/embassy-rs/stm32-data-generated?tag=stm32-data-3879f504f35b23446e389c87deeb38542e0c9d4b#91326e22c7580c0da21cb718e06a64eb27bd201d" dependencies = [ "cortex-m", "cortex-m-rt", @@ -3037,6 +3081,47 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name 
= "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + [[package]] name = "tower-service" version = "0.3.3" @@ -3452,6 +3537,22 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + [[package]] name = "winapi-util" version = "0.1.11" @@ -3461,6 +3562,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows" version = "0.61.3" @@ -3800,6 +3907,15 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.50.0" diff --git a/Cargo.toml b/Cargo.toml index 
bc74ba87..3303bc08 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ members = [ "aimdb-data-contracts", "aimdb-core", "aimdb-client", + "aimdb-codegen", "aimdb-embassy-adapter", "aimdb-tokio-adapter", "aimdb-sync", diff --git a/Makefile b/Makefile index 0148b1f7..a5f7f491 100644 --- a/Makefile +++ b/Makefile @@ -56,6 +56,8 @@ build: cargo build --package aimdb-tokio-adapter --features "tokio-runtime,tracing,metrics" @printf "$(YELLOW) โ†’ Building sync wrapper$(NC)\n" cargo build --package aimdb-sync + @printf "$(YELLOW) โ†’ Building codegen library$(NC)\n" + cargo build --package aimdb-codegen @printf "$(YELLOW) โ†’ Building CLI tools$(NC)\n" cargo build --package aimdb-cli @printf "$(YELLOW) โ†’ Building MCP server$(NC)\n" @@ -85,6 +87,8 @@ test: cargo test --package aimdb-tokio-adapter --features "tokio-runtime,tracing,metrics" @printf "$(YELLOW) โ†’ Testing sync wrapper$(NC)\n" cargo test --package aimdb-sync + @printf "$(YELLOW) โ†’ Testing codegen library$(NC)\n" + cargo test --package aimdb-codegen @printf "$(YELLOW) โ†’ Testing CLI tools$(NC)\n" cargo test --package aimdb-cli @printf "$(YELLOW) โ†’ Testing MCP server$(NC)\n" @@ -100,7 +104,7 @@ test: fmt: @printf "$(GREEN)Formatting code (workspace members only)...$(NC)\n" - @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo 
tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Formatting $$pkg$(NC)\n"; \ cargo fmt -p $$pkg 2>/dev/null || true; \ done @@ -109,7 +113,7 @@ fmt: fmt-check: @printf "$(GREEN)Checking code formatting (workspace members only)...$(NC)\n" @FAILED=0; \ - for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Checking $$pkg$(NC)\n"; \ if ! 
cargo fmt -p $$pkg -- --check 2>&1; then \ printf "$(RED)โŒ Formatting check failed for $$pkg$(NC)\n"; \ @@ -144,6 +148,8 @@ clippy: cargo clippy --package aimdb-sync --all-targets -- -D warnings @printf "$(YELLOW) โ†’ Clippy on client library$(NC)\n" cargo clippy --package aimdb-client --all-targets -- -D warnings + @printf "$(YELLOW) โ†’ Clippy on codegen library$(NC)\n" + cargo clippy --package aimdb-codegen --all-targets -- -D warnings @printf "$(YELLOW) โ†’ Clippy on CLI tools$(NC)\n" cargo clippy --package aimdb-cli --all-targets -- -D warnings @printf "$(YELLOW) โ†’ Clippy on MCP server$(NC)\n" @@ -173,6 +179,7 @@ doc: cargo doc --package aimdb-sync --no-deps cargo doc --package aimdb-mqtt-connector --features "std,tokio-runtime" --no-deps cargo doc --package aimdb-knx-connector --features "std,tokio-runtime" --no-deps + cargo doc --package aimdb-codegen --no-deps cargo doc --package aimdb-cli --no-deps cargo doc --package aimdb-mcp --no-deps cargo doc --package aimdb-persistence --no-deps @@ -298,51 +305,55 @@ publish: @cargo publish -p aimdb-executor @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-derive (2/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-derive (2/14)$(NC)\n" @cargo publish -p aimdb-derive @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-core (3/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-codegen (3/14)$(NC)\n" + @cargo publish -p aimdb-codegen + @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" + @sleep 10 + @printf "$(YELLOW) โ†’ Publishing aimdb-core (4/14)$(NC)\n" @cargo publish -p aimdb-core @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-tokio-adapter (4/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-tokio-adapter (5/14)$(NC)\n" @cargo publish -p aimdb-tokio-adapter 
@printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-embassy-adapter (5/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-embassy-adapter (6/14)$(NC)\n" @cargo publish -p aimdb-embassy-adapter --no-verify @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-client (6/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-client (7/14)$(NC)\n" @cargo publish -p aimdb-client @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-sync (7/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-sync (8/14)$(NC)\n" @cargo publish -p aimdb-sync @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-persistence (8/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-persistence (9/14)$(NC)\n" @cargo publish -p aimdb-persistence @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-persistence-sqlite (9/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-persistence-sqlite (10/14)$(NC)\n" @cargo publish -p aimdb-persistence-sqlite @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-mqtt-connector (10/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-mqtt-connector (11/14)$(NC)\n" @cargo publish -p aimdb-mqtt-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-knx-connector (11/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-knx-connector (12/14)$(NC)\n" @cargo publish -p aimdb-knx-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-cli (12/13)$(NC)\n" + @printf "$(YELLOW) โ†’ 
Publishing aimdb-cli (13/14)$(NC)\n" @cargo publish -p aimdb-cli @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-mcp (13/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-mcp (14/14)$(NC)\n" @cargo publish -p aimdb-mcp @printf "$(GREEN)โœ“ All crates published successfully!$(NC)\n" @printf "$(BLUE)๐ŸŽ‰ AimDB v$(shell grep '^version' Cargo.toml | head -1 | cut -d '"' -f 2) is now live on crates.io!$(NC)\n" diff --git a/aimdb-codegen/Cargo.toml b/aimdb-codegen/Cargo.toml new file mode 100644 index 00000000..99ca32fc --- /dev/null +++ b/aimdb-codegen/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "aimdb-codegen" +version = "0.5.0" +edition = "2021" +authors.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true +description = "Code generation library for AimDB architecture agent โ€” reads state.toml and emits Mermaid diagrams and Rust source" +keywords = ["codegen", "architecture", "mermaid", "schema", "aimdb"] +categories = ["development-tools"] + +[dependencies] +proc-macro2 = "1" +prettyplease = "0.2" +quote = "1" +serde = { version = "1.0", features = ["derive"] } +syn = "2" +toml = "0.8" + +[dev-dependencies] +# No external dev deps โ€” integration tests use generated string content diff --git a/aimdb-codegen/src/lib.rs b/aimdb-codegen/src/lib.rs new file mode 100644 index 00000000..4de4daf9 --- /dev/null +++ b/aimdb-codegen/src/lib.rs @@ -0,0 +1,62 @@ +//! AimDB Codegen โ€” architecture state to Mermaid and Rust source +//! +//! This library reads `.aimdb/state.toml` (the architecture agent's decision +//! record) and emits two artefacts: +//! +//! - **Mermaid diagram** โ€” `.aimdb/architecture.mermaid`, a read-only graph +//! projection of the architecture (see [`generate_mermaid`]) +//! - **Rust source** โ€” `src/generated_schema.rs`, compilable AimDB schema +//! using the actual 0.5.x API (see [`generate_rust`]) +//! +//! # Usage +//! 
+//! ```rust +//! use aimdb_codegen::{ArchitectureState, generate_mermaid, generate_rust, validate}; +//! +//! let toml = r#" +//! [meta] +//! aimdb_version = "0.5.0" +//! created_at = "2026-02-22T14:00:00Z" +//! last_modified = "2026-02-22T14:00:00Z" +//! +//! [[records]] +//! name = "Temperature" +//! buffer = "SpmcRing" +//! capacity = 128 +//! key_prefix = "sensor." +//! key_variants = ["room1"] +//! producers = ["sensor_task"] +//! consumers = ["dashboard"] +//! +//! [[records.fields]] +//! name = "celsius" +//! type = "f64" +//! description = "Temperature in Celsius" +//! "#; +//! +//! let state = ArchitectureState::from_toml(toml).unwrap(); +//! +//! let errors = validate(&state); +//! assert!(errors.iter().all(|e| e.severity != validate::Severity::Error)); +//! +//! let mermaid = generate_mermaid(&state); +//! assert!(mermaid.contains("flowchart LR")); +//! +//! let rust = generate_rust(&state); +//! assert!(rust.contains("pub struct TemperatureValue")); +//! ``` + +pub mod mermaid; +pub mod rust; +pub mod state; +pub mod validate; + +// โ”€โ”€ Convenience re-exports โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +pub use mermaid::generate_mermaid; +pub use rust::generate_rust; +pub use state::{ + ArchitectureState, BufferType, ConnectorDef, ConnectorDirection, DecisionEntry, FieldDef, Meta, + RecordDef, +}; +pub use validate::{is_valid, validate, Severity, ValidationError}; diff --git a/aimdb-codegen/src/mermaid.rs b/aimdb-codegen/src/mermaid.rs new file mode 100644 index 00000000..27dca7fb --- /dev/null +++ b/aimdb-codegen/src/mermaid.rs @@ -0,0 +1,286 @@ +//! Mermaid diagram generator +//! +//! Converts an [`ArchitectureState`] into a `flowchart LR` Mermaid diagram +//! following the conventions defined in `.aimdb/CONVENTIONS.md`. 
+ +use crate::state::{ArchitectureState, BufferType, ConnectorDirection}; + +/// Generate a Mermaid `flowchart LR` diagram from architecture state. +/// +/// The returned string can be written directly to `.aimdb/architecture.mermaid`. +/// +/// # Conventions +/// - Stadium `(["โ€ฆ"])` = SpmcRing +/// - Rounded rect `("โ€ฆ")` = SingleLatest +/// - Diamond `{"โ€ฆ"}` = Mailbox +/// - Solid arrows โ†’ data flow (produce / consume) +/// - Dashed arrows โ†’ connector metadata (link_to / link_from) +pub fn generate_mermaid(state: &ArchitectureState) -> String { + let mut out = String::new(); + + out.push_str("flowchart LR\n"); + + // โ”€โ”€ Record nodes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + if !state.records.is_empty() { + out.push_str( + "\n %% โ”€โ”€ Records โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€\n", + ); + } + for rec in &state.records { + let node_id = node_id(&rec.name); + let label = format!("{}\\n{}", rec.name, rec.buffer.label(rec.capacity)); + let node_def = match rec.buffer { + BufferType::SpmcRing => format!(" {node_id}([\"{label}\"])"), + BufferType::SingleLatest => format!(" {node_id}(\"{label}\")"), + BufferType::Mailbox => format!(" {node_id}{{\"{label}\"}}"), + }; + out.push_str(&node_def); + out.push('\n'); + } + + // โ”€โ”€ Data flow arrows โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + if state + .records + .iter() + .any(|r| !r.producers.is_empty() || !r.consumers.is_empty()) + { + out.push_str( + "\n %% โ”€โ”€ Data flow (solid arrows) 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€\n", + ); + } + for rec in &state.records { + let nid = node_id(&rec.name); + for producer in &rec.producers { + let pid = sanitize_id(producer); + out.push_str(&format!(" {pid} -->|produce| {nid}\n")); + } + for consumer in &rec.consumers { + let cid = sanitize_id(consumer); + out.push_str(&format!(" {nid} -->|consume| {cid}\n")); + } + } + + // โ”€โ”€ Connector metadata (dashed arrows) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let has_connectors = state.records.iter().any(|r| !r.connectors.is_empty()); + if has_connectors { + out.push_str( + "\n %% โ”€โ”€ Connector metadata (dashed arrows) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€\n", + ); + // Collect unique protocol bus node names + let mut protocols_seen: Vec = Vec::new(); + for rec in &state.records { + for conn in &rec.connectors { + let bus = conn.protocol.to_uppercase(); + if !protocols_seen.contains(&bus) { + protocols_seen.push(bus); + } + } + } + for rec in &state.records { + let nid = node_id(&rec.name); + for conn in &rec.connectors { + let bus = conn.protocol.to_uppercase(); + let url = &conn.url; + match conn.direction { + ConnectorDirection::Outbound => { + out.push_str(&format!(" {nid} -.->|\"link_to {url}\"| {bus}\n")); + } + ConnectorDirection::Inbound => { + out.push_str(&format!(" {bus} -.->|\"link_from {url}\"| {nid}\n")); + } + } + } + } + } + + out +} + +/// Derive a stable Mermaid node ID from a record name. +/// +/// Converts PascalCase to SCREAMING_SNAKE_CASE, e.g. +/// `TemperatureReading` โ†’ `TEMPERATURE_READING`. 
+pub fn node_id(name: &str) -> String { + let mut out = String::new(); + let chars: Vec = name.chars().collect(); + for (i, &c) in chars.iter().enumerate() { + if c.is_uppercase() + && i > 0 + && (chars[i - 1].is_lowercase() || chars[i - 1].is_ascii_digit()) + { + out.push('_'); + } + out.push(c.to_ascii_uppercase()); + } + out +} + +/// Sanitize an arbitrary identifier for use as a Mermaid node ID. +/// +/// Replaces hyphens and spaces with underscores, removes other non-alphanumeric chars. +fn sanitize_id(s: &str) -> String { + s.chars() + .map(|c| { + if c.is_alphanumeric() || c == '_' { + c + } else { + '_' + } + }) + .collect() +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::state::ArchitectureState; + + const SAMPLE_TOML: &str = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor", "outdoor", "garage"] +producers = ["sensor_task"] +consumers = ["dashboard", "anomaly_detector"] + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Temperature in degrees Celsius" + +[[records.connectors]] +protocol = "mqtt" +direction = "outbound" +url = "mqtt://sensors/temp/{variant}" + +[[records]] +name = "OtaCommand" +buffer = "Mailbox" +key_prefix = "device.ota." +key_variants = ["gateway-01"] +producers = ["cloud_ota_service"] +consumers = ["device_update_task"] + +[[records.fields]] +name = "action" +type = "String" +description = "Command action" + +[[records.connectors]] +protocol = "mqtt" +direction = "inbound" +url = "mqtt://ota/cmd/{variant}" + +[[records]] +name = "FirmwareVersion" +buffer = "SingleLatest" +key_prefix = "device.firmware." 
+key_variants = ["gateway-01"] +producers = ["cloud_service"] +consumers = ["updater"] + +[[records.fields]] +name = "version" +type = "String" +description = "Semantic version" +"#; + + fn state() -> ArchitectureState { + ArchitectureState::from_toml(SAMPLE_TOML).unwrap() + } + + #[test] + fn contains_flowchart_header() { + let out = generate_mermaid(&state()); + assert!( + out.starts_with("flowchart LR\n"), + "Must start with flowchart LR" + ); + } + + #[test] + fn spmc_ring_uses_stadium_shape() { + let out = generate_mermaid(&state()); + // Stadium: ([" ... "]) + assert!( + out.contains("TEMPERATURE_READING([\"TemperatureReading\\nSpmcRing ยท 256\"])"), + "SpmcRing node should use stadium shape:\n{out}" + ); + } + + #[test] + fn mailbox_uses_diamond_shape() { + let out = generate_mermaid(&state()); + assert!( + out.contains("OTA_COMMAND{\"OtaCommand\\nMailbox\"}"), + "Mailbox node should use diamond shape:\n{out}" + ); + } + + #[test] + fn single_latest_uses_rounded_rect() { + let out = generate_mermaid(&state()); + assert!( + out.contains("FIRMWARE_VERSION(\"FirmwareVersion\\nSingleLatest\")"), + "SingleLatest node should use rounded rect:\n{out}" + ); + } + + #[test] + fn produce_arrows_present() { + let out = generate_mermaid(&state()); + assert!( + out.contains("sensor_task -->|produce| TEMPERATURE_READING"), + "Producer arrow missing:\n{out}" + ); + } + + #[test] + fn consume_arrows_present() { + let out = generate_mermaid(&state()); + assert!( + out.contains("TEMPERATURE_READING -->|consume| dashboard"), + "Consumer arrow missing:\n{out}" + ); + assert!( + out.contains("TEMPERATURE_READING -->|consume| anomaly_detector"), + "Consumer arrow missing:\n{out}" + ); + } + + #[test] + fn outbound_connector_dashed_arrow() { + let out = generate_mermaid(&state()); + assert!( + out.contains( + "TEMPERATURE_READING -.->|\"link_to mqtt://sensors/temp/{variant}\"| MQTT" + ), + "Outbound dashed arrow missing:\n{out}" + ); + } + + #[test] + fn 
inbound_connector_dashed_arrow() { + let out = generate_mermaid(&state()); + assert!( + out.contains("MQTT -.->|\"link_from mqtt://ota/cmd/{variant}\"| OTA_COMMAND"), + "Inbound dashed arrow missing:\n{out}" + ); + } + + #[test] + fn node_id_pascal_to_screaming_snake() { + assert_eq!(node_id("TemperatureReading"), "TEMPERATURE_READING"); + assert_eq!(node_id("OtaCommand"), "OTA_COMMAND"); + assert_eq!(node_id("FirmwareVersion"), "FIRMWARE_VERSION"); + assert_eq!(node_id("AppConfig"), "APP_CONFIG"); + assert_eq!(node_id("Temp"), "TEMP"); + } +} diff --git a/aimdb-codegen/src/rust.rs b/aimdb-codegen/src/rust.rs new file mode 100644 index 00000000..2280b9b1 --- /dev/null +++ b/aimdb-codegen/src/rust.rs @@ -0,0 +1,569 @@ +//! Rust source code generator +//! +//! Converts an [`ArchitectureState`] into compilable Rust source that uses the +//! actual AimDB 0.5.x API: `#[derive(RecordKey)]`, `BufferCfg`, and +//! `AimDbBuilder::configure()`. +//! +//! Uses [`quote`] for quasi-quoting token streams and [`prettyplease`] for +//! formatting the output into idiomatic Rust. + +use proc_macro2::TokenStream; +use quote::{format_ident, quote}; + +use crate::state::{ArchitectureState, ConnectorDirection, RecordDef}; + +// โ”€โ”€ Public API โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Generate a complete Rust source file from architecture state. +/// +/// The returned string can be written to `src/generated_schema.rs`. 
+/// It contains: +/// - One `Value` struct per record (with `Serialize` / `Deserialize`) +/// - One `Key` enum per record (with `#[derive(RecordKey)]`) +/// - A `configure_schema()` function wiring all records into `AimDbBuilder` +pub fn generate_rust(state: &ArchitectureState) -> String { + let imports = emit_imports(); + + let record_items: Vec = state + .records + .iter() + .flat_map(|rec| { + let value_struct = emit_value_struct(rec); + let key_enum = emit_key_enum(rec); + [value_struct, key_enum] + }) + .collect(); + + let configure_fn = emit_configure_schema(state); + + let file_tokens = quote! { + #imports + #(#record_items)* + #configure_fn + }; + + let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust"); + let formatted = prettyplease::unparse(&syntax_tree); + + // Prepend the @generated header (outside the token stream since it's a comment) + let header = "\ +// @generated โ€” do not edit manually.\n\ +// Source: .aimdb/state.toml โ€” edit via `aimdb generate` or the architecture agent.\n\ +// Regenerate: `aimdb generate` or confirm a proposal in the architecture agent.\n\n"; + + format!("{header}{formatted}") +} + +// โ”€โ”€ Imports โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn emit_imports() -> TokenStream { + quote! 
{ + use aimdb_core::buffer::BufferCfg; + use aimdb_core::builder::AimDbBuilder; + use aimdb_derive::RecordKey; + use aimdb_executor::Spawn; + use serde::{Deserialize, Serialize}; + } +} + +// โ”€โ”€ Value struct โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn emit_value_struct(rec: &RecordDef) -> TokenStream { + let struct_name = format_ident!("{}Value", rec.name); + let doc = format!("Value type for `{}`.", rec.name); + + let fields: Vec = if rec.fields.is_empty() { + vec![emit_todo_field( + "add fields โ€” use `propose_record` to define them via the architecture agent", + )] + } else { + rec.fields + .iter() + .map(|f| { + let fname = format_ident!("{}", f.name); + let ftype: syn::Type = syn::parse_str(&f.field_type).unwrap_or_else(|_| { + panic!("invalid type `{}` for field `{}`", f.field_type, f.name) + }); + if f.description.is_empty() { + quote! { pub #fname: #ftype, } + } else { + let desc = &f.description; + quote! { + #[doc = #desc] + pub #fname: #ftype, + } + } + }) + .collect() + }; + + quote! { + #[doc = #doc] + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct #struct_name { + #(#fields)* + } + } +} + +/// Emit a dummy field with a TODO doc comment (for records with no fields yet). +fn emit_todo_field(msg: &str) -> TokenStream { + let doc = format!("TODO: {msg}"); + quote! { + #[doc = #doc] + pub _placeholder: (), + } +} + +// โ”€โ”€ Key enum โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn emit_key_enum(rec: &RecordDef) -> TokenStream { + let enum_name = format_ident!("{}Key", rec.name); + let connector = rec.connectors.first(); + + let key_prefix_attr = if !rec.key_prefix.is_empty() { + let prefix = &rec.key_prefix; + quote! 
{ #[key_prefix = #prefix] } + } else { + quote! {} + }; + + let variants: Vec = if rec.key_variants.is_empty() { + let doc = "TODO: add key variants โ€” use the architecture agent to resolve them"; + vec![quote! { + #[doc = #doc] + _Placeholder, + }] + } else { + rec.key_variants + .iter() + .map(|variant_str| { + let variant_name = format_ident!("{}", to_pascal_case(variant_str)); + let link_attr = connector.map(|conn| { + let addr = conn.url.replace("{variant}", variant_str); + quote! { #[link_address = #addr] } + }); + quote! { + #[key = #variant_str] + #link_attr + #variant_name, + } + }) + .collect() + }; + + quote! { + #[derive(RecordKey, Clone, Copy, PartialEq, Eq)] + #key_prefix_attr + pub enum #enum_name { + #(#variants)* + } + } +} + +// โ”€โ”€ configure_schema โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn emit_configure_schema(state: &ArchitectureState) -> TokenStream { + let record_blocks: Vec = state + .records + .iter() + .map(emit_record_configure_block) + .collect(); + + quote! { + /// Register all architecture-agent-defined records on the builder. + /// + /// Generated from `.aimdb/state.toml`. Configures buffer types and connector + /// addresses. Producers, consumers, serializers, and deserializers contain + /// business logic and must be provided by application code โ€” they are not + /// generated here. + pub fn configure_schema(builder: &mut AimDbBuilder) { + #(#record_blocks)* + } + } +} + +fn emit_record_configure_block(rec: &RecordDef) -> TokenStream { + if rec.key_variants.is_empty() { + let msg = format!("TODO: {}: no key variants defined yet", rec.name); + return quote! 
{ + // #msg โ€” placeholder + let _ = (#msg,); + }; + } + + let value_type = format_ident!("{}Value", rec.name); + let key_type = format_ident!("{}Key", rec.name); + let buffer_tokens = rec.buffer.to_tokens(rec.capacity); + + let variant_idents: Vec = rec + .key_variants + .iter() + .map(|v| format_ident!("{}", to_pascal_case(v))) + .collect(); + + let connector_block = rec.connectors.first().map(|conn| { + let link_fn = match conn.direction { + ConnectorDirection::Outbound => format_ident!("link_to"), + ConnectorDirection::Inbound => format_ident!("link_from"), + }; + let todo_comment = match conn.direction { + ConnectorDirection::Outbound => { + "TODO: add .with_serializer(|v| serde_json::to_vec(v).map_err(Into::into))" + } + ConnectorDirection::Inbound => { + "TODO: add .with_deserializer(|bytes| serde_json::from_slice(bytes).map_err(Into::into))" + } + }; + quote! { + if let Some(addr) = key.link_address() { + let _ = #todo_comment; + reg.#link_fn(addr); + } + } + }); + + quote! { + for key in [ + #(#key_type::#variant_idents,)* + ] { + builder.configure::<#value_type>(key, |reg| { + reg.buffer_cfg(#buffer_tokens); + #connector_block + }); + } + } +} + +// โ”€โ”€ Utilities โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Convert a kebab-case or snake_case string to PascalCase. 
+/// +/// # Examples +/// ``` +/// # use aimdb_codegen::rust::to_pascal_case; +/// assert_eq!(to_pascal_case("indoor"), "Indoor"); +/// assert_eq!(to_pascal_case("gateway-01"), "Gateway01"); +/// assert_eq!(to_pascal_case("sensor-hub-01"), "SensorHub01"); +/// assert_eq!(to_pascal_case("sensor_hub_01"), "SensorHub01"); +/// ``` +pub fn to_pascal_case(s: &str) -> String { + s.split(['-', '_']) + .map(|part| { + let mut chars = part.chars(); + match chars.next() { + None => String::new(), + Some(first) => { + let upper: String = first.to_uppercase().collect(); + upper + chars.as_str() + } + } + }) + .collect() +} + +// โ”€โ”€ Tests โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[cfg(test)] +mod tests { + use super::*; + use crate::state::ArchitectureState; + + const SAMPLE_TOML: &str = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor", "outdoor", "garage"] +producers = ["sensor_task"] +consumers = ["dashboard", "anomaly_detector"] + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Temperature in degrees Celsius" + +[[records.fields]] +name = "humidity_percent" +type = "f64" +description = "Relative humidity 0-100" + +[[records.fields]] +name = "timestamp" +type = "u64" +description = "Unix timestamp in milliseconds" + +[[records.connectors]] +protocol = "mqtt" +direction = "outbound" +url = "mqtt://sensors/temp/{variant}" + +[[records]] +name = "OtaCommand" +buffer = "Mailbox" +key_prefix = "device.ota." 
+key_variants = ["gateway-01", "sensor-hub-01"] +producers = ["cloud_ota"] +consumers = ["updater"] + +[[records.fields]] +name = "action" +type = "String" +description = "Command: update, rollback, reboot" + +[[records.fields]] +name = "target_version" +type = "String" +description = "Target firmware version" + +[[records.connectors]] +protocol = "mqtt" +direction = "inbound" +url = "mqtt://ota/cmd/{variant}" +"#; + + fn state() -> ArchitectureState { + ArchitectureState::from_toml(SAMPLE_TOML).unwrap() + } + + fn generated() -> String { + generate_rust(&state()) + } + + #[test] + fn has_generated_header() { + let out = generated(); + assert!( + out.contains("@generated"), + "Missing @generated header:\n{out}" + ); + } + + #[test] + fn has_imports() { + let out = generated(); + assert!( + out.contains("use aimdb_core::buffer::BufferCfg;"), + "Missing BufferCfg import:\n{out}" + ); + assert!( + out.contains("use aimdb_core::builder::AimDbBuilder;"), + "Missing AimDbBuilder import:\n{out}" + ); + assert!( + out.contains("use aimdb_derive::RecordKey;"), + "Missing RecordKey import:\n{out}" + ); + assert!( + out.contains("use aimdb_executor::Spawn;"), + "Missing Spawn import:\n{out}" + ); + assert!( + out.contains("use serde::{Deserialize, Serialize};"), + "Missing serde import:\n{out}" + ); + } + + #[test] + fn value_struct_generated() { + let out = generated(); + assert!( + out.contains("pub struct TemperatureReadingValue"), + "Missing TemperatureReadingValue struct:\n{out}" + ); + assert!( + out.contains("pub celsius: f64,"), + "Missing celsius field:\n{out}" + ); + assert!( + out.contains("pub humidity_percent: f64,"), + "Missing humidity_percent field:\n{out}" + ); + assert!( + out.contains("pub timestamp: u64,"), + "Missing timestamp field:\n{out}" + ); + assert!( + out.contains("#[derive(Debug, Clone, Serialize, Deserialize)]"), + "Missing derives:\n{out}" + ); + } + + #[test] + fn key_enum_generated() { + let out = generated(); + assert!( + out.contains("pub 
enum TemperatureReadingKey"), + "Missing key enum:\n{out}" + ); + assert!( + out.contains("#[derive(RecordKey, Clone, Copy, PartialEq, Eq)]"), + "Missing RecordKey derive:\n{out}" + ); + assert!( + out.contains("#[key_prefix = \"sensors.temp.\"]"), + "Missing key_prefix:\n{out}" + ); + assert!( + out.contains("#[key = \"indoor\"]"), + "Missing indoor key attr:\n{out}" + ); + assert!( + out.contains("#[key = \"outdoor\"]"), + "Missing outdoor key attr:\n{out}" + ); + assert!( + out.contains("#[key = \"garage\"]"), + "Missing garage key attr:\n{out}" + ); + assert!(out.contains("Indoor,"), "Missing Indoor variant:\n{out}"); + assert!(out.contains("Outdoor,"), "Missing Outdoor variant:\n{out}"); + assert!(out.contains("Garage,"), "Missing Garage variant:\n{out}"); + } + + #[test] + fn link_address_substituted_per_variant() { + let out = generated(); + assert!( + out.contains("#[link_address = \"mqtt://sensors/temp/indoor\"]"), + "link_address not substituted for indoor:\n{out}" + ); + assert!( + out.contains("#[link_address = \"mqtt://sensors/temp/outdoor\"]"), + "link_address not substituted for outdoor:\n{out}" + ); + assert!( + out.contains("#[link_address = \"mqtt://sensors/temp/garage\"]"), + "link_address not substituted for garage:\n{out}" + ); + } + + #[test] + fn kebab_variants_to_pascal_case() { + let out = generated(); + assert!( + out.contains("pub enum OtaCommandKey"), + "Missing OtaCommandKey enum:\n{out}" + ); + assert!( + out.contains("Gateway01,"), + "gateway-01 should become Gateway01:\n{out}" + ); + assert!( + out.contains("SensorHub01,"), + "sensor-hub-01 should become SensorHub01:\n{out}" + ); + } + + #[test] + fn configure_schema_function_present() { + let out = generated(); + assert!( + out.contains( + "pub fn configure_schema(builder: &mut AimDbBuilder)" + ), + "Missing configure_schema function:\n{out}" + ); + } + + #[test] + fn configure_schema_spmc_buffer() { + let out = generated(); + // prettyplease may split struct literals across lines + 
assert!( + out.contains("BufferCfg::SpmcRing"), + "Missing SpmcRing buffer_cfg call:\n{out}" + ); + assert!( + out.contains("capacity: 256"), + "Missing capacity value:\n{out}" + ); + } + + #[test] + fn configure_schema_mailbox_buffer() { + let out = generated(); + assert!( + out.contains("BufferCfg::Mailbox"), + "Missing Mailbox buffer_cfg call:\n{out}" + ); + } + + #[test] + fn configure_schema_outbound_link_to() { + let out = generated(); + assert!( + out.contains("link_to(addr)"), + "Missing link_to call:\n{out}" + ); + } + + #[test] + fn configure_schema_inbound_link_from() { + let out = generated(); + assert!( + out.contains("link_from(addr)"), + "Missing link_from call:\n{out}" + ); + } + + #[test] + fn configure_schema_key_variants_iterated() { + let out = generated(); + assert!( + out.contains("TemperatureReadingKey::Indoor"), + "Missing Indoor in configure_schema:\n{out}" + ); + assert!( + out.contains("TemperatureReadingKey::Outdoor"), + "Missing Outdoor in configure_schema:\n{out}" + ); + assert!( + out.contains("TemperatureReadingKey::Garage"), + "Missing Garage in configure_schema:\n{out}" + ); + assert!( + out.contains("OtaCommandKey::Gateway01"), + "Missing Gateway01 in configure_schema:\n{out}" + ); + } + + // โ”€โ”€ to_pascal_case โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + #[test] + fn pascal_case_simple() { + assert_eq!(to_pascal_case("indoor"), "Indoor"); + assert_eq!(to_pascal_case("outdoor"), "Outdoor"); + } + + #[test] + fn pascal_case_kebab() { + assert_eq!(to_pascal_case("gateway-01"), "Gateway01"); + assert_eq!(to_pascal_case("sensor-hub-01"), "SensorHub01"); + } + + #[test] + fn pascal_case_snake() { + assert_eq!(to_pascal_case("sensor_hub_01"), "SensorHub01"); + } + + #[test] + fn pascal_case_already_capitalized() { + assert_eq!(to_pascal_case("Indoor"), "Indoor"); + } + + /// Snapshot: print the full generated 
output for manual review. + #[test] + fn snapshot_full_output() { + let out = generated(); + // Uncomment to inspect: + // eprintln!("{out}"); + assert!(!out.is_empty()); + } +} diff --git a/aimdb-codegen/src/state.rs b/aimdb-codegen/src/state.rs new file mode 100644 index 00000000..552e0344 --- /dev/null +++ b/aimdb-codegen/src/state.rs @@ -0,0 +1,305 @@ +//! AimDB Codegen โ€” architecture state types and TOML parser +//! +//! Deserialises `.aimdb/state.toml` into [`ArchitectureState`]. + +use serde::{Deserialize, Serialize}; + +// โ”€โ”€ Top-level state โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// The full contents of `.aimdb/state.toml`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ArchitectureState { + pub meta: Meta, + #[serde(default)] + pub records: Vec, + #[serde(default)] + pub decisions: Vec, +} + +impl ArchitectureState { + /// Parse from a TOML string (the contents of `state.toml`). + pub fn from_toml(s: &str) -> Result { + toml::from_str(s) + } + + /// Serialise back to a TOML string. + pub fn to_toml(&self) -> Result { + toml::to_string_pretty(self) + } +} + +// โ”€โ”€ Meta block โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// `[meta]` block โ€” version and timestamps. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Meta { + pub aimdb_version: String, + pub created_at: String, + pub last_modified: String, +} + +// โ”€โ”€ Record definition โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// One `[[records]]` entry. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RecordDef { + /// PascalCase name, e.g. 
`TemperatureReading`. + pub name: String, + /// Buffer type selection. + pub buffer: BufferType, + /// Required when `buffer == SpmcRing`. Ignored otherwise. + #[serde(default)] + pub capacity: Option, + /// Common key prefix, e.g. `"sensors.temp."`. + #[serde(default)] + pub key_prefix: String, + /// Concrete key variant strings, e.g. `["indoor", "outdoor", "garage"]`. + #[serde(default)] + pub key_variants: Vec, + /// Names of tasks that produce values into this record. + #[serde(default)] + pub producers: Vec, + /// Names of tasks that consume values from this record. + #[serde(default)] + pub consumers: Vec, + /// Value struct fields (agent-derived from datasheets / specs / conversation). + #[serde(default)] + pub fields: Vec, + /// External connector definitions. + #[serde(default)] + pub connectors: Vec, +} + +// โ”€โ”€ Buffer type โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// The three AimDB buffer primitives. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub enum BufferType { + SpmcRing, + SingleLatest, + Mailbox, +} + +impl BufferType { + /// Human-readable label used in Mermaid node annotations. + pub fn label(&self, capacity: Option) -> String { + match self { + BufferType::SpmcRing => { + let cap = capacity.unwrap_or(256); + format!("SpmcRing ยท {cap}") + } + BufferType::SingleLatest => "SingleLatest".to_string(), + BufferType::Mailbox => "Mailbox".to_string(), + } + } + + /// The `BufferCfg` expression emitted into generated Rust. 
+ pub fn rust_expr(&self, capacity: Option) -> String { + match self { + BufferType::SpmcRing => { + let cap = capacity.unwrap_or(256); + format!("BufferCfg::SpmcRing {{ capacity: {cap} }}") + } + BufferType::SingleLatest => "BufferCfg::SingleLatest".to_string(), + BufferType::Mailbox => "BufferCfg::Mailbox".to_string(), + } + } + + /// The `BufferCfg` expression as a token stream for use with `quote!`. + pub fn to_tokens(&self, capacity: Option) -> proc_macro2::TokenStream { + use quote::quote; + match self { + BufferType::SpmcRing => { + let cap = proc_macro2::Literal::usize_unsuffixed(capacity.unwrap_or(256)); + quote! { BufferCfg::SpmcRing { capacity: #cap } } + } + BufferType::SingleLatest => quote! { BufferCfg::SingleLatest }, + BufferType::Mailbox => quote! { BufferCfg::Mailbox }, + } + } +} + +// โ”€โ”€ Field definition โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// One `[[records.fields]]` entry โ€” a typed field in the value struct. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FieldDef { + pub name: String, + /// Rust primitive type string, e.g. `"f64"`, `"u64"`, `"String"`, `"bool"`. + #[serde(rename = "type")] + pub field_type: String, + #[serde(default)] + pub description: String, +} + +// โ”€โ”€ Connector definition โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// One `[[records.connectors]]` entry. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ConnectorDef { + /// Protocol identifier lower-case, e.g. `"mqtt"`, `"knx"`. + pub protocol: String, + /// `"outbound"` โ†’ `link_to`, `"inbound"` โ†’ `link_from`. + pub direction: ConnectorDirection, + /// URL template, may contain `{variant}` placeholder. + pub url: String, +} + +/// Connector data flow direction. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ConnectorDirection { + Outbound, + Inbound, +} + +// โ”€โ”€ Decision log entry โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// One `[[decisions]]` entry โ€” architectural rationale. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DecisionEntry { + pub record: String, + pub field: String, + pub chosen: String, + pub alternative: String, + pub reason: String, + pub timestamp: String, +} + +// โ”€โ”€ Tests โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[cfg(test)] +mod tests { + use super::*; + + const SAMPLE_TOML: &str = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor", "outdoor", "garage"] +producers = ["sensor_task"] +consumers = ["dashboard", "anomaly_detector"] + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Temperature in degrees Celsius" + +[[records.fields]] +name = "humidity_percent" +type = "f64" +description = "Relative humidity 0-100" + +[[records.fields]] +name = "timestamp" +type = "u64" +description = "Unix timestamp in milliseconds" + +[[records.connectors]] +protocol = "mqtt" +direction = "outbound" +url = "mqtt://sensors/temp/{variant}" + +[[records]] +name = "OtaCommand" +buffer = "Mailbox" +key_prefix = "device.ota." 
+key_variants = ["gateway-01"] +producers = ["cloud_ota_service"] +consumers = ["device_update_task"] + +[[records.fields]] +name = "action" +type = "String" +description = "Command action" + +[[decisions]] +record = "TemperatureReading" +field = "buffer" +chosen = "SpmcRing" +alternative = "SingleLatest" +reason = "Anomaly detector needs a sample window" +timestamp = "2026-02-22T14:20:00Z" +"#; + + #[test] + fn parses_meta() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + assert_eq!(state.meta.aimdb_version, "0.5.0"); + assert_eq!(state.meta.created_at, "2026-02-22T14:00:00Z"); + } + + #[test] + fn parses_records() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + assert_eq!(state.records.len(), 2); + + let r = &state.records[0]; + assert_eq!(r.name, "TemperatureReading"); + assert_eq!(r.buffer, BufferType::SpmcRing); + assert_eq!(r.capacity, Some(256)); + assert_eq!(r.key_prefix, "sensors.temp."); + assert_eq!(r.key_variants, vec!["indoor", "outdoor", "garage"]); + assert_eq!(r.producers, vec!["sensor_task"]); + assert_eq!(r.consumers, vec!["dashboard", "anomaly_detector"]); + } + + #[test] + fn parses_fields() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + let r = &state.records[0]; + assert_eq!(r.fields.len(), 3); + assert_eq!(r.fields[0].name, "celsius"); + assert_eq!(r.fields[0].field_type, "f64"); + assert_eq!(r.fields[0].description, "Temperature in degrees Celsius"); + } + + #[test] + fn parses_connectors() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + let r = &state.records[0]; + assert_eq!(r.connectors.len(), 1); + assert_eq!(r.connectors[0].protocol, "mqtt"); + assert_eq!(r.connectors[0].direction, ConnectorDirection::Outbound); + assert_eq!(r.connectors[0].url, "mqtt://sensors/temp/{variant}"); + } + + #[test] + fn parses_decisions() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + assert_eq!(state.decisions.len(), 1); + 
assert_eq!(state.decisions[0].record, "TemperatureReading"); + assert_eq!(state.decisions[0].chosen, "SpmcRing"); + } + + #[test] + fn buffer_label_spmc() { + assert_eq!(BufferType::SpmcRing.label(Some(256)), "SpmcRing ยท 256"); + } + + #[test] + fn buffer_label_single_latest() { + assert_eq!(BufferType::SingleLatest.label(None), "SingleLatest"); + } + + #[test] + fn buffer_rust_expr_mailbox() { + assert_eq!(BufferType::Mailbox.rust_expr(None), "BufferCfg::Mailbox"); + } + + #[test] + fn round_trips_toml() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + let serialised = state.to_toml().unwrap(); + let state2 = ArchitectureState::from_toml(&serialised).unwrap(); + assert_eq!(state.records.len(), state2.records.len()); + assert_eq!(state.decisions.len(), state2.decisions.len()); + } +} diff --git a/aimdb-codegen/src/validate.rs b/aimdb-codegen/src/validate.rs new file mode 100644 index 00000000..88e7c4a6 --- /dev/null +++ b/aimdb-codegen/src/validate.rs @@ -0,0 +1,409 @@ +//! Architecture state validator +//! +//! Checks an [`ArchitectureState`] for structural and semantic errors before +//! code is generated or proposals are confirmed. + +use crate::state::{ArchitectureState, BufferType}; + +/// A single validation problem. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ValidationError { + /// Human-readable description of the problem. + pub message: String, + /// Location in state.toml that caused the error (e.g. `records[0].fields[1]`). + pub location: String, + /// Whether this blocks code generation (`Error`) or is advisory (`Warning`). + pub severity: Severity, +} + +/// Severity of a [`ValidationError`]. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Severity { + /// Blocks code generation โ€” generated code would be invalid or uncompilable. + Error, + /// Advisory โ€” generated code may still work but behaviour could be unexpected. 
+ Warning, +} + +impl std::fmt::Display for ValidationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let tag = match self.severity { + Severity::Error => "ERROR", + Severity::Warning => "WARN", + }; + write!(f, "[{}] {}: {}", tag, self.location, self.message) + } +} + +/// Supported Rust field types for `records.fields[*].type`. +pub const VALID_FIELD_TYPES: &[&str] = &[ + "f64", "f32", "u8", "u16", "u32", "u64", "i8", "i16", "i32", "i64", "bool", "String", +]; + +/// Validate an [`ArchitectureState`] and return all problems found. +/// +/// An empty `Vec` means the state is valid and codegen may proceed. +/// Any entry with [`Severity::Error`] should block generation. +pub fn validate(state: &ArchitectureState) -> Vec { + let mut errors: Vec = Vec::new(); + + validate_meta(state, &mut errors); + validate_records(state, &mut errors); + + errors +} + +/// Returns `true` if `validate()` produces no `Error`-severity issues. +pub fn is_valid(state: &ArchitectureState) -> bool { + !validate(state) + .iter() + .any(|e| e.severity == Severity::Error) +} + +// โ”€โ”€ Internal validators โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn validate_meta(state: &ArchitectureState, errors: &mut Vec) { + if state.meta.aimdb_version.is_empty() { + errors.push(ValidationError { + message: "aimdb_version must not be empty".to_string(), + location: "meta.aimdb_version".to_string(), + severity: Severity::Error, + }); + } +} + +fn validate_records(state: &ArchitectureState, errors: &mut Vec) { + let mut seen_names: Vec<&str> = Vec::new(); + + for (idx, rec) in state.records.iter().enumerate() { + let loc = format!("records[{idx}]"); + + // Name must be non-empty + if rec.name.is_empty() { + errors.push(ValidationError { + message: "record name must not be empty".to_string(), + location: loc.clone(), + severity: Severity::Error, + 
}); + continue; // Can't do further checks without a name + } + + // Name should start with an uppercase letter (PascalCase convention) + if !rec + .name + .chars() + .next() + .map(|c| c.is_uppercase()) + .unwrap_or(false) + { + errors.push(ValidationError { + message: format!( + "record name '{}' should start with an uppercase letter (PascalCase)", + rec.name + ), + location: format!("{loc}.name"), + severity: Severity::Warning, + }); + } + + // Duplicate record names + if seen_names.contains(&rec.name.as_str()) { + errors.push(ValidationError { + message: format!("duplicate record name '{}'", rec.name), + location: format!("{loc}.name"), + severity: Severity::Error, + }); + } else { + seen_names.push(&rec.name); + } + + // SpmcRing must have capacity > 0 + if rec.buffer == BufferType::SpmcRing { + match rec.capacity { + None => { + errors.push(ValidationError { + message: "SpmcRing requires 'capacity' to be set".to_string(), + location: format!("{loc}.capacity"), + severity: Severity::Error, + }); + } + Some(0) => { + errors.push(ValidationError { + message: "SpmcRing capacity must be > 0".to_string(), + location: format!("{loc}.capacity"), + severity: Severity::Error, + }); + } + _ => {} + } + } + + // Warn if capacity is set but buffer is not SpmcRing + if rec.buffer != BufferType::SpmcRing && rec.capacity.is_some() { + errors.push(ValidationError { + message: "capacity is only meaningful for SpmcRing; it will be ignored".to_string(), + location: format!("{loc}.capacity"), + severity: Severity::Warning, + }); + } + + // Warn if no key variants + if rec.key_variants.is_empty() { + errors.push(ValidationError { + message: format!( + "record '{}' has no key_variants โ€” the key enum will be empty and unusable", + rec.name + ), + location: format!("{loc}.key_variants"), + severity: Severity::Warning, + }); + } + + // Duplicate key variants + let mut seen_variants: Vec<&str> = Vec::new(); + for variant in &rec.key_variants { + if 
seen_variants.contains(&variant.as_str()) { + errors.push(ValidationError { + message: format!("duplicate key variant '{variant}'"), + location: format!("{loc}.key_variants"), + severity: Severity::Error, + }); + } else { + seen_variants.push(variant); + } + } + + // Warn if no fields + if rec.fields.is_empty() { + errors.push(ValidationError { + message: format!( + "record '{}' has no fields โ€” the value struct will be empty", + rec.name + ), + location: format!("{loc}.fields"), + severity: Severity::Warning, + }); + } + + // Validate field types + for (fidx, field) in rec.fields.iter().enumerate() { + if field.name.is_empty() { + errors.push(ValidationError { + message: "field name must not be empty".to_string(), + location: format!("{loc}.fields[{fidx}]"), + severity: Severity::Error, + }); + } + if !VALID_FIELD_TYPES.contains(&field.field_type.as_str()) { + errors.push(ValidationError { + message: format!( + "unsupported field type '{}' โ€” valid types: {}", + field.field_type, + VALID_FIELD_TYPES.join(", ") + ), + location: format!("{loc}.fields[{fidx}].type"), + severity: Severity::Error, + }); + } + } + + // Validate connectors + for (cidx, conn) in rec.connectors.iter().enumerate() { + if conn.url.is_empty() { + errors.push(ValidationError { + message: "connector URL must not be empty".to_string(), + location: format!("{loc}.connectors[{cidx}].url"), + severity: Severity::Error, + }); + } + if conn.protocol.is_empty() { + errors.push(ValidationError { + message: "connector protocol must not be empty".to_string(), + location: format!("{loc}.connectors[{cidx}].protocol"), + severity: Severity::Error, + }); + } + } + } +} + +// โ”€โ”€ Tests โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[cfg(test)] +mod tests { + use super::*; + use crate::state::ArchitectureState; + + const VALID_TOML: &str = r#" 
+[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor", "outdoor"] +producers = ["sensor_task"] +consumers = ["dashboard"] + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Temperature" + +[[records.connectors]] +protocol = "mqtt" +direction = "outbound" +url = "mqtt://sensors/temp/{variant}" +"#; + + fn valid_state() -> ArchitectureState { + ArchitectureState::from_toml(VALID_TOML).unwrap() + } + + #[test] + fn valid_state_has_no_errors() { + let errs = validate(&valid_state()); + let error_errs: Vec<_> = errs + .iter() + .filter(|e| e.severity == Severity::Error) + .collect(); + assert!(error_errs.is_empty(), "Unexpected errors: {error_errs:?}"); + } + + #[test] + fn is_valid_returns_true_for_clean_state() { + assert!(is_valid(&valid_state())); + } + + #[test] + fn detects_spmc_missing_capacity() { + let toml = VALID_TOML.replace("capacity = 256\n", ""); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_err = errs + .iter() + .any(|e| e.severity == Severity::Error && e.message.contains("capacity")); + assert!( + has_err, + "Should detect missing SpmcRing capacity:\n{errs:?}" + ); + } + + #[test] + fn detects_spmc_zero_capacity() { + let toml = VALID_TOML.replace("capacity = 256", "capacity = 0"); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_err = errs + .iter() + .any(|e| e.severity == Severity::Error && e.message.contains("capacity must be > 0")); + assert!(has_err, "Should detect zero capacity:\n{errs:?}"); + } + + #[test] + fn detects_duplicate_record_names() { + let toml = format!( + "{VALID_TOML}{}", + r#" +[[records]] +name = "TemperatureReading" +buffer = "SingleLatest" +key_variants = ["a"] + +[[records.fields]] +name = "value" +type = "f64" 
+description = "Value" +"# + ); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_err = errs + .iter() + .any(|e| e.severity == Severity::Error && e.message.contains("duplicate record name")); + assert!(has_err, "Should detect duplicate record name:\n{errs:?}"); + } + + #[test] + fn detects_duplicate_key_variants() { + let toml = VALID_TOML.replace( + r#"key_variants = ["indoor", "outdoor"]"#, + r#"key_variants = ["indoor", "indoor"]"#, + ); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_err = errs + .iter() + .any(|e| e.severity == Severity::Error && e.message.contains("duplicate key variant")); + assert!(has_err, "Should detect duplicate key variants:\n{errs:?}"); + } + + #[test] + fn detects_invalid_field_type() { + let toml = VALID_TOML.replace(r#"type = "f64""#, r#"type = "float64""#); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_err = errs + .iter() + .any(|e| e.severity == Severity::Error && e.message.contains("unsupported field type")); + assert!(has_err, "Should detect invalid field type:\n{errs:?}"); + } + + #[test] + fn detects_empty_connector_url() { + let toml = VALID_TOML.replace(r#"url = "mqtt://sensors/temp/{variant}""#, r#"url = """#); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_err = errs + .iter() + .any(|e| e.severity == Severity::Error && e.message.contains("URL must not be empty")); + assert!(has_err, "Should detect empty connector URL:\n{errs:?}"); + } + + #[test] + fn warning_for_non_pascal_case_name() { + let toml = VALID_TOML.replace( + "name = \"TemperatureReading\"", + "name = \"temperatureReading\"", + ); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_warn = errs + .iter() + .any(|e| e.severity == Severity::Warning && e.message.contains("uppercase")); + 
assert!(has_warn, "Should warn about non-PascalCase name:\n{errs:?}"); + } + + #[test] + fn warning_for_capacity_on_non_spmc() { + let toml = VALID_TOML.replace("buffer = \"SpmcRing\"", "buffer = \"SingleLatest\""); + let state = ArchitectureState::from_toml(&toml).unwrap(); + let errs = validate(&state); + let has_warn = errs.iter().any(|e| { + e.severity == Severity::Warning && e.message.contains("capacity is only meaningful") + }); + assert!( + has_warn, + "Should warn about capacity on non-SpmcRing:\n{errs:?}" + ); + } + + #[test] + fn display_format() { + let e = ValidationError { + message: "something wrong".to_string(), + location: "records[0].name".to_string(), + severity: Severity::Error, + }; + let s = format!("{e}"); + assert!(s.contains("[ERROR]"), "Display should show [ERROR]:\n{s}"); + assert!( + s.contains("records[0].name"), + "Display should show location:\n{s}" + ); + } +} From 5e297965d33647bfcb9f8b2716d7c088e2759c1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 24 Feb 2026 21:06:31 +0000 Subject: [PATCH 05/46] feat: add architecture agent tools for session management and record proposals - Implemented new tools for the architecture agent, including: - `get_architecture`: Retrieves the current architecture state. - `propose_add_record`: Proposes adding a new record with detailed schema. - `propose_modify_buffer`: Proposes changing the buffer type of an existing record. - `propose_add_connector`: Proposes adding a connector to an existing record. - `propose_modify_fields`: Proposes replacing fields of an existing record. - `propose_modify_key_variants`: Proposes updating key variants of an existing record. - `resolve_proposal`: Resolves a pending proposal with options to confirm, reject, or revise. - `remove_record`: Proposes the removal of an existing record. - `rename_record`: Proposes renaming an existing record. - `validate_against_instance`: Validates state against a live AimDB instance. 
- `get_buffer_metrics`: Retrieves live buffer metrics for records. - `save_memory`: Persists ideation context to memory. - `reset_session`: Resets the architecture session. - Added a new module `architecture.rs` to encapsulate the architecture agent tools. - Updated `mod.rs` to include the new architecture tools and re-export them for use. - Enhanced session management by initializing the session store and handling state transitions. --- tools/aimdb-mcp/Cargo.toml | 2 + tools/aimdb-mcp/assets/CONVENTIONS.md | 105 +++ tools/aimdb-mcp/src/architecture/conflicts.rs | 330 ++++++++ tools/aimdb-mcp/src/architecture/mod.rs | 299 +++++++ tools/aimdb-mcp/src/architecture/session.rs | 533 +++++++++++++ tools/aimdb-mcp/src/lib.rs | 1 + tools/aimdb-mcp/src/prompts/mod.rs | 642 ++++++++------- tools/aimdb-mcp/src/resources/architecture.rs | 161 ++++ tools/aimdb-mcp/src/resources/mod.rs | 24 +- tools/aimdb-mcp/src/server.rs | 365 +++++++++ tools/aimdb-mcp/src/tools/architecture.rs | 747 ++++++++++++++++++ tools/aimdb-mcp/src/tools/mod.rs | 6 + 12 files changed, 2936 insertions(+), 279 deletions(-) create mode 100644 tools/aimdb-mcp/assets/CONVENTIONS.md create mode 100644 tools/aimdb-mcp/src/architecture/conflicts.rs create mode 100644 tools/aimdb-mcp/src/architecture/mod.rs create mode 100644 tools/aimdb-mcp/src/architecture/session.rs create mode 100644 tools/aimdb-mcp/src/resources/architecture.rs create mode 100644 tools/aimdb-mcp/src/tools/architecture.rs diff --git a/tools/aimdb-mcp/Cargo.toml b/tools/aimdb-mcp/Cargo.toml index 6e7cbcf4..f8590978 100644 --- a/tools/aimdb-mcp/Cargo.toml +++ b/tools/aimdb-mcp/Cargo.toml @@ -27,6 +27,7 @@ path = "src/lib.rs" [dependencies] # AimDB dependencies aimdb-client = { version = "0.5.0", path = "../../aimdb-client" } +aimdb-codegen = { version = "0.5.0", path = "../../aimdb-codegen" } aimdb-core = { version = "0.5.0", path = "../../aimdb-core", features = [ "std", ] } @@ -55,6 +56,7 @@ tracing-subscriber = { version = "0.3", features 
= ["env-filter"] } # Utilities once_cell = "1.20" chrono = "0.4" +fs2 = "0.4" [dev-dependencies] tokio-test = "0.4" diff --git a/tools/aimdb-mcp/assets/CONVENTIONS.md b/tools/aimdb-mcp/assets/CONVENTIONS.md new file mode 100644 index 00000000..d7df097d --- /dev/null +++ b/tools/aimdb-mcp/assets/CONVENTIONS.md @@ -0,0 +1,105 @@ +# AimDB Architecture Diagram Conventions + +This file defines the visual language used in `.aimdb/architecture.mermaid`. +It is referenced by the `architecture_agent` MCP prompt. +**Do not edit the Mermaid file directly** โ€” it is generated from `.aimdb/state.toml`. + +--- + +## Node Shapes โ€” Buffer Type Encoding + +Each buffer type maps to a specific Mermaid node shape so the topology is +readable at a glance. + +| Buffer Type | Mermaid Shape Syntax | Rendered Shape | When Used | +|-------------|---------------------|---------------|-----------| +| `SpmcRing` | `(["Name\nSpmcRing ยท capacity"])` | Stadium / pill | High-frequency streams; every sample matters; multiple independent consumers | +| `SingleLatest` | `("Name\nSingleLatest")` | Rounded rectangle | Current state; only newest value relevant; config, UI state | +| `Mailbox` | `{"Name\nMailbox"}` | Diamond / rhombus | Command channel; latest instruction supersedes prior; actuation | + +### Examples + +```mermaid +flowchart LR + TEMP(["TemperatureReading\nSpmcRing ยท 256"]) + CFG("AppConfig\nSingleLatest") + CMD{"OtaCommand\nMailbox"} +``` + +--- + +## Arrow Styles โ€” Data Flow vs Connector Metadata + +Two arrow styles distinguish internal data flow from external connector wiring. 
+ +| Style | Syntax | Meaning | +|-------|--------|---------| +| Solid arrow | `A -->|label| B` | Data flow: producer writes, consumer reads | +| Dashed arrow | `A -.->|label| B` | Connector metadata: `link_to` or `link_from` | + +### Solid Arrow Labels + +| Label | Meaning | +|-------|---------| +| `produce` | A task writes values into the record | +| `consume` | A task reads values from the record | + +### Dashed Arrow Labels + +Dashed arrows connect records to a shared `MQTT`, `KNX`, or other connector node. +The label contains the direction and URL template. + +| Label format | Meaning | +|--------------|---------| +| `"link_to mqtt://topic/{variant}"` | DB โ†’ broker (outbound publish) | +| `"link_from mqtt://topic/{variant}"` | Broker โ†’ DB (inbound subscribe) | +| `"link_to knx://..."` | DB โ†’ KNX bus | +| `"link_from knx://..."` | KNX bus โ†’ DB | + +--- + +## Full Example + +```mermaid +flowchart LR + + %% โ”€โ”€ Records โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + TEMP(["TemperatureReading\nSpmcRing ยท 256"]) + FW_VER("FirmwareVersion\nSingleLatest") + OTA_CMD{"OtaCommand\nMailbox"} + + %% โ”€โ”€ Data flow (solid arrows) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + sensor_task -->|produce| TEMP + TEMP -->|consume| dashboard + TEMP -->|consume| anomaly_detector + + cloud_ota_service -->|produce| FW_VER + cloud_ota_service -->|produce| OTA_CMD + FW_VER -->|consume| device_update_task + OTA_CMD -->|consume| device_update_task + + %% โ”€โ”€ Connector metadata (dashed arrows) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + TEMP -.->|"link_to mqtt://sensors/temp/{variant}"| MQTT + MQTT -.->|"link_from mqtt://config/firmware/{variant}"| FW_VER + MQTT -.->|"link_from 
mqtt://ota/cmd/{variant}"| OTA_CMD +``` + +--- + +## Rules + +1. **Records only** appear as shaped nodes. Producers and consumers appear as + plain rectangular nodes (default Mermaid style โ€” no shape override). +2. **Connector buses** (`MQTT`, `KNX`, etc.) appear as plain nodes, connected + to records via dashed arrows only. +3. **Node labels** always include the record name on the first line and the + buffer type (with capacity for SpmcRing) on the second line, separated by + `\n`. +4. **Key variants** are NOT shown individually in the diagram. The record node + represents the entire key family. Variant-specific URLs appear in dashed + arrow labels using `{variant}` placeholder notation. +5. **Decision rationale** is NOT embedded in the diagram. It lives in the + `[[decisions]]` table in `state.toml` and is queryable via the + `aimdb://architecture/decisions` MCP resource. +6. The diagram is **always regenerated** from `state.toml` โ€” never edited by + hand. Run `aimdb generate` or confirm a proposal via the architecture agent. diff --git a/tools/aimdb-mcp/src/architecture/conflicts.rs b/tools/aimdb-mcp/src/architecture/conflicts.rs new file mode 100644 index 00000000..2dbe2c49 --- /dev/null +++ b/tools/aimdb-mcp/src/architecture/conflicts.rs @@ -0,0 +1,330 @@ +//! Conflict detection between `.aimdb/state.toml` and a running AimDB instance. +//! +//! Compares the agent's declared architecture against what is live in the +//! database, categorised by severity. + +use aimdb_codegen::{ArchitectureState, BufferType}; +use serde::{Deserialize, Serialize}; + +/// A detected discrepancy between state.toml and the running instance. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Conflict { + /// Record name (may be from state or instance). + pub record_name: String, + /// Category of conflict. + pub conflict_type: ConflictType, + /// Human-readable explanation. + pub message: String, + /// Severity of the conflict. 
+ pub severity: ConflictSeverity, +} + +/// Categories of conflict (mirrors the spec table). +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ConflictType { + /// In state.toml but not in running instance. + MissingInInstance, + /// In running instance but not in state.toml. + MissingInState, + /// Buffer type differs. + BufferMismatch, + /// Buffer capacity differs. + CapacityMismatch, + /// Connector URL differs. + ConnectorMismatch, +} + +/// Conflict severity. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ConflictSeverity { + /// Likely a stale build or misconfiguration โ€” blocks proposal confirmation. + Error, + /// Possibly intentional โ€” surface but don't block. + Warning, + /// Informational โ€” manually registered record, not agent-managed. + Info, +} + +/// Summary of conflict detection results. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ConflictReport { + pub conflicts: Vec, + pub error_count: usize, + pub warning_count: usize, + pub info_count: usize, + pub in_sync: bool, +} + +impl ConflictReport { + /// Build from a list of conflicts. + pub fn from_conflicts(conflicts: Vec) -> Self { + let error_count = conflicts + .iter() + .filter(|c| c.severity == ConflictSeverity::Error) + .count(); + let warning_count = conflicts + .iter() + .filter(|c| c.severity == ConflictSeverity::Warning) + .count(); + let info_count = conflicts + .iter() + .filter(|c| c.severity == ConflictSeverity::Info) + .count(); + let in_sync = error_count == 0 && warning_count == 0; + Self { + conflicts, + error_count, + warning_count, + info_count, + in_sync, + } + } +} + +/// Lightweight description of a record as reported by the running instance. +#[derive(Debug, Clone)] +pub struct LiveRecord { + /// The record's key string (e.g. `"sensors.temp.indoor"`). 
+ pub name: String, + /// Buffer type string as returned by AimX: "spmc_ring", "single_latest", "mailbox". + pub buffer_type: String, + /// Buffer capacity, if applicable. + pub buffer_capacity: Option, +} + +/// Map a state.toml `BufferType` to the AimX wire string. +fn state_buffer_to_wire(bt: &BufferType) -> &'static str { + match bt { + BufferType::SpmcRing => "spmc_ring", + BufferType::SingleLatest => "single_latest", + BufferType::Mailbox => "mailbox", + } +} + +/// Detect conflicts between `state` and the records visible in the running instance. +/// +/// `live_records` is the list of records returned by `list_records` for the +/// connected instance (across all key variants). Because state.toml uses a +/// prefix + variants model, we match by checking whether the live record name +/// starts with the `key_prefix` of each state record. +pub fn detect_conflicts(state: &ArchitectureState, live_records: &[LiveRecord]) -> ConflictReport { + let mut conflicts: Vec = Vec::new(); + + // โ”€โ”€ Records in state but not (or mismatched) in instance โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + for rec in &state.records { + // Collect live records that belong to this state record's key family + // (i.e. their name starts with key_prefix, or matches a full variant key) + let matching_live: Vec<&LiveRecord> = live_records + .iter() + .filter(|lr| { + if rec.key_prefix.is_empty() { + // Match exact name or any variant + rec.key_variants.contains(&lr.name) || lr.name == rec.name + } else { + lr.name.starts_with(&rec.key_prefix) + } + }) + .collect(); + + if matching_live.is_empty() { + conflicts.push(Conflict { + record_name: rec.name.clone(), + conflict_type: ConflictType::MissingInInstance, + message: format!( + "'{}' is declared in state.toml but not found in the running instance. 
\ + Codegen may not have run yet, or the binary has not been redeployed.", + rec.name + ), + severity: ConflictSeverity::Warning, + }); + continue; + } + + let expected_buffer_wire = state_buffer_to_wire(&rec.buffer); + + for lr in &matching_live { + // Buffer type mismatch + if lr.buffer_type != expected_buffer_wire && lr.buffer_type != "none" { + conflicts.push(Conflict { + record_name: lr.name.clone(), + conflict_type: ConflictType::BufferMismatch, + message: format!( + "'{}': state.toml expects buffer '{}' but instance reports '{}'. \ + Likely a stale build โ€” rerun `aimdb generate && cargo build`.", + lr.name, expected_buffer_wire, lr.buffer_type + ), + severity: ConflictSeverity::Error, + }); + continue; + } + + // Capacity mismatch (SpmcRing only) + if rec.buffer == BufferType::SpmcRing { + if let (Some(expected), Some(actual)) = (rec.capacity, lr.buffer_capacity) { + if expected != actual { + conflicts.push(Conflict { + record_name: lr.name.clone(), + conflict_type: ConflictType::CapacityMismatch, + message: format!( + "'{}': state.toml declares SpmcRing capacity {} but \ + instance reports capacity {}. \ + May be an intentional application-level override.", + lr.name, expected, actual + ), + severity: ConflictSeverity::Warning, + }); + } + } + } + } + } + + // โ”€โ”€ Records in instance but not in state โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + for lr in live_records { + let present_in_state = state.records.iter().any(|rec| { + if rec.key_prefix.is_empty() { + rec.key_variants.contains(&lr.name) || lr.name == rec.name + } else { + lr.name.starts_with(&rec.key_prefix) + } + }); + if !present_in_state { + conflicts.push(Conflict { + record_name: lr.name.clone(), + conflict_type: ConflictType::MissingInState, + message: format!( + "'{}' exists in the running instance but is not declared in state.toml. 
\ + It may be a manually registered record not managed by the architecture agent.", + lr.name + ), + severity: ConflictSeverity::Info, + }); + } + } + + ConflictReport::from_conflicts(conflicts) +} + +#[cfg(test)] +mod tests { + use super::*; + use aimdb_codegen::ArchitectureState; + + const STATE_TOML: &str = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:00:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor", "outdoor"] +producers = ["sensor_task"] +consumers = ["dashboard"] + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Celsius" +"#; + + fn state() -> ArchitectureState { + ArchitectureState::from_toml(STATE_TOML).unwrap() + } + + #[test] + fn in_sync_when_buffer_matches() { + let live = vec![ + LiveRecord { + name: "sensors.temp.indoor".to_string(), + buffer_type: "spmc_ring".to_string(), + buffer_capacity: Some(256), + }, + LiveRecord { + name: "sensors.temp.outdoor".to_string(), + buffer_type: "spmc_ring".to_string(), + buffer_capacity: Some(256), + }, + ]; + let report = detect_conflicts(&state(), &live); + assert!(report.in_sync, "Should be in sync: {:?}", report.conflicts); + } + + #[test] + fn detects_missing_in_instance() { + let report = detect_conflicts(&state(), &[]); + assert_eq!(report.warning_count, 1); + assert!(report.conflicts[0].conflict_type == ConflictType::MissingInInstance); + } + + #[test] + fn detects_buffer_mismatch() { + let live = vec![LiveRecord { + name: "sensors.temp.indoor".to_string(), + buffer_type: "single_latest".to_string(), + buffer_capacity: None, + }]; + let report = detect_conflicts(&state(), &live); + let has_mismatch = report + .conflicts + .iter() + .any(|c| c.conflict_type == ConflictType::BufferMismatch); + assert!( + has_mismatch, + "Should detect buffer mismatch: {:?}", + report.conflicts + ); + assert!(report.error_count > 0); + } + + #[test] + fn 
detects_capacity_mismatch() { + let live = vec![LiveRecord { + name: "sensors.temp.indoor".to_string(), + buffer_type: "spmc_ring".to_string(), + buffer_capacity: Some(1024), + }]; + let report = detect_conflicts(&state(), &live); + let has_cap = report + .conflicts + .iter() + .any(|c| c.conflict_type == ConflictType::CapacityMismatch); + assert!( + has_cap, + "Should detect capacity mismatch: {:?}", + report.conflicts + ); + } + + #[test] + fn detects_missing_in_state() { + let live = vec![ + LiveRecord { + name: "sensors.temp.indoor".to_string(), + buffer_type: "spmc_ring".to_string(), + buffer_capacity: Some(256), + }, + LiveRecord { + name: "some.other.record".to_string(), + buffer_type: "single_latest".to_string(), + buffer_capacity: None, + }, + ]; + let report = detect_conflicts(&state(), &live); + let has_missing = report + .conflicts + .iter() + .any(|c| c.conflict_type == ConflictType::MissingInState); + assert!( + has_missing, + "Should detect missing_in_state: {:?}", + report.conflicts + ); + assert_eq!(report.info_count, 1); + } +} diff --git a/tools/aimdb-mcp/src/architecture/mod.rs b/tools/aimdb-mcp/src/architecture/mod.rs new file mode 100644 index 00000000..638ad964 --- /dev/null +++ b/tools/aimdb-mcp/src/architecture/mod.rs @@ -0,0 +1,299 @@ +//! Architecture agent shared state +//! +//! Manages sessions and provides helpers for reading/writing +//! `.aimdb/state.toml` and running the codegen pipeline. +//! +//! The session state machine enforces the ideation loop: +//! 
`Idle โ†’ Gathering โ†’ Proposing โ†’ (resolve) โ†’ Gathering โ†’ ...` + +pub mod conflicts; +pub mod session; + +use aimdb_codegen::{generate_mermaid, generate_rust, ArchitectureState, RecordDef}; +use chrono::Utc; +use fs2::FileExt; +use once_cell::sync::OnceCell; +use serde::{Deserialize, Serialize}; +use session::SessionStore; +use std::{ + path::{Path, PathBuf}, + sync::{Arc, Mutex}, +}; + +// โ”€โ”€ Session store (module-level global, same pattern as CONNECTION_POOL) โ”€โ”€โ”€โ”€โ”€โ”€ + +static SESSION_STORE: OnceCell>> = OnceCell::new(); + +/// Initialise the session store (called once at server startup). +pub fn init_session_store() { + SESSION_STORE + .set(Arc::new(Mutex::new(SessionStore::default()))) + .ok(); +} + +/// Access the global session store. +pub fn session_store() -> Option>> { + SESSION_STORE.get().cloned() +} + +// โ”€โ”€ Default paths โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +pub fn default_state_path() -> PathBuf { + PathBuf::from(".aimdb/state.toml") +} + +pub fn default_mermaid_path() -> PathBuf { + PathBuf::from(".aimdb/architecture.mermaid") +} + +pub fn default_rust_path() -> PathBuf { + PathBuf::from("src/generated_schema.rs") +} + +pub fn default_memory_path() -> PathBuf { + PathBuf::from(".aimdb/memory.md") +} + +// โ”€โ”€ State I/O โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Read `.aimdb/state.toml` from the given path (or default). +/// +/// Returns `Ok(None)` when the file does not exist yet. 
+pub fn read_state(path: &Path) -> anyhow::Result> { + if !path.exists() { + return Ok(None); + } + let src = std::fs::read_to_string(path)?; + let state = ArchitectureState::from_toml(&src) + .map_err(|e| anyhow::anyhow!("parse error in {}: {}", path.display(), e))?; + Ok(Some(state)) +} + +/// Write state to disk at `path`, creating parent directories as needed. +pub fn write_state(path: &Path, state: &ArchitectureState) -> anyhow::Result<()> { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent)?; + } + let toml = state.to_toml()?; + std::fs::write(path, toml)?; + Ok(()) +} + +/// Write state to disk with an exclusive file lock, preventing concurrent +/// writes from corrupting state.toml. +pub fn write_state_locked(path: &Path, state: &ArchitectureState) -> anyhow::Result<()> { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent)?; + } + let toml = state.to_toml()?; + let file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(path)?; + file.lock_exclusive()?; + std::io::Write::write_all(&mut &file, toml.as_bytes())?; + file.unlock()?; + Ok(()) +} + +/// Write Mermaid and Rust artefacts derived from `state`. 
+pub fn write_artefacts( + state: &ArchitectureState, + mermaid_path: &Path, + rust_path: &Path, +) -> anyhow::Result { + let mermaid = generate_mermaid(state); + let rust = generate_rust(state); + + if let Some(p) = mermaid_path.parent() { + std::fs::create_dir_all(p)?; + } + if let Some(p) = rust_path.parent() { + std::fs::create_dir_all(p)?; + } + + std::fs::write(mermaid_path, &mermaid)?; + std::fs::write(rust_path, &rust)?; + + Ok(GeneratedFiles { + mermaid_path: mermaid_path.display().to_string(), + rust_path: rust_path.display().to_string(), + mermaid_lines: mermaid.lines().count(), + rust_lines: rust.lines().count(), + }) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GeneratedFiles { + pub mermaid_path: String, + pub rust_path: String, + pub mermaid_lines: usize, + pub rust_lines: usize, +} + +// โ”€โ”€ Proposal types โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// A pending architectural change awaiting human confirmation. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Proposal { + pub id: String, + pub change_type: String, + pub description: String, + pub change: ProposedChange, + pub created_at: String, +} + +/// The kinds of architectural change the agent can propose. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum ProposedChange { + /// Add a brand-new record to state.toml + AddRecord { record: RecordDef }, + /// Change the buffer type (and optionally capacity) of an existing record + ModifyBuffer { + record_name: String, + buffer: aimdb_codegen::BufferType, + capacity: Option, + }, + /// Add a connector to an existing record + AddConnector { + record_name: String, + connector: aimdb_codegen::ConnectorDef, + }, + /// Replace the fields of an existing record's value struct + ModifyFields { + record_name: String, + fields: Vec, + }, + /// Remove an existing record (cascades through Mermaid and codegen) + RemoveRecord { record_name: String }, + /// Rename an existing record (updates all references) + RenameRecord { old_name: String, new_name: String }, + /// Replace the key variants (and optionally key_prefix) of an existing record + ModifyKeyVariants { + record_name: String, + key_variants: Vec, + key_prefix: Option, + }, +} + +/// Resolution for a proposal. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum ProposalResolution { + Confirm, + Reject, + Revise, +} + +// โ”€โ”€ Change application โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Apply a confirmed `ProposedChange` to the given state, updating `last_modified`. 
+pub fn apply_change(state: &mut ArchitectureState, change: &ProposedChange) -> anyhow::Result<()> { + state.meta.last_modified = Utc::now().to_rfc3339(); + + match change { + ProposedChange::AddRecord { record } => { + // Replace if already exists (idempotent re-confirm) + if let Some(pos) = state.records.iter().position(|r| r.name == record.name) { + state.records[pos] = record.clone(); + } else { + state.records.push(record.clone()); + } + } + + ProposedChange::ModifyBuffer { + record_name, + buffer, + capacity, + } => { + let rec = state + .records + .iter_mut() + .find(|r| &r.name == record_name) + .ok_or_else(|| anyhow::anyhow!("record '{}' not found in state", record_name))?; + rec.buffer = buffer.clone(); + rec.capacity = *capacity; + } + + ProposedChange::AddConnector { + record_name, + connector, + } => { + let rec = state + .records + .iter_mut() + .find(|r| &r.name == record_name) + .ok_or_else(|| anyhow::anyhow!("record '{}' not found in state", record_name))?; + rec.connectors.push(connector.clone()); + } + + ProposedChange::ModifyFields { + record_name, + fields, + } => { + let rec = state + .records + .iter_mut() + .find(|r| &r.name == record_name) + .ok_or_else(|| anyhow::anyhow!("record '{}' not found in state", record_name))?; + rec.fields = fields.clone(); + } + + ProposedChange::RemoveRecord { record_name } => { + state.records.retain(|r| &r.name != record_name); + } + + ProposedChange::RenameRecord { old_name, new_name } => { + for rec in &mut state.records { + if &rec.name == old_name { + rec.name = new_name.clone(); + } + } + // Update decision log references + for d in &mut state.decisions { + if &d.record == old_name { + d.record = new_name.clone(); + } + } + } + + ProposedChange::ModifyKeyVariants { + record_name, + key_variants, + key_prefix, + } => { + let rec = state + .records + .iter_mut() + .find(|r| &r.name == record_name) + .ok_or_else(|| anyhow::anyhow!("record '{}' not found in state", record_name))?; + rec.key_variants = 
key_variants.clone(); + if let Some(prefix) = key_prefix { + rec.key_prefix = prefix.clone(); + } + } + } + + Ok(()) +} + +/// Ensure `.aimdb/state.toml` exists with an initialised `[meta]` block. +/// Returns the current state (existing or freshly created). +pub fn ensure_state_initialised(path: &Path) -> anyhow::Result { + if let Some(existing) = read_state(path)? { + return Ok(existing); + } + let state = ArchitectureState { + meta: aimdb_codegen::Meta { + aimdb_version: "0.5.0".to_string(), + created_at: Utc::now().to_rfc3339(), + last_modified: Utc::now().to_rfc3339(), + }, + records: Vec::new(), + decisions: Vec::new(), + }; + write_state(path, &state)?; + Ok(state) +} diff --git a/tools/aimdb-mcp/src/architecture/session.rs b/tools/aimdb-mcp/src/architecture/session.rs new file mode 100644 index 00000000..92e9280c --- /dev/null +++ b/tools/aimdb-mcp/src/architecture/session.rs @@ -0,0 +1,533 @@ +//! Session-scoped state machine for the architecture ideation loop. +//! +//! Enforces the transition sequence: +//! `Idle โ†’ Gathering โ†’ Proposing โ†’ (resolve) โ†’ Gathering โ†’ ...` +//! +//! Only one proposal may be pending at a time. Read-only tools +//! (`get_architecture`, `get_buffer_metrics`, `validate_against_instance`) +//! are allowed in any phase. + +use super::Proposal; +use chrono::Utc; +use serde::{Deserialize, Serialize}; +use std::sync::atomic::{AtomicU64, Ordering}; + +// โ”€โ”€ Monotonic ID generator (fixes millisecond collision) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +static PROPOSAL_COUNTER: AtomicU64 = AtomicU64::new(0); + +/// Generate a unique proposal ID. Uses a monotonic counter combined with +/// a timestamp prefix for human readability. Never collides. 
+pub fn next_proposal_id() -> String { + let seq = PROPOSAL_COUNTER.fetch_add(1, Ordering::Relaxed); + format!("prop-{}-{:04}", Utc::now().format("%Y%m%d%H%M%S"), seq) +} + +// โ”€โ”€ Session phases โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// The phases of the ideation loop. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum SessionPhase { + /// No active work. Waiting for `get_architecture` to start gathering. + Idle, + /// LLM is reading state and asking disambiguation questions. + Gathering, + /// A single proposal is pending human review. + Proposing, + /// A confirmed proposal is being applied to state.toml and artefacts. + Applying, +} + +impl SessionPhase { + /// Human-readable hint for the LLM about what to do next in this phase. + pub fn guidance(&self) -> &'static str { + match self { + SessionPhase::Idle => { + "Call get_architecture to read the current state and begin a new ideation cycle." + } + SessionPhase::Gathering => { + "Ask the user disambiguation questions, then call the appropriate propose_* tool \ + (propose_add_record, propose_modify_buffer, etc.) when you have enough context \ + to make a concrete proposal." + } + SessionPhase::Proposing => { + "A proposal is pending. Present it to the user and call \ + resolve_proposal with their decision (confirm/reject/revise)." + } + SessionPhase::Applying => { + "A proposal is being applied. Wait for the operation to complete." + } + } + } +} + +// โ”€โ”€ Gathering context โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Tracks what has been resolved during the Gathering phase. 
+/// +/// This is informational โ€” it helps error messages be specific about +/// what is still unresolved, but does NOT block the LLM from proposing. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct GatheringContext { + /// The target record name (if identified). + pub target_record: Option, + /// Whether buffer type has been disambiguated. + pub buffer_resolved: bool, + /// Whether fields/value struct has been discussed. + pub fields_resolved: bool, + /// Whether key variants have been enumerated. + pub variants_resolved: bool, + /// Free-form notes the LLM can attach during gathering. + pub notes: Vec, +} + +// โ”€โ”€ Session โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// A single ideation session. One session is active at a time. +/// The session owns at most ONE pending proposal. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Session { + /// Unique session identifier. + pub id: String, + /// Current phase of the ideation loop. + pub phase: SessionPhase, + /// When this session was created. + pub created_at: String, + /// Context accumulated during the Gathering phase. + pub gathering: GatheringContext, + /// The single pending proposal (only set in Proposing phase). + pub pending_proposal: Option, + /// Count of proposals resolved in this session (for statistics). + pub resolved_count: u32, +} + +impl Default for Session { + fn default() -> Self { + Self::new() + } +} + +impl Session { + /// Create a new session in the Idle phase. 
+ pub fn new() -> Self { + let id = format!("session-{}", Utc::now().format("%Y%m%d%H%M%S%3f")); + Self { + id, + phase: SessionPhase::Idle, + created_at: Utc::now().to_rfc3339(), + gathering: GatheringContext::default(), + pending_proposal: None, + resolved_count: 0, + } + } +} + +// โ”€โ”€ Session error โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Error produced by invalid state transitions. Includes phase-aware +/// guidance so the LLM knows exactly what to do next. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SessionError { + /// Current phase when the invalid operation was attempted. + pub current_phase: SessionPhase, + /// What the caller tried to do. + pub attempted_action: String, + /// Human-readable guidance for the LLM. + pub guidance: String, +} + +impl std::fmt::Display for SessionError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Invalid in phase {:?}: {}. Next step: {}", + self.current_phase, self.attempted_action, self.guidance + ) + } +} + +// โ”€โ”€ Gathering update โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Partial update to gathering context. +#[derive(Debug, Clone, Default, Deserialize)] +pub struct GatheringUpdate { + pub target_record: Option, + pub buffer_resolved: Option, + pub fields_resolved: Option, + pub variants_resolved: Option, + pub note: Option, +} + +// โ”€โ”€ Session store โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Replaces the old `ProposalStore`. Manages a single active session +/// with at most one pending proposal. 
#[derive(Debug, Default)]
pub struct SessionStore {
    /// The active session, if any. `None` means no session has started.
    pub active: Option<Session>,
}

impl SessionStore {
    // ── Transition: Idle → Gathering (triggered by get_architecture) ─────

    /// Called when `get_architecture` is invoked. Creates a session if
    /// none exists, and transitions Idle → Gathering.
    ///
    /// If already in Gathering/Proposing, this is a no-op read (allowed
    /// at any time — reading state is always safe).
    pub fn on_get_architecture(&mut self) -> &Session {
        // Lazily create the session on first read; repeated calls reuse it.
        let session = self.active.get_or_insert_with(Session::new);
        if session.phase == SessionPhase::Idle {
            session.phase = SessionPhase::Gathering;
        }
        session
    }

    // ── Transition: Gathering → Proposing (triggered by propose_* tools) ──

    /// Called when `propose_add_record`, `propose_modify_*`, `remove_record`,
    /// or `rename_record` is invoked. Validates the transition and stores the single proposal.
+ pub fn on_propose(&mut self, proposal: Proposal) -> Result<&Proposal, SessionError> { + let session = self.active.as_mut().ok_or_else(|| SessionError { + current_phase: SessionPhase::Idle, + attempted_action: "propose_* (no active session โ€” call get_architecture first)" + .to_string(), + guidance: SessionPhase::Idle.guidance().to_string(), + })?; + + match session.phase { + SessionPhase::Idle => Err(SessionError { + current_phase: SessionPhase::Idle, + attempted_action: "propose_* (session is Idle โ€” call get_architecture first)" + .to_string(), + guidance: SessionPhase::Idle.guidance().to_string(), + }), + SessionPhase::Gathering => { + session.phase = SessionPhase::Proposing; + session.pending_proposal = Some(proposal); + Ok(session.pending_proposal.as_ref().unwrap()) + } + SessionPhase::Proposing => Err(SessionError { + current_phase: SessionPhase::Proposing, + attempted_action: + "propose_* (a proposal is already pending โ€” resolve or reset first)".to_string(), + guidance: SessionPhase::Proposing.guidance().to_string(), + }), + SessionPhase::Applying => Err(SessionError { + current_phase: SessionPhase::Applying, + attempted_action: "propose_* (cannot propose while applying โ€” wait or reset)" + .to_string(), + guidance: SessionPhase::Applying.guidance().to_string(), + }), + } + } + + // โ”€โ”€ Transition: Proposing โ†’ Gathering (triggered by resolve_proposal) + + /// Called when `resolve_proposal` is invoked. Takes the pending proposal + /// out of the session and transitions back to Gathering. 
+ pub fn on_resolve(&mut self, proposal_id: &str) -> Result { + let session = self.active.as_mut().ok_or_else(|| SessionError { + current_phase: SessionPhase::Idle, + attempted_action: "resolve_proposal".to_string(), + guidance: SessionPhase::Idle.guidance().to_string(), + })?; + + if session.phase != SessionPhase::Proposing { + return Err(SessionError { + current_phase: session.phase, + attempted_action: "resolve_proposal".to_string(), + guidance: session.phase.guidance().to_string(), + }); + } + + let proposal = session + .pending_proposal + .take() + .ok_or_else(|| SessionError { + current_phase: SessionPhase::Proposing, + attempted_action: "resolve_proposal (no proposal found)".to_string(), + guidance: "Internal error: phase is Proposing but no proposal stored.".to_string(), + })?; + + if proposal.id != proposal_id { + // Put it back โ€” wrong ID + let correct_id = proposal.id.clone(); + session.pending_proposal = Some(proposal); + return Err(SessionError { + current_phase: SessionPhase::Proposing, + attempted_action: format!("resolve_proposal with id '{proposal_id}'"), + guidance: format!( + "The pending proposal has id '{}'. Use that id instead.", + correct_id + ), + }); + } + + session.resolved_count += 1; + session.phase = SessionPhase::Gathering; + session.gathering = GatheringContext::default(); + + Ok(proposal) + } + + // โ”€โ”€ Update gathering context โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + /// Let the LLM annotate what it has resolved during gathering. 
+ pub fn update_gathering(&mut self, update: GatheringUpdate) -> Result<(), SessionError> { + let session = self.active.as_mut().ok_or_else(|| SessionError { + current_phase: SessionPhase::Idle, + attempted_action: "update_gathering".to_string(), + guidance: SessionPhase::Idle.guidance().to_string(), + })?; + + if session.phase != SessionPhase::Gathering { + return Err(SessionError { + current_phase: session.phase, + attempted_action: "update_gathering".to_string(), + guidance: session.phase.guidance().to_string(), + }); + } + + if let Some(name) = update.target_record { + session.gathering.target_record = Some(name); + } + if let Some(v) = update.buffer_resolved { + session.gathering.buffer_resolved = v; + } + if let Some(v) = update.fields_resolved { + session.gathering.fields_resolved = v; + } + if let Some(v) = update.variants_resolved { + session.gathering.variants_resolved = v; + } + if let Some(note) = update.note { + session.gathering.notes.push(note); + } + Ok(()) + } + + /// Reset the session entirely (user wants to start over). 
+ pub fn reset(&mut self) { + self.active = None; + } +} + +// โ”€โ”€ Tests โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[cfg(test)] +mod tests { + use super::*; + use crate::architecture::ProposedChange; + use aimdb_codegen::{BufferType, FieldDef, RecordDef}; + + fn test_proposal(id: &str) -> Proposal { + Proposal { + id: id.to_string(), + change_type: "add_record".to_string(), + description: "Test proposal".to_string(), + change: ProposedChange::AddRecord { + record: RecordDef { + name: "TestRecord".to_string(), + buffer: BufferType::SingleLatest, + capacity: None, + key_prefix: "test.".to_string(), + key_variants: vec!["alpha".to_string()], + producers: vec!["producer".to_string()], + consumers: vec!["consumer".to_string()], + fields: vec![FieldDef { + name: "value".to_string(), + field_type: "f64".to_string(), + description: "Test value".to_string(), + }], + connectors: vec![], + }, + }, + created_at: Utc::now().to_rfc3339(), + } + } + + #[test] + fn test_idle_to_gathering() { + let mut store = SessionStore::default(); + assert!(store.active.is_none()); + + let session = store.on_get_architecture(); + assert_eq!(session.phase, SessionPhase::Gathering); + assert!(store.active.is_some()); + } + + #[test] + fn test_get_architecture_idempotent_in_gathering() { + let mut store = SessionStore::default(); + let id1 = store.on_get_architecture().id.clone(); + let id2 = store.on_get_architecture().id.clone(); + assert_eq!(id1, id2, "Should reuse the same session"); + assert_eq!( + store.active.as_ref().unwrap().phase, + SessionPhase::Gathering + ); + } + + #[test] + fn test_gathering_to_proposing() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + + let proposal = test_proposal("prop-001"); + let result = store.on_propose(proposal); + assert!(result.is_ok()); + 
assert_eq!(result.unwrap().id, "prop-001"); + assert_eq!( + store.active.as_ref().unwrap().phase, + SessionPhase::Proposing + ); + } + + #[test] + fn test_double_propose_rejected() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + + store.on_propose(test_proposal("prop-001")).unwrap(); + + let result = store.on_propose(test_proposal("prop-002")); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert_eq!(err.current_phase, SessionPhase::Proposing); + } + + #[test] + fn test_propose_without_session_rejected() { + let mut store = SessionStore::default(); + let result = store.on_propose(test_proposal("prop-001")); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert_eq!(err.current_phase, SessionPhase::Idle); + } + + #[test] + fn test_propose_in_idle_rejected() { + let mut store = SessionStore { + active: Some(Session::new()), + }; + assert_eq!(store.active.as_ref().unwrap().phase, SessionPhase::Idle); + + let result = store.on_propose(test_proposal("prop-001")); + assert!(result.is_err()); + } + + #[test] + fn test_resolve_returns_to_gathering() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + store.on_propose(test_proposal("prop-001")).unwrap(); + + let proposal = store.on_resolve("prop-001").unwrap(); + assert_eq!(proposal.id, "prop-001"); + assert_eq!( + store.active.as_ref().unwrap().phase, + SessionPhase::Gathering + ); + assert_eq!(store.active.as_ref().unwrap().resolved_count, 1); + } + + #[test] + fn test_resolve_wrong_id_rejected() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + store.on_propose(test_proposal("prop-001")).unwrap(); + + let result = store.on_resolve("prop-999"); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(err.guidance.contains("prop-001")); + + // Proposal should still be pending + assert_eq!( + store.active.as_ref().unwrap().phase, + SessionPhase::Proposing + ); + 
assert!(store.active.as_ref().unwrap().pending_proposal.is_some()); + } + + #[test] + fn test_resolve_not_in_proposing_rejected() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + + let result = store.on_resolve("prop-001"); + assert!(result.is_err()); + assert_eq!(result.unwrap_err().current_phase, SessionPhase::Gathering); + } + + #[test] + fn test_reset_clears_session() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + store.on_propose(test_proposal("prop-001")).unwrap(); + + store.reset(); + assert!(store.active.is_none()); + } + + #[test] + fn test_proposal_id_uniqueness() { + let ids: Vec = (0..100).map(|_| next_proposal_id()).collect(); + let unique: std::collections::HashSet<&String> = ids.iter().collect(); + assert_eq!(ids.len(), unique.len(), "All IDs should be unique"); + } + + #[test] + fn test_full_cycle() { + let mut store = SessionStore::default(); + + // Cycle 1: get_architecture โ†’ propose โ†’ resolve(confirm) + store.on_get_architecture(); + store.on_propose(test_proposal("prop-001")).unwrap(); + store.on_resolve("prop-001").unwrap(); + + // Should be back in Gathering for the next record + assert_eq!( + store.active.as_ref().unwrap().phase, + SessionPhase::Gathering + ); + + // Cycle 2: propose again + store.on_propose(test_proposal("prop-002")).unwrap(); + store.on_resolve("prop-002").unwrap(); + + assert_eq!(store.active.as_ref().unwrap().resolved_count, 2); + } + + #[test] + fn test_update_gathering() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + + store + .update_gathering(GatheringUpdate { + target_record: Some("Temperature".to_string()), + buffer_resolved: Some(true), + ..Default::default() + }) + .unwrap(); + + let ctx = &store.active.as_ref().unwrap().gathering; + assert_eq!(ctx.target_record.as_deref(), Some("Temperature")); + assert!(ctx.buffer_resolved); + assert!(!ctx.fields_resolved); + } + + #[test] + fn 
test_update_gathering_not_in_gathering_rejected() { + let mut store = SessionStore::default(); + store.on_get_architecture(); + store.on_propose(test_proposal("prop-001")).unwrap(); + + let result = store.update_gathering(GatheringUpdate::default()); + assert!(result.is_err()); + } +} diff --git a/tools/aimdb-mcp/src/lib.rs b/tools/aimdb-mcp/src/lib.rs index 8301f4cd..6162bada 100644 --- a/tools/aimdb-mcp/src/lib.rs +++ b/tools/aimdb-mcp/src/lib.rs @@ -21,6 +21,7 @@ //! - **Version**: 2025-06-18 //! - **Capabilities**: Tools (7), Resources (5), Prompts (2) +pub mod architecture; pub mod connection; pub mod error; pub mod prompts; diff --git a/tools/aimdb-mcp/src/prompts/mod.rs b/tools/aimdb-mcp/src/prompts/mod.rs index f4c12151..325087b8 100644 --- a/tools/aimdb-mcp/src/prompts/mod.rs +++ b/tools/aimdb-mcp/src/prompts/mod.rs @@ -7,10 +7,31 @@ use crate::protocol::{Prompt, PromptMessage, PromptMessageContent}; /// Get all available prompts pub fn list_prompts() -> Vec { vec![ + // โ”€โ”€ Architecture agent prompts (M11) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ Prompt { - name: "schema-help".to_string(), + name: "architecture_agent".to_string(), description: Some( - "Learn how to query and interpret record schemas before setting values".to_string(), + "Core system prompt for the AimDB architecture agent: buffer semantics, \ + ideation loop, proposal format, and confirmation protocol" + .to_string(), + ), + arguments: None, + }, + Prompt { + name: "onboarding".to_string(), + description: Some( + "Guided first architecture session: walks the user through describing \ + their system and producing their first state.toml" + .to_string(), + ), + arguments: None, + }, + Prompt { + name: "breaking_change_review".to_string(), + description: Some( + "Safety protocol for schema evolution: what to check when a buffer type change \ + or record removal could break the running instance" + .to_string(), ), arguments: None, }, @@ 
-25,275 +46,14 @@ pub fn list_prompts() -> Vec { /// Get a specific prompt by name pub fn get_prompt(name: &str) -> Option> { match name { - "schema-help" => Some(get_schema_help_prompt()), + "architecture_agent" => Some(get_architecture_agent_prompt()), + "onboarding" => Some(get_onboarding_prompt()), + "breaking_change_review" => Some(get_breaking_change_review_prompt()), "troubleshooting" => Some(get_troubleshooting_prompt()), _ => None, } } -/// Schema help prompt -fn get_schema_help_prompt() -> Vec { - let text = r#"# Working with AimDB Record Schemas - -## Using query_schema Tool - -The `query_schema` tool helps you understand the structure and types of AimDB records before setting values. - -### Step-by-Step Guide - -#### 1. Query the Schema -``` -Use tool: query_schema -Parameters: - - socket_path: Path to AimDB instance - - record_name: Name of the record (e.g., "server::Temperature") - - include_example: true (default) - includes current value as example - -Returns: - - record_name: Name of the record - - schema: JSON Schema describing the structure - - metadata: Buffer info, producer/consumer counts, timestamps - - example: Current value (if include_example=true) - - inferred_at: When the schema was generated -``` - -#### 2. Interpret the Schema -The schema follows JSON Schema format: -- **type**: "object", "array", "string", "number", "integer", "boolean", "null" -- **properties**: For objects, lists all fields and their types -- **required**: Array of field names that must be provided -- **items**: For arrays, the type of array elements - -#### 3. Use Field Names as Semantic Hints -Field names carry meaning - use them to infer units and formats: -- `celsius`, `fahrenheit` โ†’ Temperature units -- `timestamp`, `created_at`, `updated_at` โ†’ Unix timestamp (seconds since epoch) -- `sensor_id`, `user_id`, `device_id` โ†’ Identifier strings -- `percent`, `percentage` โ†’ Values 0-100 -- `meters`, `kilometers`, `seconds` โ†’ Unit indicators - -#### 4. 
Check the Example Value -The example shows a real value from the database: -- Use it as a template for the expected format -- See what values look like in practice -- Understand nested structure layout - -## Best Practices - -### Always Query Schema Before Setting Values -โŒ **Bad**: Guess the format and try to set -``` -User: "Set temperature to 75" -You: *Sets* {"temperature": 75} โ†’ ERROR! -``` - -โœ… **Good**: Query schema first, then ask for clarification -``` -User: "Set temperature to 75" -You: *Queries schema* โ†’ Sees field is called "celsius" -You: "I see the field is called 'celsius'. Did you mean 75ยฐF (23.9ยฐC) or 75ยฐC?" -``` - -### Ask Users for Clarification on Units -When field names suggest units but input is ambiguous: - -**Temperature Example**: -``` -Schema has: {"celsius": {"type": "number"}} -User says: "Set temperature to 75" - -Ask: "The 'celsius' field expects Celsius. Did you mean: - - 75ยฐF (approximately 23.9ยฐC)? - - Or 75ยฐC directly?" -``` - -**Timestamp Example**: -``` -Schema has: {"timestamp": {"type": "number"}} -User says: "Update the reading" - -Ask: "Should I use the current time for the 'timestamp' field? - (The example shows: 1730649600.123, which is a Unix timestamp)" -``` - -### Respect Required Fields -All fields in the `required` array must be provided: -``` -Schema shows: -{ - "properties": { - "celsius": {"type": "number"}, - "sensor_id": {"type": "string"}, - "timestamp": {"type": "number"} - }, - "required": ["celsius", "sensor_id", "timestamp"] -} - -When setting, you MUST provide all three fields. -Ask user for missing values if they didn't specify them. -``` - -### Check Source Code for Definitive Information -For production systems, suggest checking the Rust type definition: -``` -"For authoritative information about units and constraints, - I recommend checking the Rust source code where this record type is defined. - The field names suggest 'celsius' is in Celsius, but the code will confirm." 
-``` - -## Common Patterns - -### Setting a Record Value -1. Query schema to understand structure -2. Identify field names and types -3. Ask user to clarify ambiguous values (units, timestamps, etc.) -4. Confirm all required fields are provided -5. Use set_record with complete value - -### Discovering Fields -``` -User: "What fields does the SensorData record have?" - -1. Query schema -2. List all properties with their types -3. Highlight required vs optional fields -4. Show example value for reference -``` - -### Type Validation -``` -User tries to set: {"port": "8080"} -Schema shows: {"port": {"type": "integer"}} - -Response: "Error: 'port' should be a number (integer), not a string. - Try: {\"port\": 8080}" -``` - -## Understanding Schema Output - -### Simple Types -```json -{ - "type": "integer" // Whole numbers: 42, -10, 0 - "type": "number" // Decimals: 3.14, -2.5, 42.0 - "type": "string" // Text: "hello", "sensor-01" - "type": "boolean" // true or false - "type": "null" // null value -} -``` - -### Objects (Structs) -```json -{ - "type": "object", - "properties": { - "celsius": {"type": "number"}, - "sensor_id": {"type": "string"} - }, - "required": ["celsius", "sensor_id"] -} -``` -Means: `{"celsius": 22.5, "sensor_id": "sensor-01"}` - -### Arrays -```json -{ - "type": "array", - "items": {"type": "integer"} -} -``` -Means: `[1, 2, 3, 4]` - -### Nested Objects -```json -{ - "type": "object", - "properties": { - "sensor": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "location": {"type": "string"} - } - }, - "reading": {"type": "number"} - } -} -``` -Means: `{"sensor": {"id": "s1", "location": "Room A"}, "reading": 42.5}` - -## When Schema Query Fails - -### Record Not Found -If query_schema fails with "Record not found": -1. Use list_records to see available records -2. Check spelling - record names are case-sensitive -3. 
Include namespace prefix (e.g., "server::Temperature" not "Temperature") - -### Connection Issues -If query_schema fails to connect: -1. Use discover_instances to find available instances -2. Verify socket path is correct -3. Check that AimDB instance is running - -### Empty Array Items -If schema shows `"items": {}` for an array: -``` -The array is currently empty - item type cannot be determined. -Ask user what type of items should be in the array, -or wait for the array to have values before querying schema. -``` - -## Tips - -- **Query early**: Check schema before attempting to set values -- **Use examples**: The example value is your best guide -- **Ask, don't guess**: When units/formats are unclear, ask the user -- **Validate types**: Ensure numbers are numbers, strings are strings -- **Check metadata**: Producer/consumer counts tell you if record is active -- **Field names matter**: They're your primary hint about semantics - -## Example Conversation - -``` -User: "Set the temperature to 72 degrees" - -You: *Queries schema for "server::Temperature"* - -Schema returns: -{ - "properties": { - "celsius": {"type": "number"}, - "sensor_id": {"type": "string"}, - "timestamp": {"type": "number"} - }, - "required": ["celsius", "sensor_id", "timestamp"], - "example": {"celsius": 22.5, "sensor_id": "sensor-01", "timestamp": 1730649600.123} -} - -You: "I can see the temperature record has these required fields: - - celsius (number) - - sensor_id (string) - - timestamp (number) - - A few questions: - 1. Did you mean 72ยฐF (which is 22.2ยฐC) or 72ยฐC directly? - 2. Which sensor? (The current example uses 'sensor-01') - 3. Should I use the current time for the timestamp? - - Once you clarify, I'll set the values." 
-``` -"#; - - vec![PromptMessage { - role: "user".to_string(), - content: PromptMessageContent { - content_type: "text".to_string(), - text: text.to_string(), - }, - }] -} - /// Troubleshooting prompt fn get_troubleshooting_prompt() -> Vec { let text = r#"# AimDB MCP Server Troubleshooting @@ -386,6 +146,340 @@ When reporting issues, include: }] } +// โ”€โ”€ Architecture agent prompt content โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Core architecture agent system prompt. +/// +/// Encodes AimDB buffer semantics, the ideation loop, disambiguation question +/// patterns, proposal format, and the confirmation protocol. Versioned alongside +/// AimDB โ€” when this improves, all connected users benefit without reconfiguring. +fn get_architecture_agent_prompt() -> Vec { + let text = r#"# AimDB Architecture Agent + +You are an AimDB architecture agent. Your role is to help developers design +data architectures for AimDB instances through conversation โ€” without them +ever touching a graph editor or writing boilerplate. + +## Your Output + +Every session produces three artefacts: +1. `.aimdb/state.toml` โ€” the structured decision record (source of truth) +2. `.aimdb/architecture.mermaid` โ€” a read-only diagram projected from state.toml +3. `src/generated_schema.rs` โ€” compilable Rust using the actual AimDB API + +These are **outputs** of the conversation. The human never edits them directly. + +## AimDB Buffer Types โ€” Your Semantic Vocabulary + +Every architectural decision resolves to one of three buffer types. +These are not IoT-specific โ€” they are universal data primitives. + +### SpmcRing { capacity: usize } +High-frequency stream. Every value matters. Multiple independent consumers. +- Use when: telemetry, sensor readings, event logs, interaction streams +- Ask: "Does the consumer need every sample, or just the latest?" +- Ask: "How many systems read this independently?" 
+- Capacity rule: `data_rate_hz ร— lag_tolerance_seconds`, round up to power-of-2 + +### SingleLatest +Current state. Only the most recent value matters. Intermediates discarded. +- Use when: configuration, experiment flags, UI state, firmware target version +- Ask: "If two updates arrive before consumption, does the consumer need both?" +- Key distinction from Mailbox: state is *read* on demand; commands are *acted upon* + +### Mailbox +Command channel. Latest instruction supersedes all prior. Single slot, overwrite. +- Use when: device control, OTA commands, actuation, one-shot triggers +- Ask: "Is this data passive state the consumer reads, or an actionable command?" +- Key distinction from SingleLatest: mailbox implies the consumer *must process* it + +## The Ideation Loop + +Follow this loop strictly. Never skip steps. + +``` +1. Human describes intent (any form, any specificity) +2. Identify ambiguities that affect buffer type, topology, or data model +3. Ask ONE targeted question โ€” never multiple at once +4. Human responds +5. Call the appropriate `propose_*` tool with a concrete proposal +6. Human calls resolve_proposal: confirm | reject | revise +7. On confirm: state.toml updated, Mermaid regenerated, Rust generated +8. Return to step 1 for the next record or refinement +``` + +**Never propose without resolving ambiguity.** If you are uncertain which +buffer type fits, ask first. A wrong proposal that the human confirms wastes +more time than one clarifying question. + +**One question at a time.** Asking three questions at once overwhelms. Ask the +most important one โ€” the one whose answer has the highest information value for +the buffer type decision. + +## Startup Behaviour + +On session start: +1. Read `aimdb://architecture/memory` โ€” restore ideation context and design rationale +2. Read `aimdb://architecture/state` โ€” load existing decisions +3. Read `aimdb://architecture` โ€” understand current topology +4. 
Read `aimdb://architecture/conflicts` โ€” surface any drift +5. If architecture exists: briefly summarise it (use memory for context), note any conflicts +6. If no architecture: ask where to begin (see `onboarding` prompt) + +Do not re-litigate settled decisions. Memory records the rationale for prior choices โ€” +use it to explain them if asked, not to revisit them. + +## Post-Confirmation: Save Memory + +After **every** `resolve_proposal` that returns `"resolution": "confirmed"`, call +`save_memory` with a narrative entry capturing: + +``` +## {RecordName} + +**Context**: {1โ€“2 sentences on what the user is building and why this record exists} + +**Key question**: {The most important question you asked} +**Answer**: {What the user said} + +**Buffer choice**: {SpmcRing|SingleLatest|Mailbox} โ€” {1โ€“2 sentences on why this fits} + +**Alternatives considered**: {Any options discussed and why they were rejected} + +**Future considerations**: {Any deferred decisions, e.g. "add host field for distributed tracing"} +``` + +Omit sections that have no content โ€” do not write "N/A". + +## Data Model Derivation + +Do not guess value types โ€” derive them from source material: + +- **Datasheets**: Extract calibrated output fields and units (not raw ADC values) +- **API documentation**: Map response schema fields to Rust primitives +- **Protocol specs**: e.g. KNX DPT 9.001 โ†’ `f32` in ยฐC +- **Conversation**: Ask targeted questions about fields and units + +Supported field types: `f64`, `f32`, `u8`, `u16`, `u32`, `u64`, +`i8`, `i16`, `i32`, `i64`, `bool`, `String`. + +If the user mentions a sensor model, look up its calibrated outputs. +If the user provides an API spec, extract the response fields. +Always propose the struct fields as part of the record proposal. + +## Key Variants + +All key variants must be concrete before you call any `propose_*` tool. Never emit +`key_strategy: "one_per_device"` โ€” that is not a valid state.toml field. 
+ +If the user says "one per device" without listing them: +> "Which devices should I include? I need the concrete IDs โ€” for example: +> gateway-01, gateway-02, sensor-hub-01. Do you have a device list or +> fleet manifest I can read?" + +The agent may derive device lists from fleet manifests, config files, or API +responses the user provides. + +## Mermaid Conventions + +Read `aimdb://architecture/conventions` for the full specification. Summary: +- `(["Name\nSpmcRing ยท N"])` โ€” stadium shape for ring buffer +- `("Name\nSingleLatest")` โ€” rounded rect for state +- `{"Name\nMailbox"}` โ€” diamond for command +- Solid arrows: data flow (produce / consume) +- Dashed arrows: connector metadata (link_to / link_from with URL) + +## Constraints + +- **Never edit state.toml or Mermaid directly** โ€” use tools +- **Every change is a proposal** โ€” the human confirms before anything is written +- **Conflicts halt proposals** โ€” if validate_against_instance returns errors for + the affected record, surface them before proposing the change +- **Breaking changes are warned, not blocked** โ€” note that deleting or renaming + a record will cause compile errors in application code that references it +"#; + + vec![PromptMessage { + role: "user".to_string(), + content: PromptMessageContent { + content_type: "text".to_string(), + text: text.to_string(), + }, + }] +} + +/// Safety protocol for schema evolution. +/// +/// Applied before confirming any proposal that deletes, renames, or changes +/// the buffer type of an existing record โ€” especially one present in the +/// running instance. 
+fn get_breaking_change_review_prompt() -> Vec { + let text = r#"# Breaking Change Review Protocol + +Apply this protocol **before** calling resolve_proposal with `confirm` for any +proposal that: +- Deletes an existing record +- Renames an existing record +- Changes the buffer type of an existing record +- Removes or renames fields from a value struct +- Changes key variants (removing or renaming existing ones) + +## Step 1 โ€” Check the Running Instance + +Call `validate_against_instance`. Review the result: + +| Conflict type | Meaning | Action | +|---------------|---------|--------| +| `missing_in_instance` | Record not in instance yet | Safe โ€” codegen may not have run | +| `missing_in_state` | Record in instance but not in state.toml | Note only โ€” manually registered | +| `buffer_mismatch` | Buffer type differs between state and instance | **Warn user** | +| `capacity_mismatch` | Capacity differs | Warn โ€” may be intentional override | +| `connector_mismatch` | Connector URL differs | Warn โ€” check intent | + +If `buffer_mismatch` or `connector_mismatch` is present for the affected record, +surface it inline and **halt the proposal** until the human decides how to proceed. + +## Step 2 โ€” Application Code Impact + +For delete and rename operations, always include this warning in the proposal: + +> โš ๏ธ **Application code impact**: Deleting/renaming this record will remove +> the generated `{OldName}Key` enum and `{OldName}Value` struct from +> `src/generated_schema.rs`. Any application code that references these types +> will fail to compile. The compiler will identify all affected call sites. +> No automatic migration is performed. + +For buffer type changes, include: + +> โš ๏ธ **Buffer type change**: Changing from `{OldBuffer}` to `{NewBuffer}` +> will affect consumer behaviour. Consumers expecting ring-buffer semantics +> (e.g. 
anomaly detectors reading historical windows) will silently receive +> fewer values if changing from SpmcRing to SingleLatest or Mailbox. + +## Step 3 โ€” Decision Log Entry + +When confirmed, always write a `decisions` entry that records: +- What was changed (old value โ†’ new value) +- Why (the human's stated reason) +- Timestamp + +This ensures future sessions can explain why a breaking change was made. + +## What NOT to do + +- **Do not propose automatic migrations.** If a buffer type changes, do not + offer to rewrite the application code that consumes it. +- **Do not block on warnings.** Capacity mismatches and info-level conflicts + do not require user action โ€” surface them, then proceed if the human confirms. +- **Do not halt on `missing_in_instance`.** This is expected when codegen + has not been run yet or the binary has not been redeployed. +"#; + + vec![PromptMessage { + role: "user".to_string(), + content: PromptMessageContent { + content_type: "text".to_string(), + text: text.to_string(), + }, + }] +} + +/// Guided onboarding for a first architecture session. +/// +/// Walks the user through describing their system and producing their first +/// state.toml, including the key questions to ask in sequence. +fn get_onboarding_prompt() -> Vec { + let text = r#"# AimDB Architecture Agent โ€” First Session + +No architecture exists yet. Use this sequence to guide the user from a blank +state to a validated first architecture. + +## Opening Message + +> No architecture found in `.aimdb/state.toml`. +> +> Tell me about the system you're building โ€” what data exists, where it comes +> from, and where it needs to go. You don't need to be precise yet; a rough +> description is fine. + +## Information Gathering Sequence + +Collect answers to these questions, one at a time, woven naturally into +conversation โ€” not as a form. Stop collecting and start proposing as soon as +you have enough to make the first record proposal. + +### 1. 
Data sources +> "What generates data in your system? (sensors, services, user actions, +> external APIs, devices, cloud backends...)" + +### 2. Data consumers +> "Who or what reads that data? (dashboards, actuators, analytics pipelines, +> notification systems, other services...)" + +### 3. Frequency and volume +> "How frequently does the data change or arrive?" +> Example follow-up: "Is it continuous (100ms sensor readings) or event-driven +> (firmware update every few weeks)?" + +### 4. External connectivity +> "Does data need to flow to or from an external system? (MQTT broker, KNX +> bus, REST API, cloud service...)" + +### 5. Platform target +> "Are you running on embedded hardware, edge servers, cloud, or a mix?" +> This affects connector and buffer choices. + +## Transition to Proposals + +Once you have a clear picture of at least one data source and its consumers, +stop gathering and make your first proposal. Use the `propose_record` format. + +Do not wait for complete system description before proposing. Start with the +highest-frequency or most central data record โ€” the one that connects the most +producers and consumers. Correct proposals build momentum. + +## Example Opening Exchange + +``` +Agent: No architecture found. Tell me about the system you're building โ€” + what data exists, where it comes from, and where it needs to go. + +User: I have 3 SHT31 sensors (indoor, outdoor, garage) reporting every 100ms. + A dashboard shows live readings. Anomalies trigger cloud alerts. + +Agent: I know the SHT31 โ€” it outputs calibrated temperature and relative + humidity. One question: + Does the dashboard need every reading, or just the current value + per sensor? +``` + +(Continue with `resolve_buffer_type` patterns as needed, then `propose_record`.) 
+ +## State Initialisation + +Before the first proposal is confirmed, create the `.aimdb/` directory and an +empty `state.toml` with the `[meta]` block: + +```toml +[meta] +aimdb_version = "0.5.0" +created_at = "{ISO8601_NOW}" +last_modified = "{ISO8601_NOW}" +``` + +The meta block is written by the `propose_*` tools on first use โ€” this is just +for context on what the initial file looks like. +"#; + + vec![PromptMessage { + role: "user".to_string(), + content: PromptMessageContent { + content_type: "text".to_string(), + text: text.to_string(), + }, + }] +} + #[cfg(test)] mod tests { use super::*; @@ -393,19 +487,13 @@ mod tests { #[test] fn test_list_prompts() { let prompts = list_prompts(); - assert_eq!(prompts.len(), 2); - assert_eq!(prompts[0].name, "schema-help"); - assert_eq!(prompts[1].name, "troubleshooting"); + assert_eq!(prompts.len(), 4); + assert_eq!(prompts[0].name, "architecture_agent"); + assert_eq!(prompts[1].name, "onboarding"); + assert_eq!(prompts[2].name, "breaking_change_review"); + assert_eq!(prompts[3].name, "troubleshooting"); } - #[test] - fn test_get_schema_help_prompt() { - let messages = get_prompt("schema-help"); - assert!(messages.is_some()); - let messages = messages.unwrap(); - assert_eq!(messages.len(), 1); - assert!(messages[0].content.text.contains("query_schema")); - } #[test] fn test_get_troubleshooting_prompt() { diff --git a/tools/aimdb-mcp/src/resources/architecture.rs b/tools/aimdb-mcp/src/resources/architecture.rs new file mode 100644 index 00000000..7b6a212b --- /dev/null +++ b/tools/aimdb-mcp/src/resources/architecture.rs @@ -0,0 +1,161 @@ +//! Architecture agent MCP resources (M11) +//! +//! Exposes 5 resources: +//! - `aimdb://architecture` โ†’ Mermaid diagram (from state.toml) +//! - `aimdb://architecture/state` โ†’ raw state.toml as TOML text +//! - `aimdb://architecture/conflicts` โ†’ validation errors/warnings as JSON +//! - `aimdb://architecture/memory` โ†’ ideation context and design rationale +//! 
- `aimdb://architecture/conventions` → Mermaid diagram conventions (embedded, read-only)
+
+use crate::architecture::{default_memory_path, default_state_path, read_state};
+use crate::error::{McpError, McpResult};
+use crate::protocol::{Resource, ResourceContent, ResourceReadResult};
+use tracing::debug;
+
+/// Mermaid diagram conventions — embedded at compile time so the binary is
+/// self-contained and users cannot accidentally modify the spec.
+const CONVENTIONS: &str = include_str!("../../assets/CONVENTIONS.md");
+
+// ── list ──────────────────────────────────────────────────────────────────────
+
+/// Return the 5 architecture resource descriptors.
+pub fn list_resources() -> Vec<Resource> {
+    vec![
+        Resource {
+            uri: "aimdb://architecture".to_string(),
+            name: "Architecture Diagram".to_string(),
+            description: Some(
+                "Mermaid flowchart generated from .aimdb/state.toml. \
+                 Reflects the current proposed architecture."
+                    .to_string(),
+            ),
+            mime_type: Some("text/plain".to_string()),
+        },
+        Resource {
+            uri: "aimdb://architecture/state".to_string(),
+            name: "Architecture State".to_string(),
+            description: Some(
+                "Raw .aimdb/state.toml as a TOML document. \
+                 Contains all record definitions and connector config."
+                    .to_string(),
+            ),
+            mime_type: Some("application/toml".to_string()),
+        },
+        Resource {
+            uri: "aimdb://architecture/conflicts".to_string(),
+            name: "Architecture Validation".to_string(),
+            description: Some(
+                "Validation errors and warnings from .aimdb/state.toml. \
+                 Does not require a running instance."
+ .to_string(), + ), + mime_type: Some("application/json".to_string()), + }, + Resource { + uri: "aimdb://architecture/conventions".to_string(), + name: "Mermaid Diagram Conventions".to_string(), + description: Some( + "The canonical visual language for AimDB architecture diagrams: node shapes \ + per buffer type, arrow styles for data flow vs connector metadata, and \ + labelling rules. Embedded in the binary โ€” read-only." + .to_string(), + ), + mime_type: Some("text/markdown".to_string()), + }, + Resource { + uri: "aimdb://architecture/memory".to_string(), + name: "Architecture Memory".to_string(), + description: Some( + "Ideation context and design rationale from .aimdb/memory.md. \ + Captures the conversational why behind every decision: questions asked, \ + answers received, alternatives rejected, and future considerations. \ + Read this on session start to restore full design context." + .to_string(), + ), + mime_type: Some("text/markdown".to_string()), + }, + ] +} + +// โ”€โ”€ read โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Read a single architecture resource by URI. +pub fn read_resource(uri: &str) -> McpResult { + debug!("architecture read_resource: {uri}"); + + let state_path = default_state_path(); + + let state_opt = read_state(&state_path) + .map_err(|e| McpError::Internal(format!("reading state.toml: {e}")))?; + + let text = match uri { + "aimdb://architecture" => match &state_opt { + None => "No state.toml found. 
Run the onboarding prompt to get started.".to_string(),
+            Some(state) => aimdb_codegen::generate_mermaid(state),
+        },
+
+        "aimdb://architecture/state" => match &state_opt {
+            None => "# No state.toml found.\n".to_string(),
+            Some(state) => serde_json::to_string_pretty(state)
+                .map_err(|e| McpError::Internal(format!("serialising state: {e}")))?,
+        },
+
+        "aimdb://architecture/conflicts" => match &state_opt {
+            None => serde_json::to_string_pretty(&serde_json::json!({
+                "errors": [],
+                "warnings": [],
+                "note": "No state.toml found"
+            }))
+            .unwrap(),
+            Some(state) => {
+                let errors = aimdb_codegen::validate(state);
+                serde_json::to_string_pretty(&serde_json::json!({
+                    "errors": errors.iter()
+                        .filter(|e| e.severity == aimdb_codegen::Severity::Error)
+                        .map(|e| serde_json::json!({ "location": e.location, "message": e.message }))
+                        .collect::<Vec<_>>(),
+                    "warnings": errors.iter()
+                        .filter(|e| e.severity == aimdb_codegen::Severity::Warning)
+                        .map(|e| serde_json::json!({ "location": e.location, "message": e.message }))
+                        .collect::<Vec<_>>(),
+                }))
+                .map_err(|e| McpError::Internal(format!("serialising conflicts: {e}")))?
+            }
+        },
+
+        "aimdb://architecture/conventions" => CONVENTIONS.to_string(),
+
+        "aimdb://architecture/memory" => {
+            let memory_path = default_memory_path();
+            if memory_path.exists() {
+                std::fs::read_to_string(&memory_path)
+                    .map_err(|e| McpError::Internal(format!("reading memory.md: {e}")))?
+            } else {
+                "# AimDB Architecture Memory\n\nNo memory recorded yet. 
\
+                 The architecture agent will populate this after the first confirmed proposal.\n"
+                    .to_string()
+            }
+        }
+
+        _ => {
+            return Err(McpError::InvalidParams(format!(
+                "Unknown architecture resource: {uri}"
+            )))
+        }
+    };
+
+    let mime_type = if uri.ends_with("architecture") {
+        Some("text/plain".to_string())
+    } else {
+        Some("application/json".to_string())
+    };
+
+    Ok(ResourceReadResult {
+        contents: vec![ResourceContent {
+            uri: uri.to_string(),
+            mime_type,
+            text: Some(text),
+            blob: None,
+        }],
+    })
+}
diff --git a/tools/aimdb-mcp/src/resources/mod.rs b/tools/aimdb-mcp/src/resources/mod.rs
index 5c3714e0..2ce05def 100644
--- a/tools/aimdb-mcp/src/resources/mod.rs
+++ b/tools/aimdb-mcp/src/resources/mod.rs
@@ -2,8 +2,28 @@
 //!
 //! Resources provide data that can be accessed by URI.
 
+pub mod architecture;
 pub mod instances;
 pub mod records;
 
-// Re-export resource functions
-pub use instances::{list_resources, read_resource};
+use crate::error::McpResult;
+use crate::protocol::{ResourceReadResult, ResourcesListResult};
+
+/// List all available resources (instances + architecture)
+pub async fn list_resources() -> McpResult<ResourcesListResult> {
+    let mut result = instances::list_resources().await?;
+    // Append architecture resources
+    for r in architecture::list_resources() {
+        result.resources.push(r);
+    }
+    Ok(result)
+}
+
+/// Read a resource by URI (architecture URIs first, then instances)
+pub async fn read_resource(uri: &str) -> McpResult<ResourceReadResult> {
+    if uri.starts_with("aimdb://architecture") {
+        architecture::read_resource(uri)
+    } else {
+        instances::read_resource(uri).await
+    }
+}
diff --git a/tools/aimdb-mcp/src/server.rs b/tools/aimdb-mcp/src/server.rs
index f970884d..5c25523c 100644
--- a/tools/aimdb-mcp/src/server.rs
+++ b/tools/aimdb-mcp/src/server.rs
@@ -80,6 +80,9 @@ impl McpServer {
         // Initialize connection pool for tools (if not already done)
         tools::init_connection_pool(self.connection_pool.clone());
 
+        // Initialize session store for architecture agent
+        
crate::architecture::init_session_store(); + // Build server capabilities let capabilities = ServerCapabilities { tools: Some(ToolsCapability { @@ -303,6 +306,350 @@ impl McpServer { "additionalProperties": false }), }, + // โ”€โ”€ Architecture agent tools (M11) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + Tool { + name: "get_architecture".to_string(), + description: "Return the current architecture state from .aimdb/state.toml as structured JSON, including record count, validation summary, and decision log length. Run this first when entering an architecture session.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "state_path": { + "type": "string", + "description": "Path to state.toml (default: .aimdb/state.toml)" + } + }, + "additionalProperties": false + }), + }, + Tool { + name: "propose_add_record".to_string(), + description: "Propose adding a new record to the architecture. All payload fields are explicit and typed โ€” no guessing required. Present the proposal to the user before calling resolve_proposal.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "PascalCase record name, e.g. \"TemperatureReading\"" + }, + "description": { + "type": "string", + "description": "Human-readable description of the proposal shown to the user" + }, + "buffer": { + "type": "string", + "enum": ["SpmcRing", "SingleLatest", "Mailbox"], + "description": "Buffer semantics: SpmcRing=stream (every value), SingleLatest=state (newest only), Mailbox=command (overwrite)" + }, + "capacity": { + "type": "integer", + "description": "Ring buffer capacity โ€” required when buffer=SpmcRing. Use power-of-2, e.g. 256, 512, 1024." + }, + "key_prefix": { + "type": "string", + "description": "Optional common key prefix, e.g. \"sensors.temp.\". 
Default: \"\"" + }, + "key_variants": { + "type": "array", + "items": { "type": "string" }, + "description": "Concrete PascalCase variant names, e.g. [\"Default\"] or [\"Indoor\", \"Outdoor\"]. Default: []" + }, + "producers": { + "type": "array", + "items": { "type": "string" }, + "description": "Task names that write to this record, e.g. [\"sensor_task\"]." + }, + "consumers": { + "type": "array", + "items": { "type": "string" }, + "description": "Task names that read from this record, e.g. [\"anomaly_detector\"]." + }, + "fields": { + "type": "array", + "description": "Value struct fields", + "items": { + "type": "object", + "properties": { + "name": { "type": "string", "description": "snake_case field name" }, + "type": { "type": "string", "description": "Rust primitive: f64, f32, u8, u16, u32, u64, i8, i16, i32, i64, bool, String" }, + "description": { "type": "string" } + }, + "required": ["name", "type", "description"] + } + }, + "connectors": { + "type": "array", + "description": "Connector wiring (MQTT, KNX, etc.)", + "items": { + "type": "object", + "properties": { + "protocol": { "type": "string", "description": "e.g. mqtt, knx" }, + "direction": { "type": "string", "enum": ["inbound", "outbound"] }, + "url": { "type": "string", "description": "Topic/address template; may contain {variant}" } + }, + "required": ["protocol", "direction", "url"] + } + } + }, + "required": ["name", "description", "buffer"], + "additionalProperties": false + }), + }, + Tool { + name: "propose_modify_buffer".to_string(), + description: "Propose changing the buffer type (and optionally capacity) of an existing record. 
Present the proposal to the user before calling resolve_proposal.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "record_name": { + "type": "string", + "description": "PascalCase name of the existing record to modify" + }, + "description": { + "type": "string", + "description": "Human-readable description of the proposal shown to the user" + }, + "buffer": { + "type": "string", + "enum": ["SpmcRing", "SingleLatest", "Mailbox"], + "description": "New buffer type" + }, + "capacity": { + "type": "integer", + "description": "Ring capacity โ€” required when buffer=SpmcRing" + } + }, + "required": ["record_name", "description", "buffer"], + "additionalProperties": false + }), + }, + Tool { + name: "propose_add_connector".to_string(), + description: "Propose adding a connector (MQTT, KNX, etc.) to an existing record. Present the proposal to the user before calling resolve_proposal.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "record_name": { + "type": "string", + "description": "PascalCase name of the existing record to wire up" + }, + "description": { + "type": "string", + "description": "Human-readable description of the proposal shown to the user" + }, + "protocol": { + "type": "string", + "description": "Connector protocol identifier, e.g. \"mqtt\" or \"knx\"" + }, + "direction": { + "type": "string", + "enum": ["inbound", "outbound"], + "description": "inbound = brokerโ†’DB, outbound = DBโ†’broker" + }, + "url": { + "type": "string", + "description": "Topic or address template; use {variant} placeholder for key variants, e.g. \"sensors/temp/{variant}\"" + } + }, + "required": ["record_name", "description", "protocol", "direction", "url"], + "additionalProperties": false + }), + }, + Tool { + name: "propose_modify_fields".to_string(), + description: "Propose replacing the value struct fields of an existing record. This replaces ALL fields โ€” include unchanged fields too. 
Present the proposal to the user before calling resolve_proposal.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "record_name": { + "type": "string", + "description": "PascalCase name of the existing record to modify" + }, + "description": { + "type": "string", + "description": "Human-readable description of the proposal shown to the user" + }, + "fields": { + "type": "array", + "description": "Complete replacement field list for the value struct", + "items": { + "type": "object", + "properties": { + "name": { "type": "string", "description": "snake_case field name" }, + "type": { "type": "string", "description": "f64, f32, u8, u16, u32, u64, i8, i16, i32, i64, bool, String" }, + "description": { "type": "string" } + }, + "required": ["name", "type", "description"] + } + } + }, + "required": ["record_name", "description", "fields"], + "additionalProperties": false + }), + }, + Tool { + name: "propose_modify_key_variants".to_string(), + description: "Propose updating the key variants of an existing record. Use this when adding a record with no variants (e.g. [\"Default\"]) or expanding a fleet (e.g. adding a new device). Present the proposal to the user before calling resolve_proposal.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "record_name": { + "type": "string", + "description": "PascalCase name of the existing record to modify" + }, + "description": { + "type": "string", + "description": "Human-readable description of the proposal shown to the user" + }, + "key_variants": { + "type": "array", + "items": { "type": "string" }, + "description": "Complete replacement list of PascalCase variant names, e.g. [\"Default\"] or [\"ApiServer\", \"Worker\", \"Db\"]. Replaces prior variant list." + }, + "key_prefix": { + "type": "string", + "description": "Optional common key prefix. If omitted the existing prefix is preserved." 
+ } + }, + "required": ["record_name", "description", "key_variants"], + "additionalProperties": false + }), + }, + Tool { + name: "resolve_proposal".to_string(), + description: "Resolve a pending proposal. On confirm: applies the change, writes state.toml, generates Mermaid and Rust artefacts. On reject: discards without changes. On revise: discards with a redirect message.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "proposal_id": { + "type": "string", + "description": "The proposal ID returned by propose_add_record, propose_modify_buffer, propose_add_connector, propose_modify_fields, propose_modify_key_variants, remove_record, or rename_record" + }, + "resolution": { + "type": "string", + "enum": ["confirm", "reject", "revise"], + "description": "User decision: confirm applies the change, reject discards it, revise returns a redirect" + }, + "redirect": { + "type": "string", + "description": "Message explaining what to revise (only used when resolution=revise)" + }, + "state_path": { "type": "string", "description": "Override state.toml path" }, + "mermaid_path": { "type": "string", "description": "Override Mermaid output path" }, + "rust_path": { "type": "string", "description": "Override Rust output path" } + }, + "required": ["proposal_id", "resolution"], + "additionalProperties": false + }), + }, + Tool { + name: "remove_record".to_string(), + description: "Propose removal of an existing record. Creates a pending proposal โ€” call resolve_proposal to confirm. Note: removing a record breaks generated type aliases.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "record_name": { + "type": "string", + "description": "PascalCase name of the record to remove" + } + }, + "required": ["record_name"], + "additionalProperties": false + }), + }, + Tool { + name: "rename_record".to_string(), + description: "Propose renaming a record. Creates a pending proposal โ€” call resolve_proposal to confirm. 
Note: renames the generated key enum and value struct, breaking existing references.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "old_name": { + "type": "string", + "description": "Current PascalCase record name" + }, + "new_name": { + "type": "string", + "description": "New PascalCase record name" + } + }, + "required": ["old_name", "new_name"], + "additionalProperties": false + }), + }, + Tool { + name: "validate_against_instance".to_string(), + description: "Compare state.toml against a live AimDB instance and return a conflict report. Detects missing records, buffer type mismatches, capacity differences, and connector mismatches.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "socket_path": { + "type": "string", + "description": "Unix socket path to the running AimDB instance (e.g., /tmp/aimdb-demo.sock)" + }, + "state_path": { + "type": "string", + "description": "Path to state.toml (default: .aimdb/state.toml)" + } + }, + "required": ["socket_path"], + "additionalProperties": false + }), + }, + Tool { + name: "get_buffer_metrics".to_string(), + description: "Get live buffer metrics for records matching a key string from a running AimDB instance.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "socket_path": { + "type": "string", + "description": "Unix socket path to the AimDB instance" + }, + "record_key": { + "type": "string", + "description": "Substring to match against record names (e.g., 'Temperature')" + } + }, + "required": ["socket_path", "record_key"], + "additionalProperties": false + }), + }, + Tool { + name: "save_memory".to_string(), + description: "Persist ideation context and design rationale to .aimdb/memory.md. 
\ + Call this after every confirmed proposal with a narrative summary of what the user is building, \ + the key question asked, the answer received, why the chosen buffer type fits, \ + alternatives that were considered and rejected, and any future considerations noted. \ + On session start, read aimdb://architecture/memory to restore this context.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "entry": { + "type": "string", + "description": "Markdown text to write. For append mode, structure as a '## RecordName' section with sub-headings: Context, Key question, Answer, Buffer choice & rationale, Alternatives considered, Future considerations." + }, + "mode": { + "type": "string", + "enum": ["append", "overwrite"], + "description": "append (default): add a timestamped section to memory.md. overwrite: replace the entire file (use only to correct the whole document)." + }, + "memory_path": { + "type": "string", + "description": "Override path (default: .aimdb/memory.md)" + } + }, + "required": ["entry"], + "additionalProperties": false + }), + }, + Tool { + name: "reset_session".to_string(), + description: "Reset the architecture agent session, discarding any pending proposals. 
Use when the user wants to start over or abandon the current ideation cycle.".to_string(), + input_schema: json!({ + "type": "object", + "properties": {}, + "additionalProperties": false + }), + }, ]; Ok(ToolsListResult { tools }) @@ -329,6 +676,24 @@ impl McpServer { "graph_nodes" => tools::graph_nodes(params.arguments).await?, "graph_edges" => tools::graph_edges(params.arguments).await?, "graph_topo_order" => tools::graph_topo_order(params.arguments).await?, + // Architecture agent tools (M11) + "get_architecture" => tools::get_architecture(params.arguments).await?, + "propose_add_record" => tools::propose_add_record(params.arguments).await?, + "propose_modify_buffer" => tools::propose_modify_buffer(params.arguments).await?, + "propose_add_connector" => tools::propose_add_connector(params.arguments).await?, + "propose_modify_fields" => tools::propose_modify_fields(params.arguments).await?, + "propose_modify_key_variants" => { + tools::propose_modify_key_variants(params.arguments).await? + } + "resolve_proposal" => tools::resolve_proposal(params.arguments).await?, + "remove_record" => tools::remove_record(params.arguments).await?, + "rename_record" => tools::rename_record(params.arguments).await?, + "validate_against_instance" => { + tools::validate_against_instance(params.arguments).await? + } + "get_buffer_metrics" => tools::get_buffer_metrics(params.arguments).await?, + "save_memory" => tools::save_memory(params.arguments).await?, + "reset_session" => tools::reset_session(params.arguments).await?, _ => { return Err(McpError::MethodNotFound(format!( "Unknown tool: {}", diff --git a/tools/aimdb-mcp/src/tools/architecture.rs b/tools/aimdb-mcp/src/tools/architecture.rs new file mode 100644 index 00000000..c4535393 --- /dev/null +++ b/tools/aimdb-mcp/src/tools/architecture.rs @@ -0,0 +1,747 @@ +//! Architecture agent MCP tools (M11) +//! +//! Provides typed proposal tools for the architecture agent ideation loop: +//! 
`get_architecture`, `propose_add_record`, `propose_modify_buffer`, +//! `propose_add_connector`, `propose_modify_fields`, `propose_modify_key_variants`, +//! `resolve_proposal`, `remove_record`, `rename_record`, +//! `validate_against_instance`, `get_buffer_metrics`, `reset_session`. +//! +//! All proposal-related tools are routed through the session state machine, +//! which enforces: `Idle โ†’ Gathering โ†’ Proposing โ†’ (resolve) โ†’ Gathering`. + +use crate::architecture::conflicts::{detect_conflicts, LiveRecord}; +use crate::architecture::session::next_proposal_id; +use crate::architecture::{ + self, apply_change, default_memory_path, default_mermaid_path, default_rust_path, + default_state_path, ensure_state_initialised, session_store, write_artefacts, + write_state_locked, Proposal, ProposalResolution, ProposedChange, +}; +use crate::error::{McpError, McpResult}; +use aimdb_client::AimxClient; +use serde::Deserialize; +use serde_json::Value; +use tracing::debug; + +// โ”€โ”€ get_architecture โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Return the current architecture state as structured JSON. +/// +/// Side-effect: creates a session if none exists, transitions Idle โ†’ Gathering. 
+pub async fn get_architecture(args: Option) -> McpResult { + debug!("get_architecture called"); + + #[derive(Debug, Deserialize, Default)] + struct Params { + #[serde(default)] + state_path: Option, + } + let params: Params = parse_optional(args)?; + let path = params + .state_path + .map(std::path::PathBuf::from) + .unwrap_or_else(default_state_path); + + // Transition: Idle โ†’ Gathering (or no-op if already in session) + let store = session_store() + .ok_or_else(|| McpError::Internal("Session store not initialised".to_string()))?; + let session_info = { + let mut locked = store + .lock() + .map_err(|e| McpError::Internal(format!("session store poisoned: {e}")))?; + let session = locked.on_get_architecture(); + serde_json::to_value(session).unwrap_or_default() + }; + + match architecture::read_state(&path).map_err(|e| McpError::Internal(e.to_string()))? { + None => Ok(serde_json::json!({ + "exists": false, + "message": format!( + "No state.toml found at {}. Start an architecture session with the onboarding prompt.", + path.display() + ), + "session": session_info, + })), + Some(state) => { + let errors = aimdb_codegen::validate(&state); + let result = serde_json::json!({ + "exists": true, + "state": serde_json::to_value(&state)?, + "record_count": state.records.len(), + "decision_count": state.decisions.len(), + "validation_errors": errors.iter().filter(|e| e.severity == aimdb_codegen::Severity::Error).count(), + "validation_warnings": errors.iter().filter(|e| e.severity == aimdb_codegen::Severity::Warning).count(), + "session": session_info, + }); + Ok(result) + } + } +} + +// โ”€โ”€ propose_* typed tools โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +// +// Each tool has a concrete, fully-specified JSON Schema exposed to the MCP +// client so AI agents never have to guess payload structure. 
Error messages +// include the expected schema as a self-describing hint. + +// Shared helper: push a Proposal into the session store and return its ID. +fn submit_proposal(proposal: Proposal) -> McpResult { + let store = session_store() + .ok_or_else(|| McpError::Internal("Session store not initialised".to_string()))?; + let mut locked = store + .lock() + .map_err(|e| McpError::Internal(format!("session store poisoned: {e}")))?; + let p = locked.on_propose(proposal).map_err(|e| { + McpError::InvalidParams(format!("Cannot propose in current session state: {e}")) + })?; + Ok(p.id.clone()) +} + +// โ”€โ”€ propose_add_record โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Add a new record. All fields are fully typed โ€” no payload guessing required. +pub async fn propose_add_record(args: Option) -> McpResult { + debug!("propose_add_record called"); + + const SCHEMA_HINT: &str = concat!( + "Expected fields:\n", + " name : string โ€” PascalCase record name (required)\n", + " buffer : string โ€” \"SpmcRing\" | \"SingleLatest\" | \"Mailbox\" (required)\n", + " capacity : number โ€” required when buffer = \"SpmcRing\"\n", + " description : string โ€” human-readable description shown to the user (required)\n", + " key_prefix : string โ€” common key prefix, e.g. \"sensors.temp.\" (optional, default \"\")\n", + " key_variants: string[] โ€” concrete variant names, e.g. 
[\"Default\"] (optional, default [])\n", + " producers : string[] โ€” task names that write to this record (optional)\n", + " consumers : string[] โ€” task names that read from this record (optional)\n", + " fields : [{name, type, description}][] โ€” value struct fields (optional)\n", + " connectors : [{protocol, direction, url}][] โ€” connector wiring (optional)" + ); + + #[derive(Debug, Deserialize)] + struct Params { + description: String, + #[serde(flatten)] + record: aimdb_codegen::RecordDef, + } + + let p: Params = serde_json::from_value(args.unwrap_or(Value::Null)).map_err(|e| { + McpError::InvalidParams(format!("propose_add_record: {e}\n\n{SCHEMA_HINT}")) + })?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "add_record".to_string(), + description: p.description, + change: ProposedChange::AddRecord { record: p.record }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "message": "Proposal created. Present it to the user and call resolve_proposal with their decision.", + })) +} + +// โ”€โ”€ propose_modify_buffer โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Change the buffer type (and optionally capacity) of an existing record. 
+pub async fn propose_modify_buffer(args: Option) -> McpResult { + debug!("propose_modify_buffer called"); + + const SCHEMA_HINT: &str = concat!( + "Expected fields:\n", + " record_name : string โ€” PascalCase name of existing record (required)\n", + " description : string โ€” human-readable description shown to the user (required)\n", + " buffer : string โ€” \"SpmcRing\" | \"SingleLatest\" | \"Mailbox\" (required)\n", + " capacity : number โ€” required when buffer = \"SpmcRing\"" + ); + + #[derive(Debug, Deserialize)] + struct Params { + record_name: String, + description: String, + buffer: aimdb_codegen::BufferType, + capacity: Option, + } + + let p: Params = serde_json::from_value(args.unwrap_or(Value::Null)).map_err(|e| { + McpError::InvalidParams(format!("propose_modify_buffer: {e}\n\n{SCHEMA_HINT}")) + })?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "modify_buffer".to_string(), + description: p.description, + change: ProposedChange::ModifyBuffer { + record_name: p.record_name, + buffer: p.buffer, + capacity: p.capacity, + }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "message": "Proposal created. Present it to the user and call resolve_proposal with their decision.", + })) +} + +// โ”€โ”€ propose_add_connector โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Add a connector (MQTT, KNX, etc.) to an existing record. +pub async fn propose_add_connector(args: Option) -> McpResult { + debug!("propose_add_connector called"); + + const SCHEMA_HINT: &str = concat!( + "Expected fields:\n", + " record_name : string โ€” PascalCase name of existing record (required)\n", + " description : string โ€” human-readable description shown to the user (required)\n", + " protocol : string โ€” e.g. 
\"mqtt\", \"knx\" (required)\n", + " direction : string โ€” \"inbound\" | \"outbound\" (required)\n", + " url : string โ€” topic / address template; may contain {variant} placeholder (required)" + ); + + #[derive(Debug, Deserialize)] + struct Params { + record_name: String, + description: String, + protocol: String, + direction: aimdb_codegen::ConnectorDirection, + url: String, + } + + let p: Params = serde_json::from_value(args.unwrap_or(Value::Null)).map_err(|e| { + McpError::InvalidParams(format!("propose_add_connector: {e}\n\n{SCHEMA_HINT}")) + })?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "add_connector".to_string(), + description: p.description, + change: ProposedChange::AddConnector { + record_name: p.record_name, + connector: aimdb_codegen::ConnectorDef { + protocol: p.protocol, + direction: p.direction, + url: p.url, + }, + }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "message": "Proposal created. Present it to the user and call resolve_proposal with their decision.", + })) +} + +// โ”€โ”€ propose_modify_fields โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Replace the fields of an existing record's value struct. 
+pub async fn propose_modify_fields(args: Option) -> McpResult { + debug!("propose_modify_fields called"); + + const SCHEMA_HINT: &str = concat!( + "Expected fields:\n", + " record_name : string โ€” PascalCase name of existing record (required)\n", + " description : string โ€” human-readable description shown to the user (required)\n", + " fields : array โ€” replacement field list (required)\n", + " Each element: { \"name\": string, \"type\": \"f64|f32|u8|u16|u32|u64|i8|i16|i32|i64|bool|String\", \"description\": string }" + ); + + #[derive(Debug, Deserialize)] + struct Params { + record_name: String, + description: String, + fields: Vec, + } + + let p: Params = serde_json::from_value(args.unwrap_or(Value::Null)).map_err(|e| { + McpError::InvalidParams(format!("propose_modify_fields: {e}\n\n{SCHEMA_HINT}")) + })?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "modify_fields".to_string(), + description: p.description, + change: ProposedChange::ModifyFields { + record_name: p.record_name, + fields: p.fields, + }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "message": "Proposal created. Present it to the user and call resolve_proposal with their decision.", + })) +} + +// โ”€โ”€ propose_modify_key_variants โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Set the key variants (and optionally key_prefix) of an existing record. +/// +/// Use this after adding a record with no variants, or when adding new devices +/// to an existing key family. 
+pub async fn propose_modify_key_variants(args: Option) -> McpResult { + debug!("propose_modify_key_variants called"); + + const SCHEMA_HINT: &str = concat!( + "Expected fields:\n", + " record_name : string โ€” PascalCase name of existing record (required)\n", + " description : string โ€” human-readable description shown to the user (required)\n", + " key_variants : string[] โ€” concrete PascalCase variant names, e.g. [\"Default\"] or\n", + " [\"ApiServer\", \"Worker\", \"Db\"] (required)\n", + " key_prefix : string โ€” optional common prefix, e.g. \"sensors.temp.\" (optional)" + ); + + #[derive(Debug, Deserialize)] + struct Params { + record_name: String, + description: String, + key_variants: Vec, + key_prefix: Option, + } + + let p: Params = serde_json::from_value(args.unwrap_or(Value::Null)).map_err(|e| { + McpError::InvalidParams(format!("propose_modify_key_variants: {e}\n\n{SCHEMA_HINT}")) + })?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "modify_key_variants".to_string(), + description: p.description, + change: ProposedChange::ModifyKeyVariants { + record_name: p.record_name, + key_variants: p.key_variants, + key_prefix: p.key_prefix, + }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "message": "Proposal created. 
Present it to the user and call resolve_proposal with their decision.", + })) +} + +// โ”€โ”€ resolve_proposal โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[derive(Debug, Deserialize)] +struct ResolveProposalParams { + proposal_id: String, + resolution: ProposalResolution, + /// Optional redirect message when resolution is `revise` + #[serde(default)] + redirect: Option, + #[serde(default)] + state_path: Option, + #[serde(default)] + mermaid_path: Option, + #[serde(default)] + rust_path: Option, +} + +/// Accept, reject, or redirect (revise) a pending proposal. +/// +/// Enforces session phase: must be in `Proposing` phase with a matching +/// proposal ID. On resolve, transitions back to `Gathering`. +pub async fn resolve_proposal(args: Option) -> McpResult { + debug!("resolve_proposal called"); + let params: ResolveProposalParams = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("resolve_proposal: {e}")))?; + + let store = session_store() + .ok_or_else(|| McpError::Internal("Session store not initialised".to_string()))?; + + // Extract the proposal from the session (validates phase + ID) + let proposal = { + let mut locked = store + .lock() + .map_err(|e| McpError::Internal(format!("session store poisoned: {e}")))?; + locked + .on_resolve(¶ms.proposal_id) + .map_err(|e| McpError::InvalidParams(format!("Cannot resolve proposal: {e}")))? + }; + + match params.resolution { + ProposalResolution::Reject => Ok(serde_json::json!({ + "resolution": "rejected", + "proposal_id": proposal.id, + "message": "Proposal rejected. No changes were made to state.toml.", + })), + + ProposalResolution::Revise => Ok(serde_json::json!({ + "resolution": "revise", + "proposal_id": proposal.id, + "redirect": params.redirect, + "message": "Proposal marked for revision. 
Please revise and call the appropriate propose_* tool again.", + })), + + ProposalResolution::Confirm => { + let state_path = params + .state_path + .map(std::path::PathBuf::from) + .unwrap_or_else(default_state_path); + let mermaid_path = params + .mermaid_path + .map(std::path::PathBuf::from) + .unwrap_or_else(default_mermaid_path); + let rust_path = params + .rust_path + .map(std::path::PathBuf::from) + .unwrap_or_else(default_rust_path); + + // Read current state (or initialise) + let mut state = ensure_state_initialised(&state_path) + .map_err(|e| McpError::Internal(format!("reading state.toml: {e}")))?; + + // Apply the change + apply_change(&mut state, &proposal.change) + .map_err(|e| McpError::Internal(format!("applying change: {e}")))?; + + // Validate result before writing + let errors = aimdb_codegen::validate(&state); + let blocking_errors: Vec<_> = errors + .iter() + .filter(|e| e.severity == aimdb_codegen::Severity::Error) + .collect(); + if !blocking_errors.is_empty() { + let msgs: Vec = blocking_errors.iter().map(|e| e.to_string()).collect(); + return Err(McpError::InvalidParams(format!( + "Applying this change produces validation errors โ€” cannot confirm:\n{}", + msgs.join("\n") + ))); + } + + // Write state.toml with file lock + write_state_locked(&state_path, &state) + .map_err(|e| McpError::Internal(format!("writing state.toml: {e}")))?; + + // Generate artefacts + let generated = write_artefacts(&state, &mermaid_path, &rust_path) + .map_err(|e| McpError::Internal(format!("generating artefacts: {e}")))?; + + Ok(serde_json::json!({ + "resolution": "confirmed", + "proposal_id": proposal.id, + "change_type": proposal.change_type, + "state_toml": state_path.display().to_string(), + "generated": generated, + "record_count": state.records.len(), + "message": "Proposal confirmed. 
state.toml updated, Mermaid and Rust generated.",
            }))
        }
    }
}

// ── save_memory ──────────────────────────────────────────────────────────────

/// Persist ideation context to `.aimdb/memory.md`.
///
/// Call this after every confirmed proposal to record the conversational
/// rationale that led to each decision — the *why* that state.toml cannot
/// express.
pub async fn save_memory(args: Option<Value>) -> McpResult<Value> {
    debug!("save_memory called");

    const SCHEMA_HINT: &str = concat!(
        "Expected fields:\n",
        "  entry       : string — markdown text to append (required)\n",
        "  mode        : \"append\" (default) | \"overwrite\" — append adds a dated\n",
        "                section; overwrite replaces the entire file\n",
        "  memory_path : string — override path (default: .aimdb/memory.md)"
    );

    #[derive(Debug, Default, Deserialize)]
    #[serde(rename_all = "snake_case")]
    enum MemoryMode {
        #[default]
        Append,
        Overwrite,
    }

    #[derive(Debug, Deserialize)]
    struct Params {
        entry: String,
        #[serde(default)]
        mode: MemoryMode,
        #[serde(default)]
        memory_path: Option<String>,
    }

    let p: Params = serde_json::from_value(args.unwrap_or(Value::Null))
        .map_err(|e| McpError::InvalidParams(format!("save_memory: {e}\n\n{SCHEMA_HINT}")))?;

    let path = p
        .memory_path
        .map(std::path::PathBuf::from)
        .unwrap_or_else(default_memory_path);

    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| McpError::Internal(format!("creating .aimdb dir: {e}")))?;
    }

    let content = match p.mode {
        MemoryMode::Overwrite => p.entry.clone(),
        MemoryMode::Append => {
            let timestamp = chrono::Utc::now().format("%Y-%m-%d %H:%M UTC");
            let section = format!("\n---\n\n{}\n\n{}\n", timestamp, p.entry.trim());
            if path.exists() {
                let existing = std::fs::read_to_string(&path)
                    .map_err(|e| 
McpError::Internal(format!("reading memory.md: {e}")))?;
                format!("{}{}", existing.trim_end(), section)
            } else {
                // First write: add a header
                "# AimDB Architecture Memory\n\
                > Generated by the architecture agent. Do not edit — use the agent to update.\n\
                > Records the ideation context and design rationale behind each decision.\n"
                    .to_string()
                    + &section
            }
        }
    };

    std::fs::write(&path, &content)
        .map_err(|e| McpError::Internal(format!("writing memory.md: {e}")))?;

    Ok(serde_json::json!({
        "written": path.display().to_string(),
        "mode": match p.mode { MemoryMode::Append => "append", MemoryMode::Overwrite => "overwrite" },
        "bytes": content.len(),
        "message": "Memory updated. The agent will read this on next session start.",
    }))
}

#[derive(Debug, Deserialize)]
struct RemoveRecordParams {
    record_name: String,
    #[serde(default)]
    #[allow(dead_code)]
    state_path: Option<String>,
}

/// Propose removal of an existing record (creates a pending proposal).
///
/// Enforces session phase: must be in `Gathering` phase. 
+pub async fn remove_record(args: Option) -> McpResult { + debug!("remove_record called"); + let params: RemoveRecordParams = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("remove_record: {e}")))?; + + let store = session_store() + .ok_or_else(|| McpError::Internal("Session store not initialised".to_string()))?; + + let proposal = Proposal { + id: next_proposal_id(), + change_type: "remove_record".to_string(), + description: format!("Remove record '{}'", params.record_name), + change: ProposedChange::RemoveRecord { + record_name: params.record_name.clone(), + }, + created_at: chrono::Utc::now().to_rfc3339(), + }; + + let id = { + let mut locked = store + .lock() + .map_err(|e| McpError::Internal(format!("session store poisoned: {e}")))?; + let p = locked + .on_propose(proposal) + .map_err(|e| McpError::InvalidParams(format!("Cannot propose removal: {e}")))?; + p.id.clone() + }; + + Ok(serde_json::json!({ + "proposal_id": id, + "record_name": params.record_name, + "warning": format!( + "Removing '{}' will delete its generated key enum and value struct from \ + src/generated_schema.rs. Application code referencing these types will fail to compile.", + params.record_name + ), + "message": "Removal proposal created. Present to the user, then call resolve_proposal.", + })) +} + +// โ”€โ”€ rename_record โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[derive(Debug, Deserialize)] +struct RenameRecordParams { + old_name: String, + new_name: String, + #[serde(default)] + #[allow(dead_code)] + state_path: Option, +} + +/// Propose renaming an existing record (creates a pending proposal). +/// +/// Enforces session phase: must be in `Gathering` phase. 
+pub async fn rename_record(args: Option) -> McpResult { + debug!("rename_record called"); + let params: RenameRecordParams = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("rename_record: {e}")))?; + + let store = session_store() + .ok_or_else(|| McpError::Internal("Session store not initialised".to_string()))?; + + let proposal = Proposal { + id: next_proposal_id(), + change_type: "rename_record".to_string(), + description: format!("Rename '{}' โ†’ '{}'", params.old_name, params.new_name), + change: ProposedChange::RenameRecord { + old_name: params.old_name.clone(), + new_name: params.new_name.clone(), + }, + created_at: chrono::Utc::now().to_rfc3339(), + }; + + let id = { + let mut locked = store + .lock() + .map_err(|e| McpError::Internal(format!("session store poisoned: {e}")))?; + let p = locked + .on_propose(proposal) + .map_err(|e| McpError::InvalidParams(format!("Cannot propose rename: {e}")))?; + p.id.clone() + }; + + Ok(serde_json::json!({ + "proposal_id": id, + "old_name": params.old_name, + "new_name": params.new_name, + "warning": format!( + "Renaming '{}' to '{}' will update the generated key enum and value struct names. \ + Application code using the old name ('{}Key', '{}Value') will fail to compile.", + params.old_name, params.new_name, params.old_name, params.old_name + ), + "message": "Rename proposal created. Present to the user, then call resolve_proposal.", + })) +} + +// โ”€โ”€ validate_against_instance โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[derive(Debug, Deserialize)] +struct ValidateInstanceParams { + socket_path: String, + #[serde(default)] + state_path: Option, +} + +/// Compare state.toml against a live AimDB instance and return conflicts. +/// +/// Session-agnostic: works in any phase. 
+pub async fn validate_against_instance(args: Option) -> McpResult { + debug!("validate_against_instance called"); + let params: ValidateInstanceParams = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("validate_against_instance: {e}")))?; + + let state_path = params + .state_path + .map(std::path::PathBuf::from) + .unwrap_or_else(default_state_path); + + let state = architecture::read_state(&state_path) + .map_err(|e| McpError::Internal(e.to_string()))? + .ok_or_else(|| { + McpError::InvalidParams(format!( + "No state.toml at {}. Nothing to validate against.", + state_path.display() + )) + })?; + + // Connect and list live records + let mut client = AimxClient::connect(¶ms.socket_path) + .await + .map_err(McpError::Client)?; + + let raw = client.list_records().await.map_err(McpError::Client)?; + + // Parse the live records into our lightweight struct + let live_records: Vec = raw + .into_iter() + .map(|r| LiveRecord { + name: r.name, + buffer_type: r.buffer_type, + buffer_capacity: r.buffer_capacity, + }) + .collect(); + + let report = detect_conflicts(&state, &live_records); + + Ok(serde_json::to_value(&report)?) +} + +// โ”€โ”€ get_buffer_metrics โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[derive(Debug, Deserialize)] +struct GetBufferMetricsParams { + socket_path: String, + record_key: String, +} + +/// Get live buffer metrics for a record key (delegates to list_records, filters by key). +/// +/// Session-agnostic: works in any phase. 
+pub async fn get_buffer_metrics(args: Option) -> McpResult { + debug!("get_buffer_metrics called"); + let params: GetBufferMetricsParams = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("get_buffer_metrics: {e}")))?; + + let mut client = AimxClient::connect(¶ms.socket_path) + .await + .map_err(McpError::Client)?; + + let raw = client.list_records().await.map_err(McpError::Client)?; + + let matching: Vec<_> = raw + .into_iter() + .filter(|r| r.name.contains(¶ms.record_key)) + .collect(); + + if matching.is_empty() { + return Ok(serde_json::json!({ + "found": false, + "record_key": params.record_key, + "message": "No records matching this key were found in the running instance.", + })); + } + + Ok(serde_json::json!({ + "found": true, + "record_key": params.record_key, + "records": serde_json::to_value(matching)?, + })) +} + +// โ”€โ”€ reset_session โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Reset the architecture session, discarding any pending proposals. +/// +/// Use this when the user wants to start over or abandon the current +/// ideation cycle. +pub async fn reset_session(_args: Option) -> McpResult { + debug!("reset_session called"); + + let store = session_store() + .ok_or_else(|| McpError::Internal("Session store not initialised".to_string()))?; + + let mut locked = store + .lock() + .map_err(|e| McpError::Internal(format!("session store poisoned: {e}")))?; + locked.reset(); + + Ok(serde_json::json!({ + "message": "Session reset. 
Call get_architecture to begin a new ideation cycle.", + })) +} + +// โ”€โ”€ Helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn parse_optional(args: Option) -> McpResult { + match args { + None | Some(Value::Null) => Ok(T::default()), + Some(v) => serde_json::from_value(v).map_err(|e| McpError::InvalidParams(e.to_string())), + } +} diff --git a/tools/aimdb-mcp/src/tools/mod.rs b/tools/aimdb-mcp/src/tools/mod.rs index 7f462866..31995fca 100644 --- a/tools/aimdb-mcp/src/tools/mod.rs +++ b/tools/aimdb-mcp/src/tools/mod.rs @@ -5,6 +5,7 @@ use crate::connection::ConnectionPool; use once_cell::sync::OnceCell; +pub mod architecture; pub mod graph; pub mod instance; pub mod record; @@ -24,6 +25,11 @@ pub(crate) fn connection_pool() -> Option<&'static ConnectionPool> { } // Re-export tool functions +pub use architecture::{ + get_architecture, get_buffer_metrics, propose_add_connector, propose_add_record, + propose_modify_buffer, propose_modify_fields, propose_modify_key_variants, remove_record, + rename_record, reset_session, resolve_proposal, save_memory, validate_against_instance, +}; pub use graph::{graph_edges, graph_nodes, graph_topo_order}; pub use instance::{discover_instances, get_instance_info}; pub use record::{drain_record, get_record, list_records, set_record}; From f1230565591e4710628cf13517f233f087dbbd02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 24 Feb 2026 21:06:57 +0000 Subject: [PATCH 06/46] format --- tools/aimdb-mcp/src/prompts/mod.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/aimdb-mcp/src/prompts/mod.rs b/tools/aimdb-mcp/src/prompts/mod.rs index 325087b8..2c87d533 100644 --- a/tools/aimdb-mcp/src/prompts/mod.rs +++ b/tools/aimdb-mcp/src/prompts/mod.rs @@ -494,7 +494,6 @@ mod tests { assert_eq!(prompts[3].name, "troubleshooting"); } - #[test] 
fn test_get_troubleshooting_prompt() { let messages = get_prompt("troubleshooting"); From cd7d15cfbeb7585c693c74de21fecc494e71e35c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 24 Feb 2026 21:09:08 +0000 Subject: [PATCH 07/46] chore: update embassy subproject to latest commit --- _external/embassy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_external/embassy b/_external/embassy index d9105e12..507ac8bc 160000 --- a/_external/embassy +++ b/_external/embassy @@ -1 +1 @@ -Subproject commit d9105e122e73b806cc9d4071a19806f423a4a2fa +Subproject commit 507ac8bc1b573f4172486440c7648b489dcfe02e From 27408a896e0e994f3d195186fa643148387fc277 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Thu, 26 Feb 2026 19:43:48 +0000 Subject: [PATCH 08/46] feat: Enhance validation and code generation for AimDB - Added validation for schema_version to ensure it is >= 1. - Introduced warnings for records with settable fields lacking a timestamp field. - Implemented validation for observable signal fields to ensure they exist and are numeric. - Updated the generate command to support a common crate mode, producing a complete crate structure. - Enhanced TOML schema to include project metadata and record-level serialization options. - Added tests for new validation rules and common crate generation. - Documented the design for extending aimdb-codegen to generate common crates. 
--- aimdb-codegen/src/lib.rs | 5 +- aimdb-codegen/src/rust.rs | 737 ++++++++++++++++++- aimdb-codegen/src/state.rs | 148 ++++ aimdb-codegen/src/validate.rs | 240 +++++++ docs/design/024-M11-codegen-common-crate.md | 748 ++++++++++++++++++++ tools/aimdb-cli/src/commands/generate.rs | 82 ++- tools/aimdb-mcp/src/architecture/mod.rs | 1 + tools/aimdb-mcp/src/architecture/session.rs | 4 + 8 files changed, 1918 insertions(+), 47 deletions(-) create mode 100644 docs/design/024-M11-codegen-common-crate.md diff --git a/aimdb-codegen/src/lib.rs b/aimdb-codegen/src/lib.rs index 4de4daf9..b6eea7c7 100644 --- a/aimdb-codegen/src/lib.rs +++ b/aimdb-codegen/src/lib.rs @@ -54,9 +54,10 @@ pub mod validate; // โ”€โ”€ Convenience re-exports โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ pub use mermaid::generate_mermaid; -pub use rust::generate_rust; +pub use rust::{generate_cargo_toml, generate_lib_rs, generate_rust, generate_schema_rs}; +pub use rust::{to_pascal_case, to_snake_case}; pub use state::{ ArchitectureState, BufferType, ConnectorDef, ConnectorDirection, DecisionEntry, FieldDef, Meta, - RecordDef, + ObservableDef, ProjectDef, RecordDef, SerializationType, }; pub use validate::{is_valid, validate, Severity, ValidationError}; diff --git a/aimdb-codegen/src/rust.rs b/aimdb-codegen/src/rust.rs index 2280b9b1..171d434b 100644 --- a/aimdb-codegen/src/rust.rs +++ b/aimdb-codegen/src/rust.rs @@ -10,7 +10,7 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use crate::state::{ArchitectureState, ConnectorDirection, RecordDef}; +use crate::state::{ArchitectureState, ConnectorDirection, RecordDef, SerializationType}; // โ”€โ”€ Public API โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -22,15 +22,78 @@ use 
crate::state::{ArchitectureState, ConnectorDirection, RecordDef}; /// - One `Key` enum per record (with `#[derive(RecordKey)]`) /// - A `configure_schema()` function wiring all records into `AimDbBuilder` pub fn generate_rust(state: &ArchitectureState) -> String { - let imports = emit_imports(); + let formatted = generate_rust_inner(state); + + let header = "\ +// @generated โ€” do not edit manually.\n\ +// Source: .aimdb/state.toml โ€” edit via `aimdb generate` or the architecture agent.\n\ +// Regenerate: `aimdb generate` or confirm a proposal in the architecture agent.\n\n"; + + format!("{header}{formatted}") +} + +/// Generate `schema.rs` for a common crate (no `@generated` header). +/// +/// Emits only the portable data-contract layer: value structs, key enums, +/// `SchemaType` and `Linkable` impls. No `configure_schema`, no runtime deps. +/// This keeps the common crate platform-agnostic (`no_std`-compatible). +pub fn generate_schema_rs(state: &ArchitectureState) -> String { + generate_types_inner(state) +} + +/// Types-only inner โ€” value structs + key enums + trait impls, no `configure_schema`. +fn generate_types_inner(state: &ArchitectureState) -> String { + let imports = emit_imports_types_only(state); + + let record_items: Vec = state + .records + .iter() + .flat_map(|rec| { + let mut items = vec![emit_value_struct(rec), emit_key_enum(rec)]; + items.push(emit_schema_type_impl(rec)); + let linkable = emit_linkable_impl(rec); + if !linkable.is_empty() { + items.push(linkable); + } + if let Some(obs) = emit_observable_impl(rec) { + items.push(obs); + } + if let Some(set) = emit_settable_impl(rec) { + items.push(set); + } + items + }) + .collect(); + + let file_tokens = quote! 
{ + #imports + #(#record_items)* + }; + + let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust"); + prettyplease::unparse(&syntax_tree) +} + +fn generate_rust_inner(state: &ArchitectureState) -> String { + let imports = emit_imports(state); let record_items: Vec = state .records .iter() .flat_map(|rec| { - let value_struct = emit_value_struct(rec); - let key_enum = emit_key_enum(rec); - [value_struct, key_enum] + let mut items = vec![emit_value_struct(rec), emit_key_enum(rec)]; + items.push(emit_schema_type_impl(rec)); + let linkable = emit_linkable_impl(rec); + if !linkable.is_empty() { + items.push(linkable); + } + if let Some(obs) = emit_observable_impl(rec) { + items.push(obs); + } + if let Some(set) = emit_settable_impl(rec) { + items.push(set); + } + items }) .collect(); @@ -43,24 +106,155 @@ pub fn generate_rust(state: &ArchitectureState) -> String { }; let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust"); - let formatted = prettyplease::unparse(&syntax_tree); + prettyplease::unparse(&syntax_tree) +} - // Prepend the @generated header (outside the token stream since it's a comment) - let header = "\ -// @generated โ€” do not edit manually.\n\ -// Source: .aimdb/state.toml โ€” edit via `aimdb generate` or the architecture agent.\n\ -// Regenerate: `aimdb generate` or confirm a proposal in the architecture agent.\n\n"; +/// Generate `Cargo.toml` content for a common crate. +/// +/// Requires `state.project` to be `Some`. The caller should validate this +/// before calling. 
+pub fn generate_cargo_toml(state: &ArchitectureState) -> String { + let project = state + .project + .as_ref() + .expect("generate_cargo_toml requires [project] block in state.toml"); + let crate_name = format!("{}-common", project.name); + let edition = project.edition.as_deref().unwrap_or("2024"); + + let has_non_custom_ser = state.records.iter().any(|r| { + r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom + }); + let has_postcard = state + .records + .iter() + .any(|r| r.serialization.as_ref() == Some(&SerializationType::Postcard)); + let has_observable = state.records.iter().any(|r| r.observable.is_some()); - format!("{header}{formatted}") + let mut data_contracts_features = Vec::new(); + if has_non_custom_ser { + data_contracts_features.push("\"linkable\""); + } + + let dc_features_str = if data_contracts_features.is_empty() { + String::new() + } else { + format!(", features = [{}]", data_contracts_features.join(", ")) + }; + + // Build std feature deps + let mut std_deps = vec!["\"aimdb-data-contracts/std\"".to_string()]; + if has_non_custom_ser && !has_postcard { + std_deps.push("\"serde_json\"".to_string()); + } + if has_observable { + std_deps.push("\"aimdb-data-contracts/observable\"".to_string()); + } + let std_features = std_deps.join(", "); + + let mut optional_deps = String::new(); + if has_non_custom_ser && !has_postcard { + optional_deps.push_str("serde_json = { version = \"1.0\", optional = true }\n"); + } + if has_postcard { + optional_deps.push_str( + "postcard = { version = \"1.0\", default-features = false, features = [\"alloc\"] }\n", + ); + } + + format!( + r#"# Regenerate with `aimdb generate --common-crate` +[package] +name = "{crate_name}" +version = "0.1.0" +edition = "{edition}" + +[features] +default = ["std"] +std = [{std_features}] +alloc = [] + +[dependencies] +aimdb-core = {{ version = "0.5", default-features = false, features = ["derive", "alloc"] }} +aimdb-data-contracts = {{ version = 
"0.5", default-features = false{dc_features_str} }} +serde = {{ version = "1.0", default-features = false, features = ["derive", "alloc"] }} +{optional_deps}"# + ) +} + +/// Generate `lib.rs` content for a common crate. +pub fn generate_lib_rs() -> String { + "\ +// Regenerate with `aimdb generate --common-crate` +#![cfg_attr(not(feature = \"std\"), no_std)] +extern crate alloc; + +mod schema; + +// Re-export all public types for downstream crates +pub use schema::*; +" + .to_string() } // โ”€โ”€ Imports โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ -fn emit_imports() -> TokenStream { +/// Imports for the types-only common crate schema โ€” no runtime deps. +fn emit_imports_types_only(state: &ArchitectureState) -> TokenStream { + let has_non_custom_ser = state.records.iter().any(|r| { + r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom + }); + let has_observable = state.records.iter().any(|r| r.observable.is_some()); + let has_settable = state + .records + .iter() + .any(|r| r.fields.iter().any(|f| f.settable)); + + let mut contract_traits: Vec = vec![quote! { SchemaType }]; + if has_non_custom_ser { + contract_traits.push(quote! { Linkable }); + } + if has_observable { + contract_traits.push(quote! { Observable }); + } + if has_settable { + contract_traits.push(quote! { Settable }); + } + + quote! { + use aimdb_core::RecordKey; + use aimdb_data_contracts::{#(#contract_traits),*}; + use serde::{Deserialize, Serialize}; + } +} + +/// Imports for the full flat schema โ€” includes runtime registration deps. 
+fn emit_imports(state: &ArchitectureState) -> TokenStream { + let has_non_custom_ser = state.records.iter().any(|r| { + r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom + }); + let has_observable = state.records.iter().any(|r| r.observable.is_some()); + let has_settable = state + .records + .iter() + .any(|r| r.fields.iter().any(|f| f.settable)); + + // Build aimdb_data_contracts trait imports + let mut contract_traits: Vec = vec![quote! { SchemaType }]; + if has_non_custom_ser { + contract_traits.push(quote! { Linkable }); + } + if has_observable { + contract_traits.push(quote! { Observable }); + } + if has_settable { + contract_traits.push(quote! { Settable }); + } + quote! { use aimdb_core::buffer::BufferCfg; use aimdb_core::builder::AimDbBuilder; - use aimdb_derive::RecordKey; + use aimdb_core::RecordKey; + use aimdb_data_contracts::{#(#contract_traits),*}; use aimdb_executor::Spawn; use serde::{Deserialize, Serialize}; } @@ -153,7 +347,7 @@ fn emit_key_enum(rec: &RecordDef) -> TokenStream { }; quote! 
{ - #[derive(RecordKey, Clone, Copy, PartialEq, Eq)] + #[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)] #key_prefix_attr pub enum #enum_name { #(#variants)* @@ -202,23 +396,57 @@ fn emit_record_configure_block(rec: &RecordDef) -> TokenStream { .map(|v| format_ident!("{}", to_pascal_case(v))) .collect(); + let is_custom = rec + .serialization + .as_ref() + .map(|s| s == &SerializationType::Custom) + .unwrap_or(false); + let connector_block = rec.connectors.first().map(|conn| { - let link_fn = match conn.direction { - ConnectorDirection::Outbound => format_ident!("link_to"), - ConnectorDirection::Inbound => format_ident!("link_from"), - }; - let todo_comment = match conn.direction { - ConnectorDirection::Outbound => { - "TODO: add .with_serializer(|v| serde_json::to_vec(v).map_err(Into::into))" - } - ConnectorDirection::Inbound => { - "TODO: add .with_deserializer(|bytes| serde_json::from_slice(bytes).map_err(Into::into))" + if is_custom { + // Custom serialization: keep TODO stubs + let link_fn = match conn.direction { + ConnectorDirection::Outbound => format_ident!("link_to"), + ConnectorDirection::Inbound => format_ident!("link_from"), + }; + let todo_comment = match conn.direction { + ConnectorDirection::Outbound => { + "TODO: add .with_serializer(...) โ€” serialization = \"custom\"" + } + ConnectorDirection::Inbound => { + "TODO: add .with_deserializer(...) โ€” serialization = \"custom\"" + } + }; + quote! { + if let Some(addr) = key.link_address() { + let _ = #todo_comment; + reg.#link_fn(addr); + } } - }; - quote! { - if let Some(addr) = key.link_address() { - let _ = #todo_comment; - reg.#link_fn(addr); + } else { + // Non-custom: wire real serializers via Linkable trait + match conn.direction { + ConnectorDirection::Inbound => { + quote! { + if let Some(addr) = key.link_address() { + reg.link_from(addr) + .with_deserializer(#value_type::from_bytes) + .finish(); + } + } + } + ConnectorDirection::Outbound => { + quote! 
{ + if let Some(addr) = key.link_address() { + reg.link_to(addr) + .with_serializer(|v: &#value_type| { + v.to_bytes() + .map_err(|_| aimdb_core::connector::SerializeError::InvalidData) + }) + .finish(); + } + } + } } } }); @@ -228,15 +456,247 @@ fn emit_record_configure_block(rec: &RecordDef) -> TokenStream { #(#key_type::#variant_idents,)* ] { builder.configure::<#value_type>(key, |reg| { - reg.buffer_cfg(#buffer_tokens); + reg.buffer(#buffer_tokens); #connector_block }); } } } +// โ”€โ”€ Trait implementations โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn emit_schema_type_impl(rec: &RecordDef) -> TokenStream { + let struct_name = format_ident!("{}Value", rec.name); + let schema_name = to_snake_case(&rec.name); + let version = proc_macro2::Literal::u32_unsuffixed(rec.schema_version.unwrap_or(1)); + + quote! { + impl SchemaType for #struct_name { + const NAME: &'static str = #schema_name; + const VERSION: u32 = #version; + } + } +} + +fn emit_linkable_impl(rec: &RecordDef) -> TokenStream { + let ser = rec + .serialization + .as_ref() + .unwrap_or(&SerializationType::Json); + + match ser { + SerializationType::Custom => quote! {}, + SerializationType::Json => emit_linkable_json(rec), + SerializationType::Postcard => emit_linkable_postcard(rec), + } +} + +fn emit_linkable_json(rec: &RecordDef) -> TokenStream { + let struct_name = format_ident!("{}Value", rec.name); + quote! 
{ + impl Linkable for #struct_name { + fn to_bytes(&self) -> Result, alloc::string::String> { + #[cfg(feature = "std")] + { + serde_json::to_vec(self) + .map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME)) + } + #[cfg(not(feature = "std"))] + { + Err(alloc::string::String::from( + "no_std serialization not available โ€” enable the std feature or use postcard", + )) + } + } + + fn from_bytes(data: &[u8]) -> Result { + #[cfg(feature = "std")] + { + serde_json::from_slice(data) + .map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME)) + } + #[cfg(not(feature = "std"))] + { + let _ = data; + Err(alloc::string::String::from( + "no_std deserialization not available โ€” enable the std feature or use postcard", + )) + } + } + } + } +} + +fn emit_linkable_postcard(rec: &RecordDef) -> TokenStream { + let struct_name = format_ident!("{}Value", rec.name); + quote! { + impl Linkable for #struct_name { + fn to_bytes(&self) -> Result, alloc::string::String> { + postcard::to_allocvec(self) + .map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME)) + } + + fn from_bytes(data: &[u8]) -> Result { + postcard::from_bytes(data) + .map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME)) + } + } + } +} + +fn emit_observable_impl(rec: &RecordDef) -> Option { + let obs = rec.observable.as_ref()?; + let struct_name = format_ident!("{}Value", rec.name); + + // Look up signal field type + let signal_field = rec.fields.iter().find(|f| f.name == obs.signal_field)?; + let signal_type: syn::Type = syn::parse_str(&signal_field.field_type).ok()?; + let signal_ident = format_ident!("{}", obs.signal_field); + + let icon = &obs.icon; + let unit = &obs.unit; + + // Timestamp heuristic: first u64 field named timestamp/computed_at/fetched_at + let timestamp_names = ["timestamp", "computed_at", "fetched_at"]; + let timestamp_field = rec + .fields + .iter() + .find(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str())); + + let format_log_body = if let Some(ts) 
= timestamp_field { + let ts_ident = format_ident!("{}", ts.name); + quote! { + alloc::format!( + "{} [{}] {}: {:.1}{} at {}", + Self::ICON, + node_id, + Self::NAME, + self.signal(), + Self::UNIT, + self.#ts_ident, + ) + } + } else { + quote! { + alloc::format!( + "{} [{}] {}: {:.1}{}", + Self::ICON, + node_id, + Self::NAME, + self.signal(), + Self::UNIT, + ) + } + }; + + Some(quote! { + impl Observable for #struct_name { + type Signal = #signal_type; + const ICON: &'static str = #icon; + const UNIT: &'static str = #unit; + + fn signal(&self) -> #signal_type { + self.#signal_ident + } + + fn format_log(&self, node_id: &str) -> alloc::string::String { + #format_log_body + } + } + }) +} + +fn emit_settable_impl(rec: &RecordDef) -> Option { + let settable_fields: Vec<_> = rec.fields.iter().filter(|f| f.settable).collect(); + if settable_fields.is_empty() { + return None; + } + + let struct_name = format_ident!("{}Value", rec.name); + + // Build the Value type + let settable_types: Vec = settable_fields + .iter() + .map(|f| syn::parse_str(&f.field_type).unwrap()) + .collect(); + + let value_type: TokenStream = if settable_types.len() == 1 { + let t = &settable_types[0]; + quote! { #t } + } else { + quote! { (#(#settable_types),*) } + }; + + // Timestamp heuristic: first u64 field named timestamp/computed_at/fetched_at + let timestamp_names = ["timestamp", "computed_at", "fetched_at"]; + let timestamp_field = rec + .fields + .iter() + .find(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str())); + + // Build field assignments for `set()` + let mut settable_idx = 0usize; + let field_assignments: Vec = rec + .fields + .iter() + .map(|f| { + let fname = format_ident!("{}", f.name); + if timestamp_field.map(|tf| tf.name == f.name).unwrap_or(false) && !f.settable { + // This is the timestamp field โ€” fill from parameter + quote! { #fname: timestamp, } + } else if f.settable { + let assignment = if settable_fields.len() == 1 { + quote! 
{ value } + } else { + let idx = syn::Index::from(settable_idx); + quote! { value.#idx } + }; + settable_idx += 1; + quote! { #fname: #assignment, } + } else { + // Non-settable, non-timestamp field: use Default + quote! { #fname: Default::default(), } + } + }) + .collect(); + + Some(quote! { + impl Settable for #struct_name { + type Value = #value_type; + + fn set(value: Self::Value, timestamp: u64) -> Self { + Self { + #(#field_assignments)* + } + } + } + }) +} + // โ”€โ”€ Utilities โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +/// Convert a PascalCase string to snake_case. +/// +/// # Examples +/// ``` +/// # use aimdb_codegen::rust::to_snake_case; +/// assert_eq!(to_snake_case("WeatherObservation"), "weather_observation"); +/// assert_eq!(to_snake_case("OtaCommand"), "ota_command"); +/// assert_eq!(to_snake_case("Temperature"), "temperature"); +/// ``` +pub fn to_snake_case(s: &str) -> String { + let mut result = String::with_capacity(s.len() + 4); + for (i, c) in s.chars().enumerate() { + if c.is_uppercase() && i > 0 { + result.push('_'); + } + for lc in c.to_lowercase() { + result.push(lc); + } + } + result +} + /// Convert a kebab-case or snake_case string to PascalCase. 
/// /// # Examples @@ -357,7 +817,7 @@ url = "mqtt://ota/cmd/{variant}" "Missing AimDbBuilder import:\n{out}" ); assert!( - out.contains("use aimdb_derive::RecordKey;"), + out.contains("use aimdb_core::RecordKey;"), "Missing RecordKey import:\n{out}" ); assert!( @@ -403,7 +863,7 @@ url = "mqtt://ota/cmd/{variant}" "Missing key enum:\n{out}" ); assert!( - out.contains("#[derive(RecordKey, Clone, Copy, PartialEq, Eq)]"), + out.contains("#[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)]"), "Missing RecordKey derive:\n{out}" ); assert!( @@ -478,7 +938,7 @@ url = "mqtt://ota/cmd/{variant}" // prettyplease may split struct literals across lines assert!( out.contains("BufferCfg::SpmcRing"), - "Missing SpmcRing buffer_cfg call:\n{out}" + "Missing SpmcRing buffer call:\n{out}" ); assert!( out.contains("capacity: 256"), @@ -491,26 +951,35 @@ url = "mqtt://ota/cmd/{variant}" let out = generated(); assert!( out.contains("BufferCfg::Mailbox"), - "Missing Mailbox buffer_cfg call:\n{out}" + "Missing Mailbox buffer call:\n{out}" ); } #[test] - fn configure_schema_outbound_link_to() { + fn configure_schema_outbound_link_to_with_serializer() { let out = generated(); assert!( out.contains("link_to(addr)"), "Missing link_to call:\n{out}" ); + assert!( + out.contains("with_serializer"), + "Missing with_serializer call:\n{out}" + ); + assert!(out.contains(".finish()"), "Missing .finish() call:\n{out}"); } #[test] - fn configure_schema_inbound_link_from() { + fn configure_schema_inbound_link_from_with_deserializer() { let out = generated(); assert!( out.contains("link_from(addr)"), "Missing link_from call:\n{out}" ); + assert!( + out.contains("with_deserializer(OtaCommandValue::from_bytes)"), + "Missing with_deserializer call:\n{out}" + ); } #[test] @@ -566,4 +1035,198 @@ url = "mqtt://ota/cmd/{variant}" // eprintln!("{out}"); assert!(!out.is_empty()); } + + // โ”€โ”€ to_snake_case 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + #[test] + fn snake_case_basic() { + assert_eq!(to_snake_case("WeatherObservation"), "weather_observation"); + assert_eq!(to_snake_case("Temperature"), "temperature"); + } + + #[test] + fn snake_case_acronym() { + assert_eq!(to_snake_case("OtaCommand"), "ota_command"); + } + + // โ”€โ”€ Extended TOML with new fields โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + const EXTENDED_TOML: &str = r#" +[project] +name = "weather-sentinel" + +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-24T21:39:15Z" +last_modified = "2026-02-25T10:00:00Z" + +[[records]] +name = "WeatherObservation" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "weather.observation." +key_variants = ["Vienna", "Munich"] +schema_version = 2 +serialization = "json" + +[records.observable] +signal_field = "temperature_celsius" +icon = "๐ŸŒก๏ธ" +unit = "ยฐC" + +[[records.fields]] +name = "timestamp" +type = "u64" +description = "Unix timestamp in milliseconds" + +[[records.fields]] +name = "temperature_celsius" +type = "f32" +description = "Air temperature" +settable = true + +[[records.fields]] +name = "humidity_percent" +type = "f32" +description = "Relative humidity" +settable = true + +[[records.connectors]] +protocol = "mqtt" +direction = "inbound" +url = "sensors/{variant}/observation" +"#; + + fn extended_state() -> ArchitectureState { + ArchitectureState::from_toml(EXTENDED_TOML).unwrap() + } + + fn extended_generated() -> String { + generate_rust(&extended_state()) + } + + #[test] + fn schema_type_impl_generated() { + let out = extended_generated(); + assert!( + out.contains("impl SchemaType for WeatherObservationValue"), + "Missing SchemaType impl:\n{out}" + ); + assert!( + out.contains("\"weather_observation\""), + "Missing schema name:\n{out}" 
+ ); + assert!( + out.contains("VERSION: u32 = 2"), + "Missing schema version:\n{out}" + ); + } + + #[test] + fn linkable_impl_json_generated() { + let out = extended_generated(); + assert!( + out.contains("impl Linkable for WeatherObservationValue"), + "Missing Linkable impl:\n{out}" + ); + assert!( + out.contains("serde_json::to_vec"), + "Missing serde_json::to_vec call:\n{out}" + ); + assert!( + out.contains("serde_json::from_slice"), + "Missing serde_json::from_slice call:\n{out}" + ); + } + + #[test] + fn observable_impl_generated() { + let out = extended_generated(); + assert!( + out.contains("impl Observable for WeatherObservationValue"), + "Missing Observable impl:\n{out}" + ); + assert!( + out.contains("self.temperature_celsius"), + "Missing signal field access:\n{out}" + ); + assert!(out.contains("\"ยฐC\""), "Missing unit:\n{out}"); + } + + #[test] + fn settable_impl_generated() { + let out = extended_generated(); + assert!( + out.contains("impl Settable for WeatherObservationValue"), + "Missing Settable impl:\n{out}" + ); + assert!( + out.contains("(f32, f32)"), + "Missing tuple value type:\n{out}" + ); + } + + #[test] + fn configure_schema_with_real_deserializer() { + let out = extended_generated(); + assert!( + out.contains("with_deserializer(WeatherObservationValue::from_bytes)"), + "Missing with_deserializer for inbound connector:\n{out}" + ); + } + + #[test] + fn data_contracts_import_present() { + let out = extended_generated(); + assert!( + out.contains("use aimdb_data_contracts"), + "Missing aimdb_data_contracts import:\n{out}" + ); + assert!( + out.contains("SchemaType"), + "Missing SchemaType in import:\n{out}" + ); + } + + #[test] + fn generate_cargo_toml_output() { + let state = extended_state(); + let toml = generate_cargo_toml(&state); + assert!( + toml.contains("weather-sentinel-common"), + "Missing crate name:\n{toml}" + ); + assert!( + toml.contains("serde_json"), + "Missing serde_json dep:\n{toml}" + ); + assert!( + 
toml.contains("linkable"), + "Missing linkable feature:\n{toml}" + ); + } + + #[test] + fn generate_lib_rs_output() { + let lib = generate_lib_rs(); + assert!(lib.contains("no_std"), "Missing no_std attribute:\n{lib}"); + assert!( + lib.contains("extern crate alloc"), + "Missing alloc extern:\n{lib}" + ); + assert!(lib.contains("mod schema"), "Missing schema module:\n{lib}"); + assert!( + lib.contains("pub use schema::*"), + "Missing re-export:\n{lib}" + ); + } + + #[test] + fn schema_rs_has_no_generated_header() { + let out = generate_schema_rs(&extended_state()); + assert!( + !out.contains("@generated"), + "schema.rs should not have @generated header:\n{out}" + ); + } } diff --git a/aimdb-codegen/src/state.rs b/aimdb-codegen/src/state.rs index 552e0344..93c448c3 100644 --- a/aimdb-codegen/src/state.rs +++ b/aimdb-codegen/src/state.rs @@ -9,6 +9,9 @@ use serde::{Deserialize, Serialize}; /// The full contents of `.aimdb/state.toml`. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ArchitectureState { + /// Optional project metadata for common crate generation. + #[serde(default)] + pub project: Option, pub meta: Meta, #[serde(default)] pub records: Vec, @@ -38,6 +41,46 @@ pub struct Meta { pub last_modified: String, } +// โ”€โ”€ Project metadata โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// `[project]` block โ€” drives common crate naming and Rust edition. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProjectDef { + /// Project name, used for crate naming: `{name}-common`. + pub name: String, + /// Rust edition for the generated crate (default `"2024"` at codegen time). 
+ #[serde(default)] + pub edition: Option, +} + +// โ”€โ”€ Serialization type โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Serialization format for `Linkable` trait generation. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] +#[serde(rename_all = "lowercase")] +pub enum SerializationType { + /// JSON via `serde_json` (std-only, `no_std` fallback returns error). + #[default] + Json, + /// Binary via `postcard` (works in both std and `no_std`). + Postcard, + /// No generated `Linkable` impl โ€” user provides their own. + Custom, +} + +// โ”€โ”€ Observable metadata โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// `[records.observable]` block โ€” metadata for `Observable` trait generation. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ObservableDef { + /// Field name to use as `Observable::signal()` return value. + pub signal_field: String, + /// Icon/emoji for log output (e.g. `"๐ŸŒก๏ธ"`). + pub icon: String, + /// Unit label for the signal (e.g. `"ยฐC"`). + pub unit: String, +} + // โ”€โ”€ Record definition โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ /// One `[[records]]` entry. @@ -62,6 +105,17 @@ pub struct RecordDef { /// Names of tasks that consume values from this record. #[serde(default)] pub consumers: Vec, + + /// Schema version for `SchemaType::VERSION` (default 1). + #[serde(default)] + pub schema_version: Option, + /// Serialization format for `Linkable` generation (default `"json"`). + #[serde(default)] + pub serialization: Option, + /// Observable trait metadata (omit to skip `Observable` impl). 
+    #[serde(default)]
+    pub observable: Option<ObservableDef>,
+
     /// Value struct fields (agent-derived from datasheets / specs / conversation).
     #[serde(default)]
     pub fields: Vec<FieldDef>,
@@ -130,6 +184,9 @@ pub struct FieldDef {
     pub field_type: String,
     #[serde(default)]
     pub description: String,
+    /// Include this field in `Settable::Value` tuple (default `false`).
+    #[serde(default)]
+    pub settable: bool,
 }
 
 // ── Connector definition ─────────────────────────────────────────────────────
@@ -294,6 +351,97 @@ timestamp = "2026-02-22T14:20:00Z"
         assert_eq!(BufferType::Mailbox.rust_expr(None), "BufferCfg::Mailbox");
     }
 
+    const EXTENDED_TOML: &str = r#"
+[project]
+name = "weather-sentinel"
+
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-24T21:39:15Z"
+last_modified = "2026-02-25T10:00:00Z"
+
+[[records]]
+name = "WeatherObservation"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "weather.observation."
+key_variants = ["Vienna", "Munich"] +schema_version = 2 +serialization = "json" + +[records.observable] +signal_field = "temperature_celsius" +icon = "๐ŸŒก๏ธ" +unit = "ยฐC" + +[[records.fields]] +name = "timestamp" +type = "u64" +description = "Unix timestamp in milliseconds" + +[[records.fields]] +name = "temperature_celsius" +type = "f32" +description = "Air temperature" +settable = true + +[[records.fields]] +name = "humidity_percent" +type = "f32" +description = "Relative humidity" +settable = true +"#; + + #[test] + fn parses_project_block() { + let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap(); + let project = state.project.as_ref().unwrap(); + assert_eq!(project.name, "weather-sentinel"); + assert!(project.edition.is_none()); + } + + #[test] + fn parses_schema_version_and_serialization() { + let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap(); + let r = &state.records[0]; + assert_eq!(r.schema_version, Some(2)); + assert_eq!(r.serialization, Some(SerializationType::Json)); + } + + #[test] + fn parses_observable_block() { + let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap(); + let obs = state.records[0].observable.as_ref().unwrap(); + assert_eq!(obs.signal_field, "temperature_celsius"); + assert_eq!(obs.icon, "๐ŸŒก๏ธ"); + assert_eq!(obs.unit, "ยฐC"); + } + + #[test] + fn parses_settable_field() { + let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap(); + let fields = &state.records[0].fields; + assert!(!fields[0].settable); // timestamp + assert!(fields[1].settable); // temperature_celsius + assert!(fields[2].settable); // humidity_percent + } + + #[test] + fn project_block_is_optional() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + assert!(state.project.is_none()); + } + + #[test] + fn new_fields_default_when_absent() { + let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); + let r = &state.records[0]; + assert!(r.schema_version.is_none()); + 
assert!(r.serialization.is_none()); + assert!(r.observable.is_none()); + assert!(!r.fields[0].settable); + } + #[test] fn round_trips_toml() { let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap(); diff --git a/aimdb-codegen/src/validate.rs b/aimdb-codegen/src/validate.rs index 88e7c4a6..a69b3d2d 100644 --- a/aimdb-codegen/src/validate.rs +++ b/aimdb-codegen/src/validate.rs @@ -185,6 +185,40 @@ fn validate_records(state: &ArchitectureState, errors: &mut Vec }); } + // schema_version must be >= 1 if specified + if rec.schema_version == Some(0) { + errors.push(ValidationError { + message: format!( + "record '{}' has schema_version = 0; versions must be >= 1", + rec.name + ), + location: format!("{loc}.schema_version"), + severity: Severity::Warning, + }); + } + + // Warn if settable fields exist but no timestamp field is present + let has_settable = rec.fields.iter().any(|f| f.settable); + if has_settable { + let timestamp_names = ["timestamp", "computed_at", "fetched_at"]; + let has_timestamp = rec + .fields + .iter() + .any(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str())); + if !has_timestamp { + errors.push(ValidationError { + message: format!( + "record '{}' has settable fields but no timestamp field \ + (u64 named timestamp, computed_at, or fetched_at) โ€” \ + Settable::set() will use Default::default() for the timestamp slot", + rec.name + ), + location: format!("{loc}.fields"), + severity: Severity::Warning, + }); + } + } + // Validate field types for (fidx, field) in rec.fields.iter().enumerate() { if field.name.is_empty() { @@ -224,6 +258,42 @@ fn validate_records(state: &ArchitectureState, errors: &mut Vec }); } } + + // Validate observable block + if let Some(obs) = &rec.observable { + let field_exists = rec.fields.iter().any(|f| f.name == obs.signal_field); + if !field_exists { + errors.push(ValidationError { + message: format!( + "observable signal_field '{}' does not match any field in record '{}'", + obs.signal_field, 
rec.name + ), + location: format!("{loc}.observable.signal_field"), + severity: Severity::Error, + }); + } else { + // Check signal_field type is numeric (Observable::Signal: PartialOrd + Copy) + let field = rec + .fields + .iter() + .find(|f| f.name == obs.signal_field) + .unwrap(); + let numeric_types = [ + "f32", "f64", "u8", "u16", "u32", "u64", "i8", "i16", "i32", "i64", + ]; + if !numeric_types.contains(&field.field_type.as_str()) { + errors.push(ValidationError { + message: format!( + "observable signal_field '{}' has type '{}' which is not numeric โ€” \ + Observable::Signal must implement PartialOrd + Copy", + obs.signal_field, field.field_type + ), + location: format!("{loc}.observable.signal_field"), + severity: Severity::Warning, + }); + } + } + } } } @@ -406,4 +476,174 @@ description = "Value" "Display should show location:\n{s}" ); } + + #[test] + fn detects_observable_missing_signal_field() { + let toml = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor"] + +[records.observable] +signal_field = "nonexistent" +icon = "๐ŸŒก๏ธ" +unit = "ยฐC" + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Temperature" +"#; + let state = ArchitectureState::from_toml(toml).unwrap(); + let errs = validate(&state); + let has_err = errs.iter().any(|e| { + e.severity == Severity::Error && e.message.contains("does not match any field") + }); + assert!( + has_err, + "Should detect missing observable signal_field:\n{errs:?}" + ); + } + + #[test] + fn warns_schema_version_zero() { + let toml = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." 
+key_variants = ["indoor"] +schema_version = 0 + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Temperature" +"#; + let state = ArchitectureState::from_toml(toml).unwrap(); + let errs = validate(&state); + let has_warn = errs + .iter() + .any(|e| e.severity == Severity::Warning && e.message.contains("schema_version = 0")); + assert!(has_warn, "Should warn about schema_version = 0:\n{errs:?}"); + } + + #[test] + fn warns_settable_fields_without_timestamp() { + let toml = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." +key_variants = ["indoor"] + +[[records.fields]] +name = "celsius" +type = "f64" +description = "Temperature" +settable = true +"#; + let state = ArchitectureState::from_toml(toml).unwrap(); + let errs = validate(&state); + let has_warn = errs + .iter() + .any(|e| e.severity == Severity::Warning && e.message.contains("no timestamp field")); + assert!( + has_warn, + "Should warn about settable fields with no timestamp:\n{errs:?}" + ); + } + + #[test] + fn no_warn_settable_fields_with_timestamp() { + let toml = r#" +[meta] +aimdb_version = "0.5.0" +created_at = "2026-02-22T14:00:00Z" +last_modified = "2026-02-22T14:33:00Z" + +[[records]] +name = "TemperatureReading" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "sensors.temp." 
+key_variants = ["indoor"]
+
+[[records.fields]]
+name = "timestamp"
+type = "u64"
+description = "Unix ms"
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature"
+settable = true
+"#;
+        let state = ArchitectureState::from_toml(toml).unwrap();
+        let errs = validate(&state);
+        let has_warn = errs
+            .iter()
+            .any(|e| e.severity == Severity::Warning && e.message.contains("no timestamp field"));
+        assert!(
+            !has_warn,
+            "Should not warn when timestamp field is present:\n{errs:?}"
+        );
+    }
+
+    #[test]
+    fn warns_observable_non_numeric_signal_field() {
+        let toml = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor"]
+
+[records.observable]
+signal_field = "label"
+icon = "📊"
+unit = ""
+
+[[records.fields]]
+name = "label"
+type = "String"
+description = "A label"
+"#;
+        let state = ArchitectureState::from_toml(toml).unwrap();
+        let errs = validate(&state);
+        let has_warn = errs
+            .iter()
+            .any(|e| e.severity == Severity::Warning && e.message.contains("not numeric"));
+        assert!(
+            has_warn,
+            "Should warn about non-numeric signal_field:\n{errs:?}"
+        );
+    }
 }
diff --git a/docs/design/024-M11-codegen-common-crate.md b/docs/design/024-M11-codegen-common-crate.md
new file mode 100644
index 00000000..f34c15ee
--- /dev/null
+++ b/docs/design/024-M11-codegen-common-crate.md
@@ -0,0 +1,748 @@
+# Design: Extending aimdb-codegen to Generate Common Crates
+
+**Status:** Draft
+**Date:** 2026-02-25
+**Scope:** `aimdb-codegen` — extend Rust code generation to produce a complete,
+compilable `xx-xx-common` crate from `.aimdb/state.toml`
+
+---
+
+## 1. Problem Statement
+
+Today `aimdb-codegen` generates a single `generated_schema.rs` file containing
+value structs, key enums, and a `configure_schema()` function.
But every real +AimDB project ends up with a hand-written **common crate** that adds: + +- `no_std` / `std` feature gating +- `Linkable` trait implementations (serialisation) +- `SchemaType` trait implementations (identity + versioning) +- `Observable` trait implementations (signal extraction, icons, units) +- Serialiser/deserialiser wiring in `configure_schema()` (currently TODO stubs) +- Convenience re-exports +- A `Cargo.toml` with correct dependencies and feature flags + +This boilerplate is near-identical across projects. The codegen should produce it. + +### Evidence โ€” existing hand-written common crates + +| Crate | Location | Key enums | Custom types | Trait impls | +|-------|----------|-----------|-------------|-------------| +| `weather-mesh-common` | `examples/weather-mesh-demo/` | 2 | 0 (re-exports) | 0 | +| `demo-weather-common` | `aimdb-pro/demo/` | 5 | 3 (Forecast*) | SchemaType, Observable, Linkable | +| `mqtt-connector-demo-common` | `examples/` | 2 | 2 | SchemaType, custom ser/de | +| `knx-connector-demo-common` | `examples/` | 3 | 3 | SchemaType, custom display | + +Every one follows the same structure. The codegen should eliminate 80%+ of this. + +--- + +## 2. Design Principles + +1. **Generate what's derivable, leave extension points for what's not.** + Serialisation format (JSON) is derivable. Domain-specific simulation logic is + not โ€” it is either hand-written or LLM-generated via inline enrichment + (see Section 7). + +2. **Generated code must compile without edits.** + No TODO stubs in the output. If the codegen produces it, it builds. + The agent verifies this by running `cargo check` (and optionally `clippy`) + after generation and iterates until the crate compiles cleanly. + +3. **`no_std` compatible by default.** + Generated common crates use `#![cfg_attr(not(feature = "std"), no_std)]` + with `alloc` for String/Vec types. Feature-gated `serde_json` for std. + +4. 
**Single source of truth stays in `state.toml`.** + New TOML fields drive generation. No second config file. + +5. **Opt-in enrichment, not mandatory.** + New TOML fields have sensible defaults. Existing `state.toml` files continue + to generate valid (if minimal) output. + +--- + +## 3. TOML Schema Extensions + +### 3.1 Project-level metadata (new `[project]` block) + +```toml +[project] +name = "weather-sentinel" # used for crate naming: weather-sentinel-common +edition = "2024" # Rust edition, default "2024" +``` + +Drives the generated `Cargo.toml` crate name: `{project.name}-common`. + +### 3.2 Record-level enrichment (new optional fields on `[[records]]`) + +```toml +[[records]] +name = "WeatherObservation" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "weather.observation." +key_variants = ["Vienna", "Munich", "Berlin"] + +# โ”€โ”€ New fields โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +schema_version = 2 # SchemaType::VERSION, default 1 +serialization = "json" # "json" | "postcard" | "custom", default "json" + +# Observable trait metadata (optional โ€” omit to skip Observable impl) +[records.observable] +signal_field = "temperature_celsius" # field name to use as signal() +icon = "๐ŸŒก๏ธ" # Observable::ICON +unit = "ยฐC" # Observable::UNIT +``` + +### 3.3 Field-level enrichment (new optional fields on `[[records.fields]]`) + +```toml +[[records.fields]] +name = "temperature_celsius" +type = "f32" +description = "Air temperature at 2m above ground in ยฐC" + +# โ”€โ”€ New fields โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +settable = true # include this field in Settable::Value tuple, default false +``` + +### 3.4 Full example โ€” extended `state.toml` + +```toml +[project] +name = "weather-sentinel" + +[meta] +aimdb_version = "0.5.0" +created_at = 
"2026-02-24T21:39:15Z" +last_modified = "2026-02-25T10:00:00Z" + +[[records]] +name = "WeatherObservation" +buffer = "SpmcRing" +capacity = 256 +key_prefix = "weather.observation." +key_variants = ["Vienna", "Munich", "Berlin", "Rome", "Zurich", "Paris"] +producers = ["station_vienna", "station_munich", "station_berlin", + "station_rome", "station_zurich", "station_paris"] +consumers = ["sentinel_agent"] +schema_version = 1 +serialization = "json" + +[records.observable] +signal_field = "temperature_celsius" +icon = "๐ŸŒก๏ธ" +unit = "ยฐC" + +[[records.fields]] +name = "timestamp" +type = "u64" +description = "Unix timestamp in milliseconds" + +[[records.fields]] +name = "temperature_celsius" +type = "f32" +description = "Air temperature at 2m above ground in ยฐC" +settable = true + +[[records.fields]] +name = "humidity_percent" +type = "f32" +description = "Relative humidity at 2m in %" +settable = true + +[[records.connectors]] +protocol = "mqtt" +direction = "inbound" +url = "sensors/{variant}/observation" +``` + +--- + +## 4. Generated Output โ€” Common Crate Structure + +Running `aimdb generate --common-crate` produces a directory: + +``` +weather-sentinel-common/ +โ”œโ”€โ”€ Cargo.toml # deterministic +โ””โ”€โ”€ src/ + โ”œโ”€โ”€ lib.rs # deterministic + โ””โ”€โ”€ schema.rs # deterministic skeleton + user-accepted LLM enrichments +``` + +### 4.1 Generated `Cargo.toml` + +```toml +# Regenerate with `aimdb generate` +[package] +name = "weather-sentinel-common" +version = "0.1.0" +edition = "2024" + +[features] +default = ["std"] +std = ["aimdb-data-contracts/std", "serde_json"] +alloc = [] + +[dependencies] +aimdb-core = { version = "0.5", default-features = false, features = ["derive", "alloc"] } +aimdb-data-contracts = { version = "0.5", default-features = false } +serde = { version = "1.0", default-features = false, features = ["derive", "alloc"] } +serde_json = { version = "1.0", optional = true } +``` + +**Note:** `aimdb-executor` is intentionally absent. 
The common crate is +platform-agnostic and carries only data contracts. `configure_schema` and any +runtime registration code live in the application crate, not here. + +### 4.2 Generated `lib.rs` + +```rust +// @generated โ€” do not edit manually. +#![cfg_attr(not(feature = "std"), no_std)] +extern crate alloc; + +mod schema; + +// Re-export all public types for downstream crates +pub use schema::*; +``` + +### 4.3 Generated `schema.rs` โ€” extended + +The existing generation (value structs, key enums, `configure_schema`) stays. +The following sections are **added**. + +--- + +## 5. New Generated Code โ€” Trait Implementations + +### 5.1 `SchemaType` implementation + +Generated for **every** record. Uses `schema_version` from TOML (default 1). + +```rust +impl SchemaType for WeatherObservationValue { + const NAME: &'static str = "weather_observation"; // snake_case of record name + const VERSION: u32 = 1; +} +``` + +**Derivation rule:** `name` field โ†’ `to_snake_case()` โ†’ `SchemaType::NAME`. + +### 5.2 `Linkable` implementation (serialisation) + +Generated when `serialization` is set (default `"json"`). 
+
+**For `serialization = "json"`:**
+
+```rust
+impl Linkable for WeatherObservationValue {
+    fn to_bytes(&self) -> Result<alloc::vec::Vec<u8>, alloc::string::String> {
+        #[cfg(feature = "std")]
+        {
+            serde_json::to_vec(self)
+                .map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME))
+        }
+        #[cfg(not(feature = "std"))]
+        {
+            // Fallback: serde_json not available in no_std
+            // Users should override this impl or enable the std feature
+            Err(alloc::format!("no_std serialization not implemented for {}", Self::NAME))
+        }
+    }
+
+    fn from_bytes(data: &[u8]) -> Result<Self, alloc::string::String> {
+        #[cfg(feature = "std")]
+        {
+            serde_json::from_slice(data)
+                .map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME))
+        }
+        #[cfg(not(feature = "std"))]
+        {
+            let _ = data;
+            Err(alloc::format!("no_std deserialization not implemented for {}", Self::NAME))
+        }
+    }
+}
+```
+
+**For `serialization = "postcard"`:** generates `postcard::to_allocvec` /
+`postcard::from_bytes` calls. Adds `postcard` to Cargo.toml dependencies.
+
+**For `serialization = "custom"`:** no deterministic `Linkable` impl is
+generated. Instead, the agent proposes a `Linkable` impl as an inline
+enrichment, using field types and descriptions to infer the serialisation
+format. If the format is ambiguous (e.g. mixed binary and text fields), the
+agent asks the user to clarify before generating.
+
+### 5.3 `Observable` implementation
+
+Generated **only** when `[records.observable]` block is present.
+ +```rust +impl Observable for WeatherObservationValue { + type Signal = f32; // inferred from the field type of signal_field + const ICON: &'static str = "๐ŸŒก๏ธ"; + const UNIT: &'static str = "ยฐC"; + + fn signal(&self) -> f32 { + self.temperature_celsius + } + + fn format_log(&self, node_id: &str) -> alloc::string::String { + alloc::format!( + "{} [{}] {}: {:.1}{} at {}", + Self::ICON, + node_id, + Self::NAME, + self.signal(), + Self::UNIT, + self.timestamp, // uses first u64 field as timestamp, or computed_at + ) + } +} +``` + +**`format_log` heuristic for the timestamp field:** Use the first field named +`timestamp`, `computed_at`, or `fetched_at`. If none found, omit the timestamp +portion from the format string. + +### 5.4 `Settable` implementation + +Generated when **any** field has `settable = true`. + +```rust +impl Settable for WeatherObservationValue { + // Tuple of all settable fields, in order + type Value = (f32, f32); // (temperature_celsius, humidity_percent) + + fn set(value: Self::Value, timestamp: u64) -> Self { + Self { + timestamp, + temperature_celsius: value.0, + humidity_percent: value.1, + } + } +} +``` + +**Rules:** +- `Value` is a tuple of the field types marked `settable = true` +- If only one field is settable, `Value` is the bare type (not a 1-tuple) +- The `timestamp` parameter fills the first `u64` field named `timestamp` / + `computed_at` / `fetched_at` + +--- + +## 6. Closing the TODO Gap โ€” `configure_schema()` with Real Serialisers + +The current generated `configure_schema()` has TODO stubs. 
With `Linkable` +impls available, the codegen can wire real serialisers: + +### Before (current) + +```rust +if let Some(addr) = key.link_address() { + let _ = "TODO: add .with_deserializer(...)"; + reg.link_from(addr); +} +``` + +### After (proposed) + +```rust +if let Some(addr) = key.link_address() { + reg.link_from(addr) + .with_deserializer(WeatherObservationValue::from_bytes) + .finish(); +} +``` + +For outbound: + +```rust +if let Some(addr) = key.link_address() { + reg.link_to(addr) + .with_serializer(|v: &WeatherObservationValue| { + v.to_bytes() + .map_err(|_| aimdb_core::connector::SerializeError::InvalidData) + }) + .finish(); +} +``` + +**Condition:** Only generated when `serialization != "custom"`. When +`serialization = "custom"`, the TODO stubs remain (user provides their own). + +--- + +## 7. LLM-Assisted Enrichment โ€” Inline Suggestions + +Some code cannot be deterministically derived from `state.toml` alone โ€” but it +*can* be generated by an LLM, because the schema already contains rich semantic +context: field names, types, units, descriptions, buffer semantics, and +producer/consumer relationships. + +### Single-file model โ€” no `extensions.rs` + +Instead of generating a separate file for LLM-produced code, the MCP tool +returns the deterministic `schema.rs` back to the calling LLM agent along with +structured context about what enrichments are possible. The agent then proposes +**inline edits** to `schema.rs` โ€” presented as standard IDE inline suggestions +(Copilot-style) that the user can accept, reject, or modify per-suggestion. + +This means: +- **One file** โ€” no module wiring, no feature-gating between generated and + hand-written modules, no "which file owns this impl?" 
confusion +- **User stays in control** โ€” each suggestion is individually reviewable in the + IDE diff view, not a black-box file drop +- **Incremental** โ€” on schema changes the deterministic codegen re-runs and the + agent can propose new inline enrichments for just the changed/added records +- **No markers, no ceremony** โ€” the agent uses `git diff` to understand what + changed between codegen runs. No `@generated` comments polluting the source. + The file reads like normal Rust code (see Section 7.2) + +### 7.1 Invocation flow (MCP-first) + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ LLM Agent (Claude Code / IDE / MCP client) โ”‚ +โ”‚ โ”‚ +โ”‚ 1. Agent creates/updates .aimdb/state.toml โ”‚ +โ”‚ โ”‚ +โ”‚ 2. Agent calls MCP tool: generate_common_crate โ”‚ +โ”‚ โ””โ”€โ–บ aimdb-mcp delegates to aimdb-codegen โ”‚ +โ”‚ โ””โ”€ writes: schema.rs, Cargo.toml, lib.rs โ”‚ +โ”‚ โ””โ”€ returns to agent: โ”‚ +โ”‚ โ”œโ”€ list of generated types + keys โ”‚ +โ”‚ โ”œโ”€ enrichment opportunities (per record): โ”‚ +โ”‚ โ”‚ โ€ข Simulatable: field ranges, units, semantics โ”‚ +โ”‚ โ”‚ โ€ข format_log: richer formatting possible โ”‚ +โ”‚ โ”‚ โ€ข task scaffolds: producer/consumer stubs โ”‚ +โ”‚ โ””โ”€ the generated schema.rs content โ”‚ +โ”‚ โ”‚ +โ”‚ 3. Agent diffs state.toml + schema.rs against git HEAD โ”‚ +โ”‚ โ””โ”€ identifies added/changed/removed records โ”‚ +โ”‚ โ””โ”€ proposes cleanup for stale user-owned code โ”‚ +โ”‚ โ”‚ +โ”‚ 4. Agent proposes inline edits to schema.rs: โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ // After the deterministic Observable impl โ”‚ โ”‚ +โ”‚ โ”‚+ impl Simulatable for WeatherObservation... 
โ”‚ โ—„ accept โ”‚ +โ”‚ โ”‚ โ”‚ reject โ”‚ +โ”‚ โ”‚- impl Simulatable for RemovedRecord... โ”‚ modify โ”‚ +โ”‚ โ”‚+ async fn station_producer(...) โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ โ”‚ +โ”‚ 5. User accepts/rejects each suggestion in IDE โ”‚ +โ”‚ โ”‚ +โ”‚ 6. Agent runs `cargo check` to verify compilation โ”‚ +โ”‚ โ””โ”€ on error: reads diagnostics, proposes fixes โ”‚ +โ”‚ โ””โ”€ iterates until the crate builds cleanly โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +The key insight: the LLM that created `state.toml` and `memory.md` already understands the +domain. The MCP server feeds the deterministic schema back to that same LLM +as structured context. The agent proposes enrichments as normal code edits โ€” +no second LLM, no API key, no separate file. The agent in the loop *is* the +generator, and the IDE's diff view *is* the review surface. + +### 7.2 Re-generation strategy โ€” git diff, not markers + +After the user accepts inline suggestions, `schema.rs` contains a mix of +deterministic code and user-accepted enrichments. On subsequent codegen runs, +the agent must update the deterministic parts without destroying user code. + +**Strategy: the agent uses `git diff` to reason about changes.** + +Both `state.toml` and `schema.rs` are version-controlled. When the agent +re-generates after a schema change: + +1. **Deterministic codegen** produces a fresh `schema.rs` from `state.toml` + (to a temporary location or in-memory) +2. **Agent diffs** the fresh output against the committed `schema.rs` +3. 
**Agent merges** โ€” it understands which blocks are deterministic (structs, + key enums, trait impls derived from TOML) vs user-owned (Simulatable impls, + helper methods, task scaffolds). It replaces the former and preserves the + latter. +4. **Agent adapts** user code if needed โ€” e.g. if a field was renamed, the + agent updates references in user-owned `Simulatable` impls +5. **Agent verifies** with `cargo check` and iterates until clean + +This avoids polluting the source with marker comments. The file reads like +normal Rust code. The agent's semantic understanding of the codebase replaces +what traditional codegen tools solve with markers. + +**Why this works:** the MCP path guarantees an LLM is always in the loop. +The deterministic codegen never runs blind โ€” there's always an agent to +perform the merge intelligently. For the CLI fallback (no LLM), the codegen +simply overwrites `schema.rs` entirely โ€” the user can `git diff` and restore +any enrichments they want to keep. + +### 7.3 What the LLM enriches + +| Concern | LLM input signals | Inline suggestion | +|---------|------------------|-------------------| +| `Simulatable` impl | field types + units + descriptions | `impl Simulatable for ...` with plausible ranges (e.g. โˆ’30..50 ยฐC for temperature) | +| `format_log` override | field names + observable metadata | Richer formatting than the deterministic heuristic | +| Producer task scaffold | record name + MQTT topic + producers list | `async fn station_producer(...)` with polling skeleton | +| Consumer/agent scaffold | record name + consumers list + buffer type | `async fn sentinel_agent(...)` draining the ring | +| Helper methods | field semantics + descriptions | `fn is_anomaly(&self) -> bool` based on field descriptions | + +**What the LLM does NOT enrich:** +- Custom nested types (e.g. 
`Vec`) โ€” domain modelling beyond + `state.toml` +- `Streamable` impl (aimdb-pro) โ€” different crate boundary +- Business logic inside tasks โ€” the agent's reasoning algorithm is not + derivable from schema alone + +### 7.4 CLI fallback + +The CLI (`aimdb generate --common-crate`) runs deterministic-only codegen. +LLM enrichment requires the MCP path because it needs an LLM in the loop. +The CLI output is a fully compilable crate โ€” just without `Simulatable` impls +or task scaffolds. + +--- + +## 8. Implementation Plan + +### Phase 1 โ€” TOML schema extensions + `Cargo.toml` generation + +1. Add `[project]` block to `state.rs` (`ProjectDef` struct, optional) +2. Add `schema_version`, `serialization` to `RecordDef` +3. Add `[records.observable]` as `Option` to `RecordDef` +4. Add `settable` flag to `FieldDef` +5. Extend `validate.rs` to validate new fields: + - **Error** if `serialization` is an unrecognised string + - **Warning** if `schema_version = 0` (versions must be โ‰ฅ 1) + - **Warning** if any field has `settable = true` but no `u64` timestamp field + exists (`timestamp`, `computed_at`, or `fetched_at`) โ€” `Settable::set()` + will fall back to `Default::default()` for the timestamp slot + - **Error** if `observable.signal_field` does not name a field in the record + - **Warning** if `observable.signal_field` has a non-numeric type +6. Add `generate_cargo_toml()` to codegen +7. Add `generate_lib_rs()` to codegen + +### Phase 2 โ€” Trait implementation generation + +1. Add `emit_schema_type_impl()` to `rust.rs` +2. Add `emit_linkable_impl()` to `rust.rs` (json / postcard branches) +3. Add `emit_observable_impl()` to `rust.rs` +4. Add `emit_settable_impl()` to `rust.rs` +5. Wire all into the existing `generate_rust()` pipeline + +### Phase 3 โ€” Close the TODO gap + +1. Update `emit_configure_schema()` to call `.with_serializer()` / + `.with_deserializer()` with real `Linkable` methods +2. Add `.finish()` calls after link setup +3. 
Conditionally generate TODO stubs only for `serialization = "custom"` + +### Phase 4 โ€” CLI integration + +1. Add `--common-crate` flag to `aimdb generate` +2. Default output directory: `{project.name}-common/` alongside `.aimdb/` +3. CLI produces clean `schema.rs` without markers โ€” just normal Rust code +4. Update `aimdb generate` to produce both the flat file (backward compat) and + the crate structure (new default) + +### Phase 5 โ€” MCP-driven LLM enrichment + +1. Add `generate_common_crate` tool to `aimdb-mcp` +2. Tool runs deterministic codegen (Phases 1โ€“4) via `aimdb-codegen` +3. Agent diffs the fresh output against the committed `schema.rs` (git diff) +4. Agent merges deterministic changes while preserving user-owned enrichments +5. Agent proposes new inline enrichments for added/changed records +6. Agent verifies with `cargo check`, iterates until clean +7. CLI fallback: `aimdb generate --common-crate` overwrites `schema.rs` + entirely; the user can `git diff` to restore enrichments manually + +--- + +## 9. Migration Path + +> **Out of scope for this alpha.** `aimdb-codegen` is not yet used in +> production. Migration tooling (version diffing, `Migratable` stub generation, +> backward-compatible output guarantees) will be designed once the codegen is +> stable and adopted. Do not implement or design migration features at this stage. + +When the codegen graduates from alpha, migration will need to address: +- Projects with `state.toml` files predating the `[project]` block +- Schema version bumps between `state.toml` revisions +- Backward compatibility of generated `Cargo.toml` dependency versions + +These are deferred entirely. + +--- + +## 10. 
Example โ€” Full Generated Output + +Given the `state.toml` from Section 3.4, `aimdb generate --common-crate` +produces: + +``` +weather-sentinel-common/ +โ”œโ”€โ”€ Cargo.toml # deterministic +โ””โ”€โ”€ src/ + โ”œโ”€โ”€ lib.rs # deterministic + โ””โ”€โ”€ schema.rs # deterministic skeleton + user-accepted enrichments +``` + +**`schema.rs`** after deterministic codegen + accepted LLM enrichments: + +```rust +use aimdb_core::RecordKey; +use aimdb_data_contracts::{Linkable, Observable, SchemaType, Settable}; +use serde::{Deserialize, Serialize}; + +// โ”€โ”€ WeatherObservation โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Value type for `WeatherObservation`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WeatherObservationValue { + /// Unix timestamp in milliseconds + pub timestamp: u64, + /// Air temperature at 2m above ground in ยฐC + pub temperature_celsius: f32, + /// Relative humidity at 2m in % + pub humidity_percent: f32, +} + +#[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)] +#[key_prefix = "weather.observation."] +pub enum WeatherObservationKey { + #[key = "Vienna"] + #[link_address = "sensors/Vienna/observation"] + Vienna, + #[key = "Munich"] + #[link_address = "sensors/Munich/observation"] + Munich, + // ... remaining variants ... 
}

impl SchemaType for WeatherObservationValue {
    const NAME: &'static str = "weather_observation";
    const VERSION: u32 = 1;
}

impl Linkable for WeatherObservationValue {
    fn to_bytes(&self) -> Result<alloc::vec::Vec<u8>, alloc::string::String> {
        #[cfg(feature = "std")]
        { serde_json::to_vec(self).map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME)) }
        #[cfg(not(feature = "std"))]
        { Err(alloc::string::String::from("no_std serialization not available")) }
    }

    fn from_bytes(data: &[u8]) -> Result<Self, alloc::string::String> {
        #[cfg(feature = "std")]
        { serde_json::from_slice(data).map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME)) }
        #[cfg(not(feature = "std"))]
        { let _ = data; Err(alloc::string::String::from("no_std deserialization not available")) }
    }
}

impl Observable for WeatherObservationValue {
    type Signal = f32;
    const ICON: &'static str = "🌡️";
    const UNIT: &'static str = "°C";

    fn signal(&self) -> f32 {
        self.temperature_celsius
    }

    fn format_log(&self, node_id: &str) -> alloc::string::String {
        alloc::format!(
            "{} [{}] {}: {:.1}{} at {}",
            Self::ICON, node_id, Self::NAME, self.signal(), Self::UNIT, self.timestamp
        )
    }
}

impl Settable for WeatherObservationValue {
    type Value = (f32, f32);

    fn set(value: Self::Value, timestamp: u64) -> Self {
        Self {
            timestamp,
            temperature_celsius: value.0,
            humidity_percent: value.1,
        }
    }
}

// ── LLM enrichment (accepted by user, preserved on re-generation) ───────

impl Simulatable for WeatherObservationValue {
    fn simulate<R: rand::Rng>(
        config: &SimulationConfig,
        previous: Option<&Self>,
        rng: &mut R,
        timestamp: u64,
    ) -> Self {
        let base = config.params.base as f32;
        let variation = config.params.variation as f32;
        let step = config.params.step as f32;
        let celsius = match previous {
            Some(prev) => {
                let delta = (rng.gen::<f32>() - 0.5) * variation * step;
                (prev.temperature_celsius + delta).clamp(base - variation, base + variation)
            }
            None => base
                + (rng.gen::<f32>() - 0.5) * variation,
        };
        Self {
            timestamp,
            temperature_celsius: celsius,
            humidity_percent: (50.0 + (rng.gen::<f32>() - 0.5) * 40.0).clamp(0.0, 100.0),
        }
    }
}
```

**Note:** `configure_schema` is **not** generated in `schema.rs`. The common
crate is platform-agnostic. Application crates import the types and call
`builder.configure::<WeatherObservationValue>(key, |reg| { ... })` directly,
using `WeatherObservationValue::from_bytes` / `to_bytes` from the `Linkable`
impl for connector wiring.

---

## 11. Open Questions

1. **Should `postcard` be a first-class serialisation target?**
   It's the natural choice for `no_std` / Embassy. The TOML field supports it,
   but implementation can be deferred to Phase 2+.

2. **Should the codegen generate `Debug` formatting for key enums?**
   Currently missing from the derive list. The hand-written crates include it.
   → Recommendation: yes, add `Debug` to key enum derives.

3. **Should `configure_schema` be generic over the adapter?**
   Currently it's `<R: Spawn>`. Some registrar methods (like `.tap()`) require
   adapter-specific extension traits. The generated function should stay generic
   and only call the adapter-agnostic subset.

4. **MCP tool return contract for `generate_common_crate`.**
   The tool needs to return enough context for the calling LLM to propose
   inline enrichments. Options: (a) return the full generated `schema.rs`
   content inline, (b) return a summary + file path and let the agent read it,
   (c) return a structured list of enrichment opportunities with insertion
   points. → Recommend (c): return enrichment opportunities as structured data
   (record name, missing traits, field semantics) so the agent can propose
   targeted edits without re-reading the full file.

5. **What happens to user code referencing a removed record?**
   When a record is deleted from `state.toml`, its `@generated` marker blocks
   are removed.
Since both `state.toml` and `schema.rs` are versioned, the + agent can `git diff` the before/after to identify exactly which records were + added, changed, or removed. For removed records, the agent proactively + proposes deletion of stale user-owned code (e.g. orphaned `Simulatable` + impls). For changed records, it proposes updates to enrichments that + reference renamed or retyped fields. After applying changes, the agent runs + `cargo check` to verify the crate compiles โ€” if it doesn't, it reads the + compiler errors and proposes fixes. This closes the loop: the agent doesn't + just generate code, it verifies the result and iterates until the crate + builds cleanly. diff --git a/tools/aimdb-cli/src/commands/generate.rs b/tools/aimdb-cli/src/commands/generate.rs index 9377248e..8568925c 100644 --- a/tools/aimdb-cli/src/commands/generate.rs +++ b/tools/aimdb-cli/src/commands/generate.rs @@ -2,12 +2,14 @@ //! //! Reads `.aimdb/state.toml` and emits: //! - `.aimdb/architecture.mermaid` โ€” Mermaid diagram -//! - `src/generated_schema.rs` โ€” compilable Rust schema +//! - `src/generated_schema.rs` โ€” compilable Rust schema (flat mode) +//! - `{project.name}-common/` โ€” compilable common crate (`--common-crate` mode) //! //! # Usage //! //! ```text -//! aimdb generate # generate both artefacts +//! aimdb generate # generate flat file + diagram +//! aimdb generate --common-crate # generate common crate directory //! aimdb generate --check # validate only (CI) //! aimdb generate --dry-run # print to stdout, don't write //! aimdb generate --state path/state.toml # custom state path @@ -19,7 +21,10 @@ //! 
``` use crate::error::CliResult; -use aimdb_codegen::{generate_mermaid, generate_rust, validate, ArchitectureState, Severity}; +use aimdb_codegen::{ + generate_cargo_toml, generate_lib_rs, generate_mermaid, generate_rust, generate_schema_rs, + validate, ArchitectureState, Severity, +}; use anyhow::Context; use clap::Args; use colored::Colorize; @@ -36,10 +41,15 @@ pub struct GenerateCommand { #[arg(long, default_value = ".aimdb/architecture.mermaid")] pub mermaid: PathBuf, - /// Output path for generated Rust source + /// Output path for generated Rust source (flat mode only) #[arg(long, default_value = "src/generated_schema.rs")] pub rust: PathBuf, + /// Generate a complete common crate directory instead of a flat file. + /// Requires `[project]` block in state.toml. Outputs to `{project.name}-common/`. + #[arg(long)] + pub common_crate: bool, + /// Validate state.toml without writing files (exit 1 if errors found) #[arg(long)] pub check: bool, @@ -88,9 +98,17 @@ impl GenerateCommand { return Ok(()); } - // โ”€โ”€ Generate โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ - let mermaid_src = generate_mermaid(&state); - let rust_src = generate_rust(&state); + if self.common_crate { + self.execute_common_crate(&state).await + } else { + self.execute_flat(&state).await + } + } + + /// Flat mode: generate `src/generated_schema.rs` and `.aimdb/architecture.mermaid`. 
+ async fn execute_flat(&self, state: &ArchitectureState) -> CliResult<()> { + let mermaid_src = generate_mermaid(state); + let rust_src = generate_rust(state); if self.dry_run { println!("{} {}", "โ”€โ”€ Mermaid".dimmed(), self.mermaid.display()); @@ -100,7 +118,6 @@ impl GenerateCommand { return Ok(()); } - // โ”€โ”€ Write files โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ write_if_changed(&self.mermaid, &mermaid_src, "Mermaid")?; write_if_changed(&self.rust, &rust_src, "Rust")?; @@ -112,6 +129,55 @@ impl GenerateCommand { Ok(()) } + + /// Common crate mode: generate `{project.name}-common/` directory. + async fn execute_common_crate(&self, state: &ArchitectureState) -> CliResult<()> { + let project = state.project.as_ref().ok_or_else(|| { + anyhow::anyhow!( + "--common-crate requires a [project] block in state.toml.\n\ + Add:\n [project]\n name = \"your-project\"" + ) + })?; + + let crate_dir = PathBuf::from(format!("{}-common", project.name)); + let src_dir = crate_dir.join("src"); + + let cargo_toml = generate_cargo_toml(state); + let lib_rs = generate_lib_rs(); + let schema_rs = generate_schema_rs(state); + + if self.dry_run { + let cargo_path = crate_dir.join("Cargo.toml"); + let lib_path = src_dir.join("lib.rs"); + let schema_path = src_dir.join("schema.rs"); + + println!("{} {}", "โ”€โ”€ Cargo.toml".dimmed(), cargo_path.display()); + println!("{cargo_toml}"); + println!("{} {}", "โ”€โ”€ lib.rs".dimmed(), lib_path.display()); + println!("{lib_rs}"); + println!("{} {}", "โ”€โ”€ schema.rs".dimmed(), schema_path.display()); + println!("{schema_rs}"); + return Ok(()); + } + + // Also generate the Mermaid diagram + let mermaid_src = generate_mermaid(state); + write_if_changed(&self.mermaid, &mermaid_src, "Mermaid")?; + + // Write common crate files + write_if_changed(&crate_dir.join("Cargo.toml"), &cargo_toml, "Cargo.toml")?; + 
write_if_changed(&src_dir.join("lib.rs"), &lib_rs, "lib.rs")?; + write_if_changed(&src_dir.join("schema.rs"), &schema_rs, "schema.rs")?; + + println!( + "{} common crate {} generated ({} record(s))", + "โœ“".green(), + crate_dir.display(), + state.records.len() + ); + + Ok(()) + } } // โ”€โ”€ Helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ diff --git a/tools/aimdb-mcp/src/architecture/mod.rs b/tools/aimdb-mcp/src/architecture/mod.rs index 638ad964..daf3adb4 100644 --- a/tools/aimdb-mcp/src/architecture/mod.rs +++ b/tools/aimdb-mcp/src/architecture/mod.rs @@ -286,6 +286,7 @@ pub fn ensure_state_initialised(path: &Path) -> anyhow::Result Date: Fri, 27 Feb 2026 19:32:05 +0000 Subject: [PATCH 09/46] feat: add tasks and binaries support to architecture state - Introduced `tasks` and `binaries` fields in `ArchitectureState` struct. - Added `TaskDef`, `TaskIo`, `TaskType`, `BinaryDef`, and `ExternalConnectorDef` structs for task and binary definitions. - Implemented validation for tasks and binaries in `validate.rs`, ensuring proper references and existence checks. - Enhanced the CLI with a new `--hub` option to generate a hub binary crate scaffold, including `Cargo.toml`, `main.rs`, and `tasks.rs`. - Updated default paths to be relative to `AIMDB_WORKSPACE` for better project structure management. 
--- .vscode/mcp.json | 3 +- aimdb-codegen/src/lib.rs | 11 +- aimdb-codegen/src/rust.rs | 1077 +++++++++++++++++++++- aimdb-codegen/src/state.rs | 88 ++ aimdb-codegen/src/validate.rs | 105 +++ tools/aimdb-cli/src/commands/generate.rs | 83 +- tools/aimdb-mcp/src/architecture/mod.rs | 30 +- 7 files changed, 1339 insertions(+), 58 deletions(-) diff --git a/.vscode/mcp.json b/.vscode/mcp.json index 1909b3b0..9e518063 100644 --- a/.vscode/mcp.json +++ b/.vscode/mcp.json @@ -5,7 +5,8 @@ "command": "/aimdb_ws/aimdb/target/release/aimdb-mcp", "args": [], "env": { - "RUST_LOG": "info" + "RUST_LOG": "info", + "AIMDB_WORKSPACE": "${workspaceFolder}" } } } diff --git a/aimdb-codegen/src/lib.rs b/aimdb-codegen/src/lib.rs index b6eea7c7..6711a0d6 100644 --- a/aimdb-codegen/src/lib.rs +++ b/aimdb-codegen/src/lib.rs @@ -54,10 +54,15 @@ pub mod validate; // โ”€โ”€ Convenience re-exports โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ pub use mermaid::generate_mermaid; -pub use rust::{generate_cargo_toml, generate_lib_rs, generate_rust, generate_schema_rs}; +pub use rust::{ + generate_binary_cargo_toml, generate_cargo_toml, generate_hub_cargo_toml, generate_hub_main_rs, + generate_hub_schema_rs, generate_hub_tasks_rs, generate_lib_rs, generate_main_rs, + generate_rust, generate_schema_rs, generate_tasks_rs, +}; pub use rust::{to_pascal_case, to_snake_case}; pub use state::{ - ArchitectureState, BufferType, ConnectorDef, ConnectorDirection, DecisionEntry, FieldDef, Meta, - ObservableDef, ProjectDef, RecordDef, SerializationType, + ArchitectureState, BinaryDef, BufferType, ConnectorDef, ConnectorDirection, DecisionEntry, + ExternalConnectorDef, FieldDef, Meta, ObservableDef, ProjectDef, RecordDef, SerializationType, + TaskDef, TaskIo, TaskType, }; pub use validate::{is_valid, validate, Severity, ValidationError}; diff --git a/aimdb-codegen/src/rust.rs b/aimdb-codegen/src/rust.rs 
index 171d434b..c6083fbd 100644 --- a/aimdb-codegen/src/rust.rs +++ b/aimdb-codegen/src/rust.rs @@ -10,7 +10,10 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use crate::state::{ArchitectureState, ConnectorDirection, RecordDef, SerializationType}; +use crate::state::{ + ArchitectureState, ConnectorDef, ConnectorDirection, RecordDef, SerializationType, TaskDef, + TaskType, +}; // โ”€โ”€ Public API โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -196,7 +199,297 @@ pub use schema::*; .to_string() } -// โ”€โ”€ Imports โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +// โ”€โ”€ Binary crate generators โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Generate `src/main.rs` for the named binary crate. +/// +/// Uses `quote!` + `prettyplease` for guaranteed idiomatic formatting. +/// Requires the binary to exist in `state.binaries`. Returns `None` if not found. 
+pub fn generate_main_rs(state: &ArchitectureState, binary_name: &str) -> Option { + let bin = state.binaries.iter().find(|b| b.name == binary_name)?; + let project_name = state + .project + .as_ref() + .map(|p| p.name.as_str()) + .unwrap_or("project"); + let common_crate = format_ident!("{}", format!("{}_common", project_name.replace('-', "_"))); + + // Collect tasks belonging to this binary + let tasks: Vec<&TaskDef> = bin + .tasks + .iter() + .filter_map(|tname| state.tasks.iter().find(|t| &t.name == tname)) + .collect(); + + let task_use_idents: Vec = bin + .tasks + .iter() + .map(|name| format_ident!("{}", name)) + .collect(); + + // โ”€โ”€ Connector use statements โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let connector_use_stmts: Vec = bin + .external_connectors + .iter() + .filter_map(|c| { + match c.protocol.as_str() { + "mqtt" => Some(quote! { use aimdb_mqtt_connector::MqttConnector; }), + "knx" => Some(quote! { use aimdb_knx_connector::KnxConnector; }), + _ => None, + } + }) + .collect(); + + // โ”€โ”€ Connector env-var bindings + construction โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let connector_let_stmts: Vec = bin + .external_connectors + .iter() + .map(|c| { + let var_ident = format_ident!("{}", c.env_var.to_lowercase()); + let var_name = &c.env_var; + let default = &c.default; + let ctor: TokenStream = match c.protocol.as_str() { + "mqtt" => quote! { MqttConnector::new(&#var_ident) }, + "knx" => quote! { KnxConnector::new(&#var_ident) }, + _ => { + let msg = format!("build connector for protocol '{}'", c.protocol); + quote! { todo!(#msg) } + } + }; + let connector_ident = format_ident!("{}_connector", c.protocol); + quote! { + let #var_ident = std::env::var(#var_name) + .unwrap_or_else(|_| #default.to_string()); + let #connector_ident = #ctor; + } + }) + .collect(); + + // โ”€โ”€ .with_connector(...) 
chain calls โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let with_connector_calls: Vec = bin + .external_connectors + .iter() + .map(|c| { + let connector_ident = format_ident!("{}_connector", c.protocol); + quote! { .with_connector(#connector_ident) } + }) + .collect(); + + // โ”€โ”€ Task source registrations โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let task_registrations: Vec = tasks + .iter() + .flat_map(|task| { + task.outputs.iter().flat_map(move |output| { + let variants: Vec = if output.variants.is_empty() { + state + .records + .iter() + .find(|r| r.name == output.record) + .map(|r| r.key_variants.clone()) + .unwrap_or_default() + } else { + output.variants.clone() + }; + + let value_type = format_ident!("{}Value", output.record); + let key_type = format_ident!("{}Key", output.record); + let task_fn = format_ident!("{}", task.name); + + variants.into_iter().map(move |variant| { + let variant_ident = format_ident!("{}", to_pascal_case(&variant)); + quote! { + builder.configure::<#value_type>(#key_type::#variant_ident, |reg| { + reg.source(#task_fn); + }); + } + }) + }) + }) + .collect(); + + // โ”€โ”€ Assemble via quote! โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let file_tokens = quote! 
{ + use aimdb_core::{AimDbBuilder, DbResult}; + use aimdb_tokio_adapter::TokioAdapter; + #(#connector_use_stmts)* + use std::sync::Arc; + use #common_crate::configure_schema; + + mod tasks; + use tasks::{#(#task_use_idents),*}; + + #[tokio::main] + async fn main() -> DbResult<()> { + tracing_subscriber::fmt::init(); + + #(#connector_let_stmts)* + + let runtime = Arc::new(TokioAdapter::new()); + + let mut builder = AimDbBuilder::new() + .runtime(runtime) + #(#with_connector_calls)* + ; + + configure_schema(&mut builder); + + #(#task_registrations)* + + builder.run().await + } + }; + + let header = format!( + "// @generated โ€” do not edit manually.\n\ + // Source: .aimdb/state.toml\n\ + // Regenerate: `aimdb generate --binary {binary_name}`\n\n" + ); + + let syntax_tree = + syn::parse2(file_tokens).expect("generate_main_rs: tokens should be valid Rust"); + Some(format!("{header}{}", prettyplease::unparse(&syntax_tree))) +} + +/// Generate `src/tasks.rs` scaffold for the named binary crate. +/// +/// Uses `quote!` + `prettyplease` for guaranteed idiomatic formatting. +/// This file is generated **once** โ€” it has no `@generated` header and is +/// then owned by the developer. Signatures must not be changed. +/// Returns `None` if the binary is not found. +pub fn generate_tasks_rs(state: &ArchitectureState, binary_name: &str) -> Option { + let bin = state.binaries.iter().find(|b| b.name == binary_name)?; + let project_name = state + .project + .as_ref() + .map(|p| p.name.as_str()) + .unwrap_or("project"); + let common_crate = format_ident!("{}", format!("{}_common", project_name.replace('-', "_"))); + + // Collect tasks belonging to this binary + let tasks: Vec<&TaskDef> = bin + .tasks + .iter() + .filter_map(|tname| state.tasks.iter().find(|t| &t.name == tname)) + .collect(); + + let task_fns: Vec = tasks + .iter() + .map(|task| { + let fn_name = format_ident!("{}", task.name); + + // Build parameter list + let mut params: Vec = vec![ + quote! 
{ ctx: RuntimeContext }, + ]; + for input in &task.inputs { + let arg_name = format_ident!("{}", to_snake_case(&input.record)); + let value_type = format_ident!("{}Value", input.record); + params.push(quote! { #arg_name: Consumer<#value_type, TokioAdapter> }); + } + for output in &task.outputs { + let arg_name = format_ident!("{}", to_snake_case(&output.record)); + let value_type = format_ident!("{}Value", output.record); + params.push(quote! { #arg_name: Producer<#value_type, TokioAdapter> }); + } + + let todo_msg = match &task.task_type { + TaskType::Agent => "LLM agent stub โ€” implement reasoning loop".to_string(), + _ => format!("implement: {}", task.description), + }; + + let doc_attr = if task.description.is_empty() { + quote! {} + } else { + let desc = &task.description; + quote! { #[doc = #desc] } + }; + + quote! { + #doc_attr + pub async fn #fn_name(#(#params),*) -> DbResult<()> { + todo!(#todo_msg) + } + } + }) + .collect(); + + let file_tokens = quote! { + use aimdb_core::{Consumer, DbResult, Producer, RuntimeContext}; + use aimdb_tokio_adapter::TokioAdapter; + use #common_crate::*; + + #(#task_fns)* + }; + + let header = format!( + "// Implement the task bodies; signatures must not change.\n\ + // Regenerate with `aimdb generate --binary {binary_name} --tasks-scaffold`\n\ + // (only writes this file if it does not already exist)\n\n" + ); + + let syntax_tree = + syn::parse2(file_tokens).expect("generate_tasks_rs: tokens should be valid Rust"); + Some(format!("{header}{}", prettyplease::unparse(&syntax_tree))) +} + +/// Generate `Cargo.toml` content for a binary crate. +/// +/// Derives dependencies from the binary's tasks and external connectors. +/// Returns `None` if the binary is not found. 
+pub fn generate_binary_cargo_toml(state: &ArchitectureState, binary_name: &str) -> Option { + let bin = state.binaries.iter().find(|b| b.name == binary_name)?; + let project_name = state + .project + .as_ref() + .map(|p| p.name.as_str()) + .unwrap_or("project"); + let common_crate_name = format!("{project_name}-common"); + let common_crate_dep = common_crate_name.replace('-', "_"); + let edition = state + .project + .as_ref() + .and_then(|p| p.edition.as_deref()) + .unwrap_or("2024"); + + let has_mqtt = bin.external_connectors.iter().any(|c| c.protocol == "mqtt"); + let has_knx = bin.external_connectors.iter().any(|c| c.protocol == "knx"); + + let mut optional_connector_deps = String::new(); + if has_mqtt { + optional_connector_deps.push_str( + "aimdb-mqtt-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", + ); + } + if has_knx { + optional_connector_deps.push_str( + "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", + ); + } + + let out = format!( + "# @generated โ€” do not edit manually.\n\ +# Source: .aimdb/state.toml โ€” regenerate with `aimdb generate --binary {binary_name}`\n\ +[package]\n\ +name = \"{binary_name}\"\n\ +version = \"0.1.0\"\n\ +edition = \"{edition}\"\n\ +\n\ +[[bin]]\n\ +name = \"{binary_name}\"\n\ +path = \"src/main.rs\"\n\ +\n\ +[dependencies]\n\ +{common_crate_dep} = {{ path = \"../{common_crate_name}\" }}\n\ +aimdb-core = {{ version = \"0.5\" }}\n\ +aimdb-tokio-adapter = {{ version = \"0.5\", features = [\"tokio-runtime\"] }}\n\ +{optional_connector_deps}\ +tokio = {{ version = \"1\", features = [\"full\"] }}\n\ +tracing = \"0.1\"\n\ +tracing-subscriber = {{ version = \"0.3\", features = [\"env-filter\"] }}\n" + ); + + Some(out) +} /// Imports for the types-only common crate schema โ€” no runtime deps. 
fn emit_imports_types_only(state: &ArchitectureState) -> TokenStream { @@ -313,6 +606,9 @@ fn emit_todo_field(msg: &str) -> TokenStream { fn emit_key_enum(rec: &RecordDef) -> TokenStream { let enum_name = format_ident!("{}Key", rec.name); + // The RecordKey derive macro supports a single #[link_address] attribute. + // We use the first connector for that; additional connectors are resolved + // via standalone helper functions emitted by `emit_connector_address_fns`. let connector = rec.connectors.first(); let key_prefix_attr = if !rec.key_prefix.is_empty() { @@ -346,15 +642,68 @@ fn emit_key_enum(rec: &RecordDef) -> TokenStream { .collect() }; + let address_fns = emit_connector_address_fns(rec); + quote! { #[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)] #key_prefix_attr pub enum #enum_name { #(#variants)* } + + #address_fns } } +/// Emit standalone address-resolver functions for connectors beyond the first. +/// +/// The first connector's addresses are baked into `#[link_address]` on the key +/// enum and exposed via the `RecordKey::link_address()` trait method. Additional +/// connectors get a `fn {record_snake}_{protocol}_address(key: &{Record}Key) -> Option<&'static str>` +/// function that the configure block can call. +fn emit_connector_address_fns(rec: &RecordDef) -> TokenStream { + if rec.connectors.len() <= 1 || rec.key_variants.is_empty() { + return quote! 
{}; + } + + let key_type = format_ident!("{}Key", rec.name); + let record_snake = to_snake_case(&rec.name); + + let fns: Vec = rec + .connectors + .iter() + .skip(1) // first connector uses link_address() + .map(|conn| { + let fn_name = format_ident!("{}_{}_address", record_snake, conn.protocol); + let doc = format!( + "Link address for `{}` โ€” {} connector (`{}`).", + rec.name, conn.protocol, conn.direction_label(), + ); + + let arms: Vec = rec + .key_variants + .iter() + .map(|variant_str| { + let variant_ident = format_ident!("{}", to_pascal_case(variant_str)); + let addr = conn.url.replace("{variant}", variant_str); + quote! { #key_type::#variant_ident => Some(#addr), } + }) + .collect(); + + quote! { + #[doc = #doc] + pub fn #fn_name(key: &#key_type) -> Option<&'static str> { + match key { + #(#arms)* + } + } + } + }) + .collect(); + + quote! { #(#fns)* } +} + // โ”€โ”€ configure_schema โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ fn emit_configure_schema(state: &ArchitectureState) -> TokenStream { @@ -402,67 +751,163 @@ fn emit_record_configure_block(rec: &RecordDef) -> TokenStream { .map(|s| s == &SerializationType::Custom) .unwrap_or(false); - let connector_block = rec.connectors.first().map(|conn| { - if is_custom { - // Custom serialization: keep TODO stubs - let link_fn = match conn.direction { - ConnectorDirection::Outbound => format_ident!("link_to"), - ConnectorDirection::Inbound => format_ident!("link_from"), - }; - let todo_comment = match conn.direction { - ConnectorDirection::Outbound => { - "TODO: add .with_serializer(...) โ€” serialization = \"custom\"" - } - ConnectorDirection::Inbound => { - "TODO: add .with_deserializer(...) โ€” serialization = \"custom\"" + if rec.connectors.is_empty() { + // No connectors: just buffer + return quote! 
{ + for key in [ + #(#key_type::#variant_idents,)* + ] { + builder.configure::<#value_type>(key, |reg| { + reg.buffer(#buffer_tokens); + }); + } + }; + } + + // โ”€โ”€ Pre-extract addresses โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + // First connector uses `key.link_address()` (from RecordKey derive). + // Additional connectors use generated helper functions. + let record_snake = to_snake_case(&rec.name); + + let addr_extractions: Vec = rec + .connectors + .iter() + .enumerate() + .map(|(i, conn)| { + let addr_var = format_ident!("addr_{}", i); + if i == 0 { + quote! { + let #addr_var = key.link_address().map(|s| s.to_string()); } - }; - quote! { - if let Some(addr) = key.link_address() { - let _ = #todo_comment; - reg.#link_fn(addr); + } else { + let resolver_fn = format_ident!("{}_{}_address", record_snake, conn.protocol); + quote! { + let #addr_var = #resolver_fn(&key).map(|s| s.to_string()); } } - } else { - // Non-custom: wire real serializers via Linkable trait - match conn.direction { - ConnectorDirection::Inbound => { - quote! { - if let Some(addr) = key.link_address() { - reg.link_from(addr) - .with_deserializer(#value_type::from_bytes) - .finish(); - } - } + }) + .collect(); + + // โ”€โ”€ Build the configure closure body โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + // + // `reg.buffer()` consumes the `&mut` borrow and returns a builder, so + // everything must be a single fluent chain starting from `reg.buffer(...)`. + // We build two branches: one with connectors wired (when all addresses + // resolve), one plain buffer fallback. + let linked_chain = emit_connector_chain(&rec.connectors, &value_type, &buffer_tokens, is_custom); + let addr_conditions: Vec = (0..rec.connectors.len()) + .map(|i| { + let addr_var = format_ident!("addr_{}", i); + quote! 
{ #addr_var.as_deref() } + }) + .collect(); + + // For a single connector: `if let Some(addr) = addr_0.as_deref() { chain } else { buffer }` + // For multiple connectors: nest or tuple-match the conditions. + let body = if rec.connectors.len() == 1 { + let cond = &addr_conditions[0]; + quote! { + if let Some(addr_0) = #cond { + #linked_chain + } else { + reg.buffer(#buffer_tokens); + } + } + } else { + // Multiple connectors: match a tuple of Options. + // When ALL addresses are present, wire the full chain. + // Otherwise fall back to buffer-only. + let some_bindings: Vec = (0..rec.connectors.len()) + .map(|i| { + let binding = format_ident!("addr_{}", i); + quote! { Some(#binding) } + }) + .collect(); + quote! { + match (#(#addr_conditions),*) { + (#(#some_bindings),*) => { + #linked_chain } - ConnectorDirection::Outbound => { - quote! { - if let Some(addr) = key.link_address() { - reg.link_to(addr) - .with_serializer(|v: &#value_type| { - v.to_bytes() - .map_err(|_| aimdb_core::connector::SerializeError::InvalidData) - }) - .finish(); - } - } + _ => { + reg.buffer(#buffer_tokens); } } } - }); + }; quote! { for key in [ #(#key_type::#variant_idents,)* ] { + #(#addr_extractions)* builder.configure::<#value_type>(key, |reg| { - reg.buffer(#buffer_tokens); - #connector_block + #body }); } } } +/// Build the full fluent chain: `reg.buffer(...).link_X(addr_0)...link_Y(addr_1)...` +/// +/// All connector links are chained off a single `reg.buffer()` call so there +/// is only one mutable borrow of `reg`. Address variables `addr_0`, `addr_1`, +/// etc. are assumed to be in scope as `&str`. +fn emit_connector_chain( + connectors: &[ConnectorDef], + value_type: &syn::Ident, + buffer_tokens: &TokenStream, + is_custom: bool, +) -> TokenStream { + // Start the chain with reg.buffer(...) + let mut chain = quote! 
{ reg.buffer(#buffer_tokens) }; + + for (i, conn) in connectors.iter().enumerate() { + let addr_var = format_ident!("addr_{}", i); + + if is_custom { + let todo_comment = match conn.direction { + ConnectorDirection::Outbound => { + "TODO: chain .link_to(...).with_serializer(...) โ€” serialization = \"custom\"" + } + ConnectorDirection::Inbound => { + "TODO: chain .link_from(...).with_deserializer(...) โ€” serialization = \"custom\"" + } + }; + // Can't chain a TODO into the builder, so just emit a let-binding comment + // after the chain. We'll terminate the chain with `;` below. + chain = quote! { + #chain; + let _ = (#todo_comment, #addr_var) + }; + } else { + match conn.direction { + ConnectorDirection::Inbound => { + chain = quote! { + #chain + .link_from(#addr_var) + .with_deserializer(#value_type::from_bytes) + .finish() + }; + } + ConnectorDirection::Outbound => { + chain = quote! { + #chain + .link_to(#addr_var) + .with_serializer(|v: &#value_type| { + v.to_bytes() + .map_err(|_| aimdb_core::connector::SerializeError::InvalidData) + }) + .finish() + }; + } + } + } + } + + // Terminate the chain + quote! { #chain; } +} + // โ”€โ”€ Trait implementations โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ fn emit_schema_type_impl(rec: &RecordDef) -> TokenStream { @@ -722,6 +1167,544 @@ pub fn to_pascal_case(s: &str) -> String { .collect() } +// โ”€โ”€ Hub crate generators โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +// +// These functions derive a complete hub binary crate scaffold from state.toml +// without requiring `[[tasks]]` or `[[binaries]]` entries in state. 
+// +// Hub-internal tasks are identified automatically: +// - Producers of records with INBOUND connectors โ†’ external (stations), skipped +// - Producers of records with OUTBOUND or no connectors โ†’ hub-internal tasks + +/// Returns the set of hub-internal task names derived from state. +/// +/// A task is hub-internal if it appears as a producer of any record that has +/// no inbound connector (i.e. the hub itself writes that record). +fn hub_task_names(state: &ArchitectureState) -> Vec { + // Collect external producer names: those that produce records with inbound connectors + use std::collections::HashSet; + let external_producers: HashSet<&str> = state + .records + .iter() + .filter(|r| { + r.connectors + .iter() + .any(|c| matches!(c.direction, ConnectorDirection::Inbound)) + }) + .flat_map(|r| r.producers.iter().map(|p| p.as_str())) + .collect(); + + // Hub tasks: appear as producer of a non-inbound record + let mut seen = HashSet::new(); + let mut tasks: Vec = state + .records + .iter() + .filter(|r| { + !r.connectors + .iter() + .any(|c| matches!(c.direction, ConnectorDirection::Inbound)) + }) + .flat_map(|r| r.producers.iter().cloned()) + .filter(|p| !external_producers.contains(p.as_str())) + .filter(|p| seen.insert(p.clone())) + .collect(); + + // Also include any consumer of a record that is not a known external producer + for rec in &state.records { + for consumer in &rec.consumers { + let consumer = consumer.clone(); + if !external_producers.contains(consumer.as_str()) && seen.insert(consumer.clone()) { + tasks.push(consumer); + } + } + } + + tasks +} + +/// Generate `src/schema.rs` for the hub binary crate. +/// +/// Contains only the `configure_schema` function โ€” no type definitions. +/// Types are imported from the project's common crate. 
+pub fn generate_hub_schema_rs(state: &ArchitectureState) -> String { + let project = state + .project + .as_ref() + .expect("generate_hub_schema_rs requires [project] block in state.toml"); + let common_crate = format_ident!("{}", project.name.replace('-', "_") + "_common"); + + let configure_fn = emit_configure_schema(state); + + let file_tokens = quote! { + use aimdb_core::buffer::BufferCfg; + use aimdb_core::builder::AimDbBuilder; + use aimdb_executor::Spawn; + use #common_crate::*; + + #configure_fn + }; + + let header = "// @generated โ€” do not edit manually.\n\ +// Source: .aimdb/state.toml โ€” regenerate with `aimdb generate --hub`.\n\n"; + + let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust"); + format!("{header}{}", prettyplease::unparse(&syntax_tree)) +} + +/// Generate `Cargo.toml` for the hub binary crate (`{project.name}-hub`). +pub fn generate_hub_cargo_toml(state: &ArchitectureState) -> String { + let project = state + .project + .as_ref() + .expect("generate_hub_cargo_toml requires [project] block in state.toml"); + let hub_crate = format!("{}-hub", project.name); + let common_crate_name = format!("{}-common", project.name); + let edition = project.edition.as_deref().unwrap_or("2024"); + + let has_mqtt = state + .records + .iter() + .any(|r| r.connectors.iter().any(|c| c.protocol == "mqtt")); + let has_knx = state + .records + .iter() + .any(|r| r.connectors.iter().any(|c| c.protocol == "knx")); + let has_websocket = state + .records + .iter() + .any(|r| r.connectors.iter().any(|c| c.protocol == "websocket")); + + let mut connector_deps = String::new(); + if has_mqtt { + connector_deps.push_str( + "aimdb-mqtt-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", + ); + } + if has_knx { + connector_deps.push_str( + "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", + ); + } + if has_websocket { + connector_deps.push_str("# aimdb-websocket-connector is in aimdb-pro 
โ€” add path dep here\n# aimdb-websocket-connector = { path = \"../../aimdb-pro/aimdb-websocket-connector\" }\n"); + } + + format!( + "# @generated โ€” do not edit manually.\n\ +# Source: .aimdb/state.toml โ€” regenerate with `aimdb generate --hub`\n\ +[package]\n\ +name = \"{hub_crate}\"\n\ +version = \"0.1.0\"\n\ +edition = \"{edition}\"\n\ +description = \"Hub binary for {project_name}\"\n\ +publish = false\n\ +\n\ +[[bin]]\n\ +name = \"{hub_crate}\"\n\ +path = \"src/main.rs\"\n\ +\n\ +[dependencies]\n\ +{common_crate_name} = {{ path = \"../{common_crate_name}\" }}\n\ +aimdb-core = {{ version = \"0.5\" }}\n\ +aimdb-data-contracts = {{ version = \"0.5\", features = [\"linkable\"] }}\n\ +aimdb-tokio-adapter = {{ version = \"0.5\", features = [\"tokio-runtime\"] }}\n\ +{connector_deps}\ +tokio = {{ version = \"1\", features = [\"full\"] }}\n\ +tracing = \"0.1\"\n\ +tracing-subscriber = {{ version = \"0.3\", features = [\"env-filter\"] }}\n", + project_name = project.name + ) +} + +/// Generate `src/main.rs` for the hub binary crate. +/// +/// Uses `quote!` + `prettyplease` for guaranteed idiomatic formatting โ€” +/// the same pipeline as the rest of the codegen (no raw format strings). 
+pub fn generate_hub_main_rs(state: &ArchitectureState) -> String { + let project = state + .project + .as_ref() + .expect("generate_hub_main_rs requires [project] block in state.toml"); + let common_crate = format_ident!("{}", project.name.replace('-', "_") + "_common"); + + // โ”€โ”€ Protocol detection โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let has_mqtt = state + .records + .iter() + .any(|r| r.connectors.iter().any(|c| c.protocol == "mqtt")); + let has_knx = state + .records + .iter() + .any(|r| r.connectors.iter().any(|c| c.protocol == "knx")); + + // โ”€โ”€ Connector use statements โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let connector_use_stmts: Vec = { + let mut v = vec![]; + if has_mqtt { + v.push(quote! { use aimdb_mqtt_connector::MqttConnector; }); + } + if has_knx { + v.push(quote! { use aimdb_knx_connector::KnxConnector; }); + } + v + }; + + // โ”€โ”€ Connector env-var bindings โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let connector_let_stmts: TokenStream = { + let mut ts = TokenStream::new(); + if has_mqtt { + ts.extend(quote! { + let mqtt_broker = + std::env::var("MQTT_BROKER").unwrap_or_else(|_| "localhost".to_string()); + let mqtt_url = format!("mqtt://{}", mqtt_broker); + }); + } + if has_knx { + ts.extend(quote! { + let knx_gateway = std::env::var("KNX_GATEWAY") + .unwrap_or_else(|_| "224.0.23.12:3671".to_string()); + }); + } + ts + }; + + // โ”€โ”€ .with_connector(...) chain entries โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let with_connector_calls: Vec = { + let mut v = vec![]; + if has_mqtt { + v.push(quote! { .with_connector(MqttConnector::new(&mqtt_url)) }); + } + if has_knx { + v.push(quote! 
{ .with_connector(KnxConnector::new(&knx_gateway)) }); + } + v + }; + + // โ”€โ”€ Inline record configure blocks (the node graph) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let record_blocks: Vec = state + .records + .iter() + .map(|r| emit_hub_record_configure_block(r, state)) + .collect(); + + // โ”€โ”€ String literals โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let log_filter = format!( + "{}_hub=info,aimdb_core=info", + project.name.replace('-', "_") + ); + let startup_msg = format!("Starting {} hub", project.name); + + // โ”€โ”€ Assemble via quote! โ€” prettyplease formats the whole file โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let file_tokens = quote! { + use aimdb_core::{buffer::BufferCfg, AimDbBuilder, DbResult, RecordKey}; + use aimdb_data_contracts::Linkable; + use aimdb_tokio_adapter::{TokioAdapter, TokioRecordRegistrarExt}; + #(#connector_use_stmts)* + use std::sync::Arc; + use #common_crate::*; + + mod tasks; + use tasks::*; + + #[tokio::main] + async fn main() -> DbResult<()> { + tracing_subscriber::fmt() + .with_env_filter( + tracing_subscriber::EnvFilter::try_from_default_env() + .unwrap_or_else(|_| #log_filter.into()), + ) + .init(); + + tracing::info!(#startup_msg); + + #connector_let_stmts + + let runtime = Arc::new(TokioAdapter::new()?); + + let mut builder = AimDbBuilder::new() + .runtime(runtime) + #(#with_connector_calls)* + ; + + #(#record_blocks)* + + builder.run().await + } + }; + + let header = "// @generated โ€” do not edit manually.\n\ +// Source: .aimdb/state.toml\n\ +// Regenerate: `aimdb generate --hub`\n\n"; + + let syntax_tree = + syn::parse2(file_tokens).expect("generate_hub_main_rs: tokens should be valid Rust"); + format!("{header}{}", prettyplease::unparse(&syntax_tree)) +} + +/// Hub-specific record configure block. 
+/// +/// For records produced by a `[[tasks]]`-defined hub task, emits per-variant +/// individual `builder.configure(...)` calls using `.transform()` or +/// `.transform_join()`. For all other records (inbound connector or external +/// source) falls back to the regular loop-based configure block. +fn emit_hub_record_configure_block(rec: &RecordDef, state: &ArchitectureState) -> TokenStream { + if rec.key_variants.is_empty() { + let msg = format!("TODO: {}: no key variants defined yet", rec.name); + return quote! { let _ = (#msg,); }; + } + + // Find a task in [[tasks]] whose outputs include this record + let producing_task = state + .tasks + .iter() + .find(|t| t.outputs.iter().any(|o| o.record == rec.name)); + + match producing_task { + Some(task) => emit_transform_configure_block(rec, task), + None => emit_record_configure_block(rec), + } +} + +/// Emit per-variant configure blocks for a hub-task-produced record. +/// +/// Generates individual (non-loop) `builder.configure(...)` calls so that +/// each variant can reference its specific input keys for `.transform()` / +/// `.transform_join()`. +fn emit_transform_configure_block(rec: &RecordDef, task: &TaskDef) -> TokenStream { + let value_type = format_ident!("{}Value", rec.name); + let key_type = format_ident!("{}Key", rec.name); + let buffer_tokens = rec.buffer.to_tokens(rec.capacity); + + // Only emit connector chain for mqtt/knx outbound (websocket is a pro feature) + let has_outbound = rec.connectors.iter().any(|c| { + matches!(c.direction, ConnectorDirection::Outbound) + && matches!(c.protocol.as_str(), "mqtt" | "knx") + }); + let outbound_chain = if has_outbound { + quote! { + .link_to(addr) + .with_serializer(|v: &#value_type| { + v.to_bytes() + .map_err(|_| aimdb_core::connector::SerializeError::InvalidData) + }) + .finish() + } + } else { + quote! 
{} + }; + + let variant_idents: Vec = rec + .key_variants + .iter() + .map(|v| format_ident!("{}", to_pascal_case(v))) + .collect(); + + let per_variant: Vec = variant_idents + .iter() + .map(|variant_ident| { + let transform_call = build_transform_call(task, variant_ident); + + if has_outbound { + let outbound = outbound_chain.clone(); + quote! { + { + let link_addr = #key_type::#variant_ident + .link_address() + .map(|s| s.to_string()); + builder.configure::<#value_type>(#key_type::#variant_ident, |reg| { + if let Some(addr) = link_addr.as_deref() { + reg.buffer(#buffer_tokens) + #transform_call + #outbound; + } else { + reg.buffer(#buffer_tokens) + #transform_call; + } + }); + } + } + } else { + quote! { + builder.configure::<#value_type>(#key_type::#variant_ident, |reg| { + reg.buffer(#buffer_tokens) + #transform_call; + }); + } + } + }) + .collect(); + + quote! { #(#per_variant)* } +} + +/// Build the `.transform(...)` or `.transform_join(...)` call for one variant. +/// +/// - 1 input โ†’ `.transform::(InputKey::Variant, |b| b.map(task_transform))` +/// - N inputs โ†’ `.transform_join(|j| j.input::<...>(Key::Variant)....on_trigger(task_handler))` +fn build_transform_call(task: &TaskDef, variant_ident: &syn::Ident) -> TokenStream { + if task.inputs.len() != 1 { + // Multi-input โ†’ transform_join + let handler_ident = format_ident!("{}_handler", task.name); + let input_calls: Vec = task + .inputs + .iter() + .map(|inp| { + let in_val = format_ident!("{}Value", inp.record); + let in_key = format_ident!("{}Key", inp.record); + quote! { .input::<#in_val>(#in_key::#variant_ident) } + }) + .collect(); + quote! { + .transform_join(|j| { + j #(#input_calls)* + .with_state(()) + .on_trigger(#handler_ident) + }) + } + } else { + // Single-input โ†’ transform + map + let handler_ident = format_ident!("{}_transform", task.name); + let inp = &task.inputs[0]; + let in_val = format_ident!("{}Value", inp.record); + let in_key = format_ident!("{}Key", inp.record); + quote! 
{ + .transform::<#in_val, _>(#in_key::#variant_ident, |b| b.map(#handler_ident)) + } + } +} + +/// Generate `src/tasks.rs` stub for the hub binary crate. +/// +/// This file is generated **once** โ€” it is not overwritten if it already exists. +/// Task handler signatures are derived from `[[tasks]]` in state.toml: +/// +/// | Inputs | Outputs | API | Generated stub | +/// |--------|---------|-----------------------|---------------------------| +/// | N > 1 | โ‰ฅ 1 | `.transform_join()` | `fn task_handler(JoinTrigger, &mut (), &Producer)` | +/// | 1 | โ‰ฅ 1 | `.transform().map()` | `fn task_transform(&Input) -> Option` | +/// | 0 | โ‰ฅ 1 | `.source()` | `async fn task(RuntimeContext, Producer)` | +/// | โ‰ฅ 1 | 0 | `.tap()` | `async fn task(RuntimeContext, Consumer)` | +pub fn generate_hub_tasks_rs(state: &ArchitectureState) -> String { + let project = state + .project + .as_ref() + .expect("generate_hub_tasks_rs requires [project] block in state.toml"); + let common_crate = format!("{}_common", project.name.replace('-', "_")); + + let mut fns = String::new(); + let mut handled: std::collections::HashSet = std::collections::HashSet::new(); + + for task in &state.tasks { + handled.insert(task.name.clone()); + let n_in = task.inputs.len(); + let n_out = task.outputs.len(); + + let out_t = task + .outputs + .first() + .map(|o| format!("{}Value", o.record)) + .unwrap_or_else(|| "()".to_string()); + let in_t = task + .inputs + .first() + .map(|i| format!("{}Value", i.record)) + .unwrap_or_else(|| "()".to_string()); + + if !task.description.is_empty() { + fns.push_str(&format!("/// {}\n", task.description)); + } + + if n_in > 1 && n_out >= 1 { + // Multi-input โ†’ join handler + // Returns Pin> โ€” the only concrete return type that satisfies + // the for<'a,'b> HRTB on on_trigger. `-> impl Future` does NOT work here. 
+ let handler = format!("{}_handler", task.name); + let inputs_doc = task + .inputs + .iter() + .enumerate() + .map(|(i, inp)| format!(" index {i} = {}", inp.record)) + .collect::>() + .join(", "); + fns.push_str(&format!( + "/// Join handler โ€” match `trigger.index()` to identify which input fired:\n\ +/// {inputs_doc}\n\ +pub fn {handler}(\n\ + _trigger: aimdb_core::transform::JoinTrigger,\n\ + _state: &mut (),\n\ + _producer: &aimdb_core::Producer<{out_t}, TokioAdapter>,\n\ +) -> std::pin::Pin + Send + 'static>> {{\n\ + Box::pin(async move {{ todo!(\"implement {handler}\") }})\n\ +}}\n\n" + )); + } else if n_in == 1 && n_out >= 1 { + // Single-input โ†’ map transform + let handler = format!("{}_transform", task.name); + let input_rec = &task.inputs[0].record; + let output_rec = task + .outputs + .first() + .map(|o| o.record.as_str()) + .unwrap_or("?"); + fns.push_str(&format!( + "/// Transform: {input_rec} โ†’ {output_rec}\n\ +/// Return `Some(value)` to emit, `None` to skip this input.\n\ +pub fn {handler}(input: &{in_t}) -> Option<{out_t}> {{\n\ + let _ = input;\n\ + todo!(\"implement {handler}\")\n\ +}}\n\n" + )); + } else if n_in == 0 && n_out >= 1 { + // Pure source + fns.push_str(&format!( + "pub async fn {}(\n\ + _ctx: aimdb_core::RuntimeContext,\n\ + _producer: aimdb_core::Producer<{out_t}, TokioAdapter>,\n\ +) {{\n\ + todo!(\"implement {}\")\n\ +}}\n\n", + task.name, task.name + )); + } else if n_in >= 1 && n_out == 0 { + // Pure sink / tap + fns.push_str(&format!( + "pub async fn {}(\n\ + _ctx: aimdb_core::RuntimeContext,\n\ + _consumer: aimdb_core::Consumer<{in_t}, TokioAdapter>,\n\ +) {{\n\ + todo!(\"implement {}\")\n\ +}}\n\n", + task.name, task.name + )); + } + } + + // Fallback: any hub tasks NOT in [[tasks]] get a minimal stub + for task_name in hub_task_names(state) { + if handled.contains(&task_name) { + continue; + } + fns.push_str(&format!( + "/// Hub task: add a `[[tasks]]` entry in state.toml for a typed stub.\n\ +pub async fn 
{task_name}() {{\n\ + todo!(\"implement {task_name}\")\n\ +}}\n\n" + )); + } + + + format!( + "// Implement task bodies; signatures are derived from state.toml [[tasks]].\n\ +// This file is scaffolded once โ€” it will not be overwritten on subsequent runs.\n\ +// Regenerate signatures: delete this file, then run `aimdb generate --hub`.\n\ +\n\ +use aimdb_tokio_adapter::TokioAdapter;\n\ +use {common_crate}::*;\n\ +\n\ +{fns}" + ) +} + // โ”€โ”€ Tests โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ #[cfg(test)] @@ -959,7 +1942,7 @@ url = "mqtt://ota/cmd/{variant}" fn configure_schema_outbound_link_to_with_serializer() { let out = generated(); assert!( - out.contains("link_to(addr)"), + out.contains("link_to(addr_0)"), "Missing link_to call:\n{out}" ); assert!( @@ -973,7 +1956,7 @@ url = "mqtt://ota/cmd/{variant}" fn configure_schema_inbound_link_from_with_deserializer() { let out = generated(); assert!( - out.contains("link_from(addr)"), + out.contains("link_from(addr_0)"), "Missing link_from call:\n{out}" ); assert!( diff --git a/aimdb-codegen/src/state.rs b/aimdb-codegen/src/state.rs index 93c448c3..50f9e350 100644 --- a/aimdb-codegen/src/state.rs +++ b/aimdb-codegen/src/state.rs @@ -16,6 +16,10 @@ pub struct ArchitectureState { #[serde(default)] pub records: Vec, #[serde(default)] + pub tasks: Vec, + #[serde(default)] + pub binaries: Vec, + #[serde(default)] pub decisions: Vec, } @@ -202,6 +206,16 @@ pub struct ConnectorDef { pub url: String, } +impl ConnectorDef { + /// Human-readable direction label for doc comments. + pub fn direction_label(&self) -> &'static str { + match self.direction { + ConnectorDirection::Outbound => "outbound", + ConnectorDirection::Inbound => "inbound", + } + } +} + /// Connector data flow direction. 
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "lowercase")] @@ -210,6 +224,80 @@ pub enum ConnectorDirection { Inbound, } +// โ”€โ”€ Task definition โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// The functional role of a task โ€” drives stub body generation. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] +#[serde(rename_all = "lowercase")] +pub enum TaskType { + /// Reads one or more records, transforms them, writes to output records. + #[default] + Transform, + /// LLM-driven reasoning loop, flags anomalies, cross-correlates data. + Agent, + /// Fetches external data and produces values into a record. + Producer, + /// Forwards, stores, or logs values โ€” no output records in the DB. + Sink, +} + +/// One `[[tasks.inputs]]` or `[[tasks.outputs]]` entry. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TaskIo { + /// PascalCase record name, e.g. `"HourlyForecastPoint"`. + pub record: String, + /// Specific variants; empty (`[]`) means all variants of that record. + #[serde(default)] + pub variants: Vec, +} + +/// One `[[tasks]]` entry โ€” describes an async task function. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TaskDef { + /// snake_case function name, e.g. `"hub_validation_task"`. + pub name: String, + /// Functional classification โ€” drives stub body. + #[serde(default)] + pub task_type: TaskType, + /// Human-readable description, used in doc comments and todo! msgs. + #[serde(default)] + pub description: String, + /// Records this task reads from. + #[serde(default)] + pub inputs: Vec, + /// Records this task writes to. 
+ #[serde(default)] + pub outputs: Vec, +} + +// โ”€โ”€ Binary definition โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// One `[[binaries.external_connectors]]` entry โ€” a runtime broker connection. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExternalConnectorDef { + /// Protocol identifier, e.g. `"mqtt"`. + pub protocol: String, + /// Environment variable that provides the broker URL at runtime. + pub env_var: String, + /// Default URL when the env var is not set. + #[serde(default)] + pub default: String, +} + +/// One `[[binaries]]` entry โ€” a deployable binary crate. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BinaryDef { + /// Directory name of the binary crate, e.g. `"weather-sentinel-hub"`. + /// The codegen derives the crate path as `../{name}/`. + pub name: String, + /// Task names belonging to this binary (must match `[[tasks]]` entries). + #[serde(default)] + pub tasks: Vec, + /// Runtime broker connections needed by this binary. + #[serde(default)] + pub external_connectors: Vec, +} + // โ”€โ”€ Decision log entry โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ /// One `[[decisions]]` entry โ€” architectural rationale. 
diff --git a/aimdb-codegen/src/validate.rs b/aimdb-codegen/src/validate.rs index a69b3d2d..1654d3d9 100644 --- a/aimdb-codegen/src/validate.rs +++ b/aimdb-codegen/src/validate.rs @@ -49,6 +49,7 @@ pub fn validate(state: &ArchitectureState) -> Vec { validate_meta(state, &mut errors); validate_records(state, &mut errors); + validate_tasks_and_binaries(state, &mut errors); errors } @@ -297,6 +298,110 @@ fn validate_records(state: &ArchitectureState, errors: &mut Vec } } +// โ”€โ”€ Tasks and binaries validation โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +fn validate_tasks_and_binaries(state: &ArchitectureState, errors: &mut Vec) { + let record_names: Vec<&str> = state.records.iter().map(|r| r.name.as_str()).collect(); + let task_names: Vec<&str> = state.tasks.iter().map(|t| t.name.as_str()).collect(); + + // Rule 1: task name in producers/consumers has no [[tasks]] entry โ†’ Warning + for (ridx, rec) in state.records.iter().enumerate() { + for producer in &rec.producers { + if !task_names.contains(&producer.as_str()) { + errors.push(ValidationError { + message: format!( + "producer '{producer}' in record '{}' has no [[tasks]] entry", + rec.name + ), + location: format!("records[{ridx}].producers"), + severity: Severity::Warning, + }); + } + } + for consumer in &rec.consumers { + if !task_names.contains(&consumer.as_str()) { + errors.push(ValidationError { + message: format!( + "consumer '{consumer}' in record '{}' has no [[tasks]] entry", + rec.name + ), + location: format!("records[{ridx}].consumers"), + severity: Severity::Warning, + }); + } + } + } + + // Rules 2, 3, 5: task I/O references + for (tidx, task) in state.tasks.iter().enumerate() { + let tloc = format!("tasks[{tidx}]"); + + for (iidx, input) in task.inputs.iter().enumerate() { + // Rule 2: inputs reference a record not in [[records]] + if !record_names.contains(&input.record.as_str()) { + 
errors.push(ValidationError { + message: format!( + "task '{}' input references unknown record '{}'", + task.name, input.record + ), + location: format!("{tloc}.inputs[{iidx}].record"), + severity: Severity::Error, + }); + } + } + + for (oidx, output) in task.outputs.iter().enumerate() { + // Rule 3: outputs reference a record not in [[records]] + if !record_names.contains(&output.record.as_str()) { + errors.push(ValidationError { + message: format!( + "task '{}' output references unknown record '{}'", + task.name, output.record + ), + location: format!("{tloc}.outputs[{oidx}].record"), + severity: Severity::Error, + }); + continue; + } + + // Rule 5: output variant not in that record's key_variants (only when variants is non-empty) + if !output.variants.is_empty() { + let rec = state.records.iter().find(|r| r.name == output.record); + if let Some(rec) = rec { + for variant in &output.variants { + if !rec.key_variants.contains(variant) { + errors.push(ValidationError { + message: format!( + "task '{}' output variant '{variant}' not found in record '{}' key_variants", + task.name, output.record + ), + location: format!("{tloc}.outputs[{oidx}].variants"), + severity: Severity::Error, + }); + } + } + } + } + } + } + + // Rule 4: binary task name not found in [[tasks]] + for (bidx, bin) in state.binaries.iter().enumerate() { + for task_name in &bin.tasks { + if !task_names.contains(&task_name.as_str()) { + errors.push(ValidationError { + message: format!( + "binary '{}' references task '{task_name}' which has no [[tasks]] entry", + bin.name + ), + location: format!("binaries[{bidx}].tasks"), + severity: Severity::Error, + }); + } + } + } +} + // โ”€โ”€ Tests โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ #[cfg(test)] diff --git a/tools/aimdb-cli/src/commands/generate.rs 
b/tools/aimdb-cli/src/commands/generate.rs index 8568925c..1ed61286 100644 --- a/tools/aimdb-cli/src/commands/generate.rs +++ b/tools/aimdb-cli/src/commands/generate.rs @@ -10,6 +10,7 @@ //! ```text //! aimdb generate # generate flat file + diagram //! aimdb generate --common-crate # generate common crate directory +//! aimdb generate --hub # generate hub binary crate scaffold //! aimdb generate --check # validate only (CI) //! aimdb generate --dry-run # print to stdout, don't write //! aimdb generate --state path/state.toml # custom state path @@ -22,8 +23,9 @@ use crate::error::CliResult; use aimdb_codegen::{ - generate_cargo_toml, generate_lib_rs, generate_mermaid, generate_rust, generate_schema_rs, - validate, ArchitectureState, Severity, + generate_cargo_toml, generate_hub_cargo_toml, generate_hub_main_rs, generate_hub_tasks_rs, + generate_lib_rs, generate_mermaid, generate_rust, generate_schema_rs, validate, + ArchitectureState, Severity, }; use anyhow::Context; use clap::Args; @@ -50,6 +52,12 @@ pub struct GenerateCommand { #[arg(long)] pub common_crate: bool, + /// Generate a complete hub binary crate directory. + /// Requires `[project]` block in state.toml. Outputs to `{project.name}-hub/`. + /// `src/tasks.rs` is only written if it does not already exist. + #[arg(long)] + pub hub: bool, + /// Validate state.toml without writing files (exit 1 if errors found) #[arg(long)] pub check: bool, @@ -98,7 +106,9 @@ impl GenerateCommand { return Ok(()); } - if self.common_crate { + if self.hub { + self.execute_hub_crate(&state).await + } else if self.common_crate { self.execute_common_crate(&state).await } else { self.execute_flat(&state).await @@ -130,6 +140,73 @@ impl GenerateCommand { Ok(()) } + /// Hub crate mode: generate `{project.name}-hub/` binary crate. 
+ async fn execute_hub_crate(&self, state: &ArchitectureState) -> CliResult<()> { + let project = state.project.as_ref().ok_or_else(|| { + anyhow::anyhow!( + "--hub requires a [project] block in state.toml.\n\ + Add:\n [project]\n name = \"your-project\"" + ) + })?; + + let hub_dir = PathBuf::from(format!("{}-hub", project.name)); + let src_dir = hub_dir.join("src"); + + let cargo_toml = generate_hub_cargo_toml(state); + let main_rs = generate_hub_main_rs(state); + let tasks_rs = generate_hub_tasks_rs(state); + + if self.dry_run { + println!( + "{} {}", + "โ”€โ”€ Cargo.toml".dimmed(), + hub_dir.join("Cargo.toml").display() + ); + println!("{cargo_toml}"); + println!( + "{} {}", + "โ”€โ”€ main.rs".dimmed(), + src_dir.join("main.rs").display() + ); + println!("{main_rs}"); + println!( + "{} {}", + "โ”€โ”€ tasks.rs".dimmed(), + src_dir.join("tasks.rs").display() + ); + println!("{tasks_rs}"); + return Ok(()); + } + + // Also regenerate the Mermaid diagram + let mermaid_src = generate_mermaid(state); + write_if_changed(&self.mermaid, &mermaid_src, "Mermaid")?; + + write_if_changed(&hub_dir.join("Cargo.toml"), &cargo_toml, "Cargo.toml")?; + write_if_changed(&src_dir.join("main.rs"), &main_rs, "main.rs")?; + + // tasks.rs is scaffolded once โ€” never overwritten + let tasks_path = src_dir.join("tasks.rs"); + if !tasks_path.exists() { + write_if_changed(&tasks_path, &tasks_rs, "tasks.rs")?; + } else { + println!( + " {} {} (user-owned, skipped)", + "ยท".dimmed(), + tasks_path.display() + ); + } + + println!( + "{} hub crate {} generated ({} record(s))", + "โœ“".green(), + hub_dir.display(), + state.records.len() + ); + + Ok(()) + } + /// Common crate mode: generate `{project.name}-common/` directory. 
async fn execute_common_crate(&self, state: &ArchitectureState) -> CliResult<()> { let project = state.project.as_ref().ok_or_else(|| { diff --git a/tools/aimdb-mcp/src/architecture/mod.rs b/tools/aimdb-mcp/src/architecture/mod.rs index daf3adb4..a262efea 100644 --- a/tools/aimdb-mcp/src/architecture/mod.rs +++ b/tools/aimdb-mcp/src/architecture/mod.rs @@ -37,21 +37,41 @@ pub fn session_store() -> Option>> { } // โ”€โ”€ Default paths โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +// +// All paths are anchored to `AIMDB_WORKSPACE` when that variable is set. +// Without it the server falls back to the process CWD, which is unreliable +// when the binary is installed globally (e.g. `~/.cargo/bin/aimdb-mcp`). +// +// `AIMDB_WORKSPACE` must point to the root of the *user's* project โ€” the +// directory that contains (or will contain) the `.aimdb/` folder and `src/`. +// It has nothing to do with the AimDB library installation. +// +// Set it in the project's `.vscode/mcp.json`: +// +// "env": { "AIMDB_WORKSPACE": "${workspaceFolder}" } +// +// `${workspaceFolder}` is expanded by VS Code before the process is started. 
+ +fn project_root() -> PathBuf { + std::env::var("AIMDB_WORKSPACE") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from(".")) +} pub fn default_state_path() -> PathBuf { - PathBuf::from(".aimdb/state.toml") + project_root().join(".aimdb/state.toml") } pub fn default_mermaid_path() -> PathBuf { - PathBuf::from(".aimdb/architecture.mermaid") + project_root().join(".aimdb/architecture.mermaid") } pub fn default_rust_path() -> PathBuf { - PathBuf::from("src/generated_schema.rs") + project_root().join("src/generated_schema.rs") } pub fn default_memory_path() -> PathBuf { - PathBuf::from(".aimdb/memory.md") + project_root().join(".aimdb/memory.md") } // โ”€โ”€ State I/O โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -293,6 +313,8 @@ pub fn ensure_state_initialised(path: &Path) -> anyhow::Result Date: Fri, 27 Feb 2026 20:05:02 +0000 Subject: [PATCH 10/46] feat: implement task and binary proposal tools in MCP server --- aimdb-codegen/src/rust.rs | 22 ++-- aimdb-codegen/src/state.rs | 6 +- tools/aimdb-mcp/src/architecture/mod.rs | 36 +++++- tools/aimdb-mcp/src/prompts/mod.rs | 35 +++++ tools/aimdb-mcp/src/server.rs | 121 ++++++++++++++++- tools/aimdb-mcp/src/tools/architecture.rs | 151 +++++++++++++++++++++- tools/aimdb-mcp/src/tools/mod.rs | 7 +- 7 files changed, 357 insertions(+), 21 deletions(-) diff --git a/aimdb-codegen/src/rust.rs b/aimdb-codegen/src/rust.rs index c6083fbd..82f07c14 100644 --- a/aimdb-codegen/src/rust.rs +++ b/aimdb-codegen/src/rust.rs @@ -231,12 +231,10 @@ pub fn generate_main_rs(state: &ArchitectureState, binary_name: &str) -> Option< let connector_use_stmts: Vec = bin .external_connectors .iter() - .filter_map(|c| { - match c.protocol.as_str() { - "mqtt" => Some(quote! { use aimdb_mqtt_connector::MqttConnector; }), - "knx" => Some(quote! 
{ use aimdb_knx_connector::KnxConnector; }), - _ => None, - } + .filter_map(|c| match c.protocol.as_str() { + "mqtt" => Some(quote! { use aimdb_mqtt_connector::MqttConnector; }), + "knx" => Some(quote! { use aimdb_knx_connector::KnxConnector; }), + _ => None, }) .collect(); @@ -378,9 +376,7 @@ pub fn generate_tasks_rs(state: &ArchitectureState, binary_name: &str) -> Option let fn_name = format_ident!("{}", task.name); // Build parameter list - let mut params: Vec = vec![ - quote! { ctx: RuntimeContext }, - ]; + let mut params: Vec = vec![quote! { ctx: RuntimeContext }]; for input in &task.inputs { let arg_name = format_ident!("{}", to_snake_case(&input.record)); let value_type = format_ident!("{}Value", input.record); @@ -677,7 +673,9 @@ fn emit_connector_address_fns(rec: &RecordDef) -> TokenStream { let fn_name = format_ident!("{}_{}_address", record_snake, conn.protocol); let doc = format!( "Link address for `{}` โ€” {} connector (`{}`).", - rec.name, conn.protocol, conn.direction_label(), + rec.name, + conn.protocol, + conn.direction_label(), ); let arms: Vec = rec @@ -794,7 +792,8 @@ fn emit_record_configure_block(rec: &RecordDef) -> TokenStream { // everything must be a single fluent chain starting from `reg.buffer(...)`. // We build two branches: one with connectors wired (when all addresses // resolve), one plain buffer fallback. 
- let linked_chain = emit_connector_chain(&rec.connectors, &value_type, &buffer_tokens, is_custom); + let linked_chain = + emit_connector_chain(&rec.connectors, &value_type, &buffer_tokens, is_custom); let addr_conditions: Vec = (0..rec.connectors.len()) .map(|i| { let addr_var = format_ident!("addr_{}", i); @@ -1692,7 +1691,6 @@ pub async fn {task_name}() {{\n\ )); } - format!( "// Implement task bodies; signatures are derived from state.toml [[tasks]].\n\ // This file is scaffolded once โ€” it will not be overwritten on subsequent runs.\n\ diff --git a/aimdb-codegen/src/state.rs b/aimdb-codegen/src/state.rs index 50f9e350..f69b2e8e 100644 --- a/aimdb-codegen/src/state.rs +++ b/aimdb-codegen/src/state.rs @@ -235,10 +235,10 @@ pub enum TaskType { Transform, /// LLM-driven reasoning loop, flags anomalies, cross-correlates data. Agent, - /// Fetches external data and produces values into a record. - Producer, + /// Fetches external data and writes values into a record. + Source, /// Forwards, stores, or logs values โ€” no output records in the DB. - Sink, + Tap, } /// One `[[tasks.inputs]]` or `[[tasks.outputs]]` entry. 
diff --git a/tools/aimdb-mcp/src/architecture/mod.rs b/tools/aimdb-mcp/src/architecture/mod.rs index a262efea..f734e4c0 100644 --- a/tools/aimdb-mcp/src/architecture/mod.rs +++ b/tools/aimdb-mcp/src/architecture/mod.rs @@ -9,7 +9,9 @@ pub mod conflicts; pub mod session; -use aimdb_codegen::{generate_mermaid, generate_rust, ArchitectureState, RecordDef}; +use aimdb_codegen::{ + generate_mermaid, generate_rust, ArchitectureState, BinaryDef, RecordDef, TaskDef, +}; use chrono::Utc; use fs2::FileExt; use once_cell::sync::OnceCell; @@ -196,6 +198,14 @@ pub enum ProposedChange { key_variants: Vec, key_prefix: Option, }, + /// Add a new task definition to state.toml + AddTask { task: TaskDef }, + /// Remove an existing task by name + RemoveTask { task_name: String }, + /// Add a new binary definition to state.toml + AddBinary { binary: BinaryDef }, + /// Remove an existing binary by name + RemoveBinary { binary_name: String }, } /// Resolution for a proposal. @@ -294,6 +304,30 @@ pub fn apply_change(state: &mut ArchitectureState, change: &ProposedChange) -> a rec.key_prefix = prefix.clone(); } } + + ProposedChange::AddTask { task } => { + if let Some(pos) = state.tasks.iter().position(|t| t.name == task.name) { + state.tasks[pos] = task.clone(); + } else { + state.tasks.push(task.clone()); + } + } + + ProposedChange::RemoveTask { task_name } => { + state.tasks.retain(|t| &t.name != task_name); + } + + ProposedChange::AddBinary { binary } => { + if let Some(pos) = state.binaries.iter().position(|b| b.name == binary.name) { + state.binaries[pos] = binary.clone(); + } else { + state.binaries.push(binary.clone()); + } + } + + ProposedChange::RemoveBinary { binary_name } => { + state.binaries.retain(|b| &b.name != binary_name); + } } Ok(()) diff --git a/tools/aimdb-mcp/src/prompts/mod.rs b/tools/aimdb-mcp/src/prompts/mod.rs index 2c87d533..d1f2ef4b 100644 --- a/tools/aimdb-mcp/src/prompts/mod.rs +++ b/tools/aimdb-mcp/src/prompts/mod.rs @@ -280,6 +280,36 @@ If the user says 
"one per device" without listing them: The agent may derive device lists from fleet manifests, config files, or API responses the user provides. +## Tasks and Binaries + +After records are defined, help the user define **tasks** โ€” the async functions +that source, transform, or tap data โ€” and **binaries** โ€” the deployable +crates that group tasks together. + +### Task Types +- **Source**: Autonomous producer that generates data and writes to a record (sensor polling, simulation, periodic computation). Mutually exclusive with Transform on the same record. +- **Transform**: Reactive derivation that reads one or more input records, computes, and writes to an output record (map, accumulate, join). Mutually exclusive with Source on the same record. +- **Tap**: Read-only observer that subscribes to a record and reacts to values without writing output records (logging, metrics, triggering side-effects). +- **Agent**: LLM-driven reasoning loop that reads records, reasons over them, and writes conclusions or actions to output records (anomaly detection, cross-correlation, adaptive control). + +Note: External data flow (MQTT, KNX, WebSocket) is handled by **connectors** via +`link_from` (inbound) and `link_to` (outbound) โ€” not by tasks. + +### Task I/O +Each task declares its inputs (records it reads) and outputs (records it writes). +Optionally filter by variant: `{ record = "Temperature", variants = ["Indoor"] }`. + +### Binaries +A binary groups tasks into a deployable crate. Each binary can also declare +`external_connectors` for runtime broker connections (MQTT, KNX) with env vars. + +### Ideation Flow for Tasks +1. Once records are settled, ask: "What processes operate on this data?" +2. For each process, resolve: task type, which records it reads/writes +3. Propose with `propose_add_task` +4. After tasks are defined, ask about deployment grouping +5. 
Propose binaries with `propose_add_binary` + ## Mermaid Conventions Read `aimdb://architecture/conventions` for the full specification. Summary: @@ -429,6 +459,11 @@ you have enough to make the first record proposal. > "Are you running on embedded hardware, edge servers, cloud, or a mix?" > This affects connector and buffer choices. +### 6. Processing and deployment +> "What processing happens between data sources and consumers? +> (transformation, aggregation, anomaly detection, forwarding...)" +> This maps to task definitions and binary grouping. + ## Transition to Proposals Once you have a clear picture of at least one data source and its consumers, diff --git a/tools/aimdb-mcp/src/server.rs b/tools/aimdb-mcp/src/server.rs index 5c25523c..60747d8a 100644 --- a/tools/aimdb-mcp/src/server.rs +++ b/tools/aimdb-mcp/src/server.rs @@ -514,6 +514,121 @@ impl McpServer { "additionalProperties": false }), }, + Tool { + name: "propose_add_task".to_string(), + description: "Propose adding a new task definition. Tasks are async functions that produce, transform, or consume record data. Present the proposal to the user before calling resolve_proposal.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "snake_case task function name, e.g. \"sensor_polling_task\"" + }, + "description": { + "type": "string", + "description": "Human-readable description of the proposal shown to the user" + }, + "task_type": { + "type": "string", + "enum": ["transform", "agent", "source", "tap"], + "description": "Functional role: source (autonomous producer writing to a record), transform (reactive derivation from input records to output record), tap (read-only observer, no output records), agent (LLM reasoning loop). 
Default: transform" + }, + "inputs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "record": { "type": "string", "description": "PascalCase record name to read from" }, + "variants": { "type": "array", "items": { "type": "string" }, "description": "Specific variants to consume (empty = all)" } + }, + "required": ["record"] + }, + "description": "Records this task reads from" + }, + "outputs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "record": { "type": "string", "description": "PascalCase record name to write to" }, + "variants": { "type": "array", "items": { "type": "string" }, "description": "Specific variants to produce (empty = all)" } + }, + "required": ["record"] + }, + "description": "Records this task writes to" + } + }, + "required": ["name", "description"], + "additionalProperties": false + }), + }, + Tool { + name: "propose_add_binary".to_string(), + description: "Propose adding a new binary definition. Binaries are deployable crates that group tasks together and optionally declare external broker connections. Present the proposal to the user before calling resolve_proposal.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Crate directory name, e.g. \"weather-sentinel-hub\"" + }, + "description": { + "type": "string", + "description": "Human-readable description of the proposal shown to the user" + }, + "tasks": { + "type": "array", + "items": { "type": "string" }, + "description": "Task names belonging to this binary (must match [[tasks]] entries)" + }, + "external_connectors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "protocol": { "type": "string", "description": "Protocol identifier, e.g. 
\"mqtt\"" }, + "env_var": { "type": "string", "description": "Environment variable for the broker URL" }, + "default": { "type": "string", "description": "Default URL when env var is not set" } + }, + "required": ["protocol", "env_var"] + }, + "description": "Runtime broker connections needed by this binary" + } + }, + "required": ["name", "description"], + "additionalProperties": false + }), + }, + Tool { + name: "remove_task".to_string(), + description: "Propose removal of an existing task. Creates a pending proposal โ€” call resolve_proposal to confirm. Note: removing a task affects binaries that reference it.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "task_name": { + "type": "string", + "description": "snake_case name of the task to remove" + } + }, + "required": ["task_name"], + "additionalProperties": false + }), + }, + Tool { + name: "remove_binary".to_string(), + description: "Propose removal of an existing binary. Creates a pending proposal โ€” call resolve_proposal to confirm. Task definitions are preserved; only the binary grouping is removed.".to_string(), + input_schema: json!({ + "type": "object", + "properties": { + "binary_name": { + "type": "string", + "description": "Name of the binary crate to remove" + } + }, + "required": ["binary_name"], + "additionalProperties": false + }), + }, Tool { name: "resolve_proposal".to_string(), description: "Resolve a pending proposal. On confirm: applies the change, writes state.toml, generates Mermaid and Rust artefacts. On reject: discards without changes. 
On revise: discards with a redirect message.".to_string(), @@ -522,7 +637,7 @@ impl McpServer { "properties": { "proposal_id": { "type": "string", - "description": "The proposal ID returned by propose_add_record, propose_modify_buffer, propose_add_connector, propose_modify_fields, propose_modify_key_variants, remove_record, or rename_record" + "description": "The proposal ID returned by any propose_* tool, remove_record, rename_record, remove_task, or remove_binary" }, "resolution": { "type": "string", @@ -685,9 +800,13 @@ impl McpServer { "propose_modify_key_variants" => { tools::propose_modify_key_variants(params.arguments).await? } + "propose_add_task" => tools::propose_add_task(params.arguments).await?, + "propose_add_binary" => tools::propose_add_binary(params.arguments).await?, "resolve_proposal" => tools::resolve_proposal(params.arguments).await?, "remove_record" => tools::remove_record(params.arguments).await?, "rename_record" => tools::rename_record(params.arguments).await?, + "remove_task" => tools::remove_task(params.arguments).await?, + "remove_binary" => tools::remove_binary(params.arguments).await?, "validate_against_instance" => { tools::validate_against_instance(params.arguments).await? } diff --git a/tools/aimdb-mcp/src/tools/architecture.rs b/tools/aimdb-mcp/src/tools/architecture.rs index c4535393..c10c9f0a 100644 --- a/tools/aimdb-mcp/src/tools/architecture.rs +++ b/tools/aimdb-mcp/src/tools/architecture.rs @@ -3,7 +3,8 @@ //! Provides typed proposal tools for the architecture agent ideation loop: //! `get_architecture`, `propose_add_record`, `propose_modify_buffer`, //! `propose_add_connector`, `propose_modify_fields`, `propose_modify_key_variants`, -//! `resolve_proposal`, `remove_record`, `rename_record`, +//! `propose_add_task`, `propose_add_binary`, +//! `resolve_proposal`, `remove_record`, `rename_record`, `remove_task`, `remove_binary`, //! `validate_against_instance`, `get_buffer_metrics`, `reset_session`. //! //! 
All proposal-related tools are routed through the session state machine, @@ -324,6 +325,154 @@ pub async fn propose_modify_key_variants(args: Option) -> McpResult) -> McpResult { + debug!("propose_add_task called"); + + const SCHEMA_HINT: &str = concat!( + "Expected fields:\n", + " name : string โ€” snake_case task function name (required)\n", + " description : string โ€” human-readable description (required)\n", + " task_type : string โ€” \"source\" | \"transform\" | \"tap\" | \"agent\" (default: \"transform\")\n", + " inputs : [{record, variants?}][] โ€” records this task reads from (optional)\n", + " outputs : [{record, variants?}][] โ€” records this task writes to (optional)" + ); + + #[derive(Debug, Deserialize)] + struct Params { + description: String, + #[serde(flatten)] + task: aimdb_codegen::TaskDef, + } + + let p: Params = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("propose_add_task: {e}\n\n{SCHEMA_HINT}")))?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "add_task".to_string(), + description: p.description, + change: ProposedChange::AddTask { task: p.task }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "message": "Proposal created. Present it to the user and call resolve_proposal with their decision.", + })) +} + +// โ”€โ”€ propose_add_binary โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Add a new binary definition. Binaries are deployable crates that group +/// tasks and optionally declare external broker connections. 
+pub async fn propose_add_binary(args: Option) -> McpResult { + debug!("propose_add_binary called"); + + const SCHEMA_HINT: &str = concat!( + "Expected fields:\n", + " name : string โ€” crate directory name (required)\n", + " description : string โ€” human-readable description (required)\n", + " tasks : string[] โ€” task names belonging to this binary (optional)\n", + " external_connectors : [{protocol, env_var, default}][] โ€” broker connections (optional)" + ); + + #[derive(Debug, Deserialize)] + struct Params { + description: String, + #[serde(flatten)] + binary: aimdb_codegen::BinaryDef, + } + + let p: Params = serde_json::from_value(args.unwrap_or(Value::Null)).map_err(|e| { + McpError::InvalidParams(format!("propose_add_binary: {e}\n\n{SCHEMA_HINT}")) + })?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "add_binary".to_string(), + description: p.description, + change: ProposedChange::AddBinary { binary: p.binary }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "message": "Proposal created. Present it to the user and call resolve_proposal with their decision.", + })) +} + +// โ”€โ”€ remove_task โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Propose removal of an existing task (creates a pending proposal). +/// +/// Enforces session phase: must be in `Gathering` phase. 
+pub async fn remove_task(args: Option) -> McpResult { + debug!("remove_task called"); + + #[derive(Debug, Deserialize)] + struct Params { + task_name: String, + } + + let params: Params = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("remove_task: {e}")))?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "remove_task".to_string(), + description: format!("Remove task '{}'", params.task_name), + change: ProposedChange::RemoveTask { + task_name: params.task_name.clone(), + }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "task_name": params.task_name, + "warning": "Removing this task will affect any binaries that reference it and any records listing it as a producer/consumer.", + "message": "Removal proposal created. Present to the user, then call resolve_proposal.", + })) +} + +// โ”€โ”€ remove_binary โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Propose removal of an existing binary (creates a pending proposal). +/// +/// Enforces session phase: must be in `Gathering` phase. 
+pub async fn remove_binary(args: Option) -> McpResult { + debug!("remove_binary called"); + + #[derive(Debug, Deserialize)] + struct Params { + binary_name: String, + } + + let params: Params = serde_json::from_value(args.unwrap_or(Value::Null)) + .map_err(|e| McpError::InvalidParams(format!("remove_binary: {e}")))?; + + let proposal_id = submit_proposal(Proposal { + id: next_proposal_id(), + change_type: "remove_binary".to_string(), + description: format!("Remove binary '{}'", params.binary_name), + change: ProposedChange::RemoveBinary { + binary_name: params.binary_name.clone(), + }, + created_at: chrono::Utc::now().to_rfc3339(), + })?; + + Ok(serde_json::json!({ + "proposal_id": proposal_id, + "binary_name": params.binary_name, + "warning": "Removing this binary will delete the generated crate scaffold. The task definitions themselves are preserved.", + "message": "Removal proposal created. Present to the user, then call resolve_proposal.", + })) +} + // โ”€โ”€ resolve_proposal โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ #[derive(Debug, Deserialize)] diff --git a/tools/aimdb-mcp/src/tools/mod.rs b/tools/aimdb-mcp/src/tools/mod.rs index 31995fca..8ecd7e34 100644 --- a/tools/aimdb-mcp/src/tools/mod.rs +++ b/tools/aimdb-mcp/src/tools/mod.rs @@ -26,9 +26,10 @@ pub(crate) fn connection_pool() -> Option<&'static ConnectionPool> { // Re-export tool functions pub use architecture::{ - get_architecture, get_buffer_metrics, propose_add_connector, propose_add_record, - propose_modify_buffer, propose_modify_fields, propose_modify_key_variants, remove_record, - rename_record, reset_session, resolve_proposal, save_memory, validate_against_instance, + get_architecture, get_buffer_metrics, propose_add_binary, propose_add_connector, + propose_add_record, propose_add_task, propose_modify_buffer, propose_modify_fields, + propose_modify_key_variants, 
remove_binary, remove_record, remove_task, rename_record, + reset_session, resolve_proposal, save_memory, validate_against_instance, }; pub use graph::{graph_edges, graph_nodes, graph_topo_order}; pub use instance::{discover_instances, get_instance_info}; From 783720f5fa5832655495075d798462aac89c4770 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Fri, 27 Feb 2026 20:26:57 +0000 Subject: [PATCH 11/46] feat: implement WebSocket connector with client management and session handling - Add ClientManager for tracking connected WebSocket clients and their subscriptions. - Implement WebSocketConnector for managing outbound publishing and routing. - Create protocol definitions for server-client communication. - Develop server functionality using Axum for WebSocket upgrades and health checks. - Establish session management for individual WebSocket connections, including message handling and subscription management. - Introduce snapshot functionality for late-join clients. - Implement topic matching for subscription patterns. - Add tests for client registration, subscription, and broadcasting. 
--- Cargo.lock | 435 +++++++++++++++++- Cargo.toml | 1 + Makefile | 43 +- aimdb-websocket-connector/Cargo.toml | 52 +++ aimdb-websocket-connector/src/auth.rs | 172 +++++++ aimdb-websocket-connector/src/builder.rs | 332 +++++++++++++ .../src/client_manager.rs | 376 +++++++++++++++ aimdb-websocket-connector/src/connector.rs | 153 ++++++ aimdb-websocket-connector/src/lib.rs | 67 +++ aimdb-websocket-connector/src/protocol.rs | 209 +++++++++ aimdb-websocket-connector/src/server.rs | 170 +++++++ aimdb-websocket-connector/src/session.rs | 310 +++++++++++++ 12 files changed, 2288 insertions(+), 32 deletions(-) create mode 100644 aimdb-websocket-connector/Cargo.toml create mode 100644 aimdb-websocket-connector/src/auth.rs create mode 100644 aimdb-websocket-connector/src/builder.rs create mode 100644 aimdb-websocket-connector/src/client_manager.rs create mode 100644 aimdb-websocket-connector/src/connector.rs create mode 100644 aimdb-websocket-connector/src/lib.rs create mode 100644 aimdb-websocket-connector/src/protocol.rs create mode 100644 aimdb-websocket-connector/src/server.rs create mode 100644 aimdb-websocket-connector/src/session.rs diff --git a/Cargo.lock b/Cargo.lock index 39d8fd5e..1667d232 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -99,7 +99,7 @@ version = "0.5.0" dependencies = [ "aimdb-core", "aimdb-executor", - "rand", + "rand 0.8.5", "serde", "serde_json", "ts-rs", @@ -131,7 +131,7 @@ dependencies = [ "futures", "futures-core", "heapless 0.9.1", - "rand", + "rand 0.8.5", "tracing", "tracing-test", ] @@ -272,6 +272,24 @@ dependencies = [ "tracing", ] +[[package]] +name = "aimdb-websocket-connector" +version = "0.1.0" +dependencies = [ + "aimdb-core", + "aimdb-executor", + "async-trait", + "axum", + "dashmap", + "futures-util", + "serde", + "serde_json", + "tokio", + "tokio-tungstenite 0.26.2", + "tower-http", + "tracing", +] + [[package]] name = "aligned" version = "0.4.3" @@ -377,6 +395,23 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = 
"async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.108", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "autocfg" version = "1.5.0" @@ -405,6 +440,61 @@ dependencies = [ "fs_extra", ] +[[package]] +name = "axum" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8" +dependencies = [ + "axum-core", + "base64 0.22.1", + "bytes", + "form_urlencoded", + "futures-util", + "http 1.4.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.8.1", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "serde_core", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper 1.0.2", + "tokio", + "tokio-tungstenite 0.28.0", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" +dependencies = [ + "bytes", + "futures-core", + "http 1.4.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper 1.0.2", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "bare-metal" version = "0.2.5" @@ -426,6 +516,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "bit_field" version = "0.10.3" @@ -450,6 +546,15 @@ version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + [[package]] name = "block-device-driver" version = "0.2.0" @@ -653,12 +758,37 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + [[package]] name = "critical-section" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + [[package]] name = "darling" version = "0.20.11" @@ -694,6 +824,26 @@ dependencies = [ "syn 2.0.108", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + 
+[[package]] +name = "data-encoding" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" + [[package]] name = "defmt" version = "0.3.100" @@ -745,6 +895,16 @@ dependencies = [ "defmt 1.0.1", ] +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -857,7 +1017,7 @@ dependencies = [ "knx-connector-demo-common", "micromath", "panic-probe", - "rand", + "rand 0.8.5", "static_cell", "stm32-fmc 0.3.2", ] @@ -890,7 +1050,7 @@ dependencies = [ "micromath", "mqtt-connector-demo-common", "panic-probe", - "rand", + "rand 0.8.5", "static_cell", "stm32-fmc 0.3.2", ] @@ -1376,6 +1536,16 @@ dependencies = [ "windows", ] +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + [[package]] name = "getrandom" version = "0.2.16" @@ -1410,7 +1580,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.12", "indexmap", "slab", "tokio", @@ -1498,6 +1668,16 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + [[package]] name = "http-body" version = "0.4.6" @@ -1505,7 +1685,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.12", + 
"pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.4.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http 1.4.0", + "http-body 1.0.1", "pin-project-lite", ] @@ -1532,8 +1735,8 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", @@ -1545,6 +1748,27 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "http 1.4.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", +] + [[package]] name = "hyper-tls" version = "0.5.0" @@ -1552,12 +1776,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper", + "hyper 0.14.32", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "bytes", + "http 1.4.0", + "http-body 1.0.1", + "hyper 1.8.1", + "pin-project-lite", + "tokio", + "tower-service", +] + [[package]] name = "iana-time-zone" version = "0.1.64" @@ -1847,6 +2086,12 @@ dependencies = [ "regex-automata", ] 
+[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "memchr" version = "2.7.6" @@ -2180,10 +2425,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", + "rand_chacha 0.3.1", "rand_core 0.6.4", ] +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + [[package]] name = "rand_chacha" version = "0.3.1" @@ -2194,6 +2449,16 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + [[package]] name = "rand_core" version = "0.6.4" @@ -2208,6 +2473,9 @@ name = "rand_core" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", +] [[package]] name = "redox_syscall" @@ -2272,9 +2540,9 @@ dependencies = [ "futures-core", "futures-util", "h2", - "http", - "http-body", - "hyper", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", "hyper-tls", "ipnet", "js-sys", @@ -2288,7 +2556,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 0.1.2", "system-configuration", "tokio", "tokio-native-tls", @@ -2593,6 +2861,17 @@ dependencies = [ "serde_core", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + [[package]] name = "serde_spanned" version = "0.6.9" @@ -2627,6 +2906,17 @@ dependencies = [ "unsafe-libyaml", ] +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sharded-slab" version = "0.1.7" @@ -2818,6 +3108,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" + [[package]] name = "synstructure" version = "0.13.2" @@ -3068,6 +3364,30 @@ dependencies = [ "tokio-stream", ] +[[package]] +name = "tokio-tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite 0.26.2", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite 0.28.0", +] + [[package]] name = "tokio-util" version = "0.7.17" @@ -3122,6 +3442,42 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" +[[package]] +name = "tower" +version = "0.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 1.0.2", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags 2.10.0", + "bytes", + "http 1.4.0", + "pin-project-lite", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + [[package]] name = "tower-service" version = "0.3.3" @@ -3134,6 +3490,7 @@ version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -3250,6 +3607,46 @@ dependencies = [ "termcolor", ] +[[package]] +name = "tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +dependencies = [ + "bytes", + "data-encoding", + "http 1.4.0", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.17", + "utf-8", +] + +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http 1.4.0", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.17", + "utf-8", +] + +[[package]] +name = "typenum" +version = "1.19.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + [[package]] name = "unicode-ident" version = "1.0.20" @@ -3292,6 +3689,12 @@ dependencies = [ "serde", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf8_iter" version = "1.0.4" @@ -3486,7 +3889,7 @@ dependencies = [ "aimdb-mqtt-connector", "aimdb-tokio-adapter", "chrono", - "rand", + "rand 0.8.5", "tokio", "tracing", "tracing-subscriber", @@ -3521,7 +3924,7 @@ dependencies = [ "heapless 0.8.0", "micromath", "panic-probe", - "rand", + "rand 0.8.5", "static_cell", "stm32-fmc 0.3.2", "weather-mesh-common", diff --git a/Cargo.toml b/Cargo.toml index 3303bc08..e37ed5d8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,6 +13,7 @@ members = [ "aimdb-persistence-sqlite", "aimdb-mqtt-connector", "aimdb-knx-connector", + "aimdb-websocket-connector", "tools/aimdb-cli", "tools/aimdb-mcp", "examples/knx-connector-demo-common", diff --git a/Makefile b/Makefile index a5f7f491..524c378c 100644 --- a/Makefile +++ b/Makefile @@ -68,6 +68,8 @@ build: cargo build --package aimdb-persistence-sqlite @printf "$(YELLOW) โ†’ Building KNX connector$(NC)\n" cargo build --package aimdb-knx-connector --features "std,tokio-runtime" + @printf "$(YELLOW) โ†’ Building WebSocket connector$(NC)\n" + cargo build --package aimdb-websocket-connector --features "tokio-runtime" test: @printf "$(GREEN)Running all tests (valid combinations)...$(NC)\n" @@ -101,10 +103,12 @@ test: cargo test --package aimdb-mqtt-connector --features "std,tokio-runtime" @printf "$(YELLOW) โ†’ Testing KNX connector$(NC)\n" cargo test --package aimdb-knx-connector --features "std,tokio-runtime" + @printf "$(YELLOW) โ†’ Testing WebSocket connector$(NC)\n" + cargo test --package aimdb-websocket-connector --features 
"tokio-runtime" fmt: @printf "$(GREEN)Formatting code (workspace members only)...$(NC)\n" - @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Formatting $$pkg$(NC)\n"; \ cargo fmt -p $$pkg 2>/dev/null || true; \ done @@ -113,7 +117,7 @@ fmt: fmt-check: @printf "$(GREEN)Checking code formatting (workspace members only)...$(NC)\n" @FAILED=0; \ - for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp 
sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Checking $$pkg$(NC)\n"; \ if ! cargo fmt -p $$pkg -- --check 2>&1; then \ printf "$(RED)โŒ Formatting check failed for $$pkg$(NC)\n"; \ @@ -166,6 +170,8 @@ clippy: cargo clippy --package aimdb-mqtt-connector --target thumbv7em-none-eabihf --no-default-features --features "embassy-runtime,defmt" -- -D warnings @printf "$(YELLOW) โ†’ Clippy on KNX connector (embassy + defmt)$(NC)\n" cargo clippy --package aimdb-knx-connector --target thumbv7em-none-eabihf --no-default-features --features "embassy-runtime,defmt" -- -D warnings + @printf "$(YELLOW) โ†’ Clippy on WebSocket connector$(NC)\n" + cargo clippy --package aimdb-websocket-connector --features "tokio-runtime" --all-targets -- -D warnings doc: @printf "$(GREEN)Generating dual-platform documentation...$(NC)\n" @@ -184,6 +190,7 @@ doc: cargo doc --package aimdb-mcp --no-deps cargo doc --package aimdb-persistence --no-deps cargo doc --package aimdb-persistence-sqlite --no-deps + cargo doc --package aimdb-websocket-connector --features "tokio-runtime" --no-deps @cp -r target/doc/* target/doc-final/cloud/ @printf "$(YELLOW) โ†’ Building embedded documentation$(NC)\n" cargo doc --package aimdb-core --no-default-features --features alloc --no-deps @@ -301,59 +308,63 @@ publish: else \ printf "$(BLUE)Running in CI mode - skipping confirmation$(NC)\n"; \ fi - @printf "$(YELLOW) โ†’ Publishing aimdb-executor (1/13)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-executor (1/15)$(NC)\n" @cargo publish -p aimdb-executor @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-derive (2/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-derive (2/15)$(NC)\n" @cargo publish -p aimdb-derive @printf "$(YELLOW) โ†’ Waiting 10s for 
crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-codegen (3/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-codegen (3/15)$(NC)\n" @cargo publish -p aimdb-codegen @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-core (4/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-core (4/15)$(NC)\n" @cargo publish -p aimdb-core @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-tokio-adapter (5/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-tokio-adapter (5/15)$(NC)\n" @cargo publish -p aimdb-tokio-adapter @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-embassy-adapter (6/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-embassy-adapter (6/15)$(NC)\n" @cargo publish -p aimdb-embassy-adapter --no-verify @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-client (7/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-client (7/15)$(NC)\n" @cargo publish -p aimdb-client @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-sync (8/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-sync (8/15)$(NC)\n" @cargo publish -p aimdb-sync @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-persistence (9/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-persistence (9/15)$(NC)\n" @cargo publish -p aimdb-persistence @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-persistence-sqlite (10/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-persistence-sqlite (10/15)$(NC)\n" @cargo publish -p aimdb-persistence-sqlite 
@printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-mqtt-connector (11/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-mqtt-connector (11/15)$(NC)\n" @cargo publish -p aimdb-mqtt-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-knx-connector (12/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-knx-connector (12/15)$(NC)\n" @cargo publish -p aimdb-knx-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-cli (13/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-websocket-connector (13/15)$(NC)\n" + @cargo publish -p aimdb-websocket-connector + @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" + @sleep 10 + @printf "$(YELLOW) โ†’ Publishing aimdb-cli (14/15)$(NC)\n" @cargo publish -p aimdb-cli @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-mcp (14/14)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-mcp (15/15)$(NC)\n" @cargo publish -p aimdb-mcp @printf "$(GREEN)โœ“ All crates published successfully!$(NC)\n" @printf "$(BLUE)๐ŸŽ‰ AimDB v$(shell grep '^version' Cargo.toml | head -1 | cut -d '"' -f 2) is now live on crates.io!$(NC)\n" diff --git a/aimdb-websocket-connector/Cargo.toml b/aimdb-websocket-connector/Cargo.toml new file mode 100644 index 00000000..678d3c1d --- /dev/null +++ b/aimdb-websocket-connector/Cargo.toml @@ -0,0 +1,52 @@ +[package] +name = "aimdb-websocket-connector" +version = "0.1.0" +edition = "2021" +license.workspace = true +description = "WebSocket connector for AimDB โ€” real-time bidirectional streaming" + +[features] +default = ["tokio-runtime"] +tokio-runtime = [ + "aimdb-core/std", + "aimdb-executor/std", + "dep:tokio", + "dep:axum", + "dep:tower-http", +] +tracing = ["dep:tracing"] + 
+[dependencies] +aimdb-core = { path = "../aimdb-core", default-features = false } +aimdb-executor = { path = "../aimdb-executor", default-features = false } + +# Async runtime +tokio = { version = "1", features = [ + "rt", + "sync", + "macros", + "net", + "time", +], optional = true } + +# WebSocket server +axum = { version = "0.8", features = ["ws"], optional = true } +tower-http = { version = "0.6", features = ["cors"], optional = true } + +# Serialization +serde = { version = "1", features = ["derive"] } +serde_json = "1" + +# Utilities +dashmap = "6" +async-trait = "0.1" +futures-util = { version = "0.3", default-features = false, features = [ + "sink", +] } + +# Observability +tracing = { version = "0.1", optional = true } + +[dev-dependencies] +tokio = { version = "1", features = ["full", "test-util"] } +tokio-tungstenite = "0.26" diff --git a/aimdb-websocket-connector/src/auth.rs b/aimdb-websocket-connector/src/auth.rs new file mode 100644 index 00000000..018ea945 --- /dev/null +++ b/aimdb-websocket-connector/src/auth.rs @@ -0,0 +1,172 @@ +//! Authentication and authorization for WebSocket connections. +//! +//! The [`AuthHandler`] trait provides pluggable auth hooks for: +//! +//! 1. **Connection upgrade** โ€” `authenticate()`: decide whether to accept the WebSocket +//! handshake and assign per-client permissions. +//! 2. **Topic subscriptions** โ€” `authorize_subscribe()`: gate which topics a client can +//! receive data from. +//! 3. **Inbound writes** โ€” `authorize_write()`: gate which topics a client may write to. +//! +//! The default implementation ([`NoAuth`]) allows all operations. 
+ +use std::collections::HashMap; +use std::net::SocketAddr; +use std::sync::Arc; + +use async_trait::async_trait; +use axum::http::HeaderMap; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Public types +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Opaque identifier for a connected WebSocket client. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ClientId(pub(crate) u64); + +impl std::fmt::Display for ClientId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "client-{}", self.0) + } +} + +/// Information about a connected client, passed to authorization hooks. +#[derive(Debug, Clone)] +pub struct ClientInfo { + pub id: ClientId, + pub remote_addr: SocketAddr, + pub permissions: Permissions, +} + +/// Per-client permission set assigned during authentication. +/// +/// Each field is a list of topic *patterns* (supporting `*` and `#` wildcards +/// as defined in [`crate::protocol`]). +/// +/// An empty `Vec` means *"no access"*. Use `["#"]` for unrestricted access. +#[derive(Debug, Clone, Default)] +pub struct Permissions { + /// Topic patterns the client may subscribe to. + pub subscribe_patterns: Vec, + /// Topic patterns the client may write to. + pub write_patterns: Vec, +} + +impl Permissions { + /// Creates a permission set that grants full access to everything. + pub fn allow_all() -> Self { + Self { + subscribe_patterns: vec!["#".to_string()], + write_patterns: vec!["#".to_string()], + } + } + + /// Returns `true` if the client is allowed to subscribe to `topic`. 
+ pub fn can_subscribe(&self, topic: &str) -> bool { + self.subscribe_patterns + .iter() + .any(|p| crate::protocol::topic_matches(p, topic)) + } + + /// Returns `true` if the client is allowed to write to `topic`. + pub fn can_write(&self, topic: &str) -> bool { + self.write_patterns + .iter() + .any(|p| crate::protocol::topic_matches(p, topic)) + } +} + +/// Context provided to [`AuthHandler::authenticate`] during WebSocket upgrade. +#[derive(Debug)] +pub struct AuthRequest { + pub headers: HeaderMap, + pub query_params: HashMap, + pub remote_addr: SocketAddr, +} + +/// Error returned when authentication fails. +/// +/// The message is forwarded to the client as an HTTP 401 response body. +#[derive(Debug, Clone)] +pub struct AuthError { + pub message: String, +} + +impl AuthError { + pub fn new(msg: impl Into) -> Self { + Self { + message: msg.into(), + } + } +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// AuthHandler trait +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Pluggable authentication and authorization hook. 
+/// +/// # Example โ€” Bearer token auth +/// +/// ```rust,ignore +/// use aimdb_websocket_connector::auth::{AuthHandler, AuthRequest, AuthError, Permissions}; +/// +/// struct BearerAuth { valid_token: String } +/// +/// #[async_trait::async_trait] +/// impl AuthHandler for BearerAuth { +/// async fn authenticate(&self, req: &AuthRequest) -> Result { +/// let token = req.headers +/// .get("Authorization") +/// .and_then(|v| v.to_str().ok()) +/// .and_then(|v| v.strip_prefix("Bearer ")) +/// .ok_or_else(|| AuthError::new("missing token"))?; +/// +/// if token == self.valid_token { +/// Ok(Permissions::allow_all()) +/// } else { +/// Err(AuthError::new("invalid token")) +/// } +/// } +/// } +/// ``` +#[async_trait] +pub trait AuthHandler: Send + Sync + 'static { + /// Called during WebSocket upgrade to authenticate the client. + /// + /// Return [`Ok(Permissions)`] to accept the connection with the assigned + /// permissions, or [`Err(AuthError)`] to reject it (HTTP 401). + async fn authenticate(&self, request: &AuthRequest) -> Result; + + /// Called before allowing a topic subscription. + /// + /// The default implementation delegates to [`Permissions::can_subscribe`]. + async fn authorize_subscribe(&self, client: &ClientInfo, topic: &str) -> bool { + client.permissions.can_subscribe(topic) + } + + /// Called before routing an inbound write to a producer. + /// + /// The default implementation delegates to [`Permissions::can_write`]. 
+ async fn authorize_write(&self, client: &ClientInfo, topic: &str) -> bool { + client.permissions.can_write(topic) + } +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// NoAuth โ€” allow-all default +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Default `AuthHandler` that allows all connections and operations. +pub struct NoAuth; + +#[async_trait] +impl AuthHandler for NoAuth { + async fn authenticate(&self, _request: &AuthRequest) -> Result { + Ok(Permissions::allow_all()) + } +} + +/// Type-erased auth handler stored inside the connector. +pub(crate) type DynAuthHandler = Arc; diff --git a/aimdb-websocket-connector/src/builder.rs b/aimdb-websocket-connector/src/builder.rs new file mode 100644 index 00000000..a7b9f422 --- /dev/null +++ b/aimdb-websocket-connector/src/builder.rs @@ -0,0 +1,332 @@ +//! Builder for the WebSocket connector. +//! +//! [`WebSocketConnectorBuilder`] implements [`ConnectorBuilder`] from +//! `aimdb-core`, following the same pattern as `MqttConnectorBuilder`. +//! +//! # Lifecycle +//! +//! ```text +//! AimDbBuilder::build() +//! โ””โ”€ WebSocketConnectorBuilder::build(&db) +//! โ”œโ”€ db.collect_inbound_routes("ws") โ†’ Router +//! โ”œโ”€ db.collect_outbound_routes("ws") โ†’ outbound tasks +//! โ”œโ”€ start Axum / WebSocket server +//! โ””โ”€ return Arc +//! 
``` + +use std::{ + collections::HashMap, + net::{SocketAddr, ToSocketAddrs}, + pin::Pin, + sync::{Arc, Mutex}, + time::Duration, +}; + +use aimdb_core::{router::RouterBuilder, ConnectorBuilder}; +use axum::Router as AxumRouter; + +use crate::{ + auth::{AuthHandler, DynAuthHandler, NoAuth}, + client_manager::ClientManager, + connector::WebSocketConnectorImpl, + server::start_server, + session::{NoSnapshot, SessionContext, SnapshotProvider}, +}; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Builder +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Builder for the AimDB WebSocket connector. +/// +/// # Example +/// +/// ```rust,ignore +/// use aimdb_websocket_connector::WebSocketConnector; +/// +/// let connector = WebSocketConnector::new() +/// .bind("0.0.0.0:8080") +/// .path("/ws") +/// .with_late_join(true) +/// .with_max_clients(500); +/// ``` +pub struct WebSocketConnectorBuilder { + bind_addr: SocketAddr, + ws_path: String, + auth: DynAuthHandler, + late_join: bool, + max_clients: usize, + channel_capacity: usize, + additional_routes: Option, + /// Heartbeat interval (reserved for future use). + #[allow(dead_code)] + heartbeat_interval: Duration, + /// Client timeout (reserved for future use). + #[allow(dead_code)] + client_timeout: Duration, + /// Topics to subscribe every new client to automatically on connect. + /// + /// When non-empty, clients receive data on these topics immediately after + /// the WebSocket handshake without having to send a `Subscribe` message. + /// Use `["#"]` to push all topics to every client. 
+ auto_subscribe_topics: Vec, + /// When `true`, the serialized payload bytes are sent directly as the + /// WebSocket text frame โ€” no `ServerMessage::Data` envelope. + /// + /// Combine with a serializer that produces a complete flat JSON object + /// (including `"type"` and `"node_id"`) to speak a custom protocol. + raw_payload: bool, +} + +impl Default for WebSocketConnectorBuilder { + fn default() -> Self { + Self { + bind_addr: "0.0.0.0:8080".parse().unwrap(), + ws_path: "/ws".to_string(), + auth: Arc::new(NoAuth), + late_join: true, + max_clients: 1024, + channel_capacity: 256, + additional_routes: None, + heartbeat_interval: Duration::from_secs(30), + client_timeout: Duration::from_secs(60), + auto_subscribe_topics: Vec::new(), + raw_payload: false, + } + } +} + +impl WebSocketConnectorBuilder { + /// Create a new builder with sensible defaults. + /// + /// Defaults: + /// - bind address: `0.0.0.0:8080` + /// - WebSocket path: `/ws` + /// - auth: allow all + /// - late-join snapshots: enabled + /// - max clients: 1 024 + /// - per-client channel capacity: 256 + pub fn new() -> Self { + Self::default() + } + + /// Set the TCP address to bind the WebSocket server to. + /// + /// # Examples + /// + /// ```rust,ignore + /// .bind("0.0.0.0:9090") + /// .bind(([127, 0, 0, 1], 8765)) + /// ``` + pub fn bind(mut self, addr: impl ToSocketAddrs) -> Self { + if let Some(a) = addr.to_socket_addrs().ok().and_then(|mut i| i.next()) { + self.bind_addr = a; + } + self + } + + /// Set the URL path for the WebSocket upgrade endpoint (default: `"/ws"`). + pub fn path(mut self, path: impl Into) -> Self { + self.ws_path = path.into(); + self + } + + /// Use JSON encoding (currently the only supported encoding, this is the + /// default and is provided for explicitness). + pub fn with_json_encoding(self) -> Self { + self + } + + /// Plug in a custom authentication / authorization handler. 
+ pub fn with_auth(mut self, handler: impl AuthHandler + 'static) -> Self { + self.auth = Arc::new(handler); + self + } + + /// Enable or disable late-join snapshots (default: `true`). + /// + /// When enabled, a client that subscribes to a topic immediately receives + /// the current value (if one is available) as a `snapshot` message before + /// live `data` pushes start. + pub fn with_late_join(mut self, enabled: bool) -> Self { + self.late_join = enabled; + self + } + + /// Set the maximum number of concurrent WebSocket clients (default: 1 024). + /// + /// Currently informational โ€” used for pre-allocating the client map. + pub fn with_max_clients(mut self, max: usize) -> Self { + self.max_clients = max; + self + } + + /// Set the per-client send-buffer capacity in messages (default: 256). + /// + /// If the buffer fills up (slow client), messages are silently dropped via + /// `try_send`. + pub fn with_channel_capacity(mut self, cap: usize) -> Self { + self.channel_capacity = cap; + self + } + + /// Mount additional Axum routes (e.g., REST endpoints) on the same server. + /// + /// The extra routes are merged into the connector's Axum application so that + /// REST and WebSocket traffic can share a single port. + /// + /// # Example + /// + /// ```rust,ignore + /// use axum::{routing::get, Router}; + /// + /// let rest = Router::new().route("/api/status", get(status_handler)); + /// let connector = WebSocketConnector::new().with_additional_routes(rest); + /// ``` + pub fn with_additional_routes(mut self, router: AxumRouter) -> Self { + self.additional_routes = Some(router); + self + } + + /// Set the WebSocket heartbeat interval (default: 30 s). + pub fn with_heartbeat_interval(mut self, interval: Duration) -> Self { + self.heartbeat_interval = interval; + self + } + + /// Set the client inactivity timeout (default: 60 s). 
+ pub fn with_client_timeout(mut self, timeout: Duration) -> Self { + self.client_timeout = timeout; + self + } + + /// Subscribe every new client to these topic patterns immediately on connect. + /// + /// Clients will begin receiving data on matching topics right after the + /// WebSocket handshake without needing to send a `Subscribe` message. + /// + /// # Example + /// + /// ```rust,ignore + /// WebSocketConnector::new() + /// .with_auto_subscribe(["#"]) // push everything + /// .with_auto_subscribe(["sensors/#"]) // only sensor topics + /// ``` + pub fn with_auto_subscribe( + mut self, + topics: impl IntoIterator>, + ) -> Self { + self.auto_subscribe_topics = topics.into_iter().map(Into::into).collect(); + self + } + + /// Send serializer output directly as a WebSocket text frame, bypassing + /// the `{"type":"data","topic":โ€ฆ,"payload":โ€ฆ}` envelope. + /// + /// Use this when the record serializers already produce the complete JSON + /// expected by the client (e.g. `{"type":"temperature","node_id":โ€ฆ}`). 
+ pub fn with_raw_payload(mut self, enabled: bool) -> Self { + self.raw_payload = enabled; + self + } +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// ConnectorBuilder impl +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +impl ConnectorBuilder for WebSocketConnectorBuilder +where + R: aimdb_executor::Spawn + 'static, +{ + fn scheme(&self) -> &str { + "ws" + } + + fn build<'a>( + &'a self, + db: &'a aimdb_core::builder::AimDb, + ) -> Pin< + Box< + dyn core::future::Future< + Output = aimdb_core::DbResult>, + > + Send + + 'a, + >, + > { + Box::pin(async move { + // โ”€โ”€ Inbound routes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let inbound_routes = db.collect_inbound_routes("ws"); + + #[cfg(feature = "tracing")] + tracing::info!( + "WS connector: {} inbound routes collected", + inbound_routes.len() + ); + + let router = Arc::new(RouterBuilder::from_routes(inbound_routes).build()); + + // โ”€โ”€ Outbound routes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let outbound_routes = db.collect_outbound_routes("ws"); + + #[cfg(feature = "tracing")] + tracing::info!( + "WS connector: {} outbound routes collected", + outbound_routes.len() + ); + + // โ”€โ”€ Shared snapshot cache (for late-join) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let snapshot_map: Arc>>> = + Arc::new(Mutex::new(HashMap::new())); + + // โ”€โ”€ Client manager โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let client_mgr = ClientManager::new(); + + // โ”€โ”€ Build snapshot 
provider โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let snapshot_provider: Arc = if self.late_join { + let snap = snapshot_map.clone(); + Arc::new(DynMapSnapshot(snap)) + } else { + Arc::new(NoSnapshot) + }; + + // โ”€โ”€ Session context โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let session_ctx = SessionContext { + client_mgr: client_mgr.clone(), + router: router.clone(), + auth: self.auth.clone(), + channel_capacity: self.channel_capacity, + late_join: self.late_join, + snapshot_provider, + auto_subscribe_topics: self.auto_subscribe_topics.clone(), + }; + + // โ”€โ”€ Build connector & spawn outbound publishers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let connector = WebSocketConnectorImpl::new(client_mgr, self.raw_payload); + connector.spawn_outbound_publishers(db, outbound_routes, snapshot_map)?; + + // โ”€โ”€ Start Axum server โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let additional = self.additional_routes.clone(); + start_server( + self.bind_addr, + self.ws_path.clone(), + session_ctx, + additional, + ); + + Ok(Arc::new(connector) as Arc) + }) + } +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Dynamic snapshot provider backed by the shared Mutex +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +struct DynMapSnapshot(Arc>>>); + +impl SnapshotProvider for DynMapSnapshot { + fn snapshot(&self, topic: &str) -> Option> { + self.0.lock().ok()?.get(topic).cloned() + } +} diff --git a/aimdb-websocket-connector/src/client_manager.rs 
b/aimdb-websocket-connector/src/client_manager.rs new file mode 100644 index 00000000..760afaf0 --- /dev/null +++ b/aimdb-websocket-connector/src/client_manager.rs @@ -0,0 +1,376 @@ +//! Shared client registry and topic-based fan-out. +//! +//! [`ClientManager`] tracks all connected WebSocket clients and their topic +//! subscriptions. When an outbound publisher task receives a new value it calls +//! [`ClientManager::broadcast`] which serializes the payload once and delivers it +//! to every client that has a matching subscription pattern. + +use std::{ + collections::HashMap, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, + }, +}; + +use axum::extract::ws::Message; +use dashmap::DashMap; +use serde_json::Value; +use tokio::sync::mpsc; + +use crate::{ + auth::{ClientId, ClientInfo}, + protocol::{topic_matches, ErrorCode, ServerMessage}, +}; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Per-client state +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// State tracked for each connected WebSocket client. +pub(crate) struct ClientState { + pub info: ClientInfo, + /// Channel used to push messages to the client's send loop. + pub sender: mpsc::Sender, + /// Topic patterns this client has subscribed to. 
+ pub subscriptions: Vec, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// ClientManager +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Shared registry of connected clients with subscription-based fan-out. +/// +/// Cloning this type is cheap โ€” all instances share the same underlying data. +#[derive(Clone)] +pub struct ClientManager { + /// Map from ClientId โ†’ per-client state. + /// + /// `DashMap` is used instead of `RwLock` to minimise lock contention + /// when many publisher tasks are broadcasting concurrently. + clients: Arc>, + /// Monotonically-increasing counter for generating unique `ClientId`s. + next_id: Arc, +} + +impl ClientManager { + /// Create a new, empty client registry. + pub fn new() -> Self { + Self { + clients: Arc::new(DashMap::new()), + next_id: Arc::new(AtomicU64::new(1)), + } + } + + /// Register a new client and return its id together with the message receiver. + /// + /// The caller (session task) owns the `mpsc::Receiver` and drives the + /// WebSocket send loop. + pub fn register( + &self, + info: ClientInfo, + channel_capacity: usize, + ) -> (ClientId, mpsc::Receiver) { + let (tx, rx) = mpsc::channel(channel_capacity); + let state = ClientState { + info, + sender: tx, + subscriptions: Vec::new(), + }; + let raw_id = state.info.id.0; + self.clients.insert(raw_id, state); + (ClientId(raw_id), rx) + } + + /// Remove a client from the registry (called when the connection closes). + pub fn unregister(&self, id: ClientId) { + self.clients.remove(&id.0); + } + + /// Return the number of currently connected clients. 
+ pub fn client_count(&self) -> usize { + self.clients.len() + } + + /// Allocate a new unique `ClientId`. + pub fn next_client_id(&self) -> ClientId { + ClientId(self.next_id.fetch_add(1, Ordering::Relaxed)) + } + + // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + // Subscription management (called from session recv loop) + // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + /// Add subscription patterns for the given client. + /// + /// Returns the list of newly-added patterns (already-subscribed patterns + /// are silently included without duplication). + pub fn subscribe(&self, id: ClientId, patterns: &[String]) -> Vec { + let mut added = Vec::new(); + if let Some(mut entry) = self.clients.get_mut(&id.0) { + for pat in patterns { + if !entry.subscriptions.contains(pat) { + entry.subscriptions.push(pat.clone()); + } + added.push(pat.clone()); + } + } + added + } + + /// Remove subscription patterns for the given client. + pub fn unsubscribe(&self, id: ClientId, patterns: &[String]) { + if let Some(mut entry) = self.clients.get_mut(&id.0) { + entry.subscriptions.retain(|s| !patterns.contains(s)); + } + } + + /// Returns `true` if the client has at least one matching subscription for `topic`. 
+ pub fn is_subscribed(&self, id: ClientId, topic: &str) -> bool { + self.clients + .get(&id.0) + .map(|e| e.subscriptions.iter().any(|p| topic_matches(p, topic))) + .unwrap_or(false) + } + + // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + // Fan-out + // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + /// Broadcast a serialized `data` payload to all clients subscribed to `topic`. + /// + /// The payload bytes (from the record serializer) are parsed as JSON once; + /// if parsing fails the raw bytes are embedded as a JSON string. + pub async fn broadcast(&self, topic: &str, payload_bytes: &[u8]) { + let payload = parse_payload(payload_bytes); + let ts = crate::protocol::now_ms(); + + let msg = ServerMessage::Data { + topic: topic.to_string(), + payload: Some(payload), + ts, + }; + + let text = match serde_json::to_string(&msg) { + Ok(t) => t, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::error!( + "Failed to serialize data message for topic '{}': {}", + topic, + _e + ); + return; + } + }; + + let ws_msg = Message::Text(text.into()); + + // Iterate clients without holding a write lock + let ids: Vec = self + .clients + .iter() + .filter_map(|entry| { + if entry.subscriptions.iter().any(|p| topic_matches(p, topic)) { + Some(*entry.key()) + } else { + None + } + }) + .collect(); + + for raw_id in ids { + if let Some(entry) = self.clients.get(&raw_id) { + let _ = entry.sender.try_send(ws_msg.clone()); + } + } + } + + /// Broadcast raw payload bytes directly to all subscribed clients as a + /// WebSocket text frame โ€” **no `ServerMessage` envelope**. 
+ /// + /// Use this (with `raw_payload = true` on the connector builder) when the + /// serializer already produces the complete JSON the client expects. + pub async fn broadcast_raw(&self, topic: &str, payload_bytes: &[u8]) { + let text = match std::str::from_utf8(payload_bytes) { + Ok(s) => s.to_string(), + Err(_) => { + #[cfg(feature = "tracing")] + tracing::error!("broadcast_raw: payload for '{}' is not valid UTF-8", topic); + return; + } + }; + + let ws_msg = Message::Text(text.into()); + + let ids: Vec = self + .clients + .iter() + .filter_map(|entry| { + if entry.subscriptions.iter().any(|p| topic_matches(p, topic)) { + Some(*entry.key()) + } else { + None + } + }) + .collect(); + + for raw_id in ids { + if let Some(entry) = self.clients.get(&raw_id) { + let _ = entry.sender.try_send(ws_msg.clone()); + } + } + } + + /// Send a snapshot (late-join current value) to a single client. + pub async fn send_snapshot(&self, id: ClientId, topic: &str, payload_bytes: &[u8]) { + let payload = parse_payload(payload_bytes); + let msg = ServerMessage::Snapshot { + topic: topic.to_string(), + payload: Some(payload), + }; + + self.send_to(id, &msg).await; + } + + /// Send an error message to a single client. + pub async fn send_error( + &self, + id: ClientId, + code: ErrorCode, + topic: Option, + message: impl Into, + ) { + let msg = ServerMessage::Error { + code, + topic, + message: message.into(), + }; + self.send_to(id, &msg).await; + } + + /// Send a `subscribed` acknowledgement to a single client. + pub async fn send_subscribed(&self, id: ClientId, topics: Vec) { + let msg = ServerMessage::Subscribed { topics }; + self.send_to(id, &msg).await; + } + + /// Send a `pong` to a single client. 
+ pub async fn send_pong(&self, id: ClientId) { + self.send_to(id, &ServerMessage::Pong).await; + } + + // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + // Helpers + // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + + async fn send_to(&self, id: ClientId, msg: &ServerMessage) { + let text = match serde_json::to_string(msg) { + Ok(t) => t, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::error!("Failed to serialize message: {}", _e); + return; + } + }; + + if let Some(entry) = self.clients.get(&id.0) { + let _ = entry.sender.try_send(Message::Text(text.into())); + } + } + + /// Return the `ClientInfo` for the given id, if still connected. + pub fn client_info(&self, id: ClientId) -> Option { + self.clients.get(&id.0).map(|e| e.info.clone()) + } + + /// Returns a snapshot of (topic, subscribed-client-count) pairs for monitoring. + pub fn subscription_stats(&self) -> HashMap { + let mut stats: HashMap = HashMap::new(); + for entry in self.clients.iter() { + for pat in &entry.subscriptions { + *stats.entry(pat.clone()).or_insert(0) += 1; + } + } + stats + } +} + +impl Default for ClientManager { + fn default() -> Self { + Self::new() + } +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Helpers +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Parse raw bytes as JSON, falling back to a JSON string if parsing fails. 
+fn parse_payload(bytes: &[u8]) -> Value { + serde_json::from_slice(bytes) + .unwrap_or_else(|_| Value::String(String::from_utf8_lossy(bytes).into_owned())) +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Tests +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +#[cfg(test)] +mod tests { + use super::*; + use crate::auth::Permissions; + use std::net::{IpAddr, Ipv4Addr, SocketAddr}; + + fn dummy_info(id: u64) -> ClientInfo { + ClientInfo { + id: ClientId(id), + remote_addr: SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 1234), + permissions: Permissions::allow_all(), + } + } + + #[tokio::test] + async fn register_and_unregister() { + let mgr = ClientManager::new(); + let info = dummy_info(1); + let (id, _rx) = mgr.register(info, 16); + assert_eq!(mgr.client_count(), 1); + mgr.unregister(id); + assert_eq!(mgr.client_count(), 0); + } + + #[tokio::test] + async fn subscribe_and_broadcast() { + let mgr = ClientManager::new(); + let info = dummy_info(42); + let (id, mut rx) = mgr.register(info, 16); + mgr.subscribe(id, &["sensors/#".to_string()]); + + mgr.broadcast("sensors/temperature/vienna", b"22.5").await; + + let msg = rx.recv().await.expect("should receive message"); + if let Message::Text(text) = msg { + let v: serde_json::Value = serde_json::from_str(&text).unwrap(); + assert_eq!(v["type"], "data"); + assert_eq!(v["topic"], "sensors/temperature/vienna"); + } else { + panic!("expected text message"); + } + } + + #[tokio::test] + async fn no_broadcast_when_not_subscribed() { + let mgr = ClientManager::new(); + let info = dummy_info(7); + let (id, mut rx) = mgr.register(info, 16); + mgr.subscribe(id, &["commands/#".to_string()]); + + // 
Broadcast to a topic the client is NOT subscribed to + mgr.broadcast("sensors/temperature/vienna", b"22.5").await; + + // Channel should be empty + assert!(rx.try_recv().is_err()); + } +} diff --git a/aimdb-websocket-connector/src/connector.rs b/aimdb-websocket-connector/src/connector.rs new file mode 100644 index 00000000..5b0cebe8 --- /dev/null +++ b/aimdb-websocket-connector/src/connector.rs @@ -0,0 +1,153 @@ +//! WebSocket connector implementation (`Connector` trait). +//! +//! [`WebSocketConnectorImpl`] is the live connector instance built by +//! [`crate::builder::WebSocketConnectorBuilder`]. +//! +//! # Outbound publishing +//! +//! Each outbound route (`link_to("ws://โ€ฆ")`) gets a dedicated Tokio task: +//! +//! ```text +//! consumer.subscribe_any() โ†’ recv_any() โ†’ serializer() โ†’ ClientManager::broadcast() +//! ``` +//! +//! # Inbound routing +//! +//! Inbound writes from WebSocket clients go through the shared [`Router`] +//! (same infrastructure as MQTT). The `Connector::publish()` impl is a +//! no-op because WebSocket inbound happens via the session receive loop instead +//! of the standard publish path. + +use std::{collections::HashMap, pin::Pin, sync::Arc}; + +use aimdb_core::{ + transport::{ConnectorConfig, PublishError}, + OutboundRoute, +}; + +use crate::client_manager::ClientManager; + +/// Live WebSocket connector returned by `build()`. +pub struct WebSocketConnectorImpl { + pub(crate) client_mgr: ClientManager, + /// When `true`, outbound data bypasses the `ServerMessage::Data` envelope + /// and sends the serializer bytes directly as a WebSocket text frame. + pub(crate) raw_payload: bool, +} + +impl WebSocketConnectorImpl { + pub(crate) fn new(client_mgr: ClientManager, raw_payload: bool) -> Self { + Self { + client_mgr, + raw_payload, + } + } + + /// Spawn one Tokio task per outbound route. + /// + /// Each task: + /// 1. Calls `consumer.subscribe_any()` to get a type-erased reader. + /// 2. Loops calling `reader.recv_any()`. + /// 3. 
Runs the serializer. + /// 4. Broadcasts the bytes via `ClientManager::broadcast()`. + pub(crate) fn spawn_outbound_publishers( + &self, + db: &aimdb_core::builder::AimDb, + outbound_routes: Vec, + snapshot_map: Arc>>>, + ) -> aimdb_core::DbResult<()> + where + R: aimdb_executor::Spawn + 'static, + { + let runtime = db.runtime(); + let raw_payload = self.raw_payload; + + for (default_topic, consumer, serializer, _config, topic_provider) in outbound_routes { + let client_mgr = self.client_mgr.clone(); + let snap = snapshot_map.clone(); + let default_topic_clone = default_topic.clone(); + + runtime.spawn(async move { + let mut reader = match consumer.subscribe_any().await { + Ok(r) => r, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::error!( + "WS outbound: failed to subscribe for '{}': {:?}", + default_topic_clone, + _e + ); + return; + } + }; + + #[cfg(feature = "tracing")] + tracing::info!( + "WS outbound publisher started for topic: {}", + default_topic_clone + ); + + while let Ok(value_any) = reader.recv_any().await { + // Resolve topic (dynamic or static) + let topic = topic_provider + .as_ref() + .and_then(|p| p.topic_any(&*value_any)) + .unwrap_or_else(|| default_topic_clone.clone()); + + // Serialize + let bytes = match serializer(&*value_any) { + Ok(b) => b, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::error!( + "WS outbound: serialize error for '{}': {:?}", + topic, + _e + ); + continue; + } + }; + + // Update snapshot cache for late-join + { + let mut map = snap.lock().unwrap(); + map.insert(topic.clone(), bytes.clone()); + } + + // Fan-out to subscribed clients + if raw_payload { + client_mgr.broadcast_raw(&topic, &bytes).await; + } else { + client_mgr.broadcast(&topic, &bytes).await; + } + } + + #[cfg(feature = "tracing")] + tracing::info!( + "WS outbound publisher stopped for topic: {}", + default_topic_clone + ); + })?; + } + + Ok(()) + } +} + +// 
โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Connector trait +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +impl aimdb_core::transport::Connector for WebSocketConnectorImpl { + /// WebSocket inbound is driven by the session receive loop, not by + /// `publish()`. This implementation exists only to satisfy the trait and + /// will never be called in normal operation. + fn publish( + &self, + _destination: &str, + _config: &ConnectorConfig, + _payload: &[u8], + ) -> Pin> + Send + '_>> { + Box::pin(async move { Ok(()) }) + } +} diff --git a/aimdb-websocket-connector/src/lib.rs b/aimdb-websocket-connector/src/lib.rs new file mode 100644 index 00000000..10183b29 --- /dev/null +++ b/aimdb-websocket-connector/src/lib.rs @@ -0,0 +1,67 @@ +//! # aimdb-websocket-connector +//! +//! First-class WebSocket connector for AimDB โ€” real-time bidirectional streaming. +//! +//! Replaces the `tap()` + `tokio::sync::broadcast` workaround with a proper +//! [`ConnectorBuilder`][aimdb_core::ConnectorBuilder] implementation that +//! integrates cleanly with the existing MQTT/KNX infrastructure. +//! +//! ## Quick Start +//! +//! ```rust,ignore +//! use aimdb_tokio_adapter::TokioAdapter; +//! use aimdb_websocket_connector::WebSocketConnector; +//! +//! let runtime = TokioAdapter::new(); +//! +//! let db = AimDbBuilder::new() +//! .runtime(runtime) +//! .with_connector( +//! WebSocketConnector::new() +//! .bind("0.0.0.0:8080") +//! .path("/ws") +//! .with_late_join(true), +//! ) +//! .configure::(TempKey::Vienna, |reg| { +//! reg.buffer(BufferCfg::SpmcRing { capacity: 100 }) +//! .link_from("mqtt://sensors/vienna/temperature") +//! 
.with_deserializer(Temperature::from_bytes) +//! .finish() +//! .link_to("ws://sensors/temperature/vienna") +//! .with_serializer(|t: &Temperature| { +//! serde_json::to_vec(t).map_err(Into::into) +//! }) +//! .finish(); +//! }) +//! .build() +//! .await?; +//! ``` +//! +//! ## Wire Protocol +//! +//! See [`protocol`] for the full message specification. +//! +//! ## Authentication +//! +//! See [`auth`] for the [`AuthHandler`][auth::AuthHandler] trait. + +pub mod auth; +pub mod builder; +pub mod client_manager; +pub mod connector; +pub mod protocol; +pub(crate) mod server; +pub(crate) mod session; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Public re-exports +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// The primary entry point โ€” use this to create a WebSocket connector. +/// +/// This is a type alias for [`builder::WebSocketConnectorBuilder`]. +pub type WebSocketConnector = builder::WebSocketConnectorBuilder; + +pub use auth::{AuthError, AuthHandler, AuthRequest, ClientId, ClientInfo, NoAuth, Permissions}; +pub use client_manager::ClientManager; +pub use protocol::{ClientMessage, ErrorCode, ServerMessage}; diff --git a/aimdb-websocket-connector/src/protocol.rs b/aimdb-websocket-connector/src/protocol.rs new file mode 100644 index 00000000..6e16c81f --- /dev/null +++ b/aimdb-websocket-connector/src/protocol.rs @@ -0,0 +1,209 @@ +//! Wire protocol types and topic matching for the WebSocket connector. +//! +//! # Server โ†’ Client messages +//! +//! - `data` โ€” live record push +//! - `snapshot` โ€” late-join current value +//! - `subscribed` โ€” subscription acknowledgement +//! - `error` โ€” per-operation error +//! 
- `pong` โ€” response to client ping +//! +//! # Client โ†’ Server messages +//! +//! - `subscribe` โ€” subscribe to one or more topics (supports wildcards) +//! - `unsubscribe` โ€” cancel subscriptions +//! - `write` โ€” inbound value for a `link_from("ws://โ€ฆ")` record +//! - `ping` โ€” keepalive ping + +use serde::{Deserialize, Serialize}; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Server โ†’ Client +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// A message sent from the server to a connected WebSocket client. +#[derive(Debug, Clone, Serialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ServerMessage { + /// Live data push from an outbound route. + Data { + topic: String, + #[serde(skip_serializing_if = "Option::is_none")] + payload: Option, + /// Server-side dispatch timestamp (milliseconds since Unix epoch). + ts: u64, + }, + + /// Late-join snapshot โ€” current value sent when a client subscribes. + Snapshot { + topic: String, + #[serde(skip_serializing_if = "Option::is_none")] + payload: Option, + }, + + /// Confirmation sent once subscriptions are recorded. + Subscribed { topics: Vec }, + + /// Per-operation error. + Error { + code: ErrorCode, + #[serde(skip_serializing_if = "Option::is_none")] + topic: Option, + message: String, + }, + + /// Response to a client `ping` message. + Pong, +} + +/// Machine-readable error codes sent in `ServerMessage::Error`. 
+#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum ErrorCode { + Unauthorized, + Forbidden, + UnknownTopic, + SerializationError, + WriteError, + ServerError, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Client โ†’ Server +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// A message received from a WebSocket client. +#[derive(Debug, Clone, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ClientMessage { + /// Subscribe to one or more topics (wildcards supported). + Subscribe { topics: Vec }, + + /// Unsubscribe from one or more topics. + Unsubscribe { topics: Vec }, + + /// Write a value to an inbound record (`link_from("ws://โ€ฆ")`). + Write { + topic: String, + payload: serde_json::Value, + }, + + /// Keepalive ping. + Ping, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Topic matching +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Returns `true` if `topic` matches `pattern`. 
///
/// Follows MQTT-style wildcard conventions, with one deviation: the
/// single-level wildcard is written `*` here, where MQTT itself uses `+`.
///
/// | Pattern  | Semantics                         |
/// |----------|-----------------------------------|
/// | `#`      | Multi-level wildcard (all topics) |
/// | `a/#`    | Everything under `a/`             |
/// | `a/*/c`  | Single-level wildcard in segment  |
/// | `a/b/c`  | Exact match                       |
pub fn topic_matches(pattern: &str, topic: &str) -> bool {
    // Cheap checks first: literal equality, then the match-everything pattern.
    if pattern == topic || pattern == "#" {
        return true;
    }

    // `prefix/#` with a purely literal prefix: the topic must start with the
    // prefix and either end exactly there or continue with a `/` separator
    // (so `sensors/#` matches `sensors` and `sensors/x`, but not `sensorsX`).
    // Patterns that mix wildcards into the prefix fall through to the segment
    // walk below, which understands `#` at any position.
    if let Some(prefix) = pattern.strip_suffix("/#") {
        if !prefix.contains('*') && !prefix.contains('#') {
            return match topic.strip_prefix(prefix) {
                Some(rest) => rest.is_empty() || rest.starts_with('/'),
                None => false,
            };
        }
    }

    // Walk both strings one `/`-separated segment at a time.
    let mut pattern_segs = pattern.split('/');
    let mut topic_segs = topic.split('/');

    loop {
        match (pattern_segs.next(), topic_segs.next()) {
            // `#` swallows the remainder of the topic, however deep.
            (Some("#"), _) => return true,
            // `*` consumes exactly one existing topic segment.
            (Some("*"), Some(_)) => continue,
            // Literal segments must match exactly.
            (Some(p), Some(t)) if p == t => continue,
            // Both iterators exhausted at the same time: full match.
            (None, None) => return true,
            // Length mismatch or literal mismatch.
            _ => return false,
        }
    }
}

// Returns the current milliseconds since the Unix epoch (for `ts` fields).
+pub fn now_ms() -> u64 { + use std::time::{SystemTime, UNIX_EPOCH}; + SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_millis() as u64 +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Tests +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn exact_match() { + assert!(topic_matches("a/b/c", "a/b/c")); + assert!(!topic_matches("a/b/c", "a/b/d")); + } + + #[test] + fn hash_wildcard() { + assert!(topic_matches("#", "anything/goes/here")); + assert!(topic_matches("#", "a")); + } + + #[test] + fn prefix_hash_wildcard() { + assert!(topic_matches("sensors/#", "sensors/temperature/vienna")); + assert!(topic_matches("sensors/#", "sensors/humidity/berlin")); + assert!(!topic_matches("sensors/#", "commands/setpoint")); + // Edge: prefix itself + assert!(topic_matches("sensors/#", "sensors")); + } + + #[test] + fn star_wildcard() { + assert!(topic_matches( + "sensors/temperature/*", + "sensors/temperature/vienna" + )); + assert!(topic_matches( + "sensors/temperature/*", + "sensors/temperature/berlin" + )); + assert!(!topic_matches( + "sensors/temperature/*", + "sensors/humidity/vienna" + )); + assert!(!topic_matches( + "sensors/temperature/*", + "sensors/temperature/a/b" + )); + } + + #[test] + fn mixed_wildcards() { + assert!(topic_matches("a/*/c/#", "a/b/c/d/e/f")); + assert!(!topic_matches("a/*/c/#", "a/b/x/d")); + } +} diff --git a/aimdb-websocket-connector/src/server.rs b/aimdb-websocket-connector/src/server.rs new file mode 100644 index 00000000..390238d3 --- /dev/null +++ b/aimdb-websocket-connector/src/server.rs @@ -0,0 +1,170 @@ +//! 
Axum WebSocket server and upgrade handler. +//! +//! The server is started by [`start_server`] which binds to the configured +//! address, mounts the WebSocket endpoint at the configured path, and +//! optionally mounts additional user-provided Axum routes. +//! +//! # Health endpoint +//! +//! `GET /health` returns `200 OK` with a JSON body: +//! ```json +//! { "status": "ok", "clients": 3, "uptime_secs": 120 } +//! ``` + +use std::{collections::HashMap, net::SocketAddr, time::Instant}; + +use axum::{ + extract::{ + ws::{WebSocket, WebSocketUpgrade}, + ConnectInfo, Query, State, + }, + http::{HeaderMap, StatusCode}, + response::IntoResponse, + routing::get, + Json, Router, +}; +use tower_http::cors::CorsLayer; + +use crate::{ + auth::{AuthError, AuthRequest, ClientInfo}, + session::{run_session, SessionContext}, +}; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Shared server state +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +#[derive(Clone)] +pub(crate) struct ServerState { + pub session_ctx: SessionContext, + pub started_at: Instant, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Server start +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Start the WebSocket Axum server and return immediately (the server runs in +/// a background Tokio task). 
+/// +/// # Arguments +/// +/// * `bind_addr` โ€” TCP address to listen on. +/// * `ws_path` โ€” URL path for the WebSocket endpoint (e.g., `"/ws"`). +/// * `session_ctx` โ€” Shared session context (auth, router, client manager, โ€ฆ). +/// * `additional_routes` โ€” Optional user-supplied Axum `Router` that is merged +/// into the server (useful for REST + WebSocket on the same port). +pub(crate) fn start_server( + bind_addr: SocketAddr, + ws_path: String, + session_ctx: SessionContext, + additional_routes: Option, +) { + let state = ServerState { + session_ctx, + started_at: Instant::now(), + }; + + // Apply state first so the router becomes `Router<()>`, which can then be + // merged with user-supplied `additional_routes: Router<()>` without a + // type-parameter mismatch. + let ws_app = Router::new() + .route(&ws_path, get(ws_upgrade_handler)) + .route("/health", get(health_handler)) + .with_state(state) + .layer(CorsLayer::permissive()); + + let app = if let Some(extra) = additional_routes { + ws_app.merge(extra) + } else { + ws_app + }; + + tokio::spawn(async move { + let listener = match tokio::net::TcpListener::bind(bind_addr).await { + Ok(l) => l, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::error!("WebSocket connector failed to bind {}: {}", bind_addr, _e); + return; + } + }; + + #[cfg(feature = "tracing")] + tracing::info!("WebSocket connector listening on {}", bind_addr); + + if let Err(_e) = axum::serve( + listener, + app.into_make_service_with_connect_info::(), + ) + .await + { + #[cfg(feature = "tracing")] + tracing::error!("WebSocket server error: {}", _e); + } + }); +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Handlers +// 
โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// WebSocket upgrade handler. +/// +/// Performs authentication before agreeing to upgrade; rejects unauthenticated +/// connections with HTTP 401. +async fn ws_upgrade_handler( + ws: WebSocketUpgrade, + ConnectInfo(remote_addr): ConnectInfo, + headers: HeaderMap, + Query(query_params): Query>, + State(state): State, +) -> impl IntoResponse { + let auth_req = AuthRequest { + headers, + query_params, + remote_addr, + }; + + // Authenticate โ€” returns permissions or rejects + let permissions = match state.session_ctx.auth.authenticate(&auth_req).await { + Ok(p) => p, + Err(AuthError { message }) => { + #[cfg(feature = "tracing")] + tracing::warn!("WebSocket auth rejected from {}: {}", remote_addr, message); + return (StatusCode::UNAUTHORIZED, message).into_response(); + } + }; + + // Allocate a client id before upgrading so it's available synchronously + let id = state.session_ctx.client_mgr.next_client_id(); + let info = ClientInfo { + id, + remote_addr, + permissions, + }; + + #[cfg(feature = "tracing")] + tracing::debug!( + "{}: upgrading WebSocket connection from {}", + id, + remote_addr + ); + + let ctx = state.session_ctx.clone(); + + ws.on_upgrade(move |socket: WebSocket| run_session(socket, info, ctx)) + .into_response() +} + +/// Health check endpoint. 
+async fn health_handler(State(state): State) -> impl IntoResponse { + let uptime_secs = state.started_at.elapsed().as_secs(); + let clients = state.session_ctx.client_mgr.client_count(); + + Json(serde_json::json!({ + "status": "ok", + "clients": clients, + "uptime_secs": uptime_secs, + })) +} diff --git a/aimdb-websocket-connector/src/session.rs b/aimdb-websocket-connector/src/session.rs new file mode 100644 index 00000000..bc880db4 --- /dev/null +++ b/aimdb-websocket-connector/src/session.rs @@ -0,0 +1,310 @@ +//! Per-client WebSocket session management. +//! +//! Each accepted connection spawns three cooperating tasks: +//! +//! 1. **Send loop** โ€” drains the per-client `mpsc` channel and writes frames to +//! the WebSocket. +//! 2. **Recv loop** โ€” reads frames from the WebSocket and dispatches +//! `subscribe`, `unsubscribe`, `write`, and `ping` messages. +//! 3. A **cleanup** fence โ€” unregisters the client from the [`ClientManager`] +//! when either loop finishes. +//! +//! The session receives an already-authenticated [`ClientInfo`] and the shared +//! [`ClientManager`] / inbound [`Router`] from the server. + +use std::sync::Arc; + +use axum::extract::ws::{Message, WebSocket}; +use futures_util::{SinkExt, StreamExt}; +use tokio::sync::mpsc; + +use crate::{ + auth::{AuthHandler, ClientId, ClientInfo}, + client_manager::ClientManager, + protocol::{ClientMessage, ErrorCode}, +}; + +// Re-export so server.rs can use it easily. +pub use aimdb_core::router::Router; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Session context +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Shared context injected into every session. 
+#[derive(Clone)]
+pub(crate) struct SessionContext {
+    pub client_mgr: ClientManager,
+    /// Inbound router: maps WebSocket topics โ†’ AimDB producers.
+    pub router: Arc<Router>,
+    pub auth: Arc<dyn AuthHandler>,
+    /// Channel capacity used when registering a new client.
+    pub channel_capacity: usize,
+    /// Whether to send current values on subscribe (late-join).
+    pub late_join: bool,
+    /// Snapshot provider: topic โ†’ serialized current value.
+    ///
+    /// Set by the connector builder after collecting outbound routes.
+    pub snapshot_provider: Arc<dyn SnapshotProvider>,
+    /// Topics to subscribe every new client to automatically on connect.
+    ///
+    /// Use `["#"]` to push all data to all clients without requiring an
+    /// explicit `{"type":"subscribe"}` message from the client.
+    pub auto_subscribe_topics: Vec<String>,
+}
+
+/// Provides the current serialized value of a record for late-join snapshots.
+pub(crate) trait SnapshotProvider: Send + Sync + 'static {
+    /// Return the latest serialized value for the given topic, if available.
+    fn snapshot(&self, topic: &str) -> Option<Vec<u8>>;
+}
+
+/// A snapshot provider that always returns `None` (used when late-join is disabled
+/// or no snapshot data is available).
+pub(crate) struct NoSnapshot;
+
+impl SnapshotProvider for NoSnapshot {
+    fn snapshot(&self, _topic: &str) -> Option<Vec<u8>> {
+        None
+    }
+}
+
+/// A snapshot provider backed by a `HashMap`.
+#[cfg(test)]
+#[allow(dead_code)]
+pub(crate) struct MapSnapshot(pub std::collections::HashMap<String, Vec<u8>>);
+
+#[cfg(test)]
+impl SnapshotProvider for MapSnapshot {
+    fn snapshot(&self, topic: &str) -> Option<Vec<u8>> {
+        self.0.get(topic).cloned()
+    }
+}
+
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+// Session entry point
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+
+/// Drive a single WebSocket connection to completion.
+///
+/// This function is `await`ed inside `tokio::spawn` by the Axum upgrade handler.
+pub(crate) async fn run_session(socket: WebSocket, info: ClientInfo, ctx: SessionContext) {
+    let id = info.id;
+
+    // Register client and obtain the per-client receiver
+    let (_, rx) = ctx.client_mgr.register(info, ctx.channel_capacity);
+
+    // Auto-subscribe: subscribe all clients to the configured topics immediately
+    // on connect, without requiring a Subscribe message from the client.
+    if !ctx.auto_subscribe_topics.is_empty() {
+        ctx.client_mgr.subscribe(id, &ctx.auto_subscribe_topics);
+    }
+
+    #[cfg(feature = "tracing")]
+    tracing::debug!("{}: session started", id);
+
+    let (ws_sender, ws_receiver) = socket.split();
+
+    // Spawn the send loop (mpsc receiver โ†’ WebSocket sender)
+    let mgr_send = ctx.client_mgr.clone();
+    let send_handle = tokio::spawn(send_loop(ws_sender, rx, id));
+
+    // Run the receive loop in-place (WebSocket receiver โ†’ router/subscriptions)
+    recv_loop(ws_receiver, id, ctx).await;
+
+    // Receiving finished; abort sender and unregister
+    send_handle.abort();
+    mgr_send.unregister(id);
+
+    #[cfg(feature = "tracing")]
+    tracing::debug!("{}: session ended", id);
+}
+
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+// Send loop
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+
+async fn send_loop(
+    mut ws_sender: futures_util::stream::SplitSink<WebSocket, Message>,
+    mut rx: mpsc::Receiver<Message>,
+    #[allow(unused_variables)] id: ClientId,
+) {
+    while let Some(msg) = rx.recv().await {
+        if ws_sender.send(msg).await.is_err() {
+            #[cfg(feature = "tracing")]
+            tracing::debug!("{}: send failed โ€” closing", id);
+            break;
+        }
+    }
+}
+
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+// Receive loop
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+
+async fn recv_loop(
+    mut ws_receiver: futures_util::stream::SplitStream<WebSocket>,
+    
id: ClientId,
+    ctx: SessionContext,
+) {
+    while let Some(result) = ws_receiver.next().await {
+        let raw = match result {
+            Ok(msg) => msg,
+            Err(_e) => {
+                #[cfg(feature = "tracing")]
+                tracing::debug!("{}: recv error: {}", id, _e);
+                break;
+            }
+        };
+
+        match raw {
+            Message::Text(text) => {
+                handle_text(id, text.as_str(), &ctx).await;
+            }
+            Message::Binary(bytes) => {
+                handle_text(id, &String::from_utf8_lossy(&bytes), &ctx).await;
+            }
+            Message::Close(_) => {
+                #[cfg(feature = "tracing")]
+                tracing::debug!("{}: received close frame", id);
+                break;
+            }
+            // WebSocket ping/pong frames are handled transparently by axum.
+            _ => {}
+        }
+    }
+}
+
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+// Message dispatch
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•
+
+async fn handle_text(id: ClientId, text: &str, ctx: &SessionContext) {
+    let msg: ClientMessage = match serde_json::from_str(text) {
+        Ok(m) => m,
+        Err(_e) => {
+            #[cfg(feature = "tracing")]
+            tracing::warn!("{}: invalid JSON from client: {}", id, _e);
+            ctx.client_mgr
+                .send_error(
+                    id,
+                    ErrorCode::SerializationError,
+                    None,
+                    "Invalid JSON message",
+                )
+                .await;
+            return;
+        }
+    };
+
+    match msg {
+        ClientMessage::Subscribe { topics } => handle_subscribe(id, topics, ctx).await,
+        ClientMessage::Unsubscribe { topics } => {
+            ctx.client_mgr.unsubscribe(id, &topics);
+        }
+        ClientMessage::Write { topic, payload } => handle_write(id, topic, payload, ctx).await,
+        ClientMessage::Ping => {
+            ctx.client_mgr.send_pong(id).await;
+        }
+    }
+}
+
+async fn handle_subscribe(id: ClientId, topics: Vec<String>, ctx: &SessionContext) {
+    // Authorise each requested pattern
+    let client_info = match 
ctx.client_mgr.client_info(id) { + Some(i) => i, + None => return, + }; + + let mut allowed = Vec::new(); + + for topic in &topics { + if ctx.auth.authorize_subscribe(&client_info, topic).await { + allowed.push(topic.clone()); + } else { + ctx.client_mgr + .send_error( + id, + ErrorCode::Forbidden, + Some(topic.clone()), + "Not authorised to subscribe to this topic", + ) + .await; + } + } + + if allowed.is_empty() { + return; + } + + // Register subscriptions + let confirmed = ctx.client_mgr.subscribe(id, &allowed); + + // Send acknowledgement + ctx.client_mgr.send_subscribed(id, confirmed.clone()).await; + + // Late-join: send current values for each exact topic pattern that resolves + if ctx.late_join { + for pattern in confirmed { + if let Some(bytes) = ctx.snapshot_provider.snapshot(&pattern) { + ctx.client_mgr.send_snapshot(id, &pattern, &bytes).await; + } + } + } +} + +async fn handle_write( + id: ClientId, + topic: String, + payload: serde_json::Value, + ctx: &SessionContext, +) { + // Authorise + let client_info = match ctx.client_mgr.client_info(id) { + Some(i) => i, + None => return, + }; + + if !ctx.auth.authorize_write(&client_info, &topic).await { + ctx.client_mgr + .send_error( + id, + ErrorCode::Forbidden, + Some(topic.clone()), + "Write permission denied", + ) + .await; + return; + } + + // Serialize payload back to bytes for the router + let bytes = match serde_json::to_vec(&payload) { + Ok(b) => b, + Err(_e) => { + ctx.client_mgr + .send_error( + id, + ErrorCode::SerializationError, + Some(topic.clone()), + "Failed to re-serialize payload", + ) + .await; + return; + } + }; + + // Dispatch through the inbound router + if let Err(_e) = ctx.router.route(&topic, &bytes).await { + #[cfg(feature = "tracing")] + tracing::warn!("{}: write routing failed for '{}': {}", id, topic, _e); + + ctx.client_mgr + .send_error( + id, + ErrorCode::UnknownTopic, + Some(topic), + "No inbound route for this topic", + ) + .await; + } +} From 
0a1423ddd637c9f952e38cf09105c25190956adc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Fri, 27 Feb 2026 20:33:12 +0000 Subject: [PATCH 12/46] feat: add WebSocket connector support in main.rs and Cargo.toml generation --- aimdb-codegen/src/rust.rs | 51 ++++++++++++++++++++++++++++++++------- 1 file changed, 42 insertions(+), 9 deletions(-) diff --git a/aimdb-codegen/src/rust.rs b/aimdb-codegen/src/rust.rs index 82f07c14..2789a8cc 100644 --- a/aimdb-codegen/src/rust.rs +++ b/aimdb-codegen/src/rust.rs @@ -234,6 +234,7 @@ pub fn generate_main_rs(state: &ArchitectureState, binary_name: &str) -> Option< .filter_map(|c| match c.protocol.as_str() { "mqtt" => Some(quote! { use aimdb_mqtt_connector::MqttConnector; }), "knx" => Some(quote! { use aimdb_knx_connector::KnxConnector; }), + "ws" => Some(quote! { use aimdb_websocket_connector::WebSocketConnector; }), _ => None, }) .collect(); @@ -249,6 +250,12 @@ pub fn generate_main_rs(state: &ArchitectureState, binary_name: &str) -> Option< let ctor: TokenStream = match c.protocol.as_str() { "mqtt" => quote! { MqttConnector::new(&#var_ident) }, "knx" => quote! { KnxConnector::new(&#var_ident) }, + "ws" => quote! { + WebSocketConnector::new() + .bind(#var_ident.parse::() + .expect("invalid WebSocket bind address")) + .path("/ws") + }, _ => { let msg = format!("build connector for protocol '{}'", c.protocol); quote! 
{ todo!(#msg) } @@ -449,6 +456,7 @@ pub fn generate_binary_cargo_toml(state: &ArchitectureState, binary_name: &str) let has_mqtt = bin.external_connectors.iter().any(|c| c.protocol == "mqtt"); let has_knx = bin.external_connectors.iter().any(|c| c.protocol == "knx"); + let has_ws = bin.external_connectors.iter().any(|c| c.protocol == "ws"); let mut optional_connector_deps = String::new(); if has_mqtt { @@ -461,6 +469,11 @@ pub fn generate_binary_cargo_toml(state: &ArchitectureState, binary_name: &str) "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", ); } + if has_ws { + optional_connector_deps.push_str( + "aimdb-websocket-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", + ); + } let out = format!( "# @generated โ€” do not edit manually.\n\ @@ -1268,10 +1281,10 @@ pub fn generate_hub_cargo_toml(state: &ArchitectureState) -> String { .records .iter() .any(|r| r.connectors.iter().any(|c| c.protocol == "knx")); - let has_websocket = state + let has_ws = state .records .iter() - .any(|r| r.connectors.iter().any(|c| c.protocol == "websocket")); + .any(|r| r.connectors.iter().any(|c| c.protocol == "ws")); let mut connector_deps = String::new(); if has_mqtt { @@ -1284,8 +1297,10 @@ pub fn generate_hub_cargo_toml(state: &ArchitectureState) -> String { "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", ); } - if has_websocket { - connector_deps.push_str("# aimdb-websocket-connector is in aimdb-pro โ€” add path dep here\n# aimdb-websocket-connector = { path = \"../../aimdb-pro/aimdb-websocket-connector\" }\n"); + if has_ws { + connector_deps.push_str( + "aimdb-websocket-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n", + ); } format!( @@ -1335,6 +1350,10 @@ pub fn generate_hub_main_rs(state: &ArchitectureState) -> String { .records .iter() .any(|r| r.connectors.iter().any(|c| c.protocol == "knx")); + let has_ws = state + .records + .iter() + .any(|r| 
r.connectors.iter().any(|c| c.protocol == "ws")); // โ”€โ”€ Connector use statements โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ let connector_use_stmts: Vec = { @@ -1345,6 +1364,9 @@ pub fn generate_hub_main_rs(state: &ArchitectureState) -> String { if has_knx { v.push(quote! { use aimdb_knx_connector::KnxConnector; }); } + if has_ws { + v.push(quote! { use aimdb_websocket_connector::WebSocketConnector; }); + } v }; @@ -1364,6 +1386,14 @@ pub fn generate_hub_main_rs(state: &ArchitectureState) -> String { .unwrap_or_else(|_| "224.0.23.12:3671".to_string()); }); } + if has_ws { + ts.extend(quote! { + let ws_bind: std::net::SocketAddr = std::env::var("WS_BIND") + .unwrap_or_else(|_| "0.0.0.0:8080".to_string()) + .parse() + .expect("invalid WS_BIND address"); + }); + } ts }; @@ -1376,6 +1406,9 @@ pub fn generate_hub_main_rs(state: &ArchitectureState) -> String { if has_knx { v.push(quote! { .with_connector(KnxConnector::new(&knx_gateway)) }); } + if has_ws { + v.push(quote! { .with_connector(WebSocketConnector::new().bind(ws_bind).path("/ws")) }); + } v }; @@ -1474,11 +1507,11 @@ fn emit_transform_configure_block(rec: &RecordDef, task: &TaskDef) -> TokenStrea let key_type = format_ident!("{}Key", rec.name); let buffer_tokens = rec.buffer.to_tokens(rec.capacity); - // Only emit connector chain for mqtt/knx outbound (websocket is a pro feature) - let has_outbound = rec.connectors.iter().any(|c| { - matches!(c.direction, ConnectorDirection::Outbound) - && matches!(c.protocol.as_str(), "mqtt" | "knx") - }); + // Only emit connector chain for outbound connectors + let has_outbound = rec + .connectors + .iter() + .any(|c| matches!(c.direction, ConnectorDirection::Outbound)); let outbound_chain = if has_outbound { quote! 
{ .link_to(addr) From a60b26be17440488b47c2f97b76512f678086a9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Fri, 27 Feb 2026 21:47:43 +0000 Subject: [PATCH 13/46] feat: add AimDB WASM adapter design document - Introduced a new design document for the AimDB WASM adapter, detailing its architecture, motivation, and implementation plan. - Outlined the benefits of running the full dataflow engine in the browser, including native contract enforcement and offline capabilities. - Provided a comprehensive overview of the crate layout, dependency graph, and feature flags. - Included detailed sections on trait implementations, buffer design, TypeScript bindings, and WebSocket sync strategies. - Established a clear migration path for integrating the WASM adapter with existing AimDB UI components. --- .devcontainer/Dockerfile | 3 +- Cargo.lock | 81 ++ Cargo.toml | 1 + Makefile | 45 +- aimdb-wasm-adapter/Cargo.toml | 57 ++ aimdb-wasm-adapter/README.md | 52 ++ aimdb-wasm-adapter/src/buffer.rs | 323 ++++++++ aimdb-wasm-adapter/src/lib.rs | 63 ++ aimdb-wasm-adapter/src/logger.rs | 52 ++ aimdb-wasm-adapter/src/runtime.rs | 62 ++ aimdb-wasm-adapter/src/time.rs | 124 +++ docs/design/025-M12-wasm-adapter.md | 1182 +++++++++++++++++++++++++++ 12 files changed, 2040 insertions(+), 5 deletions(-) create mode 100644 aimdb-wasm-adapter/Cargo.toml create mode 100644 aimdb-wasm-adapter/README.md create mode 100644 aimdb-wasm-adapter/src/buffer.rs create mode 100644 aimdb-wasm-adapter/src/lib.rs create mode 100644 aimdb-wasm-adapter/src/logger.rs create mode 100644 aimdb-wasm-adapter/src/runtime.rs create mode 100644 aimdb-wasm-adapter/src/time.rs create mode 100644 docs/design/025-M12-wasm-adapter.md diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 68b93a7d..89ab4e02 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -63,7 +63,8 @@ ENV PATH="/home/$USERNAME/.cargo/bin:${PATH}" # Add embedded targets for AimDB MCU 
support RUN rustup target add thumbv7em-none-eabihf \ && rustup target add thumbv6m-none-eabi \ - && rustup target add thumbv7m-none-eabi + && rustup target add thumbv7m-none-eabi \ + && rustup target add wasm32-unknown-unknown # Install core AimDB development tools RUN cargo install cargo-audit cargo-watch cargo-expand diff --git a/Cargo.lock b/Cargo.lock index 1667d232..a8110ec4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -272,6 +272,23 @@ dependencies = [ "tracing", ] +[[package]] +name = "aimdb-wasm-adapter" +version = "0.5.0" +dependencies = [ + "aimdb-core", + "aimdb-executor", + "futures-util", + "js-sys", + "serde", + "serde-wasm-bindgen", + "serde_json", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test", + "web-sys", +] + [[package]] name = "aimdb-websocket-connector" version = "0.1.0" @@ -2120,6 +2137,16 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minicov" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4869b6a491569605d66d3952bcdf03df789e5b536e5f0cf7758a7f08a55ae24d" +dependencies = [ + "cc", + "walkdir", +] + [[package]] name = "mio" version = "1.1.0" @@ -2740,6 +2767,15 @@ version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "schannel" version = "0.1.28" @@ -2828,6 +2864,17 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-wasm-bindgen" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + [[package]] name = "serde_core" version = "1.0.228" @@ -3757,6 +3804,16 @@ dependencies = [ "vcell", ] +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "want" version = "0.3.1" @@ -3839,6 +3896,30 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-bindgen-test" +version = "0.3.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfc379bfb624eb59050b509c13e77b4eb53150c350db69628141abce842f2373" +dependencies = [ + "js-sys", + "minicov", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test-macro", +] + +[[package]] +name = "wasm-bindgen-test-macro" +version = "0.3.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "085b2df989e1e6f9620c1311df6c996e83fe16f57792b272ce1e024ac16a90f1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.108", +] + [[package]] name = "weather-hub" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index e37ed5d8..509e0f83 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,6 +14,7 @@ members = [ "aimdb-mqtt-connector", "aimdb-knx-connector", "aimdb-websocket-connector", + "aimdb-wasm-adapter", "tools/aimdb-cli", "tools/aimdb-mcp", "examples/knx-connector-demo-common", diff --git a/Makefile b/Makefile index 524c378c..f260efb5 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ # AimDB Makefile # Simple automation for common development tasks -.PHONY: help build test clean fmt fmt-check clippy doc all check test-embedded examples deny audit security publish publish-check +.PHONY: help build test clean fmt fmt-check clippy doc all check test-embedded test-wasm wasm wasm-test examples deny audit security 
publish publish-check .DEFAULT_GOAL := help # Colors for output @@ -28,6 +28,7 @@ help: @printf " $(YELLOW)Testing Commands:$(NC)\n" @printf " check Comprehensive development check (fmt + clippy + all tests)\n" @printf " test-embedded Test embedded/MCU cross-compilation compatibility\n" + @printf " test-wasm Test WASM cross-compilation compatibility\n" @printf "\n" @printf " $(YELLOW)Security & Quality:$(NC)\n" @printf " deny Check dependencies (licenses, advisories, bans)\n" @@ -38,6 +39,10 @@ help: @printf " publish-check Test crates.io publish (dry-run, no git commit required)\n" @printf " publish Publish all crates to crates.io (requires clean git state)\n" @printf "\n" + @printf " $(YELLOW)WASM Commands:$(NC)\n" + @printf " wasm Build WASM adapter with wasm-pack\n" + @printf " wasm-test Run WASM tests in headless browser\n" + @printf "\n" @printf " $(YELLOW)Convenience:$(NC)\n" @printf " all Build everything\n" @@ -70,6 +75,8 @@ build: cargo build --package aimdb-knx-connector --features "std,tokio-runtime" @printf "$(YELLOW) โ†’ Building WebSocket connector$(NC)\n" cargo build --package aimdb-websocket-connector --features "tokio-runtime" + @printf "$(YELLOW) โ†’ Building WASM adapter$(NC)\n" + cargo build --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime" test: @printf "$(GREEN)Running all tests (valid combinations)...$(NC)\n" @@ -108,7 +115,7 @@ test: fmt: @printf "$(GREEN)Formatting code (workspace members only)...$(NC)\n" - @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + @for pkg in 
aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Formatting $$pkg$(NC)\n"; \ cargo fmt -p $$pkg 2>/dev/null || true; \ done @@ -117,7 +124,7 @@ fmt: fmt-check: @printf "$(GREEN)Checking code formatting (workspace members only)...$(NC)\n" @FAILED=0; \ - for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Checking $$pkg$(NC)\n"; \ if ! 
cargo fmt -p $$pkg -- --check 2>&1; then \ printf "$(RED)โŒ Formatting check failed for $$pkg$(NC)\n"; \ @@ -172,6 +179,8 @@ clippy: cargo clippy --package aimdb-knx-connector --target thumbv7em-none-eabihf --no-default-features --features "embassy-runtime,defmt" -- -D warnings @printf "$(YELLOW) โ†’ Clippy on WebSocket connector$(NC)\n" cargo clippy --package aimdb-websocket-connector --features "tokio-runtime" --all-targets -- -D warnings + @printf "$(YELLOW) โ†’ Clippy on WASM adapter$(NC)\n" + cargo clippy --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime" -- -D warnings doc: @printf "$(GREEN)Generating dual-platform documentation...$(NC)\n" @@ -198,6 +207,8 @@ doc: cargo doc --package aimdb-mqtt-connector --no-default-features --features "embassy-runtime" --no-deps cargo doc --package aimdb-knx-connector --no-default-features --features "embassy-runtime" --no-deps @cp -r target/doc/* target/doc-final/embedded/ + @printf "$(YELLOW) โ†’ Building WASM/browser documentation$(NC)\n" + cargo doc --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime" --no-deps @printf "$(YELLOW) โ†’ Creating main index page$(NC)\n" @cp docs/index.html target/doc-final/index.html @printf "$(BLUE)Documentation generated at: file://$(PWD)/target/doc-final/index.html$(NC)\n" @@ -207,6 +218,12 @@ clean: cargo clean ## Testing commands +test-wasm: + @printf "$(BLUE)Testing WASM cross-compilation compatibility...$(NC)\n" + @printf "$(YELLOW) โ†’ Checking aimdb-wasm-adapter on wasm32-unknown-unknown target$(NC)\n" + cargo check --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime" + @printf "$(GREEN)โœ“ WASM target compatibility verified!$(NC)\n" + test-embedded: @printf "$(BLUE)Testing embedded/MCU cross-compilation compatibility...$(NC)\n" @printf "$(YELLOW) โ†’ Checking aimdb-data-contracts (no_std + alloc) on thumbv7em-none-eabihf target$(NC)\n" @@ -370,13 +387,33 @@ publish: @printf 
"$(BLUE)๐ŸŽ‰ AimDB v$(shell grep '^version' Cargo.toml | head -1 | cut -d '"' -f 2) is now live on crates.io!$(NC)\n" ## Convenience commands -check: fmt-check clippy test test-embedded deny +check: fmt-check clippy test test-embedded test-wasm deny @printf "$(GREEN)Comprehensive development checks completed!$(NC)\n" @printf "$(BLUE)โœ“ Code formatting verified$(NC)\n" @printf "$(BLUE)โœ“ Linter passed$(NC)\n" @printf "$(BLUE)โœ“ All valid feature combinations tested$(NC)\n" @printf "$(BLUE)โœ“ Embedded target compatibility verified$(NC)\n" + @printf "$(BLUE)โœ“ WASM target compatibility verified$(NC)\n" @printf "$(BLUE)โœ“ Dependencies verified (deny)$(NC)\n" +## WASM commands +wasm: + @printf "$(GREEN)Building WASM adapter with wasm-pack...$(NC)\n" + @if ! command -v wasm-pack >/dev/null 2>&1; then \ + printf "$(YELLOW) โš  wasm-pack not found, installing...$(NC)\n"; \ + cargo install wasm-pack --locked; \ + fi + cd aimdb-wasm-adapter && wasm-pack build --target web --out-dir pkg + @printf "$(GREEN)โœ“ WASM build complete! Output in aimdb-wasm-adapter/pkg/$(NC)\n" + +wasm-test: + @printf "$(GREEN)Running WASM tests in headless browser...$(NC)\n" + @if ! 
command -v wasm-pack >/dev/null 2>&1; then \ + printf "$(YELLOW) โš  wasm-pack not found, installing...$(NC)\n"; \ + cargo install wasm-pack --locked; \ + fi + cd aimdb-wasm-adapter && wasm-pack test --headless --chrome + @printf "$(GREEN)โœ“ WASM tests passed!$(NC)\n" + all: build test examples @printf "$(GREEN)Build and test completed!$(NC)\n" diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml new file mode 100644 index 00000000..dbe9e49b --- /dev/null +++ b/aimdb-wasm-adapter/Cargo.toml @@ -0,0 +1,57 @@ +[package] +name = "aimdb-wasm-adapter" +version = "0.5.0" +edition = "2021" +authors.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true +description = "WebAssembly runtime adapter for AimDB - browser-native async runtime support" +keywords = ["wasm", "webassembly", "browser", "async", "database"] +categories = ["wasm", "asynchronous", "database-implementations"] + +[lib] +crate-type = ["cdylib", "rlib"] + +[features] +default = ["wasm-runtime"] + +# Runtime features +wasm-runtime = ["wasm-bindgen", "wasm-bindgen-futures", "js-sys", "web-sys"] + +[dependencies] +# Executor traits (no_std compatible) +aimdb-executor = { version = "0.1.0", path = "../aimdb-executor", default-features = false } + +# Core AimDB types (alloc only โ€” no std, no tokio) +aimdb-core = { version = "0.5.0", path = "../aimdb-core", default-features = false, features = [ + "alloc", +] } + +# WASM bindings +wasm-bindgen = { version = "0.2", optional = true } +wasm-bindgen-futures = { version = "0.4", optional = true } +js-sys = { version = "0.3", optional = true } +web-sys = { version = "0.3", optional = true, features = [ + "console", + "Performance", + "Window", +] } + +# Serialization (no_std + alloc) +serde = { workspace = true } +serde_json = { workspace = true } +serde-wasm-bindgen = { version = "0.6", optional = true } + +# Async utilities (minimal, no_std compatible) +futures-util = { version = "0.3", 
default-features = false, features = [ + "alloc", +] } + +[lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = [ + 'cfg(feature, values("std"))', +] } + +[dev-dependencies] +wasm-bindgen-test = "0.3" diff --git a/aimdb-wasm-adapter/README.md b/aimdb-wasm-adapter/README.md new file mode 100644 index 00000000..fa5b4eee --- /dev/null +++ b/aimdb-wasm-adapter/README.md @@ -0,0 +1,52 @@ +# aimdb-wasm-adapter + +WebAssembly runtime adapter for AimDB โ€” browser-native async runtime support. + +## Overview + +This crate provides a WASM runtime adapter that enables the full AimDB dataflow +engine to run inside a web browser (or any `wasm32-unknown-unknown` host). + +Records, buffers, producers, consumers, and data-contract enforcement all +execute natively in WASM โ€” eliminating the need for a parallel validation +layer (Zod, JSON Schema) on the TypeScript side. + +## Platform Matrix + +| Target | Adapter | Buffer Primitive | Spawn Mechanism | +|--------|---------|------------------|-----------------| +| MCU | `aimdb-embassy-adapter` | `embassy-sync` channels | Static task pool | +| Edge / Cloud | `aimdb-tokio-adapter` | `tokio::sync` channels | `tokio::spawn` | +| **Browser** | **`aimdb-wasm-adapter`** | **`Rc>`** | **`spawn_local`** | + +## Usage + +```rust +use aimdb_wasm_adapter::WasmAdapter; +use aimdb_core::{AimDbBuilder, buffer::BufferCfg}; +use std::sync::Arc; + +let adapter = WasmAdapter; +let db = AimDbBuilder::new() + .runtime(Arc::new(adapter)) + .build() + .unwrap(); +``` + +## Build + +```bash +# Install target +rustup target add wasm32-unknown-unknown +cargo install wasm-pack + +# Build +wasm-pack build --target web --out-dir pkg + +# Test +wasm-pack test --headless --chrome +``` + +## License + +Apache-2.0 diff --git a/aimdb-wasm-adapter/src/buffer.rs b/aimdb-wasm-adapter/src/buffer.rs new file mode 100644 index 00000000..e3f801e4 --- /dev/null +++ b/aimdb-wasm-adapter/src/buffer.rs @@ -0,0 +1,323 @@ +//! 
Single-threaded buffer implementation for the WASM runtime.
+//!
+//! Uses `Rc<RefCell<T>>` instead of atomics or channels โ€” zero overhead for
+//! the browser's single-threaded execution model.
+//!
+//! All three buffer types are supported:
+//! - **SPMC Ring** โ€” bounded `VecDeque` with per-reader cursors
+//! - **SingleLatest** โ€” single slot, version-tracked
+//! - **Mailbox** โ€” single slot, take-on-read semantics
+//!
+//! # Safety
+//!
+//! `WasmBuffer` and `WasmBufferReader` implement `Send + Sync` via
+//! `unsafe impl` because `wasm32-unknown-unknown` is single-threaded.
+
+extern crate alloc;
+
+use alloc::boxed::Box;
+use alloc::collections::VecDeque;
+use alloc::rc::Rc;
+use alloc::vec::Vec;
+use core::cell::RefCell;
+use core::future::Future;
+use core::pin::Pin;
+use core::task::{Context, Poll, Waker};
+
+use aimdb_core::buffer::{Buffer, BufferCfg, BufferReader, DynBuffer};
+use aimdb_core::DbError;
+
+// ============================================================================
+// Buffer
+// ============================================================================
+
+/// Single-threaded buffer for the WASM runtime.
+///
+/// Wraps an `Rc<RefCell<WasmBufferInner<T>>>` inner enum that holds the actual buffer state.
+/// All three AimDB buffer types (SPMC Ring, SingleLatest, Mailbox) share
+/// this outer struct โ€” the variant is determined by [`BufferCfg`] at
+/// construction time.
+pub struct WasmBuffer<T> {
+    inner: Rc<RefCell<WasmBufferInner<T>>>,
+}
+
+// SAFETY: wasm32 is single-threaded โ€” Rc<RefCell<T>> cannot be accessed concurrently
+unsafe impl<T> Send for WasmBuffer<T> {}
+unsafe impl<T> Sync for WasmBuffer<T> {}
+
+/// Internal buffer state โ€” one variant per buffer type.
+enum WasmBufferInner<T> {
+    /// Bounded ring buffer with independent consumer cursors.
+    SpmcRing {
+        /// Ring storage (oldest at front, newest at back).
+        ring: VecDeque<T>,
+        /// Maximum number of items.
+        capacity: usize,
+        /// Monotonic write counter โ€” each push increments this.
+        /// Readers track their own position against this counter.
+ write_seq: u64, + /// Wakers registered by readers waiting for new data. + wakers: Vec, + }, + + /// Only the latest value, skip intermediates. + SingleLatest { + /// Current value (None until first push). + value: Option, + /// Monotonic version counter โ€” incremented on each push. + version: u64, + /// Wakers registered by readers waiting for a new version. + wakers: Vec, + }, + + /// Single slot, overwrite semantics. + Mailbox { + /// Current slot value (taken on read). + slot: Option, + /// Wakers registered by readers waiting for a value. + wakers: Vec, + }, +} + +impl Buffer for WasmBuffer { + type Reader = WasmBufferReader; + + fn new(cfg: &BufferCfg) -> Self { + let inner = match cfg { + BufferCfg::SpmcRing { capacity } => WasmBufferInner::SpmcRing { + ring: VecDeque::with_capacity(*capacity), + capacity: *capacity, + write_seq: 0, + wakers: Vec::new(), + }, + BufferCfg::SingleLatest => WasmBufferInner::SingleLatest { + value: None, + version: 0, + wakers: Vec::new(), + }, + BufferCfg::Mailbox => WasmBufferInner::Mailbox { + slot: None, + wakers: Vec::new(), + }, + }; + + WasmBuffer { + inner: Rc::new(RefCell::new(inner)), + } + } + + fn push(&self, value: T) { + let mut inner = self.inner.borrow_mut(); + match &mut *inner { + WasmBufferInner::SpmcRing { + ring, + capacity, + write_seq, + wakers, + } => { + if ring.len() >= *capacity { + ring.pop_front(); + } + ring.push_back(value); + *write_seq += 1; + wake_all(wakers); + } + WasmBufferInner::SingleLatest { + value: slot, + version, + wakers, + } => { + *slot = Some(value); + *version += 1; + wake_all(wakers); + } + WasmBufferInner::Mailbox { slot, wakers } => { + *slot = Some(value); + wake_all(wakers); + } + } + } + + fn subscribe(&self) -> Self::Reader { + let inner = self.inner.borrow(); + let state = match &*inner { + WasmBufferInner::SpmcRing { write_seq, .. } => { + // New readers start at the current write position (no backfill). 
+ ReaderState::SpmcRing { + read_seq: *write_seq, + } + } + WasmBufferInner::SingleLatest { version, .. } => { + // Will fire on next push (version change). + ReaderState::SingleLatest { + last_seen_version: *version, + } + } + WasmBufferInner::Mailbox { .. } => ReaderState::Mailbox, + }; + + WasmBufferReader { + buffer: Rc::clone(&self.inner), + state, + } + } +} + +/// Explicit DynBuffer implementation so WasmBuffer can be stored as a trait object. +impl DynBuffer for WasmBuffer { + fn push(&self, value: T) { + >::push(self, value); + } + + fn subscribe_boxed(&self) -> Box + Send> { + Box::new(self.subscribe()) + } + + fn as_any(&self) -> &dyn core::any::Any { + self + } +} + +// ============================================================================ +// Reader +// ============================================================================ + +/// Single-threaded buffer reader for the WASM runtime. +/// +/// Created by [`WasmBuffer::subscribe()`]. Each reader maintains independent +/// state (cursor position, last-seen version) and can advance at its own pace. +pub struct WasmBufferReader { + buffer: Rc>>, + state: ReaderState, +} + +// SAFETY: wasm32 is single-threaded โ€” no concurrent access possible +unsafe impl Send for WasmBufferReader {} +unsafe impl Sync for WasmBufferReader {} + +/// Per-reader tracking state. +enum ReaderState { + /// For SPMC Ring: the sequence number of the next item to read. + SpmcRing { read_seq: u64 }, + /// For SingleLatest: the version we last observed. + SingleLatest { last_seen_version: u64 }, + /// For Mailbox: no extra state (take-on-read). + Mailbox, +} + +impl BufferReader for WasmBufferReader { + fn recv(&mut self) -> Pin> + Send + '_>> { + Box::pin(WasmRecvFuture { reader: self }) + } + + fn try_recv(&mut self) -> Result { + let mut inner = self.buffer.borrow_mut(); + match (&mut *inner, &mut self.state) { + ( + WasmBufferInner::SpmcRing { + ring, write_seq, .. 
+ }, + ReaderState::SpmcRing { read_seq }, + ) => { + if *read_seq >= *write_seq { + return Err(DbError::BufferEmpty); + } + // Calculate offset into the ring + let ring_len = ring.len() as u64; + let oldest_seq = write_seq.saturating_sub(ring_len); + + if *read_seq < oldest_seq { + // Reader fell behind โ€” skip to oldest available + let lag_count = oldest_seq - *read_seq; + *read_seq = oldest_seq; + return Err(DbError::BufferLagged { + lag_count, + _buffer_name: (), + }); + } + + let offset = (*read_seq - oldest_seq) as usize; + let value = ring[offset].clone(); + *read_seq += 1; + Ok(value) + } + ( + WasmBufferInner::SingleLatest { value, version, .. }, + ReaderState::SingleLatest { last_seen_version }, + ) => { + if *version == *last_seen_version { + return Err(DbError::BufferEmpty); + } + match value { + Some(v) => { + *last_seen_version = *version; + Ok(v.clone()) + } + None => Err(DbError::BufferEmpty), + } + } + (WasmBufferInner::Mailbox { slot, .. }, ReaderState::Mailbox) => { + slot.take().ok_or(DbError::BufferEmpty) + } + _ => unreachable!("reader state mismatch"), + } + } +} + +// ============================================================================ +// Async recv future +// ============================================================================ + +/// Future returned by `WasmBufferReader::recv()`. +/// +/// On each poll: +/// 1. Try to read a value (non-blocking). +/// 2. If available, return `Poll::Ready(Ok(value))`. +/// 3. If not, register the waker and return `Poll::Pending`. +/// +/// The waker is woken when `WasmBuffer::push()` fires. 
+struct WasmRecvFuture<'a, T> { + reader: &'a mut WasmBufferReader, +} + +// SAFETY: wasm32 is single-threaded +unsafe impl Send for WasmRecvFuture<'_, T> {} + +impl Future for WasmRecvFuture<'_, T> { + type Output = Result; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.get_mut(); + + // Try non-blocking read first + match this.reader.try_recv() { + Ok(value) => Poll::Ready(Ok(value)), + Err(e @ DbError::BufferLagged { .. }) => Poll::Ready(Err(e)), + Err(DbError::BufferEmpty) => { + // Register waker so we get woken on next push + let mut inner = this.reader.buffer.borrow_mut(); + let wakers = match &mut *inner { + WasmBufferInner::SpmcRing { wakers, .. } => wakers, + WasmBufferInner::SingleLatest { wakers, .. } => wakers, + WasmBufferInner::Mailbox { wakers, .. } => wakers, + }; + // Replace existing waker for this reader if present, or add new one. + // For simplicity, we always push. Wakers are drained on each push(). + wakers.push(cx.waker().clone()); + Poll::Pending + } + Err(e) => Poll::Ready(Err(e)), + } + } +} + +// ============================================================================ +// Helpers +// ============================================================================ + +/// Wake all registered wakers and clear the list. +fn wake_all(wakers: &mut Vec) { + for waker in wakers.drain(..) { + waker.wake(); + } +} diff --git a/aimdb-wasm-adapter/src/lib.rs b/aimdb-wasm-adapter/src/lib.rs new file mode 100644 index 00000000..290af350 --- /dev/null +++ b/aimdb-wasm-adapter/src/lib.rs @@ -0,0 +1,63 @@ +//! AimDB WASM Runtime Adapter +//! +//! Provides a WebAssembly runtime adapter for AimDB, enabling the full +//! dataflow engine to run inside a web browser or any WASM host. +//! +//! # Architecture +//! +//! This crate implements the four executor traits from `aimdb-executor`: +//! +//! - [`RuntimeAdapter`] โ€” Platform identity (`"wasm"`) +//! 
- [`Spawn`] โ€” Task spawning via `wasm_bindgen_futures::spawn_local` +//! - [`TimeOps`] โ€” `Performance.now()` + `setTimeout` for async sleep +//! - [`Logger`] โ€” Maps to `console.log/debug/warn/error` +//! +//! # Single-Threaded Safety +//! +//! `wasm32-unknown-unknown` is single-threaded by construction. The `Send + Sync` +//! bounds required by executor traits are satisfied trivially โ€” no concurrent +//! access is possible. This is the same pattern used by `aimdb-embassy-adapter` +//! for bare-metal MCUs. +//! +//! # Buffer Implementation +//! +//! Buffers use `Rc>` instead of atomics โ€” zero-overhead for the +//! single-threaded browser environment. All three buffer types are supported: +//! SPMC Ring, SingleLatest, and Mailbox. +//! +//! # Feature Flags +//! +//! - `wasm-runtime` (default) โ€” Enables WASM bindings (`wasm-bindgen`, +//! `js-sys`, `web-sys`). Disable for native-target unit tests. + +#![no_std] + +extern crate alloc; + +pub mod buffer; +pub mod logger; +pub mod runtime; +pub mod time; + +// Re-export the adapter type at crate root +pub use runtime::WasmAdapter; + +// Re-export executor traits for convenience +pub use aimdb_executor::{ + ExecutorError, ExecutorResult, Logger as LoggerTrait, Runtime, RuntimeAdapter, Spawn, TimeOps, +}; + +// Re-export buffer types +pub use buffer::{WasmBuffer, WasmBufferReader}; + +// Re-export time types +pub use time::{WasmDuration, WasmInstant}; + +// Generate the extension trait for convenient record configuration +aimdb_core::impl_record_registrar_ext! { + WasmRecordRegistrarExt, + WasmAdapter, + WasmBuffer, + "wasm-runtime", + |cfg| WasmBuffer::::new(cfg) +} diff --git a/aimdb-wasm-adapter/src/logger.rs b/aimdb-wasm-adapter/src/logger.rs new file mode 100644 index 00000000..61b2ffab --- /dev/null +++ b/aimdb-wasm-adapter/src/logger.rs @@ -0,0 +1,52 @@ +//! Logger implementation for the WASM runtime. +//! +//! Maps AimDB log levels to browser console methods: +//! - `info` โ†’ `console.log` +//! 
- `debug` โ†’ `console.debug` +//! - `warn` โ†’ `console.warn` +//! - `error` โ†’ `console.error` + +use crate::runtime::WasmAdapter; +use aimdb_executor::Logger; + +impl Logger for WasmAdapter { + fn info(&self, message: &str) { + #[cfg(feature = "wasm-runtime")] + web_sys::console::log_1(&message.into()); + + #[cfg(not(feature = "wasm-runtime"))] + { + let _ = message; + } + } + + fn debug(&self, message: &str) { + #[cfg(feature = "wasm-runtime")] + web_sys::console::debug_1(&message.into()); + + #[cfg(not(feature = "wasm-runtime"))] + { + let _ = message; + } + } + + fn warn(&self, message: &str) { + #[cfg(feature = "wasm-runtime")] + web_sys::console::warn_1(&message.into()); + + #[cfg(not(feature = "wasm-runtime"))] + { + let _ = message; + } + } + + fn error(&self, message: &str) { + #[cfg(feature = "wasm-runtime")] + web_sys::console::error_1(&message.into()); + + #[cfg(not(feature = "wasm-runtime"))] + { + let _ = message; + } + } +} diff --git a/aimdb-wasm-adapter/src/runtime.rs b/aimdb-wasm-adapter/src/runtime.rs new file mode 100644 index 00000000..df1d2cc1 --- /dev/null +++ b/aimdb-wasm-adapter/src/runtime.rs @@ -0,0 +1,62 @@ +//! WasmAdapter struct and RuntimeAdapter + Spawn implementations +//! +//! Single-threaded WASM runtime โ€” tasks are spawned onto the browser's +//! microtask queue via `wasm_bindgen_futures::spawn_local`. + +use aimdb_executor::{ExecutorResult, RuntimeAdapter, Spawn}; +use core::future::Future; + +/// WASM runtime adapter for AimDB. +/// +/// Implements the four executor traits required by `aimdb-core`: +/// [`RuntimeAdapter`], [`Spawn`], [`TimeOps`](crate::time), and +/// [`Logger`](crate::logger). +/// +/// # Safety +/// +/// `WasmAdapter` implements `Send + Sync` via `unsafe impl` because +/// `wasm32-unknown-unknown` is single-threaded โ€” no concurrent access +/// is possible. This is the identical pattern used by `EmbassyAdapter`. 
+#[derive(Clone, Copy, Debug)]
+pub struct WasmAdapter;
+
+// SAFETY: wasm32-unknown-unknown is single-threaded.
+// No concurrent access is possible — Send + Sync are trivially satisfied.
+// This is the same pattern as aimdb-embassy-adapter/src/runtime.rs.
+unsafe impl Send for WasmAdapter {}
+unsafe impl Sync for WasmAdapter {}
+
+impl RuntimeAdapter for WasmAdapter {
+    fn runtime_name() -> &'static str {
+        "wasm"
+    }
+}
+
+impl Spawn for WasmAdapter {
+    type SpawnToken = (); // Same as Embassy — no join handle
+
+    fn spawn<F>(&self, future: F) -> ExecutorResult<Self::SpawnToken>
+    where
+        F: Future<Output = ()> + Send + 'static,
+    {
+        // spawn_local requires F: 'static but not F: Send.
+        // The Send bound on the trait is satisfied vacuously —
+        // all types are effectively Send in a single-threaded context.
+        #[cfg(feature = "wasm-runtime")]
+        {
+            wasm_bindgen_futures::spawn_local(future);
+        }
+
+        #[cfg(not(feature = "wasm-runtime"))]
+        {
+            let _ = future;
+            // Without wasm-runtime, we can't spawn — this path is only
+            // hit during native-target unit tests.
+            return Err(aimdb_executor::ExecutorError::RuntimeUnavailable {
+                message: "wasm-runtime feature not enabled",
+            });
+        }
+
+        Ok(())
+    }
+}
diff --git a/aimdb-wasm-adapter/src/time.rs b/aimdb-wasm-adapter/src/time.rs
new file mode 100644
index 00000000..950c6baa
--- /dev/null
+++ b/aimdb-wasm-adapter/src/time.rs
@@ -0,0 +1,124 @@
+//! TimeOps implementation for the WASM runtime.
+//!
+//! Uses `Performance.now()` for high-resolution relative timestamps and
+//! `setTimeout` (via a JS Promise) for async sleep.
+
+use crate::runtime::WasmAdapter;
+use aimdb_executor::TimeOps;
+use core::future::Future;
+use core::pin::Pin;
+use core::task::{Context, Poll};
+
+/// A wrapper that unsafely implements `Send` for a future.
+///
+/// # Safety
+///
+/// Only safe on `wasm32-unknown-unknown` where all execution is single-threaded.
+/// The inner future will never actually be sent between threads.
+pub(crate) struct SendFuture<F>(pub(crate) F);
+
+// SAFETY: wasm32 is single-threaded — the future cannot be sent to another thread
+unsafe impl<F> Send for SendFuture<F> {}
+
+impl<F: Future> Future for SendFuture<F> {
+    type Output = F::Output;
+
+    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+        // SAFETY: We only project to the inner field, preserving Pin guarantees.
+        let inner = unsafe { self.map_unchecked_mut(|s| &mut s.0) };
+        inner.poll(cx)
+    }
+}
+
+/// Milliseconds since page load (from `Performance.now()`).
+///
+/// Wraps an `f64` — browser time APIs return floating-point milliseconds.
+#[derive(Clone, Debug)]
+pub struct WasmInstant(pub(crate) f64);
+
+/// Duration in milliseconds.
+#[derive(Clone, Debug)]
+pub struct WasmDuration(pub(crate) f64);
+
+// SAFETY: single-threaded wasm32 — no concurrent access possible
+unsafe impl Send for WasmInstant {}
+unsafe impl Sync for WasmInstant {}
+unsafe impl Send for WasmDuration {}
+unsafe impl Sync for WasmDuration {}
+
+impl TimeOps for WasmAdapter {
+    type Instant = WasmInstant;
+    type Duration = WasmDuration;
+
+    fn now(&self) -> WasmInstant {
+        #[cfg(feature = "wasm-runtime")]
+        {
+            let perf = web_sys::window()
+                .expect("no global `window` — not running in a browser?")
+                .performance()
+                .expect("no `Performance` API available");
+            WasmInstant(perf.now())
+        }
+
+        #[cfg(not(feature = "wasm-runtime"))]
+        {
+            // Fallback for native-target unit tests — monotonic counter
+            WasmInstant(0.0)
+        }
+    }
+
+    fn duration_since(&self, later: WasmInstant, earlier: WasmInstant) -> Option<WasmDuration> {
+        let diff = later.0 - earlier.0;
+        if diff >= 0.0 {
+            Some(WasmDuration(diff))
+        } else {
+            None
+        }
+    }
+
+    fn millis(&self, ms: u64) -> WasmDuration {
+        WasmDuration(ms as f64)
+    }
+
+    fn secs(&self, secs: u64) -> WasmDuration {
+        WasmDuration(secs as f64 * 1000.0)
+    }
+
+    fn micros(&self, micros: u64) -> WasmDuration {
+        WasmDuration(micros as f64 / 1000.0)
+    }
+
+    fn sleep(&self, duration: WasmDuration) -> impl Future<Output = ()> + 
Send { + #[cfg(feature = "wasm-runtime")] + { + use futures_util::FutureExt; + + // Convert setTimeout Promise to a Rust Future. + // setTimeout never rejects, so the Ok/Err result is safe to discard. + let fut = wasm_bindgen_futures::JsFuture::from(js_sys::Promise::new( + &mut |resolve, _reject| { + web_sys::window() + .unwrap() + .set_timeout_with_callback_and_timeout_and_arguments_0( + &resolve, + duration.0 as i32, + ) + .unwrap(); + }, + )) + .map(|_result| ()); + + // SAFETY: wasm32 is single-threaded โ€” JsFuture (which contains Rc) + // cannot be accessed concurrently. The Send bound is required by the + // TimeOps trait but is vacuously satisfied on wasm32. + SendFuture(fut) + } + + #[cfg(not(feature = "wasm-runtime"))] + { + let _ = duration; + // Fallback for native-target unit tests โ€” resolve immediately + SendFuture(core::future::ready(())) + } + } +} diff --git a/docs/design/025-M12-wasm-adapter.md b/docs/design/025-M12-wasm-adapter.md new file mode 100644 index 00000000..e301b7d0 --- /dev/null +++ b/docs/design/025-M12-wasm-adapter.md @@ -0,0 +1,1182 @@ +# Design: AimDB WASM Adapter + +**Status:** ๐Ÿ“‹ Proposed +**Milestone:** M12 โ€” Browser Runtime +**Revision:** 1 (2026-02-27) +**Crate:** `aimdb-wasm-adapter` (open source, `aimdb` workspace) + +--- + +## 1. Summary + +Add a third runtime adapter (`aimdb-wasm-adapter`) that compiles AimDB to +WebAssembly, enabling the **full dataflow engine** to run inside a web browser +or any WASM host. Records, buffers, producers, consumers, and data-contract +enforcement all execute natively in WASM โ€” eliminating the need for a parallel +validation layer (Zod, JSON Schema) on the TypeScript side. 
+
+This completes the platform matrix:
+
+| Target | Adapter | Buffer Primitive | Spawn Mechanism |
+|--------|---------|------------------|-----------------|
+| MCU | `aimdb-embassy-adapter` | `embassy-sync` channels | Static task pool |
+| Edge / Cloud | `aimdb-tokio-adapter` | `tokio::sync` channels | `tokio::spawn` |
+| **Browser** | **`aimdb-wasm-adapter`** | **`Rc<RefCell<T>>`** | **`spawn_local`** |
+
+---
+
+## 2. Motivation
+
+### 2.1 Problem: The Validation Gap
+
+Today the TypeScript UI (`aimdb-ui`) consumes data from the WebSocket connector
+with **zero runtime validation**. The defence layers are:
+
+1. **ts-rs** generates TypeScript type definitions from Rust structs →
+   compile-time only, erased at runtime.
+2. **schema-registry.ts** exports Observable metadata (icons, units) →
+   informational, not enforced.
+3. `useWebSocketConnection.ts` does `JSON.parse(event.data)` and passes the
+   result straight to React state → **any malformed message silently corrupts
+   the UI**.
+
+Approaches like Zod codegen would add runtime validation, but create a
+**parallel type system** that must be kept in sync with the Rust source of
+truth — the exact problem data contracts were designed to eliminate.
+
+### 2.2 Solution: Run the Real Engine in the Browser
+
+A WASM adapter means:
+
+- **Contract enforcement is native** — the same `serde` deserialization and
+  `Migratable` migration logic that runs on the server runs in the browser.
+- **No parallel type system** — Rust structs compiled to WASM _are_ the
+  validation layer. `wasm-bindgen` / `serde-wasm-bindgen` handle the
+  Rust ↔ JS boundary.
+- **Full buffer semantics** — SPMC Ring, SingleLatest, Mailbox work identically.
+  The browser can run producers, consumers, and transforms locally.
+- **Offline-first capability** — a local AimDB instance persists state even
+  when the WebSocket connection is lost.
+- **Two-way sync** โ€” a browser AimDB can connect to the server's WebSocket + connector as a client, receiving and sending records through the standard + `link_from` / `link_to` mechanism. + +### 2.3 Non-Goals (v1) + +- **`wasm32-wasi` support** โ€” target is `wasm32-unknown-unknown` (browser). + WASI can be added later with its own feature flag. +- **SharedArrayBuffer / multi-threaded WASM** โ€” v1 is single-threaded only, + matching the Embassy pattern. +- **Persistence backend for WASM** โ€” IndexedDB integration is a separate + design. v1 uses in-memory buffers only. +- **Web Worker offloading** โ€” all execution happens on the main thread. Worker + support is a future optimisation. + +--- + +## 3. Architecture + +### 3.1 Crate Layout + +``` +aimdb-wasm-adapter/ +โ”œโ”€โ”€ Cargo.toml +โ”œโ”€โ”€ README.md +โ”œโ”€โ”€ src/ +โ”‚ โ”œโ”€โ”€ lib.rs # WasmAdapter struct + unsafe Send/Sync + re-exports +โ”‚ โ”œโ”€โ”€ runtime.rs # RuntimeAdapter + Spawn impls +โ”‚ โ”œโ”€โ”€ time.rs # TimeOps impl (Performance.now + setTimeout) +โ”‚ โ”œโ”€โ”€ logger.rs # Logger impl (console.log/warn/error) +โ”‚ โ”œโ”€โ”€ buffer.rs # WasmBuffer (Rc single-threaded channels) +โ”‚ โ””โ”€โ”€ bindings.rs # #[wasm_bindgen] TypeScript-facing API +โ”œโ”€โ”€ tests/ +โ”‚ โ””โ”€โ”€ wasm.rs # wasm-bindgen-test suite +โ””โ”€โ”€ pkg/ # wasm-pack build output (gitignored) +``` + +### 3.2 Dependency Graph + +``` +aimdb-wasm-adapter +โ”œโ”€โ”€ aimdb-core (default-features = false, features = ["alloc"]) +โ”œโ”€โ”€ aimdb-executor (default-features = false) +โ”œโ”€โ”€ wasm-bindgen 0.2 +โ”œโ”€โ”€ wasm-bindgen-futures 0.4 # spawn_local +โ”œโ”€โ”€ js-sys 0.3 # Date, Promise, setTimeout +โ”œโ”€โ”€ web-sys 0.3 # console, Performance, Window +โ”œโ”€โ”€ serde-wasm-bindgen 0.6 # Rust โ†” JsValue conversion +โ”œโ”€โ”€ serde (no default features, alloc) +โ””โ”€โ”€ serde_json (no default features, alloc) +``` + +**No dependency on `tokio`, `embassy-*`, or any OS-level crate.** + +### 3.3 Feature Flags + +```toml 
+[features] +default = ["wasm-runtime"] +wasm-runtime = ["wasm-bindgen", "wasm-bindgen-futures", "js-sys", "web-sys"] +# Future: wasi, web-worker, indexeddb-persistence +``` + +--- + +## 4. Trait Implementations + +### 4.1 The `Send + Sync` Question + +Every executor trait requires `Send + Sync` (inherited from `RuntimeAdapter`). +WASM (`wasm32-unknown-unknown`) is single-threaded โ€” there are no data races +by construction. This is the **identical situation** as Embassy on bare-metal +MCUs, and the same solution applies: + +```rust +pub struct WasmAdapter; + +// SAFETY: wasm32-unknown-unknown is single-threaded. +// No concurrent access is possible โ€” Send + Sync are trivially satisfied. +unsafe impl Send for WasmAdapter {} +unsafe impl Sync for WasmAdapter {} +``` + +This pattern is established โ€” Embassy has used it since day one: +```rust +// aimdb-embassy-adapter/src/runtime.rs: +unsafe impl Send for EmbassyAdapter {} +unsafe impl Sync for EmbassyAdapter {} +``` + +The same `unsafe impl` applies to `WasmBuffer` internals (which use +`Rc>`, normally `!Send`), justified by the single-threaded +execution model. + +### 4.2 `RuntimeAdapter` + +```rust +impl RuntimeAdapter for WasmAdapter { + fn runtime_name() -> &'static str { "wasm" } +} +``` + +### 4.3 `Spawn` + +WASM has no thread pool. `wasm_bindgen_futures::spawn_local` schedules a +`Future` on the browser's microtask queue. This is analogous to Embassy's +static task pool โ€” fire-and-forget, no join handle. + +```rust +impl Spawn for WasmAdapter { + type SpawnToken = (); // Same as Embassy โ€” no join handle + + fn spawn(&self, future: F) -> ExecutorResult<()> + where + F: Future + Send + 'static, + { + wasm_bindgen_futures::spawn_local(future); + Ok(()) + } +} +``` + +`spawn_local` requires `F: 'static` but **not** `F: Send`. The `Send` bound +on the trait is satisfied vacuously โ€” all types are effectively `Send` in a +single-threaded context. 
+
+### 4.4 `TimeOps`
+
+Browser time is floating-point milliseconds from `Performance.now()`, with
+`setTimeout` for sleeping. We define lightweight wrappers:
+
+```rust
+/// Milliseconds since page load (from Performance.now())
+#[derive(Clone, Debug)]
+pub struct WasmInstant(f64);
+
+/// Duration in milliseconds
+#[derive(Clone, Debug)]
+pub struct WasmDuration(f64);
+
+// SAFETY: single-threaded — no concurrent access possible
+unsafe impl Send for WasmInstant {}
+unsafe impl Sync for WasmInstant {}
+unsafe impl Send for WasmDuration {}
+unsafe impl Sync for WasmDuration {}
+
+impl TimeOps for WasmAdapter {
+    type Instant = WasmInstant;
+    type Duration = WasmDuration;
+
+    fn now(&self) -> WasmInstant {
+        let perf = web_sys::window()
+            .expect("no window")
+            .performance()
+            .expect("no performance API");
+        WasmInstant(perf.now())
+    }
+
+    fn duration_since(&self, later: WasmInstant, earlier: WasmInstant) -> Option<WasmDuration> {
+        let diff = later.0 - earlier.0;
+        if diff >= 0.0 { Some(WasmDuration(diff)) } else { None }
+    }
+
+    fn millis(&self, ms: u64) -> WasmDuration { WasmDuration(ms as f64) }
+    fn secs(&self, s: u64) -> WasmDuration { WasmDuration(s as f64 * 1000.0) }
+    fn micros(&self, us: u64) -> WasmDuration { WasmDuration(us as f64 / 1000.0) }
+
+    fn sleep(&self, duration: WasmDuration) -> impl Future<Output = ()> + Send {
+        // Convert setTimeout Promise to a Rust Future.
+        // setTimeout never rejects, so the Ok/Err result is safe to discard.
+ wasm_bindgen_futures::JsFuture::from(js_sys::Promise::new( + &mut |resolve, _| { + web_sys::window() + .unwrap() + .set_timeout_with_callback_and_timeout_and_arguments_0( + &resolve, + duration.0 as i32, + ) + .unwrap(); + }, + )) + .map(|_result| ()) + } +} +``` + +### 4.5 `Logger` + +Maps directly to the browser console: + +```rust +impl Logger for WasmAdapter { + fn info(&self, message: &str) { web_sys::console::log_1(&message.into()); } + fn debug(&self, message: &str) { web_sys::console::debug_1(&message.into()); } + fn warn(&self, message: &str) { web_sys::console::warn_1(&message.into()); } + fn error(&self, message: &str) { web_sys::console::error_1(&message.into()); } +} +``` + +--- + +## 5. Buffer Implementation + +### 5.1 Design Rationale + +| Approach | Pros | Cons | +|----------|------|------| +| Port `tokio::sync` channels | Feature-complete, metrics | Pulls in atomic ops, oversized for single thread | +| Use `futures::channel::mpsc` | Well-tested, async-ready | Extra dependency, `Sender` is `!Sync` | +| **`Rc>` + `Waker`** | **Zero-cost single-threaded, no atomics, no deps** | **Must `unsafe impl Send + Sync`** | +| Reuse Embassy buffer | Proven no_std pattern | Pulls in `embassy-sync`, const generics infect public API | + +**Decision: `Rc>` + Waker.** This matches the browser's +single-threaded model perfectly. No atomic operations, no mutex overhead, no +dependency on `embassy-sync` or `futures`. The buffers are simple, auditable, +and fast. 
+
+### 5.2 Buffer Types
+
+```rust
+pub struct WasmBuffer<T> {
+    inner: Rc<RefCell<WasmBufferInner<T>>>,
+}
+
+enum WasmBufferInner<T> {
+    /// Bounded ring buffer with independent consumer cursors
+    SpmcRing {
+        ring: VecDeque<T>,
+        capacity: usize,
+        /// Each subscriber gets a cursor index
+        subscribers: Vec<Rc<RefCell<usize>>>,
+        wakers: Vec<Waker>,
+    },
+    /// Only the latest value, skip intermediates
+    SingleLatest {
+        value: Option<T>,
+        version: u64,
+        wakers: Vec<Waker>,
+    },
+    /// Single slot, overwrite semantics
+    Mailbox {
+        slot: Option<T>,
+        wakers: Vec<Waker>,
+    },
+}
+
+// SAFETY: wasm32 is single-threaded — Rc<RefCell<T>> cannot be accessed concurrently
+unsafe impl<T> Send for WasmBuffer<T> {}
+unsafe impl<T> Sync for WasmBuffer<T> {}
+```
+
+### 5.3 Reader Implementation
+
+Readers implement `BufferReader` by returning a `Future` that either
+resolves immediately (data available) or registers a `Waker` and returns
+`Poll::Pending`. When `push()` is called on the buffer, all registered
+wakers are woken, continuing the reader futures on the next microtask.
+
+```rust
+pub struct WasmBufferReader<T> {
+    buffer: Rc<RefCell<WasmBufferInner<T>>>,
+    // Reader-specific state (cursor for SpmcRing, version for SingleLatest)
+    state: ReaderState,
+}
+
+// SAFETY: wasm32 is single-threaded — no concurrent access possible
+unsafe impl<T> Send for WasmBufferReader<T> {}
+unsafe impl<T> Sync for WasmBufferReader<T> {}
+```
+
+### 5.4 Macro Invocation
+
+```rust
+aimdb_core::impl_record_registrar_ext! {
+    WasmRecordRegistrarExt,
+    WasmAdapter,
+    WasmBuffer,
+    "wasm-runtime",
+    |cfg| WasmBuffer::<T>::new(cfg)
+}
+```
+
+This generates `buffer()`, `source()`, `tap()`, `transform()`, and
+`transform_join()` methods — identical API surface to Tokio and Embassy.
+
+---
+
+## 6. TypeScript Bindings (`#[wasm_bindgen]`)
+
+### 6.1 Design Principle
+
+Expose a **high-level facade** to TypeScript, not the raw Rust builder pattern.
+JavaScript users should not deal with `Arc`, `RecordRegistrar`, or feature
+flags. The facade wraps the Rust API and handles `JsValue` ↔ Rust conversion
+via `serde-wasm-bindgen`.
+ +### 6.2 Core API + +```typescript +// โ”€โ”€ Instantiation โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +import { WasmDb } from '@aimdb/wasm'; + +const db = new WasmDb(); + +// โ”€โ”€ Record configuration (key = RecordKey, schemaType = contract) โ”€โ”€ +db.configureRecord('sensors.temperature.vienna', { + schemaType: 'temperature', // selects the Rust struct for validation + buffer: 'SingleLatest', // or { type: 'SpmcRing', capacity: 100 } +}); + +db.configureRecord('sensors.temperature.berlin', { + schemaType: 'temperature', // same contract, different record + buffer: 'SingleLatest', +}); + +// โ”€โ”€ Read (validated by Rust serde) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +const temp = await db.get('sensors.temperature.vienna'); +// temp: { celsius: number, timestamp: number } โ€” or null if not yet produced + +// โ”€โ”€ Write (validated by Rust serde) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +db.set('sensors.temperature.vienna', { celsius: 22.5, timestamp: Date.now() }); +// throws if payload fails Rust deserialization โ†’ contract enforcement + +// โ”€โ”€ Subscribe (reactive) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +const unsub = db.subscribe('sensors.temperature.vienna', (value) => { + // Fires on every buffer push โ€” value is already validated + console.log(value.celsius); +}); +unsub(); // cleanup + +// โ”€โ”€ Lifecycle โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +db.free(); // Release WASM memory +``` + +### 6.3 Rust-Side Binding Implementation + +```rust +#[wasm_bindgen] +pub struct WasmDb { + inner: AimDb, +} + +#[wasm_bindgen] +impl WasmDb { + #[wasm_bindgen(constructor)] + pub fn 
new() -> Result { + let adapter = WasmAdapter; + let db = AimDbBuilder::new() + .runtime(Arc::new(adapter)) + .build()?; + Ok(WasmDb { inner: db }) + } + + /// Get the current value of a record by its RecordKey (returns JsValue or undefined) + pub fn get(&self, record_key: &str) -> Result { + // Uses AimDb key lookup + serde_wasm_bindgen::to_value + ... + } + + /// Set a record value by its RecordKey (validates via Rust serde deserialization) + pub fn set(&self, record_key: &str, value: JsValue) -> Result<(), JsError> { + // serde_wasm_bindgen::from_value โ†’ T, then push to buffer + // If deserialization fails โ†’ JsError with contract violation message + ... + } + + /// Subscribe to record updates by RecordKey โ€” returns a closure to unsubscribe + pub fn subscribe( + &self, + record_key: &str, + callback: js_sys::Function, + ) -> Result { + // Creates a BufferReader, spawns a loop that calls callback on each recv() + // Returns a JS function that aborts the loop + ... + } +} +``` + +### 6.4 Record Registration by Key + +Records are identified by their **`RecordKey`** โ€” the same string used +throughout AimDB (e.g. `"sensors.temperature.vienna"`). The key uniquely +identifies a record *instance*, while the schema type (from +`SchemaType::NAME`) identifies the *contract*. Multiple records can share +the same contract type. + +**Strategy A: Pre-compiled contract registry (recommended for v1)** + +All known data contracts from `aimdb-data-contracts` are compiled into the +WASM module. 
The `configureRecord()` call takes a record key and a schema
+type name — the key is used as the AimDB `RecordKey`, and the type name
+selects the Rust struct for serde validation:
+
+```typescript
+// Record key = AimDB RecordKey, schemaType = SchemaType::NAME
+db.configureRecord('sensors.temperature.vienna', {
+  schemaType: 'temperature', // selects Temperature struct for validation
+  buffer: 'SingleLatest',
+});
+
+db.configureRecord('sensors.temperature.berlin', {
+  schemaType: 'temperature', // same contract, different record
+  buffer: { type: 'SpmcRing', capacity: 50 },
+});
+
+db.configureRecord('sensors.humidity.vienna', {
+  schemaType: 'humidity',
+  buffer: 'SingleLatest',
+});
+```
+
+Rust-side dispatch uses `SchemaType::NAME` to select the concrete type,
+and the record key as the `StringKey` for AimDB's `configure()`:
+
+```rust
+/// Register a record by key + schema type name.
+/// The key becomes the AimDB RecordKey; the schema type selects the Rust struct.
+fn configure_record(
+    db: &mut AimDbBuilder,
+    record_key: &str,
+    schema_type: &str,
+    cfg: BufferCfg,
+) -> Result<(), JsError> {
+    let key = StringKey::intern(record_key.to_string());
+    match schema_type {
+        Temperature::NAME => db.configure::<Temperature>(key, |reg| {
+            reg.buffer(cfg.clone());
+        }),
+        Humidity::NAME => db.configure::<Humidity>(key, |reg| {
+            reg.buffer(cfg.clone());
+        }),
+        GpsLocation::NAME => db.configure::<GpsLocation>(key, |reg| {
+            reg.buffer(cfg);
+        }),
+        _ => return Err(JsError::new(&format!("Unknown schema type: {schema_type}"))),
+    };
+    Ok(())
+}
+```
+
+This means `get()`, `set()`, and `subscribe()` all use the record key:
+
+```typescript
+db.set('sensors.temperature.vienna', { celsius: 22.5, timestamp: Date.now() });
+const temp = await db.get('sensors.temperature.vienna');
+db.subscribe('sensors.temperature.vienna', (value) => { ... });
+```
+
+**Strategy B: Dynamic JSON records (future)**
+
+Register records with a JSON Schema at runtime, validated against
+`serde_json::Value`. 
No compile-time Rust struct needed. This requires +the `DynRecord` concept tracked separately. + +### 6.5 React Integration + +For `aimdb-ui`, provide a thin React hook wrapping the WASM API. +The hook takes a **record key** (the AimDB `RecordKey` string): + +```typescript +// useAimDb.ts โ€” library code, ships with @aimdb/wasm npm package +import { WasmDb } from '@aimdb/wasm'; + +const dbInstance = new WasmDb(); + +/** + * Subscribe to a record by its AimDB RecordKey. + * Returns the current value (validated by Rust serde) or null. + */ +export function useRecord(recordKey: string): T | null { + const [value, setValue] = useState(null); + + useEffect(() => { + const unsub = dbInstance.subscribe(recordKey, (v: T) => setValue(v)); + return () => unsub(); + }, [recordKey]); + + return value; +} + +// Usage in component โ€” record key follows AimDB naming convention: +function TemperatureCard({ city }: { city: string }) { + const temp = useRecord(`sensors.temperature.${city}`); + if (!temp) return ; + return {temp.celsius}ยฐC; +} +``` + +--- + +## 7. WebSocket Sync Strategy + +### 7.1 Operational Modes + +The WASM adapter supports three operational modes. The mode is selected at +instantiation and determines whether the browser AimDB instance runs +standalone, connects to a server, or gracefully transitions between both. + +--- + +#### Mode 1: Local-only + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Browser Tab โ”‚ +โ”‚ โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ aimdb-ui โ”‚โ†’โ”‚ AimDB WASM โ”‚ โ”‚ +โ”‚ โ”‚ (React) โ”‚โ†โ”‚ (local) โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ โ”‚ +โ”‚ Records produced & consumed โ”‚ +โ”‚ entirely within the browser. 
โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +AimDB runs entirely in the browser with no network dependency. Records are +configured, produced, and consumed locally using the same buffer semantics +(SPMC Ring, SingleLatest, Mailbox) as server-side deployments. + +**Use cases:** +- **Demos & marketing pages** โ€” show live AimDB behaviour without a backend. + Simulated data can be produced via `source()` using `Simulatable` contracts. +- **Unit / integration testing** โ€” `aimdb-ui` components can be tested against + a real (local) AimDB instance in `vitest` / `wasm-bindgen-test`, replacing + mock data with contract-validated records. +- **Offline-capable apps** โ€” sensors write data locally (e.g. via Web + Bluetooth or manual entry); the UI reacts through subscriptions. +- **Prototyping** โ€” experiment with record schemas, buffer configurations, + and transforms without deploying a server. + +**TypeScript API:** + +```typescript +import { WasmDb } from '@aimdb/wasm'; + +const db = new WasmDb(); // no server URL โ†’ local-only + +db.configureRecord('sensors.temperature.indoor', { + schemaType: 'temperature', + buffer: 'SingleLatest', +}); + +// Produce locally (e.g. from a BLE sensor, manual input, or simulation) +db.set('sensors.temperature.indoor', { celsius: 22.5, timestamp: Date.now() }); + +// Subscribe โ€” fires immediately since the buffer has a value +db.subscribe('sensors.temperature.indoor', (temp) => { + console.log(temp.celsius); // 22.5 +}); +``` + +**Characteristics:** +- Zero network I/O โ€” no WebSocket, no HTTP. +- Full contract enforcement โ€” `set()` validates via Rust serde. +- All buffer types available โ€” SPMC Ring for history, SingleLatest for + current-value dashboards, Mailbox for commands. +- `source()` / `tap()` / `transform()` work natively โ€” can run local + dataflow pipelines in the browser. 
+ +--- + +#### Mode 2: Synchronized + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Browser Tab โ”‚ ws:// โ”‚ Server โ”‚ +โ”‚ โ”‚ โ”‚ โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ subscribe โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ +โ”‚ โ”‚ aimdb-ui โ”‚โ†’โ”‚ AimDB WASM โ”‚โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ†’โ”‚ โ”‚ AimDB โ”‚ โ”‚ +โ”‚ โ”‚ (React) โ”‚โ†โ”‚ (local) โ”‚โ†โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”‚ โ”‚ + WS โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ data/snapshotโ”‚ โ”‚ Connector โ”‚ โ”‚ +โ”‚ โ”‚ โ”‚ โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ +โ”‚ โ”‚ write โ”‚ โ”‚ โ”‚ โ”‚ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ†’โ”‚ MQTT / KNX / โ”‚ +โ”‚ โ”‚ โ”‚ Persistence โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +The browser AimDB instance connects to the server's WebSocket connector +via `WsBridge`. Server records are mirrored into local buffers. UI components +subscribe to local records โ€” they never interact with the WebSocket directly. + +**Use cases:** +- **Production dashboards** โ€” `aimdb-ui` receives live sensor data from the + server's MQTT/KNX mesh via the WebSocket connector, with contract + validation at the WASM boundary before data reaches React. +- **Control panels** โ€” the user writes a setpoint or config record locally; + `WsBridge` forwards the `write` ClientMessage to the server, which routes + it to MQTT/KNX via `link_from("ws://โ€ฆ")`. 
+- **Multi-tab consistency** โ€” each browser tab runs its own AimDB WASM + instance, each with its own `WsBridge` connection. The server is the + single source of truth; tabs converge via late-join snapshots. + +**TypeScript API:** + +```typescript +import { WasmDb, WsBridge } from '@aimdb/wasm'; + +const db = new WasmDb(); + +// Configure records matching the server's outbound topics +db.configureRecord('sensors.temperature.vienna', { + schemaType: 'temperature', + buffer: 'SingleLatest', +}); +db.configureRecord('sensors.humidity.vienna', { + schemaType: 'humidity', + buffer: 'SingleLatest', +}); + +// Connect to the server's WebSocket connector +const bridge = WsBridge.connect(db, 'wss://api.cloud.aimdb.dev/ws', { + subscribeTopics: ['sensors/#'], // MQTT-style wildcard patterns + autoReconnect: true, // reconnect with exponential backoff + lateJoin: true, // request snapshots on (re)connect +}); + +// Subscribe to local records โ€” updated by WsBridge from server push +db.subscribe('sensors.temperature.vienna', (temp) => { + console.log(temp.celsius); // pushed from server โ†’ local buffer โ†’ callback +}); + +// Write travels: local buffer โ†’ WsBridge โ†’ server โ†’ MQTT/KNX +db.set('commands.setpoint.room1', { target_celsius: 21.0, timestamp: Date.now() }); + +// Lifecycle +bridge.disconnect(); +db.free(); +``` + +**Data flow โ€” server โ†’ browser:** + +1. Server AimDB produces a `Temperature` record (e.g. from MQTT inbound). +2. The outbound `link_to("ws://sensors/temperature/vienna")` triggers the + WS connector's `broadcast()`. +3. `ServerMessage::Data { topic, payload, ts }` is sent over WebSocket. +4. `WsBridge.on_message` receives the JSON frame, dispatches by `topic`. +5. The bridge resolves the `topic` to a local `RecordKey`, deserializes the + `payload` via the record's contract type (Rust serde โ€” this is where + contract enforcement happens), and pushes to the local `WasmBuffer`. +6. 
React components subscribed via `useRecord()` re-render with the + validated value. + +**Data flow โ€” browser โ†’ server:** + +1. UI calls `db.set('commands.setpoint.room1', { ... })`. +2. The local buffer receives the value (contract-validated by serde). +3. `WsBridge` detects the local write (via a `tap()` on the record) and + sends a `ClientMessage::Write { topic, payload }` over WebSocket. +4. Server's WS connector routes it through the standard `Router` โ€” same + path as any `link_from("ws://โ€ฆ")` record. +5. The server pushes it to MQTT, KNX, persistence, or another AimDB record + โ€” depending on the server-side configuration. + +**Resilience:** +- **Reconnection**: Exponential backoff (default: 500ms โ†’ 1s โ†’ 2s โ†’ 4s โ†’ + 8s), matching the current `useWebSocketConnection.ts` strategy. +- **Late-join**: On reconnect, the bridge sends `ClientMessage::Subscribe` + which triggers server-side `snapshot` responses for each topic โ€” + re-seeding local buffers with current values. +- **Offline writes**: Writes during disconnection are buffered locally. + On reconnect, the bridge flushes pending writes to the server (FIFO). + Buffer capacity is configurable; overflow policy matches the buffer type + (drop oldest for SPMC Ring, overwrite for SingleLatest/Mailbox). 
+ +--- + +#### Mode 3: Hybrid (offline-first with sync) + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Browser Tab โ”‚ โ”‚ Server โ”‚ +โ”‚ โ”‚ online โ”‚ โ”‚ +โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚โ—„โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ–บโ”‚ AimDB + WS โ”‚ +โ”‚ โ”‚ aimdb-ui โ”‚โ†’โ”‚ AimDB WASM โ”‚ โ”‚ โ”‚ Connector โ”‚ +โ”‚ โ”‚ (React) โ”‚โ†โ”‚ (local) โ”‚ โ”‚ offline โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚โ—„โ”€โ”€ โ”€ โ”€ โ”€ โ”€ โ–บ (unavailable) +โ”‚ โ”‚ +โ”‚ Local records always available. โ”‚ +โ”‚ Server sync when possible. โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +Hybrid mode combines Modes 1 and 2. The browser AimDB always has local +records, and the `WsBridge` connects to the server when available. If the +server is unreachable, the UI continues working with locally buffered data. +When the connection is restored, it re-syncs via late-join snapshots. + +**Use cases:** +- **Field worker apps** โ€” a technician configures HVAC setpoints on a + tablet. Changes apply locally immediately (local buffer โ†’ UI update) and + sync to the server when connectivity returns. +- **Progressive web apps (PWAs)** โ€” the app is installable and works offline. + Sensor readings cached in local buffers are available for review even + without a network connection. +- **Unreliable networks** โ€” edge deployments with intermittent + connectivity (construction sites, industrial floors, rural IoT). 
+ +**TypeScript API:** + +```typescript +import { WasmDb, WsBridge } from '@aimdb/wasm'; + +const db = new WasmDb(); + +db.configureRecord('sensors.temperature.vienna', { + schemaType: 'temperature', + buffer: { type: 'SpmcRing', capacity: 200 }, // keep history locally +}); + +// WsBridge attempts connection immediately but doesn't block +const bridge = WsBridge.connect(db, 'wss://api.cloud.aimdb.dev/ws', { + subscribeTopics: ['sensors/#'], + autoReconnect: true, + lateJoin: true, +}); + +// This works immediately โ€” even before / without server connection +db.subscribe('sensors.temperature.vienna', (temp) => { + renderDashboard(temp); +}); + +// Connection status is observable +bridge.onStatusChange((status) => { + // status: 'connecting' | 'connected' | 'disconnected' | 'reconnecting' + updateConnectionIndicator(status); +}); +``` + +**State transitions:** + +``` + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” connected โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ–บโ”‚ connecting โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ–บโ”‚ connected โ”‚ + โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”˜ + โ”‚ โ”‚ timeout/error โ”‚ close/error + โ”‚ โ–ผ โ–ผ + โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” + โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค disconnected โ”‚โ—„โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค reconnecting โ”‚ + reconnect โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ max โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + retries โ”‚ โ–ฒ + โ”‚ โ”‚ + โ””โ”€โ”€โ”€โ”€โ”€โ”˜ + backoff +``` + +While `disconnected` or `reconnecting`, the local AimDB instance keeps +functioning. Subscriptions fire on local writes, transforms execute, and +the UI remains interactive. The only difference is that no server data +arrives and outbound writes are queued. 
+ +--- + +#### Mode Selection Summary + +| | Mode 1: Local | Mode 2: Synced | Mode 3: Hybrid | +|---|---|---|---| +| **Network** | None | Required | Optional | +| **Server dependency** | None | Hard | Soft (graceful degradation) | +| **Contract enforcement** | Local serde | Local serde + server serde | Local serde + server serde when connected | +| **Offline writes** | Always works | Fails if disconnected | Queued, flushed on reconnect | +| **Late-join** | N/A | On connect | On connect / reconnect | +| **Data source** | Local `set()` / `source()` | Server push via `WsBridge` | Both | +| **Typical use** | Demos, tests, offline apps | Production dashboards | Field apps, PWAs, unreliable networks | +| **API** | `new WasmDb()` | `new WasmDb()` + `WsBridge.connect(db, url)` | Same as Mode 2 (degrades automatically) | + +The API is incremental: every app starts as Mode 1 by constructing `WasmDb`. +Adding `WsBridge.connect()` upgrades to Mode 2 or 3 depending on the +network โ€” no code change needed to handle offline fallback. + +### 7.2 `WsBridge` Implementation + +The bridge is the Rust-side component behind Modes 2 and 3. It wraps +`web_sys::WebSocket` and maps the server's wire protocol to local buffer +operations: + +```rust +pub struct WsBridge { + ws: web_sys::WebSocket, + db: Rc, // Shared โ€” caller retains access for get/set/subscribe + config: BridgeConfig, + state: Rc>, +} + +pub struct BridgeConfig { + pub url: String, + pub subscribe_topics: Vec, // MQTT wildcard patterns + pub auto_reconnect: bool, + pub late_join: bool, + pub max_offline_queue: usize, // pending writes while disconnected + pub backoff: Vec, // ms: [500, 1000, 2000, 4000, 8000] +} + +struct BridgeState { + status: ConnectionStatus, + pending_writes: VecDeque, // queued during disconnect + backoff_index: usize, +} +``` + +The bridge hooks into browser WebSocket callbacks: + +- **`on_open`** โ€” sends `ClientMessage::Subscribe { topics }` to begin + receiving data. 
Flushes any queued writes from `pending_writes`. +- **`on_message`** โ€” parses `ServerMessage`, dispatches by variant: + - `Data { topic, payload, ts }` / `Snapshot { topic, payload }` โ†’ + resolves `topic` to a `RecordKey`, deserializes `payload` via the + record's contract type, pushes to local `WasmBuffer`. + - `Subscribed { topics }` โ†’ log/event for UI connection indicator. + - `Error { code, topic, message }` โ†’ log to console; surface via + `bridge.onError()` callback if registered. +- **`on_close`** โ€” transition to `reconnecting`, schedule `setTimeout` + with backoff, retry. +- **`on_error`** โ€” ignored (the `close` event always follows). + +Topic-to-RecordKey resolution uses the same mapping established during +`configureRecord()`. If a `data` message arrives for an unknown topic, +it is logged and discarded (no panic). + +### 7.3 Wire Protocol Compatibility + +The bridge speaks the exact protocol defined in +`aimdb-websocket-connector/src/protocol.rs`: + +| Direction | Message | Bridge Behaviour | +|-----------|---------|------------------| +| Server โ†’ Client | `data` | Deserialize payload โ†’ push to local buffer | +| Server โ†’ Client | `snapshot` | Same as `data` (late-join seed) | +| Server โ†’ Client | `subscribed` | Emit `onStatusChange('connected')` | +| Server โ†’ Client | `error` | Log + invoke error callback | +| Server โ†’ Client | `pong` | Reset keepalive timer | +| Client โ†’ Server | `subscribe` | Sent on connect with `config.subscribe_topics` | +| Client โ†’ Server | `unsubscribe` | Sent on `bridge.unsubscribe(topics)` | +| Client โ†’ Server | `write` | Sent when local record is written (via tap) | +| Client โ†’ Server | `ping` | Periodic keepalive (default: 30s) | + +No custom protocol extensions are needed โ€” the WASM adapter is a standard +WebSocket connector client. + +--- + +## 8. 
Build & Packaging + +### 8.1 Build Toolchain + +```bash +# Install +rustup target add wasm32-unknown-unknown +cargo install wasm-pack + +# Build +cd aimdb-wasm-adapter +wasm-pack build --target web --out-dir pkg + +# Output: +# pkg/aimdb_wasm_adapter.js (JS glue) +# pkg/aimdb_wasm_adapter_bg.wasm (WASM binary) +# pkg/aimdb_wasm_adapter.d.ts (TypeScript declarations) +# pkg/package.json (npm-publishable) +``` + +### 8.2 npm Distribution + +`wasm-pack` generates a ready-to-publish npm package. In `aimdb-ui`: + +```json +{ + "dependencies": { + "aimdb-wasm": "file:../../aimdb/aimdb-wasm-adapter/pkg" + } +} +``` + +Or published to npm as `@aimdb/wasm` (preferred โ€” see Open Question #4). + +### 8.3 Bundle Size Budget + +| Component | Estimated Size (gzipped) | +|-----------|--------------------------| +| WASM binary (core + 3 contracts + buffers) | ~80โ€“120 KB | +| JS glue (wasm-bindgen) | ~5 KB | +| `serde_json` in WASM | ~30 KB | +| **Total** | **~115โ€“155 KB** | + +For comparison: Zod (~13 KB) + manual schema code (~5 KB) = ~18 KB, but +provides only validation โ€” no buffers, no sync, no migration, no offline. + +### 8.4 Makefile Integration + +```makefile +# Addition to /aimdb_ws/aimdb/Makefile + +.PHONY: wasm +wasm: ## Build WASM adapter + cd aimdb-wasm-adapter && wasm-pack build --target web --out-dir pkg + +.PHONY: wasm-test +wasm-test: ## Run WASM tests in headless browser + cd aimdb-wasm-adapter && wasm-pack test --headless --chrome +``` + +--- + +## 9. Testing Strategy + +### 9.1 Unit Tests (Rust, native target) + +Buffer logic, time conversion, and record configuration run as normal Rust +tests (`cargo test -p aimdb-wasm-adapter`). The `Send + Sync` unsafe impls +are excluded โ€” these only matter under the `wasm32` target. 
+ +### 9.2 WASM Integration Tests + +Use `wasm-bindgen-test` with a headless browser: + +```rust +#[cfg(target_arch = "wasm32")] +mod wasm_tests { + use wasm_bindgen_test::*; + wasm_bindgen_test_configure!(run_in_browser); + + #[wasm_bindgen_test] + async fn test_buffer_push_subscribe() { + let db = WasmDb::new().unwrap(); + db.configure_record( + "sensors.temperature.test", + &serde_wasm_bindgen::to_value(&serde_json::json!({ + "schemaType": "temperature", + "buffer": "SingleLatest" + })).unwrap(), + ).unwrap(); + + let (tx, rx) = futures::channel::oneshot::channel(); + let cb = Closure::once(move |val: JsValue| { tx.send(val).unwrap(); }); + db.subscribe("sensors.temperature.test", cb.as_ref().unchecked_ref()).unwrap(); + + db.set("sensors.temperature.test", serde_wasm_bindgen::to_value( + &Temperature::new(22.5, 1234567890000) + ).unwrap()).unwrap(); + + let received = rx.await.unwrap(); + let temp: Temperature = serde_wasm_bindgen::from_value(received).unwrap(); + assert_eq!(temp.celsius, 22.5); + } + + #[wasm_bindgen_test] + fn test_contract_enforcement_rejects_invalid() { + let db = WasmDb::new().unwrap(); + db.configure_record( + "sensors.temperature.test", + &serde_wasm_bindgen::to_value(&serde_json::json!({ + "schemaType": "temperature", + "buffer": "SingleLatest" + })).unwrap(), + ).unwrap(); + + // Missing required field โ†’ JsError + let bad = js_sys::Object::new(); + js_sys::Reflect::set(&bad, &"celsius".into(), &22.5.into()).unwrap(); + // Missing timestamp โ†’ deserialization fails + let result = db.set("sensors.temperature.test", bad.into()); + assert!(result.is_err()); + } + + #[wasm_bindgen_test] + async fn test_sleep_resolves() { + let adapter = WasmAdapter; + let start = adapter.now(); + adapter.sleep(adapter.millis(50)).await; + let elapsed = adapter.duration_since(adapter.now(), start).unwrap(); + assert!(elapsed.0 >= 45.0); // Allow 5ms jitter + } +} +``` + +### 9.3 CI Integration + +Add to the `check` target in the Makefile and GitHub 
Actions: + +```yaml +wasm-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + with: + targets: wasm32-unknown-unknown + - uses: nicolo-ribaudo/setup-wasm-pack@v1 + - run: wasm-pack test --headless --chrome -- -p aimdb-wasm-adapter +``` + +--- + +## 10. Migration Path for `aimdb-ui` + +### Phase 1: Drop-in Replacement (Low Risk) + +Keep the existing WebSocket hook architecture. Replace `JSON.parse` + blind +cast with WASM-validated deserialization: + +```typescript +// Before (useWebSocketConnection.ts) +ws.onmessage = (event) => { + const data = JSON.parse(event.data); // unvalidated + const normalized = normalizeMessage(data); // manual shape check + onMessageRef.current(normalized); +}; + +// After +import { validate } from '@aimdb/wasm'; + +ws.onmessage = (event) => { + const result = validate(event.data); // Rust serde in WASM + if (result.ok) { + onMessageRef.current(result.value); + } else { + console.warn('Contract violation:', result.error); + } +}; +``` + +### Phase 2: Local DB Instance + +Replace the WebSocket hooks entirely. AimDB WASM manages the WebSocket +connection, buffering, and reactive subscriptions: + +```typescript +// Before: useWebSocket() โ†’ useWebSocketConnection() โ†’ manual state management +// After: useRecord() โ†’ AimDB WASM handles everything + +function Dashboard() { + const temp = useRecord('sensors.temperature.vienna'); + const humidity = useRecord('sensors.humidity.vienna'); + // Reactive, validated, offline-capable +} +``` + +### Phase 3: Full Bidirectional Sync + +Enable browser-to-server writes. The `WsBridge` manages the connection +lifecycle including reconnection and late-join. The UI becomes a full +AimDB node in the mesh: + +``` +MQTT Sensors โ†’ Server AimDB โ†’ WS Connector โ†’ Browser AimDB โ†’ React UI + โ†‘ โ”‚ + โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ + (bidirectional) +``` + +--- + +## 11. 
Risk Analysis + +| Risk | Likelihood | Impact | Mitigation | +|------|-----------|--------|------------| +| `Send + Sync` unsoundness | Low | High | Single-threaded by construction; same pattern as Embassy; CI runs WASM tests with `wasm-bindgen-test` | +| Bundle size too large | Medium | Medium | Feature-gate contracts; tree-shake unused types; consider `serde_json` alternatives (`miniserde`, `nanoserde`) | +| `Performance.now()` precision | Low | Low | Only used for relative timing; sub-ms precision is sufficient | +| Browser API unavailability (SSR) | Medium | Low | Feature-gate `web-sys` calls; provide no-op stubs for SSR/Node | +| `spawn_local` backpressure | Medium | Medium | Buffer producers that yield; configurable channel capacity; drop-slow-consumer policy (same as server) | +| WASM init async requirement | Low | Low | `wasm-bindgen` handles init; React `Suspense` for loading state | + +--- + +## 12. Open Questions + +1. **Should `WasmBuffer` use dynamic sizing (Vec-backed) or const generics + (Embassy-style)?** + Recommendation: Dynamic. Browser has plentiful heap; const generics + complicate the JS API and provide no benefit without embedded memory + constraints. + +2. **Should the `WsBridge` reuse `aimdb-client` (AimX protocol) or speak + the WebSocket connector protocol directly?** + Recommendation: WebSocket connector protocol. `aimdb-client` uses Unix + sockets (not available in browsers). The WS connector protocol + (`ServerMessage`/`ClientMessage`) is JSON-based and designed for this. + +3. **How should schema registration work for user-defined contracts not in + `aimdb-data-contracts`?** + v1: Only pre-compiled contracts. v2: A `DynRecord` type backed by + `serde_json::Value` with optional JSON Schema validation. + +4. **npm package name: `aimdb-wasm` or `@aimdb/wasm`?** + Scoped name (`@aimdb/wasm`) is preferred if publishing to npm. + +--- + +## 13. 
Implementation Plan + +| Phase | Scope | Effort | +|-------|-------|--------| +| **P1: Skeleton** | Crate scaffolding, `WasmAdapter` struct, unsafe Send/Sync, `RuntimeAdapter` + `Logger` impls, compiles to `wasm32-unknown-unknown` | 1 day | +| **P2: Time + Spawn** | `TimeOps` with `Performance.now()` / `setTimeout`, `Spawn` with `spawn_local`, `wasm-bindgen-test` suite | 1 day | +| **P3: Buffers** | `WasmBuffer` (all 3 types), `BufferReader`, macro invocation, basic round-trip tests | 2 days | +| **P4: Bindings** | `#[wasm_bindgen]` API โ€” `WasmDb`, `get`/`set`/`subscribe`, `serde-wasm-bindgen` bridge | 2 days | +| **P5: Contract Integration** | Wire up `aimdb-data-contracts` types (Temperature, Humidity, GpsLocation), validation tests | 1 day | +| **P6: WsBridge** | WebSocket client in WASM, `ServerMessage`/`ClientMessage` protocol, reconnection | 2 days | +| **P7: React Hooks** | `useRecord()`, `useAimDb()`, integration with `aimdb-ui` | 1 day | +| **P8: CI & Docs** | GitHub Actions WASM job, README, wasm-pack publish, Makefile targets | 1 day | +| **Total** | | **~11 days** | + +--- + +## 14. 
References + +- Embassy adapter (precedent for single-threaded unsafe Send/Sync): + `aimdb-embassy-adapter/src/runtime.rs` +- WebSocket connector protocol: + `aimdb-websocket-connector/src/protocol.rs` +- Executor trait definitions: + `aimdb-executor/src/lib.rs` +- Extension macro: + `aimdb-core/src/ext_macros.rs` +- Data contracts codegen: + `aimdb-data-contracts/tests/export_ts.rs` +- wasm-bindgen guide: + https://rustwasm.github.io/docs/wasm-bindgen/ +- `serde-wasm-bindgen`: + https://docs.rs/serde-wasm-bindgen From 19fe389f8bb16aec1c8cc23e9ccbc3a188f4b3dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Fri, 27 Feb 2026 22:15:29 +0000 Subject: [PATCH 14/46] feat: enhance WASM adapter with WebSocket bridge and streamable data contracts --- Cargo.lock | 1 + aimdb-data-contracts/src/lib.rs | 3 + aimdb-data-contracts/src/streamable.rs | 162 +++++++++ aimdb-wasm-adapter/Cargo.toml | 15 +- aimdb-wasm-adapter/src/bindings.rs | 391 +++++++++++++++++++++ aimdb-wasm-adapter/src/lib.rs | 6 + aimdb-wasm-adapter/src/ws_bridge.rs | 456 +++++++++++++++++++++++++ 7 files changed, 1033 insertions(+), 1 deletion(-) create mode 100644 aimdb-data-contracts/src/streamable.rs create mode 100644 aimdb-wasm-adapter/src/bindings.rs create mode 100644 aimdb-wasm-adapter/src/ws_bridge.rs diff --git a/Cargo.lock b/Cargo.lock index a8110ec4..ed92fa7d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -277,6 +277,7 @@ name = "aimdb-wasm-adapter" version = "0.5.0" dependencies = [ "aimdb-core", + "aimdb-data-contracts", "aimdb-executor", "futures-util", "js-sys", diff --git a/aimdb-data-contracts/src/lib.rs b/aimdb-data-contracts/src/lib.rs index 7f7dbcc0..e0b7e3a3 100644 --- a/aimdb-data-contracts/src/lib.rs +++ b/aimdb-data-contracts/src/lib.rs @@ -36,6 +36,9 @@ extern crate alloc; pub mod contracts; +mod streamable; +pub use streamable::{is_streamable, Streamable}; + #[cfg(feature = "linkable")] mod linkable; diff --git a/aimdb-data-contracts/src/streamable.rs 
b/aimdb-data-contracts/src/streamable.rs new file mode 100644 index 00000000..58f078af --- /dev/null +++ b/aimdb-data-contracts/src/streamable.rs @@ -0,0 +1,162 @@ +//! Streamable trait for data contracts that can cross serialization boundaries. +//! +//! Types implementing [`Streamable`] can be transported across WebSocket, WASM, +//! and other wire boundaries with full contract enforcement (Rust serde +//! deserialization at the receiving end). +//! +//! # Design +//! +//! `Streamable` is a *capability marker* โ€” it combines [`SchemaType`] identity +//! with the `serde` bounds needed for type-erased dispatch at serialization +//! boundaries. The companion [`dispatch_streamable!`] macro centralises the +//! schema-name โ†’ concrete-type routing so that consumers (WASM adapter, +//! WebSocket bridge, CLI) never hardcode contract types. +//! +//! # Adding a new streamable contract +//! +//! 1. Define your struct with `Serialize + Deserialize` in `contracts/`. +//! 2. Implement `SchemaType` (unique `NAME`). +//! 3. `impl Streamable for MyType {}` in this module. +//! 4. Add a match arm to [`dispatch_streamable!`]. +//! +//! That's it โ€” every consumer that uses the macro picks up the new type +//! automatically. + +use crate::SchemaType; +use core::fmt::Debug; +use serde::{de::DeserializeOwned, Serialize}; + +/// Data contracts that can be transported across serialization boundaries. +/// +/// Implementing this trait signals that a contract type supports: +/// - Type-erased JSON/`JsValue` serialization and deserialization +/// - Registration in AimDB builders by schema name string +/// - Cross-boundary dispatch (WASM bindings, WebSocket bridge, CLI) +/// +/// # Bounds +/// +/// The super-trait bounds mirror what AimDB's typed record APIs require: +/// `Send + Sync + Clone + Debug + 'static` plus serde `Serialize` and +/// `DeserializeOwned`. All standard data contracts satisfy these. 
+/// +/// # Example +/// +/// ```rust +/// use aimdb_data_contracts::{SchemaType, Streamable}; +/// use aimdb_data_contracts::contracts::Temperature; +/// +/// // Temperature implements Streamable โ€” it can be used across boundaries +/// fn assert_streamable() {} +/// assert_streamable::(); +/// ``` +pub trait Streamable: + SchemaType + Serialize + DeserializeOwned + Send + Sync + Clone + Debug + 'static +{ +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Implementations for built-in contracts +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +use crate::contracts::{GpsLocation, Humidity, Temperature}; + +impl Streamable for Temperature {} +impl Streamable for Humidity {} +impl Streamable for GpsLocation {} + +/// Returns `true` if `name` matches a known [`Streamable`] contract's +/// [`SchemaType::NAME`]. +/// +/// Useful for early validation before dispatch. +pub fn is_streamable(name: &str) -> bool { + matches!( + name, + ::NAME + | ::NAME + | ::NAME + ) +} + +/// Dispatch a schema type name to a typed code block. +/// +/// Routes a runtime `&str` schema name to the concrete Rust type that +/// implements [`Streamable`], then executes `$body` with `$T` bound to +/// that type. Returns `Some(body_result)` on match, `None` if the schema +/// name is unknown. +/// +/// This is the **single source of truth** for the schema-name โ†’ type +/// mapping โ€” WASM bindings, WebSocket bridge, and other consumers all +/// use this macro instead of maintaining their own tables. 
+/// +/// # Usage +/// +/// ```rust,ignore +/// use aimdb_data_contracts::dispatch_streamable; +/// +/// let result = dispatch_streamable!(schema_name, |T| { +/// // `T` is the concrete type (Temperature, Humidity, GpsLocation, โ€ฆ) +/// builder.configure::(key, |reg| reg.buffer(cfg)); +/// }) +/// .ok_or_else(|| format!("Unknown schema: {schema_name}"))?; +/// ``` +/// +/// # Adding a new contract +/// +/// 1. `impl Streamable for NewType {}` (above) +/// 2. Add a match arm here. +#[macro_export] +macro_rules! dispatch_streamable { + ($schema_name:expr, |$T:ident| $body:expr) => { + match $schema_name { + <$crate::contracts::Temperature as $crate::SchemaType>::NAME => { + type $T = $crate::contracts::Temperature; + Some($body) + } + <$crate::contracts::Humidity as $crate::SchemaType>::NAME => { + type $T = $crate::contracts::Humidity; + Some($body) + } + <$crate::contracts::GpsLocation as $crate::SchemaType>::NAME => { + type $T = $crate::contracts::GpsLocation; + Some($body) + } + _ => None, + } + }; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn known_schemas_are_streamable() { + assert!(is_streamable("temperature")); + assert!(is_streamable("humidity")); + assert!(is_streamable("gps_location")); + } + + #[test] + fn unknown_schema_is_not_streamable() { + assert!(!is_streamable("unknown")); + assert!(!is_streamable("")); + } + + #[test] + fn dispatch_routes_correctly() { + let result = dispatch_streamable!("temperature", |T| ::NAME); + assert_eq!(result.unwrap(), "temperature"); + + let result = dispatch_streamable!("humidity", |T| ::NAME); + assert_eq!(result.unwrap(), "humidity"); + + let result = dispatch_streamable!("gps_location", |T| ::NAME); + assert_eq!(result.unwrap(), "gps_location"); + } + + #[test] + fn dispatch_rejects_unknown() { + let result = dispatch_streamable!("unknown", |_T| ()); + assert!(result.is_none()); + } +} diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml index dbe9e49b..3f536ad6 100644 --- 
a/aimdb-wasm-adapter/Cargo.toml +++ b/aimdb-wasm-adapter/Cargo.toml @@ -17,7 +17,13 @@ crate-type = ["cdylib", "rlib"] default = ["wasm-runtime"] # Runtime features -wasm-runtime = ["wasm-bindgen", "wasm-bindgen-futures", "js-sys", "web-sys"] +wasm-runtime = [ + "wasm-bindgen", + "wasm-bindgen-futures", + "js-sys", + "web-sys", + "serde-wasm-bindgen", +] [dependencies] # Executor traits (no_std compatible) @@ -28,13 +34,20 @@ aimdb-core = { version = "0.5.0", path = "../aimdb-core", default-features = fal "alloc", ] } +# Data contracts (alloc only โ€” no std) +aimdb-data-contracts = { version = "0.5.0", path = "../aimdb-data-contracts", default-features = false, features = [ + "alloc", +] } + # WASM bindings wasm-bindgen = { version = "0.2", optional = true } wasm-bindgen-futures = { version = "0.4", optional = true } js-sys = { version = "0.3", optional = true } web-sys = { version = "0.3", optional = true, features = [ "console", + "MessageEvent", "Performance", + "WebSocket", "Window", ] } diff --git a/aimdb-wasm-adapter/src/bindings.rs b/aimdb-wasm-adapter/src/bindings.rs new file mode 100644 index 00000000..66cc6655 --- /dev/null +++ b/aimdb-wasm-adapter/src/bindings.rs @@ -0,0 +1,391 @@ +//! `#[wasm_bindgen]` TypeScript-facing API +//! +//! Exposes a high-level facade to JavaScript/TypeScript. Users do not interact +//! with `Arc`, `RecordRegistrar`, or feature flags โ€” all of that is hidden +//! behind `WasmDb`, `configureRecord`, `get`, `set`, and `subscribe`. +//! +//! # Two-Phase Lifecycle +//! +//! 1. **Configuration** โ€” `new WasmDb()` + `configureRecord(โ€ฆ)` calls collect +//! record definitions without building the database. +//! 2. **Build** โ€” `await db.build()` compiles the configuration into a live +//! AimDB instance (buffers, records, typed storage). +//! 3. **Operation** โ€” `get` / `set` / `subscribe` interact with the live +//! database. Contract enforcement (Rust serde) happens at the WASM boundary. 
+ +extern crate alloc; + +use alloc::format; +use alloc::string::{String, ToString}; +use alloc::sync::Arc; +use alloc::vec::Vec; +use core::cell::Cell; +use core::fmt::Debug; + +use serde::de::DeserializeOwned; +use serde::{Deserialize, Serialize}; +use wasm_bindgen::prelude::*; + +use aimdb_core::buffer::BufferCfg; +use aimdb_core::builder::{AimDb, AimDbBuilder}; +use aimdb_core::record_id::StringKey; + +use aimdb_data_contracts::dispatch_streamable; + +use crate::WasmAdapter; + +// โ”€โ”€โ”€ Option parsing โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct RecordOptions { + schema_type: String, + buffer: BufferOption, +} + +#[derive(Deserialize)] +#[serde(untagged)] +enum BufferOption { + /// Simple string: `"SingleLatest"`, `"Mailbox"`, `"SpmcRing"` + Simple(String), + /// Object: `{ type: "SpmcRing", capacity: 200 }` + Config { + r#type: String, + capacity: Option, + }, +} + +fn parse_buffer_cfg(opt: &BufferOption) -> Result { + match opt { + BufferOption::Simple(s) => match s.as_str() { + "SingleLatest" => Ok(BufferCfg::SingleLatest), + "Mailbox" => Ok(BufferCfg::Mailbox), + "SpmcRing" => Ok(BufferCfg::SpmcRing { capacity: 1024 }), + _ => Err(JsError::new(&format!("Unknown buffer type: {s}"))), + }, + BufferOption::Config { r#type, capacity } => match r#type.as_str() { + "SpmcRing" => Ok(BufferCfg::SpmcRing { + capacity: capacity.unwrap_or(1024), + }), + "SingleLatest" => Ok(BufferCfg::SingleLatest), + "Mailbox" => Ok(BufferCfg::Mailbox), + _ => Err(JsError::new(&format!("Unknown buffer type: {}", r#type))), + }, + } +} + +fn is_known_schema(name: &str) -> bool { + aimdb_data_contracts::is_streamable(name) +} + +// โ”€โ”€โ”€ Collected config (pre-build) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + 
+struct RecordConfig { + key: String, + schema_type: String, + buffer_cfg: BufferCfg, +} + +// โ”€โ”€โ”€ WasmDb โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// AimDB instance compiled to WebAssembly. +/// +/// # Example (TypeScript) +/// ```ts +/// const db = new WasmDb(); +/// db.configureRecord('sensors.temperature.vienna', { +/// schemaType: 'temperature', +/// buffer: 'SingleLatest', +/// }); +/// await db.build(); +/// db.set('sensors.temperature.vienna', { celsius: 22.5, timestamp: Date.now() }); +/// const t = db.get('sensors.temperature.vienna'); // validated by Rust serde +/// ``` +#[wasm_bindgen] +pub struct WasmDb { + /// Pre-build record configurations. `None` after `build()`. + configs: Option>, + /// Live database handle. `None` before `build()`. + db: Option>, + /// Maps record key โ†’ schema type name (always populated). + schema_map: Vec<(String, String)>, +} + +impl Default for WasmDb { + fn default() -> Self { + Self::new() + } +} + +#[wasm_bindgen] +impl WasmDb { + /// Create a new (unconfigured) AimDB WASM instance. + #[wasm_bindgen(constructor)] + pub fn new() -> WasmDb { + WasmDb { + configs: Some(Vec::new()), + db: None, + schema_map: Vec::new(), + } + } + + /// Register a record before building the database. 
+ /// + /// `options` is a JS object: + /// ```json + /// { + /// "schemaType": "temperature", + /// "buffer": "SingleLatest" // or { "type": "SpmcRing", "capacity": 100 } + /// } + /// ``` + #[wasm_bindgen(js_name = "configureRecord")] + pub fn configure_record(&mut self, record_key: &str, options: JsValue) -> Result<(), JsError> { + let configs = self + .configs + .as_mut() + .ok_or_else(|| JsError::new("Cannot configure records after build()"))?; + + let opts: RecordOptions = serde_wasm_bindgen::from_value(options) + .map_err(|e| JsError::new(&format!("Invalid options: {e}")))?; + + if !is_known_schema(&opts.schema_type) { + return Err(JsError::new(&format!( + "Unknown schema type: {}", + opts.schema_type + ))); + } + + let buffer_cfg = parse_buffer_cfg(&opts.buffer)?; + + self.schema_map + .push((record_key.to_string(), opts.schema_type.clone())); + + configs.push(RecordConfig { + key: record_key.to_string(), + schema_type: opts.schema_type, + buffer_cfg, + }); + + Ok(()) + } + + /// Build the database from the collected configuration. + /// + /// Must be called exactly once, after all `configureRecord()` calls and + /// before any `get` / `set` / `subscribe`. + pub async fn build(&mut self) -> Result<(), JsError> { + let configs = self + .configs + .take() + .ok_or_else(|| JsError::new("Database already built"))?; + + let rt = Arc::new(WasmAdapter); + let mut builder = AimDbBuilder::new().runtime(rt); + + for config in &configs { + apply_record_config(&mut builder, config)?; + } + + let db = builder + .build() + .await + .map_err(|e| JsError::new(&format!("Build failed: {e:?}")))?; + + self.db = Some(db); + Ok(()) + } + + /// Get the current value of a record (returns JS object or `undefined`). + /// + /// The value is the latest snapshot โ€” it does not wait for a new push. + /// Returns `undefined` if no value has been produced yet. 
+ pub fn get(&self, record_key: &str) -> Result { + let (db, schema) = self.resolve(record_key)?; + dispatch_streamable!(schema, |T| get_typed::(db, record_key)) + .ok_or_else(|| JsError::new(&format!("Unknown schema type: {schema}")))? + } + + /// Set a record value (validates via Rust serde deserialization). + /// + /// Throws `JsError` if the payload fails contract validation (e.g. missing + /// required fields) or the record key is unknown. + pub fn set(&mut self, record_key: &str, value: JsValue) -> Result<(), JsError> { + let (db, schema) = self.resolve(record_key)?; + dispatch_streamable!(schema, |T| set_typed::(db, record_key, value)) + .ok_or_else(|| JsError::new(&format!("Unknown schema type: {schema}")))? + } + + /// Subscribe to record updates. Returns an unsubscribe function. + /// + /// `callback` is invoked on every buffer push with the validated value. + pub fn subscribe( + &self, + record_key: &str, + callback: &js_sys::Function, + ) -> Result { + let (db, schema) = self.resolve(record_key)?; + dispatch_streamable!(schema, |T| subscribe_typed::(db, record_key, callback)) + .ok_or_else(|| JsError::new(&format!("Unknown schema type: {schema}")))? + } + + /// Returns `true` if the database has been built. + #[wasm_bindgen(js_name = "isBuilt")] + pub fn is_built(&self) -> bool { + self.db.is_some() + } +} + +// โ”€โ”€โ”€ Private helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +impl WasmDb { + /// Resolve a record key to the live DB handle and its schema type name. + fn resolve(&self, record_key: &str) -> Result<(&AimDb, &str), JsError> { + let db = self + .db + .as_ref() + .ok_or_else(|| JsError::new("Database not built. 
Call build() first."))?; + + let schema = self + .schema_map + .iter() + .find(|(k, _)| k == record_key) + .map(|(_, v)| v.as_str()) + .ok_or_else(|| JsError::new(&format!("Unknown record key: {record_key}")))?; + + Ok((db, schema)) + } +} + +// โ”€โ”€โ”€ Typed dispatch โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Apply a single `RecordConfig` to the builder, dispatching on schema type. +fn apply_record_config( + builder: &mut AimDbBuilder, + config: &RecordConfig, +) -> Result<(), JsError> { + use crate::WasmRecordRegistrarExt; + + let key = StringKey::intern(config.key.clone()); + let cfg = config.buffer_cfg.clone(); + + dispatch_streamable!(config.schema_type.as_str(), |T| { + builder.configure::(key, |reg| { + reg.buffer(cfg); + }); + }) + .ok_or_else(|| JsError::new(&format!("Unknown schema type: {}", config.schema_type)))?; + Ok(()) +} + +/// Read the latest snapshot for record `key` and convert to `JsValue`. +fn get_typed(db: &AimDb, key: &str) -> Result +where + T: Send + Sync + 'static + Debug + Clone + Serialize, +{ + let inner = db.inner(); + let typed = inner + .get_typed_record_by_key::(key) + .map_err(|e| JsError::new(&format!("{e:?}")))?; + + match typed.latest() { + Some(val) => serde_wasm_bindgen::to_value(val.get()) + .map_err(|e| JsError::new(&format!("Serialization failed: {e}"))), + None => Ok(JsValue::UNDEFINED), + } +} + +/// Deserialize `JsValue` โ†’ `T` (contract enforcement), then push to buffer. 
+fn set_typed(db: &AimDb, key: &str, value: JsValue) -> Result<(), JsError> +where + T: Send + Sync + 'static + Debug + Clone + DeserializeOwned, +{ + let val: T = serde_wasm_bindgen::from_value(value) + .map_err(|e| JsError::new(&format!("Contract violation: {e}")))?; + + let inner = db.inner(); + let typed = inner + .get_typed_record_by_key::(key) + .map_err(|e| JsError::new(&format!("{e:?}")))?; + + // TypedRecord::produce() is declared `async` but its body is synchronous: + // it updates `latest_snapshot` and calls `buf.push(val)` โ€” both complete + // immediately on WasmBuffer. We poll the future exactly once. + poll_sync(typed.produce(val)); + Ok(()) +} + +/// Subscribe to a record's buffer and invoke `callback` on each new value. +/// Returns a JS function that cancels the subscription when called. +fn subscribe_typed( + db: &AimDb, + key: &str, + callback: &js_sys::Function, +) -> Result +where + T: Send + Sync + 'static + Debug + Clone + Serialize, +{ + let mut reader = db + .subscribe::(key) + .map_err(|e| JsError::new(&format!("{e:?}")))?; + + let callback = callback.clone(); + let cancelled = alloc::rc::Rc::new(Cell::new(false)); + let cancelled_inner = cancelled.clone(); + + wasm_bindgen_futures::spawn_local(async move { + while !cancelled_inner.get() { + match reader.recv().await { + Ok(val) => { + if let Ok(js) = serde_wasm_bindgen::to_value(&val) { + // Invoke the JS callback: callback(value) + let _ = callback.call1(&JsValue::NULL, &js); + } + } + Err(_) => break, + } + } + }); + + // Return an unsubscribe function + let unsub = Closure::once_into_js(move || { + cancelled.set(true); + }); + Ok(unsub) +} + +// โ”€โ”€โ”€ Sync future polling โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Poll a future that is known to resolve in a single poll (no real I/O). 
+/// +/// Used for `TypedRecord::produce()` whose body is synchronous despite being +/// declared `async fn` โ€” it just updates a snapshot and pushes to a buffer. +/// +/// # Panics +/// +/// Panics if the future returns `Pending`. This should never happen for +/// operations on `WasmBuffer` (which are single-threaded, non-blocking). +fn poll_sync(f: F) -> F::Output { + use core::pin::Pin; + use core::task::{Context, Poll, RawWaker, RawWakerVTable, Waker}; + + // SAFETY: the future is stack-local and will not be moved after pinning. + let mut f = f; + let f = unsafe { Pin::new_unchecked(&mut f) }; + + // No-op waker โ€” produce() does not need to be woken. + fn noop(_: *const ()) {} + fn clone_noop(p: *const ()) -> RawWaker { + RawWaker::new(p, &VTABLE) + } + const VTABLE: RawWakerVTable = RawWakerVTable::new(clone_noop, noop, noop, noop); + + let waker = unsafe { Waker::from_raw(RawWaker::new(core::ptr::null(), &VTABLE)) }; + let mut cx = Context::from_waker(&waker); + + match f.poll(&mut cx) { + Poll::Ready(val) => val, + Poll::Pending => { + panic!("poll_sync: future returned Pending (expected synchronous completion)") + } + } +} diff --git a/aimdb-wasm-adapter/src/lib.rs b/aimdb-wasm-adapter/src/lib.rs index 290af350..7cd5d089 100644 --- a/aimdb-wasm-adapter/src/lib.rs +++ b/aimdb-wasm-adapter/src/lib.rs @@ -39,6 +39,12 @@ pub mod logger; pub mod runtime; pub mod time; +#[cfg(feature = "wasm-runtime")] +pub mod bindings; + +#[cfg(feature = "wasm-runtime")] +pub mod ws_bridge; + // Re-export the adapter type at crate root pub use runtime::WasmAdapter; diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs new file mode 100644 index 00000000..4a9edae5 --- /dev/null +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -0,0 +1,456 @@ +//! WebSocket bridge connecting browser AimDB to a server instance. +//! +//! `WsBridge` wraps `web_sys::WebSocket` and speaks the same wire protocol as +//! 
`aimdb-websocket-connector` (`ServerMessage` / `ClientMessage`). It maps +//! incoming server data to local buffer pushes and forwards local writes to +//! the server. +//! +//! # Modes +//! +//! - **Synchronized** โ€” browser instance mirrors server records. +//! - **Hybrid** โ€” works offline with local records, syncs when connected. +//! +//! See design doc ยง7 for full details. + +extern crate alloc; + +use alloc::boxed::Box; +use alloc::format; +use alloc::rc::Rc; +use alloc::string::{String, ToString}; +use alloc::vec::Vec; +use core::cell::RefCell; + +use serde::{Deserialize, Serialize}; +use wasm_bindgen::prelude::*; + +use crate::bindings::WasmDb; + +// โ”€โ”€โ”€ Connection status โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Observable connection state (matches design doc ยง7.1). +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ConnectionStatus { + Connecting, + Connected, + Disconnected, + Reconnecting, +} + +impl ConnectionStatus { + fn as_str(self) -> &'static str { + match self { + Self::Connecting => "connecting", + Self::Connected => "connected", + Self::Disconnected => "disconnected", + Self::Reconnecting => "reconnecting", + } + } +} + +// โ”€โ”€โ”€ Wire protocol (mirrors aimdb-websocket-connector/src/protocol.rs) โ”€โ”€โ”€ + +/// Server โ†’ Client message. +#[derive(Debug, Clone, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +enum ServerMessage { + Data { + topic: String, + payload: Option, + #[allow(dead_code)] + ts: u64, + }, + Snapshot { + topic: String, + payload: Option, + }, + Subscribed { + #[allow(dead_code)] + topics: Vec, + }, + Error { + #[allow(dead_code)] + code: String, + #[allow(dead_code)] + topic: Option, + #[allow(dead_code)] + message: String, + }, + Pong, +} + +/// Client โ†’ Server message. 
+#[derive(Debug, Clone, Serialize)] +#[serde(tag = "type", rename_all = "snake_case")] +enum ClientMessage { + Subscribe { + topics: Vec, + }, + #[allow(dead_code)] + Unsubscribe { + topics: Vec, + }, + Write { + topic: String, + payload: serde_json::Value, + }, + #[allow(dead_code)] + Ping, +} + +// โ”€โ”€โ”€ Bridge configuration โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Configuration for `WsBridge.connect()`. +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BridgeOptions { + /// MQTT-style topic patterns to subscribe to (e.g. `["sensors/#"]`). + #[serde(default)] + pub subscribe_topics: Vec, + /// Re-connect automatically on close (default: true). + #[serde(default = "default_true")] + pub auto_reconnect: bool, + /// Request snapshots on (re)connect (default: true). + #[serde(default = "default_true")] + pub late_join: bool, + /// Maximum queued writes while disconnected (default: 256). + #[serde(default = "default_queue_size")] + pub max_offline_queue: usize, + /// Keepalive interval in milliseconds (default: 30 000). + #[serde(default = "default_keepalive_ms")] + pub keepalive_ms: u32, +} + +fn default_true() -> bool { + true +} +fn default_queue_size() -> usize { + 256 +} +fn default_keepalive_ms() -> u32 { + 30_000 +} + +impl Default for BridgeOptions { + fn default() -> Self { + Self { + subscribe_topics: Vec::new(), + auto_reconnect: true, + late_join: true, + max_offline_queue: 256, + keepalive_ms: 30_000, + } + } +} + +// โ”€โ”€โ”€ Bridge state โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +struct BridgeState { + status: ConnectionStatus, + pending_writes: alloc::collections::VecDeque, + backoff_index: usize, + /// Closures retained to prevent GC. 
+ _on_open: Option>, + _on_message: Option>, + _on_close: Option>, + _on_error: Option>, +} + +// โ”€โ”€โ”€ WsBridge โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// WebSocket bridge connecting the in-browser AimDB to a remote server. +/// +/// # Example (TypeScript) +/// ```ts +/// const bridge = WsBridge.connect(db, 'wss://api.example.com/ws', { +/// subscribeTopics: ['sensors/#'], +/// autoReconnect: true, +/// lateJoin: true, +/// }); +/// bridge.onStatusChange((status) => updateIndicator(status)); +/// // ... +/// bridge.disconnect(); +/// ``` +#[wasm_bindgen] +pub struct WsBridge { + ws: web_sys::WebSocket, + db: Rc>, + config: BridgeOptions, + state: Rc>, + /// User-provided status change callback. + on_status_change: Rc>>, + /// Backoff schedule in milliseconds. + backoff: Vec, +} + +// SAFETY: wasm32-unknown-unknown is single-threaded. +unsafe impl Send for WsBridge {} +unsafe impl Sync for WsBridge {} + +#[wasm_bindgen] +impl WsBridge { + /// Open a WebSocket connection and begin synchronising records. + /// + /// - `db` โ€” a built `WasmDb` instance. + /// - `url` โ€” WebSocket endpoint (e.g. `wss://api.example.com/ws`). + /// - `options` โ€” optional JS object with `subscribeTopics`, `autoReconnect`, + /// `lateJoin`, etc. + pub fn connect(db: WasmDb, url: &str, options: JsValue) -> Result { + let config: BridgeOptions = if options.is_undefined() || options.is_null() { + BridgeOptions::default() + } else { + serde_wasm_bindgen::from_value(options) + .map_err(|e| JsError::new(&format!("Invalid bridge options: {e}")))? + }; + + let ws = web_sys::WebSocket::new(url) + .map_err(|e| JsError::new(&format!("WebSocket open failed: {e:?}")))?; + + // We use text (JSON) frames โ€” no binary type configuration needed. 
+ + let db = Rc::new(RefCell::new(db)); + let state = Rc::new(RefCell::new(BridgeState { + status: ConnectionStatus::Connecting, + pending_writes: alloc::collections::VecDeque::new(), + backoff_index: 0, + _on_open: None, + _on_message: None, + _on_close: None, + _on_error: None, + })); + let on_status_change: Rc>> = Rc::new(RefCell::new(None)); + let backoff = alloc::vec![500, 1_000, 2_000, 4_000, 8_000]; + + let mut bridge = WsBridge { + ws, + db, + config, + state, + on_status_change, + backoff, + }; + + bridge.install_callbacks(); + Ok(bridge) + } + + /// Register a callback for connection status changes. + /// + /// ```ts + /// bridge.onStatusChange((status: string) => { /* โ€ฆ */ }); + /// ``` + #[wasm_bindgen(js_name = "onStatusChange")] + pub fn on_status_change(&self, callback: js_sys::Function) { + *self.on_status_change.borrow_mut() = Some(callback); + } + + /// Send a value to the server for a given topic. + /// + /// If the WebSocket is disconnected, the message is queued (up to + /// `maxOfflineQueue`). Queued messages are flushed on reconnect. + pub fn write(&self, topic: &str, payload: JsValue) -> Result<(), JsError> { + let json_payload: serde_json::Value = serde_wasm_bindgen::from_value(payload) + .map_err(|e| JsError::new(&format!("Payload serialization failed: {e}")))?; + + let msg = ClientMessage::Write { + topic: topic.to_string(), + payload: json_payload, + }; + + let state = self.state.borrow(); + if state.status == ConnectionStatus::Connected { + self.send_json(&msg)?; + } else { + drop(state); + let mut state = self.state.borrow_mut(); + if state.pending_writes.len() < self.config.max_offline_queue { + state.pending_writes.push_back(msg); + } + // else: drop (overflow policy) + } + Ok(()) + } + + /// Close the WebSocket and stop reconnection attempts. 
+ pub fn disconnect(&self) { + let mut state = self.state.borrow_mut(); + state.status = ConnectionStatus::Disconnected; + state._on_open = None; + state._on_message = None; + state._on_close = None; + state._on_error = None; + drop(state); + + let _ = self.ws.close(); + self.emit_status(ConnectionStatus::Disconnected); + } + + /// Current connection status as a string. + pub fn status(&self) -> String { + self.state.borrow().status.as_str().to_string() + } +} + +// โ”€โ”€โ”€ Internal โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +impl WsBridge { + fn install_callbacks(&mut self) { + // on_open + { + let state = self.state.clone(); + let ws = self.ws.clone(); + let topics = self.config.subscribe_topics.clone(); + let on_status = self.on_status_change.clone(); + let closure = Closure::wrap(Box::new(move || { + { + let mut s = state.borrow_mut(); + s.status = ConnectionStatus::Connected; + s.backoff_index = 0; + + // Flush pending writes + while let Some(msg) = s.pending_writes.pop_front() { + if let Ok(json) = serde_json::to_string(&msg) { + let _ = ws.send_with_str(&json); + } + } + } + + // Subscribe to configured topics + if !topics.is_empty() { + let sub = ClientMessage::Subscribe { + topics: topics.clone(), + }; + if let Ok(json) = serde_json::to_string(&sub) { + let _ = ws.send_with_str(&json); + } + } + + // Notify status change + if let Some(cb) = on_status.borrow().as_ref() { + let _ = cb.call1( + &JsValue::NULL, + &JsValue::from_str(ConnectionStatus::Connected.as_str()), + ); + } + }) as Box); + self.ws.set_onopen(Some(closure.as_ref().unchecked_ref())); + self.state.borrow_mut()._on_open = Some(closure); + } + + // on_message + { + let db = self.db.clone(); + let closure = Closure::wrap(Box::new(move |event: web_sys::MessageEvent| { + if let Some(text) = event.data().as_string() { + if let Ok(msg) = 
serde_json::from_str::(&text) { + Self::handle_server_message(&db, msg); + } + } + }) as Box); + self.ws + .set_onmessage(Some(closure.as_ref().unchecked_ref())); + self.state.borrow_mut()._on_message = Some(closure); + } + + // on_close + { + let state = self.state.clone(); + let on_status = self.on_status_change.clone(); + let auto_reconnect = self.config.auto_reconnect; + let backoff = self.backoff.clone(); + let closure = Closure::wrap(Box::new(move || { + let current = state.borrow().status; + if current == ConnectionStatus::Disconnected { + return; // user-initiated disconnect + } + + if auto_reconnect { + let mut s = state.borrow_mut(); + s.status = ConnectionStatus::Reconnecting; + let delay = backoff + .get(s.backoff_index) + .copied() + .unwrap_or(*backoff.last().unwrap_or(&8_000)); + s.backoff_index = (s.backoff_index + 1).min(backoff.len() - 1); + drop(s); + + if let Some(cb) = on_status.borrow().as_ref() { + let _ = cb.call1( + &JsValue::NULL, + &JsValue::from_str(ConnectionStatus::Reconnecting.as_str()), + ); + } + + // Schedule reconnect (actual reconnect requires creating a new WS โ€” + // this is a placeholder; full reconnect needs the URL + re-install). + let _delay = delay; + // TODO: implement setTimeout-based reconnect with new WebSocket + } else { + state.borrow_mut().status = ConnectionStatus::Disconnected; + if let Some(cb) = on_status.borrow().as_ref() { + let _ = cb.call1( + &JsValue::NULL, + &JsValue::from_str(ConnectionStatus::Disconnected.as_str()), + ); + } + } + }) as Box); + self.ws.set_onclose(Some(closure.as_ref().unchecked_ref())); + self.state.borrow_mut()._on_close = Some(closure); + } + + // on_error โ€” WebSocket always fires `close` after `error`, so just log. 
+ { + let closure = Closure::wrap(Box::new(move || { + web_sys::console::warn_1(&"WsBridge: WebSocket error".into()); + }) as Box); + self.ws.set_onerror(Some(closure.as_ref().unchecked_ref())); + self.state.borrow_mut()._on_error = Some(closure); + } + } + + /// Route a decoded server message to the local database. + fn handle_server_message(db: &Rc>, msg: ServerMessage) { + match msg { + ServerMessage::Data { topic, payload, .. } + | ServerMessage::Snapshot { topic, payload } => { + if let Some(payload) = payload { + let js = serde_wasm_bindgen::to_value(&payload).unwrap_or(JsValue::UNDEFINED); + let mut db = db.borrow_mut(); + // Push to local buffer via set() which does contract validation. + let _ = db.set(&topic, js); + } + } + ServerMessage::Subscribed { .. } => { + // ACK โ€” no action needed beyond status change (already handled in on_open). + } + ServerMessage::Error { message, topic, .. } => { + let detail = match topic { + Some(t) => format!("WsBridge error on topic '{t}': {message}"), + None => format!("WsBridge error: {message}"), + }; + web_sys::console::error_1(&detail.into()); + } + ServerMessage::Pong => { + // Keepalive ACK โ€” reset timer if needed. + } + } + } + + /// Serialize a `ClientMessage` and send as text frame. + fn send_json(&self, msg: &ClientMessage) -> Result<(), JsError> { + let json = serde_json::to_string(msg) + .map_err(|e| JsError::new(&format!("JSON serialization failed: {e}")))?; + self.ws + .send_with_str(&json) + .map_err(|e| JsError::new(&format!("WebSocket send failed: {e:?}")))?; + Ok(()) + } + + /// Emit status to the registered callback. 
+ fn emit_status(&self, status: ConnectionStatus) { + if let Some(cb) = self.on_status_change.borrow().as_ref() { + let _ = cb.call1(&JsValue::NULL, &JsValue::from_str(status.as_str())); + } + } +} From 32cde6deb52792594949fd414d0ef834b8e94be2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Fri, 27 Feb 2026 22:38:57 +0000 Subject: [PATCH 15/46] feat: update README and design document for AimDB WASM adapter; add React hooks for database integration --- aimdb-wasm-adapter/README.md | 144 +++++++++-- aimdb-wasm-adapter/src/react/useAimDb.tsx | 275 ++++++++++++++++++++++ docs/design/025-M12-wasm-adapter.md | 4 +- 3 files changed, 406 insertions(+), 17 deletions(-) create mode 100644 aimdb-wasm-adapter/src/react/useAimDb.tsx diff --git a/aimdb-wasm-adapter/README.md b/aimdb-wasm-adapter/README.md index fa5b4eee..a5d09d05 100644 --- a/aimdb-wasm-adapter/README.md +++ b/aimdb-wasm-adapter/README.md @@ -19,34 +19,148 @@ layer (Zod, JSON Schema) on the TypeScript side. 
| Edge / Cloud | `aimdb-tokio-adapter` | `tokio::sync` channels | `tokio::spawn` | | **Browser** | **`aimdb-wasm-adapter`** | **`Rc>`** | **`spawn_local`** | -## Usage - -```rust -use aimdb_wasm_adapter::WasmAdapter; -use aimdb_core::{AimDbBuilder, buffer::BufferCfg}; -use std::sync::Arc; - -let adapter = WasmAdapter; -let db = AimDbBuilder::new() - .runtime(Arc::new(adapter)) - .build() - .unwrap(); +## Architecture + +The adapter is split into several focused modules: + +| Module | Purpose | +|--------|---------| +| `runtime.rs` | `WasmAdapter` โ€” `RuntimeAdapter` + `Spawn` impl using `wasm_bindgen_futures::spawn_local` | +| `time.rs` | `TimeOps` โ€” `performance.now()` + `setTimeout`-based sleep via `gloo-timers` | +| `logger.rs` | `Logger` โ€” maps log levels to `console.log / debug / warn / error` | +| `buffer.rs` | `WasmBuffer` โ€” SPMC Ring, SingleLatest, Mailbox on `Rc>` | +| `bindings.rs` | `WasmDb` โ€” `#[wasm_bindgen]` facade: `configureRecord`, `get`, `set`, `subscribe` | +| `ws_bridge.rs` | `WsBridge` โ€” WebSocket bridge to remote AimDB server (AimX wire protocol) | +| `react/` | React hooks โ€” `useRecord`, `useSetRecord`, `useBridge` | + +## JavaScript / TypeScript API + +### WasmDb + +```typescript +import init, { WasmDb } from '@aimdb/wasm'; + +await init(); +const db = new WasmDb(); + +// Configure records with Rust data contracts +db.configureRecord('sensors.temperature.vienna', { + schemaType: 'temperature', + buffer: 'SingleLatest', +}); + +await db.build(); + +// Get (returns deserialized JS object validated by Rust serde) +const temp = db.get('sensors.temperature.vienna'); +console.log(temp.celsius); + +// Set (Rust serde validates the payload) +db.set('sensors.temperature.vienna', { celsius: 22.5, timestamp: Date.now() }); + +// Subscribe (callback fires on every buffer push) +const unsub = db.subscribe('sensors.temperature.vienna', (value) => { + console.log('New temperature:', value.celsius); +}); ``` +### WsBridge + +Connect the 
browser-local AimDB to a remote server:
+
+```typescript
+import { WsBridge } from '@aimdb/wasm';
+
+const bridge = WsBridge.connect(db, 'wss://api.example.com/ws', {
+  subscribeTopics: ['sensors/#'],
+  autoReconnect: true,
+  lateJoin: true,
+});
+
+bridge.onStatusChange((status) => {
+  console.log('Connection:', status); // 'connected' | 'reconnecting' | ...
+});
+
+bridge.write('commands.setpoint', { target: 21.0 });
+bridge.disconnect();
+```
+
+### React Hooks
+
+```tsx
+import { AimDbProvider, useRecord, useSetRecord, useBridge } from '@aimdb/wasm/react';
+
+function App() {
+  return (
+    <AimDbProvider config={config}>
+      <Dashboard />
+    </AimDbProvider>
+  );
+}
+
+function Dashboard() {
+  const temp = useRecord('sensors.temperature.vienna');
+  if (!temp) return <div>Loading…</div>;
+  return <span>{temp.celsius.toFixed(1)}°C</span>;
+}
+```
+
+**Available hooks:**
+
+| Hook | Returns | Purpose |
+|------|---------|---------|
+| `useRecord(key)` | `T \| null` | Subscribe to record, re-render on updates |
+| `useSetRecord(key)` | `(value: T) => void` | Write to record with contract validation |
+| `useAimDb()` | `WasmDb \| null` | Raw database access for advanced usage |
+| `useBridge()` | `WsBridge \| null` | Connection status and bridge control |
+
+## Data Contract Enforcement
+
+All `get` / `set` / `subscribe` calls go through the `Streamable` trait
+defined in `aimdb-data-contracts`. The `dispatch_streamable!` macro maps
+schema type names to Rust types and performs serde validation:
+
+```
+TypeScript value → serde_wasm_bindgen → Rust T: Streamable → buffer push
+```
+
+Adding a new contract requires only one change: implement `Streamable` for
+the new type in `aimdb-data-contracts` and add it to `dispatch_streamable!`.
+
 ## Build
 
 ```bash
-# Install target
+# Install dependencies
 rustup target add wasm32-unknown-unknown
 cargo install wasm-pack
 
-# Build
+# Compile to WASM
 wasm-pack build --target web --out-dir pkg
 
-# Test
+# Run headless browser tests
 wasm-pack test --headless --chrome
 ```
 
+From the workspace root (`make` targets):
+
+```bash
+make wasm      # Build WASM adapter
+make wasm-test # Run WASM tests
+make check     # Full workspace check (includes WASM)
+```
+
+## Feature Flags
+
+| Feature | Default | Purpose |
+|---------|---------|---------|
+| `wasm-runtime` | ✅ | Full browser runtime (bindings, WsBridge, web-sys) |
+| `alloc` | ✅ | Core buffer + record support (no_std compatible) |
+
 ## License
 
 Apache-2.0
diff --git a/aimdb-wasm-adapter/src/react/useAimDb.tsx b/aimdb-wasm-adapter/src/react/useAimDb.tsx
new file mode 100644
index 00000000..27eb0d2b
--- /dev/null
+++ b/aimdb-wasm-adapter/src/react/useAimDb.tsx
@@ -0,0 +1,275 @@
+/**
+ * React hooks for AimDB WASM integration.
+ * + * Drop-in hooks that subscribe to AimDB records backed by a local WASM + * database instance with full Rust serde contract enforcement. + * + * @example + * ```tsx + * import { AimDbProvider, useRecord } from '@aimdb/wasm/react'; + * + * function App() { + * return ( + * + * + * + * ); + * } + * + * function Dashboard() { + * const temp = useRecord('sensors.temperature.vienna'); + * if (!temp) return ; + * return {temp.celsius}ยฐC; + * } + * ``` + * + * @module + */ + +import { + createContext, + useContext, + useEffect, + useRef, + useState, + type ReactNode, +} from "react"; + +// โ”€โ”€ Types โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/** Re-export from the WASM module (generated by wasm-pack). */ +import type { WasmDb, WsBridge } from "../pkg/aimdb_wasm_adapter"; + +/** Record configuration entry for the provider. */ +export interface RecordConfig { + /** AimDB record key (e.g. `"sensors.temperature.vienna"`). */ + key: string; + /** Schema type name matching `SchemaType::NAME` (e.g. `"temperature"`). */ + schemaType: string; + /** Buffer type โ€” string shorthand or object with capacity. */ + buffer: string | { type: string; capacity?: number }; +} + +/** Optional WebSocket bridge configuration. */ +export interface BridgeConfig { + /** WebSocket endpoint (e.g. `"wss://api.example.com/ws"`). */ + url: string; + /** MQTT-style topic patterns to subscribe to. */ + subscribeTopics?: string[]; + /** Auto-reconnect on disconnect (default: `true`). */ + autoReconnect?: boolean; + /** Request snapshots on (re)connect (default: `true`). */ + lateJoin?: boolean; +} + +/** Full provider configuration. */ +export interface AimDbConfig { + /** Records to register before building the database. */ + records: RecordConfig[]; + /** Optional WsBridge configuration for server sync. 
*/ + bridge?: BridgeConfig; +} + +// โ”€โ”€ Context โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +interface AimDbContextValue { + db: WasmDb | null; + bridge: WsBridge | null; + ready: boolean; +} + +const AimDbContext = createContext({ + db: null, + bridge: null, + ready: false, +}); + +// โ”€โ”€ Provider โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +interface AimDbProviderProps { + config: AimDbConfig; + /** Shown while the WASM module initializes and the DB builds. */ + fallback?: ReactNode; + children: ReactNode; +} + +/** + * Initializes the WASM AimDB instance and makes it available to descendants. + * + * - Loads the WASM module (async). + * - Calls `configureRecord()` for each entry in `config.records`. + * - Calls `await db.build()`. + * - Optionally opens a `WsBridge` if `config.bridge` is provided. + * - Renders `children` once the database is ready. + */ +export function AimDbProvider({ + config, + fallback, + children, +}: AimDbProviderProps) { + const [ctx, setCtx] = useState({ + db: null, + bridge: null, + ready: false, + }); + + useEffect(() => { + let cancelled = false; + + (async () => { + // Dynamic import so the WASM module is only loaded when the provider + // mounts. Tree-shaking removes this path entirely if unused. + const wasm = await import("../pkg/aimdb_wasm_adapter"); + if (cancelled) return; + + const db = new wasm.WasmDb(); + + // Configure all records + for (const rec of config.records) { + db.configureRecord(rec.key, { + schemaType: rec.schemaType, + buffer: rec.buffer, + }); + } + + // Build the database (runs type registration, buffer allocation, etc.) 
+ await db.build(); + if (cancelled) return; + + // Optionally connect WsBridge + let bridge: WsBridge | null = null; + if (config.bridge) { + bridge = wasm.WsBridge.connect(db, config.bridge.url, { + subscribeTopics: config.bridge.subscribeTopics ?? [], + autoReconnect: config.bridge.autoReconnect ?? true, + lateJoin: config.bridge.lateJoin ?? true, + }); + } + + setCtx({ db, bridge, ready: true }); + })(); + + return () => { + cancelled = true; + if (ctx.bridge) ctx.bridge.disconnect(); + if (ctx.db) ctx.db.free(); + }; + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + if (!ctx.ready) return <>{fallback ?? null}; + + return {children}; +} + +// โ”€โ”€ Hooks โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/** + * Subscribe to an AimDB record by its key. + * + * Returns the latest validated value (deserialized by Rust serde in WASM) + * or `null` if no value has been produced yet. Re-renders on every buffer + * push. + * + * @typeParam T - The TypeScript shape of the record (e.g. `Temperature`). + * @param recordKey - The AimDB record key (e.g. `"sensors.temperature.vienna"`). 
+ * + * @example + * ```tsx + * interface Temperature { celsius: number; timestamp: number } + * + * function TempCard({ city }: { city: string }) { + * const temp = useRecord(`sensors.temperature.${city}`); + * if (!temp) return ; + * return {temp.celsius.toFixed(1)}ยฐC; + * } + * ``` + */ +export function useRecord(recordKey: string): T | null { + const { db, ready } = useContext(AimDbContext); + const [value, setValue] = useState(null); + + useEffect(() => { + if (!ready || !db) return; + + // Seed with current value (if available) + try { + const current = db.get(recordKey) as T | undefined; + if (current !== undefined) setValue(current); + } catch { + // Record may not have a value yet โ€” that's fine + } + + // Subscribe for live updates + const unsub = db.subscribe(recordKey, (v: T) => setValue(v)); + + return () => { + if (typeof unsub === "function") unsub(); + }; + }, [db, ready, recordKey]); + + return value; +} + +/** + * Write a value to an AimDB record. + * + * Returns a setter function that validates the payload through Rust serde. + * Throws if contract validation fails (missing fields, wrong types, etc.). + * + * @typeParam T - The TypeScript shape of the record. + * @param recordKey - The AimDB record key. + * + * @example + * ```tsx + * const setTarget = useSetRecord('commands.setpoint.room1'); + * setTarget({ target_celsius: 21.0, timestamp: Date.now() }); + * ``` + */ +export function useSetRecord(recordKey: string): (value: T) => void { + const { db, ready } = useContext(AimDbContext); + + return (value: T) => { + if (!ready || !db) { + throw new Error("AimDB not ready. Wrap your app in ."); + } + db.set(recordKey, value); + }; +} + +/** + * Access the raw `WasmDb` instance for advanced operations. + * + * Prefer `useRecord` / `useSetRecord` for standard use cases. + */ +export function useAimDb(): WasmDb | null { + return useContext(AimDbContext).db; +} + +/** + * Access the `WsBridge` instance for connection status monitoring. 
+ * + * Returns `null` if no bridge was configured or the DB is not ready. + * + * @example + * ```tsx + * function ConnectionIndicator() { + * const bridge = useBridge(); + * const [status, setStatus] = useState('connecting'); + * useEffect(() => { + * bridge?.onStatusChange(setStatus); + * }, [bridge]); + * return {status}; + * } + * ``` + */ +export function useBridge(): WsBridge | null { + return useContext(AimDbContext).bridge; +} diff --git a/docs/design/025-M12-wasm-adapter.md b/docs/design/025-M12-wasm-adapter.md index e301b7d0..0ac54d89 100644 --- a/docs/design/025-M12-wasm-adapter.md +++ b/docs/design/025-M12-wasm-adapter.md @@ -1,8 +1,8 @@ # Design: AimDB WASM Adapter -**Status:** ๐Ÿ“‹ Proposed +**Status:** โœ… Implemented **Milestone:** M12 โ€” Browser Runtime -**Revision:** 1 (2026-02-27) +**Revision:** 2 (2025-07-18) **Crate:** `aimdb-wasm-adapter` (open source, `aimdb` workspace) --- From 8e67e0bedf95420df5e7f7c335caca5eaee72f37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Sun, 1 Mar 2026 19:04:00 +0000 Subject: [PATCH 16/46] feat: implement WebSocket client connector for AimDB - Added `WsClientConnectorBuilder` for building WebSocket client connections. - Implemented `WsClientConnectorImpl` to manage WebSocket connections, including inbound routing, outbound publishing, reconnection logic, and keepalive pings. - Introduced shared wire protocol types in `aimdb-ws-protocol` for communication between clients and servers. - Defined `ServerMessage` and `ClientMessage` enums for structured message handling. - Implemented topic matching logic to support MQTT-style wildcards. - Added tests for message serialization and topic matching functionality. 
--- Cargo.lock | 10 + Cargo.toml | 1 + Makefile | 21 +- aimdb-wasm-adapter/Cargo.toml | 3 + aimdb-wasm-adapter/src/bindings.rs | 78 ++- aimdb-wasm-adapter/src/buffer.rs | 73 ++- aimdb-wasm-adapter/src/react/useAimDb.tsx | 49 +- aimdb-wasm-adapter/src/ws_bridge.rs | 575 +++++++++-------- aimdb-websocket-connector/Cargo.toml | 27 +- .../src/client/builder.rs | 224 +++++++ .../src/client/connector.rs | 585 ++++++++++++++++++ aimdb-websocket-connector/src/client/mod.rs | 29 + aimdb-websocket-connector/src/lib.rs | 94 ++- aimdb-websocket-connector/src/protocol.rs | 196 +----- aimdb-ws-protocol/Cargo.toml | 16 + aimdb-ws-protocol/src/lib.rs | 289 +++++++++ tools/aimdb-mcp/src/tools/architecture.rs | 2 +- 17 files changed, 1781 insertions(+), 491 deletions(-) create mode 100644 aimdb-websocket-connector/src/client/builder.rs create mode 100644 aimdb-websocket-connector/src/client/connector.rs create mode 100644 aimdb-websocket-connector/src/client/mod.rs create mode 100644 aimdb-ws-protocol/Cargo.toml create mode 100644 aimdb-ws-protocol/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index ed92fa7d..abf31115 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -279,6 +279,7 @@ dependencies = [ "aimdb-core", "aimdb-data-contracts", "aimdb-executor", + "aimdb-ws-protocol", "futures-util", "js-sys", "serde", @@ -296,6 +297,7 @@ version = "0.1.0" dependencies = [ "aimdb-core", "aimdb-executor", + "aimdb-ws-protocol", "async-trait", "axum", "dashmap", @@ -308,6 +310,14 @@ dependencies = [ "tracing", ] +[[package]] +name = "aimdb-ws-protocol" +version = "0.1.0" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "aligned" version = "0.4.3" diff --git a/Cargo.toml b/Cargo.toml index 509e0f83..9a85b386 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,6 +14,7 @@ members = [ "aimdb-mqtt-connector", "aimdb-knx-connector", "aimdb-websocket-connector", + "aimdb-ws-protocol", "aimdb-wasm-adapter", "tools/aimdb-cli", "tools/aimdb-mcp", diff --git a/Makefile b/Makefile index 
f260efb5..5ee38382 100644 --- a/Makefile +++ b/Makefile @@ -73,6 +73,8 @@ build: cargo build --package aimdb-persistence-sqlite @printf "$(YELLOW) โ†’ Building KNX connector$(NC)\n" cargo build --package aimdb-knx-connector --features "std,tokio-runtime" + @printf "$(YELLOW) โ†’ Building WS protocol$(NC)\n" + cargo build --package aimdb-ws-protocol @printf "$(YELLOW) โ†’ Building WebSocket connector$(NC)\n" cargo build --package aimdb-websocket-connector --features "tokio-runtime" @printf "$(YELLOW) โ†’ Building WASM adapter$(NC)\n" @@ -110,12 +112,14 @@ test: cargo test --package aimdb-mqtt-connector --features "std,tokio-runtime" @printf "$(YELLOW) โ†’ Testing KNX connector$(NC)\n" cargo test --package aimdb-knx-connector --features "std,tokio-runtime" + @printf "$(YELLOW) โ†’ Testing WS protocol$(NC)\n" + cargo test --package aimdb-ws-protocol @printf "$(YELLOW) โ†’ Testing WebSocket connector$(NC)\n" cargo test --package aimdb-websocket-connector --features "tokio-runtime" fmt: @printf "$(GREEN)Formatting code (workspace members only)...$(NC)\n" - @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-ws-protocol aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo 
embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Formatting $$pkg$(NC)\n"; \ cargo fmt -p $$pkg 2>/dev/null || true; \ done @@ -124,7 +128,7 @@ fmt: fmt-check: @printf "$(GREEN)Checking code formatting (workspace members only)...$(NC)\n" @FAILED=0; \ - for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-ws-protocol aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Checking $$pkg$(NC)\n"; \ if ! 
cargo fmt -p $$pkg -- --check 2>&1; then \ printf "$(RED)โŒ Formatting check failed for $$pkg$(NC)\n"; \ @@ -177,6 +181,8 @@ clippy: cargo clippy --package aimdb-mqtt-connector --target thumbv7em-none-eabihf --no-default-features --features "embassy-runtime,defmt" -- -D warnings @printf "$(YELLOW) โ†’ Clippy on KNX connector (embassy + defmt)$(NC)\n" cargo clippy --package aimdb-knx-connector --target thumbv7em-none-eabihf --no-default-features --features "embassy-runtime,defmt" -- -D warnings + @printf "$(YELLOW) โ†’ Clippy on WS protocol$(NC)\n" + cargo clippy --package aimdb-ws-protocol --all-targets -- -D warnings @printf "$(YELLOW) โ†’ Clippy on WebSocket connector$(NC)\n" cargo clippy --package aimdb-websocket-connector --features "tokio-runtime" --all-targets -- -D warnings @printf "$(YELLOW) โ†’ Clippy on WASM adapter$(NC)\n" @@ -199,6 +205,7 @@ doc: cargo doc --package aimdb-mcp --no-deps cargo doc --package aimdb-persistence --no-deps cargo doc --package aimdb-persistence-sqlite --no-deps + cargo doc --package aimdb-ws-protocol --no-deps cargo doc --package aimdb-websocket-connector --features "tokio-runtime" --no-deps @cp -r target/doc/* target/doc-final/cloud/ @printf "$(YELLOW) โ†’ Building embedded documentation$(NC)\n" @@ -373,15 +380,19 @@ publish: @cargo publish -p aimdb-knx-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-websocket-connector (13/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-ws-protocol (13/16)$(NC)\n" + @cargo publish -p aimdb-ws-protocol + @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" + @sleep 10 + @printf "$(YELLOW) โ†’ Publishing aimdb-websocket-connector (14/16)$(NC)\n" @cargo publish -p aimdb-websocket-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-cli (14/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-cli 
(15/16)$(NC)\n" @cargo publish -p aimdb-cli @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-mcp (15/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-mcp (16/16)$(NC)\n" @cargo publish -p aimdb-mcp @printf "$(GREEN)โœ“ All crates published successfully!$(NC)\n" @printf "$(BLUE)๐ŸŽ‰ AimDB v$(shell grep '^version' Cargo.toml | head -1 | cut -d '"' -f 2) is now live on crates.io!$(NC)\n" diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml index 3f536ad6..7a0468b3 100644 --- a/aimdb-wasm-adapter/Cargo.toml +++ b/aimdb-wasm-adapter/Cargo.toml @@ -34,6 +34,9 @@ aimdb-core = { version = "0.5.0", path = "../aimdb-core", default-features = fal "alloc", ] } +# Shared WebSocket wire protocol (no_std + alloc) +aimdb-ws-protocol = { version = "0.1.0", path = "../aimdb-ws-protocol", default-features = false } + # Data contracts (alloc only โ€” no std) aimdb-data-contracts = { version = "0.5.0", path = "../aimdb-data-contracts", default-features = false, features = [ "alloc", diff --git a/aimdb-wasm-adapter/src/bindings.rs b/aimdb-wasm-adapter/src/bindings.rs index 66cc6655..1cbe85ae 100644 --- a/aimdb-wasm-adapter/src/bindings.rs +++ b/aimdb-wasm-adapter/src/bindings.rs @@ -15,11 +15,11 @@ extern crate alloc; +use alloc::boxed::Box; use alloc::format; use alloc::string::{String, ToString}; use alloc::sync::Arc; use alloc::vec::Vec; -use core::cell::Cell; use core::fmt::Debug; use serde::de::DeserializeOwned; @@ -32,6 +32,7 @@ use aimdb_core::record_id::StringKey; use aimdb_data_contracts::dispatch_streamable; +use crate::ws_bridge::WsBridge; use crate::WasmAdapter; // โ”€โ”€โ”€ Option parsing โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -233,6 +234,33 @@ impl WasmDb { pub fn is_built(&self) -> bool { self.db.is_some() } + + /// Connect a WebSocket bridge 
to this database for server synchronization. + /// + /// The database remains usable for local `get()` / `set()` / `subscribe()` + /// after the bridge is opened โ€” the bridge gets a cheap clone of the + /// internal `AimDb` handle (two `Arc` pointer copies). + /// + /// # Example (TypeScript) + /// ```ts + /// const bridge = db.connectBridge('wss://api.example.com/ws', { + /// subscribeTopics: ['sensors/#'], + /// autoReconnect: true, + /// }); + /// bridge.onStatusChange((status) => console.log(status)); + /// ``` + #[wasm_bindgen(js_name = "connectBridge")] + pub fn connect_bridge(&self, url: &str, options: JsValue) -> Result { + let db = self + .db + .as_ref() + .ok_or_else(|| JsError::new("Database not built. Call build() first."))? + .clone(); // cheap: two Arc pointer copies + + let schema_map = self.schema_map.clone(); + + WsBridge::new_internal(db, schema_map, url, options) + } } // โ”€โ”€โ”€ Private helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -316,6 +344,10 @@ where /// Subscribe to a record's buffer and invoke `callback` on each new value. /// Returns a JS function that cancels the subscription when called. +/// +/// Uses `futures_util::future::select` to race `recv()` against a cancel +/// future so the unsubscribe closure can break the loop immediately โ€” even +/// when `recv()` is blocked waiting for the next push. 
fn subscribe_typed( db: &AimDb, key: &str, @@ -329,28 +361,46 @@ where .map_err(|e| JsError::new(&format!("{e:?}")))?; let callback = callback.clone(); - let cancelled = alloc::rc::Rc::new(Cell::new(false)); - let cancelled_inner = cancelled.clone(); + let (cancel_token, cancel_handle) = crate::buffer::cancel_pair(); wasm_bindgen_futures::spawn_local(async move { - while !cancelled_inner.get() { - match reader.recv().await { - Ok(val) => { + use core::task::Poll; + use futures_util::future::{select, Either}; + + loop { + // Future that resolves when cancel() is called. + let cancel_fut = core::future::poll_fn(|cx| { + if cancel_token.is_cancelled() { + Poll::Ready(()) + } else { + cancel_token.register_waker(cx.waker()); + Poll::Pending + } + }); + + let recv_fut = reader.recv(); + + futures_util::pin_mut!(cancel_fut); + futures_util::pin_mut!(recv_fut); + + match select(recv_fut, cancel_fut).await { + Either::Left((Ok(val), _)) => { if let Ok(js) = serde_wasm_bindgen::to_value(&val) { - // Invoke the JS callback: callback(value) let _ = callback.call1(&JsValue::NULL, &js); } } - Err(_) => break, + Either::Left((Err(_), _)) => break, // buffer error + Either::Right(((), _)) => break, // cancelled } } }); - // Return an unsubscribe function - let unsub = Closure::once_into_js(move || { - cancelled.set(true); - }); - Ok(unsub) + // Closure::wrap (not once_into_js) so it can be called multiple times + // (React StrictMode calls cleanup twice). + let unsub = Closure::wrap(Box::new(move || { + cancel_handle.cancel(); + }) as Box); + Ok(unsub.into_js_value()) } // โ”€โ”€โ”€ Sync future polling โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -364,7 +414,7 @@ where /// /// Panics if the future returns `Pending`. This should never happen for /// operations on `WasmBuffer` (which are single-threaded, non-blocking). 
-fn poll_sync(f: F) -> F::Output { +pub(crate) fn poll_sync(f: F) -> F::Output { use core::pin::Pin; use core::task::{Context, Poll, RawWaker, RawWakerVTable, Waker}; diff --git a/aimdb-wasm-adapter/src/buffer.rs b/aimdb-wasm-adapter/src/buffer.rs index e3f801e4..c1915ae4 100644 --- a/aimdb-wasm-adapter/src/buffer.rs +++ b/aimdb-wasm-adapter/src/buffer.rs @@ -19,7 +19,7 @@ use alloc::boxed::Box; use alloc::collections::VecDeque; use alloc::rc::Rc; use alloc::vec::Vec; -use core::cell::RefCell; +use core::cell::{Cell, RefCell}; use core::future::Future; use core::pin::Pin; use core::task::{Context, Poll, Waker}; @@ -321,3 +321,74 @@ fn wake_all(wakers: &mut Vec) { waker.wake(); } } + +// ============================================================================ +// Cancellation +// ============================================================================ + +/// Shared state between [`CancelToken`] and [`CancelHandle`]. +struct CancelInner { + cancelled: Cell, + waker: RefCell>, +} + +/// Token held by the subscription task (reader side). +/// +/// Polled in a `futures_util::future::select` alongside `reader.recv()`. +/// When [`CancelHandle::cancel()`] fires, the stored waker is woken and +/// `is_cancelled()` returns `true`, causing the select to resolve. +pub(crate) struct CancelToken { + inner: Rc, +} + +/// Handle held by the JS unsubscribe closure. +/// +/// Calling [`cancel()`](CancelHandle::cancel) sets the flag and wakes the +/// subscription task so it exits immediately โ€” even if `recv()` is blocked. +pub(crate) struct CancelHandle { + inner: Rc, +} + +// SAFETY: wasm32 is single-threaded โ€” no concurrent access possible +unsafe impl Send for CancelToken {} +unsafe impl Sync for CancelToken {} +unsafe impl Send for CancelHandle {} +unsafe impl Sync for CancelHandle {} + +/// Create a linked cancel token/handle pair. 
+pub(crate) fn cancel_pair() -> (CancelToken, CancelHandle) { + let inner = Rc::new(CancelInner { + cancelled: Cell::new(false), + waker: RefCell::new(None), + }); + ( + CancelToken { + inner: inner.clone(), + }, + CancelHandle { inner }, + ) +} + +impl CancelToken { + /// Returns `true` if [`CancelHandle::cancel()`] has been called. + pub(crate) fn is_cancelled(&self) -> bool { + self.inner.cancelled.get() + } + + /// Store the current task's waker so [`CancelHandle::cancel()`] can wake it. + pub(crate) fn register_waker(&self, waker: &Waker) { + *self.inner.waker.borrow_mut() = Some(waker.clone()); + } +} + +impl CancelHandle { + /// Signal cancellation and wake the subscription task. + /// + /// Idempotent โ€” safe to call multiple times (React StrictMode). + pub(crate) fn cancel(&self) { + self.inner.cancelled.set(true); + if let Some(w) = self.inner.waker.borrow_mut().take() { + w.wake(); + } + } +} diff --git a/aimdb-wasm-adapter/src/react/useAimDb.tsx b/aimdb-wasm-adapter/src/react/useAimDb.tsx index 27eb0d2b..cade45c9 100644 --- a/aimdb-wasm-adapter/src/react/useAimDb.tsx +++ b/aimdb-wasm-adapter/src/react/useAimDb.tsx @@ -35,6 +35,7 @@ import { createContext, + useCallback, useContext, useEffect, useRef, @@ -103,10 +104,10 @@ interface AimDbProviderProps { /** * Initializes the WASM AimDB instance and makes it available to descendants. * - * - Loads the WASM module (async). + * - Loads and initializes the WASM module (async). * - Calls `configureRecord()` for each entry in `config.records`. * - Calls `await db.build()`. - * - Optionally opens a `WsBridge` if `config.bridge` is provided. + * - Optionally opens a `WsBridge` via `db.connectBridge()`. * - Renders `children` once the database is ready. */ export function AimDbProvider({ @@ -120,6 +121,11 @@ export function AimDbProvider({ ready: false, }); + // Refs so the cleanup closure always sees the latest handles + // (avoids the stale-closure bug where cleanup captures initial null state). 
+ const dbRef = useRef(null); + const bridgeRef = useRef(null); + useEffect(() => { let cancelled = false; @@ -127,6 +133,10 @@ export function AimDbProvider({ // Dynamic import so the WASM module is only loaded when the provider // mounts. Tree-shaking removes this path entirely if unused. const wasm = await import("../pkg/aimdb_wasm_adapter"); + + // Initialize the WASM binary (required by wasm-pack --target web + // before any constructors can be used). Idempotent on re-mount. + await wasm.default(); if (cancelled) return; const db = new wasm.WasmDb(); @@ -143,14 +153,17 @@ export function AimDbProvider({ await db.build(); if (cancelled) return; - // Optionally connect WsBridge + dbRef.current = db; + + // Optionally connect WsBridge (db.connectBridge borrows โ€” db stays valid) let bridge: WsBridge | null = null; if (config.bridge) { - bridge = wasm.WsBridge.connect(db, config.bridge.url, { + bridge = db.connectBridge(config.bridge.url, { subscribeTopics: config.bridge.subscribeTopics ?? [], autoReconnect: config.bridge.autoReconnect ?? true, lateJoin: config.bridge.lateJoin ?? true, }); + bridgeRef.current = bridge; } setCtx({ db, bridge, ready: true }); @@ -158,8 +171,15 @@ export function AimDbProvider({ return () => { cancelled = true; - if (ctx.bridge) ctx.bridge.disconnect(); - if (ctx.db) ctx.db.free(); + // Cleanup uses refs โ€” always sees the latest handles + if (bridgeRef.current) { + bridgeRef.current.disconnect(); + bridgeRef.current = null; + } + if (dbRef.current) { + dbRef.current.free(); + dbRef.current = null; + } }; // eslint-disable-next-line react-hooks/exhaustive-deps }, []); @@ -236,12 +256,17 @@ export function useRecord(recordKey: string): T | null { export function useSetRecord(recordKey: string): (value: T) => void { const { db, ready } = useContext(AimDbContext); - return (value: T) => { - if (!ready || !db) { - throw new Error("AimDB not ready. 
Wrap your app in ."); - } - db.set(recordKey, value); - }; + return useCallback( + (value: T) => { + if (!ready || !db) { + throw new Error( + "AimDB not ready. Wrap your app in .", + ); + } + db.set(recordKey, value); + }, + [db, ready, recordKey], + ); } /** diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs index 4a9edae5..25073d23 100644 --- a/aimdb-wasm-adapter/src/ws_bridge.rs +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -1,7 +1,7 @@ //! WebSocket bridge connecting browser AimDB to a server instance. //! -//! `WsBridge` wraps `web_sys::WebSocket` and speaks the same wire protocol as -//! `aimdb-websocket-connector` (`ServerMessage` / `ClientMessage`). It maps +//! `WsBridge` wraps `web_sys::WebSocket` and speaks the shared wire protocol +//! from [`aimdb_ws_protocol`] (`ServerMessage` / `ClientMessage`). It maps //! incoming server data to local buffer pushes and forwards local writes to //! the server. //! @@ -20,11 +20,16 @@ use alloc::rc::Rc; use alloc::string::{String, ToString}; use alloc::vec::Vec; use core::cell::RefCell; +use core::fmt::Debug; -use serde::{Deserialize, Serialize}; +use serde::de::DeserializeOwned; +use serde::Deserialize; use wasm_bindgen::prelude::*; -use crate::bindings::WasmDb; +use aimdb_core::builder::AimDb; +use aimdb_data_contracts::dispatch_streamable; + +use crate::WasmAdapter; // โ”€โ”€โ”€ Connection status โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -48,60 +53,14 @@ impl ConnectionStatus { } } -// โ”€โ”€โ”€ Wire protocol (mirrors aimdb-websocket-connector/src/protocol.rs) โ”€โ”€โ”€ - -/// Server โ†’ Client message. 
-#[derive(Debug, Clone, Deserialize)] -#[serde(tag = "type", rename_all = "snake_case")] -enum ServerMessage { - Data { - topic: String, - payload: Option, - #[allow(dead_code)] - ts: u64, - }, - Snapshot { - topic: String, - payload: Option, - }, - Subscribed { - #[allow(dead_code)] - topics: Vec, - }, - Error { - #[allow(dead_code)] - code: String, - #[allow(dead_code)] - topic: Option, - #[allow(dead_code)] - message: String, - }, - Pong, -} +// โ”€โ”€โ”€ Wire protocol (shared with aimdb-websocket-connector) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ -/// Client โ†’ Server message. -#[derive(Debug, Clone, Serialize)] -#[serde(tag = "type", rename_all = "snake_case")] -enum ClientMessage { - Subscribe { - topics: Vec, - }, - #[allow(dead_code)] - Unsubscribe { - topics: Vec, - }, - Write { - topic: String, - payload: serde_json::Value, - }, - #[allow(dead_code)] - Ping, -} +use aimdb_ws_protocol::{ClientMessage, ServerMessage}; // โ”€โ”€โ”€ Bridge configuration โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ -/// Configuration for `WsBridge.connect()`. -#[derive(Deserialize)] +/// Configuration for `WasmDb.connectBridge()`. +#[derive(Deserialize, Clone)] #[serde(rename_all = "camelCase")] pub struct BridgeOptions { /// MQTT-style topic patterns to subscribe to (e.g. `["sensors/#"]`). @@ -149,6 +108,8 @@ struct BridgeState { status: ConnectionStatus, pending_writes: alloc::collections::VecDeque, backoff_index: usize, + /// Active keepalive interval ID (cleared on close/disconnect). + keepalive_id: Option, /// Closures retained to prevent GC. 
_on_open: Option>, _on_message: Option>, @@ -156,13 +117,33 @@ struct BridgeState { _on_error: Option>, } +// โ”€โ”€โ”€ Shared reconnect context โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Shared state needed by both the initial connect and reconnect paths. +/// +/// Wrapped in `Rc` so closures can cheaply reference it without cloning +/// every field individually (reduces parameter explosion). +struct SharedCtx { + db: AimDb, + schema_map: Vec<(String, String)>, + state: Rc>, + on_status: Rc>>, + config: BridgeOptions, + backoff: Vec, + url: String, + ws_cell: Rc>, +} + // โ”€โ”€โ”€ WsBridge โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ /// WebSocket bridge connecting the in-browser AimDB to a remote server. /// +/// Created via `db.connectBridge(url, options)`. The database remains usable +/// for local `get()` / `set()` / `subscribe()` after the bridge is opened. +/// /// # Example (TypeScript) /// ```ts -/// const bridge = WsBridge.connect(db, 'wss://api.example.com/ws', { +/// const bridge = db.connectBridge('wss://api.example.com/ws', { /// subscribeTopics: ['sensors/#'], /// autoReconnect: true, /// lateJoin: true, @@ -173,14 +154,7 @@ struct BridgeState { /// ``` #[wasm_bindgen] pub struct WsBridge { - ws: web_sys::WebSocket, - db: Rc>, - config: BridgeOptions, - state: Rc>, - /// User-provided status change callback. - on_status_change: Rc>>, - /// Backoff schedule in milliseconds. - backoff: Vec, + ctx: Rc, } // SAFETY: wasm32-unknown-unknown is single-threaded. @@ -189,51 +163,6 @@ unsafe impl Sync for WsBridge {} #[wasm_bindgen] impl WsBridge { - /// Open a WebSocket connection and begin synchronising records. - /// - /// - `db` โ€” a built `WasmDb` instance. - /// - `url` โ€” WebSocket endpoint (e.g. 
`wss://api.example.com/ws`). - /// - `options` โ€” optional JS object with `subscribeTopics`, `autoReconnect`, - /// `lateJoin`, etc. - pub fn connect(db: WasmDb, url: &str, options: JsValue) -> Result { - let config: BridgeOptions = if options.is_undefined() || options.is_null() { - BridgeOptions::default() - } else { - serde_wasm_bindgen::from_value(options) - .map_err(|e| JsError::new(&format!("Invalid bridge options: {e}")))? - }; - - let ws = web_sys::WebSocket::new(url) - .map_err(|e| JsError::new(&format!("WebSocket open failed: {e:?}")))?; - - // We use text (JSON) frames โ€” no binary type configuration needed. - - let db = Rc::new(RefCell::new(db)); - let state = Rc::new(RefCell::new(BridgeState { - status: ConnectionStatus::Connecting, - pending_writes: alloc::collections::VecDeque::new(), - backoff_index: 0, - _on_open: None, - _on_message: None, - _on_close: None, - _on_error: None, - })); - let on_status_change: Rc>> = Rc::new(RefCell::new(None)); - let backoff = alloc::vec![500, 1_000, 2_000, 4_000, 8_000]; - - let mut bridge = WsBridge { - ws, - db, - config, - state, - on_status_change, - backoff, - }; - - bridge.install_callbacks(); - Ok(bridge) - } - /// Register a callback for connection status changes. /// /// ```ts @@ -241,7 +170,7 @@ impl WsBridge { /// ``` #[wasm_bindgen(js_name = "onStatusChange")] pub fn on_status_change(&self, callback: js_sys::Function) { - *self.on_status_change.borrow_mut() = Some(callback); + *self.ctx.on_status.borrow_mut() = Some(callback); } /// Send a value to the server for a given topic. 
@@ -257,13 +186,14 @@ impl WsBridge { payload: json_payload, }; - let state = self.state.borrow(); + let state = self.ctx.state.borrow(); if state.status == ConnectionStatus::Connected { - self.send_json(&msg)?; + drop(state); + send_json(&self.ctx.ws_cell.borrow(), &msg)?; } else { drop(state); - let mut state = self.state.borrow_mut(); - if state.pending_writes.len() < self.config.max_offline_queue { + let mut state = self.ctx.state.borrow_mut(); + if state.pending_writes.len() < self.ctx.config.max_offline_queue { state.pending_writes.push_back(msg); } // else: drop (overflow policy) @@ -273,184 +203,333 @@ impl WsBridge { /// Close the WebSocket and stop reconnection attempts. pub fn disconnect(&self) { - let mut state = self.state.borrow_mut(); + let mut state = self.ctx.state.borrow_mut(); state.status = ConnectionStatus::Disconnected; + // Clear keepalive timer + if let Some(id) = state.keepalive_id.take() { + if let Some(window) = web_sys::window() { + window.clear_interval_with_handle(id); + } + } + // Drop closures to break Rc cycles state._on_open = None; state._on_message = None; state._on_close = None; state._on_error = None; drop(state); - let _ = self.ws.close(); - self.emit_status(ConnectionStatus::Disconnected); + let _ = self.ctx.ws_cell.borrow().close(); + emit_status(&self.ctx.on_status, ConnectionStatus::Disconnected); } /// Current connection status as a string. 
pub fn status(&self) -> String { - self.state.borrow().status.as_str().to_string() + self.ctx.state.borrow().status.as_str().to_string() } } -// โ”€โ”€โ”€ Internal โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +// โ”€โ”€โ”€ Internal constructor โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ impl WsBridge { - fn install_callbacks(&mut self) { - // on_open - { - let state = self.state.clone(); - let ws = self.ws.clone(); - let topics = self.config.subscribe_topics.clone(); - let on_status = self.on_status_change.clone(); - let closure = Closure::wrap(Box::new(move || { - { - let mut s = state.borrow_mut(); - s.status = ConnectionStatus::Connected; - s.backoff_index = 0; - - // Flush pending writes - while let Some(msg) = s.pending_writes.pop_front() { - if let Ok(json) = serde_json::to_string(&msg) { - let _ = ws.send_with_str(&json); - } - } - } + /// Create a new bridge (called from `WasmDb::connect_bridge`). + pub(crate) fn new_internal( + db: AimDb, + schema_map: Vec<(String, String)>, + url: &str, + options: JsValue, + ) -> Result { + let config: BridgeOptions = if options.is_undefined() || options.is_null() { + BridgeOptions::default() + } else { + serde_wasm_bindgen::from_value(options) + .map_err(|e| JsError::new(&format!("Invalid bridge options: {e}")))? 
+ }; + + let ws = web_sys::WebSocket::new(url) + .map_err(|e| JsError::new(&format!("WebSocket open failed: {e:?}")))?; + + let ws_cell = Rc::new(RefCell::new(ws)); + let state = Rc::new(RefCell::new(BridgeState { + status: ConnectionStatus::Connecting, + pending_writes: alloc::collections::VecDeque::new(), + backoff_index: 0, + keepalive_id: None, + _on_open: None, + _on_message: None, + _on_close: None, + _on_error: None, + })); + let on_status: Rc>> = Rc::new(RefCell::new(None)); + let backoff = alloc::vec![500, 1_000, 2_000, 4_000, 8_000]; + + let ctx = Rc::new(SharedCtx { + db, + schema_map, + state, + on_status, + config, + backoff, + url: url.to_string(), + ws_cell, + }); + + install_ws_callbacks(&ctx); + + Ok(WsBridge { ctx }) + } +} + +// โ”€โ”€โ”€ Callback installation (shared by connect + reconnect) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ - // Subscribe to configured topics - if !topics.is_empty() { - let sub = ClientMessage::Subscribe { - topics: topics.clone(), - }; - if let Ok(json) = serde_json::to_string(&sub) { +/// Install WebSocket event callbacks on the current socket in `ctx.ws_cell`. +/// +/// Extracted as a free function so both the initial `connect` and +/// `schedule_reconnect` paths can call it. 
+fn install_ws_callbacks(ctx: &Rc) { + let ws = ctx.ws_cell.borrow(); + + // on_open + let on_open = { + let ctx = ctx.clone(); + Closure::wrap(Box::new(move || { + { + let mut s = ctx.state.borrow_mut(); + s.status = ConnectionStatus::Connected; + s.backoff_index = 0; + + // Flush pending writes + let ws = ctx.ws_cell.borrow(); + while let Some(msg) = s.pending_writes.pop_front() { + if let Ok(json) = serde_json::to_string(&msg) { let _ = ws.send_with_str(&json); } } + } - // Notify status change - if let Some(cb) = on_status.borrow().as_ref() { - let _ = cb.call1( - &JsValue::NULL, - &JsValue::from_str(ConnectionStatus::Connected.as_str()), - ); + // Subscribe to configured topics + let topics = &ctx.config.subscribe_topics; + if !topics.is_empty() { + let sub = ClientMessage::Subscribe { + topics: topics.clone(), + }; + if let Ok(json) = serde_json::to_string(&sub) { + let _ = ctx.ws_cell.borrow().send_with_str(&json); } - }) as Box); - self.ws.set_onopen(Some(closure.as_ref().unchecked_ref())); - self.state.borrow_mut()._on_open = Some(closure); - } + } - // on_message - { - let db = self.db.clone(); - let closure = Closure::wrap(Box::new(move |event: web_sys::MessageEvent| { - if let Some(text) = event.data().as_string() { - if let Ok(msg) = serde_json::from_str::(&text) { - Self::handle_server_message(&db, msg); + // Start keepalive ping timer + if ctx.config.keepalive_ms > 0 { + let ws_for_ping = ctx.ws_cell.clone(); + let ping_closure = Closure::wrap(Box::new(move || { + let ping = ClientMessage::Ping; + if let Ok(json) = serde_json::to_string(&ping) { + let _ = ws_for_ping.borrow().send_with_str(&json); + } + }) as Box); + + if let Some(window) = web_sys::window() { + if let Ok(id) = window.set_interval_with_callback_and_timeout_and_arguments_0( + ping_closure.as_ref().unchecked_ref(), + ctx.config.keepalive_ms as i32, + ) { + ctx.state.borrow_mut().keepalive_id = Some(id); } } - }) as Box); - self.ws - 
.set_onmessage(Some(closure.as_ref().unchecked_ref())); - self.state.borrow_mut()._on_message = Some(closure); - } + ping_closure.forget(); + } - // on_close - { - let state = self.state.clone(); - let on_status = self.on_status_change.clone(); - let auto_reconnect = self.config.auto_reconnect; - let backoff = self.backoff.clone(); - let closure = Closure::wrap(Box::new(move || { - let current = state.borrow().status; - if current == ConnectionStatus::Disconnected { - return; // user-initiated disconnect + emit_status(&ctx.on_status, ConnectionStatus::Connected); + }) as Box) + }; + ws.set_onopen(Some(on_open.as_ref().unchecked_ref())); + + // on_message โ€” route server data to local buffers (no JsValue hop) + let on_message = { + let ctx = ctx.clone(); + Closure::wrap(Box::new(move |event: web_sys::MessageEvent| { + if let Some(text) = event.data().as_string() { + if let Ok(msg) = serde_json::from_str::(&text) { + handle_server_message(&ctx.db, &ctx.schema_map, msg); } + } + }) as Box) + }; + ws.set_onmessage(Some(on_message.as_ref().unchecked_ref())); + + // on_close โ€” reconnect with exponential backoff + let on_close = { + let ctx = ctx.clone(); + Closure::wrap(Box::new(move || { + let current = ctx.state.borrow().status; + if current == ConnectionStatus::Disconnected { + return; // user-initiated disconnect โ€” don't reconnect + } + + // Clear keepalive timer + if let Some(id) = ctx.state.borrow_mut().keepalive_id.take() { + if let Some(window) = web_sys::window() { + window.clear_interval_with_handle(id); + } + } - if auto_reconnect { - let mut s = state.borrow_mut(); + if ctx.config.auto_reconnect { + let delay = { + let mut s = ctx.state.borrow_mut(); s.status = ConnectionStatus::Reconnecting; - let delay = backoff + let d = ctx + .backoff .get(s.backoff_index) .copied() - .unwrap_or(*backoff.last().unwrap_or(&8_000)); - s.backoff_index = (s.backoff_index + 1).min(backoff.len() - 1); - drop(s); - - if let Some(cb) = on_status.borrow().as_ref() { - let _ = 
cb.call1( - &JsValue::NULL, - &JsValue::from_str(ConnectionStatus::Reconnecting.as_str()), - ); - } + .unwrap_or(*ctx.backoff.last().unwrap_or(&8_000)); + s.backoff_index = (s.backoff_index + 1).min(ctx.backoff.len() - 1); + d + }; - // Schedule reconnect (actual reconnect requires creating a new WS โ€” - // this is a placeholder; full reconnect needs the URL + re-install). - let _delay = delay; - // TODO: implement setTimeout-based reconnect with new WebSocket - } else { - state.borrow_mut().status = ConnectionStatus::Disconnected; - if let Some(cb) = on_status.borrow().as_ref() { - let _ = cb.call1( - &JsValue::NULL, - &JsValue::from_str(ConnectionStatus::Disconnected.as_str()), - ); - } - } - }) as Box); - self.ws.set_onclose(Some(closure.as_ref().unchecked_ref())); - self.state.borrow_mut()._on_close = Some(closure); - } + emit_status(&ctx.on_status, ConnectionStatus::Reconnecting); + schedule_reconnect(ctx.clone(), delay); + } else { + ctx.state.borrow_mut().status = ConnectionStatus::Disconnected; + emit_status(&ctx.on_status, ConnectionStatus::Disconnected); + } + }) as Box) + }; + ws.set_onclose(Some(on_close.as_ref().unchecked_ref())); + + // on_error โ€” WebSocket always fires `close` after `error`, so just log. + let on_error = { + Closure::wrap(Box::new(move || { + web_sys::console::warn_1(&"WsBridge: WebSocket error".into()); + }) as Box) + }; + ws.set_onerror(Some(on_error.as_ref().unchecked_ref())); + + // Store closures to prevent GC + let mut state = ctx.state.borrow_mut(); + state._on_open = Some(on_open); + state._on_message = Some(on_message); + state._on_close = Some(on_close); + state._on_error = Some(on_error); +} - // on_error โ€” WebSocket always fires `close` after `error`, so just log. 
- { - let closure = Closure::wrap(Box::new(move || { - web_sys::console::warn_1(&"WsBridge: WebSocket error".into()); - }) as Box); - self.ws.set_onerror(Some(closure.as_ref().unchecked_ref())); - self.state.borrow_mut()._on_error = Some(closure); +// โ”€โ”€โ”€ Reconnect โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Schedule a reconnect attempt after `delay_ms` using `setTimeout`. +/// +/// On success: swaps the new socket into `ctx.ws_cell` and re-installs +/// callbacks. On failure: increments backoff and schedules the next attempt. +/// +/// Uses `Closure::once` + `.forget()` โ€” each attempt leaks a few bytes. +/// With 5 backoff steps capped at 8 s, worst case is ~5 closures in flight. +fn schedule_reconnect(ctx: Rc, delay_ms: u32) { + let closure = Closure::once(move || { + // Guard: user may have called disconnect() during the delay + if ctx.state.borrow().status == ConnectionStatus::Disconnected { + return; } - } - /// Route a decoded server message to the local database. - fn handle_server_message(db: &Rc>, msg: ServerMessage) { - match msg { - ServerMessage::Data { topic, payload, .. } - | ServerMessage::Snapshot { topic, payload } => { - if let Some(payload) = payload { - let js = serde_wasm_bindgen::to_value(&payload).unwrap_or(JsValue::UNDEFINED); - let mut db = db.borrow_mut(); - // Push to local buffer via set() which does contract validation. - let _ = db.set(&topic, js); - } - } - ServerMessage::Subscribed { .. } => { - // ACK โ€” no action needed beyond status change (already handled in on_open). + match web_sys::WebSocket::new(&ctx.url) { + Ok(new_ws) => { + *ctx.ws_cell.borrow_mut() = new_ws; + install_ws_callbacks(&ctx); } - ServerMessage::Error { message, topic, .. 
} => { - let detail = match topic { - Some(t) => format!("WsBridge error on topic '{t}': {message}"), - None => format!("WsBridge error: {message}"), + Err(e) => { + web_sys::console::error_1(&format!("WsBridge: reconnect failed: {e:?}").into()); + // Try again with increased backoff + let next_delay = { + let mut s = ctx.state.borrow_mut(); + let d = ctx + .backoff + .get(s.backoff_index) + .copied() + .unwrap_or(*ctx.backoff.last().unwrap_or(&8_000)); + s.backoff_index = (s.backoff_index + 1).min(ctx.backoff.len() - 1); + d }; - web_sys::console::error_1(&detail.into()); - } - ServerMessage::Pong => { - // Keepalive ACK โ€” reset timer if needed. + schedule_reconnect(ctx, next_delay); } } + }); + + if let Some(window) = web_sys::window() { + let _ = window.set_timeout_with_callback_and_timeout_and_arguments_0( + closure.as_ref().unchecked_ref(), + delay_ms as i32, + ); } + closure.forget(); // prevent GC โ€” fires once, then drops itself +} - /// Serialize a `ClientMessage` and send as text frame. - fn send_json(&self, msg: &ClientMessage) -> Result<(), JsError> { - let json = serde_json::to_string(msg) - .map_err(|e| JsError::new(&format!("JSON serialization failed: {e}")))?; - self.ws - .send_with_str(&json) - .map_err(|e| JsError::new(&format!("WebSocket send failed: {e:?}")))?; - Ok(()) +// โ”€โ”€โ”€ Server message handling โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Route a decoded server message to the local database. +/// +/// Uses direct `serde_json::from_value::()` โ†’ buffer push, bypassing the +/// `JsValue` intermediary that the old code path used. +fn handle_server_message( + db: &AimDb, + schema_map: &[(String, String)], + msg: ServerMessage, +) { + match msg { + ServerMessage::Data { topic, payload, .. 
} | ServerMessage::Snapshot { topic, payload } => { + if let Some(payload) = payload { + let schema = schema_map + .iter() + .find(|(k, _)| k == &topic) + .map(|(_, v)| v.as_str()); + + if let Some(schema) = schema { + dispatch_streamable!(schema, |T| { + produce_from_json::(db, &topic, payload.clone()); + }); + } + } + } + ServerMessage::Subscribed { .. } => { + // ACK โ€” no action needed beyond status change (already handled in on_open). + } + ServerMessage::Error { message, topic, .. } => { + let detail = match topic { + Some(t) => format!("WsBridge error on topic '{t}': {message}"), + None => format!("WsBridge error: {message}"), + }; + web_sys::console::error_1(&detail.into()); + } + ServerMessage::Pong => { + // Keepalive ACK โ€” reset timer if needed. + } } +} - /// Emit status to the registered callback. - fn emit_status(&self, status: ConnectionStatus) { - if let Some(cb) = self.on_status_change.borrow().as_ref() { - let _ = cb.call1(&JsValue::NULL, &JsValue::from_str(status.as_str())); +/// Deserialize `serde_json::Value` โ†’ `T` and push to the record buffer. +/// +/// This is the fast path for incoming server data โ€” no `JsValue` hop. +fn produce_from_json(db: &AimDb, key: &str, json: serde_json::Value) +where + T: Send + Sync + 'static + Debug + Clone + DeserializeOwned, +{ + if let Ok(val) = serde_json::from_value::(json) { + let inner = db.inner(); + if let Ok(typed) = inner.get_typed_record_by_key::(key) { + crate::bindings::poll_sync(typed.produce(val)); } } } + +// โ”€โ”€โ”€ Helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Serialize a `ClientMessage` and send as text frame. 
+fn send_json(ws: &web_sys::WebSocket, msg: &ClientMessage) -> Result<(), JsError> { + let json = serde_json::to_string(msg) + .map_err(|e| JsError::new(&format!("JSON serialization failed: {e}")))?; + ws.send_with_str(&json) + .map_err(|e| JsError::new(&format!("WebSocket send failed: {e:?}")))?; + Ok(()) +} + +/// Emit status to the registered callback. +fn emit_status(on_status: &Rc>>, status: ConnectionStatus) { + if let Some(cb) = on_status.borrow().as_ref() { + let _ = cb.call1(&JsValue::NULL, &JsValue::from_str(status.as_str())); + } +} diff --git a/aimdb-websocket-connector/Cargo.toml b/aimdb-websocket-connector/Cargo.toml index 678d3c1d..021a88b3 100644 --- a/aimdb-websocket-connector/Cargo.toml +++ b/aimdb-websocket-connector/Cargo.toml @@ -6,19 +6,37 @@ license.workspace = true description = "WebSocket connector for AimDB โ€” real-time bidirectional streaming" [features] -default = ["tokio-runtime"] -tokio-runtime = [ +default = ["server"] + +# Server: accepts incoming WebSocket connections (Axum-based) +server = [ "aimdb-core/std", "aimdb-executor/std", "dep:tokio", "dep:axum", "dep:tower-http", + "dep:dashmap", ] + +# Client: connects out to a remote WebSocket server +client = [ + "aimdb-core/std", + "aimdb-executor/std", + "dep:tokio", + "dep:tokio-tungstenite", +] + +# Legacy alias โ€” implies "server" for backwards compatibility. +# Existing users that specify features = ["tokio-runtime"] continue to get +# the server module without changing their Cargo.toml. 
+tokio-runtime = ["server"] + tracing = ["dep:tracing"] [dependencies] aimdb-core = { path = "../aimdb-core", default-features = false } aimdb-executor = { path = "../aimdb-executor", default-features = false } +aimdb-ws-protocol = { path = "../aimdb-ws-protocol", features = ["std"] } # Async runtime tokio = { version = "1", features = [ @@ -37,8 +55,11 @@ tower-http = { version = "0.6", features = ["cors"], optional = true } serde = { version = "1", features = ["derive"] } serde_json = "1" +# WebSocket client +tokio-tungstenite = { version = "0.26", optional = true } + # Utilities -dashmap = "6" +dashmap = { version = "6", optional = true } async-trait = "0.1" futures-util = { version = "0.3", default-features = false, features = [ "sink", diff --git a/aimdb-websocket-connector/src/client/builder.rs b/aimdb-websocket-connector/src/client/builder.rs new file mode 100644 index 00000000..509b7ea5 --- /dev/null +++ b/aimdb-websocket-connector/src/client/builder.rs @@ -0,0 +1,224 @@ +//! Builder for the WebSocket client connector. +//! +//! [`WsClientConnectorBuilder`] implements [`ConnectorBuilder`] following the +//! same pattern as `MqttConnectorBuilder` and the server-side +//! `WebSocketConnectorBuilder`. +//! +//! # Lifecycle +//! +//! ```text +//! AimDbBuilder::build() +//! โ””โ”€ WsClientConnectorBuilder::build(&db) +//! โ”œโ”€ db.collect_inbound_routes("ws-client") โ†’ Router +//! โ”œโ”€ db.collect_outbound_routes("ws-client") โ†’ outbound tasks +//! โ”œโ”€ connect to remote WebSocket server +//! โ”œโ”€ spawn receive loop (router dispatch) +//! โ”œโ”€ spawn outbound publisher tasks +//! โ””โ”€ return Arc +//! 
``` + +use std::{pin::Pin, sync::Arc, time::Duration}; + +use aimdb_core::{router::RouterBuilder, ConnectorBuilder}; + +use super::connector::WsClientConnectorImpl; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Builder +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Builder for the AimDB WebSocket client connector. +/// +/// Connects *out* to a remote WebSocket server for direct AimDB-to-AimDB sync. +/// +/// # Example +/// +/// ```rust,ignore +/// use aimdb_websocket_connector::WsClientConnector; +/// +/// let connector = WsClientConnector::new("wss://cloud.example.com/ws") +/// .with_auto_reconnect(true) +/// .with_keepalive_ms(30_000) +/// .with_max_offline_queue(256); +/// ``` +pub struct WsClientConnectorBuilder { + /// WebSocket URL to connect to (e.g., `wss://cloud.example.com/ws`). + url: String, + /// Re-connect automatically on close (default: true). + auto_reconnect: bool, + /// Maximum reconnect attempts before giving up (0 = unlimited, default: 0). + max_reconnect_attempts: usize, + /// Keepalive ping interval in milliseconds (default: 30_000). + keepalive_ms: u64, + /// Maximum queued writes while disconnected (default: 256). + max_offline_queue: usize, + /// Topics to subscribe to on the remote server immediately after connect. + /// Wildcards supported (e.g., `["sensors/#"]`). + subscribe_topics: Vec, + /// Request late-join snapshots on (re)connect (default: true). + late_join: bool, +} + +impl WsClientConnectorBuilder { + /// Create a new builder targeting the given WebSocket URL. 
+ /// + /// # Examples + /// + /// ```rust,ignore + /// WsClientConnector::new("wss://cloud.example.com/ws") + /// WsClientConnector::new("ws://192.168.1.100:8080/ws") + /// ``` + pub fn new(url: impl Into) -> Self { + Self { + url: url.into(), + auto_reconnect: true, + max_reconnect_attempts: 0, + keepalive_ms: 30_000, + max_offline_queue: 256, + subscribe_topics: Vec::new(), + late_join: true, + } + } + + /// Enable or disable automatic reconnection on disconnect (default: `true`). + pub fn with_auto_reconnect(mut self, enabled: bool) -> Self { + self.auto_reconnect = enabled; + self + } + + /// Set maximum reconnect attempts (0 = unlimited, default: 0). + pub fn with_max_reconnect_attempts(mut self, max: usize) -> Self { + self.max_reconnect_attempts = max; + self + } + + /// Set the keepalive ping interval in milliseconds (default: 30 000). + /// + /// Set to 0 to disable keepalive pings. + pub fn with_keepalive_ms(mut self, ms: u64) -> Self { + self.keepalive_ms = ms; + self + } + + /// Set the maximum number of queued writes while disconnected (default: 256). + /// + /// When the queue is full, new writes are silently dropped. + pub fn with_max_offline_queue(mut self, max: usize) -> Self { + self.max_offline_queue = max; + self + } + + /// Subscribe to these topic patterns on the remote server immediately + /// after connecting. + /// + /// If not set, inbound routes are derived from `link_from("ws-client://โ€ฆ")` + /// declarations. + /// + /// # Example + /// + /// ```rust,ignore + /// WsClientConnector::new("wss://cloud/ws") + /// .with_subscribe_topics(["sensors/#", "config/#"]) + /// ``` + pub fn with_subscribe_topics( + mut self, + topics: impl IntoIterator>, + ) -> Self { + self.subscribe_topics = topics.into_iter().map(Into::into).collect(); + self + } + + /// Enable or disable late-join snapshot requests on connect (default: `true`). 
+ pub fn with_late_join(mut self, enabled: bool) -> Self { + self.late_join = enabled; + self + } +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// ConnectorBuilder impl +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +impl ConnectorBuilder for WsClientConnectorBuilder +where + R: aimdb_executor::Spawn + 'static, +{ + fn scheme(&self) -> &str { + "ws-client" + } + + fn build<'a>( + &'a self, + db: &'a aimdb_core::builder::AimDb, + ) -> Pin< + Box< + dyn core::future::Future< + Output = aimdb_core::DbResult>, + > + Send + + 'a, + >, + > { + Box::pin(async move { + // โ”€โ”€ Inbound routes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let inbound_routes = db.collect_inbound_routes("ws-client"); + + #[cfg(feature = "tracing")] + tracing::info!( + "WS client: {} inbound routes collected", + inbound_routes.len() + ); + + let router = Arc::new(RouterBuilder::from_routes(inbound_routes).build()); + + // โ”€โ”€ Outbound routes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let outbound_routes = db.collect_outbound_routes("ws-client"); + + #[cfg(feature = "tracing")] + tracing::info!( + "WS client: {} outbound routes collected", + outbound_routes.len() + ); + + // โ”€โ”€ Resolve subscribe topics โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + // Merge explicit subscribe_topics with topics derived from inbound routes + let mut topics: Vec = self.subscribe_topics.clone(); + for resource_id in router.resource_ids() { + let topic = resource_id.to_string(); + if 
!topics.contains(&topic) { + topics.push(topic); + } + } + + // โ”€โ”€ Build client config โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let config = super::connector::WsClientConfig { + url: self.url.clone(), + auto_reconnect: self.auto_reconnect, + max_reconnect_attempts: self.max_reconnect_attempts, + keepalive_interval: if self.keepalive_ms > 0 { + Some(Duration::from_millis(self.keepalive_ms)) + } else { + None + }, + max_offline_queue: self.max_offline_queue, + subscribe_topics: topics, + late_join: self.late_join, + }; + + // โ”€โ”€ Build the connector โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let connector = WsClientConnectorImpl::connect(config, router, db) + .await + .map_err(|e| aimdb_core::DbError::RuntimeError { + message: format!("WS client connect failed: {}", e).into(), + })?; + + // โ”€โ”€ Spawn outbound publishers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + connector + .spawn_outbound_publishers(db, outbound_routes) + .map_err(|e| aimdb_core::DbError::RuntimeError { + message: format!("WS client outbound setup failed: {}", e).into(), + })?; + + Ok(Arc::new(connector) as Arc) + }) + } +} diff --git a/aimdb-websocket-connector/src/client/connector.rs b/aimdb-websocket-connector/src/client/connector.rs new file mode 100644 index 00000000..270ea531 --- /dev/null +++ b/aimdb-websocket-connector/src/client/connector.rs @@ -0,0 +1,585 @@ +//! WebSocket client connector implementation. +//! +//! [`WsClientConnectorImpl`] manages a `tokio-tungstenite` WebSocket connection +//! to a remote AimDB server, with: +//! +//! - **Inbound routing**: `ServerMessage::Data/Snapshot` โ†’ `Router::route()` +//! - **Outbound publishing**: `subscribe_any() โ†’ recv_any() โ†’ Write` message +//! - **Reconnection**: exponential backoff with configurable limits +//! - **Keepalive**: periodic `Ping` messages +//! 
- **Offline queue**: queued writes during disconnection + +use std::{collections::VecDeque, pin::Pin, sync::Arc, time::Duration}; + +use aimdb_core::{ + router::Router, + transport::{ConnectorConfig, PublishError}, + OutboundRoute, +}; +use aimdb_ws_protocol::{ClientMessage, ServerMessage}; +use futures_util::{SinkExt, StreamExt}; +use tokio::sync::{mpsc, Mutex}; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Configuration +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Internal configuration for the WS client connector. +pub(crate) struct WsClientConfig { + pub url: String, + pub auto_reconnect: bool, + pub max_reconnect_attempts: usize, + pub keepalive_interval: Option, + pub max_offline_queue: usize, + pub subscribe_topics: Vec, + /// Reserved for future use (requesting snapshots on connect). + #[allow(dead_code)] + pub late_join: bool, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Connection status +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Connection state of the WS client. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ConnectionStatus { + Connecting, + Connected, + Disconnected, + Reconnecting, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Shared state +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Shared mutable state protected by a Mutex. +struct SharedState { + status: ConnectionStatus, + pending_writes: VecDeque, + max_offline_queue: usize, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Connector implementation +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Live WebSocket client connector. +/// +/// Created by [`WsClientConnectorBuilder::build()`]. Manages the connection +/// lifecycle and spawns background tasks for: +/// +/// - Receiving server messages and routing them via `Router` +/// - Sending outbound data from local record changes +/// - Keepalive pings +/// - Automatic reconnection +pub struct WsClientConnectorImpl { + /// Channel to send outbound text frames to the write loop. + write_tx: mpsc::UnboundedSender, + /// Shared state for status and offline queue. + state: Arc>, + /// Router for inbound data (server โ†’ local buffers). + #[allow(dead_code)] + router: Arc, +} + +impl WsClientConnectorImpl { + /// Connect to the remote WebSocket server and spawn background tasks. 
+ pub(crate) async fn connect( + config: WsClientConfig, + router: Arc, + db: &aimdb_core::builder::AimDb, + ) -> Result + where + R: aimdb_executor::Spawn + 'static, + { + let state = Arc::new(Mutex::new(SharedState { + status: ConnectionStatus::Connecting, + pending_writes: VecDeque::new(), + max_offline_queue: config.max_offline_queue, + })); + + // Connect to the remote server + let (ws_stream, _response) = tokio_tungstenite::connect_async(&config.url) + .await + .map_err(|e| format!("WebSocket connection failed: {e}"))?; + + #[cfg(feature = "tracing")] + tracing::info!("WS client: connected to {}", config.url); + + let (ws_write, ws_read) = ws_stream.split(); + + // Channel for sending text frames from any task to the write loop + let (write_tx, write_rx) = mpsc::unbounded_channel::(); + + // Mark connected + { + let mut s = state.lock().await; + s.status = ConnectionStatus::Connected; + } + + // โ”€โ”€ Send subscribe message โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + if !config.subscribe_topics.is_empty() { + let sub_msg = ClientMessage::Subscribe { + topics: config.subscribe_topics.clone(), + }; + if let Ok(json) = serde_json::to_string(&sub_msg) { + let _ = write_tx.send(json); + } + } + + // โ”€โ”€ Flush any pending offline writes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + { + let mut s = state.lock().await; + while let Some(msg) = s.pending_writes.pop_front() { + let _ = write_tx.send(msg); + } + } + + // โ”€โ”€ Spawn write loop โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + let write_state = state.clone(); + let write_tx_for_reconnect = write_tx.clone(); + let reconnect_url = config.url.clone(); + let reconnect_topics = config.subscribe_topics.clone(); + let auto_reconnect = config.auto_reconnect; + let max_reconnect_attempts = config.max_reconnect_attempts; + let router_for_reconnect = 
router.clone(); + + db.runtime() + .spawn({ + let state = state.clone(); + async move { + Self::run_write_loop(ws_write, write_rx).await; + + // Write loop ended โ€” connection closed + #[cfg(feature = "tracing")] + tracing::warn!("WS client: write loop ended"); + + state.lock().await.status = ConnectionStatus::Disconnected; + } + }) + .map_err(|e| format!("Failed to spawn write loop: {e:?}"))?; + + // โ”€โ”€ Spawn read loop โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + db.runtime() + .spawn({ + let router = router.clone(); + async move { + Self::run_read_loop(ws_read, &router).await; + + #[cfg(feature = "tracing")] + tracing::warn!("WS client: read loop ended"); + } + }) + .map_err(|e| format!("Failed to spawn read loop: {e:?}"))?; + + // โ”€โ”€ Spawn keepalive โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + if let Some(interval) = config.keepalive_interval { + let write_tx_ka = write_tx.clone(); + db.runtime() + .spawn(async move { + Self::run_keepalive(write_tx_ka, interval).await; + }) + .map_err(|e| format!("Failed to spawn keepalive: {e:?}"))?; + } + + // โ”€โ”€ Spawn reconnect watcher โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + if auto_reconnect { + db.runtime() + .spawn({ + let state = write_state.clone(); + async move { + Self::run_reconnect_watcher( + state, + write_tx_for_reconnect, + reconnect_url, + reconnect_topics, + router_for_reconnect, + max_reconnect_attempts, + ) + .await; + } + }) + .map_err(|e| format!("Failed to spawn reconnect watcher: {e:?}"))?; + } + + Ok(Self { + write_tx, + state, + router, + }) + } + + /// Spawn one Tokio task per outbound route. + /// + /// Each task subscribes to a local record, serializes values, and sends + /// `ClientMessage::Write` to the remote server. 
+ pub(crate) fn spawn_outbound_publishers( + &self, + db: &aimdb_core::builder::AimDb, + outbound_routes: Vec, + ) -> Result<(), String> + where + R: aimdb_executor::Spawn + 'static, + { + let runtime = db.runtime(); + + for (default_topic, consumer, serializer, _config, topic_provider) in outbound_routes { + let write_tx = self.write_tx.clone(); + let state = self.state.clone(); + let default_topic_clone = default_topic.clone(); + + runtime + .spawn(async move { + let mut reader = match consumer.subscribe_any().await { + Ok(r) => r, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::error!( + "WS client outbound: subscribe failed for '{}': {:?}", + default_topic_clone, + _e + ); + return; + } + }; + + #[cfg(feature = "tracing")] + tracing::info!( + "WS client outbound publisher started for topic: {}", + default_topic_clone + ); + + while let Ok(value_any) = reader.recv_any().await { + // Resolve topic (dynamic or static) + let topic = topic_provider + .as_ref() + .and_then(|p| p.topic_any(&*value_any)) + .unwrap_or_else(|| default_topic_clone.clone()); + + // Serialize + let bytes = match serializer(&*value_any) { + Ok(b) => b, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::error!( + "WS client outbound: serialize error for '{}': {:?}", + topic, + _e + ); + continue; + } + }; + + // Build Write message + let payload: serde_json::Value = match serde_json::from_slice(&bytes) { + Ok(v) => v, + Err(_e) => { + // Fallback: wrap raw bytes as a JSON string + serde_json::Value::String( + String::from_utf8_lossy(&bytes).into_owned(), + ) + } + }; + + let msg = ClientMessage::Write { + topic: topic.clone(), + payload, + }; + + if let Ok(json) = serde_json::to_string(&msg) { + let s = state.lock().await; + if s.status == ConnectionStatus::Connected { + let _ = write_tx.send(json); + } else if s.pending_writes.len() < s.max_offline_queue { + drop(s); + state.lock().await.pending_writes.push_back(json); + } + // else: drop (overflow policy) + } + } + + 
#[cfg(feature = "tracing")] + tracing::info!( + "WS client outbound publisher stopped for topic: {}", + default_topic_clone + ); + }) + .map_err(|e| format!("Failed to spawn outbound publisher: {e:?}"))?; + } + + Ok(()) + } + + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + // Background task implementations + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + + /// Write loop: drains the mpsc channel and sends text frames. + async fn run_write_loop( + mut ws_write: futures_util::stream::SplitSink< + tokio_tungstenite::WebSocketStream< + tokio_tungstenite::MaybeTlsStream, + >, + tokio_tungstenite::tungstenite::Message, + >, + mut write_rx: mpsc::UnboundedReceiver, + ) { + while let Some(text) = write_rx.recv().await { + let msg = tokio_tungstenite::tungstenite::Message::Text(text.into()); + if ws_write.send(msg).await.is_err() { + #[cfg(feature = "tracing")] + tracing::warn!("WS client: write failed, closing write loop"); + break; + } + } + } + + /// Read loop: receives server messages and routes them via the Router. 
+ async fn run_read_loop( + mut ws_read: futures_util::stream::SplitStream< + tokio_tungstenite::WebSocketStream< + tokio_tungstenite::MaybeTlsStream, + >, + >, + router: &Router, + ) { + while let Some(Ok(msg)) = ws_read.next().await { + let text = match msg { + tokio_tungstenite::tungstenite::Message::Text(t) => t.to_string(), + tokio_tungstenite::tungstenite::Message::Close(_) => { + #[cfg(feature = "tracing")] + tracing::info!("WS client: received close frame"); + break; + } + _ => continue, + }; + + let server_msg: ServerMessage = match serde_json::from_str(&text) { + Ok(m) => m, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::warn!("WS client: failed to parse server message: {}", _e); + continue; + } + }; + + match server_msg { + ServerMessage::Data { topic, payload, .. } + | ServerMessage::Snapshot { topic, payload } => { + if let Some(payload) = payload { + let bytes = match serde_json::to_vec(&payload) { + Ok(b) => b, + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::warn!( + "WS client: failed to serialize payload for '{}': {}", + topic, + _e + ); + continue; + } + }; + if let Err(_e) = router.route(&topic, &bytes).await { + #[cfg(feature = "tracing")] + tracing::warn!( + "WS client: route failed for topic '{}': {:?}", + topic, + _e + ); + } + } + } + ServerMessage::Subscribed { .. } => { + #[cfg(feature = "tracing")] + tracing::debug!("WS client: subscription acknowledged"); + } + ServerMessage::Error { message, topic, .. } => { + #[cfg(feature = "tracing")] + tracing::error!( + "WS client: server error{}: {}", + topic + .as_ref() + .map(|t| format!(" on '{}'", t)) + .unwrap_or_default(), + message + ); + let _ = (&message, &topic); + } + ServerMessage::Pong => { + // Keepalive ACK โ€” nothing to do. + } + } + } + } + + /// Keepalive loop: sends periodic Ping messages. 
+ async fn run_keepalive(write_tx: mpsc::UnboundedSender, interval: Duration) { + let mut ticker = tokio::time::interval(interval); + ticker.tick().await; // skip first immediate tick + + loop { + ticker.tick().await; + let ping = ClientMessage::Ping; + if let Ok(json) = serde_json::to_string(&ping) { + if write_tx.send(json).is_err() { + break; // channel closed, connection gone + } + } + } + } + + /// Reconnect watcher: monitors connection status and reconnects when needed. + /// + /// Uses exponential backoff: 500ms, 1s, 2s, 4s, 8s (capped). + async fn run_reconnect_watcher( + state: Arc>, + _write_tx: mpsc::UnboundedSender, + url: String, + subscribe_topics: Vec, + router: Arc, + max_attempts: usize, + ) { + let backoff = [500u64, 1_000, 2_000, 4_000, 8_000]; + let mut attempt = 0usize; + + loop { + // Wait a bit before checking + tokio::time::sleep(Duration::from_millis(1_000)).await; + + let status = state.lock().await.status; + if status == ConnectionStatus::Connected || status == ConnectionStatus::Connecting { + attempt = 0; + continue; + } + + // Disconnected โ€” try to reconnect + if max_attempts > 0 && attempt >= max_attempts { + #[cfg(feature = "tracing")] + tracing::error!( + "WS client: max reconnect attempts ({}) reached, giving up", + max_attempts + ); + break; + } + + let delay_ms = backoff.get(attempt).copied().unwrap_or(8_000); + attempt += 1; + + #[cfg(feature = "tracing")] + tracing::info!( + "WS client: reconnecting in {}ms (attempt {})", + delay_ms, + attempt + ); + + state.lock().await.status = ConnectionStatus::Reconnecting; + tokio::time::sleep(Duration::from_millis(delay_ms)).await; + + // Guard: status may have changed during sleep + if state.lock().await.status != ConnectionStatus::Reconnecting { + continue; + } + + match tokio_tungstenite::connect_async(&url).await { + Ok((ws_stream, _)) => { + #[cfg(feature = "tracing")] + tracing::info!("WS client: reconnected to {}", url); + + let (ws_write, ws_read) = ws_stream.split(); + + // 
Spawn new write loop + let (new_write_tx, new_write_rx) = mpsc::unbounded_channel::(); + + tokio::spawn(Self::run_write_loop(ws_write, new_write_rx)); + + // Spawn new read loop + let router_clone = router.clone(); + tokio::spawn(async move { + Self::run_read_loop(ws_read, &router_clone).await; + }); + + // Re-subscribe + if !subscribe_topics.is_empty() { + let sub = ClientMessage::Subscribe { + topics: subscribe_topics.clone(), + }; + if let Ok(json) = serde_json::to_string(&sub) { + let _ = new_write_tx.send(json); + } + } + + // Flush pending writes + { + let mut s = state.lock().await; + while let Some(msg) = s.pending_writes.pop_front() { + let _ = new_write_tx.send(msg); + } + s.status = ConnectionStatus::Connected; + } + + // Note: The original write_tx is now stale. New outbound + // publishers would need to be re-wired. For the initial + // implementation, outbound publishers detect send failures + // and queue to the offline buffer via the shared state. + // A production-grade implementation would swap the write_tx + // atomically. + + attempt = 0; + } + Err(_e) => { + #[cfg(feature = "tracing")] + tracing::warn!("WS client: reconnect failed: {}", _e); + state.lock().await.status = ConnectionStatus::Disconnected; + } + } + } + } +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Connector trait +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +impl aimdb_core::transport::Connector for WsClientConnectorImpl { + /// Send a payload to the remote server as a `Write` message. + /// + /// This is the on-demand publish path used by the `ConnectorConfig` system. + /// Most data flow happens via the outbound publisher tasks instead. 
+ fn publish( + &self, + destination: &str, + _config: &ConnectorConfig, + payload: &[u8], + ) -> Pin> + Send + '_>> { + let destination = destination.to_string(); + let payload_owned = payload.to_vec(); + + Box::pin(async move { + let json_payload: serde_json::Value = serde_json::from_slice(&payload_owned) + .map_err(|_| PublishError::MessageTooLarge)?; + + let msg = ClientMessage::Write { + topic: destination, + payload: json_payload, + }; + + let json = serde_json::to_string(&msg).map_err(|_| PublishError::MessageTooLarge)?; + + let s = self.state.lock().await; + if s.status == ConnectionStatus::Connected { + self.write_tx + .send(json) + .map_err(|_| PublishError::ConnectionFailed)?; + } else if s.pending_writes.len() < s.max_offline_queue { + drop(s); + self.state.lock().await.pending_writes.push_back(json); + } else { + return Err(PublishError::BufferFull); + } + + Ok(()) + }) + } +} diff --git a/aimdb-websocket-connector/src/client/mod.rs b/aimdb-websocket-connector/src/client/mod.rs new file mode 100644 index 00000000..c05e00e0 --- /dev/null +++ b/aimdb-websocket-connector/src/client/mod.rs @@ -0,0 +1,29 @@ +//! WebSocket **client** connector โ€” connects out to a remote WS server. +//! +//! This module provides [`WsClientConnectorBuilder`] which implements the +//! standard [`ConnectorBuilder`][aimdb_core::ConnectorBuilder] trait. +//! +//! # URL Scheme +//! +//! Routes use the `ws-client://` scheme to distinguish from the server-side +//! `ws://` scheme: +//! +//! ```text +//! link_to("ws-client://sensors/temperature") // push local data to remote +//! link_from("ws-client://config/threshold") // receive remote data locally +//! ``` +//! +//! # Architecture +//! +//! ```text +//! AimDB (local) โ†โ”€ WsClientConnector โ”€โ”€WebSocketโ”€โ”€โ†’ AimDB (remote server) +//! โ”‚ โ”‚ +//! โ”œโ”€ link_to โ†’ serialize โ†’ ClientMessage::Write โ”€โ”€โ”€โ†’โ”‚ +//! โ””โ”€ link_from โ† deserialize โ† ServerMessage::Data โ†โ”€โ”€โ”‚ +//! 
``` + +mod builder; +mod connector; + +pub use builder::WsClientConnectorBuilder; +pub use connector::WsClientConnectorImpl; diff --git a/aimdb-websocket-connector/src/lib.rs b/aimdb-websocket-connector/src/lib.rs index 10183b29..8048830a 100644 --- a/aimdb-websocket-connector/src/lib.rs +++ b/aimdb-websocket-connector/src/lib.rs @@ -2,20 +2,27 @@ //! //! First-class WebSocket connector for AimDB โ€” real-time bidirectional streaming. //! -//! Replaces the `tap()` + `tokio::sync::broadcast` workaround with a proper -//! [`ConnectorBuilder`][aimdb_core::ConnectorBuilder] implementation that -//! integrates cleanly with the existing MQTT/KNX infrastructure. +//! This crate provides two connector modes controlled by feature flags: //! -//! ## Quick Start +//! - **`server`** (default) โ€” Accepts incoming WebSocket connections via an +//! Axum-based HTTP/WS server. Use `link_to("ws://topic")` to push data to +//! browser clients. +//! +//! - **`client`** โ€” Connects *out* to a remote WebSocket server (powered by +//! `tokio-tungstenite`). Use `link_to("ws-client://host/topic")` and +//! `link_from("ws-client://host/topic")` for direct AimDB-to-AimDB sync +//! without an intermediary broker. +//! +//! Both modes share the same wire protocol from [`aimdb_ws_protocol`]. +//! +//! ## Server Quick Start //! //! ```rust,ignore //! use aimdb_tokio_adapter::TokioAdapter; //! use aimdb_websocket_connector::WebSocketConnector; //! -//! let runtime = TokioAdapter::new(); -//! //! let db = AimDbBuilder::new() -//! .runtime(runtime) +//! .runtime(TokioAdapter::new()) //! .with_connector( //! WebSocketConnector::new() //! .bind("0.0.0.0:8080") @@ -24,44 +31,93 @@ //! ) //! .configure::(TempKey::Vienna, |reg| { //! reg.buffer(BufferCfg::SpmcRing { capacity: 100 }) -//! .link_from("mqtt://sensors/vienna/temperature") -//! .with_deserializer(Temperature::from_bytes) -//! .finish() //! .link_to("ws://sensors/temperature/vienna") -//! .with_serializer(|t: &Temperature| { -//! 
serde_json::to_vec(t).map_err(Into::into) -//! }) +//! .with_serializer(|t| serde_json::to_vec(t).map_err(Into::into)) +//! .finish(); +//! }) +//! .build().await?; +//! ``` +//! +//! ## Client Quick Start +//! +//! ```rust,ignore +//! use aimdb_websocket_connector::WsClientConnector; +//! +//! let db = AimDbBuilder::new() +//! .runtime(TokioAdapter::new()) +//! .with_connector( +//! WsClientConnector::new("wss://cloud.example.com/ws"), +//! ) +//! .configure::("sensors/temp", |reg| { +//! reg.buffer(BufferCfg::SpmcRing { capacity: 100 }) +//! .link_to("ws-client://sensors/temp") +//! .with_serializer(|t| serde_json::to_vec(t).map_err(Into::into)) +//! .finish() +//! .link_from("ws-client://config/threshold") +//! .with_deserializer(|data| serde_json::from_slice(data)) //! .finish(); //! }) -//! .build() -//! .await?; +//! .build().await?; //! ``` //! //! ## Wire Protocol //! -//! See [`protocol`] for the full message specification. +//! See [`protocol`] for the full message specification (re-exported from +//! [`aimdb_ws_protocol`]). //! -//! ## Authentication +//! ## Authentication (server only) //! //! See [`auth`] for the [`AuthHandler`][auth::AuthHandler] trait. 
+// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Server modules (feature = "server") +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +#[cfg(feature = "server")] pub mod auth; +#[cfg(feature = "server")] pub mod builder; +#[cfg(feature = "server")] pub mod client_manager; +#[cfg(feature = "server")] pub mod connector; -pub mod protocol; +#[cfg(feature = "server")] pub(crate) mod server; +#[cfg(feature = "server")] pub(crate) mod session; +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Client module (feature = "client") +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +#[cfg(feature = "client")] +pub mod client; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Protocol (always available) +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +pub mod protocol; + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• // Public re-exports // 
โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -/// The primary entry point โ€” use this to create a WebSocket connector. +/// The primary entry point for a WebSocket **server** connector. /// /// This is a type alias for [`builder::WebSocketConnectorBuilder`]. +#[cfg(feature = "server")] pub type WebSocketConnector = builder::WebSocketConnectorBuilder; +#[cfg(feature = "server")] pub use auth::{AuthError, AuthHandler, AuthRequest, ClientId, ClientInfo, NoAuth, Permissions}; +#[cfg(feature = "server")] pub use client_manager::ClientManager; + +/// The primary entry point for a WebSocket **client** connector. +/// +/// This is a type alias for [`client::WsClientConnectorBuilder`]. +#[cfg(feature = "client")] +pub type WsClientConnector = client::WsClientConnectorBuilder; + pub use protocol::{ClientMessage, ErrorCode, ServerMessage}; diff --git a/aimdb-websocket-connector/src/protocol.rs b/aimdb-websocket-connector/src/protocol.rs index 6e16c81f..0812aef1 100644 --- a/aimdb-websocket-connector/src/protocol.rs +++ b/aimdb-websocket-connector/src/protocol.rs @@ -1,5 +1,10 @@ //! Wire protocol types and topic matching for the WebSocket connector. //! +//! This module re-exports all types from [`aimdb_ws_protocol`] for backwards +//! compatibility. The canonical definitions now live in the shared `no_std` +//! protocol crate so they can be used by both the server connector and +//! browser/native clients. +//! //! # Server โ†’ Client messages //! //! - `data` โ€” live record push @@ -15,195 +20,10 @@ //! - `write` โ€” inbound value for a `link_from("ws://โ€ฆ")` record //! 
- `ping` โ€” keepalive ping -use serde::{Deserialize, Serialize}; - -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -// Server โ†’ Client -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• - -/// A message sent from the server to a connected WebSocket client. -#[derive(Debug, Clone, Serialize)] -#[serde(tag = "type", rename_all = "snake_case")] -pub enum ServerMessage { - /// Live data push from an outbound route. - Data { - topic: String, - #[serde(skip_serializing_if = "Option::is_none")] - payload: Option, - /// Server-side dispatch timestamp (milliseconds since Unix epoch). - ts: u64, - }, - - /// Late-join snapshot โ€” current value sent when a client subscribes. - Snapshot { - topic: String, - #[serde(skip_serializing_if = "Option::is_none")] - payload: Option, - }, - - /// Confirmation sent once subscriptions are recorded. - Subscribed { topics: Vec }, - - /// Per-operation error. - Error { - code: ErrorCode, - #[serde(skip_serializing_if = "Option::is_none")] - topic: Option, - message: String, - }, - - /// Response to a client `ping` message. - Pong, -} - -/// Machine-readable error codes sent in `ServerMessage::Error`. 
-#[derive(Debug, Clone, Serialize)] -#[serde(rename_all = "SCREAMING_SNAKE_CASE")] -pub enum ErrorCode { - Unauthorized, - Forbidden, - UnknownTopic, - SerializationError, - WriteError, - ServerError, -} - -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -// Client โ†’ Server -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• - -/// A message received from a WebSocket client. -#[derive(Debug, Clone, Deserialize)] -#[serde(tag = "type", rename_all = "snake_case")] -pub enum ClientMessage { - /// Subscribe to one or more topics (wildcards supported). - Subscribe { topics: Vec }, - - /// Unsubscribe from one or more topics. - Unsubscribe { topics: Vec }, - - /// Write a value to an inbound record (`link_from("ws://โ€ฆ")`). - Write { - topic: String, - payload: serde_json::Value, - }, - - /// Keepalive ping. - Ping, -} - -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -// Topic matching -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• - -/// Returns `true` if `topic` matches `pattern`. 
-/// -/// Follows MQTT wildcard conventions: -/// -/// | Pattern | Semantics | -/// |----------|-----------------------------------| -/// | `#` | Multi-level wildcard (all topics) | -/// | `a/#` | Everything under `a/` | -/// | `a/*/c` | Single-level wildcard in segment | -/// | `a/b/c` | Exact match | -pub fn topic_matches(pattern: &str, topic: &str) -> bool { - // Fast path: exact match - if pattern == topic { - return true; - } - - // Multi-level wildcard: `#` matches everything - if pattern == "#" { - return true; - } - - // `prefix/#` matches everything under prefix โ€” only when prefix is literal - // (no wildcards in the prefix). When wildcards are present, fall through to - // the segment loop which handles `#` at any position. - if let Some(prefix) = pattern.strip_suffix("/#") { - if !prefix.contains('*') && !prefix.contains('#') { - return topic.starts_with(prefix) - && (topic.len() == prefix.len() - || topic.as_bytes().get(prefix.len()) == Some(&b'/')); - } - } - - // Segment-by-segment matching with `*` single-level wildcard - let mut pattern_parts = pattern.split('/'); - let mut topic_parts = topic.split('/'); - - loop { - match (pattern_parts.next(), topic_parts.next()) { - (Some("#"), _) => return true, - (Some("*"), Some(_)) => {} // single-level wildcard โ€” consume one segment - (Some(p), Some(t)) if p == t => {} // literal match - (None, None) => return true, // both exhausted at the same time - _ => return false, - } - } -} +// Re-export everything from the shared protocol crate +pub use aimdb_ws_protocol::{topic_matches, ClientMessage, ErrorCode, ServerMessage}; /// Returns the current milliseconds since the Unix epoch (for `ts` fields). 
pub fn now_ms() -> u64 { - use std::time::{SystemTime, UNIX_EPOCH}; - SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap_or_default() - .as_millis() as u64 -} - -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -// Tests -// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn exact_match() { - assert!(topic_matches("a/b/c", "a/b/c")); - assert!(!topic_matches("a/b/c", "a/b/d")); - } - - #[test] - fn hash_wildcard() { - assert!(topic_matches("#", "anything/goes/here")); - assert!(topic_matches("#", "a")); - } - - #[test] - fn prefix_hash_wildcard() { - assert!(topic_matches("sensors/#", "sensors/temperature/vienna")); - assert!(topic_matches("sensors/#", "sensors/humidity/berlin")); - assert!(!topic_matches("sensors/#", "commands/setpoint")); - // Edge: prefix itself - assert!(topic_matches("sensors/#", "sensors")); - } - - #[test] - fn star_wildcard() { - assert!(topic_matches( - "sensors/temperature/*", - "sensors/temperature/vienna" - )); - assert!(topic_matches( - "sensors/temperature/*", - "sensors/temperature/berlin" - )); - assert!(!topic_matches( - "sensors/temperature/*", - "sensors/humidity/vienna" - )); - assert!(!topic_matches( - "sensors/temperature/*", - "sensors/temperature/a/b" - )); - } - - #[test] - fn mixed_wildcards() { - assert!(topic_matches("a/*/c/#", "a/b/c/d/e/f")); - assert!(!topic_matches("a/*/c/#", "a/b/x/d")); - } + aimdb_ws_protocol::now_ms() } diff --git a/aimdb-ws-protocol/Cargo.toml b/aimdb-ws-protocol/Cargo.toml new file mode 100644 index 00000000..773488db --- /dev/null +++ b/aimdb-ws-protocol/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = 
"aimdb-ws-protocol" +version = "0.1.0" +edition = "2021" +license.workspace = true +description = "Shared wire protocol types for AimDB WebSocket connector and clients (no_std compatible)" +keywords = ["websocket", "protocol", "no_std", "aimdb"] + +[features] +default = [] +# Enable `now_ms()` helper (requires std::time) +std = [] + +[dependencies] +serde = { workspace = true } +serde_json = { workspace = true } diff --git a/aimdb-ws-protocol/src/lib.rs b/aimdb-ws-protocol/src/lib.rs new file mode 100644 index 00000000..4b26d32e --- /dev/null +++ b/aimdb-ws-protocol/src/lib.rs @@ -0,0 +1,289 @@ +//! # aimdb-ws-protocol +//! +//! Shared wire protocol types for the AimDB WebSocket connector ecosystem. +//! +//! This crate is `no_std + alloc` compatible so it can be used from: +//! +//! - **`aimdb-websocket-connector`** โ€” the server side (Axum/Tokio) +//! - **`aimdb-wasm-adapter`** โ€” the browser client (`WsBridge`) +//! - **Future native WS client** โ€” Tokio/Embassy client connector +//! +//! # Wire Protocol +//! +//! All messages are JSON-encoded with a `"type"` discriminant tag: +//! +//! ## Server โ†’ Client ([`ServerMessage`]) +//! +//! - `data` โ€” live record push with timestamp +//! - `snapshot` โ€” late-join current value +//! - `subscribed` โ€” subscription acknowledgement +//! - `error` โ€” per-operation error +//! - `pong` โ€” response to client ping +//! +//! ## Client โ†’ Server ([`ClientMessage`]) +//! +//! - `subscribe` โ€” subscribe to one or more topics (supports MQTT wildcards) +//! - `unsubscribe` โ€” cancel subscriptions +//! - `write` โ€” inbound value for a `link_from("ws://โ€ฆ")` record +//! - `ping` โ€” keepalive ping +//! +//! # Topic Matching +//! +//! [`topic_matches`] implements MQTT-style wildcard matching (`#` for +//! multi-level, `*` for single-level). 
+ +#![no_std] + +extern crate alloc; + +use alloc::string::String; +use alloc::vec::Vec; + +use serde::{Deserialize, Serialize}; + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Server โ†’ Client +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// A message sent from the server to a connected WebSocket client. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ServerMessage { + /// Live data push from an outbound route. + Data { + topic: String, + #[serde(skip_serializing_if = "Option::is_none")] + payload: Option, + /// Server-side dispatch timestamp (milliseconds since Unix epoch). + ts: u64, + }, + + /// Late-join snapshot โ€” current value sent when a client subscribes. + Snapshot { + topic: String, + #[serde(skip_serializing_if = "Option::is_none")] + payload: Option, + }, + + /// Confirmation sent once subscriptions are recorded. + Subscribed { topics: Vec }, + + /// Per-operation error. + Error { + code: ErrorCode, + #[serde(skip_serializing_if = "Option::is_none")] + topic: Option, + message: String, + }, + + /// Response to a client `ping` message. + Pong, +} + +/// Machine-readable error codes sent in `ServerMessage::Error`. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum ErrorCode { + Unauthorized, + Forbidden, + UnknownTopic, + SerializationError, + WriteError, + ServerError, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Client โ†’ Server +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// A message received from a WebSocket client. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ClientMessage { + /// Subscribe to one or more topics (wildcards supported). + Subscribe { topics: Vec }, + + /// Unsubscribe from one or more topics. + Unsubscribe { topics: Vec }, + + /// Write a value to an inbound record (`link_from("ws://โ€ฆ")`). + Write { + topic: String, + payload: serde_json::Value, + }, + + /// Keepalive ping. + Ping, +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Topic matching +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Returns `true` if `topic` matches `pattern`. 
+///
+/// Follows MQTT-*style* wildcard conventions. Note one deliberate divergence:
+/// the single-level wildcard here is `*`, where the MQTT specification uses `+`.
+///
+/// | Pattern  | Semantics                         |
+/// |----------|-----------------------------------|
+/// | `#`      | Multi-level wildcard (all topics) |
+/// | `a/#`    | Everything under `a/`             |
+/// | `a/*/c`  | Single-level wildcard in segment  |
+/// | `a/b/c`  | Exact match                       |
+pub fn topic_matches(pattern: &str, topic: &str) -> bool {
+    // Fast path: exact match
+    if pattern == topic {
+        return true;
+    }
+
+    // Multi-level wildcard: `#` matches everything
+    if pattern == "#" {
+        return true;
+    }
+
+    // `prefix/#` matches everything under prefix — only when prefix is literal
+    // (no wildcards in the prefix). When wildcards are present, fall through to
+    // the segment loop which handles `#` at any position.
+    if let Some(prefix) = pattern.strip_suffix("/#") {
+        if !prefix.contains('*') && !prefix.contains('#') {
+            return topic.starts_with(prefix)
+                && (topic.len() == prefix.len()
+                    || topic.as_bytes().get(prefix.len()) == Some(&b'/'));
+        }
+    }
+
+    // Segment-by-segment matching with `*` single-level wildcard
+    let mut pattern_parts = pattern.split('/');
+    let mut topic_parts = topic.split('/');
+
+    loop {
+        match (pattern_parts.next(), topic_parts.next()) {
+            (Some("#"), _) => return true,
+            (Some("*"), Some(_)) => {} // single-level wildcard — consume one segment
+            (Some(p), Some(t)) if p == t => {} // literal match
+            (None, None) => return true, // both exhausted at the same time
+            _ => return false,
+        }
+    }
+}
+
+/// Returns the current milliseconds since the Unix epoch (for `ts` fields).
+///
+/// Requires the `std` feature.
+#[cfg(feature = "std")] +pub fn now_ms() -> u64 { + extern crate std; + use std::time::{SystemTime, UNIX_EPOCH}; + SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_millis() as u64 +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Tests +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +#[cfg(test)] +mod tests { + use super::*; + use alloc::string::ToString; + + #[test] + fn exact_match() { + assert!(topic_matches("a/b/c", "a/b/c")); + assert!(!topic_matches("a/b/c", "a/b/d")); + } + + #[test] + fn hash_wildcard() { + assert!(topic_matches("#", "anything/goes/here")); + assert!(topic_matches("#", "a")); + } + + #[test] + fn prefix_hash_wildcard() { + assert!(topic_matches("sensors/#", "sensors/temperature/vienna")); + assert!(topic_matches("sensors/#", "sensors/humidity/berlin")); + assert!(!topic_matches("sensors/#", "commands/setpoint")); + // Edge: prefix itself + assert!(topic_matches("sensors/#", "sensors")); + } + + #[test] + fn star_wildcard() { + assert!(topic_matches( + "sensors/temperature/*", + "sensors/temperature/vienna" + )); + assert!(topic_matches( + "sensors/temperature/*", + "sensors/temperature/berlin" + )); + assert!(!topic_matches( + "sensors/temperature/*", + "sensors/humidity/vienna" + )); + assert!(!topic_matches( + "sensors/temperature/*", + "sensors/temperature/a/b" + )); + } + + #[test] + fn mixed_wildcards() { + assert!(topic_matches("a/*/c/#", "a/b/c/d/e/f")); + assert!(!topic_matches("a/*/c/#", "a/b/x/d")); + } + + #[test] + fn serde_server_message_roundtrip() { + let msg = ServerMessage::Data { + topic: "sensors/temp".into(), + payload: Some(serde_json::json!({"celsius": 21.5})), + 
ts: 1234567890, + }; + let json = serde_json::to_string(&msg).unwrap(); + let parsed: ServerMessage = serde_json::from_str(&json).unwrap(); + match parsed { + ServerMessage::Data { topic, ts, .. } => { + assert_eq!(topic, "sensors/temp"); + assert_eq!(ts, 1234567890); + } + _ => panic!("Expected Data variant"), + } + } + + #[test] + fn serde_client_message_roundtrip() { + let msg = ClientMessage::Subscribe { + topics: alloc::vec!["sensors/#".into()], + }; + let json = serde_json::to_string(&msg).unwrap(); + let parsed: ClientMessage = serde_json::from_str(&json).unwrap(); + match parsed { + ClientMessage::Subscribe { topics } => { + assert_eq!(topics, alloc::vec!["sensors/#".to_string()]); + } + _ => panic!("Expected Subscribe variant"), + } + } + + #[test] + fn serde_error_code_roundtrip() { + let msg = ServerMessage::Error { + code: ErrorCode::UnknownTopic, + topic: Some("foo/bar".into()), + message: "not found".into(), + }; + let json = serde_json::to_string(&msg).unwrap(); + assert!(json.contains("UNKNOWN_TOPIC")); + let parsed: ServerMessage = serde_json::from_str(&json).unwrap(); + match parsed { + ServerMessage::Error { code, .. } => { + assert!(matches!(code, ErrorCode::UnknownTopic)); + } + _ => panic!("Expected Error variant"), + } + } +} diff --git a/tools/aimdb-mcp/src/tools/architecture.rs b/tools/aimdb-mcp/src/tools/architecture.rs index c10c9f0a..744a9847 100644 --- a/tools/aimdb-mcp/src/tools/architecture.rs +++ b/tools/aimdb-mcp/src/tools/architecture.rs @@ -434,7 +434,7 @@ pub async fn remove_task(args: Option) -> McpResult { Ok(serde_json::json!({ "proposal_id": proposal_id, "task_name": params.task_name, - "warning": "Removing this task will affect any binaries that reference it and any records listing it as a producer/consumer.", + "warning": "Removing this task will affect any binaries that reference it and any records listing it as a source/consumer.", "message": "Removal proposal created. 
Present to the user, then call resolve_proposal.", })) } From 848d33e32c619fdda4e4a76c303c5c39daa3c296 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Thu, 5 Mar 2026 19:17:18 +0000 Subject: [PATCH 17/46] feat: improve error handling in persistence loop for record registration --- aimdb-persistence/src/ext.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/aimdb-persistence/src/ext.rs b/aimdb-persistence/src/ext.rs index 35e28c4c..b8dfe381 100644 --- a/aimdb-persistence/src/ext.rs +++ b/aimdb-persistence/src/ext.rs @@ -67,7 +67,13 @@ where } }; - while let Ok(value) = reader.recv().await { + loop { + let value = match reader.recv().await { + Ok(v) => v, + Err(aimdb_core::DbError::BufferLagged { .. }) => continue, + Err(_) => break, + }; + // T is known here โ€” serialize directly, no with_remote_access() needed. let json = match serde_json::to_value(&value) { Ok(v) => v, From e3be2e89c8bbc62e12d5cc3bd7e39fb935a68043 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Thu, 5 Mar 2026 19:27:25 +0000 Subject: [PATCH 18/46] feat: update formatting command to ensure consistent code style across all packages --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 5ee38382..e2ae5178 100644 --- a/Makefile +++ b/Makefile @@ -119,7 +119,7 @@ test: fmt: @printf "$(GREEN)Formatting code (workspace members only)...$(NC)\n" - for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-ws-protocol aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ + @for pkg in 
aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-ws-protocol aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \ printf "$(YELLOW) โ†’ Formatting $$pkg$(NC)\n"; \ cargo fmt -p $$pkg 2>/dev/null || true; \ done From 0051d6f2034df18b80fbf0da84173e42a5686079 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Thu, 5 Mar 2026 19:55:40 +0000 Subject: [PATCH 19/46] feat: enhance WebSocket bridge with query functionality and improve keepalive management --- aimdb-wasm-adapter/src/bindings.rs | 13 +- aimdb-wasm-adapter/src/buffer.rs | 8 +- aimdb-wasm-adapter/src/time.rs | 55 ++++++-- aimdb-wasm-adapter/src/ws_bridge.rs | 204 ++++++++++++++++++++++++---- 4 files changed, 232 insertions(+), 48 deletions(-) diff --git a/aimdb-wasm-adapter/src/bindings.rs b/aimdb-wasm-adapter/src/bindings.rs index 1cbe85ae..97a2b972 100644 --- a/aimdb-wasm-adapter/src/bindings.rs +++ b/aimdb-wasm-adapter/src/bindings.rs @@ -416,21 +416,14 @@ where /// operations on `WasmBuffer` (which are single-threaded, non-blocking). pub(crate) fn poll_sync(f: F) -> F::Output { use core::pin::Pin; - use core::task::{Context, Poll, RawWaker, RawWakerVTable, Waker}; + use core::task::{Context, Poll, Waker}; // SAFETY: the future is stack-local and will not be moved after pinning. let mut f = f; let f = unsafe { Pin::new_unchecked(&mut f) }; - // No-op waker โ€” produce() does not need to be woken. 
- fn noop(_: *const ()) {} - fn clone_noop(p: *const ()) -> RawWaker { - RawWaker::new(p, &VTABLE) - } - const VTABLE: RawWakerVTable = RawWakerVTable::new(clone_noop, noop, noop, noop); - - let waker = unsafe { Waker::from_raw(RawWaker::new(core::ptr::null(), &VTABLE)) }; - let mut cx = Context::from_waker(&waker); + let waker = Waker::noop(); + let mut cx = Context::from_waker(waker); match f.poll(&mut cx) { Poll::Ready(val) => val, diff --git a/aimdb-wasm-adapter/src/buffer.rs b/aimdb-wasm-adapter/src/buffer.rs index c1915ae4..61d6746d 100644 --- a/aimdb-wasm-adapter/src/buffer.rs +++ b/aimdb-wasm-adapter/src/buffer.rs @@ -301,9 +301,11 @@ impl Future for WasmRecvFuture<'_, T> { WasmBufferInner::SingleLatest { wakers, .. } => wakers, WasmBufferInner::Mailbox { wakers, .. } => wakers, }; - // Replace existing waker for this reader if present, or add new one. - // For simplicity, we always push. Wakers are drained on each push(). - wakers.push(cx.waker().clone()); + // Deduplicate: only add if no existing waker will wake the same task. + // Prevents unbounded growth when a single reader is polled repeatedly. + if !wakers.iter().any(|w| w.will_wake(cx.waker())) { + wakers.push(cx.waker().clone()); + } Poll::Pending } Err(e) => Poll::Ready(Err(e)), diff --git a/aimdb-wasm-adapter/src/time.rs b/aimdb-wasm-adapter/src/time.rs index 950c6baa..268a9b95 100644 --- a/aimdb-wasm-adapter/src/time.rs +++ b/aimdb-wasm-adapter/src/time.rs @@ -2,6 +2,9 @@ //! //! Uses `Performance.now()` for high-resolution relative timestamps and //! `setTimeout` (via a JS Promise) for async sleep. +//! +//! Works in both Window (browser) and Worker/ServiceWorker contexts by +//! accessing `globalThis` via `js_sys::global()` instead of `web_sys::window()`. 
use crate::runtime::WasmAdapter; use aimdb_executor::TimeOps; @@ -46,6 +49,44 @@ unsafe impl Sync for WasmInstant {} unsafe impl Send for WasmDuration {} unsafe impl Sync for WasmDuration {} +// โ”€โ”€โ”€ globalThis helpers (Window + Worker compatible) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Get `performance.now()` from `globalThis`. +/// +/// Works in Window, Worker, and ServiceWorker contexts. +#[cfg(feature = "wasm-runtime")] +fn global_performance_now() -> f64 { + use wasm_bindgen::JsCast; + + let global = js_sys::global(); + let perf = js_sys::Reflect::get(&global, &"performance".into()) + .expect("globalThis.performance not available"); + let now = js_sys::Reflect::get(&perf, &"now".into()) + .expect("globalThis.performance.now not available"); + let now_fn: js_sys::Function = now.unchecked_into(); + now_fn + .call0(&perf) + .expect("performance.now() call failed") + .as_f64() + .expect("performance.now() did not return a number") +} + +/// Call `globalThis.setTimeout(callback, delay)`. +/// +/// Works in Window, Worker, and ServiceWorker contexts. 
+#[cfg(feature = "wasm-runtime")] +fn global_set_timeout(callback: &js_sys::Function, delay_ms: i32) { + use wasm_bindgen::JsCast; + + let global = js_sys::global(); + let set_timeout: js_sys::Function = js_sys::Reflect::get(&global, &"setTimeout".into()) + .expect("globalThis.setTimeout not available") + .unchecked_into(); + let _ = set_timeout.call2(&global, callback, &delay_ms.into()); +} + +// โ”€โ”€โ”€ TimeOps โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + impl TimeOps for WasmAdapter { type Instant = WasmInstant; type Duration = WasmDuration; @@ -53,11 +94,7 @@ impl TimeOps for WasmAdapter { fn now(&self) -> WasmInstant { #[cfg(feature = "wasm-runtime")] { - let perf = web_sys::window() - .expect("no global `window` โ€” not running in a browser?") - .performance() - .expect("no `Performance` API available"); - WasmInstant(perf.now()) + WasmInstant(global_performance_now()) } #[cfg(not(feature = "wasm-runtime"))] @@ -97,13 +134,7 @@ impl TimeOps for WasmAdapter { // setTimeout never rejects, so the Ok/Err result is safe to discard. 
let fut = wasm_bindgen_futures::JsFuture::from(js_sys::Promise::new( &mut |resolve, _reject| { - web_sys::window() - .unwrap() - .set_timeout_with_callback_and_timeout_and_arguments_0( - &resolve, - duration.0 as i32, - ) - .unwrap(); + global_set_timeout(&resolve, duration.0 as i32); }, )) .map(|_result| ()); diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs index 25073d23..999347a0 100644 --- a/aimdb-wasm-adapter/src/ws_bridge.rs +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -15,6 +15,7 @@ extern crate alloc; use alloc::boxed::Box; +use alloc::collections::BTreeMap; use alloc::format; use alloc::rc::Rc; use alloc::string::{String, ToString}; @@ -23,7 +24,7 @@ use core::cell::RefCell; use core::fmt::Debug; use serde::de::DeserializeOwned; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use wasm_bindgen::prelude::*; use aimdb_core::builder::AimDb; @@ -110,6 +111,8 @@ struct BridgeState { backoff_index: usize, /// Active keepalive interval ID (cleared on close/disconnect). keepalive_id: Option, + /// Keepalive ping closure (prevent GC while interval is active). + _ping_closure: Option>, /// Closures retained to prevent GC. _on_open: Option>, _on_message: Option>, @@ -132,6 +135,10 @@ struct SharedCtx { backoff: Vec, url: String, ws_cell: Rc>, + /// Pending query requests: correlation ID โ†’ (resolve, reject) JS functions. + pending_queries: Rc>>, + /// Simple counter for generating unique query IDs. 
+ query_id_counter: Rc>, } // โ”€โ”€โ”€ WsBridge โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -195,8 +202,15 @@ impl WsBridge { let mut state = self.ctx.state.borrow_mut(); if state.pending_writes.len() < self.ctx.config.max_offline_queue { state.pending_writes.push_back(msg); + } else { + web_sys::console::warn_1( + &format!( + "[WsBridge] Offline queue full ({} messages), dropping write for topic '{}'", + self.ctx.config.max_offline_queue, topic + ) + .into(), + ); } - // else: drop (overflow policy) } Ok(()) } @@ -212,6 +226,7 @@ impl WsBridge { } } // Drop closures to break Rc cycles + state._ping_closure = None; state._on_open = None; state._on_message = None; state._on_close = None; @@ -226,6 +241,79 @@ impl WsBridge { pub fn status(&self) -> String { self.ctx.state.borrow().status.as_str().to_string() } + + /// Query historical / persisted records over the WebSocket connection. + /// + /// Returns a `Promise` that resolves with `QueryRecord[]`. 
+ /// + /// ```ts + /// const records = await bridge.query('*', { from: 1700000000, to: 1700003600, limit: 500 }); + /// ``` + pub fn query(&self, pattern: &str, options: JsValue) -> js_sys::Promise { + let ctx = self.ctx.clone(); + let pattern = pattern.to_string(); + + // Parse options + #[derive(Deserialize, Default)] + struct QueryOpts { + from: Option, + to: Option, + limit: Option, + } + let opts: QueryOpts = if options.is_undefined() || options.is_null() { + QueryOpts::default() + } else { + serde_wasm_bindgen::from_value(options).unwrap_or_default() + }; + + // Generate unique ID + let id = { + let mut counter = ctx.query_id_counter.borrow_mut(); + *counter += 1; + format!("q{}", *counter) + }; + + // Create promise + let id_for_promise = id.clone(); + js_sys::Promise::new(&mut move |resolve, reject| { + // Register pending query + ctx.pending_queries + .borrow_mut() + .insert(id_for_promise.clone(), (resolve, reject.clone())); + + // Build and send query message + let msg = aimdb_ws_protocol::ClientMessage::Query { + id: id_for_promise.clone(), + pattern: pattern.clone(), + from: opts.from, + to: opts.to, + limit: opts.limit, + }; + + let state = ctx.state.borrow(); + if state.status != ConnectionStatus::Connected { + drop(state); + ctx.pending_queries.borrow_mut().remove(&id_for_promise); + let _ = reject.call1(&JsValue::NULL, &JsValue::from_str("Not connected")); + return; + } + drop(state); + + if let Err(e) = send_json(&ctx.ws_cell.borrow(), &msg) { + ctx.pending_queries.borrow_mut().remove(&id_for_promise); + let _ = reject.call1( + &JsValue::NULL, + &JsValue::from_str(&format!("Send failed: {e:?}")), + ); + } + }) + } +} + +impl Drop for WsBridge { + fn drop(&mut self) { + self.disconnect(); + } } // โ”€โ”€โ”€ Internal constructor โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -254,6 +342,7 @@ impl WsBridge { pending_writes: 
alloc::collections::VecDeque::new(), backoff_index: 0, keepalive_id: None, + _ping_closure: None, _on_open: None, _on_message: None, _on_close: None, @@ -271,6 +360,8 @@ impl WsBridge { backoff, url: url.to_string(), ws_cell, + pending_queries: Rc::new(RefCell::new(BTreeMap::new())), + query_id_counter: Rc::new(RefCell::new(0)), }); install_ws_callbacks(&ctx); @@ -332,10 +423,11 @@ fn install_ws_callbacks(ctx: &Rc) { ping_closure.as_ref().unchecked_ref(), ctx.config.keepalive_ms as i32, ) { - ctx.state.borrow_mut().keepalive_id = Some(id); + let mut s = ctx.state.borrow_mut(); + s.keepalive_id = Some(id); + s._ping_closure = Some(ping_closure); } } - ping_closure.forget(); } emit_status(&ctx.on_status, ConnectionStatus::Connected); @@ -349,7 +441,7 @@ fn install_ws_callbacks(ctx: &Rc) { Closure::wrap(Box::new(move |event: web_sys::MessageEvent| { if let Some(text) = event.data().as_string() { if let Ok(msg) = serde_json::from_str::(&text) { - handle_server_message(&ctx.db, &ctx.schema_map, msg); + handle_server_message(&ctx, msg); } } }) as Box) @@ -365,11 +457,15 @@ fn install_ws_callbacks(ctx: &Rc) { return; // user-initiated disconnect โ€” don't reconnect } - // Clear keepalive timer - if let Some(id) = ctx.state.borrow_mut().keepalive_id.take() { - if let Some(window) = web_sys::window() { - window.clear_interval_with_handle(id); + // Clear keepalive timer and closure + { + let mut s = ctx.state.borrow_mut(); + if let Some(id) = s.keepalive_id.take() { + if let Some(window) = web_sys::window() { + window.clear_interval_with_handle(id); + } } + s._ping_closure = None; } if ctx.config.auto_reconnect { @@ -465,23 +561,39 @@ fn schedule_reconnect(ctx: Rc, delay_ms: u32) { /// /// Uses direct `serde_json::from_value::()` โ†’ buffer push, bypassing the /// `JsValue` intermediary that the old code path used. 
-fn handle_server_message( - db: &AimDb, - schema_map: &[(String, String)], - msg: ServerMessage, -) { +fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { match msg { ServerMessage::Data { topic, payload, .. } | ServerMessage::Snapshot { topic, payload } => { if let Some(payload) = payload { - let schema = schema_map + let schema = ctx + .schema_map .iter() .find(|(k, _)| k == &topic) .map(|(_, v)| v.as_str()); - if let Some(schema) = schema { - dispatch_streamable!(schema, |T| { - produce_from_json::(db, &topic, payload.clone()); - }); + match schema { + Some(schema) => { + let dispatched = dispatch_streamable!(schema, |T| { + produce_from_json::(&ctx.db, &topic, payload.clone()); + }); + if dispatched.is_none() { + web_sys::console::warn_1( + &format!( + "[WsBridge] dispatch_streamable returned None for schema='{}' topic='{}'", + schema, topic + ).into(), + ); + } + } + None => { + web_sys::console::warn_1( + &format!( + "[WsBridge] No schema mapping for topic='{}' (schema_map has {} entries)", + topic, + ctx.schema_map.len() + ).into(), + ); + } } } } @@ -498,6 +610,30 @@ fn handle_server_message( ServerMessage::Pong => { // Keepalive ACK โ€” reset timer if needed. } + ServerMessage::QueryResult { id, records, total } => { + // Resolve the pending promise for this query + if let Some((resolve, _reject)) = ctx.pending_queries.borrow_mut().remove(&id) { + // Convert records to JS array. + // Use json_compatible() so serde_json::Value::Object + // becomes a plain JS object (not a JS Map). 
+ let serializer = serde_wasm_bindgen::Serializer::json_compatible(); + let arr = js_sys::Array::new(); + for rec in &records { + if let Ok(js_val) = rec.serialize(&serializer) { + arr.push(&js_val); + } + } + // Attach metadata as properties on the array + let result_obj = js_sys::Object::new(); + let _ = js_sys::Reflect::set(&result_obj, &"records".into(), &arr); + let _ = js_sys::Reflect::set( + &result_obj, + &"total".into(), + &JsValue::from_f64(total as f64), + ); + let _ = resolve.call1(&JsValue::NULL, &result_obj); + } + } } } @@ -508,10 +644,32 @@ fn produce_from_json(db: &AimDb, key: &str, json: serde_json::Va where T: Send + Sync + 'static + Debug + Clone + DeserializeOwned, { - if let Ok(val) = serde_json::from_value::(json) { - let inner = db.inner(); - if let Ok(typed) = inner.get_typed_record_by_key::(key) { - crate::bindings::poll_sync(typed.produce(val)); + match serde_json::from_value::(json) { + Ok(val) => { + let inner = db.inner(); + match inner.get_typed_record_by_key::(key) { + Ok(typed) => { + crate::bindings::poll_sync(typed.produce(val)); + } + Err(e) => { + web_sys::console::warn_1( + &format!( + "[WsBridge] get_typed_record_by_key failed for key='{}': {:?}", + key, e + ) + .into(), + ); + } + } + } + Err(e) => { + web_sys::console::warn_1( + &format!( + "[WsBridge] JSON deserialize failed for key='{}': {}", + key, e + ) + .into(), + ); } } } From c64ba002f3e4ec7ccc524779d870a397a9c26494 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Thu, 5 Mar 2026 20:19:54 +0000 Subject: [PATCH 20/46] feat: implement query handling in WebSocket connector and client sessions --- Cargo.lock | 12 -- aimdb-websocket-connector/Cargo.toml | 1 - aimdb-websocket-connector/src/auth.rs | 62 ++++++---- aimdb-websocket-connector/src/builder.rs | 46 ++++--- .../src/client/builder.rs | 14 +-- .../src/client/connector.rs | 99 ++++++--------- .../src/client_manager.rs | 12 +- aimdb-websocket-connector/src/lib.rs | 5 +- 
aimdb-websocket-connector/src/protocol.rs | 2 +- aimdb-websocket-connector/src/session.rs | 116 +++++++++++++++++- 10 files changed, 230 insertions(+), 139 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index abf31115..a9eec308 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -298,7 +298,6 @@ dependencies = [ "aimdb-core", "aimdb-executor", "aimdb-ws-protocol", - "async-trait", "axum", "dashmap", "futures-util", @@ -423,17 +422,6 @@ dependencies = [ "syn 2.0.108", ] -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.108", -] - [[package]] name = "atomic-waker" version = "1.1.2" diff --git a/aimdb-websocket-connector/Cargo.toml b/aimdb-websocket-connector/Cargo.toml index 021a88b3..00473c0a 100644 --- a/aimdb-websocket-connector/Cargo.toml +++ b/aimdb-websocket-connector/Cargo.toml @@ -60,7 +60,6 @@ tokio-tungstenite = { version = "0.26", optional = true } # Utilities dashmap = { version = "6", optional = true } -async-trait = "0.1" futures-util = { version = "0.3", default-features = false, features = [ "sink", ] } diff --git a/aimdb-websocket-connector/src/auth.rs b/aimdb-websocket-connector/src/auth.rs index 018ea945..df93a96e 100644 --- a/aimdb-websocket-connector/src/auth.rs +++ b/aimdb-websocket-connector/src/auth.rs @@ -14,7 +14,9 @@ use std::collections::HashMap; use std::net::SocketAddr; use std::sync::Arc; -use async_trait::async_trait; +use core::future::Future; +use core::pin::Pin; + use axum::http::HeaderMap; // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -114,43 +116,57 @@ impl AuthError { /// /// struct BearerAuth { valid_token: String } /// -/// #[async_trait::async_trait] /// impl 
AuthHandler for BearerAuth { -/// async fn authenticate(&self, req: &AuthRequest) -> Result { -/// let token = req.headers -/// .get("Authorization") -/// .and_then(|v| v.to_str().ok()) -/// .and_then(|v| v.strip_prefix("Bearer ")) -/// .ok_or_else(|| AuthError::new("missing token"))?; +/// fn authenticate<'a>( +/// &'a self, +/// req: &'a AuthRequest, +/// ) -> Pin> + Send + 'a>> { +/// Box::pin(async move { +/// let token = req.headers +/// .get("Authorization") +/// .and_then(|v| v.to_str().ok()) +/// .and_then(|v| v.strip_prefix("Bearer ")) +/// .ok_or_else(|| AuthError::new("missing token"))?; /// -/// if token == self.valid_token { -/// Ok(Permissions::allow_all()) -/// } else { -/// Err(AuthError::new("invalid token")) -/// } +/// if token == self.valid_token { +/// Ok(Permissions::allow_all()) +/// } else { +/// Err(AuthError::new("invalid token")) +/// } +/// }) /// } /// } /// ``` -#[async_trait] pub trait AuthHandler: Send + Sync + 'static { /// Called during WebSocket upgrade to authenticate the client. /// /// Return [`Ok(Permissions)`] to accept the connection with the assigned /// permissions, or [`Err(AuthError)`] to reject it (HTTP 401). - async fn authenticate(&self, request: &AuthRequest) -> Result; + fn authenticate<'a>( + &'a self, + request: &'a AuthRequest, + ) -> Pin> + Send + 'a>>; /// Called before allowing a topic subscription. /// /// The default implementation delegates to [`Permissions::can_subscribe`]. - async fn authorize_subscribe(&self, client: &ClientInfo, topic: &str) -> bool { - client.permissions.can_subscribe(topic) + fn authorize_subscribe<'a>( + &'a self, + client: &'a ClientInfo, + topic: &'a str, + ) -> Pin + Send + 'a>> { + Box::pin(async move { client.permissions.can_subscribe(topic) }) } /// Called before routing an inbound write to a producer. /// /// The default implementation delegates to [`Permissions::can_write`]. 
- async fn authorize_write(&self, client: &ClientInfo, topic: &str) -> bool { - client.permissions.can_write(topic) + fn authorize_write<'a>( + &'a self, + client: &'a ClientInfo, + topic: &'a str, + ) -> Pin + Send + 'a>> { + Box::pin(async move { client.permissions.can_write(topic) }) } } @@ -161,10 +177,12 @@ pub trait AuthHandler: Send + Sync + 'static { /// Default `AuthHandler` that allows all connections and operations. pub struct NoAuth; -#[async_trait] impl AuthHandler for NoAuth { - async fn authenticate(&self, _request: &AuthRequest) -> Result { - Ok(Permissions::allow_all()) + fn authenticate<'a>( + &'a self, + _request: &'a AuthRequest, + ) -> Pin> + Send + 'a>> { + Box::pin(async move { Ok(Permissions::allow_all()) }) } } diff --git a/aimdb-websocket-connector/src/builder.rs b/aimdb-websocket-connector/src/builder.rs index a7b9f422..40f71bd8 100644 --- a/aimdb-websocket-connector/src/builder.rs +++ b/aimdb-websocket-connector/src/builder.rs @@ -19,7 +19,6 @@ use std::{ net::{SocketAddr, ToSocketAddrs}, pin::Pin, sync::{Arc, Mutex}, - time::Duration, }; use aimdb_core::{router::RouterBuilder, ConnectorBuilder}; @@ -30,7 +29,7 @@ use crate::{ client_manager::ClientManager, connector::WebSocketConnectorImpl, server::start_server, - session::{NoSnapshot, SessionContext, SnapshotProvider}, + session::{NoQuery, NoSnapshot, QueryHandler, SessionContext, SnapshotProvider}, }; // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -58,12 +57,6 @@ pub struct WebSocketConnectorBuilder { max_clients: usize, channel_capacity: usize, additional_routes: Option, - /// Heartbeat interval (reserved for future use). - #[allow(dead_code)] - heartbeat_interval: Duration, - /// Client timeout (reserved for future use). 
- #[allow(dead_code)] - client_timeout: Duration, /// Topics to subscribe every new client to automatically on connect. /// /// When non-empty, clients receive data on these topics immediately after @@ -76,6 +69,8 @@ pub struct WebSocketConnectorBuilder { /// Combine with a serializer that produces a complete flat JSON object /// (including `"type"` and `"node_id"`) to speak a custom protocol. raw_payload: bool, + /// Handler for client `query` messages (history retrieval). + query_handler: Arc, } impl Default for WebSocketConnectorBuilder { @@ -88,10 +83,9 @@ impl Default for WebSocketConnectorBuilder { max_clients: 1024, channel_capacity: 256, additional_routes: None, - heartbeat_interval: Duration::from_secs(30), - client_timeout: Duration::from_secs(60), auto_subscribe_topics: Vec::new(), raw_payload: false, + query_handler: Arc::new(NoQuery), } } } @@ -119,9 +113,11 @@ impl WebSocketConnectorBuilder { /// .bind(([127, 0, 0, 1], 8765)) /// ``` pub fn bind(mut self, addr: impl ToSocketAddrs) -> Self { - if let Some(a) = addr.to_socket_addrs().ok().and_then(|mut i| i.next()) { - self.bind_addr = a; - } + self.bind_addr = addr + .to_socket_addrs() + .expect("invalid bind address") + .next() + .expect("bind address resolved to no addresses"); self } @@ -188,18 +184,6 @@ impl WebSocketConnectorBuilder { self } - /// Set the WebSocket heartbeat interval (default: 30 s). - pub fn with_heartbeat_interval(mut self, interval: Duration) -> Self { - self.heartbeat_interval = interval; - self - } - - /// Set the client inactivity timeout (default: 60 s). - pub fn with_client_timeout(mut self, timeout: Duration) -> Self { - self.client_timeout = timeout; - self - } - /// Subscribe every new client to these topic patterns immediately on connect. 
/// /// Clients will begin receiving data on matching topics right after the @@ -229,6 +213,17 @@ impl WebSocketConnectorBuilder { self.raw_payload = enabled; self } + + /// Plug in a handler for client `query` messages (history retrieval). + /// + /// When set, clients can send `{"type":"query", "id":"โ€ฆ", "pattern":"*"}` + /// and receive a `{"type":"query_result", โ€ฆ}` response with persisted records. + /// + /// Without this, query messages receive a `server_error` response. + pub fn with_query_handler(mut self, handler: impl QueryHandler + 'static) -> Self { + self.query_handler = Arc::new(handler); + self + } } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -299,6 +294,7 @@ where late_join: self.late_join, snapshot_provider, auto_subscribe_topics: self.auto_subscribe_topics.clone(), + query_handler: self.query_handler.clone(), }; // โ”€โ”€ Build connector & spawn outbound publishers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ diff --git a/aimdb-websocket-connector/src/client/builder.rs b/aimdb-websocket-connector/src/client/builder.rs index 509b7ea5..c89804c0 100644 --- a/aimdb-websocket-connector/src/client/builder.rs +++ b/aimdb-websocket-connector/src/client/builder.rs @@ -55,8 +55,6 @@ pub struct WsClientConnectorBuilder { /// Topics to subscribe to on the remote server immediately after connect. /// Wildcards supported (e.g., `["sensors/#"]`). subscribe_topics: Vec, - /// Request late-join snapshots on (re)connect (default: true). 
- late_join: bool, } impl WsClientConnectorBuilder { @@ -76,7 +74,6 @@ impl WsClientConnectorBuilder { keepalive_ms: 30_000, max_offline_queue: 256, subscribe_topics: Vec::new(), - late_join: true, } } @@ -127,12 +124,6 @@ impl WsClientConnectorBuilder { self.subscribe_topics = topics.into_iter().map(Into::into).collect(); self } - - /// Enable or disable late-join snapshot requests on connect (default: `true`). - pub fn with_late_join(mut self, enabled: bool) -> Self { - self.late_join = enabled; - self - } } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -201,21 +192,20 @@ where }, max_offline_queue: self.max_offline_queue, subscribe_topics: topics, - late_join: self.late_join, }; // โ”€โ”€ Build the connector โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ let connector = WsClientConnectorImpl::connect(config, router, db) .await .map_err(|e| aimdb_core::DbError::RuntimeError { - message: format!("WS client connect failed: {}", e).into(), + message: format!("WS client connect failed: {}", e), })?; // โ”€โ”€ Spawn outbound publishers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ connector .spawn_outbound_publishers(db, outbound_routes) .map_err(|e| aimdb_core::DbError::RuntimeError { - message: format!("WS client outbound setup failed: {}", e).into(), + message: format!("WS client outbound setup failed: {}", e), })?; Ok(Arc::new(connector) as Arc) diff --git a/aimdb-websocket-connector/src/client/connector.rs b/aimdb-websocket-connector/src/client/connector.rs index 270ea531..235195f4 100644 --- a/aimdb-websocket-connector/src/client/connector.rs +++ b/aimdb-websocket-connector/src/client/connector.rs @@ -32,9 +32,6 @@ pub(crate) struct WsClientConfig { pub keepalive_interval: Option, pub max_offline_queue: usize, 
pub subscribe_topics: Vec, - /// Reserved for future use (requesting snapshots on connect). - #[allow(dead_code)] - pub late_join: bool, } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -59,6 +56,10 @@ struct SharedState { status: ConnectionStatus, pending_writes: VecDeque, max_offline_queue: usize, + /// The current write channel sender. Swapped atomically on reconnect so + /// that all producers (outbound publishers, publish(), keepalive) always + /// send through the live connection. + write_tx: mpsc::UnboundedSender, } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -75,9 +76,7 @@ struct SharedState { /// - Keepalive pings /// - Automatic reconnection pub struct WsClientConnectorImpl { - /// Channel to send outbound text frames to the write loop. - write_tx: mpsc::UnboundedSender, - /// Shared state for status and offline queue. + /// Shared state for status, offline queue, and the current write channel. state: Arc>, /// Router for inbound data (server โ†’ local buffers). 
#[allow(dead_code)] @@ -94,12 +93,6 @@ impl WsClientConnectorImpl { where R: aimdb_executor::Spawn + 'static, { - let state = Arc::new(Mutex::new(SharedState { - status: ConnectionStatus::Connecting, - pending_writes: VecDeque::new(), - max_offline_queue: config.max_offline_queue, - })); - // Connect to the remote server let (ws_stream, _response) = tokio_tungstenite::connect_async(&config.url) .await @@ -113,11 +106,12 @@ impl WsClientConnectorImpl { // Channel for sending text frames from any task to the write loop let (write_tx, write_rx) = mpsc::unbounded_channel::(); - // Mark connected - { - let mut s = state.lock().await; - s.status = ConnectionStatus::Connected; - } + let state = Arc::new(Mutex::new(SharedState { + status: ConnectionStatus::Connected, + pending_writes: VecDeque::new(), + max_offline_queue: config.max_offline_queue, + write_tx, + })); // โ”€โ”€ Send subscribe message โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ if !config.subscribe_topics.is_empty() { @@ -125,21 +119,11 @@ impl WsClientConnectorImpl { topics: config.subscribe_topics.clone(), }; if let Ok(json) = serde_json::to_string(&sub_msg) { - let _ = write_tx.send(json); - } - } - - // โ”€โ”€ Flush any pending offline writes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ - { - let mut s = state.lock().await; - while let Some(msg) = s.pending_writes.pop_front() { - let _ = write_tx.send(msg); + let _ = state.lock().await.write_tx.send(json); } } // โ”€โ”€ Spawn write loop โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ - let write_state = state.clone(); - let write_tx_for_reconnect = write_tx.clone(); let reconnect_url = config.url.clone(); let reconnect_topics = config.subscribe_topics.clone(); let auto_reconnect = config.auto_reconnect; @@ -176,10 +160,10 @@ impl WsClientConnectorImpl { // โ”€โ”€ Spawn keepalive 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ if let Some(interval) = config.keepalive_interval { - let write_tx_ka = write_tx.clone(); + let ka_state = state.clone(); db.runtime() .spawn(async move { - Self::run_keepalive(write_tx_ka, interval).await; + Self::run_keepalive(ka_state, interval).await; }) .map_err(|e| format!("Failed to spawn keepalive: {e:?}"))?; } @@ -188,11 +172,10 @@ impl WsClientConnectorImpl { if auto_reconnect { db.runtime() .spawn({ - let state = write_state.clone(); + let state = state.clone(); async move { Self::run_reconnect_watcher( state, - write_tx_for_reconnect, reconnect_url, reconnect_topics, router_for_reconnect, @@ -204,11 +187,7 @@ impl WsClientConnectorImpl { .map_err(|e| format!("Failed to spawn reconnect watcher: {e:?}"))?; } - Ok(Self { - write_tx, - state, - router, - }) + Ok(Self { state, router }) } /// Spawn one Tokio task per outbound route. @@ -226,7 +205,6 @@ impl WsClientConnectorImpl { let runtime = db.runtime(); for (default_topic, consumer, serializer, _config, topic_provider) in outbound_routes { - let write_tx = self.write_tx.clone(); let state = self.state.clone(); let default_topic_clone = default_topic.clone(); @@ -289,12 +267,11 @@ impl WsClientConnectorImpl { }; if let Ok(json) = serde_json::to_string(&msg) { - let s = state.lock().await; + let mut s = state.lock().await; if s.status == ConnectionStatus::Connected { - let _ = write_tx.send(json); + let _ = s.write_tx.send(json); } else if s.pending_writes.len() < s.max_offline_queue { - drop(s); - state.lock().await.pending_writes.push_back(json); + s.pending_writes.push_back(json); } // else: drop (overflow policy) } @@ -410,12 +387,16 @@ impl WsClientConnectorImpl { ServerMessage::Pong => { // Keepalive ACK โ€” nothing to do. } + ServerMessage::QueryResult { .. } => { + // Query results are handled by the WASM bridge; the native + // client connector does not issue queries (yet). 
+ } } } } - /// Keepalive loop: sends periodic Ping messages. - async fn run_keepalive(write_tx: mpsc::UnboundedSender, interval: Duration) { + /// Keepalive loop: sends periodic Ping messages via the shared state sender. + async fn run_keepalive(state: Arc>, interval: Duration) { let mut ticker = tokio::time::interval(interval); ticker.tick().await; // skip first immediate tick @@ -423,7 +404,11 @@ impl WsClientConnectorImpl { ticker.tick().await; let ping = ClientMessage::Ping; if let Ok(json) = serde_json::to_string(&ping) { - if write_tx.send(json).is_err() { + let s = state.lock().await; + if s.status != ConnectionStatus::Connected { + continue; + } + if s.write_tx.send(json).is_err() { break; // channel closed, connection gone } } @@ -435,7 +420,6 @@ impl WsClientConnectorImpl { /// Uses exponential backoff: 500ms, 1s, 2s, 4s, 8s (capped). async fn run_reconnect_watcher( state: Arc>, - _write_tx: mpsc::UnboundedSender, url: String, subscribe_topics: Vec, router: Arc, @@ -489,7 +473,7 @@ impl WsClientConnectorImpl { let (ws_write, ws_read) = ws_stream.split(); - // Spawn new write loop + // Create new channel and swap the sender atomically let (new_write_tx, new_write_rx) = mpsc::unbounded_channel::(); tokio::spawn(Self::run_write_loop(ws_write, new_write_rx)); @@ -510,22 +494,18 @@ impl WsClientConnectorImpl { } } - // Flush pending writes + // Swap write_tx and flush pending writes in one critical section. + // All producers (outbound publishers, publish(), keepalive) will + // pick up the new sender on their next lock acquisition. { let mut s = state.lock().await; + s.write_tx = new_write_tx; while let Some(msg) = s.pending_writes.pop_front() { - let _ = new_write_tx.send(msg); + let _ = s.write_tx.send(msg); } s.status = ConnectionStatus::Connected; } - // Note: The original write_tx is now stale. New outbound - // publishers would need to be re-wired. 
For the initial - // implementation, outbound publishers detect send failures - // and queue to the offline buffer via the shared state. - // A production-grade implementation would swap the write_tx - // atomically. - attempt = 0; } Err(_e) => { @@ -567,14 +547,13 @@ impl aimdb_core::transport::Connector for WsClientConnectorImpl { let json = serde_json::to_string(&msg).map_err(|_| PublishError::MessageTooLarge)?; - let s = self.state.lock().await; + let mut s = self.state.lock().await; if s.status == ConnectionStatus::Connected { - self.write_tx + s.write_tx .send(json) .map_err(|_| PublishError::ConnectionFailed)?; } else if s.pending_writes.len() < s.max_offline_queue { - drop(s); - self.state.lock().await.pending_writes.push_back(json); + s.pending_writes.push_back(json); } else { return Err(PublishError::BufferFull); } diff --git a/aimdb-websocket-connector/src/client_manager.rs b/aimdb-websocket-connector/src/client_manager.rs index 760afaf0..ce47b97c 100644 --- a/aimdb-websocket-connector/src/client_manager.rs +++ b/aimdb-websocket-connector/src/client_manager.rs @@ -104,16 +104,15 @@ impl ClientManager { /// Add subscription patterns for the given client. /// - /// Returns the list of newly-added patterns (already-subscribed patterns - /// are silently included without duplication). + /// Returns only the patterns that were actually new (duplicates are skipped). pub fn subscribe(&self, id: ClientId, patterns: &[String]) -> Vec { let mut added = Vec::new(); if let Some(mut entry) = self.clients.get_mut(&id.0) { for pat in patterns { if !entry.subscriptions.contains(pat) { entry.subscriptions.push(pat.clone()); + added.push(pat.clone()); } - added.push(pat.clone()); } } added @@ -261,6 +260,13 @@ impl ClientManager { self.send_to(id, &ServerMessage::Pong).await; } + /// Send an arbitrary [`ServerMessage`] to a single client. + /// + /// Used by the query handler to deliver `QueryResult` responses. 
+ pub async fn send_to_client(&self, id: ClientId, msg: &ServerMessage) { + self.send_to(id, msg).await; + } + // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ // Helpers // โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ diff --git a/aimdb-websocket-connector/src/lib.rs b/aimdb-websocket-connector/src/lib.rs index 8048830a..5d58e2b6 100644 --- a/aimdb-websocket-connector/src/lib.rs +++ b/aimdb-websocket-connector/src/lib.rs @@ -120,4 +120,7 @@ pub use client_manager::ClientManager; #[cfg(feature = "client")] pub type WsClientConnector = client::WsClientConnectorBuilder; -pub use protocol::{ClientMessage, ErrorCode, ServerMessage}; +pub use protocol::{ClientMessage, ErrorCode, QueryRecord, ServerMessage}; + +#[cfg(feature = "server")] +pub use session::{NoQuery, QueryHandler}; diff --git a/aimdb-websocket-connector/src/protocol.rs b/aimdb-websocket-connector/src/protocol.rs index 0812aef1..016b5816 100644 --- a/aimdb-websocket-connector/src/protocol.rs +++ b/aimdb-websocket-connector/src/protocol.rs @@ -21,7 +21,7 @@ //! - `ping` โ€” keepalive ping // Re-export everything from the shared protocol crate -pub use aimdb_ws_protocol::{topic_matches, ClientMessage, ErrorCode, ServerMessage}; +pub use aimdb_ws_protocol::{topic_matches, ClientMessage, ErrorCode, QueryRecord, ServerMessage}; /// Returns the current milliseconds since the Unix epoch (for `ts` fields). pub fn now_ms() -> u64 { diff --git a/aimdb-websocket-connector/src/session.rs b/aimdb-websocket-connector/src/session.rs index bc880db4..6554b1d4 100644 --- a/aimdb-websocket-connector/src/session.rs +++ b/aimdb-websocket-connector/src/session.rs @@ -5,7 +5,7 @@ //! 1. 
**Send loop** โ€” drains the per-client `mpsc` channel and writes frames to //! the WebSocket. //! 2. **Recv loop** โ€” reads frames from the WebSocket and dispatches -//! `subscribe`, `unsubscribe`, `write`, and `ping` messages. +//! `subscribe`, `unsubscribe`, `write`, `ping`, and `query` messages. //! 3. A **cleanup** fence โ€” unregisters the client from the [`ClientManager`] //! when either loop finishes. //! @@ -14,6 +14,9 @@ use std::sync::Arc; +use core::future::Future; +use core::pin::Pin; + use axum::extract::ws::{Message, WebSocket}; use futures_util::{SinkExt, StreamExt}; use tokio::sync::mpsc; @@ -21,12 +24,78 @@ use tokio::sync::mpsc; use crate::{ auth::{AuthHandler, ClientId, ClientInfo}, client_manager::ClientManager, - protocol::{ClientMessage, ErrorCode}, + protocol::{ClientMessage, ErrorCode, QueryRecord}, }; // Re-export so server.rs can use it easily. pub use aimdb_core::router::Router; +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// Query handler +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Boxed future returned by [`QueryHandler::handle_query`]. +pub type QueryFuture<'a> = + Pin, usize), String>> + Send + 'a>>; + +/// Trait for handling `Query` messages from WebSocket clients. +/// +/// Implementations typically query a persistence backend and return matching +/// records. The trait is async to support database I/O. 
+/// +/// # Example +/// +/// ```rust,ignore +/// struct MyQueryHandler { db: Arc } +/// +/// impl QueryHandler for MyQueryHandler { +/// fn handle_query<'a>( +/// &'a self, +/// pattern: &'a str, +/// from: Option, +/// to: Option, +/// limit: Option, +/// ) -> QueryFuture<'a> { +/// Box::pin(async move { +/// // query your persistence layer โ€ฆ +/// Ok((records, total)) +/// }) +/// } +/// } +/// ``` +pub trait QueryHandler: Send + Sync + 'static { + /// Execute a history query and return `(records, total_count)`. + /// + /// - `pattern` โ€” topic pattern (MQTT wildcards, `"*"` for all) + /// - `from` / `to` โ€” time range in Unix **seconds** (inclusive) + /// - `limit` โ€” max records per matching topic + fn handle_query<'a>( + &'a self, + pattern: &'a str, + from: Option, + to: Option, + limit: Option, + ) -> QueryFuture<'a>; +} + +/// A query handler that always returns an error (used when no persistence +/// backend is configured). +pub struct NoQuery; + +impl QueryHandler for NoQuery { + fn handle_query<'a>( + &'a self, + _pattern: &'a str, + _from: Option, + _to: Option, + _limit: Option, + ) -> QueryFuture<'a> { + Box::pin( + async move { Err("Query not supported โ€” no persistence backend configured".into()) }, + ) + } +} + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• // Session context // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -51,6 +120,8 @@ pub(crate) struct SessionContext { /// Use `["#"]` to push all data to all clients without requiring an /// explicit `{"type":"subscribe"}` message from the client. pub auto_subscribe_topics: Vec, + /// Handler for `Query` messages (historical record retrieval). 
+ pub query_handler: Arc, } /// Provides the current serialized value of a record for late-join snapshots. @@ -206,6 +277,15 @@ async fn handle_text(id: ClientId, text: &str, ctx: &SessionContext) { ClientMessage::Ping => { ctx.client_mgr.send_pong(id).await; } + ClientMessage::Query { + id: query_id, + pattern, + from, + to, + limit, + } => { + handle_query(id, query_id, pattern, from, to, limit, ctx).await; + } } } @@ -308,3 +388,35 @@ async fn handle_write( .await; } } + +async fn handle_query( + id: ClientId, + query_id: String, + pattern: String, + from: Option, + to: Option, + limit: Option, + ctx: &SessionContext, +) { + use crate::protocol::ServerMessage; + + match ctx + .query_handler + .handle_query(&pattern, from, to, limit) + .await + { + Ok((records, total)) => { + let result = ServerMessage::QueryResult { + id: query_id, + records, + total, + }; + ctx.client_mgr.send_to_client(id, &result).await; + } + Err(msg) => { + ctx.client_mgr + .send_error(id, ErrorCode::ServerError, None, &msg) + .await; + } + } +} From 0bd1902607448d4eb11f57a25415486c849ffc6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Thu, 5 Mar 2026 20:31:55 +0000 Subject: [PATCH 21/46] refactor: clean up Cargo.toml dependencies and improve documentation for WebSocket protocol --- aimdb-wasm-adapter/Cargo.toml | 4 +- aimdb-wasm-adapter/src/ws_bridge.rs | 2 +- aimdb-websocket-connector/Cargo.toml | 2 +- aimdb-websocket-connector/src/protocol.rs | 29 ++-------- aimdb-websocket-connector/src/session.rs | 2 +- aimdb-ws-protocol/Cargo.toml | 9 +-- aimdb-ws-protocol/src/lib.rs | 70 +++++++++++++++++------ 7 files changed, 63 insertions(+), 55 deletions(-) diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml index 7a0468b3..8ec19f2f 100644 --- a/aimdb-wasm-adapter/Cargo.toml +++ b/aimdb-wasm-adapter/Cargo.toml @@ -34,8 +34,8 @@ aimdb-core = { version = "0.5.0", path = "../aimdb-core", default-features = fal "alloc", ] } -# Shared WebSocket wire 
protocol (no_std + alloc) -aimdb-ws-protocol = { version = "0.1.0", path = "../aimdb-ws-protocol", default-features = false } +# Shared WebSocket wire protocol +aimdb-ws-protocol = { version = "0.1.0", path = "../aimdb-ws-protocol" } # Data contracts (alloc only โ€” no std) aimdb-data-contracts = { version = "0.5.0", path = "../aimdb-data-contracts", default-features = false, features = [ diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs index 999347a0..79b1d030 100644 --- a/aimdb-wasm-adapter/src/ws_bridge.rs +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -247,7 +247,7 @@ impl WsBridge { /// Returns a `Promise` that resolves with `QueryRecord[]`. /// /// ```ts - /// const records = await bridge.query('*', { from: 1700000000, to: 1700003600, limit: 500 }); + /// const records = await bridge.query('*', { from: 1700000000000, to: 1700003600000, limit: 500 }); /// ``` pub fn query(&self, pattern: &str, options: JsValue) -> js_sys::Promise { let ctx = self.ctx.clone(); diff --git a/aimdb-websocket-connector/Cargo.toml b/aimdb-websocket-connector/Cargo.toml index 00473c0a..3c264cb7 100644 --- a/aimdb-websocket-connector/Cargo.toml +++ b/aimdb-websocket-connector/Cargo.toml @@ -36,7 +36,7 @@ tracing = ["dep:tracing"] [dependencies] aimdb-core = { path = "../aimdb-core", default-features = false } aimdb-executor = { path = "../aimdb-executor", default-features = false } -aimdb-ws-protocol = { path = "../aimdb-ws-protocol", features = ["std"] } +aimdb-ws-protocol = { path = "../aimdb-ws-protocol" } # Async runtime tokio = { version = "1", features = [ diff --git a/aimdb-websocket-connector/src/protocol.rs b/aimdb-websocket-connector/src/protocol.rs index 016b5816..b67289fa 100644 --- a/aimdb-websocket-connector/src/protocol.rs +++ b/aimdb-websocket-connector/src/protocol.rs @@ -1,29 +1,10 @@ //! Wire protocol types and topic matching for the WebSocket connector. //! //! 
This module re-exports all types from [`aimdb_ws_protocol`] for backwards -//! compatibility. The canonical definitions now live in the shared `no_std` -//! protocol crate so they can be used by both the server connector and -//! browser/native clients. -//! -//! # Server โ†’ Client messages -//! -//! - `data` โ€” live record push -//! - `snapshot` โ€” late-join current value -//! - `subscribed` โ€” subscription acknowledgement -//! - `error` โ€” per-operation error -//! - `pong` โ€” response to client ping -//! -//! # Client โ†’ Server messages -//! -//! - `subscribe` โ€” subscribe to one or more topics (supports wildcards) -//! - `unsubscribe` โ€” cancel subscriptions -//! - `write` โ€” inbound value for a `link_from("ws://โ€ฆ")` record -//! - `ping` โ€” keepalive ping +//! compatibility. The canonical definitions live in the shared protocol crate +//! so they can be used by both the server connector and browser/native clients. // Re-export everything from the shared protocol crate -pub use aimdb_ws_protocol::{topic_matches, ClientMessage, ErrorCode, QueryRecord, ServerMessage}; - -/// Returns the current milliseconds since the Unix epoch (for `ts` fields). -pub fn now_ms() -> u64 { - aimdb_ws_protocol::now_ms() -} +pub use aimdb_ws_protocol::{ + now_ms, topic_matches, ClientMessage, ErrorCode, QueryRecord, ServerMessage, +}; diff --git a/aimdb-websocket-connector/src/session.rs b/aimdb-websocket-connector/src/session.rs index 6554b1d4..3769c7e4 100644 --- a/aimdb-websocket-connector/src/session.rs +++ b/aimdb-websocket-connector/src/session.rs @@ -67,7 +67,7 @@ pub trait QueryHandler: Send + Sync + 'static { /// Execute a history query and return `(records, total_count)`. 
/// /// - `pattern` โ€” topic pattern (MQTT wildcards, `"*"` for all) - /// - `from` / `to` โ€” time range in Unix **seconds** (inclusive) + /// - `from` / `to` โ€” time range in **milliseconds** since Unix epoch (inclusive) /// - `limit` โ€” max records per matching topic fn handle_query<'a>( &'a self, diff --git a/aimdb-ws-protocol/Cargo.toml b/aimdb-ws-protocol/Cargo.toml index 773488db..15881210 100644 --- a/aimdb-ws-protocol/Cargo.toml +++ b/aimdb-ws-protocol/Cargo.toml @@ -3,13 +3,8 @@ name = "aimdb-ws-protocol" version = "0.1.0" edition = "2021" license.workspace = true -description = "Shared wire protocol types for AimDB WebSocket connector and clients (no_std compatible)" -keywords = ["websocket", "protocol", "no_std", "aimdb"] - -[features] -default = [] -# Enable `now_ms()` helper (requires std::time) -std = [] +description = "Shared wire protocol types for AimDB WebSocket connector and clients" +keywords = ["websocket", "protocol", "aimdb"] [dependencies] serde = { workspace = true } diff --git a/aimdb-ws-protocol/src/lib.rs b/aimdb-ws-protocol/src/lib.rs index 4b26d32e..74c8a56e 100644 --- a/aimdb-ws-protocol/src/lib.rs +++ b/aimdb-ws-protocol/src/lib.rs @@ -2,11 +2,10 @@ //! //! Shared wire protocol types for the AimDB WebSocket connector ecosystem. //! -//! This crate is `no_std + alloc` compatible so it can be used from: +//! Used by: //! //! - **`aimdb-websocket-connector`** โ€” the server side (Axum/Tokio) //! - **`aimdb-wasm-adapter`** โ€” the browser client (`WsBridge`) -//! - **Future native WS client** โ€” Tokio/Embassy client connector //! //! # Wire Protocol //! @@ -19,6 +18,7 @@ //! - `subscribed` โ€” subscription acknowledgement //! - `error` โ€” per-operation error //! - `pong` โ€” response to client ping +//! - `query_result` โ€” response to a client query request //! //! ## Client โ†’ Server ([`ClientMessage`]) //! @@ -26,19 +26,13 @@ //! - `unsubscribe` โ€” cancel subscriptions //! 
- `write` โ€” inbound value for a `link_from("ws://โ€ฆ")` record //! - `ping` โ€” keepalive ping +//! - `query` โ€” query historical / persisted records //! //! # Topic Matching //! //! [`topic_matches`] implements MQTT-style wildcard matching (`#` for //! multi-level, `*` for single-level). -#![no_std] - -extern crate alloc; - -use alloc::string::String; -use alloc::vec::Vec; - use serde::{Deserialize, Serialize}; // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -46,7 +40,7 @@ use serde::{Deserialize, Serialize}; // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• /// A message sent from the server to a connected WebSocket client. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(tag = "type", rename_all = "snake_case")] pub enum ServerMessage { /// Live data push from an outbound route. @@ -78,10 +72,33 @@ pub enum ServerMessage { /// Response to a client `ping` message. Pong, + + /// Response to a client `query` request. + /// + /// Contains the matching historical records and metadata. + QueryResult { + /// Correlation ID echoed from the client request. + id: String, + /// Matching records, ordered by timestamp ascending. + records: Vec, + /// Total number of records matched (before any limit). + total: usize, + }, +} + +/// A single record returned in a [`ServerMessage::QueryResult`]. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct QueryRecord { + /// Topic / record name (e.g. `"temp.vienna"`). + pub topic: String, + /// Deserialized record value. + pub payload: serde_json::Value, + /// Storage timestamp (milliseconds since Unix epoch). 
+ pub ts: u64, } /// Machine-readable error codes sent in `ServerMessage::Error`. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum ErrorCode { Unauthorized, @@ -97,7 +114,7 @@ pub enum ErrorCode { // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• /// A message received from a WebSocket client. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(tag = "type", rename_all = "snake_case")] pub enum ClientMessage { /// Subscribe to one or more topics (wildcards supported). @@ -114,6 +131,26 @@ pub enum ClientMessage { /// Keepalive ping. Ping, + + /// Query historical / persisted records. + /// + /// The server responds with [`ServerMessage::QueryResult`] carrying the + /// same `id` for correlation. + Query { + /// Client-generated correlation ID (echoed in the response). + id: String, + /// Topic pattern to match (MQTT wildcards supported, `"*"` for all). + pattern: String, + /// Start of time range (milliseconds since Unix epoch), inclusive. Defaults to 1 h ago. + #[serde(skip_serializing_if = "Option::is_none")] + from: Option, + /// End of time range (milliseconds since Unix epoch), inclusive. Defaults to now. + #[serde(skip_serializing_if = "Option::is_none")] + to: Option, + /// Maximum number of records to return per matching topic. 
+ #[serde(skip_serializing_if = "Option::is_none")] + limit: Option, + }, } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -168,11 +205,7 @@ pub fn topic_matches(pattern: &str, topic: &str) -> bool { } /// Returns the current milliseconds since the Unix epoch (for `ts` fields). -/// -/// Requires the `std` feature. -#[cfg(feature = "std")] pub fn now_ms() -> u64 { - extern crate std; use std::time::{SystemTime, UNIX_EPOCH}; SystemTime::now() .duration_since(UNIX_EPOCH) @@ -187,7 +220,6 @@ pub fn now_ms() -> u64 { #[cfg(test)] mod tests { use super::*; - use alloc::string::ToString; #[test] fn exact_match() { @@ -257,13 +289,13 @@ mod tests { #[test] fn serde_client_message_roundtrip() { let msg = ClientMessage::Subscribe { - topics: alloc::vec!["sensors/#".into()], + topics: vec!["sensors/#".into()], }; let json = serde_json::to_string(&msg).unwrap(); let parsed: ClientMessage = serde_json::from_str(&json).unwrap(); match parsed { ClientMessage::Subscribe { topics } => { - assert_eq!(topics, alloc::vec!["sensors/#".to_string()]); + assert_eq!(topics, vec!["sensors/#".to_string()]); } _ => panic!("Expected Subscribe variant"), } From d8401af8b61b4a7e9e0d4acb8fc9d8146f272eae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Fri, 6 Mar 2026 14:17:09 +0000 Subject: [PATCH 22/46] feat: integrate schema registry for type-erased dispatch in WebSocket adapter and connector --- Cargo.lock | 1 + aimdb-core/src/builder.rs | 26 ++++ aimdb-data-contracts/src/lib.rs | 2 +- aimdb-data-contracts/src/streamable.rs | 180 ++++++++++++---------- aimdb-wasm-adapter/src/bindings.rs | 67 ++++---- aimdb-wasm-adapter/src/lib.rs | 3 + aimdb-wasm-adapter/src/schema_registry.rs | 100 ++++++++++++ aimdb-wasm-adapter/src/ws_bridge.rs | 84 +++++++++- aimdb-websocket-connector/Cargo.toml | 1 + 
aimdb-websocket-connector/src/builder.rs | 31 ++++ aimdb-websocket-connector/src/protocol.rs | 2 +- aimdb-websocket-connector/src/session.rs | 17 +- aimdb-ws-protocol/src/lib.rs | 27 ++++ 13 files changed, 421 insertions(+), 120 deletions(-) create mode 100644 aimdb-wasm-adapter/src/schema_registry.rs diff --git a/Cargo.lock b/Cargo.lock index a9eec308..09cd9308 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -296,6 +296,7 @@ name = "aimdb-websocket-connector" version = "0.1.0" dependencies = [ "aimdb-core", + "aimdb-data-contracts", "aimdb-executor", "aimdb-ws-protocol", "axum", diff --git a/aimdb-core/src/builder.rs b/aimdb-core/src/builder.rs index faea5263..26da79d6 100644 --- a/aimdb-core/src/builder.rs +++ b/aimdb-core/src/builder.rs @@ -1513,6 +1513,32 @@ impl AimDb { /// connector.spawn_publisher(topic, consumer, serializer, config)?; /// } /// ``` + /// Collect `(topic, TypeId)` pairs for all outbound routes matching `scheme`. + /// + /// Complements [`collect_outbound_routes`](Self::collect_outbound_routes) when + /// callers need to know the concrete record type behind each outbound topic + /// (e.g. to resolve a schema name for discovery responses). + /// + /// The returned TypeId is the `TypeId::of::()` for the record type `T` + /// that was used in the corresponding `configure::()` call. 
+ #[cfg(feature = "alloc")] + pub fn collect_outbound_topic_type_ids(&self, scheme: &str) -> Vec<(String, TypeId)> { + let mut result = Vec::new(); + + for (idx, record) in self.inner.storages.iter().enumerate() { + let type_id = self.inner.types[idx]; + + for link in record.outbound_connectors() { + if link.url.scheme() != scheme { + continue; + } + result.push((link.url.resource_id(), type_id)); + } + } + + result + } + #[cfg(feature = "alloc")] pub fn collect_outbound_routes(&self, scheme: &str) -> Vec { let mut routes = Vec::new(); diff --git a/aimdb-data-contracts/src/lib.rs b/aimdb-data-contracts/src/lib.rs index e0b7e3a3..8e91c467 100644 --- a/aimdb-data-contracts/src/lib.rs +++ b/aimdb-data-contracts/src/lib.rs @@ -37,7 +37,7 @@ extern crate alloc; pub mod contracts; mod streamable; -pub use streamable::{is_streamable, Streamable}; +pub use streamable::{for_each_streamable, Streamable, StreamableVisitor}; #[cfg(feature = "linkable")] mod linkable; diff --git a/aimdb-data-contracts/src/streamable.rs b/aimdb-data-contracts/src/streamable.rs index 58f078af..cb9591ed 100644 --- a/aimdb-data-contracts/src/streamable.rs +++ b/aimdb-data-contracts/src/streamable.rs @@ -8,18 +8,18 @@ //! //! `Streamable` is a *capability marker* โ€” it combines [`SchemaType`] identity //! with the `serde` bounds needed for type-erased dispatch at serialization -//! boundaries. The companion [`dispatch_streamable!`] macro centralises the -//! schema-name โ†’ concrete-type routing so that consumers (WASM adapter, -//! WebSocket bridge, CLI) never hardcode contract types. +//! boundaries. The companion [`for_each_streamable`] function is the single +//! source of truth for which types are streamable โ€” consumers implement +//! [`StreamableVisitor`] to build whatever dispatch tables they need. //! //! # Adding a new streamable contract //! //! 1. Define your struct with `Serialize + Deserialize` in `contracts/`. //! 2. Implement `SchemaType` (unique `NAME`). -//! 3. 
`impl Streamable for MyType {}` in this module. -//! 4. Add a match arm to [`dispatch_streamable!`]. +//! 3. `impl Streamable for MyType {}` below. +//! 4. Add `visitor.visit::();` in [`for_each_streamable`]. //! -//! That's it โ€” every consumer that uses the macro picks up the new type +//! That's it โ€” every consumer that uses the visitor picks up the new type //! automatically. use crate::SchemaType; @@ -54,6 +54,36 @@ pub trait Streamable: { } +/// Visitor trait for iterating over all registered [`Streamable`] types. +/// +/// Implement this trait to build type-erased dispatch tables, registries, +/// or any other structure that needs to know about all streamable types. +/// +/// # Example +/// +/// ```rust +/// use std::any::TypeId; +/// use aimdb_data_contracts::{SchemaType, Streamable, StreamableVisitor, for_each_streamable}; +/// +/// struct TypeIdCollector { +/// entries: Vec<(TypeId, &'static str)>, +/// } +/// +/// impl StreamableVisitor for TypeIdCollector { +/// fn visit(&mut self) { +/// self.entries.push((TypeId::of::(), T::NAME)); +/// } +/// } +/// +/// let mut collector = TypeIdCollector { entries: Vec::new() }; +/// for_each_streamable(&mut collector); +/// assert_eq!(collector.entries.len(), 3); +/// ``` +pub trait StreamableVisitor { + /// Called once for each registered [`Streamable`] type. 
+ fn visit(&mut self); +} + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• // Implementations for built-in contracts // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• @@ -64,99 +94,91 @@ impl Streamable for Temperature {} impl Streamable for Humidity {} impl Streamable for GpsLocation {} -/// Returns `true` if `name` matches a known [`Streamable`] contract's -/// [`SchemaType::NAME`]. -/// -/// Useful for early validation before dispatch. -pub fn is_streamable(name: &str) -> bool { - matches!( - name, - ::NAME - | ::NAME - | ::NAME - ) -} - -/// Dispatch a schema type name to a typed code block. -/// -/// Routes a runtime `&str` schema name to the concrete Rust type that -/// implements [`Streamable`], then executes `$body` with `$T` bound to -/// that type. Returns `Some(body_result)` on match, `None` if the schema -/// name is unknown. -/// -/// This is the **single source of truth** for the schema-name โ†’ type -/// mapping โ€” WASM bindings, WebSocket bridge, and other consumers all -/// use this macro instead of maintaining their own tables. -/// -/// # Usage -/// -/// ```rust,ignore -/// use aimdb_data_contracts::dispatch_streamable; +/// Iterate over every registered [`Streamable`] type via the visitor pattern. /// -/// let result = dispatch_streamable!(schema_name, |T| { -/// // `T` is the concrete type (Temperature, Humidity, GpsLocation, โ€ฆ) -/// builder.configure::(key, |reg| reg.buffer(cfg)); -/// }) -/// .ok_or_else(|| format!("Unknown schema: {schema_name}"))?; -/// ``` +/// This is the **single source of truth** for which types are streamable. 
+/// All consumers (WASM adapter, WebSocket connector, CLI) use this function +/// to discover streamable types instead of maintaining their own lists. /// /// # Adding a new contract /// /// 1. `impl Streamable for NewType {}` (above) -/// 2. Add a match arm here. -#[macro_export] -macro_rules! dispatch_streamable { - ($schema_name:expr, |$T:ident| $body:expr) => { - match $schema_name { - <$crate::contracts::Temperature as $crate::SchemaType>::NAME => { - type $T = $crate::contracts::Temperature; - Some($body) - } - <$crate::contracts::Humidity as $crate::SchemaType>::NAME => { - type $T = $crate::contracts::Humidity; - Some($body) - } - <$crate::contracts::GpsLocation as $crate::SchemaType>::NAME => { - type $T = $crate::contracts::GpsLocation; - Some($body) - } - _ => None, - } - }; +/// 2. Add `visitor.visit::();` here. +pub fn for_each_streamable(visitor: &mut impl StreamableVisitor) { + visitor.visit::(); + visitor.visit::(); + visitor.visit::(); } #[cfg(test)] mod tests { use super::*; + use core::any::TypeId; - #[test] - fn known_schemas_are_streamable() { - assert!(is_streamable("temperature")); - assert!(is_streamable("humidity")); - assert!(is_streamable("gps_location")); + struct NameCollector { + names: alloc::vec::Vec<&'static str>, } - #[test] - fn unknown_schema_is_not_streamable() { - assert!(!is_streamable("unknown")); - assert!(!is_streamable("")); + impl StreamableVisitor for NameCollector { + fn visit(&mut self) { + self.names.push(T::NAME); + } + } + + struct TypeIdResolver { + target: TypeId, + result: Option<&'static str>, + } + + impl StreamableVisitor for TypeIdResolver { + fn visit(&mut self) { + if TypeId::of::() == self.target { + self.result = Some(T::NAME); + } + } } #[test] - fn dispatch_routes_correctly() { - let result = dispatch_streamable!("temperature", |T| ::NAME); - assert_eq!(result.unwrap(), "temperature"); + fn visitor_discovers_all_types() { + let mut c = NameCollector { + names: alloc::vec::Vec::new(), + }; + 
for_each_streamable(&mut c); + assert!(c.names.contains(&"temperature")); + assert!(c.names.contains(&"humidity")); + assert!(c.names.contains(&"gps_location")); + assert_eq!(c.names.len(), 3); + } - let result = dispatch_streamable!("humidity", |T| ::NAME); - assert_eq!(result.unwrap(), "humidity"); + #[test] + fn visitor_resolves_type_id() { + let mut r = TypeIdResolver { + target: TypeId::of::(), + result: None, + }; + for_each_streamable(&mut r); + assert_eq!(r.result, Some("temperature")); + } - let result = dispatch_streamable!("gps_location", |T| ::NAME); - assert_eq!(result.unwrap(), "gps_location"); + #[test] + fn visitor_returns_none_for_unknown() { + let mut r = TypeIdResolver { + target: TypeId::of::(), + result: None, + }; + for_each_streamable(&mut r); + assert_eq!(r.result, None); } #[test] - fn dispatch_rejects_unknown() { - let result = dispatch_streamable!("unknown", |_T| ()); - assert!(result.is_none()); + fn known_schemas_are_discoverable() { + let mut c = NameCollector { + names: alloc::vec::Vec::new(), + }; + for_each_streamable(&mut c); + assert!(c.names.contains(&"temperature")); + assert!(c.names.contains(&"humidity")); + assert!(c.names.contains(&"gps_location")); + assert!(!c.names.contains(&"unknown")); } } diff --git a/aimdb-wasm-adapter/src/bindings.rs b/aimdb-wasm-adapter/src/bindings.rs index 97a2b972..cf5f94fa 100644 --- a/aimdb-wasm-adapter/src/bindings.rs +++ b/aimdb-wasm-adapter/src/bindings.rs @@ -30,8 +30,7 @@ use aimdb_core::buffer::BufferCfg; use aimdb_core::builder::{AimDb, AimDbBuilder}; use aimdb_core::record_id::StringKey; -use aimdb_data_contracts::dispatch_streamable; - +use crate::schema_registry::{SchemaOps, SchemaRegistry}; use crate::ws_bridge::WsBridge; use crate::WasmAdapter; @@ -75,8 +74,8 @@ fn parse_buffer_cfg(opt: &BufferOption) -> Result { } } -fn is_known_schema(name: &str) -> bool { - aimdb_data_contracts::is_streamable(name) +fn is_known_schema(registry: &SchemaRegistry, name: &str) -> bool { + 
registry.is_known(name) } // โ”€โ”€โ”€ Collected config (pre-build) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -110,6 +109,8 @@ pub struct WasmDb { db: Option>, /// Maps record key โ†’ schema type name (always populated). schema_map: Vec<(String, String)>, + /// Type-erased dispatch registry built from the visitor pattern. + registry: SchemaRegistry, } impl Default for WasmDb { @@ -127,6 +128,7 @@ impl WasmDb { configs: Some(Vec::new()), db: None, schema_map: Vec::new(), + registry: SchemaRegistry::build(), } } @@ -149,7 +151,7 @@ impl WasmDb { let opts: RecordOptions = serde_wasm_bindgen::from_value(options) .map_err(|e| JsError::new(&format!("Invalid options: {e}")))?; - if !is_known_schema(&opts.schema_type) { + if !is_known_schema(&self.registry, &opts.schema_type) { return Err(JsError::new(&format!( "Unknown schema type: {}", opts.schema_type @@ -184,7 +186,7 @@ impl WasmDb { let mut builder = AimDbBuilder::new().runtime(rt); for config in &configs { - apply_record_config(&mut builder, config)?; + apply_record_config(&self.registry, &mut builder, config)?; } let db = builder @@ -201,9 +203,8 @@ impl WasmDb { /// The value is the latest snapshot โ€” it does not wait for a new push. /// Returns `undefined` if no value has been produced yet. pub fn get(&self, record_key: &str) -> Result { - let (db, schema) = self.resolve(record_key)?; - dispatch_streamable!(schema, |T| get_typed::(db, record_key)) - .ok_or_else(|| JsError::new(&format!("Unknown schema type: {schema}")))? + let (db, ops) = self.resolve(record_key)?; + (ops.get)(db, record_key) } /// Set a record value (validates via Rust serde deserialization). @@ -211,9 +212,8 @@ impl WasmDb { /// Throws `JsError` if the payload fails contract validation (e.g. missing /// required fields) or the record key is unknown. 
pub fn set(&mut self, record_key: &str, value: JsValue) -> Result<(), JsError> { - let (db, schema) = self.resolve(record_key)?; - dispatch_streamable!(schema, |T| set_typed::(db, record_key, value)) - .ok_or_else(|| JsError::new(&format!("Unknown schema type: {schema}")))? + let (db, ops) = self.resolve(record_key)?; + (ops.set)(db, record_key, value) } /// Subscribe to record updates. Returns an unsubscribe function. @@ -224,9 +224,8 @@ impl WasmDb { record_key: &str, callback: &js_sys::Function, ) -> Result { - let (db, schema) = self.resolve(record_key)?; - dispatch_streamable!(schema, |T| subscribe_typed::(db, record_key, callback)) - .ok_or_else(|| JsError::new(&format!("Unknown schema type: {schema}")))? + let (db, ops) = self.resolve(record_key)?; + (ops.subscribe)(db, record_key, callback) } /// Returns `true` if the database has been built. @@ -258,16 +257,17 @@ impl WasmDb { .clone(); // cheap: two Arc pointer copies let schema_map = self.schema_map.clone(); + let registry = SchemaRegistry::build(); - WsBridge::new_internal(db, schema_map, url, options) + WsBridge::new_internal(db, schema_map, registry, url, options) } } // โ”€โ”€โ”€ Private helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ impl WasmDb { - /// Resolve a record key to the live DB handle and its schema type name. - fn resolve(&self, record_key: &str) -> Result<(&AimDb, &str), JsError> { + /// Resolve a record key to the live DB handle and its type-erased ops. 
+ fn resolve(&self, record_key: &str) -> Result<(&AimDb, &SchemaOps), JsError> { let db = self .db .as_ref() @@ -280,7 +280,12 @@ impl WasmDb { .map(|(_, v)| v.as_str()) .ok_or_else(|| JsError::new(&format!("Unknown record key: {record_key}")))?; - Ok((db, schema)) + let ops = self + .registry + .get(schema) + .ok_or_else(|| JsError::new(&format!("Unknown schema type: {schema}")))?; + + Ok((db, ops)) } } @@ -288,25 +293,23 @@ impl WasmDb { /// Apply a single `RecordConfig` to the builder, dispatching on schema type. fn apply_record_config( + registry: &SchemaRegistry, builder: &mut AimDbBuilder, config: &RecordConfig, ) -> Result<(), JsError> { - use crate::WasmRecordRegistrarExt; - let key = StringKey::intern(config.key.clone()); let cfg = config.buffer_cfg.clone(); - dispatch_streamable!(config.schema_type.as_str(), |T| { - builder.configure::(key, |reg| { - reg.buffer(cfg); - }); - }) - .ok_or_else(|| JsError::new(&format!("Unknown schema type: {}", config.schema_type)))?; + let ops = registry + .get(&config.schema_type) + .ok_or_else(|| JsError::new(&format!("Unknown schema type: {}", config.schema_type)))?; + + (ops.configure)(builder, key, cfg); Ok(()) } /// Read the latest snapshot for record `key` and convert to `JsValue`. -fn get_typed(db: &AimDb, key: &str) -> Result +pub(crate) fn get_typed(db: &AimDb, key: &str) -> Result where T: Send + Sync + 'static + Debug + Clone + Serialize, { @@ -323,7 +326,11 @@ where } /// Deserialize `JsValue` โ†’ `T` (contract enforcement), then push to buffer. 
-fn set_typed(db: &AimDb, key: &str, value: JsValue) -> Result<(), JsError> +pub(crate) fn set_typed( + db: &AimDb, + key: &str, + value: JsValue, +) -> Result<(), JsError> where T: Send + Sync + 'static + Debug + Clone + DeserializeOwned, { @@ -348,7 +355,7 @@ where /// Uses `futures_util::future::select` to race `recv()` against a cancel /// future so the unsubscribe closure can break the loop immediately โ€” even /// when `recv()` is blocked waiting for the next push. -fn subscribe_typed( +pub(crate) fn subscribe_typed( db: &AimDb, key: &str, callback: &js_sys::Function, diff --git a/aimdb-wasm-adapter/src/lib.rs b/aimdb-wasm-adapter/src/lib.rs index 7cd5d089..cf68a995 100644 --- a/aimdb-wasm-adapter/src/lib.rs +++ b/aimdb-wasm-adapter/src/lib.rs @@ -42,6 +42,9 @@ pub mod time; #[cfg(feature = "wasm-runtime")] pub mod bindings; +#[cfg(feature = "wasm-runtime")] +pub(crate) mod schema_registry; + #[cfg(feature = "wasm-runtime")] pub mod ws_bridge; diff --git a/aimdb-wasm-adapter/src/schema_registry.rs b/aimdb-wasm-adapter/src/schema_registry.rs new file mode 100644 index 00000000..2df03e88 --- /dev/null +++ b/aimdb-wasm-adapter/src/schema_registry.rs @@ -0,0 +1,100 @@ +//! Type-erased dispatch registry for [`Streamable`] types in the WASM adapter. +//! +//! Built once via [`SchemaRegistry::build`] using the visitor pattern from +//! `aimdb-data-contracts`. Each entry stores boxed closures that capture the +//! concrete type `T` through monomorphization, enabling runtime dispatch by +//! schema name without a central match macro. 
+ +extern crate alloc; + +use alloc::boxed::Box; +use alloc::collections::BTreeMap; + +use wasm_bindgen::prelude::*; + +use aimdb_core::buffer::BufferCfg; +use aimdb_core::builder::{AimDb, AimDbBuilder}; +use aimdb_core::record_id::StringKey; + +use aimdb_data_contracts::{for_each_streamable, Streamable, StreamableVisitor}; + +use crate::WasmAdapter; + +// โ”€โ”€โ”€ Type-erased operations โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +type ConfigureFn = Box, StringKey, BufferCfg) + Send + Sync>; +type GetFn = Box, &str) -> Result + Send + Sync>; +type SetFn = Box, &str, JsValue) -> Result<(), JsError> + Send + Sync>; +type SubscribeFn = Box< + dyn Fn(&AimDb, &str, &js_sys::Function) -> Result + Send + Sync, +>; +type ProduceFromJsonFn = Box, &str, serde_json::Value) + Send + Sync>; + +/// Type-erased operations for a single [`Streamable`] type. +pub(crate) struct SchemaOps { + pub configure: ConfigureFn, + pub get: GetFn, + pub set: SetFn, + pub subscribe: SubscribeFn, + pub produce_from_json: ProduceFromJsonFn, +} + +// โ”€โ”€โ”€ Registry โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Maps schema names to type-erased operations. +/// +/// Built once at startup via [`SchemaRegistry::build`], then shared across +/// the `WasmDb` and `WsBridge`. +pub(crate) struct SchemaRegistry { + entries: BTreeMap<&'static str, SchemaOps>, +} + +impl SchemaRegistry { + /// Build the registry by visiting all [`Streamable`] types. + pub fn build() -> Self { + let mut builder = RegistryBuilder { + entries: BTreeMap::new(), + }; + for_each_streamable(&mut builder); + SchemaRegistry { + entries: builder.entries, + } + } + + /// Look up operations for a schema name. 
+ pub fn get(&self, schema_name: &str) -> Option<&SchemaOps> { + self.entries.get(schema_name) + } + + /// Returns `true` if the schema name is known. + pub fn is_known(&self, schema_name: &str) -> bool { + self.entries.contains_key(schema_name) + } +} + +// โ”€โ”€โ”€ Visitor that builds the registry โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +struct RegistryBuilder { + entries: BTreeMap<&'static str, SchemaOps>, +} + +impl StreamableVisitor for RegistryBuilder { + fn visit(&mut self) { + use crate::bindings::{get_typed, set_typed, subscribe_typed}; + use crate::ws_bridge::produce_from_json; + + let ops = SchemaOps { + configure: Box::new(|builder, key, cfg| { + use crate::WasmRecordRegistrarExt; + builder.configure::(key, |reg| { + reg.buffer(cfg); + }); + }), + get: Box::new(get_typed::), + set: Box::new(set_typed::), + subscribe: Box::new(|db, key, cb| subscribe_typed::(db, key, cb)), + produce_from_json: Box::new(produce_from_json::), + }; + self.entries.insert(T::NAME, ops); + } +} diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs index 79b1d030..180f62ca 100644 --- a/aimdb-wasm-adapter/src/ws_bridge.rs +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -27,8 +27,8 @@ use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use wasm_bindgen::prelude::*; +use crate::schema_registry::SchemaRegistry; use aimdb_core::builder::AimDb; -use aimdb_data_contracts::dispatch_streamable; use crate::WasmAdapter; @@ -129,6 +129,7 @@ struct BridgeState { struct SharedCtx { db: AimDb, schema_map: Vec<(String, String)>, + registry: SchemaRegistry, state: Rc>, on_status: Rc>>, config: BridgeOptions, @@ -139,6 +140,10 @@ struct SharedCtx { pending_queries: Rc>>, /// Simple counter for generating unique query IDs. query_id_counter: Rc>, + /// Pending list_topics requests: correlation ID โ†’ (resolve, reject) JS functions. 
+ pending_list_topics: Rc>>, + /// Counter for generating unique list_topics IDs. + list_topics_id_counter: Rc>, } // โ”€โ”€โ”€ WsBridge โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -308,6 +313,53 @@ impl WsBridge { } }) } + /// List all topics served by the WebSocket endpoint. + /// + /// Returns a `Promise` that resolves with `TopicInfo[]`. + /// Each entry has `name: string` and optionally `schema_type: string`. + /// + /// ```ts + /// const topics = await bridge.listTopics(); + /// // [{ name: "temp.vienna", schema_type: "temperature" }, โ€ฆ] + /// ``` + #[wasm_bindgen(js_name = "listTopics")] + pub fn list_topics(&self) -> js_sys::Promise { + let ctx = self.ctx.clone(); + + let id = { + let mut counter = ctx.list_topics_id_counter.borrow_mut(); + *counter += 1; + format!("lt{}", *counter) + }; + + let id_for_promise = id.clone(); + js_sys::Promise::new(&mut move |resolve, reject| { + ctx.pending_list_topics + .borrow_mut() + .insert(id_for_promise.clone(), (resolve, reject.clone())); + + let msg = ClientMessage::ListTopics { + id: id_for_promise.clone(), + }; + + let state = ctx.state.borrow(); + if state.status != ConnectionStatus::Connected { + drop(state); + ctx.pending_list_topics.borrow_mut().remove(&id_for_promise); + let _ = reject.call1(&JsValue::NULL, &JsValue::from_str("Not connected")); + return; + } + drop(state); + + if let Err(e) = send_json(&ctx.ws_cell.borrow(), &msg) { + ctx.pending_list_topics.borrow_mut().remove(&id_for_promise); + let _ = reject.call1( + &JsValue::NULL, + &JsValue::from_str(&format!("Send failed: {e:?}")), + ); + } + }) + } } impl Drop for WsBridge { @@ -323,6 +375,7 @@ impl WsBridge { pub(crate) fn new_internal( db: AimDb, schema_map: Vec<(String, String)>, + registry: SchemaRegistry, url: &str, options: JsValue, ) -> Result { @@ -354,6 +407,7 @@ impl WsBridge { let ctx = 
Rc::new(SharedCtx { db, schema_map, + registry, state, on_status, config, @@ -362,6 +416,8 @@ impl WsBridge { ws_cell, pending_queries: Rc::new(RefCell::new(BTreeMap::new())), query_id_counter: Rc::new(RefCell::new(0)), + pending_list_topics: Rc::new(RefCell::new(BTreeMap::new())), + list_topics_id_counter: Rc::new(RefCell::new(0)), }); install_ws_callbacks(&ctx); @@ -573,15 +629,15 @@ fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { match schema { Some(schema) => { - let dispatched = dispatch_streamable!(schema, |T| { - produce_from_json::(&ctx.db, &topic, payload.clone()); - }); - if dispatched.is_none() { + if let Some(ops) = ctx.registry.get(schema) { + (ops.produce_from_json)(&ctx.db, &topic, payload.clone()); + } else { web_sys::console::warn_1( &format!( - "[WsBridge] dispatch_streamable returned None for schema='{}' topic='{}'", + "[WsBridge] unknown schema='{}' topic='{}'", schema, topic - ).into(), + ) + .into(), ); } } @@ -634,13 +690,25 @@ fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { let _ = resolve.call1(&JsValue::NULL, &result_obj); } } + ServerMessage::TopicList { id, topics } => { + if let Some((resolve, _reject)) = ctx.pending_list_topics.borrow_mut().remove(&id) { + let serializer = serde_wasm_bindgen::Serializer::json_compatible(); + let arr = js_sys::Array::new(); + for topic in &topics { + if let Ok(js_val) = topic.serialize(&serializer) { + arr.push(&js_val); + } + } + let _ = resolve.call1(&JsValue::NULL, &arr); + } + } } } /// Deserialize `serde_json::Value` โ†’ `T` and push to the record buffer. /// /// This is the fast path for incoming server data โ€” no `JsValue` hop. 
-fn produce_from_json(db: &AimDb, key: &str, json: serde_json::Value) +pub(crate) fn produce_from_json(db: &AimDb, key: &str, json: serde_json::Value) where T: Send + Sync + 'static + Debug + Clone + DeserializeOwned, { diff --git a/aimdb-websocket-connector/Cargo.toml b/aimdb-websocket-connector/Cargo.toml index 3c264cb7..1c06f279 100644 --- a/aimdb-websocket-connector/Cargo.toml +++ b/aimdb-websocket-connector/Cargo.toml @@ -35,6 +35,7 @@ tracing = ["dep:tracing"] [dependencies] aimdb-core = { path = "../aimdb-core", default-features = false } +aimdb-data-contracts = { path = "../aimdb-data-contracts", default-features = false } aimdb-executor = { path = "../aimdb-executor", default-features = false } aimdb-ws-protocol = { path = "../aimdb-ws-protocol" } diff --git a/aimdb-websocket-connector/src/builder.rs b/aimdb-websocket-connector/src/builder.rs index 40f71bd8..d8bb8ac1 100644 --- a/aimdb-websocket-connector/src/builder.rs +++ b/aimdb-websocket-connector/src/builder.rs @@ -15,12 +15,15 @@ //! ``` use std::{ + any::TypeId, collections::HashMap, net::{SocketAddr, ToSocketAddrs}, pin::Pin, sync::{Arc, Mutex}, }; +use aimdb_data_contracts::for_each_streamable; + use aimdb_core::{router::RouterBuilder, ConnectorBuilder}; use axum::Router as AxumRouter; @@ -31,6 +34,7 @@ use crate::{ server::start_server, session::{NoQuery, NoSnapshot, QueryHandler, SessionContext, SnapshotProvider}, }; +use aimdb_ws_protocol::TopicInfo; // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• // Builder @@ -285,6 +289,32 @@ where Arc::new(NoSnapshot) }; + // โ”€โ”€ Known topics (for list_topics responses) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + // Build a TypeId โ†’ schema name map from all registered Streamable types. 
+ struct TypeIdMap(HashMap); + impl aimdb_data_contracts::StreamableVisitor for TypeIdMap { + fn visit(&mut self) { + self.0.insert( + TypeId::of::(), + ::NAME, + ); + } + } + let mut type_id_map = TypeIdMap(HashMap::new()); + for_each_streamable(&mut type_id_map); + + let topic_type_ids = db.collect_outbound_topic_type_ids("ws"); + let known_topics: Vec = topic_type_ids + .into_iter() + .map(|(topic, type_id)| { + let schema_type = type_id_map.0.get(&type_id).map(|s| s.to_string()); + TopicInfo { + name: topic, + schema_type, + } + }) + .collect(); + // โ”€โ”€ Session context โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ let session_ctx = SessionContext { client_mgr: client_mgr.clone(), @@ -295,6 +325,7 @@ where snapshot_provider, auto_subscribe_topics: self.auto_subscribe_topics.clone(), query_handler: self.query_handler.clone(), + known_topics, }; // โ”€โ”€ Build connector & spawn outbound publishers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ diff --git a/aimdb-websocket-connector/src/protocol.rs b/aimdb-websocket-connector/src/protocol.rs index b67289fa..85b0b2e2 100644 --- a/aimdb-websocket-connector/src/protocol.rs +++ b/aimdb-websocket-connector/src/protocol.rs @@ -6,5 +6,5 @@ // Re-export everything from the shared protocol crate pub use aimdb_ws_protocol::{ - now_ms, topic_matches, ClientMessage, ErrorCode, QueryRecord, ServerMessage, + now_ms, topic_matches, ClientMessage, ErrorCode, QueryRecord, ServerMessage, TopicInfo, }; diff --git a/aimdb-websocket-connector/src/session.rs b/aimdb-websocket-connector/src/session.rs index 3769c7e4..24e4b140 100644 --- a/aimdb-websocket-connector/src/session.rs +++ b/aimdb-websocket-connector/src/session.rs @@ -24,7 +24,7 @@ use tokio::sync::mpsc; use crate::{ auth::{AuthHandler, ClientId, ClientInfo}, client_manager::ClientManager, - protocol::{ClientMessage, ErrorCode, QueryRecord}, + protocol::{ClientMessage, ErrorCode, QueryRecord, TopicInfo}, }; // 
Re-export so server.rs can use it easily. @@ -122,6 +122,8 @@ pub(crate) struct SessionContext { pub auto_subscribe_topics: Vec, /// Handler for `Query` messages (historical record retrieval). pub query_handler: Arc, + /// All outbound topics served by this endpoint, returned on `list_topics`. + pub known_topics: Vec, } /// Provides the current serialized value of a record for late-join snapshots. @@ -286,6 +288,9 @@ async fn handle_text(id: ClientId, text: &str, ctx: &SessionContext) { } => { handle_query(id, query_id, pattern, from, to, limit, ctx).await; } + ClientMessage::ListTopics { id: req_id } => { + handle_list_topics(id, req_id, ctx).await; + } } } @@ -389,6 +394,16 @@ async fn handle_write( } } +async fn handle_list_topics(id: ClientId, req_id: String, ctx: &SessionContext) { + use crate::protocol::ServerMessage; + + let result = ServerMessage::TopicList { + id: req_id, + topics: ctx.known_topics.clone(), + }; + ctx.client_mgr.send_to_client(id, &result).await; +} + async fn handle_query( id: ClientId, query_id: String, diff --git a/aimdb-ws-protocol/src/lib.rs b/aimdb-ws-protocol/src/lib.rs index 74c8a56e..52cb0cfa 100644 --- a/aimdb-ws-protocol/src/lib.rs +++ b/aimdb-ws-protocol/src/lib.rs @@ -84,6 +84,14 @@ pub enum ServerMessage { /// Total number of records matched (before any limit). total: usize, }, + + /// Response to a client `list_topics` request. + TopicList { + /// Correlation ID echoed from the client request. + id: String, + /// All outbound topics served by this endpoint. + topics: Vec, + }, } /// A single record returned in a [`ServerMessage::QueryResult`]. @@ -97,6 +105,16 @@ pub struct QueryRecord { pub ts: u64, } +/// Metadata for a single outbound topic served by a WebSocket endpoint. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct TopicInfo { + /// Record key / topic name (e.g. `"temp.vienna"`). + pub name: String, + /// Schema type name (e.g. `"temperature"`), if known by the server. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub schema_type: Option, +} + /// Machine-readable error codes sent in `ServerMessage::Error`. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] @@ -151,6 +169,15 @@ pub enum ClientMessage { #[serde(skip_serializing_if = "Option::is_none")] limit: Option, }, + + /// Request the list of topics served by this WebSocket endpoint. + /// + /// The server responds with [`ServerMessage::TopicList`] carrying the + /// same `id` for correlation. + ListTopics { + /// Client-generated correlation ID (echoed in the response). + id: String, + }, } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• From 7a7d05817cb9a61b520db86221d810cdbccfd5e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Fri, 6 Mar 2026 14:45:50 +0000 Subject: [PATCH 23/46] Refactor schema migration system: replace `Migratable` trait with `MigrationStep` and `MigrationChain` for type-safe, bidirectional migrations - Updated `lib.rs` to expose new migration types and modify documentation accordingly. - Enhanced `migratable.rs` with a new architecture for migration steps and chains, including detailed examples and improved error handling. - Removed obsolete TypeScript export test and associated scripts, streamlining the codebase. 
--- aimdb-data-contracts/.gitignore | 3 - .../src/contracts/temperature.rs | 319 ++++---- aimdb-data-contracts/src/lib.rs | 10 +- aimdb-data-contracts/src/migratable.rs | 706 +++++++++++++++--- aimdb-data-contracts/tests/export_ts.rs | 269 ------- scripts/gen-ts-bindings.sh | 61 -- 6 files changed, 809 insertions(+), 559 deletions(-) delete mode 100644 aimdb-data-contracts/tests/export_ts.rs delete mode 100755 scripts/gen-ts-bindings.sh diff --git a/aimdb-data-contracts/.gitignore b/aimdb-data-contracts/.gitignore index 81f62ce1..de01a5e4 100644 --- a/aimdb-data-contracts/.gitignore +++ b/aimdb-data-contracts/.gitignore @@ -3,6 +3,3 @@ Cargo.lock **/*.rs.bk *.pdb .DS_Store - -# Generated TypeScript bindings (run ./scripts/gen-ts-bindings.sh) -/bindings/ diff --git a/aimdb-data-contracts/src/contracts/temperature.rs b/aimdb-data-contracts/src/contracts/temperature.rs index deb9322f..34c02a1d 100644 --- a/aimdb-data-contracts/src/contracts/temperature.rs +++ b/aimdb-data-contracts/src/contracts/temperature.rs @@ -8,8 +8,8 @@ //! - **v1** (legacy): `{ "schema_version": 1, "temp": f32, "timestamp": u64, "unit": "C"|"F"|"K" }` //! - **v2** (current): `{ "schema_version": 2, "celsius": f32, "timestamp": u64 }` //! -//! The `from_bytes_versioned()` function reads the `schema_version` from the payload -//! and migrates automatically, allowing nodes and hubs to be updated independently. +//! The `MigrationChain` impl (via `migration_chain!`) reads the `schema_version` +//! from the payload and migrates automatically, allowing nodes and hubs to be updated independently. 
extern crate alloc; @@ -23,7 +23,7 @@ use crate::Linkable; use crate::{Simulatable, SimulationConfig}; #[cfg(feature = "migratable")] -use crate::{Migratable, MigrationError}; +use crate::{MigrationError, MigrationStep}; #[cfg(feature = "ts")] use ts_rs::TS; @@ -132,30 +132,40 @@ impl Linkable for TemperatureV1 { } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -// MIGRATABLE IMPLEMENTATION +// TYPE-SAFE MIGRATION (v1 โ†’ v2) // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +/// Migration step: Temperature v1 (temp + unit) โ†’ v2 (celsius only) #[cfg(feature = "migratable")] -impl Migratable for Temperature { - /// Migrate raw JSON from v1 to v2 format. - /// - /// # v1 โ†’ v2 Migration - /// - Rename: `temp` โ†’ `celsius` - /// - Convert: Apply unit conversion if unit is "F" or "K" - /// - Remove: Drop the `unit` field - /// - /// Delegates to `TemperatureV1::to_v2()` to keep conversion logic DRY. 
- fn migrate(raw: &mut serde_json::Value, from_version: u32) -> Result<(), MigrationError> { - if from_version < 2 { - // Parse as v1, convert via to_v2(), then serialize back - let v1: TemperatureV1 = serde_json::from_value(raw.clone()) - .map_err(|_| MigrationError::MissingField("temp or unit"))?; - let v2 = v1.to_v2(); - *raw = serde_json::to_value(v2) - .map_err(|_| MigrationError::Custom("failed to serialize migrated value"))?; - } +pub struct TemperatureV1ToV2; + +#[cfg(feature = "migratable")] +impl MigrationStep for TemperatureV1ToV2 { + type Older = TemperatureV1; + type Newer = Temperature; + const FROM_VERSION: u32 = 1; + const TO_VERSION: u32 = 2; + + fn up(v1: TemperatureV1) -> Result { + Ok(v1.to_v2()) + } - Ok(()) + fn down(v2: Temperature) -> Result { + Ok(TemperatureV1 { + schema_version: 1, + temp: v2.celsius, + timestamp: v2.timestamp, + unit: alloc::string::String::from("C"), + }) + } +} + +#[cfg(feature = "migratable")] +crate::migration_chain! { + type Current = Temperature; + version_field = "schema_version"; + steps { + TemperatureV1ToV2: TemperatureV1 => Temperature, } } @@ -230,46 +240,14 @@ impl Settable for Temperature { } // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -// VERSIONED DESERIALIZATION +// LINKABLE WITH MIGRATION SUPPORT // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• -#[cfg(feature = "linkable")] -impl Temperature { - /// Deserialize from bytes with automatic migration based on `schema_version` field. - /// - /// This function enables **decoupled deployment**: nodes and hubs can be - /// updated independently because the hub reads the schema version from the payload. 
- /// - /// Delegates to `Migratable::deserialize_versioned` for the actual migration. - /// - /// # Example - /// ```ignore - /// let v1 = r#"{"schema_version":1,"temp":68.0,"timestamp":123,"unit":"F"}"#; - /// let v2 = r#"{"schema_version":2,"celsius":20.0,"timestamp":123}"#; - /// ``` - #[cfg(feature = "migratable")] - pub fn from_bytes_versioned(data: &[u8]) -> Result { - use crate::Migratable; - - let mut value: serde_json::Value = - serde_json::from_slice(data).map_err(|e| alloc::format!("JSON parse error: {}", e))?; - - let version = value - .get("schema_version") - .and_then(|v| v.as_u64()) - .ok_or_else(|| alloc::string::String::from("Missing schema_version field"))? - as u32; - - Self::deserialize_versioned(&mut value, version) - .map_err(|e| alloc::format!("Migration error: {:?}", e)) - } -} - #[cfg(all(feature = "linkable", feature = "migratable"))] impl Linkable for Temperature { fn from_bytes(data: &[u8]) -> Result { - // Use versioned deserializer for automatic migration - Self::from_bytes_versioned(data) + use crate::MigrationChain; + Self::migrate_from_bytes(data).map_err(|e| alloc::format!("Migration error: {}", e)) } fn to_bytes(&self) -> Result, alloc::string::String> { @@ -343,105 +321,192 @@ mod tests { assert_eq!(v2.celsius, 22.5); } + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + // TYPE-SAFE MIGRATION STEP TESTS + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + #[cfg(feature = "migratable")] #[test] - fn test_migratable_trait_celsius() { - use crate::Migratable; + fn test_migration_step_up_celsius() { + use crate::MigrationStep; - let mut raw = serde_json::json!({ - "temp": 22.5, - "timestamp": 1704326400000_u64, - "unit": "C" 
- }); - - Temperature::migrate(&mut raw, 1).unwrap(); - - assert_eq!(raw["celsius"], 22.5); - assert!(raw.get("temp").is_none(), "temp field should be removed"); - assert!(raw.get("unit").is_none(), "unit field should be removed"); + let v1 = TemperatureV1::new(22.5, 1704326400000, "C"); + let v2 = TemperatureV1ToV2::up(v1).unwrap(); + assert_eq!(v2.celsius, 22.5); + assert_eq!(v2.timestamp, 1704326400000); } #[cfg(feature = "migratable")] #[test] - fn test_migratable_trait_fahrenheit() { - use crate::Migratable; - - let mut raw = serde_json::json!({ - "temp": 68.0, - "timestamp": 1704326400000_u64, - "unit": "F" - }); - - Temperature::migrate(&mut raw, 1).unwrap(); + fn test_migration_step_up_fahrenheit() { + use crate::MigrationStep; - let celsius = raw["celsius"].as_f64().unwrap(); + let v1 = TemperatureV1::new(68.0, 1704326400000, "F"); + let v2 = TemperatureV1ToV2::up(v1).unwrap(); assert!( - (celsius - 20.0).abs() < 0.01, + (v2.celsius - 20.0).abs() < 0.01, "Expected ~20ยฐC, got {}", - celsius + v2.celsius ); } #[cfg(feature = "migratable")] #[test] - fn test_migratable_trait_kelvin() { - use crate::Migratable; + fn test_migration_step_up_kelvin() { + use crate::MigrationStep; - let mut raw = serde_json::json!({ - "temp": 293.15, - "timestamp": 1704326400000_u64, - "unit": "K" - }); - - Temperature::migrate(&mut raw, 1).unwrap(); - - let celsius = raw["celsius"].as_f64().unwrap(); + let v1 = TemperatureV1::new(293.15, 1704326400000, "K"); + let v2 = TemperatureV1ToV2::up(v1).unwrap(); assert!( - (celsius - 20.0).abs() < 0.01, + (v2.celsius - 20.0).abs() < 0.01, "Expected ~20ยฐC, got {}", - celsius + v2.celsius ); } #[cfg(feature = "migratable")] #[test] - fn test_deserialize_versioned_v1() { - use crate::Migratable; + fn test_migration_step_down() { + use crate::MigrationStep; - let mut raw = serde_json::json!({ - "temp": 22.5, - "timestamp": 1704326400000_u64, - "unit": "C" - }); + let v2 = Temperature::new(22.5, 1704326400000); + let v1 = 
TemperatureV1ToV2::down(v2).unwrap(); + assert_eq!(v1.temp, 22.5); + assert_eq!(v1.timestamp, 1704326400000); + assert_eq!(v1.unit, "C"); + assert_eq!(v1.schema_version, 1); + } - let temp: Temperature = Temperature::deserialize_versioned(&mut raw, 1).unwrap(); + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + // MIGRATION CHAIN TESTS (upgrade from bytes) + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + + #[cfg(feature = "migratable")] + #[test] + fn test_migrate_from_bytes_v1() { + use crate::MigrationChain; + + let json = r#"{"schema_version":1,"temp":22.5,"timestamp":1704326400000,"unit":"C"}"#; + let temp = Temperature::migrate_from_bytes(json.as_bytes()).unwrap(); assert_eq!(temp.celsius, 22.5); assert_eq!(temp.timestamp, 1704326400000); } #[cfg(feature = "migratable")] #[test] - fn test_deserialize_versioned_v2_no_migration() { - use crate::Migratable; + fn test_migrate_from_bytes_v2_no_migration() { + use crate::MigrationChain; - let mut raw = serde_json::json!({ - "celsius": 22.5, - "timestamp": 1704326400000_u64 - }); - - let temp: Temperature = Temperature::deserialize_versioned(&mut raw, 2).unwrap(); + let json = r#"{"schema_version":2,"celsius":22.5,"timestamp":1704326400000}"#; + let temp = Temperature::migrate_from_bytes(json.as_bytes()).unwrap(); assert_eq!(temp.celsius, 22.5); } + #[cfg(feature = "migratable")] + #[test] + fn test_migrate_from_bytes_version_too_new() { + use crate::MigrationChain; + + let json = r#"{"schema_version":99,"celsius":22.5,"timestamp":100}"#; + let err = Temperature::migrate_from_bytes(json.as_bytes()).unwrap_err(); + assert_eq!( + err, + crate::MigrationError::VersionTooNew { + source: 99, + current: 2 + } + 
); + } + + #[cfg(feature = "migratable")] + #[test] + fn test_migrate_from_bytes_missing_version() { + use crate::MigrationChain; + + let json = r#"{"celsius":22.5,"timestamp":100}"#; + let err = Temperature::migrate_from_bytes(json.as_bytes()).unwrap_err(); + assert_eq!(err, crate::MigrationError::MissingVersion); + } + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• - // VERSIONED DESERIALIZATION TESTS (auto-detect version) + // DOWNGRADE TESTS // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• - #[cfg(feature = "linkable")] + #[cfg(feature = "migratable")] + #[test] + fn test_downgrade_to_v1() { + use crate::MigrationChain; + + let temp = Temperature::new(22.5, 1704326400000); + let v1_bytes = temp.migrate_to_version(1).unwrap(); + let v1: serde_json::Value = serde_json::from_slice(&v1_bytes).unwrap(); + + assert_eq!(v1["temp"], 22.5); + assert_eq!(v1["unit"], "C"); + assert_eq!(v1["schema_version"], 1); + } + + #[cfg(feature = "migratable")] + #[test] + fn test_downgrade_to_current_version() { + use crate::MigrationChain; + + let temp = Temperature::new(22.5, 1704326400000); + let v2_bytes = temp.migrate_to_version(2).unwrap(); + let v2: Temperature = serde_json::from_slice(&v2_bytes).unwrap(); + assert_eq!(v2.celsius, 22.5); + } + + #[cfg(feature = "migratable")] + #[test] + fn test_downgrade_version_too_old() { + use crate::MigrationChain; + + let temp = Temperature::new(22.5, 100); + let err = temp.migrate_to_version(0).unwrap_err(); + assert_eq!( + err, + crate::MigrationError::VersionTooOld { + target: 0, + minimum: 1 + } + ); + } + + #[cfg(feature = "migratable")] + #[test] + fn test_roundtrip_v1_upgrade_downgrade() { + use crate::MigrationChain; + + 
// Start with v1 JSON + let v1_json = r#"{"schema_version":1,"temp":22.5,"timestamp":1704326400000,"unit":"C"}"#; + + // Upgrade to v2 + let v2 = Temperature::migrate_from_bytes(v1_json.as_bytes()).unwrap(); + assert_eq!(v2.celsius, 22.5); + + // Downgrade back to v1 + let v1_bytes = v2.migrate_to_version(1).unwrap(); + let v1: TemperatureV1 = serde_json::from_slice(&v1_bytes).unwrap(); + assert_eq!(v1.temp, 22.5); + assert_eq!(v1.unit, "C"); + + // Upgrade again โ€” should round-trip + let v2_again = Temperature::migrate_from_bytes(&v1_bytes).unwrap(); + assert_eq!(v2_again.celsius, 22.5); + } + + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + // LINKABLE TRAIT TESTS (with auto-migration) + // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + + #[cfg(all(feature = "linkable", feature = "migratable"))] #[test] fn test_from_bytes_v1_with_version_marker() { + use crate::Linkable; + let json = r#"{"schema_version":1,"temp":68.0,"timestamp":1704326400000,"unit":"F"}"#; - let temp = Temperature::from_bytes_versioned(json.as_bytes()).unwrap(); + let temp = Temperature::from_bytes(json.as_bytes()).unwrap(); assert!( (temp.celsius - 20.0).abs() < 0.01, "Expected ~20ยฐC from 68ยฐF" @@ -449,24 +514,28 @@ mod tests { assert_eq!(temp.timestamp, 1704326400000); } - #[cfg(feature = "linkable")] + #[cfg(all(feature = "linkable", feature = "migratable"))] #[test] fn test_from_bytes_v2_with_version_marker() { + use crate::Linkable; + let json = r#"{"schema_version":2,"celsius":22.5,"timestamp":1704326400000}"#; - let temp = Temperature::from_bytes_versioned(json.as_bytes()).unwrap(); + let temp = Temperature::from_bytes(json.as_bytes()).unwrap(); assert_eq!(temp.celsius, 
22.5); assert_eq!(temp.timestamp, 1704326400000); } - #[cfg(feature = "linkable")] + #[cfg(all(feature = "linkable", feature = "migratable"))] #[test] fn test_from_bytes_v1_celsius_unit() { + use crate::Linkable; + let json = r#"{"schema_version":1,"temp":22.5,"timestamp":1704326400000,"unit":"C"}"#; - let temp = Temperature::from_bytes_versioned(json.as_bytes()).unwrap(); + let temp = Temperature::from_bytes(json.as_bytes()).unwrap(); assert_eq!(temp.celsius, 22.5); } - #[cfg(feature = "linkable")] + #[cfg(all(feature = "linkable", feature = "migratable"))] #[test] fn test_from_bytes_via_linkable_trait() { use crate::Linkable; @@ -482,14 +551,14 @@ mod tests { assert_eq!(temp.celsius, 22.5); } - #[cfg(feature = "linkable")] + #[cfg(all(feature = "linkable", feature = "migratable"))] #[test] fn test_from_bytes_missing_version_fails() { - // Payloads without schema_version should fail + use crate::Linkable; + let json = r#"{"celsius":22.5,"timestamp":1704326400000}"#; - let result = Temperature::from_bytes_versioned(json.as_bytes()); + let result = Temperature::from_bytes(json.as_bytes()); assert!(result.is_err()); - assert!(result.unwrap_err().contains("schema_version")); } #[cfg(feature = "linkable")] diff --git a/aimdb-data-contracts/src/lib.rs b/aimdb-data-contracts/src/lib.rs index 8e91c467..3b80394a 100644 --- a/aimdb-data-contracts/src/lib.rs +++ b/aimdb-data-contracts/src/lib.rs @@ -58,7 +58,7 @@ mod migratable; pub use simulatable::{SimulationConfig, SimulationParams}; #[cfg(feature = "migratable")] -pub use migratable::{Migratable, MigrationError}; +pub use migratable::{MigrationChain, MigrationError, MigrationStep}; // โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• // SCHEMA TRAITS (Implementation-defined) @@ -85,11 +85,11 @@ pub use migratable::{Migratable, MigrationError}; /// | Add optional field | 
โœ… Yes | `#[serde(default)]` new field | /// | Add field with default | โœ… Yes | New field deserializes to default | /// | Remove unused field | โœ… Yes | Old data with field still parses | -/// | Rename field | โš ๏ธ Migration | Use `Migratable` trait | -/// | Change field type | โš ๏ธ Migration | Use `Migratable` trait | -/// | Add required field | โš ๏ธ Migration | Use `Migratable` trait | +/// | Rename field | โš ๏ธ Migration | Use `MigrationStep` + `migration_chain!` | +/// | Change field type | โš ๏ธ Migration | Use `MigrationStep` + `migration_chain!` | +/// | Add required field | โš ๏ธ Migration | Use `MigrationStep` + `migration_chain!` | /// -/// For breaking changes, implement the `Migratable` trait (requires `migration` feature) +/// For breaking changes, implement `MigrationStep` and use `migration_chain!` (requires `migratable` feature) /// to provide runtime transformation of older data formats. pub trait SchemaType: Sized { /// Unique identifier for this schema (e.g., "temperature", "humidity") diff --git a/aimdb-data-contracts/src/migratable.rs b/aimdb-data-contracts/src/migratable.rs index c310fcb9..ecca8a96 100644 --- a/aimdb-data-contracts/src/migratable.rs +++ b/aimdb-data-contracts/src/migratable.rs @@ -1,24 +1,110 @@ -//! Runtime schema migration support. +//! Type-safe, bidirectional schema migration with compile-time chain validation. //! -//! This module provides the `Migratable` trait for handling breaking schema -//! changes at runtime through JSON transformation. +//! This module provides an Alembic-inspired migration system where every version +//! transition is a typed, bidirectional step between concrete Rust structs. +//! +//! # Architecture +//! +//! - [`MigrationStep`] โ€” a single upgrade/downgrade between two concrete types +//! - [`MigrationChain`] โ€” runtime dispatch generated by [`migration_chain!`] macro +//! - [`migration_chain!`] โ€” declarative macro that validates the chain at compile time +//! +//! 
# How It Works +//! +//! Each schema version is a concrete Rust struct. Migration steps convert between +//! adjacent versions with full type safety โ€” no raw JSON manipulation. +//! +//! The `migration_chain!` macro generates: +//! 1. **Const assertions** โ€” version sequence validated at compile time +//! 2. **Type-checked dispatch** โ€” compiler rejects mismatched type chains +//! 3. **`MigrationChain` impl** โ€” runtime upgrade/downgrade with version detection +//! +//! # Example +//! +//! ```rust +//! extern crate alloc; +//! +//! use aimdb_data_contracts::{SchemaType, MigrationStep, MigrationChain, MigrationError}; +//! use aimdb_data_contracts::migration_chain; +//! use serde::{Deserialize, Serialize}; +//! +//! // v1 schema +//! #[derive(Clone, Debug, Serialize, Deserialize)] +//! struct SensorV1 { +//! schema_version: u32, +//! temp: f32, +//! timestamp: u64, +//! } +//! impl SchemaType for SensorV1 { +//! const NAME: &'static str = "sensor_v1"; +//! const VERSION: u32 = 1; +//! } +//! +//! // v2 schema (current) +//! #[derive(Clone, Debug, Serialize, Deserialize)] +//! struct Sensor { +//! schema_version: u32, +//! celsius: f32, +//! timestamp: u64, +//! } +//! impl SchemaType for Sensor { +//! const NAME: &'static str = "sensor"; +//! const VERSION: u32 = 2; +//! } +//! +//! // Migration step: v1 -> v2 +//! struct SensorV1ToV2; +//! impl MigrationStep for SensorV1ToV2 { +//! type Older = SensorV1; +//! type Newer = Sensor; +//! const FROM_VERSION: u32 = 1; +//! const TO_VERSION: u32 = 2; +//! +//! fn up(v1: SensorV1) -> Result { +//! Ok(Sensor { schema_version: 2, celsius: v1.temp, timestamp: v1.timestamp }) +//! } +//! fn down(v2: Sensor) -> Result { +//! Ok(SensorV1 { schema_version: 1, temp: v2.celsius, timestamp: v2.timestamp }) +//! } +//! } +//! +//! // Wire up the chain +//! migration_chain! { +//! type Current = Sensor; +//! version_field = "schema_version"; +//! steps { +//! SensorV1ToV2: SensorV1 => Sensor, +//! } +//! } +//! +//! 
// Upgrade from v1 bytes +//! let v1_json = r#"{"schema_version":1,"temp":22.5,"timestamp":100}"#; +//! let sensor = Sensor::migrate_from_bytes(v1_json.as_bytes()).unwrap(); +//! assert_eq!(sensor.celsius, 22.5); +//! +//! // Downgrade to v1 bytes +//! let v1_bytes = sensor.migrate_to_version(1).unwrap(); +//! let v1_roundtrip: serde_json::Value = serde_json::from_slice(&v1_bytes).unwrap(); +//! assert_eq!(v1_roundtrip["temp"], 22.5); +//! ``` use crate::SchemaType; /// Error returned when schema migration fails. #[derive(Debug, Clone, PartialEq, Eq)] pub enum MigrationError { - /// The source version is newer than this schema supports + /// The source version is newer than this binary supports VersionTooNew { source: u32, current: u32 }, - /// A required field is missing and has no default - MissingField(&'static str), - /// Type conversion failed - TypeConversion { - field: &'static str, - expected: &'static str, - }, - /// Custom migration error - Custom(&'static str), + /// The target downgrade version is below the minimum supported + VersionTooOld { target: u32, minimum: u32 }, + /// Deserialization of a versioned payload failed + DeserializationFailed(&'static str), + /// Serialization during downgrade failed + SerializationFailed(&'static str), + /// A domain-specific conversion error in a MigrationStep + ConversionFailed(&'static str), + /// Payload is missing the version field + MissingVersion, } impl core::fmt::Display for MigrationError { @@ -31,113 +117,541 @@ impl core::fmt::Display for MigrationError { source, current ) } - Self::MissingField(field) => write!(f, "missing required field: {}", field), - Self::TypeConversion { field, expected } => { + Self::VersionTooOld { target, minimum } => { write!( f, - "type conversion failed for '{}', expected {}", - field, expected + "target version {} is below minimum supported {}", + target, minimum ) } - Self::Custom(msg) => write!(f, "{}", msg), + Self::DeserializationFailed(msg) => write!(f, "deserialization 
failed: {}", msg), + Self::SerializationFailed(msg) => write!(f, "serialization failed: {}", msg), + Self::ConversionFailed(msg) => write!(f, "conversion failed: {}", msg), + Self::MissingVersion => write!(f, "payload missing version field"), } } } -/// Runtime schema migration support. -/// -/// Implement this trait to handle breaking schema changes at runtime. -/// The `migrate` function transforms raw JSON data from older versions -/// to the current schema format before deserialization. -/// -/// # When to Use -/// -/// Use `Migratable` when you need to: -/// - Rename fields while maintaining backward compatibility -/// - Change field types (e.g., int to float) -/// - Add required fields with computed defaults -/// - Handle complex structural changes +/// A single, typed, bidirectional migration step between two schema versions. /// -/// For simple additive changes (new optional fields), just use -/// `#[serde(default)]` - no migration needed. +/// Each step converts between two concrete Rust types with full type safety. +/// The compiler enforces that `up()` and `down()` operate on the correct types. /// /// # Example /// -/// ```rust -/// use aimdb_data_contracts::{SchemaType, Migratable, MigrationError}; -/// use serde::{Deserialize, Serialize}; -/// use serde_json::Value; +/// ```rust,ignore +/// struct TempV1ToV2; +/// impl MigrationStep for TempV1ToV2 { +/// type Older = TemperatureV1; +/// type Newer = Temperature; +/// const FROM_VERSION: u32 = 1; +/// const TO_VERSION: u32 = 2; /// -/// #[derive(Serialize, Deserialize)] -/// struct Temperature { -/// celsius: f32, // Was "temp" in v1 -/// timestamp: u64, -/// unit: String, // Added in v3 as required field +/// fn up(v1: TemperatureV1) -> Result { +/// Ok(v1.to_v2()) +/// } +/// fn down(v2: Temperature) -> Result { +/// Ok(TemperatureV1 { temp: v2.celsius, .. 
}) +/// } /// } +/// ``` +pub trait MigrationStep { + /// The older schema type (input to `up`, output of `down`) + type Older; + /// The newer schema type (output of `up`, input to `down`) + type Newer; + /// The version number of the Older type + const FROM_VERSION: u32; + /// The version number of the Newer type + const TO_VERSION: u32; + + /// Upgrade: convert from older to newer representation. + fn up(older: Self::Older) -> Result; + /// Downgrade: convert from newer to older representation. + fn down(newer: Self::Newer) -> Result; +} + +/// A complete, validated migration chain for a schema type. /// -/// impl SchemaType for Temperature { -/// const NAME: &'static str = "temperature"; -/// const VERSION: u32 = 3; -/// } +/// Generated by the [`migration_chain!`] macro. Provides runtime dispatch +/// for upgrading from any historical version to the current version, +/// and downgrading from the current version to any historical version. /// -/// impl Migratable for Temperature { -/// fn migrate(raw: &mut Value, from_version: u32) -> Result<(), MigrationError> { -/// // v1 -> v2: "temp" was renamed to "celsius" -/// if from_version < 2 { -/// if let Some(v) = raw.get("temp").cloned() { -/// raw["celsius"] = v; -/// raw.as_object_mut().unwrap().remove("temp"); -/// } -/// } +/// All chain validation (sequential versions, type chaining) happens +/// at compile time via const assertions and type checking in the macro expansion. +pub trait MigrationChain: SchemaType + serde::de::DeserializeOwned + serde::Serialize { + /// The minimum version this chain can upgrade from. + const MIN_VERSION: u32; + + /// Deserialize from bytes, auto-detecting version and upgrading to current. + /// + /// Reads the version field from the JSON payload and walks the migration + /// chain upward to produce the current schema version. + fn migrate_from_bytes(data: &[u8]) -> Result; + + /// Downgrade to a target version and serialize to bytes. 
+ /// + /// Walks the migration chain downward from the current version to produce + /// the serialized representation of an older schema version. + fn migrate_to_version( + &self, + target_version: u32, + ) -> Result, MigrationError>; +} + +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• +// MIGRATION CHAIN MACRO +// โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ• + +/// Validate the migration chain at compile time and generate `MigrationChain` impl. /// -/// // v2 -> v3: added required "unit" field -/// if from_version < 3 { -/// if raw.get("unit").is_none() { -/// raw["unit"] = Value::String("celsius".into()); -/// } -/// } +/// # Syntax /// -/// Ok(()) +/// ```rust,ignore +/// migration_chain! { +/// type Current = MyType; +/// version_field = "schema_version"; +/// steps { +/// StepV1ToV2: TypeV1 => TypeV2, +/// StepV2ToV3: TypeV2 => MyType, /// } /// } /// ``` -pub trait Migratable: SchemaType { - /// Migrate raw JSON data from an older version to the current schema. - /// - /// Called during deserialization when `from_version < VERSION`. - /// Mutate `raw` in place to transform it to the current schema format. - /// - /// # Parameters - /// - `raw`: Mutable reference to the JSON value to transform - /// - `from_version`: The version of the incoming data - /// - /// # Returns - /// - `Ok(())` if migration succeeded - /// - `Err(MigrationError)` if migration failed - fn migrate(raw: &mut serde_json::Value, from_version: u32) -> Result<(), MigrationError>; +/// +/// # What it generates +/// +/// 1. Const assertions validating version sequence and chain continuity +/// 2. `impl MigrationChain for Current` with upgrade/downgrade dispatch +/// 3. 
Type-checked match arms (compiler rejects broken chains) +#[macro_export] +macro_rules! migration_chain { + // โ”€โ”€ Single-step chain โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + ( + type Current = $current:ty; + version_field = $version_field:literal; + steps { + $step1:ty : $older1:ty => $newer1:ty $(,)? + } + ) => { + // Compile-time validation + const _: () = { + // Step must increment by exactly 1 + assert!( + <$step1 as $crate::MigrationStep>::TO_VERSION + == <$step1 as $crate::MigrationStep>::FROM_VERSION + 1, + "migration step must increment version by exactly 1" + ); + // First step starts at version 1 + assert!( + <$step1 as $crate::MigrationStep>::FROM_VERSION == 1, + "first migration step must start at version 1" + ); + // Last step ends at current VERSION + assert!( + <$step1 as $crate::MigrationStep>::TO_VERSION + == <$current as $crate::SchemaType>::VERSION, + "last migration step must end at current VERSION" + ); + }; - /// Deserialize with automatic migration from older versions. - /// - /// This is a convenience method that handles version checking and migration. - fn deserialize_versioned( - raw: &mut serde_json::Value, - from_version: u32, - ) -> Result - where - Self: serde::de::DeserializeOwned, - { - if from_version > Self::VERSION { - return Err(MigrationError::VersionTooNew { - source: from_version, - current: Self::VERSION, - }); + impl $crate::MigrationChain for $current { + const MIN_VERSION: u32 = 1; + + fn migrate_from_bytes(data: &[u8]) -> Result { + let raw: serde_json::Value = serde_json::from_slice(data) + .map_err(|_| $crate::MigrationError::DeserializationFailed("invalid JSON"))?; + + let version = raw + .get($version_field) + .and_then(|v| v.as_u64()) + .ok_or($crate::MigrationError::MissingVersion)? 
as u32; + + if version > <$current as $crate::SchemaType>::VERSION { + return Err($crate::MigrationError::VersionTooNew { + source: version, + current: <$current as $crate::SchemaType>::VERSION, + }); + } + + match version { + 1 => { + let older: $older1 = serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($older1) + )) + })?; + <$step1 as $crate::MigrationStep>::up(older) + } + v if v == <$current as $crate::SchemaType>::VERSION => { + serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($current) + )) + }) + } + _ => Err($crate::MigrationError::VersionTooNew { + source: version, + current: <$current as $crate::SchemaType>::VERSION, + }), + } + } + + fn migrate_to_version( + &self, + target_version: u32, + ) -> Result, $crate::MigrationError> { + if target_version < Self::MIN_VERSION { + return Err($crate::MigrationError::VersionTooOld { + target: target_version, + minimum: Self::MIN_VERSION, + }); + } + if target_version > <$current as $crate::SchemaType>::VERSION { + return Err($crate::MigrationError::VersionTooNew { + source: target_version, + current: <$current as $crate::SchemaType>::VERSION, + }); + } + + match target_version { + 1 => { + let older = <$step1 as $crate::MigrationStep>::down(self.clone())?; + serde_json::to_vec(&older).map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($older1) + )) + }) + } + v if v == <$current as $crate::SchemaType>::VERSION => serde_json::to_vec(self) + .map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($current) + )) + }), + _ => unreachable!(), + } + } } + }; - if from_version < Self::VERSION { - Self::migrate(raw, from_version)?; + // โ”€โ”€ Two-step chain 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + ( + type Current = $current:ty; + version_field = $version_field:literal; + steps { + $step1:ty : $older1:ty => $newer1:ty, + $step2:ty : $older2:ty => $newer2:ty $(,)? } + ) => { + const _: () = { + // Each step increments by exactly 1 + assert!( + <$step1 as $crate::MigrationStep>::TO_VERSION + == <$step1 as $crate::MigrationStep>::FROM_VERSION + 1, + "migration step must increment version by exactly 1" + ); + assert!( + <$step2 as $crate::MigrationStep>::TO_VERSION + == <$step2 as $crate::MigrationStep>::FROM_VERSION + 1, + "migration step must increment version by exactly 1" + ); + // First step starts at 1 + assert!( + <$step1 as $crate::MigrationStep>::FROM_VERSION == 1, + "first migration step must start at version 1" + ); + // Steps are sequential + assert!( + <$step1 as $crate::MigrationStep>::TO_VERSION + == <$step2 as $crate::MigrationStep>::FROM_VERSION, + "migration steps must be sequential" + ); + // Last step ends at current VERSION + assert!( + <$step2 as $crate::MigrationStep>::TO_VERSION + == <$current as $crate::SchemaType>::VERSION, + "last migration step must end at current VERSION" + ); + }; - serde_json::from_value(raw.clone()) - .map_err(|_| MigrationError::Custom("deserialization failed after migration")) - } + impl $crate::MigrationChain for $current { + const MIN_VERSION: u32 = 1; + + fn migrate_from_bytes(data: &[u8]) -> Result { + let raw: serde_json::Value = serde_json::from_slice(data) + .map_err(|_| $crate::MigrationError::DeserializationFailed("invalid JSON"))?; + + let version = raw + .get($version_field) + .and_then(|v| v.as_u64()) + .ok_or($crate::MigrationError::MissingVersion)? 
as u32; + + if version > <$current as $crate::SchemaType>::VERSION { + return Err($crate::MigrationError::VersionTooNew { + source: version, + current: <$current as $crate::SchemaType>::VERSION, + }); + } + + match version { + 1 => { + let v1: $older1 = serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($older1) + )) + })?; + let v2 = <$step1 as $crate::MigrationStep>::up(v1)?; + <$step2 as $crate::MigrationStep>::up(v2) + } + 2 => { + let v2: $older2 = serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($older2) + )) + })?; + <$step2 as $crate::MigrationStep>::up(v2) + } + v if v == <$current as $crate::SchemaType>::VERSION => { + serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($current) + )) + }) + } + _ => Err($crate::MigrationError::VersionTooNew { + source: version, + current: <$current as $crate::SchemaType>::VERSION, + }), + } + } + + fn migrate_to_version( + &self, + target_version: u32, + ) -> Result, $crate::MigrationError> { + if target_version < Self::MIN_VERSION { + return Err($crate::MigrationError::VersionTooOld { + target: target_version, + minimum: Self::MIN_VERSION, + }); + } + if target_version > <$current as $crate::SchemaType>::VERSION { + return Err($crate::MigrationError::VersionTooNew { + source: target_version, + current: <$current as $crate::SchemaType>::VERSION, + }); + } + + match target_version { + 1 => { + let v2 = <$step2 as $crate::MigrationStep>::down(self.clone())?; + let v1 = <$step1 as $crate::MigrationStep>::down(v2)?; + serde_json::to_vec(&v1).map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($older1) + )) + }) + } + 2 => { + let v2 = <$step2 as $crate::MigrationStep>::down(self.clone())?; + 
serde_json::to_vec(&v2).map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($older2) + )) + }) + } + v if v == <$current as $crate::SchemaType>::VERSION => serde_json::to_vec(self) + .map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($current) + )) + }), + _ => unreachable!(), + } + } + } + }; + + // โ”€โ”€ Three-step chain โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + ( + type Current = $current:ty; + version_field = $version_field:literal; + steps { + $step1:ty : $older1:ty => $newer1:ty, + $step2:ty : $older2:ty => $newer2:ty, + $step3:ty : $older3:ty => $newer3:ty $(,)? + } + ) => { + const _: () = { + assert!( + <$step1 as $crate::MigrationStep>::TO_VERSION + == <$step1 as $crate::MigrationStep>::FROM_VERSION + 1, + "migration step must increment version by exactly 1" + ); + assert!( + <$step2 as $crate::MigrationStep>::TO_VERSION + == <$step2 as $crate::MigrationStep>::FROM_VERSION + 1, + "migration step must increment version by exactly 1" + ); + assert!( + <$step3 as $crate::MigrationStep>::TO_VERSION + == <$step3 as $crate::MigrationStep>::FROM_VERSION + 1, + "migration step must increment version by exactly 1" + ); + assert!( + <$step1 as $crate::MigrationStep>::FROM_VERSION == 1, + "first migration step must start at version 1" + ); + assert!( + <$step1 as $crate::MigrationStep>::TO_VERSION + == <$step2 as $crate::MigrationStep>::FROM_VERSION, + "migration steps must be sequential" + ); + assert!( + <$step2 as $crate::MigrationStep>::TO_VERSION + == <$step3 as $crate::MigrationStep>::FROM_VERSION, + "migration steps must be sequential" + ); + assert!( + <$step3 as $crate::MigrationStep>::TO_VERSION + == <$current as $crate::SchemaType>::VERSION, + "last migration step must end at current VERSION" + ); + }; + + impl $crate::MigrationChain for $current { 
+ const MIN_VERSION: u32 = 1; + + fn migrate_from_bytes(data: &[u8]) -> Result { + let raw: serde_json::Value = serde_json::from_slice(data) + .map_err(|_| $crate::MigrationError::DeserializationFailed("invalid JSON"))?; + + let version = raw + .get($version_field) + .and_then(|v| v.as_u64()) + .ok_or($crate::MigrationError::MissingVersion)? as u32; + + if version > <$current as $crate::SchemaType>::VERSION { + return Err($crate::MigrationError::VersionTooNew { + source: version, + current: <$current as $crate::SchemaType>::VERSION, + }); + } + + match version { + 1 => { + let v1: $older1 = serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($older1) + )) + })?; + let v2 = <$step1 as $crate::MigrationStep>::up(v1)?; + let v3 = <$step2 as $crate::MigrationStep>::up(v2)?; + <$step3 as $crate::MigrationStep>::up(v3) + } + 2 => { + let v2: $older2 = serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($older2) + )) + })?; + let v3 = <$step2 as $crate::MigrationStep>::up(v2)?; + <$step3 as $crate::MigrationStep>::up(v3) + } + 3 => { + let v3: $older3 = serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($older3) + )) + })?; + <$step3 as $crate::MigrationStep>::up(v3) + } + v if v == <$current as $crate::SchemaType>::VERSION => { + serde_json::from_value(raw).map_err(|_| { + $crate::MigrationError::DeserializationFailed(concat!( + "failed to parse as ", + stringify!($current) + )) + }) + } + _ => Err($crate::MigrationError::VersionTooNew { + source: version, + current: <$current as $crate::SchemaType>::VERSION, + }), + } + } + + fn migrate_to_version( + &self, + target_version: u32, + ) -> Result, $crate::MigrationError> { + if target_version < Self::MIN_VERSION { + return Err($crate::MigrationError::VersionTooOld { + 
target: target_version, + minimum: Self::MIN_VERSION, + }); + } + if target_version > <$current as $crate::SchemaType>::VERSION { + return Err($crate::MigrationError::VersionTooNew { + source: target_version, + current: <$current as $crate::SchemaType>::VERSION, + }); + } + + match target_version { + 1 => { + let v3 = <$step3 as $crate::MigrationStep>::down(self.clone())?; + let v2 = <$step2 as $crate::MigrationStep>::down(v3)?; + let v1 = <$step1 as $crate::MigrationStep>::down(v2)?; + serde_json::to_vec(&v1).map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($older1) + )) + }) + } + 2 => { + let v3 = <$step3 as $crate::MigrationStep>::down(self.clone())?; + let v2 = <$step2 as $crate::MigrationStep>::down(v3)?; + serde_json::to_vec(&v2).map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($older2) + )) + }) + } + 3 => { + let v3 = <$step3 as $crate::MigrationStep>::down(self.clone())?; + serde_json::to_vec(&v3).map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($older3) + )) + }) + } + v if v == <$current as $crate::SchemaType>::VERSION => serde_json::to_vec(self) + .map_err(|_| { + $crate::MigrationError::SerializationFailed(concat!( + "failed to serialize as ", + stringify!($current) + )) + }), + _ => unreachable!(), + } + } + } + }; } diff --git a/aimdb-data-contracts/tests/export_ts.rs b/aimdb-data-contracts/tests/export_ts.rs deleted file mode 100644 index 94ca7700..00000000 --- a/aimdb-data-contracts/tests/export_ts.rs +++ /dev/null @@ -1,269 +0,0 @@ -//! TypeScript export test -//! -//! Run with: cargo test --features ts,observable export_typescript -- --ignored -//! -//! This generates TypeScript type definitions to the bindings/ directory, -//! plus a schema-registry.ts with full metadata (fields, units, icons). 
- -#![cfg(feature = "ts")] - -use aimdb_data_contracts::contracts::{GpsLocation, Humidity, Temperature}; -use aimdb_data_contracts::{Observable, SchemaType}; -use std::fs; -use std::path::Path; -use ts_rs::TS; - -/// Schema metadata for export to TypeScript - derived automatically from traits -struct SchemaMeta { - name: &'static str, - icon: &'static str, - unit: &'static str, - rust_schema: String, - attributes: SchemaAttributes, -} - -/// Trait implementations for a schema -#[derive(Clone)] -struct SchemaAttributes { - observable: bool, - simulatable: bool, - linkable: bool, - settable: bool, - migratable: bool, - no_std: bool, -} - -impl SchemaMeta { - /// Create metadata from any type implementing Observable + TS - fn from_type( - source_file: &str, - struct_name: &str, - attributes: SchemaAttributes, - ) -> Self { - let contracts_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("src/contracts"); - let rust_schema = extract_struct_definition(&contracts_dir.join(source_file), struct_name) - .unwrap_or_else(|| { - format!("// Could not extract {} from {}", struct_name, source_file) - }); - - SchemaMeta { - name: T::NAME, - icon: T::ICON, - unit: T::UNIT, - rust_schema, - attributes, - } - } - - /// Derive label from name (temperature -> Temperature, gps_location -> GPS Location) - fn label(&self) -> String { - self.name - .split('_') - .map(|word| { - let mut chars = word.chars(); - match chars.next() { - None => String::new(), - Some(c) => { - // Handle known acronyms - if word == "gps" { - "GPS".to_string() - } else { - c.to_uppercase().chain(chars).collect() - } - } - } - }) - .collect::>() - .join(" ") - } -} - -/// Extract a struct definition from a Rust source file -fn extract_struct_definition(file_path: &Path, struct_name: &str) -> Option { - let content = fs::read_to_string(file_path).ok()?; - - // Find the struct definition with doc comments - let struct_marker = format!("pub struct {}", struct_name); - let struct_start = content.find(&struct_marker)?; 
- - // Walk backwards to find doc comments (/// lines) - let before_struct = &content[..struct_start]; - let doc_start = before_struct - .rfind("\n\n") - .or_else(|| before_struct.rfind("*/\n")) - .map(|i| i + 1) - .unwrap_or(struct_start); - - // Find the closing brace of the struct - let after_struct = &content[struct_start..]; - let struct_end = after_struct - .find("\n}\n") - .or_else(|| after_struct.find("\n}"))?; - - let full_definition = &content[doc_start..struct_start + struct_end + 2]; - - // Clean up: remove #[derive(...)] and #[cfg_attr(...)] lines, keep doc comments and struct - let cleaned: String = full_definition - .lines() - .filter(|line| { - let trimmed = line.trim(); - !trimmed.starts_with("#[derive") - && !trimmed.starts_with("#[cfg_attr") - && !trimmed.starts_with("#[serde") - }) - .collect::>() - .join("\n"); - - Some(cleaned.trim().to_string()) -} - -/// Export all contract types to TypeScript. -/// -/// By default, ts-rs exports to `./bindings/` relative to the crate root. 
-/// Run this test with --ignored flag to generate the files: -/// -/// ```sh -/// cargo test -p aimdb-data-contracts --features ts,observable export_typescript -- --ignored -/// ``` -#[test] -#[ignore = "Run manually to generate TypeScript bindings"] -fn export_typescript() { - // Export each contract type via ts-rs - Temperature::export_all().expect("Failed to export Temperature"); - Humidity::export_all().expect("Failed to export Humidity"); - GpsLocation::export_all().expect("Failed to export GpsLocation"); - - println!("โœ… TypeScript bindings exported to bindings/"); - - // Generate schema registry with metadata - all derived from traits + source - // Attributes reflect actual trait implementations in Rust source - let base_traits = SchemaAttributes { - observable: true, - simulatable: true, - linkable: true, - settable: true, - migratable: false, - no_std: true, - }; - - // Temperature has Migratable trait (v1 โ†’ v2 migration) - let temp_traits = SchemaAttributes { - migratable: true, - ..base_traits.clone() - }; - - let schemas = vec![ - SchemaMeta::from_type::("temperature.rs", "Temperature", temp_traits), - SchemaMeta::from_type::("humidity.rs", "Humidity", base_traits.clone()), - SchemaMeta::from_type::("location.rs", "GpsLocation", base_traits), - ]; - - generate_schema_registry(&schemas); -} - -fn generate_schema_registry(schemas: &[SchemaMeta]) { - let mut output = String::from( - r#"// AUTO-GENERATED from aimdb-data-contracts - DO NOT EDIT -// Run: cargo test -p aimdb-data-contracts --features ts,observable export_typescript -- --ignored - -export interface SchemaAttributes { - observable: boolean; - simulatable: boolean; - linkable: boolean; - settable: boolean; - migratable: boolean; - noStd: boolean; -} - -export interface SchemaMeta { - name: string; - label: string; - icon: string; - unit: string; - rustSchema: string; - attributes: SchemaAttributes; -} - -/** - * Schema registry with metadata extracted from Rust Observable traits. 
- * Keys match WebSocket message `type` field. - */ -export const SCHEMA_REGISTRY: Record = { -"#, - ); - - for schema in schemas { - output.push_str(&format!( - r#" "{}": {{ - name: "{}", - label: "{}", - icon: "{}", - unit: "{}", - rustSchema: `{}`, - attributes: {{ - observable: {}, - simulatable: {}, - linkable: {}, - settable: {}, - migratable: {}, - noStd: {}, - }}, - }}, -"#, - schema.name, - schema.name, - schema.label(), - schema.icon, - schema.unit, - schema.rust_schema, - schema.attributes.observable, - schema.attributes.simulatable, - schema.attributes.linkable, - schema.attributes.settable, - schema.attributes.migratable, - schema.attributes.no_std, - )); - } - - output.push_str( - r#"}; - -/** - * Get schema metadata by WebSocket message type. - */ -export function getSchema(messageType: string): SchemaMeta | undefined { - return SCHEMA_REGISTRY[messageType]; -} - -/** - * List all available schema types. - */ -export const SCHEMA_TYPES = Object.keys(SCHEMA_REGISTRY); -"#, - ); - - let bindings_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("bindings"); - fs::create_dir_all(&bindings_dir).expect("Failed to create bindings dir"); - - let output_path = bindings_dir.join("schema-registry.ts"); - fs::write(&output_path, output).expect("Failed to write schema registry"); - - println!("โœ… Schema registry exported to {:?}", output_path); -} - -/// Verify that all types can be exported (doesn't write files) -#[test] -fn verify_ts_definitions() { - // Just verify the definitions are valid - let temp_ts = Temperature::decl(); - let humidity_ts = Humidity::decl(); - let location_ts = GpsLocation::decl(); - - assert!(temp_ts.contains("Temperature")); - assert!(humidity_ts.contains("Humidity")); - assert!(location_ts.contains("GpsLocation")); - - println!("Temperature:\n{}\n", temp_ts); - println!("Humidity:\n{}\n", humidity_ts); - println!("GpsLocation:\n{}\n", location_ts); -} diff --git a/scripts/gen-ts-bindings.sh b/scripts/gen-ts-bindings.sh deleted file 
mode 100755 index 1bc6e3b6..00000000 --- a/scripts/gen-ts-bindings.sh +++ /dev/null @@ -1,61 +0,0 @@ -#!/bin/bash -# Generate TypeScript bindings from Rust contracts and copy to aimdb-ui -# -# Usage: ./scripts/gen-ts-bindings.sh -# -# This script: -# 1. Runs the ts-rs export test to generate TypeScript definitions -# 2. Generates schema-registry.ts with Observable metadata (icons, units) -# 3. Copies the generated files to aimdb-ui/src/types/generated/ - -set -e - -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" -UI_TYPES_DIR="${AIMDB_UI_PATH:-/aimdb_ws/aimdb-pro/_external/aimdb-ui/src/types/generated}" - -echo "๐Ÿ”ง Generating TypeScript bindings from Rust contracts..." - -cd "$REPO_ROOT" - -# Run the export test (requires both ts and observable features) -cargo test -p aimdb-data-contracts --features ts,observable export_typescript -- --ignored --nocapture - -# Check if bindings were generated -BINDINGS_DIR="$REPO_ROOT/aimdb-data-contracts/bindings" -if [ ! 
-d "$BINDINGS_DIR" ]; then - echo "โŒ Error: bindings directory not found at $BINDINGS_DIR" - exit 1 -fi - -echo "๐Ÿ“ Generated bindings:" -ls -la "$BINDINGS_DIR" - -# Create target directory if specified and exists -if [ -n "$UI_TYPES_DIR" ]; then - mkdir -p "$UI_TYPES_DIR" - - # Copy bindings (types + schema registry) - cp "$BINDINGS_DIR"/*.ts "$UI_TYPES_DIR/" - - # Create index.ts barrel export - cat > "$UI_TYPES_DIR/index.ts" << 'EOF' -// Auto-generated from aimdb-data-contracts via ts-rs -// Do not edit manually - run scripts/gen-ts-bindings.sh to regenerate - -export type { Temperature } from './Temperature'; -export type { Humidity } from './Humidity'; -export type { GpsLocation } from './GpsLocation'; -export { SCHEMA_REGISTRY, SCHEMA_TYPES, getSchema } from './schema-registry'; -export type { SchemaMeta } from './schema-registry'; -EOF - - echo "โœ… Copied bindings to $UI_TYPES_DIR" - ls -la "$UI_TYPES_DIR" -else - echo "โ„น๏ธ Set AIMDB_UI_PATH to copy bindings to UI project" - echo " Example: AIMDB_UI_PATH=/path/to/aimdb-ui/src/types/generated ./scripts/gen-ts-bindings.sh" -fi - -echo "" -echo "โœ… TypeScript bindings generation complete!" 
From 79da3b28dbb87333cc510b24dbf0bf9b902b4b38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Sat, 7 Mar 2026 20:56:31 +0000 Subject: [PATCH 24/46] feat: extract entity identifier from topic name in TopicInfo struct for improved clarity --- Cargo.lock | 2 +- _external/embassy | 2 +- aimdb-websocket-connector/src/builder.rs | 5 +++++ aimdb-ws-protocol/src/lib.rs | 5 +++++ 4 files changed, 12 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 09cd9308..70b38f6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3084,7 +3084,7 @@ dependencies = [ [[package]] name = "stm32-metapac" version = "19.0.0" -source = "git+https://github.com/embassy-rs/stm32-data-generated?tag=stm32-data-3879f504f35b23446e389c87deeb38542e0c9d4b#91326e22c7580c0da21cb718e06a64eb27bd201d" +source = "git+https://github.com/embassy-rs/stm32-data-generated?tag=stm32-data-0f4c948b5c81ebe421fe902857ccdb39029651f6#da4c008381dbdbee62135dee88aa2e7d7c4e4992" dependencies = [ "cortex-m", "cortex-m-rt", diff --git a/_external/embassy b/_external/embassy index 507ac8bc..777b2dc3 160000 --- a/_external/embassy +++ b/_external/embassy @@ -1 +1 @@ -Subproject commit 507ac8bc1b573f4172486440c7648b489dcfe02e +Subproject commit 777b2dc3287039a17f151124f9948d5a7e4cd062 diff --git a/aimdb-websocket-connector/src/builder.rs b/aimdb-websocket-connector/src/builder.rs index d8bb8ac1..dd22d56d 100644 --- a/aimdb-websocket-connector/src/builder.rs +++ b/aimdb-websocket-connector/src/builder.rs @@ -308,9 +308,14 @@ where .into_iter() .map(|(topic, type_id)| { let schema_type = type_id_map.0.get(&type_id).map(|s| s.to_string()); + // Extract entity from topic name: "temp.vienna" โ†’ "vienna". + // The server owns the naming convention โ€” clients receive + // the entity as a first-class field and never parse topics. 
+ let entity = topic.rsplit('.').next().map(|s| s.to_string()); TopicInfo { name: topic, schema_type, + entity, } }) .collect(); diff --git a/aimdb-ws-protocol/src/lib.rs b/aimdb-ws-protocol/src/lib.rs index 52cb0cfa..02bfac40 100644 --- a/aimdb-ws-protocol/src/lib.rs +++ b/aimdb-ws-protocol/src/lib.rs @@ -113,6 +113,11 @@ pub struct TopicInfo { /// Schema type name (e.g. `"temperature"`), if known by the server. #[serde(skip_serializing_if = "Option::is_none")] pub schema_type: Option, + /// Entity / node identifier (e.g. `"vienna"`), extracted server-side from the + /// topic name. The server is the authority on naming conventions โ€” clients + /// should use this field directly rather than parsing the topic name. + #[serde(skip_serializing_if = "Option::is_none")] + pub entity: Option, } /// Machine-readable error codes sent in `ServerMessage::Error`. From b392e601779dfebf1c03be921f99b145b489f58c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Mon, 9 Mar 2026 21:26:52 +0000 Subject: [PATCH 25/46] feat: enhance WebSocket functionality with discover and known schemas methods, and improve request handling --- aimdb-wasm-adapter/Cargo.toml | 3 + aimdb-wasm-adapter/src/bindings.rs | 182 ++++++++++++- aimdb-wasm-adapter/src/schema_registry.rs | 5 + aimdb-wasm-adapter/src/ws_bridge.rs | 297 ++++++++++++++-------- 4 files changed, 372 insertions(+), 115 deletions(-) diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml index 8ec19f2f..ab3fe0fe 100644 --- a/aimdb-wasm-adapter/Cargo.toml +++ b/aimdb-wasm-adapter/Cargo.toml @@ -48,6 +48,9 @@ wasm-bindgen-futures = { version = "0.4", optional = true } js-sys = { version = "0.3", optional = true } web-sys = { version = "0.3", optional = true, features = [ "console", + "CustomEvent", + "CustomEventInit", + "EventTarget", "MessageEvent", "Performance", "WebSocket", diff --git a/aimdb-wasm-adapter/src/bindings.rs b/aimdb-wasm-adapter/src/bindings.rs index cf5f94fa..8570fe45 100644 
--- a/aimdb-wasm-adapter/src/bindings.rs +++ b/aimdb-wasm-adapter/src/bindings.rs @@ -16,10 +16,13 @@ extern crate alloc; use alloc::boxed::Box; +use alloc::collections::BTreeMap; use alloc::format; +use alloc::rc::Rc; use alloc::string::{String, ToString}; use alloc::sync::Arc; use alloc::vec::Vec; +use core::cell::RefCell; use core::fmt::Debug; use serde::de::DeserializeOwned; @@ -30,6 +33,8 @@ use aimdb_core::buffer::BufferCfg; use aimdb_core::builder::{AimDb, AimDbBuilder}; use aimdb_core::record_id::StringKey; +use aimdb_ws_protocol::{ClientMessage, ServerMessage}; + use crate::schema_registry::{SchemaOps, SchemaRegistry}; use crate::ws_bridge::WsBridge; use crate::WasmAdapter; @@ -108,7 +113,7 @@ pub struct WasmDb { /// Live database handle. `None` before `build()`. db: Option>, /// Maps record key โ†’ schema type name (always populated). - schema_map: Vec<(String, String)>, + schema_map: BTreeMap, /// Type-erased dispatch registry built from the visitor pattern. registry: SchemaRegistry, } @@ -127,7 +132,7 @@ impl WasmDb { WasmDb { configs: Some(Vec::new()), db: None, - schema_map: Vec::new(), + schema_map: BTreeMap::new(), registry: SchemaRegistry::build(), } } @@ -161,7 +166,7 @@ impl WasmDb { let buffer_cfg = parse_buffer_cfg(&opts.buffer)?; self.schema_map - .push((record_key.to_string(), opts.schema_type.clone())); + .insert(record_key.to_string(), opts.schema_type.clone()); configs.push(RecordConfig { key: record_key.to_string(), @@ -234,6 +239,35 @@ impl WasmDb { self.db.is_some() } + /// Discover topics served at `url` without building a full database. + /// + /// Opens a one-shot WebSocket, sends `ListTopics`, and resolves with + /// `TopicInfo[]` once the server responds. Rejects after 30 s if no + /// response arrives, or immediately on connection error. 
+ /// + /// # Example (TypeScript) + /// ```ts + /// const wasm = await import("aimdb-wasm-adapter"); + /// await wasm.default(); + /// const topics = await wasm.WasmDb.discover("wss://api.example.com/ws"); + /// topics.forEach(t => db.configureRecord(t.entity, { schemaType: t.schemaType, buffer: "SingleLatest" })); + /// ``` + pub async fn discover(url: &str) -> Result { + wasm_bindgen_futures::JsFuture::from(discover_impl(url.to_string())) + .await + .map_err(|e| JsError::new(&format!("discover: {e:?}"))) + } + + /// Returns the list of schema type names known to this WASM adapter. + /// + /// Use this to filter discovered topics before calling `configureRecord` โ€” + /// topics whose `schemaType` is not in this list cannot be handled by the + /// WASM runtime and should be skipped. + #[wasm_bindgen(js_name = "knownSchemas")] + pub fn known_schemas(&self) -> Vec { + self.registry.known_names().iter().map(|s| s.to_string()).collect() + } + /// Connect a WebSocket bridge to this database for server synchronization. /// /// The database remains usable for local `get()` / `set()` / `subscribe()` @@ -263,6 +297,143 @@ impl WasmDb { } } +// โ”€โ”€โ”€ discover_impl โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Build a one-shot WebSocket promise that resolves with `TopicInfo[]`. +/// +/// Each callback pair (resolve, reject) is stored in an `Rc>` +/// so that whichever event fires first wins and subsequent events are no-ops. 
+fn discover_impl(url: String) -> js_sys::Promise { + js_sys::Promise::new(&mut move |resolve, reject| { + let ws = match web_sys::WebSocket::new(&url) { + Ok(ws) => ws, + Err(e) => { + let _ = reject.call1( + &JsValue::NULL, + &JsValue::from_str(&format!("WebSocket open failed: {e:?}")), + ); + return; + } + }; + let ws = Rc::new(ws); + let resolve_rc: Rc>> = + Rc::new(RefCell::new(Some(resolve))); + let reject_rc: Rc>> = + Rc::new(RefCell::new(Some(reject))); + + // on_open: send ListTopics + { + let ws_clone = ws.clone(); + let on_open = Closure::wrap(Box::new(move || { + let msg = ClientMessage::ListTopics { + id: "discover".to_string(), + }; + if let Ok(json) = serde_json::to_string(&msg) { + let _ = ws_clone.send_with_str(&json); + } + }) as Box); + ws.set_onopen(Some(on_open.as_ref().unchecked_ref())); + on_open.forget(); + } + + // on_message: parse TopicList, resolve, close socket + { + let ws_clone = ws.clone(); + let resolve_clone = resolve_rc.clone(); + let reject_clone = reject_rc.clone(); + let on_message = + Closure::wrap(Box::new(move |event: web_sys::MessageEvent| { + let _ = ws_clone.close(); + let Some(text) = event.data().as_string() else { + if let Some(rej) = reject_clone.borrow_mut().take() { + let _ = rej.call1( + &JsValue::NULL, + &JsValue::from_str("Non-text frame from server"), + ); + } + return; + }; + match serde_json::from_str::(&text) { + Ok(ServerMessage::TopicList { topics, .. 
}) => { + let serializer = + serde_wasm_bindgen::Serializer::json_compatible(); + let arr = js_sys::Array::new(); + for topic in &topics { + if let Ok(js_val) = topic.serialize(&serializer) { + arr.push(&js_val); + } + } + if let Some(res) = resolve_clone.borrow_mut().take() { + let _ = res.call1(&JsValue::NULL, &arr); + } + } + _ => { + if let Some(rej) = reject_clone.borrow_mut().take() { + let _ = rej.call1( + &JsValue::NULL, + &JsValue::from_str("Unexpected server message"), + ); + } + } + } + }) as Box); + ws.set_onmessage(Some(on_message.as_ref().unchecked_ref())); + on_message.forget(); + } + + // on_error: reject + { + let reject_clone = reject_rc.clone(); + let on_error = Closure::wrap(Box::new(move || { + if let Some(rej) = reject_clone.borrow_mut().take() { + let _ = rej.call1( + &JsValue::NULL, + &JsValue::from_str("WebSocket error during discover"), + ); + } + }) as Box); + ws.set_onerror(Some(on_error.as_ref().unchecked_ref())); + on_error.forget(); + } + + // on_close: reject if server closed before we got TopicList + // (no-op if on_message already resolved) + { + let reject_clone = reject_rc.clone(); + let on_close = Closure::wrap(Box::new(move || { + if let Some(rej) = reject_clone.borrow_mut().take() { + let _ = rej.call1( + &JsValue::NULL, + &JsValue::from_str("Connection closed before TopicList received"), + ); + } + }) as Box); + ws.set_onclose(Some(on_close.as_ref().unchecked_ref())); + on_close.forget(); + } + + // Timeout: reject after 30 s + { + let reject_clone = reject_rc.clone(); + let timeout_cb = Closure::once(move || { + if let Some(rej) = reject_clone.borrow_mut().take() { + let _ = rej.call1( + &JsValue::NULL, + &JsValue::from_str("discover timed out"), + ); + } + }); + if let Some(window) = web_sys::window() { + let _ = window.set_timeout_with_callback_and_timeout_and_arguments_0( + timeout_cb.as_ref().unchecked_ref(), + 30_000, + ); + } + timeout_cb.forget(); + } + }) +} + // โ”€โ”€โ”€ Private helpers 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ impl WasmDb { @@ -275,9 +446,8 @@ impl WasmDb { let schema = self .schema_map - .iter() - .find(|(k, _)| k == record_key) - .map(|(_, v)| v.as_str()) + .get(record_key) + .map(|v| v.as_str()) .ok_or_else(|| JsError::new(&format!("Unknown record key: {record_key}")))?; let ops = self diff --git a/aimdb-wasm-adapter/src/schema_registry.rs b/aimdb-wasm-adapter/src/schema_registry.rs index 2df03e88..7f18140e 100644 --- a/aimdb-wasm-adapter/src/schema_registry.rs +++ b/aimdb-wasm-adapter/src/schema_registry.rs @@ -70,6 +70,11 @@ impl SchemaRegistry { pub fn is_known(&self, schema_name: &str) -> bool { self.entries.contains_key(schema_name) } + + /// Returns all registered schema names. + pub fn known_names(&self) -> alloc::vec::Vec<&'static str> { + self.entries.keys().copied().collect() + } } // โ”€โ”€โ”€ Visitor that builds the registry โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs index 180f62ca..9b825ece 100644 --- a/aimdb-wasm-adapter/src/ws_bridge.rs +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -79,6 +79,10 @@ pub struct BridgeOptions { /// Keepalive interval in milliseconds (default: 30 000). #[serde(default = "default_keepalive_ms")] pub keepalive_ms: u32, + /// Timeout for query / listTopics requests in milliseconds (default: 30 000). + /// Set to 0 to disable timeouts. 
+ #[serde(default = "default_query_timeout_ms")] + pub query_timeout_ms: u32, } fn default_true() -> bool { @@ -90,6 +94,9 @@ fn default_queue_size() -> usize { fn default_keepalive_ms() -> u32 { 30_000 } +fn default_query_timeout_ms() -> u32 { + 30_000 +} impl Default for BridgeOptions { fn default() -> Self { @@ -99,6 +106,7 @@ impl Default for BridgeOptions { late_join: true, max_offline_queue: 256, keepalive_ms: 30_000, + query_timeout_ms: 30_000, } } } @@ -122,13 +130,28 @@ struct BridgeState { // โ”€โ”€โ”€ Shared reconnect context โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ +/// Tag for routing server responses to the correct pending promise. +#[derive(Debug, Clone, Copy)] +enum RequestKind { + Query, + ListTopics, +} + +/// A pending request waiting for a server response. +struct PendingRequest { + /// Reserved for future use when response routing depends on request type. + _kind: RequestKind, + resolve: js_sys::Function, + reject: js_sys::Function, +} + /// Shared state needed by both the initial connect and reconnect paths. /// /// Wrapped in `Rc` so closures can cheaply reference it without cloning /// every field individually (reduces parameter explosion). struct SharedCtx { db: AimDb, - schema_map: Vec<(String, String)>, + schema_map: BTreeMap, registry: SchemaRegistry, state: Rc>, on_status: Rc>>, @@ -136,14 +159,10 @@ struct SharedCtx { backoff: Vec, url: String, ws_cell: Rc>, - /// Pending query requests: correlation ID โ†’ (resolve, reject) JS functions. - pending_queries: Rc>>, - /// Simple counter for generating unique query IDs. - query_id_counter: Rc>, - /// Pending list_topics requests: correlation ID โ†’ (resolve, reject) JS functions. - pending_list_topics: Rc>>, - /// Counter for generating unique list_topics IDs. - list_topics_id_counter: Rc>, + /// Pending request-response pairs: correlation ID โ†’ pending request. 
+ pending_requests: Rc>>, + /// Counter for generating unique request IDs. + request_id_counter: Rc>, } // โ”€โ”€โ”€ WsBridge โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -182,6 +201,11 @@ impl WsBridge { /// ``` #[wasm_bindgen(js_name = "onStatusChange")] pub fn on_status_change(&self, callback: js_sys::Function) { + // Immediately replay current status so late registrations don't + // miss the "connected" transition that may have already fired. + let current = self.ctx.state.borrow().status; + let _ = callback.call1(&JsValue::NULL, &JsValue::from_str(current.as_str())); + // Store for subsequent status changes *self.ctx.on_status.borrow_mut() = Some(callback); } @@ -239,6 +263,7 @@ impl WsBridge { drop(state); let _ = self.ctx.ws_cell.borrow().close(); + reject_all_pending(&self.ctx.pending_requests, "Disconnected"); emit_status(&self.ctx.on_status, ConnectionStatus::Disconnected); } @@ -249,16 +274,12 @@ impl WsBridge { /// Query historical / persisted records over the WebSocket connection. /// - /// Returns a `Promise` that resolves with `QueryRecord[]`. + /// Returns a `Promise` that resolves with `{ records, total }`. 
/// /// ```ts - /// const records = await bridge.query('*', { from: 1700000000000, to: 1700003600000, limit: 500 }); + /// const result = await bridge.query('*', { from: 1700000000000, to: 1700003600000, limit: 500 }); /// ``` pub fn query(&self, pattern: &str, options: JsValue) -> js_sys::Promise { - let ctx = self.ctx.clone(); - let pattern = pattern.to_string(); - - // Parse options #[derive(Deserialize, Default)] struct QueryOpts { from: Option, @@ -271,93 +292,29 @@ impl WsBridge { serde_wasm_bindgen::from_value(options).unwrap_or_default() }; - // Generate unique ID - let id = { - let mut counter = ctx.query_id_counter.borrow_mut(); - *counter += 1; - format!("q{}", *counter) - }; - - // Create promise - let id_for_promise = id.clone(); - js_sys::Promise::new(&mut move |resolve, reject| { - // Register pending query - ctx.pending_queries - .borrow_mut() - .insert(id_for_promise.clone(), (resolve, reject.clone())); - - // Build and send query message - let msg = aimdb_ws_protocol::ClientMessage::Query { - id: id_for_promise.clone(), - pattern: pattern.clone(), + let pattern = pattern.to_string(); + send_request(&self.ctx, RequestKind::Query, move |id| { + ClientMessage::Query { + id, + pattern, from: opts.from, to: opts.to, limit: opts.limit, - }; - - let state = ctx.state.borrow(); - if state.status != ConnectionStatus::Connected { - drop(state); - ctx.pending_queries.borrow_mut().remove(&id_for_promise); - let _ = reject.call1(&JsValue::NULL, &JsValue::from_str("Not connected")); - return; - } - drop(state); - - if let Err(e) = send_json(&ctx.ws_cell.borrow(), &msg) { - ctx.pending_queries.borrow_mut().remove(&id_for_promise); - let _ = reject.call1( - &JsValue::NULL, - &JsValue::from_str(&format!("Send failed: {e:?}")), - ); } }) } + /// List all topics served by the WebSocket endpoint. /// /// Returns a `Promise` that resolves with `TopicInfo[]`. - /// Each entry has `name: string` and optionally `schema_type: string`. 
/// /// ```ts /// const topics = await bridge.listTopics(); - /// // [{ name: "temp.vienna", schema_type: "temperature" }, โ€ฆ] /// ``` #[wasm_bindgen(js_name = "listTopics")] pub fn list_topics(&self) -> js_sys::Promise { - let ctx = self.ctx.clone(); - - let id = { - let mut counter = ctx.list_topics_id_counter.borrow_mut(); - *counter += 1; - format!("lt{}", *counter) - }; - - let id_for_promise = id.clone(); - js_sys::Promise::new(&mut move |resolve, reject| { - ctx.pending_list_topics - .borrow_mut() - .insert(id_for_promise.clone(), (resolve, reject.clone())); - - let msg = ClientMessage::ListTopics { - id: id_for_promise.clone(), - }; - - let state = ctx.state.borrow(); - if state.status != ConnectionStatus::Connected { - drop(state); - ctx.pending_list_topics.borrow_mut().remove(&id_for_promise); - let _ = reject.call1(&JsValue::NULL, &JsValue::from_str("Not connected")); - return; - } - drop(state); - - if let Err(e) = send_json(&ctx.ws_cell.borrow(), &msg) { - ctx.pending_list_topics.borrow_mut().remove(&id_for_promise); - let _ = reject.call1( - &JsValue::NULL, - &JsValue::from_str(&format!("Send failed: {e:?}")), - ); - } + send_request(&self.ctx, RequestKind::ListTopics, |id| { + ClientMessage::ListTopics { id } }) } } @@ -374,7 +331,7 @@ impl WsBridge { /// Create a new bridge (called from `WasmDb::connect_bridge`). 
pub(crate) fn new_internal( db: AimDb, - schema_map: Vec<(String, String)>, + schema_map: BTreeMap, registry: SchemaRegistry, url: &str, options: JsValue, @@ -414,10 +371,8 @@ impl WsBridge { backoff, url: url.to_string(), ws_cell, - pending_queries: Rc::new(RefCell::new(BTreeMap::new())), - query_id_counter: Rc::new(RefCell::new(0)), - pending_list_topics: Rc::new(RefCell::new(BTreeMap::new())), - list_topics_id_counter: Rc::new(RefCell::new(0)), + pending_requests: Rc::new(RefCell::new(BTreeMap::new())), + request_id_counter: Rc::new(RefCell::new(0)), }); install_ws_callbacks(&ctx); @@ -426,6 +381,94 @@ impl WsBridge { } } +// โ”€โ”€โ”€ Unified request helper โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Send a request-response message and return a JS Promise. +/// +/// Generates a unique ID, registers a pending request, sends the message, +/// and optionally schedules a timeout that rejects the promise if the +/// server doesn't respond in time. 
+fn send_request( + ctx: &Rc, + kind: RequestKind, + build_msg: impl FnOnce(String) -> ClientMessage, +) -> js_sys::Promise { + let ctx = ctx.clone(); + + let id = { + let mut counter = ctx.request_id_counter.borrow_mut(); + *counter += 1; + format!("r{}", *counter) + }; + + let id_for_promise = id.clone(); + let mut build_msg = Some(build_msg); + js_sys::Promise::new(&mut move |resolve, reject| { + let build_msg = build_msg.take().expect("Promise executor called twice"); + ctx.pending_requests.borrow_mut().insert( + id_for_promise.clone(), + PendingRequest { + _kind: kind, + resolve, + reject: reject.clone(), + }, + ); + + let msg = build_msg(id_for_promise.clone()); + + let state = ctx.state.borrow(); + if state.status != ConnectionStatus::Connected { + drop(state); + ctx.pending_requests.borrow_mut().remove(&id_for_promise); + let _ = reject.call1(&JsValue::NULL, &JsValue::from_str("Not connected")); + return; + } + drop(state); + + if let Err(e) = send_json(&ctx.ws_cell.borrow(), &msg) { + ctx.pending_requests.borrow_mut().remove(&id_for_promise); + let _ = reject.call1( + &JsValue::NULL, + &JsValue::from_str(&format!("Send failed: {e:?}")), + ); + return; + } + + // Schedule timeout if configured + if ctx.config.query_timeout_ms > 0 { + let pending = ctx.pending_requests.clone(); + let timeout_id = id_for_promise.clone(); + let timeout_closure = Closure::once(move || { + if let Some(req) = pending.borrow_mut().remove(&timeout_id) { + let _ = req + .reject + .call1(&JsValue::NULL, &JsValue::from_str("Request timed out")); + } + }); + if let Some(window) = web_sys::window() { + let _ = window.set_timeout_with_callback_and_timeout_and_arguments_0( + timeout_closure.as_ref().unchecked_ref(), + ctx.config.query_timeout_ms as i32, + ); + } + timeout_closure.forget(); + } + }) +} + +/// Reject all pending requests with the given reason. +/// +/// Called on disconnect and on_close to prevent hung promises. 
+fn reject_all_pending(pending: &Rc>>, reason: &str) { + let mut map = pending.borrow_mut(); + let drained: Vec = core::mem::take(&mut *map).into_values().collect(); + for req in drained { + let _ = req + .reject + .call1(&JsValue::NULL, &JsValue::from_str(reason)); + } +} + // โ”€โ”€โ”€ Callback installation (shared by connect + reconnect) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ /// Install WebSocket event callbacks on the current socket in `ctx.ws_cell`. @@ -486,6 +529,10 @@ fn install_ws_callbacks(ctx: &Rc) { } } + let has_cb = ctx.on_status.borrow().is_some(); + web_sys::console::log_1( + &format!("[WsBridge] on_open: status=connected, callback={has_cb}").into(), + ); emit_status(&ctx.on_status, ConnectionStatus::Connected); }) as Box) }; @@ -524,6 +571,10 @@ fn install_ws_callbacks(ctx: &Rc) { s._ping_closure = None; } + // Reject pending requests โ€” correlation IDs are per-socket and + // won't match on a reconnected connection. + reject_all_pending(&ctx.pending_requests, "Connection lost"); + if ctx.config.auto_reconnect { let delay = { let mut s = ctx.state.borrow_mut(); @@ -621,11 +672,7 @@ fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { match msg { ServerMessage::Data { topic, payload, .. } | ServerMessage::Snapshot { topic, payload } => { if let Some(payload) = payload { - let schema = ctx - .schema_map - .iter() - .find(|(k, _)| k == &topic) - .map(|(_, v)| v.as_str()); + let schema = ctx.schema_map.get(&topic).map(|v| v.as_str()); match schema { Some(schema) => { @@ -667,11 +714,7 @@ fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { // Keepalive ACK โ€” reset timer if needed. } ServerMessage::QueryResult { id, records, total } => { - // Resolve the pending promise for this query - if let Some((resolve, _reject)) = ctx.pending_queries.borrow_mut().remove(&id) { - // Convert records to JS array. - // Use json_compatible() so serde_json::Value::Object - // becomes a plain JS object (not a JS Map). 
+ if let Some(req) = ctx.pending_requests.borrow_mut().remove(&id) { let serializer = serde_wasm_bindgen::Serializer::json_compatible(); let arr = js_sys::Array::new(); for rec in &records { @@ -679,7 +722,6 @@ fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { arr.push(&js_val); } } - // Attach metadata as properties on the array let result_obj = js_sys::Object::new(); let _ = js_sys::Reflect::set(&result_obj, &"records".into(), &arr); let _ = js_sys::Reflect::set( @@ -687,11 +729,11 @@ fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { &"total".into(), &JsValue::from_f64(total as f64), ); - let _ = resolve.call1(&JsValue::NULL, &result_obj); + let _ = req.resolve.call1(&JsValue::NULL, &result_obj); } } ServerMessage::TopicList { id, topics } => { - if let Some((resolve, _reject)) = ctx.pending_list_topics.borrow_mut().remove(&id) { + if let Some(req) = ctx.pending_requests.borrow_mut().remove(&id) { let serializer = serde_wasm_bindgen::Serializer::json_compatible(); let arr = js_sys::Array::new(); for topic in &topics { @@ -699,7 +741,7 @@ fn handle_server_message(ctx: &SharedCtx, msg: ServerMessage) { arr.push(&js_val); } } - let _ = resolve.call1(&JsValue::NULL, &arr); + let _ = req.resolve.call1(&JsValue::NULL, &arr); } } } @@ -753,9 +795,46 @@ fn send_json(ws: &web_sys::WebSocket, msg: &ClientMessage) -> Result<(), JsError Ok(()) } -/// Emit status to the registered callback. +/// Emit status change to the registered JS callback **and** via DOM +/// `CustomEvent` on `window` (secondary channel for non-React consumers). +/// +/// The callback is deferred to a microtask via `spawn_local` so that it +/// executes outside the re-entrant WASMโ†”JS call stack created by +/// WebSocket event handlers (on_open, on_close, on_message). Direct +/// `cb.call1()` from inside those handlers silently fails โ€” the call +/// returns `Ok` but the JS function body never runs. 
Yielding once via +/// `Promise.resolve().await` puts us in a clean microtask context (same +/// as `subscribe_typed`), where React state updates flush correctly. fn emit_status(on_status: &Rc>>, status: ConnectionStatus) { - if let Some(cb) = on_status.borrow().as_ref() { - let _ = cb.call1(&JsValue::NULL, &JsValue::from_str(status.as_str())); + // Primary: deferred callback via microtask + let cb = on_status.borrow().as_ref().cloned(); + if let Some(cb) = cb { + let status_str = JsValue::from_str(status.as_str()); + wasm_bindgen_futures::spawn_local(async move { + // Yield once to escape the current WASM call stack + let _ = wasm_bindgen_futures::JsFuture::from(js_sys::Promise::resolve(&JsValue::NULL)) + .await; + if let Err(e) = cb.call1(&JsValue::NULL, &status_str) { + web_sys::console::error_1( + &format!("[WsBridge] emit_status callback threw: {:?}", e).into(), + ); + } + }); + } + // Secondary: DOM CustomEvent for non-React listeners + dispatch_status_event(status); +} + +/// Dispatch a `CustomEvent("aimdb:status")` on `window` with the status +/// string as `event.detail`. 
+fn dispatch_status_event(status: ConnectionStatus) { + let Some(window) = web_sys::window() else { + return; + }; + let init = web_sys::CustomEventInit::new(); + init.set_detail(&JsValue::from_str(status.as_str())); + init.set_bubbles(false); + if let Ok(event) = web_sys::CustomEvent::new_with_event_init_dict("aimdb:status", &init) { + let _ = web_sys::EventTarget::from(window).dispatch_event(&event); } } From ade0e5d2fcb44f8d6002acb7f57dc234d52921cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Mon, 9 Mar 2026 21:40:00 +0000 Subject: [PATCH 26/46] format --- aimdb-wasm-adapter/src/bindings.rs | 72 ++++++++++++++--------------- aimdb-wasm-adapter/src/ws_bridge.rs | 4 +- 2 files changed, 36 insertions(+), 40 deletions(-) diff --git a/aimdb-wasm-adapter/src/bindings.rs b/aimdb-wasm-adapter/src/bindings.rs index 8570fe45..bbfa77a9 100644 --- a/aimdb-wasm-adapter/src/bindings.rs +++ b/aimdb-wasm-adapter/src/bindings.rs @@ -265,7 +265,11 @@ impl WasmDb { /// WASM runtime and should be skipped. #[wasm_bindgen(js_name = "knownSchemas")] pub fn known_schemas(&self) -> Vec { - self.registry.known_names().iter().map(|s| s.to_string()).collect() + self.registry + .known_names() + .iter() + .map(|s| s.to_string()) + .collect() } /// Connect a WebSocket bridge to this database for server synchronization. 
@@ -318,8 +322,7 @@ fn discover_impl(url: String) -> js_sys::Promise { let ws = Rc::new(ws); let resolve_rc: Rc>> = Rc::new(RefCell::new(Some(resolve))); - let reject_rc: Rc>> = - Rc::new(RefCell::new(Some(reject))); + let reject_rc: Rc>> = Rc::new(RefCell::new(Some(reject))); // on_open: send ListTopics { @@ -341,42 +344,40 @@ fn discover_impl(url: String) -> js_sys::Promise { let ws_clone = ws.clone(); let resolve_clone = resolve_rc.clone(); let reject_clone = reject_rc.clone(); - let on_message = - Closure::wrap(Box::new(move |event: web_sys::MessageEvent| { - let _ = ws_clone.close(); - let Some(text) = event.data().as_string() else { + let on_message = Closure::wrap(Box::new(move |event: web_sys::MessageEvent| { + let _ = ws_clone.close(); + let Some(text) = event.data().as_string() else { + if let Some(rej) = reject_clone.borrow_mut().take() { + let _ = rej.call1( + &JsValue::NULL, + &JsValue::from_str("Non-text frame from server"), + ); + } + return; + }; + match serde_json::from_str::(&text) { + Ok(ServerMessage::TopicList { topics, .. }) => { + let serializer = serde_wasm_bindgen::Serializer::json_compatible(); + let arr = js_sys::Array::new(); + for topic in &topics { + if let Ok(js_val) = topic.serialize(&serializer) { + arr.push(&js_val); + } + } + if let Some(res) = resolve_clone.borrow_mut().take() { + let _ = res.call1(&JsValue::NULL, &arr); + } + } + _ => { if let Some(rej) = reject_clone.borrow_mut().take() { let _ = rej.call1( &JsValue::NULL, - &JsValue::from_str("Non-text frame from server"), + &JsValue::from_str("Unexpected server message"), ); } - return; - }; - match serde_json::from_str::(&text) { - Ok(ServerMessage::TopicList { topics, .. 
}) => { - let serializer = - serde_wasm_bindgen::Serializer::json_compatible(); - let arr = js_sys::Array::new(); - for topic in &topics { - if let Ok(js_val) = topic.serialize(&serializer) { - arr.push(&js_val); - } - } - if let Some(res) = resolve_clone.borrow_mut().take() { - let _ = res.call1(&JsValue::NULL, &arr); - } - } - _ => { - if let Some(rej) = reject_clone.borrow_mut().take() { - let _ = rej.call1( - &JsValue::NULL, - &JsValue::from_str("Unexpected server message"), - ); - } - } } - }) as Box); + } + }) as Box); ws.set_onmessage(Some(on_message.as_ref().unchecked_ref())); on_message.forget(); } @@ -417,10 +418,7 @@ fn discover_impl(url: String) -> js_sys::Promise { let reject_clone = reject_rc.clone(); let timeout_cb = Closure::once(move || { if let Some(rej) = reject_clone.borrow_mut().take() { - let _ = rej.call1( - &JsValue::NULL, - &JsValue::from_str("discover timed out"), - ); + let _ = rej.call1(&JsValue::NULL, &JsValue::from_str("discover timed out")); } }); if let Some(window) = web_sys::window() { diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs index 9b825ece..6f9bd400 100644 --- a/aimdb-wasm-adapter/src/ws_bridge.rs +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -463,9 +463,7 @@ fn reject_all_pending(pending: &Rc>>, r let mut map = pending.borrow_mut(); let drained: Vec = core::mem::take(&mut *map).into_values().collect(); for req in drained { - let _ = req - .reject - .call1(&JsValue::NULL, &JsValue::from_str(reason)); + let _ = req.reject.call1(&JsValue::NULL, &JsValue::from_str(reason)); } } From 22aa02baa96aafc3401c3e49cb48c33f0cb027ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Mon, 9 Mar 2026 21:52:05 +0000 Subject: [PATCH 27/46] feat: enhance serialization in WebAssembly bindings with json-compatible serializer --- aimdb-wasm-adapter/src/bindings.rs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/aimdb-wasm-adapter/src/bindings.rs 
b/aimdb-wasm-adapter/src/bindings.rs index bbfa77a9..3796aad5 100644 --- a/aimdb-wasm-adapter/src/bindings.rs +++ b/aimdb-wasm-adapter/src/bindings.rs @@ -487,8 +487,12 @@ where .map_err(|e| JsError::new(&format!("{e:?}")))?; match typed.latest() { - Some(val) => serde_wasm_bindgen::to_value(val.get()) - .map_err(|e| JsError::new(&format!("Serialization failed: {e}"))), + Some(val) => { + let serializer = serde_wasm_bindgen::Serializer::json_compatible(); + val.get() + .serialize(&serializer) + .map_err(|e| JsError::new(&format!("Serialization failed: {e}"))) + } None => Ok(JsValue::UNDEFINED), } } @@ -560,7 +564,8 @@ where match select(recv_fut, cancel_fut).await { Either::Left((Ok(val), _)) => { - if let Ok(js) = serde_wasm_bindgen::to_value(&val) { + let serializer = serde_wasm_bindgen::Serializer::json_compatible(); + if let Ok(js) = val.serialize(&serializer) { let _ = callback.call1(&JsValue::NULL, &js); } } From 2d9cfb812ec2e679168fc7ced9638ffae8cfbe88 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Mon, 9 Mar 2026 22:39:57 +0000 Subject: [PATCH 28/46] chore: update subproject commit reference for embassy --- _external/embassy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_external/embassy b/_external/embassy index 777b2dc3..391ec10b 160000 --- a/_external/embassy +++ b/_external/embassy @@ -1 +1 @@ -Subproject commit 777b2dc3287039a17f151124f9948d5a7e4cd062 +Subproject commit 391ec10b5e5019f9b3d99d3ed0b1f4b54413788f From 1b3dd0563010407387b152c4b8cb945ddb8f7b9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 06:10:45 +0000 Subject: [PATCH 29/46] fix: correct version number in Cargo.toml from 0.5.0 to 0.1.0 --- aimdb-wasm-adapter/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml index ab3fe0fe..54d211a1 100644 --- a/aimdb-wasm-adapter/Cargo.toml +++ b/aimdb-wasm-adapter/Cargo.toml 
@@ -1,6 +1,6 @@ [package] name = "aimdb-wasm-adapter" -version = "0.5.0" +version = "0.1.0" edition = "2021" authors.workspace = true license.workspace = true From 063a06c468bfd841fefbee279d3f3a545f136de9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 06:19:30 +0000 Subject: [PATCH 30/46] fix: update aimdb-wasm-adapter version from 0.5.0 to 0.1.0 and add LICENSE file --- Cargo.lock | 2 +- aimdb-wasm-adapter/LICENSE | 201 +++++++++++++++++++++++++++++++++++++ 2 files changed, 202 insertions(+), 1 deletion(-) create mode 100644 aimdb-wasm-adapter/LICENSE diff --git a/Cargo.lock b/Cargo.lock index 70b38f6b..25335de7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -274,7 +274,7 @@ dependencies = [ [[package]] name = "aimdb-wasm-adapter" -version = "0.5.0" +version = "0.1.0" dependencies = [ "aimdb-core", "aimdb-data-contracts", diff --git a/aimdb-wasm-adapter/LICENSE b/aimdb-wasm-adapter/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/aimdb-wasm-adapter/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. From 152be03d2b40d46ecd1b9688b3c280dd7c947a61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 16:53:18 +0000 Subject: [PATCH 31/46] fix: update onStatusChange callback example to use console.log for clarity --- aimdb-wasm-adapter/src/ws_bridge.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aimdb-wasm-adapter/src/ws_bridge.rs b/aimdb-wasm-adapter/src/ws_bridge.rs index 6f9bd400..db86b21e 100644 --- a/aimdb-wasm-adapter/src/ws_bridge.rs +++ b/aimdb-wasm-adapter/src/ws_bridge.rs @@ -197,7 +197,7 @@ impl WsBridge { /// Register a callback for connection status changes. 
/// /// ```ts - /// bridge.onStatusChange((status: string) => { /* โ€ฆ */ }); + /// bridge.onStatusChange((status) => { console.log(status); }); /// ``` #[wasm_bindgen(js_name = "onStatusChange")] pub fn on_status_change(&self, callback: js_sys::Function) { From b8d2975fd3544633265efad702e51bf4e190696e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 16:59:32 +0000 Subject: [PATCH 32/46] fix: update version number in Cargo.toml from 0.1.0 to 0.1.1 --- aimdb-wasm-adapter/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml index 54d211a1..464edbdd 100644 --- a/aimdb-wasm-adapter/Cargo.toml +++ b/aimdb-wasm-adapter/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aimdb-wasm-adapter" -version = "0.1.0" +version = "0.1.1" edition = "2021" authors.workspace = true license.workspace = true From 4dcb81dc70211d47cddf6cf58fea2218d11137ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 17:00:03 +0000 Subject: [PATCH 33/46] fix: update aimdb-wasm-adapter version from 0.1.0 to 0.1.1 --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 25335de7..946011aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -274,7 +274,7 @@ dependencies = [ [[package]] name = "aimdb-wasm-adapter" -version = "0.1.0" +version = "0.1.1" dependencies = [ "aimdb-core", "aimdb-data-contracts", From a8fbd7ec61460f69fc8547677df47dd5f0873230 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 19:08:05 +0000 Subject: [PATCH 34/46] fix: update subproject commit reference in embassy --- _external/embassy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_external/embassy b/_external/embassy index 391ec10b..9ee8c1f4 160000 --- a/_external/embassy +++ b/_external/embassy @@ -1 +1 @@ -Subproject commit 
391ec10b5e5019f9b3d99d3ed0b1f4b54413788f +Subproject commit 9ee8c1f42da7bf9d58dad0bf06504d3fe3a62025 From e15273badc36c01896e47911bf1934f171a0cda0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 20:14:21 +0000 Subject: [PATCH 35/46] fix: update README.md content and remove unused architecture SVG --- README.md | 226 +++++++++++++++++++++++++++++++++------- assets/architecture.svg | 224 --------------------------------------- 2 files changed, 190 insertions(+), 260 deletions(-) delete mode 100644 assets/architecture.svg diff --git a/README.md b/README.md index d90a7d6a..30886b70 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,8 @@

- - - - AimDB - + AimDB

- Dataflow engine for distributed systems.
- MCU to Cloud. Same API. + Distributed by design. Data-driven by default.

@@ -27,17 +22,155 @@

-Write your data pipeline once. Run it on microcontrollers, edge gateways or Kubernetes โ€” no code changes. AimDB's portable data contracts handle serialization, transforms and schema evolution across all runtimes. +AimDB turns data contracts into the architecture. Define your schemas once โ€” with built-in versioning, observability and serialization โ€” and deploy them unchanged across microcontrollers, edge gateways, Kubernetes and the browser. -

- AimDB Architecture -

+--- + +### Vision + +A future where every system โ€” from a $2 sensor to a global fleet โ€” shares one data language. Contracts define how data moves, evolves and is observed. Infrastructure adapts to the data, not the other way around. + +--- + +### Design Philosophy + +In a data-driven architecture, every design decision starts with the data, not the service that produces it. + +**Records declare their own semantics.** When you register a record in AimDB, you choose a buffer type that defines how the data moves: + +| Buffer | Semantics | Use Cases | +|--------|-----------|-----------| +| **SPMC Ring** | Bounded stream with independent consumers | Sensor telemetry, event logs, interaction streams | +| **SingleLatest** | Only the current value matters | Feature flags, configuration, UI state | +| **Mailbox** | Latest instruction wins | Device commands, actuation, RPC | + +These are the three universal primitives of data movement โ€” portable, typed and runtime-agnostic. + +**Observability becomes automatic.** A record that exists is observable by definition. Every producer and consumer relationship is declared in the builder, not discovered through instrumentation. + +**Synchronization becomes declarative.** You don't build a sync layer between your MCU, edge gateway and cloud backend. You declare a record with connector metadata on its key and the same typed data flows across all environments without translation. + +**Cross-cutting concerns derive from the schema.** Instead of adding observability libraries, feature flag SDKs and experiment frameworks as separate integrations, they become intrinsic properties of records โ€” declared once, applied everywhere. 
+ +--- + +### How It Works + +Define your contracts, choose buffer semantics and wire up connectors โ€” all in one builder block: + +```rust +// A sensor node: produce temperature readings, publish over MQTT +builder.configure::("sensor::temp", |reg| { + reg.buffer(BufferCfg::SpmcRing { capacity: 256 }) + .source(|ctx, producer| async move { + loop { + let reading = read_sensor().await; + producer.send(Temperature::set(reading, now())).await.ok(); + ctx.sleep(Duration::from_secs(1)).await; + } + }) + .link_to("mqtt://sensors/temperature") + .with_serializer(Temperature::to_bytes) + .finish(); +}); + +// An edge gateway: receive from MQTT, observe and forward +builder.configure::("gateway::temp", |reg| { + reg.buffer(BufferCfg::SingleLatest) + .link_from("mqtt://sensors/temperature") + .with_deserializer(Temperature::from_bytes) + .tap(log_tap::("edge")) // [edge] 22.5 ยฐC + .finish(); +}); +``` + +Transport topics can be passed as strings to `link_to` / `link_from`, or declared on key enums with `#[link_address = "mqtt://..."]` and resolved at runtime. No separate instrumentation. No SDK integration. No sync code. + +--- + +### Data Contracts + +Data contracts are the heart of AimDB. A contract is a plain Rust struct that carries its own identity, version and capabilities โ€” the single source of truth from sensor firmware to browser UI. + +```rust +use aimdb_data_contracts::{SchemaType, Settable}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Temperature { + pub celsius: f32, + pub timestamp: u64, +} + +impl SchemaType for Temperature { + const NAME: &'static str = "temperature"; + const VERSION: u32 = 1; +} + +impl Settable for Temperature { + type Value = f32; + fn set(value: Self::Value, timestamp: u64) -> Self { + Self { celsius: value, timestamp } + } +} +``` + +This struct compiles for `no_std` embedded targets and standard Rust alike. `SchemaType` gives the record its identity and version. 
`Settable` provides a canonical constructor so producers can create records from a raw value โ€” this is the interface used by `producer.send(Temperature::set(reading, now()))` in the builder. + +#### Contract Attributes + +Contracts gain capabilities through trait implementations. Each trait is a compile-time declaration of what a contract *can do*, not a runtime configuration: + +| Attribute | Trait | What It Enables | +|-----------|-------|-----------------| +| **Settable** | `Settable` | Canonical constructor from a raw value โ€” the interface behind `producer.send(T::set(value, ts))`. | +| **Streamable** | `Streamable` | Cross-boundary transport โ€” WASM, WebSocket, CLI. One registry, zero parallel type systems. | +| **Migratable** | `MigrationStep` | Bidirectional schema evolution with typed up/down transforms and chained version steps. | +| **Observable** | `Observable` | Signal extraction for thresholds, logging and monitoring. Icon, unit and `format_log()` built in. | +| **Linkable** | `Linkable` | Wire-format serialization for connectors โ€” MQTT, KNX and any future transport. | +| **Simulatable** | `Simulatable` | Realistic test data generation with random walks, trends and configurable parameters. | + +For example, `Observable` turns a contract into a loggable, monitorable signal: + +```rust +impl Observable for Temperature { + type Signal = f32; + const ICON: &'static str = "thermometer"; + const UNIT: &'static str = "ยฐC"; + + fn signal(&self) -> f32 { self.celsius } + + fn format_log(&self, node_id: &str) -> String { + format!("[{}] {:.1} ยฐC", node_id, self.celsius) + } +} +``` + +Each trait you implement unlocks a capability โ€” contracts without `Observable` simply can't be tapped; contracts without `Linkable` can't be wired to connectors. The type system enforces what your data can do. 
+ +#### Platform-Agnostic by Design + +The same contract works across all runtimes without modification: + +``` +โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” +โ”‚ Temperature Contract โ”‚ +โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค +โ”‚ MCU (Embassy) โ”‚ Edge (Tokio) โ”‚ Cloud (Tokio) โ”‚ Browser (WASM) โ”‚ +โ”‚ no_std + alloc โ”‚ std โ”‚ Kubernetes โ”‚ wasm32 โ”‚ +โ”‚ Cortex-M4 โ”‚ Linux / RPi โ”‚ Full featured โ”‚ Single-threaded โ”‚ +โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ +``` + +The Rust type system enforces correctness at compile time. The dataflow engine's buffer semantics enforce flow guarantees at runtime. Connectors wire everything to your infrastructure without an integration layer. --- ### Getting Started -**See it live** โ€” explore a running sensor mesh at [aimdb.dev](https://aimdb.dev) +#### 1. See it live + +Explore a running sensor mesh โ€” no setup required:

@@ -45,37 +178,56 @@ Write your data pipeline once. Run it on microcontrollers, edge gateways or Kube

-**Run locally** โ€” full MCU โ†’ edge โ†’ cloud mesh in Docker: +> **[aimdb.dev](https://aimdb.dev)** โ€” live weather stations streaming typed contracts across MCU, edge and cloud. + +#### 2. Run locally + +Spin up a full MCU โ†’ edge โ†’ cloud mesh with one command: ```bash cd examples/weather-mesh-demo docker compose up ``` -Then ask VS Code Copilot: *"What's the current temperature from station ...?"* ([MCP setup required](examples/weather-mesh-demo/)) +This starts three weather stations, an MQTT broker and a central hub โ€” all wired together with typed data contracts. + +#### 3. Explore with AI + +With the mesh running, connect an MCP-compatible editor to query your data in natural language:

AimDB MCP Live Demo

-**Learn more:** -- [Quick Start Guide](https://aimdb.dev/docs/getting-started) โ€” Dependency setup and API basics -- [Data Contracts](https://aimdb.dev/docs/data-contracts) โ€” Type-safe schemas -- [Connectors](https://aimdb.dev/docs/connectors) โ€” MQTT, KNX and more -- [Deployment](https://aimdb.dev/docs/deployment) โ€” Running on MCU, edge and cloud -- [API Reference](https://docs.rs/aimdb-core) โ€” Full Rust API documentation +Install the MCP server and add it to your workspace: ---- +```bash +cargo install aimdb-mcp +``` -### Why AimDB? +`.vscode/mcp.json`: -A real-time data runtime that adapts to your infrastructure, not the other way around. +```json +{ + "servers": { + "aimdb": { + "type": "stdio", + "command": "${userHome}/.cargo/bin/aimdb-mcp" + } + } +} +``` -| Problem | AimDB Solution | -|---------|----------------| -| **Runs Where Data Starts** | From $2 MCUs to Kubernetes clusters. Deploy the same code anywhere, process data at the source. | -| **Same API Everywhere** | Tokio + Embassy compatible, embedded-friendly, `no_std`-ready. One interface across all your runtimes. | -| **Built for Continuous Change** | Unified data layer with schema evolution built in. Your data pipelines adapt as fast as your business. | +Then ask: *"What's the current temperature from station alpha?"* โ€” see the [MCP server docs](tools/aimdb-mcp/) for Claude Desktop and other editors. + +#### 4. 
Build your own + +- [Quick Start Guide](https://aimdb.dev/docs/getting-started) โ€” Dependencies, platform setup and your first contract +- [Data Contracts](https://aimdb.dev/docs/data-contracts) โ€” Type-safe schemas with built-in capabilities +- [Connectors](https://aimdb.dev/docs/connectors) โ€” MQTT, KNX, WebSocket and more +- [Deployment](https://aimdb.dev/docs/deployment) โ€” Running on MCU, edge, cloud and browser +- [API Reference](https://docs.rs/aimdb-core) โ€” Full Rust API documentation +- [Blog](https://aimdb.dev/blog) โ€” News, tutorials and insights from the AimDB team --- @@ -85,7 +237,7 @@ A real-time data runtime that adapts to your infrastructure, not the other way a |----------|-------|--------|----------| | **MQTT** | `aimdb-mqtt-connector` | โœ… Ready | std, no_std | | **KNX** | `aimdb-knx-connector` | โœ… Ready | std, no_std | -| **HTTP/REST** | โ€” | ๐Ÿ”จ Building | std | +| **WebSocket** | `aimdb-websocket-connector` | โœ… Ready | std, wasm | | **Kafka** | โ€” | ๐Ÿ“‹ Planned | std | | **Modbus** | โ€” | ๐Ÿ“‹ Planned | std, no_std | @@ -93,12 +245,12 @@ A real-time data runtime that adapts to your infrastructure, not the other way a ### Platform Support -| Target | Runtime | Features | Footprint | -|--------|---------|----------|-----------| -| **ARM Cortex-M** (STM32H5, STM32F4) | Embassy | no_std, async | ~50KB+ | -| **ARM Cortex-M** (STM32H5, STM32F4) | FreeRTOS | ๐Ÿ“‹ Planned | โ€” | -| **Linux Edge Devices** | Tokio | Full std | ~10MB+ | -| **Containers/K8s** | Tokio | Full std | ~10MB+ | +| Target | Runtime | Adapter | Features | Footprint | +|--------|---------|---------|----------|-----------| +| **ARM Cortex-M** (STM32H5, STM32F4) | Embassy | `aimdb-embassy-adapter` | no_std, async | ~50KB+ | +| **Linux Edge** (RPi, gateways) | Tokio | `aimdb-tokio-adapter` | Full std | ~10MB+ | +| **Containers / K8s** | Tokio | `aimdb-tokio-adapter` | Full std | ~10MB+ | +| **Browser / SPA** | WASM | `aimdb-wasm-adapter` | wasm32, single-threaded | 
~2MB+ | --- @@ -119,5 +271,7 @@ Want to contribute? See the [contributing guide](CONTRIBUTING.md). We have [good ---

- Write once. Deploy anywhere. Pay only where it makes sense. + Define once. Deploy anywhere. Observe everything. +

+ Get started ยท Live demo ยท Join the discussion

diff --git a/assets/architecture.svg b/assets/architecture.svg deleted file mode 100644 index 02018153..00000000 --- a/assets/architecture.svg +++ /dev/null @@ -1,224 +0,0 @@ - - - - - - - - Data Contract - Temperature { - celsius: f32 - sensor_id: String - } - + serialize() + transform() - - - - - - - - - - - - - - - - - - - - - MCU - Embassy - no_std / 64KB - - - - - Edge - Tokio - Linux / RPi - - - - - Cloud - Tokio - Kubernetes - - - - - MQTT - - - - - - - Kafka - - - - - - Same code. Your deployment. Your costs. - - \ No newline at end of file From 0d13b72daff47c45afd5d009f8a0c75111d501fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 20:29:47 +0000 Subject: [PATCH 36/46] fix: update publishing sequence in Makefile to reflect the correct count of packages --- Makefile | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index e2ae5178..32efaf6d 100644 --- a/Makefile +++ b/Makefile @@ -332,51 +332,51 @@ publish: else \ printf "$(BLUE)Running in CI mode - skipping confirmation$(NC)\n"; \ fi - @printf "$(YELLOW) โ†’ Publishing aimdb-executor (1/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-executor (1/16)$(NC)\n" @cargo publish -p aimdb-executor @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-derive (2/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-derive (2/16)$(NC)\n" @cargo publish -p aimdb-derive @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-codegen (3/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-codegen (3/16)$(NC)\n" @cargo publish -p aimdb-codegen @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-core (4/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-core (4/16)$(NC)\n" @cargo publish -p aimdb-core 
@printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-tokio-adapter (5/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-tokio-adapter (5/16)$(NC)\n" @cargo publish -p aimdb-tokio-adapter @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-embassy-adapter (6/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-embassy-adapter (6/16)$(NC)\n" @cargo publish -p aimdb-embassy-adapter --no-verify @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-client (7/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-client (7/16)$(NC)\n" @cargo publish -p aimdb-client @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-sync (8/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-sync (8/16)$(NC)\n" @cargo publish -p aimdb-sync @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-persistence (9/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-persistence (9/16)$(NC)\n" @cargo publish -p aimdb-persistence @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-persistence-sqlite (10/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-persistence-sqlite (10/16)$(NC)\n" @cargo publish -p aimdb-persistence-sqlite @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-mqtt-connector (11/15)$(NC)\n" + @printf "$(YELLOW) โ†’ Publishing aimdb-mqtt-connector (11/16)$(NC)\n" @cargo publish -p aimdb-mqtt-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 - @printf "$(YELLOW) โ†’ Publishing aimdb-knx-connector (12/15)$(NC)\n" + @printf "$(YELLOW) โ†’ 
Publishing aimdb-knx-connector (12/16)$(NC)\n" @cargo publish -p aimdb-knx-connector @printf "$(YELLOW) โ†’ Waiting 10s for crates.io propagation...$(NC)\n" @sleep 10 From fec4a1c889b34345a9f709b46d85d6101014a619 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 20:45:44 +0000 Subject: [PATCH 37/46] fix: update aimdb-codegen version from 0.5.0 to 0.1.0 in multiple Cargo.toml files --- Cargo.lock | 2 +- aimdb-codegen/Cargo.toml | 2 +- tools/aimdb-cli/Cargo.toml | 2 +- tools/aimdb-mcp/Cargo.toml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 946011aa..1cbab3c3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -60,7 +60,7 @@ dependencies = [ [[package]] name = "aimdb-codegen" -version = "0.5.0" +version = "0.1.0" dependencies = [ "prettyplease", "proc-macro2", diff --git a/aimdb-codegen/Cargo.toml b/aimdb-codegen/Cargo.toml index 99ca32fc..b0ff2e0a 100644 --- a/aimdb-codegen/Cargo.toml +++ b/aimdb-codegen/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aimdb-codegen" -version = "0.5.0" +version = "0.1.0" edition = "2021" authors.workspace = true license.workspace = true diff --git a/tools/aimdb-cli/Cargo.toml b/tools/aimdb-cli/Cargo.toml index 627d2118..078e107e 100644 --- a/tools/aimdb-cli/Cargo.toml +++ b/tools/aimdb-cli/Cargo.toml @@ -17,7 +17,7 @@ path = "src/main.rs" [dependencies] # AimDB client library aimdb-client = { version = "0.5.0", path = "../../aimdb-client" } -aimdb-codegen = { version = "0.5.0", path = "../../aimdb-codegen" } +aimdb-codegen = { version = "0.1.0", path = "../../aimdb-codegen" } # Core dependencies - reuse protocol types from aimdb-core aimdb-core = { version = "0.5.0", path = "../../aimdb-core", features = [ diff --git a/tools/aimdb-mcp/Cargo.toml b/tools/aimdb-mcp/Cargo.toml index f8590978..f55d058c 100644 --- a/tools/aimdb-mcp/Cargo.toml +++ b/tools/aimdb-mcp/Cargo.toml @@ -27,7 +27,7 @@ path = "src/lib.rs" [dependencies] # AimDB dependencies 
aimdb-client = { version = "0.5.0", path = "../../aimdb-client" } -aimdb-codegen = { version = "0.5.0", path = "../../aimdb-codegen" } +aimdb-codegen = { version = "0.1.0", path = "../../aimdb-codegen" } aimdb-core = { version = "0.5.0", path = "../../aimdb-core", features = [ "std", ] } From 05c3d92661b4e1a8472f0c811d46fcc8f42f32c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 21:02:59 +0000 Subject: [PATCH 38/46] fix: add wasm32-unknown-unknown target to Rust toolchain in CI workflows --- .github/workflows/ci.yml | 6 +++--- .github/workflows/docs.yml | 2 ++ .github/workflows/release.yml | 3 ++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a019ed9c..b3cefdc5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@stable with: - targets: thumbv7em-none-eabihf + targets: thumbv7em-none-eabihf, wasm32-unknown-unknown components: rustfmt, clippy - name: Cache dependencies @@ -52,7 +52,7 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@stable with: - targets: thumbv7em-none-eabihf + targets: thumbv7em-none-eabihf, wasm32-unknown-unknown - name: Cache dependencies uses: actions/cache@v3 @@ -108,7 +108,7 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@stable with: - targets: thumbv7em-none-eabihf + targets: thumbv7em-none-eabihf, wasm32-unknown-unknown components: rustfmt, clippy - name: Cache dependencies diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 8794e76f..f2e4fe78 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -23,6 +23,8 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@stable + with: + targets: wasm32-unknown-unknown - name: Cache dependencies uses: actions/cache@v4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 707a4ef2..ffc84eea 100644 
--- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -23,7 +23,8 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@stable with: - targets: thumbv7em-none-eabihf + targets: thumbv7em-none-eabihf, wasm32-unknown-unknown + components: rustfmt, clippy - name: Cache dependencies uses: actions/cache@v3 From e2f78055c942854b12816f932532ed32b3a178ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Tue, 10 Mar 2026 22:08:56 +0000 Subject: [PATCH 39/46] fix: add unzip package and install fnm with shell integration in Dockerfile --- .devcontainer/Dockerfile | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 89ab4e02..5081c53f 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -34,6 +34,7 @@ RUN apt-get update && apt-get install -y \ libudev-dev \ # JSON processing jq \ + unzip \ && rm -rf /var/lib/apt/lists/* # -------------------------------------------------------------------- @@ -75,10 +76,29 @@ RUN cargo install probe-rs --features cli || \ echo "Run: cargo install probe-rs --features cli") # -------------------------------------------------------------------- -# Use Bash for subsequent RUN instructions (important for nvm) +# Use Bash for subsequent RUN instructions (important for fnm) # -------------------------------------------------------------------- SHELL ["/bin/bash", "-c"] +# -------------------------------------------------------------------- +# Install fnm (Fast Node Manager) +# -------------------------------------------------------------------- +RUN curl -fsSL https://fnm.vercel.app/install | bash -s -- --skip-shell \ + && export PATH="/home/$USERNAME/.local/share/fnm:$PATH" \ + && fnm install 20 \ + && fnm default 20 + +# Add fnm shell integration to bashrc +RUN echo '' >> /home/$USERNAME/.bashrc \ + && echo '# fnm' >> /home/$USERNAME/.bashrc \ + && echo 
'FNM_PATH="/home/'"$USERNAME"'/.local/share/fnm"' >> /home/$USERNAME/.bashrc \ + && echo 'if [ -d "$FNM_PATH" ]; then' >> /home/$USERNAME/.bashrc \ + && echo ' export PATH="$FNM_PATH:$PATH"' >> /home/$USERNAME/.bashrc \ + && echo ' eval "$(fnm env --shell bash)"' >> /home/$USERNAME/.bashrc \ + && echo 'fi' >> /home/$USERNAME/.bashrc + +ENV PATH="/home/$USERNAME/.local/share/fnm/aliases/default/bin:/home/$USERNAME/.local/share/fnm:${PATH}" + # -------------------------------------------------------------------- # Final working directory # -------------------------------------------------------------------- From a71fbcd89b9a7bacf9e8bb995a88538a37741f78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Wed, 11 Mar 2026 19:08:18 +0000 Subject: [PATCH 40/46] fix: improve state resource handling and update MIME type logic in architecture.rs --- tools/aimdb-mcp/src/resources/architecture.rs | 29 ++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/tools/aimdb-mcp/src/resources/architecture.rs b/tools/aimdb-mcp/src/resources/architecture.rs index 7b6a212b..1afe021b 100644 --- a/tools/aimdb-mcp/src/resources/architecture.rs +++ b/tools/aimdb-mcp/src/resources/architecture.rs @@ -94,11 +94,14 @@ pub fn read_resource(uri: &str) -> McpResult { Some(state) => aimdb_codegen::generate_mermaid(state), }, - "aimdb://architecture/state" => match &state_opt { - None => "# No state.toml found.\n".to_string(), - Some(state) => serde_json::to_string_pretty(state) - .map_err(|e| McpError::Internal(format!("serialising state: {e}")))?, - }, + "aimdb://architecture/state" => { + if state_path.exists() { + std::fs::read_to_string(&state_path) + .map_err(|e| McpError::Internal(format!("reading state.toml: {e}")))? 
+ } else { + "# No state.toml found.\n".to_string() + } + } "aimdb://architecture/conflicts" => match &state_opt { None => serde_json::to_string_pretty(&serde_json::json!({ @@ -144,11 +147,17 @@ pub fn read_resource(uri: &str) -> McpResult { } }; - let mime_type = if uri.ends_with("architecture") { - Some("text/plain".to_string()) - } else { - Some("application/json".to_string()) - }; + let mime_type = Some( + match uri { + "aimdb://architecture" => "text/plain", + "aimdb://architecture/state" => "application/toml", + "aimdb://architecture/conflicts" => "application/json", + "aimdb://architecture/conventions" => "text/markdown", + "aimdb://architecture/memory" => "text/markdown", + _ => "text/plain", + } + .to_string(), + ); Ok(ResourceReadResult { contents: vec![ResourceContent { From d4e87a0758ab06b904906bfe0175da2c65ab311c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Wed, 11 Mar 2026 19:13:57 +0000 Subject: [PATCH 41/46] fix: update architecture resource reading to use async I/O and improve state handling --- tools/aimdb-mcp/Cargo.toml | 1 + tools/aimdb-mcp/src/resources/architecture.rs | 120 +++++++++++------- tools/aimdb-mcp/src/resources/mod.rs | 2 +- 3 files changed, 76 insertions(+), 47 deletions(-) diff --git a/tools/aimdb-mcp/Cargo.toml b/tools/aimdb-mcp/Cargo.toml index f55d058c..bef3ce22 100644 --- a/tools/aimdb-mcp/Cargo.toml +++ b/tools/aimdb-mcp/Cargo.toml @@ -34,6 +34,7 @@ aimdb-core = { version = "0.5.0", path = "../../aimdb-core", features = [ # Async runtime tokio = { version = "1.48", features = [ + "fs", "io-std", "macros", "rt-multi-thread", diff --git a/tools/aimdb-mcp/src/resources/architecture.rs b/tools/aimdb-mcp/src/resources/architecture.rs index 1afe021b..25e4cb44 100644 --- a/tools/aimdb-mcp/src/resources/architecture.rs +++ b/tools/aimdb-mcp/src/resources/architecture.rs @@ -10,6 +10,7 @@ use crate::architecture::{default_memory_path, default_state_path, read_state}; use crate::error::{McpError, 
McpResult}; use crate::protocol::{Resource, ResourceContent, ResourceReadResult}; +use std::path::PathBuf; use tracing::debug; /// Mermaid diagram conventions โ€” embedded at compile time so the binary is @@ -80,64 +81,39 @@ pub fn list_resources() -> Vec { // โ”€โ”€ read โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ /// Read a single architecture resource by URI. -pub fn read_resource(uri: &str) -> McpResult { +pub async fn read_resource(uri: &str) -> McpResult { debug!("architecture read_resource: {uri}"); - let state_path = default_state_path(); - - let state_opt = read_state(&state_path) - .map_err(|e| McpError::Internal(format!("reading state.toml: {e}")))?; - let text = match uri { - "aimdb://architecture" => match &state_opt { - None => "No state.toml found. Run the onboarding prompt to get started.".to_string(), - Some(state) => aimdb_codegen::generate_mermaid(state), - }, + // โ”€โ”€ diagram & conflicts need the parsed state โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + "aimdb://architecture" | "aimdb://architecture/conflicts" => { + let uri_owned = uri.to_string(); + tokio::task::spawn_blocking(move || read_with_state(&uri_owned)) + .await + .map_err(|e| McpError::Internal(format!("spawn_blocking join: {e}")))? + .map_err(|e| McpError::Internal(format!("reading state.toml: {e}")))? + } + // โ”€โ”€ raw TOML โ€” just return the file bytes โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ "aimdb://architecture/state" => { - if state_path.exists() { - std::fs::read_to_string(&state_path) - .map_err(|e| McpError::Internal(format!("reading state.toml: {e}")))? - } else { - "# No state.toml found.\n".to_string() - } + let state_path = default_state_path(); + read_file_or_fallback(&state_path, "# No state.toml found.\n", "state.toml").await? 
} - "aimdb://architecture/conflicts" => match &state_opt { - None => serde_json::to_string_pretty(&serde_json::json!({ - "errors": [], - "warnings": [], - "note": "No state.toml found" - })) - .unwrap(), - Some(state) => { - let errors = aimdb_codegen::validate(state); - serde_json::to_string_pretty(&serde_json::json!({ - "errors": errors.iter() - .filter(|e| e.severity == aimdb_codegen::Severity::Error) - .map(|e| serde_json::json!({ "location": e.location, "message": e.message })) - .collect::>(), - "warnings": errors.iter() - .filter(|e| e.severity == aimdb_codegen::Severity::Warning) - .map(|e| serde_json::json!({ "location": e.location, "message": e.message })) - .collect::>(), - })) - .map_err(|e| McpError::Internal(format!("serialising conflicts: {e}")))? - } - }, - + // โ”€โ”€ conventions โ€” compile-time constant, no I/O โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ "aimdb://architecture/conventions" => CONVENTIONS.to_string(), + // โ”€โ”€ memory โ€” read .aimdb/memory.md โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ "aimdb://architecture/memory" => { let memory_path = default_memory_path(); - if memory_path.exists() { - std::fs::read_to_string(&memory_path) - .map_err(|e| McpError::Internal(format!("reading memory.md: {e}")))? - } else { + read_file_or_fallback( + &memory_path, "# AimDB Architecture Memory\n\nNo memory recorded yet. \ - The architecture agent will populate this after the first confirmed proposal.\n" - .to_string() - } + The architecture agent will populate this after the first \ + confirmed proposal.\n", + "memory.md", + ) + .await? 
} _ => { @@ -168,3 +144,55 @@ pub fn read_resource(uri: &str) -> McpResult { }], }) } + +// โ”€โ”€ helpers โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ + +/// Read a file with `tokio::fs`, returning a fallback string when absent. +async fn read_file_or_fallback(path: &PathBuf, fallback: &str, label: &str) -> McpResult { + match tokio::fs::read_to_string(path).await { + Ok(contents) => Ok(contents), + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(fallback.to_string()), + Err(e) => Err(McpError::Internal(format!("reading {label}: {e}"))), + } +} + +/// Synchronous helper executed inside `spawn_blocking`. +/// Reads + parses state.toml and produces the text for `/` or `/conflicts`. +fn read_with_state(uri: &str) -> Result { + let state_path = default_state_path(); + let state_opt = read_state(&state_path).map_err(|e| e.to_string())?; + + match uri { + "aimdb://architecture" => match state_opt { + None => { + Ok("No state.toml found. 
Run the onboarding prompt to get started.".to_string()) + } + Some(state) => Ok(aimdb_codegen::generate_mermaid(&state)), + }, + + "aimdb://architecture/conflicts" => match state_opt { + None => Ok(serde_json::to_string_pretty(&serde_json::json!({ + "errors": [], + "warnings": [], + "note": "No state.toml found" + })) + .unwrap()), + Some(state) => { + let errors = aimdb_codegen::validate(&state); + serde_json::to_string_pretty(&serde_json::json!({ + "errors": errors.iter() + .filter(|e| e.severity == aimdb_codegen::Severity::Error) + .map(|e| serde_json::json!({ "location": e.location, "message": e.message })) + .collect::>(), + "warnings": errors.iter() + .filter(|e| e.severity == aimdb_codegen::Severity::Warning) + .map(|e| serde_json::json!({ "location": e.location, "message": e.message })) + .collect::>(), + })) + .map_err(|e| format!("serialising conflicts: {e}")) + } + }, + + _ => unreachable!("read_with_state called for unexpected URI: {uri}"), + } +} diff --git a/tools/aimdb-mcp/src/resources/mod.rs b/tools/aimdb-mcp/src/resources/mod.rs index 2ce05def..8e2e6413 100644 --- a/tools/aimdb-mcp/src/resources/mod.rs +++ b/tools/aimdb-mcp/src/resources/mod.rs @@ -22,7 +22,7 @@ pub async fn list_resources() -> McpResult { /// Read a resource by URI (instances first, then architecture) pub async fn read_resource(uri: &str) -> McpResult { if uri.starts_with("aimdb://architecture") { - architecture::read_resource(uri) + architecture::read_resource(uri).await } else { instances::read_resource(uri).await } From bb5929c9552d3f7bc2cd097a6e03d5fc45bce8af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Wed, 11 Mar 2026 19:23:17 +0000 Subject: [PATCH 42/46] fix: enhance safety checks for SendFuture and Wasm types in wasm32 context --- aimdb-wasm-adapter/src/time.rs | 40 ++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/aimdb-wasm-adapter/src/time.rs b/aimdb-wasm-adapter/src/time.rs index 
268a9b95..895ba8de 100644 --- a/aimdb-wasm-adapter/src/time.rs +++ b/aimdb-wasm-adapter/src/time.rs @@ -16,11 +16,26 @@ use core::task::{Context, Poll}; /// /// # Safety /// -/// Only safe on `wasm32-unknown-unknown` where all execution is single-threaded. +/// Only safe on `wasm32-unknown-unknown` where all execution is single-threaded +/// **without** the `atomics` / shared-memory proposal enabled. /// The inner future will never actually be sent between threads. +/// +/// The `Send` impl is gated on `target_arch = "wasm32"` so this type cannot +/// accidentally satisfy a `Send` bound when cross-compiled for a native target. pub(crate) struct SendFuture(pub(crate) F); -// SAFETY: wasm32 is single-threaded โ€” the future cannot be sent to another thread +// Guard: detect wasm32 + threads (atomics target feature). The shared-memory +// proposal makes wasm multi-threaded, which invalidates the Send blanket impl. +#[cfg(all(target_arch = "wasm32", target_feature = "atomics"))] +compile_error!( + "SendFuture's blanket `impl Send` is unsound with wasm threads enabled. \ + Disable the `atomics` target feature or provide a thread-safe implementation." +); + +// SAFETY: wasm32 (without atomics) is single-threaded โ€” the future cannot be +// sent to another thread. On non-wasm targets this impl is absent, so +// SendFuture is only Send when F: Send, which is the correct default. +#[cfg(target_arch = "wasm32")] unsafe impl Send for SendFuture {} impl Future for SendFuture { @@ -43,10 +58,17 @@ pub struct WasmInstant(pub(crate) f64); #[derive(Clone, Debug)] pub struct WasmDuration(pub(crate) f64); -// SAFETY: single-threaded wasm32 โ€” no concurrent access possible +// SAFETY: single-threaded wasm32 (without atomics) โ€” no concurrent access +// possible. On non-wasm targets these impls are absent; the types will only +// be Send/Sync if their fields are (f64 is Send+Sync, so this is fine either +// way, but we keep the cfg gate for consistency and to document intent). 
+#[cfg(target_arch = "wasm32")] unsafe impl Send for WasmInstant {} +#[cfg(target_arch = "wasm32")] unsafe impl Sync for WasmInstant {} +#[cfg(target_arch = "wasm32")] unsafe impl Send for WasmDuration {} +#[cfg(target_arch = "wasm32")] unsafe impl Sync for WasmDuration {} // โ”€โ”€โ”€ globalThis helpers (Window + Worker compatible) โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ @@ -54,7 +76,7 @@ unsafe impl Sync for WasmDuration {} /// Get `performance.now()` from `globalThis`. /// /// Works in Window, Worker, and ServiceWorker contexts. -#[cfg(feature = "wasm-runtime")] +#[cfg(all(feature = "wasm-runtime", target_arch = "wasm32"))] fn global_performance_now() -> f64 { use wasm_bindgen::JsCast; @@ -74,7 +96,7 @@ fn global_performance_now() -> f64 { /// Call `globalThis.setTimeout(callback, delay)`. /// /// Works in Window, Worker, and ServiceWorker contexts. -#[cfg(feature = "wasm-runtime")] +#[cfg(all(feature = "wasm-runtime", target_arch = "wasm32"))] fn global_set_timeout(callback: &js_sys::Function, delay_ms: i32) { use wasm_bindgen::JsCast; @@ -92,12 +114,12 @@ impl TimeOps for WasmAdapter { type Duration = WasmDuration; fn now(&self) -> WasmInstant { - #[cfg(feature = "wasm-runtime")] + #[cfg(all(feature = "wasm-runtime", target_arch = "wasm32"))] { WasmInstant(global_performance_now()) } - #[cfg(not(feature = "wasm-runtime"))] + #[cfg(not(all(feature = "wasm-runtime", target_arch = "wasm32")))] { // Fallback for native-target unit tests โ€” monotonic counter WasmInstant(0.0) @@ -126,7 +148,7 @@ impl TimeOps for WasmAdapter { } fn sleep(&self, duration: WasmDuration) -> impl Future + Send { - #[cfg(feature = "wasm-runtime")] + #[cfg(all(feature = "wasm-runtime", target_arch = "wasm32"))] { use futures_util::FutureExt; @@ -145,7 +167,7 @@ impl TimeOps for WasmAdapter { SendFuture(fut) } - #[cfg(not(feature = "wasm-runtime"))] + #[cfg(not(all(feature = "wasm-runtime", target_arch = "wasm32")))] { let _ = duration; // Fallback for 
native-target unit tests โ€” resolve immediately From 6412caecd7481d55f7695c3f6dd6982a47e6ddee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Wed, 11 Mar 2026 19:26:21 +0000 Subject: [PATCH 43/46] fix: improve output message formatting in write_if_changed function --- tools/aimdb-cli/src/commands/generate.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tools/aimdb-cli/src/commands/generate.rs b/tools/aimdb-cli/src/commands/generate.rs index 1ed61286..87aa4701 100644 --- a/tools/aimdb-cli/src/commands/generate.rs +++ b/tools/aimdb-cli/src/commands/generate.rs @@ -294,11 +294,10 @@ fn write_if_changed(path: &Path, contents: &str, label: &str) -> CliResult<()> { if changed { std::fs::write(path, contents).with_context(|| format!("writing {}", path.display()))?; - println!(" {} {} written", "โ†’".cyan(), path.display()); + println!(" {} {} ({label})", "โ†’".cyan(), path.display()); } else { println!(" {} {} unchanged", "ยท".dimmed(), path.display()); } - let _ = label; // Suppresses unused warning; label available for future verbose output Ok(()) } From 43c4c0b046e5926d9fc12bdfa329a87f7fd0465d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Wed, 11 Mar 2026 19:28:18 +0000 Subject: [PATCH 44/46] fix: correct documentation for architecture resource descriptors count --- tools/aimdb-mcp/src/resources/architecture.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/aimdb-mcp/src/resources/architecture.rs b/tools/aimdb-mcp/src/resources/architecture.rs index 25e4cb44..397e53cc 100644 --- a/tools/aimdb-mcp/src/resources/architecture.rs +++ b/tools/aimdb-mcp/src/resources/architecture.rs @@ -19,7 +19,7 @@ const CONVENTIONS: &str = include_str!("../../assets/CONVENTIONS.md"); // โ”€โ”€ list 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ -/// Return the 6 architecture resource descriptors. +/// Return the 5 architecture resource descriptors. pub fn list_resources() -> Vec { vec![ Resource { From d17bd4401c855310b12c769e6c7651ca2f9cfdc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Wed, 11 Mar 2026 19:30:31 +0000 Subject: [PATCH 45/46] fix: update documentation to reference the architecture conventions document --- aimdb-codegen/src/mermaid.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aimdb-codegen/src/mermaid.rs b/aimdb-codegen/src/mermaid.rs index 27dca7fb..4b7c4b01 100644 --- a/aimdb-codegen/src/mermaid.rs +++ b/aimdb-codegen/src/mermaid.rs @@ -1,7 +1,7 @@ //! Mermaid diagram generator //! //! Converts an [`ArchitectureState`] into a `flowchart LR` Mermaid diagram -//! following the conventions defined in `.aimdb/CONVENTIONS.md`. +//! following the conventions defined in the architecture conventions document. use crate::state::{ArchitectureState, BufferType, ConnectorDirection}; From 4b24ee26dcf490635fab1486913c613eb70822c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Schn=C3=B6rch?= Date: Wed, 11 Mar 2026 19:30:59 +0000 Subject: [PATCH 46/46] fix: update subproject reference for embassy --- _external/embassy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_external/embassy b/_external/embassy index 9ee8c1f4..e63316a9 160000 --- a/_external/embassy +++ b/_external/embassy @@ -1 +1 @@ -Subproject commit 9ee8c1f42da7bf9d58dad0bf06504d3fe3a62025 +Subproject commit e63316a9409d15a460a9d2821c344b8eaf93a498