diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 68b93a7d..5081c53f 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -34,6 +34,7 @@ RUN apt-get update && apt-get install -y \
libudev-dev \
# JSON processing
jq \
+ unzip \
&& rm -rf /var/lib/apt/lists/*
# --------------------------------------------------------------------
@@ -63,7 +64,8 @@ ENV PATH="/home/$USERNAME/.cargo/bin:${PATH}"
# Add embedded targets for AimDB MCU support
RUN rustup target add thumbv7em-none-eabihf \
&& rustup target add thumbv6m-none-eabi \
- && rustup target add thumbv7m-none-eabi
+ && rustup target add thumbv7m-none-eabi \
+ && rustup target add wasm32-unknown-unknown
# Install core AimDB development tools
RUN cargo install cargo-audit cargo-watch cargo-expand
@@ -74,10 +76,29 @@ RUN cargo install probe-rs --features cli || \
echo "Run: cargo install probe-rs --features cli")
# --------------------------------------------------------------------
-# Use Bash for subsequent RUN instructions (important for nvm)
+# Use Bash for subsequent RUN instructions (important for fnm)
# --------------------------------------------------------------------
SHELL ["/bin/bash", "-c"]
+# --------------------------------------------------------------------
+# Install fnm (Fast Node Manager)
+# --------------------------------------------------------------------
+RUN curl -fsSL https://fnm.vercel.app/install | bash -s -- --skip-shell \
+ && export PATH="/home/$USERNAME/.local/share/fnm:$PATH" \
+ && fnm install 20 \
+ && fnm default 20
+
+# Add fnm shell integration to bashrc
+RUN echo '' >> /home/$USERNAME/.bashrc \
+ && echo '# fnm' >> /home/$USERNAME/.bashrc \
+ && echo 'FNM_PATH="/home/'"$USERNAME"'/.local/share/fnm"' >> /home/$USERNAME/.bashrc \
+ && echo 'if [ -d "$FNM_PATH" ]; then' >> /home/$USERNAME/.bashrc \
+ && echo ' export PATH="$FNM_PATH:$PATH"' >> /home/$USERNAME/.bashrc \
+ && echo ' eval "$(fnm env --shell bash)"' >> /home/$USERNAME/.bashrc \
+ && echo 'fi' >> /home/$USERNAME/.bashrc
+
+ENV PATH="/home/$USERNAME/.local/share/fnm/aliases/default/bin:/home/$USERNAME/.local/share/fnm:${PATH}"
+
# --------------------------------------------------------------------
# Final working directory
# --------------------------------------------------------------------
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a019ed9c..b3cefdc5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -22,7 +22,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
- targets: thumbv7em-none-eabihf
+ targets: thumbv7em-none-eabihf, wasm32-unknown-unknown
components: rustfmt, clippy
- name: Cache dependencies
@@ -52,7 +52,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
- targets: thumbv7em-none-eabihf
+ targets: thumbv7em-none-eabihf, wasm32-unknown-unknown
- name: Cache dependencies
uses: actions/cache@v3
@@ -108,7 +108,7 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
- targets: thumbv7em-none-eabihf
+ targets: thumbv7em-none-eabihf, wasm32-unknown-unknown
components: rustfmt, clippy
- name: Cache dependencies
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 8794e76f..f2e4fe78 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -23,6 +23,8 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
+ with:
+ targets: wasm32-unknown-unknown
- name: Cache dependencies
uses: actions/cache@v4
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 707a4ef2..ffc84eea 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -23,7 +23,8 @@ jobs:
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
with:
- targets: thumbv7em-none-eabihf
+ targets: thumbv7em-none-eabihf, wasm32-unknown-unknown
+ components: rustfmt, clippy
- name: Cache dependencies
uses: actions/cache@v3
diff --git a/.vscode/mcp.json b/.vscode/mcp.json
index 1909b3b0..9e518063 100644
--- a/.vscode/mcp.json
+++ b/.vscode/mcp.json
@@ -5,7 +5,8 @@
"command": "/aimdb_ws/aimdb/target/release/aimdb-mcp",
"args": [],
"env": {
- "RUST_LOG": "info"
+ "RUST_LOG": "info",
+ "AIMDB_WORKSPACE": "${workspaceFolder}"
}
}
}
diff --git a/Cargo.lock b/Cargo.lock
index 263f3222..1cbab3c3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -28,6 +28,7 @@ name = "aimdb-cli"
version = "0.5.0"
dependencies = [
"aimdb-client",
+ "aimdb-codegen",
"aimdb-core",
"anyhow",
"chrono",
@@ -57,6 +58,18 @@ dependencies = [
"tokio-test",
]
+[[package]]
+name = "aimdb-codegen"
+version = "0.1.0"
+dependencies = [
+ "prettyplease",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "syn 2.0.108",
+ "toml",
+]
+
[[package]]
name = "aimdb-core"
version = "0.5.0"
@@ -86,7 +99,7 @@ version = "0.5.0"
dependencies = [
"aimdb-core",
"aimdb-executor",
- "rand",
+ "rand 0.8.5",
"serde",
"serde_json",
"ts-rs",
@@ -118,7 +131,7 @@ dependencies = [
"futures",
"futures-core",
"heapless 0.9.1",
- "rand",
+ "rand 0.8.5",
"tracing",
"tracing-test",
]
@@ -164,9 +177,11 @@ name = "aimdb-mcp"
version = "0.5.0"
dependencies = [
"aimdb-client",
+ "aimdb-codegen",
"aimdb-core",
"anyhow",
"chrono",
+ "fs2",
"once_cell",
"serde",
"serde_json",
@@ -257,6 +272,52 @@ dependencies = [
"tracing",
]
+[[package]]
+name = "aimdb-wasm-adapter"
+version = "0.1.1"
+dependencies = [
+ "aimdb-core",
+ "aimdb-data-contracts",
+ "aimdb-executor",
+ "aimdb-ws-protocol",
+ "futures-util",
+ "js-sys",
+ "serde",
+ "serde-wasm-bindgen",
+ "serde_json",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "wasm-bindgen-test",
+ "web-sys",
+]
+
+[[package]]
+name = "aimdb-websocket-connector"
+version = "0.1.0"
+dependencies = [
+ "aimdb-core",
+ "aimdb-data-contracts",
+ "aimdb-executor",
+ "aimdb-ws-protocol",
+ "axum",
+ "dashmap",
+ "futures-util",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tokio-tungstenite 0.26.2",
+ "tower-http",
+ "tracing",
+]
+
+[[package]]
+name = "aimdb-ws-protocol"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
[[package]]
name = "aligned"
version = "0.4.3"
@@ -362,6 +423,12 @@ dependencies = [
"syn 2.0.108",
]
+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
[[package]]
name = "autocfg"
version = "1.5.0"
@@ -390,6 +457,61 @@ dependencies = [
"fs_extra",
]
+[[package]]
+name = "axum"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
+dependencies = [
+ "axum-core",
+ "base64 0.22.1",
+ "bytes",
+ "form_urlencoded",
+ "futures-util",
+ "http 1.4.0",
+ "http-body 1.0.1",
+ "http-body-util",
+ "hyper 1.8.1",
+ "hyper-util",
+ "itoa",
+ "matchit",
+ "memchr",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "serde_core",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "sha1",
+ "sync_wrapper 1.0.2",
+ "tokio",
+ "tokio-tungstenite 0.28.0",
+ "tower",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "axum-core"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http 1.4.0",
+ "http-body 1.0.1",
+ "http-body-util",
+ "mime",
+ "pin-project-lite",
+ "sync_wrapper 1.0.2",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
[[package]]
name = "bare-metal"
version = "0.2.5"
@@ -411,6 +533,12 @@ version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
[[package]]
name = "bit_field"
version = "0.10.3"
@@ -435,6 +563,15 @@ version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
[[package]]
name = "block-device-driver"
version = "0.2.0"
@@ -638,12 +775,37 @@ dependencies = [
"syn 2.0.108",
]
+[[package]]
+name = "cpufeatures"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "critical-section"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b"
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
+
+[[package]]
+name = "crypto-common"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
[[package]]
name = "darling"
version = "0.20.11"
@@ -679,6 +841,26 @@ dependencies = [
"syn 2.0.108",
]
+[[package]]
+name = "dashmap"
+version = "6.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+ "hashbrown 0.14.5",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "data-encoding"
+version = "2.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea"
+
[[package]]
name = "defmt"
version = "0.3.100"
@@ -730,6 +912,16 @@ dependencies = [
"defmt 1.0.1",
]
+[[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
[[package]]
name = "displaydoc"
version = "0.2.5"
@@ -842,7 +1034,7 @@ dependencies = [
"knx-connector-demo-common",
"micromath",
"panic-probe",
- "rand",
+ "rand 0.8.5",
"static_cell",
"stm32-fmc 0.3.2",
]
@@ -875,7 +1067,7 @@ dependencies = [
"micromath",
"mqtt-connector-demo-common",
"panic-probe",
- "rand",
+ "rand 0.8.5",
"static_cell",
"stm32-fmc 0.3.2",
]
@@ -1242,6 +1434,16 @@ dependencies = [
"percent-encoding",
]
+[[package]]
+name = "fs2"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
[[package]]
name = "fs_extra"
version = "1.3.0"
@@ -1351,6 +1553,16 @@ dependencies = [
"windows",
]
+[[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
[[package]]
name = "getrandom"
version = "0.2.16"
@@ -1385,7 +1597,7 @@ dependencies = [
"futures-core",
"futures-sink",
"futures-util",
- "http",
+ "http 0.2.12",
"indexmap",
"slab",
"tokio",
@@ -1473,6 +1685,16 @@ dependencies = [
"itoa",
]
+[[package]]
+name = "http"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
+dependencies = [
+ "bytes",
+ "itoa",
+]
+
[[package]]
name = "http-body"
version = "0.4.6"
@@ -1480,7 +1702,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
dependencies = [
"bytes",
- "http",
+ "http 0.2.12",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "http-body"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
+dependencies = [
+ "bytes",
+ "http 1.4.0",
+]
+
+[[package]]
+name = "http-body-util"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http 1.4.0",
+ "http-body 1.0.1",
"pin-project-lite",
]
@@ -1507,8 +1752,8 @@ dependencies = [
"futures-core",
"futures-util",
"h2",
- "http",
- "http-body",
+ "http 0.2.12",
+ "http-body 0.4.6",
"httparse",
"httpdate",
"itoa",
@@ -1520,6 +1765,27 @@ dependencies = [
"want",
]
+[[package]]
+name = "hyper"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "http 1.4.0",
+ "http-body 1.0.1",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "pin-utils",
+ "smallvec",
+ "tokio",
+]
+
[[package]]
name = "hyper-tls"
version = "0.5.0"
@@ -1527,12 +1793,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
dependencies = [
"bytes",
- "hyper",
+ "hyper 0.14.32",
"native-tls",
"tokio",
"tokio-native-tls",
]
+[[package]]
+name = "hyper-util"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
+dependencies = [
+ "bytes",
+ "http 1.4.0",
+ "http-body 1.0.1",
+ "hyper 1.8.1",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
+]
+
[[package]]
name = "iana-time-zone"
version = "0.1.64"
@@ -1822,6 +2103,12 @@ dependencies = [
"regex-automata",
]
+[[package]]
+name = "matchit"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
+
[[package]]
name = "memchr"
version = "2.7.6"
@@ -1850,6 +2137,16 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+[[package]]
+name = "minicov"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4869b6a491569605d66d3952bcdf03df789e5b536e5f0cf7758a7f08a55ae24d"
+dependencies = [
+ "cc",
+ "walkdir",
+]
+
[[package]]
name = "mio"
version = "1.1.0"
@@ -2092,6 +2389,16 @@ dependencies = [
"zerocopy",
]
+[[package]]
+name = "prettyplease"
+version = "0.2.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b"
+dependencies = [
+ "proc-macro2",
+ "syn 2.0.108",
+]
+
[[package]]
name = "proc-macro-error-attr2"
version = "2.0.0"
@@ -2145,10 +2452,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
- "rand_chacha",
+ "rand_chacha 0.3.1",
"rand_core 0.6.4",
]
+[[package]]
+name = "rand"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
+dependencies = [
+ "rand_chacha 0.9.0",
+ "rand_core 0.9.3",
+]
+
[[package]]
name = "rand_chacha"
version = "0.3.1"
@@ -2159,6 +2476,16 @@ dependencies = [
"rand_core 0.6.4",
]
+[[package]]
+name = "rand_chacha"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.9.3",
+]
+
[[package]]
name = "rand_core"
version = "0.6.4"
@@ -2173,6 +2500,9 @@ name = "rand_core"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
+dependencies = [
+ "getrandom 0.3.4",
+]
[[package]]
name = "redox_syscall"
@@ -2237,9 +2567,9 @@ dependencies = [
"futures-core",
"futures-util",
"h2",
- "http",
- "http-body",
- "hyper",
+ "http 0.2.12",
+ "http-body 0.4.6",
+ "hyper 0.14.32",
"hyper-tls",
"ipnet",
"js-sys",
@@ -2253,7 +2583,7 @@ dependencies = [
"serde",
"serde_json",
"serde_urlencoded",
- "sync_wrapper",
+ "sync_wrapper 0.1.2",
"system-configuration",
"tokio",
"tokio-native-tls",
@@ -2437,6 +2767,15 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
[[package]]
name = "schannel"
version = "0.1.28"
@@ -2525,6 +2864,17 @@ dependencies = [
"serde_derive",
]
+[[package]]
+name = "serde-wasm-bindgen"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b"
+dependencies = [
+ "js-sys",
+ "serde",
+ "wasm-bindgen",
+]
+
[[package]]
name = "serde_core"
version = "1.0.228"
@@ -2558,6 +2908,26 @@ dependencies = [
"serde_core",
]
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
+dependencies = [
+ "itoa",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "0.6.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
+dependencies = [
+ "serde",
+]
+
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@@ -2583,6 +2953,17 @@ dependencies = [
"unsafe-libyaml",
]
+[[package]]
+name = "sha1"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
[[package]]
name = "sharded-slab"
version = "0.1.7"
@@ -2703,7 +3084,7 @@ dependencies = [
[[package]]
name = "stm32-metapac"
version = "19.0.0"
-source = "git+https://github.com/embassy-rs/stm32-data-generated?tag=stm32-data-69c2fa7d0b8afd40fc2172d8af2311008428193d#9a192510e3e113e949ee12001ff60f0ff7a149ea"
+source = "git+https://github.com/embassy-rs/stm32-data-generated?tag=stm32-data-0f4c948b5c81ebe421fe902857ccdb39029651f6#da4c008381dbdbee62135dee88aa2e7d7c4e4992"
dependencies = [
"cortex-m",
"cortex-m-rt",
@@ -2774,6 +3155,12 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
+[[package]]
+name = "sync_wrapper"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
+
[[package]]
name = "synstructure"
version = "0.13.2"
@@ -3024,6 +3411,30 @@ dependencies = [
"tokio-stream",
]
+[[package]]
+name = "tokio-tungstenite"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084"
+dependencies = [
+ "futures-util",
+ "log",
+ "tokio",
+ "tungstenite 0.26.2",
+]
+
+[[package]]
+name = "tokio-tungstenite"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
+dependencies = [
+ "futures-util",
+ "log",
+ "tokio",
+ "tungstenite 0.28.0",
+]
+
[[package]]
name = "tokio-util"
version = "0.7.17"
@@ -3037,6 +3448,83 @@ dependencies = [
"tokio",
]
+[[package]]
+name = "toml"
+version = "0.8.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
+dependencies = [
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_edit",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.6.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.22.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
+dependencies = [
+ "indexmap",
+ "serde",
+ "serde_spanned",
+ "toml_datetime",
+ "toml_write",
+ "winnow",
+]
+
+[[package]]
+name = "toml_write"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
+
+[[package]]
+name = "tower"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper 1.0.2",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-http"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
+dependencies = [
+ "bitflags 2.10.0",
+ "bytes",
+ "http 1.4.0",
+ "pin-project-lite",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
+
[[package]]
name = "tower-service"
version = "0.3.3"
@@ -3049,6 +3537,7 @@ version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [
+ "log",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
@@ -3165,6 +3654,46 @@ dependencies = [
"termcolor",
]
+[[package]]
+name = "tungstenite"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13"
+dependencies = [
+ "bytes",
+ "data-encoding",
+ "http 1.4.0",
+ "httparse",
+ "log",
+ "rand 0.9.2",
+ "sha1",
+ "thiserror 2.0.17",
+ "utf-8",
+]
+
+[[package]]
+name = "tungstenite"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
+dependencies = [
+ "bytes",
+ "data-encoding",
+ "http 1.4.0",
+ "httparse",
+ "log",
+ "rand 0.9.2",
+ "sha1",
+ "thiserror 2.0.17",
+ "utf-8",
+]
+
+[[package]]
+name = "typenum"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
+
[[package]]
name = "unicode-ident"
version = "1.0.20"
@@ -3207,6 +3736,12 @@ dependencies = [
"serde",
]
+[[package]]
+name = "utf-8"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
+
[[package]]
name = "utf8_iter"
version = "1.0.4"
@@ -3269,6 +3804,16 @@ dependencies = [
"vcell",
]
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
[[package]]
name = "want"
version = "0.3.1"
@@ -3351,6 +3896,30 @@ dependencies = [
"unicode-ident",
]
+[[package]]
+name = "wasm-bindgen-test"
+version = "0.3.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfc379bfb624eb59050b509c13e77b4eb53150c350db69628141abce842f2373"
+dependencies = [
+ "js-sys",
+ "minicov",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "wasm-bindgen-test-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-test-macro"
+version = "0.3.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "085b2df989e1e6f9620c1311df6c996e83fe16f57792b272ce1e024ac16a90f1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.108",
+]
+
[[package]]
name = "weather-hub"
version = "0.1.0"
@@ -3401,7 +3970,7 @@ dependencies = [
"aimdb-mqtt-connector",
"aimdb-tokio-adapter",
"chrono",
- "rand",
+ "rand 0.8.5",
"tokio",
"tracing",
"tracing-subscriber",
@@ -3436,7 +4005,7 @@ dependencies = [
"heapless 0.8.0",
"micromath",
"panic-probe",
- "rand",
+ "rand 0.8.5",
"static_cell",
"stm32-fmc 0.3.2",
"weather-mesh-common",
@@ -3452,6 +4021,22 @@ dependencies = [
"wasm-bindgen",
]
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
[[package]]
name = "winapi-util"
version = "0.1.11"
@@ -3461,6 +4046,12 @@ dependencies = [
"windows-sys 0.61.2",
]
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
[[package]]
name = "windows"
version = "0.61.3"
@@ -3800,6 +4391,15 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
+[[package]]
+name = "winnow"
+version = "0.7.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
+dependencies = [
+ "memchr",
+]
+
[[package]]
name = "winreg"
version = "0.50.0"
diff --git a/Cargo.toml b/Cargo.toml
index bc74ba87..9a85b386 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,6 +5,7 @@ members = [
"aimdb-data-contracts",
"aimdb-core",
"aimdb-client",
+ "aimdb-codegen",
"aimdb-embassy-adapter",
"aimdb-tokio-adapter",
"aimdb-sync",
@@ -12,6 +13,9 @@ members = [
"aimdb-persistence-sqlite",
"aimdb-mqtt-connector",
"aimdb-knx-connector",
+ "aimdb-websocket-connector",
+ "aimdb-ws-protocol",
+ "aimdb-wasm-adapter",
"tools/aimdb-cli",
"tools/aimdb-mcp",
"examples/knx-connector-demo-common",
diff --git a/Makefile b/Makefile
index 0148b1f7..32efaf6d 100644
--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,7 @@
# AimDB Makefile
# Simple automation for common development tasks
-.PHONY: help build test clean fmt fmt-check clippy doc all check test-embedded examples deny audit security publish publish-check
+.PHONY: help build test clean fmt fmt-check clippy doc all check test-embedded test-wasm wasm wasm-test examples deny audit security publish publish-check
.DEFAULT_GOAL := help
# Colors for output
@@ -28,6 +28,7 @@ help:
@printf " $(YELLOW)Testing Commands:$(NC)\n"
@printf " check Comprehensive development check (fmt + clippy + all tests)\n"
@printf " test-embedded Test embedded/MCU cross-compilation compatibility\n"
+ @printf " test-wasm Test WASM cross-compilation compatibility\n"
@printf "\n"
@printf " $(YELLOW)Security & Quality:$(NC)\n"
@printf " deny Check dependencies (licenses, advisories, bans)\n"
@@ -38,6 +39,10 @@ help:
@printf " publish-check Test crates.io publish (dry-run, no git commit required)\n"
@printf " publish Publish all crates to crates.io (requires clean git state)\n"
@printf "\n"
+ @printf " $(YELLOW)WASM Commands:$(NC)\n"
+ @printf " wasm Build WASM adapter with wasm-pack\n"
+ @printf " wasm-test Run WASM tests in headless browser\n"
+ @printf "\n"
@printf " $(YELLOW)Convenience:$(NC)\n"
@printf " all Build everything\n"
@@ -56,6 +61,8 @@ build:
cargo build --package aimdb-tokio-adapter --features "tokio-runtime,tracing,metrics"
@printf "$(YELLOW) → Building sync wrapper$(NC)\n"
cargo build --package aimdb-sync
+ @printf "$(YELLOW) → Building codegen library$(NC)\n"
+ cargo build --package aimdb-codegen
@printf "$(YELLOW) → Building CLI tools$(NC)\n"
cargo build --package aimdb-cli
@printf "$(YELLOW) → Building MCP server$(NC)\n"
@@ -66,6 +73,12 @@ build:
cargo build --package aimdb-persistence-sqlite
@printf "$(YELLOW) → Building KNX connector$(NC)\n"
cargo build --package aimdb-knx-connector --features "std,tokio-runtime"
+ @printf "$(YELLOW) → Building WS protocol$(NC)\n"
+ cargo build --package aimdb-ws-protocol
+ @printf "$(YELLOW) → Building WebSocket connector$(NC)\n"
+ cargo build --package aimdb-websocket-connector --features "tokio-runtime"
+ @printf "$(YELLOW) → Building WASM adapter$(NC)\n"
+ cargo build --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime"
test:
@printf "$(GREEN)Running all tests (valid combinations)...$(NC)\n"
@@ -85,6 +98,8 @@ test:
cargo test --package aimdb-tokio-adapter --features "tokio-runtime,tracing,metrics"
@printf "$(YELLOW) → Testing sync wrapper$(NC)\n"
cargo test --package aimdb-sync
+ @printf "$(YELLOW) → Testing codegen library$(NC)\n"
+ cargo test --package aimdb-codegen
@printf "$(YELLOW) → Testing CLI tools$(NC)\n"
cargo test --package aimdb-cli
@printf "$(YELLOW) → Testing MCP server$(NC)\n"
@@ -97,10 +112,14 @@ test:
cargo test --package aimdb-mqtt-connector --features "std,tokio-runtime"
@printf "$(YELLOW) → Testing KNX connector$(NC)\n"
cargo test --package aimdb-knx-connector --features "std,tokio-runtime"
+ @printf "$(YELLOW) → Testing WS protocol$(NC)\n"
+ cargo test --package aimdb-ws-protocol
+ @printf "$(YELLOW) → Testing WebSocket connector$(NC)\n"
+ cargo test --package aimdb-websocket-connector --features "tokio-runtime"
fmt:
@printf "$(GREEN)Formatting code (workspace members only)...$(NC)\n"
- @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \
+ @for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-ws-protocol aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \
printf "$(YELLOW) → Formatting $$pkg$(NC)\n"; \
cargo fmt -p $$pkg 2>/dev/null || true; \
done
@@ -109,7 +128,7 @@ fmt:
fmt-check:
@printf "$(GREEN)Checking code formatting (workspace members only)...$(NC)\n"
@FAILED=0; \
- for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \
+ for pkg in aimdb-executor aimdb-derive aimdb-data-contracts aimdb-core aimdb-client aimdb-embassy-adapter aimdb-tokio-adapter aimdb-wasm-adapter aimdb-sync aimdb-persistence aimdb-persistence-sqlite aimdb-mqtt-connector aimdb-knx-connector aimdb-ws-protocol aimdb-websocket-connector aimdb-codegen aimdb-cli aimdb-mcp sync-api-demo tokio-mqtt-connector-demo embassy-mqtt-connector-demo tokio-knx-connector-demo embassy-knx-connector-demo weather-mesh-common weather-hub weather-station-alpha weather-station-beta; do \
printf "$(YELLOW) → Checking $$pkg$(NC)\n"; \
if ! cargo fmt -p $$pkg -- --check 2>&1; then \
printf "$(RED)❌ Formatting check failed for $$pkg$(NC)\n"; \
@@ -144,6 +163,8 @@ clippy:
cargo clippy --package aimdb-sync --all-targets -- -D warnings
@printf "$(YELLOW) → Clippy on client library$(NC)\n"
cargo clippy --package aimdb-client --all-targets -- -D warnings
+ @printf "$(YELLOW) → Clippy on codegen library$(NC)\n"
+ cargo clippy --package aimdb-codegen --all-targets -- -D warnings
@printf "$(YELLOW) → Clippy on CLI tools$(NC)\n"
cargo clippy --package aimdb-cli --all-targets -- -D warnings
@printf "$(YELLOW) → Clippy on MCP server$(NC)\n"
@@ -160,6 +181,12 @@ clippy:
cargo clippy --package aimdb-mqtt-connector --target thumbv7em-none-eabihf --no-default-features --features "embassy-runtime,defmt" -- -D warnings
@printf "$(YELLOW) → Clippy on KNX connector (embassy + defmt)$(NC)\n"
cargo clippy --package aimdb-knx-connector --target thumbv7em-none-eabihf --no-default-features --features "embassy-runtime,defmt" -- -D warnings
+ @printf "$(YELLOW) → Clippy on WS protocol$(NC)\n"
+ cargo clippy --package aimdb-ws-protocol --all-targets -- -D warnings
+ @printf "$(YELLOW) → Clippy on WebSocket connector$(NC)\n"
+ cargo clippy --package aimdb-websocket-connector --features "tokio-runtime" --all-targets -- -D warnings
+ @printf "$(YELLOW) → Clippy on WASM adapter$(NC)\n"
+ cargo clippy --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime" -- -D warnings
doc:
@printf "$(GREEN)Generating dual-platform documentation...$(NC)\n"
@@ -173,10 +200,13 @@ doc:
cargo doc --package aimdb-sync --no-deps
cargo doc --package aimdb-mqtt-connector --features "std,tokio-runtime" --no-deps
cargo doc --package aimdb-knx-connector --features "std,tokio-runtime" --no-deps
+ cargo doc --package aimdb-codegen --no-deps
cargo doc --package aimdb-cli --no-deps
cargo doc --package aimdb-mcp --no-deps
cargo doc --package aimdb-persistence --no-deps
cargo doc --package aimdb-persistence-sqlite --no-deps
+ cargo doc --package aimdb-ws-protocol --no-deps
+ cargo doc --package aimdb-websocket-connector --features "tokio-runtime" --no-deps
@cp -r target/doc/* target/doc-final/cloud/
@printf "$(YELLOW) → Building embedded documentation$(NC)\n"
cargo doc --package aimdb-core --no-default-features --features alloc --no-deps
@@ -184,6 +214,8 @@ doc:
cargo doc --package aimdb-mqtt-connector --no-default-features --features "embassy-runtime" --no-deps
cargo doc --package aimdb-knx-connector --no-default-features --features "embassy-runtime" --no-deps
@cp -r target/doc/* target/doc-final/embedded/
+ @printf "$(YELLOW) → Building WASM/browser documentation$(NC)\n"
+ cargo doc --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime" --no-deps
@printf "$(YELLOW) → Creating main index page$(NC)\n"
@cp docs/index.html target/doc-final/index.html
@printf "$(BLUE)Documentation generated at: file://$(PWD)/target/doc-final/index.html$(NC)\n"
@@ -193,6 +225,12 @@ clean:
cargo clean
## Testing commands
+test-wasm:
+ @printf "$(BLUE)Testing WASM cross-compilation compatibility...$(NC)\n"
+ @printf "$(YELLOW) → Checking aimdb-wasm-adapter on wasm32-unknown-unknown target$(NC)\n"
+ cargo check --package aimdb-wasm-adapter --target wasm32-unknown-unknown --features "wasm-runtime"
+ @printf "$(GREEN)✓ WASM target compatibility verified!$(NC)\n"
+
test-embedded:
@printf "$(BLUE)Testing embedded/MCU cross-compilation compatibility...$(NC)\n"
@printf "$(YELLOW) → Checking aimdb-data-contracts (no_std + alloc) on thumbv7em-none-eabihf target$(NC)\n"
@@ -294,67 +332,99 @@ publish:
else \
printf "$(BLUE)Running in CI mode - skipping confirmation$(NC)\n"; \
fi
- @printf "$(YELLOW) → Publishing aimdb-executor (1/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-executor (1/16)$(NC)\n"
@cargo publish -p aimdb-executor
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-derive (2/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-derive (2/16)$(NC)\n"
@cargo publish -p aimdb-derive
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-core (3/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-codegen (3/16)$(NC)\n"
+ @cargo publish -p aimdb-codegen
+ @printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
+ @sleep 10
+ @printf "$(YELLOW) → Publishing aimdb-core (4/16)$(NC)\n"
@cargo publish -p aimdb-core
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-tokio-adapter (4/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-tokio-adapter (5/16)$(NC)\n"
@cargo publish -p aimdb-tokio-adapter
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-embassy-adapter (5/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-embassy-adapter (6/16)$(NC)\n"
@cargo publish -p aimdb-embassy-adapter --no-verify
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-client (6/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-client (7/16)$(NC)\n"
@cargo publish -p aimdb-client
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-sync (7/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-sync (8/16)$(NC)\n"
@cargo publish -p aimdb-sync
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-persistence (8/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-persistence (9/16)$(NC)\n"
@cargo publish -p aimdb-persistence
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-persistence-sqlite (9/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-persistence-sqlite (10/16)$(NC)\n"
@cargo publish -p aimdb-persistence-sqlite
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-mqtt-connector (10/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-mqtt-connector (11/16)$(NC)\n"
@cargo publish -p aimdb-mqtt-connector
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-knx-connector (11/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-knx-connector (12/16)$(NC)\n"
@cargo publish -p aimdb-knx-connector
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-cli (12/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-ws-protocol (13/16)$(NC)\n"
+ @cargo publish -p aimdb-ws-protocol
+ @printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
+ @sleep 10
+ @printf "$(YELLOW) → Publishing aimdb-websocket-connector (14/16)$(NC)\n"
+ @cargo publish -p aimdb-websocket-connector
+ @printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
+ @sleep 10
+ @printf "$(YELLOW) → Publishing aimdb-cli (15/16)$(NC)\n"
@cargo publish -p aimdb-cli
@printf "$(YELLOW) → Waiting 10s for crates.io propagation...$(NC)\n"
@sleep 10
- @printf "$(YELLOW) → Publishing aimdb-mcp (13/13)$(NC)\n"
+ @printf "$(YELLOW) → Publishing aimdb-mcp (16/16)$(NC)\n"
@cargo publish -p aimdb-mcp
@printf "$(GREEN)✓ All crates published successfully!$(NC)\n"
@printf "$(BLUE)🎉 AimDB v$(shell grep '^version' Cargo.toml | head -1 | cut -d '"' -f 2) is now live on crates.io!$(NC)\n"
## Convenience commands
-check: fmt-check clippy test test-embedded deny
+check: fmt-check clippy test test-embedded test-wasm deny
@printf "$(GREEN)Comprehensive development checks completed!$(NC)\n"
@printf "$(BLUE)✓ Code formatting verified$(NC)\n"
@printf "$(BLUE)✓ Linter passed$(NC)\n"
@printf "$(BLUE)✓ All valid feature combinations tested$(NC)\n"
@printf "$(BLUE)✓ Embedded target compatibility verified$(NC)\n"
+ @printf "$(BLUE)✓ WASM target compatibility verified$(NC)\n"
@printf "$(BLUE)✓ Dependencies verified (deny)$(NC)\n"
+## WASM commands
+wasm:
+ @printf "$(GREEN)Building WASM adapter with wasm-pack...$(NC)\n"
+ @if ! command -v wasm-pack >/dev/null 2>&1; then \
+ printf "$(YELLOW) ⚠ wasm-pack not found, installing...$(NC)\n"; \
+ cargo install wasm-pack --locked; \
+ fi
+ cd aimdb-wasm-adapter && wasm-pack build --target web --out-dir pkg
+ @printf "$(GREEN)✓ WASM build complete! Output in aimdb-wasm-adapter/pkg/$(NC)\n"
+
+wasm-test:
+ @printf "$(GREEN)Running WASM tests in headless browser...$(NC)\n"
+ @if ! command -v wasm-pack >/dev/null 2>&1; then \
+ printf "$(YELLOW) ⚠ wasm-pack not found, installing...$(NC)\n"; \
+ cargo install wasm-pack --locked; \
+ fi
+ cd aimdb-wasm-adapter && wasm-pack test --headless --chrome
+ @printf "$(GREEN)✓ WASM tests passed!$(NC)\n"
+
all: build test examples
@printf "$(GREEN)Build and test completed!$(NC)\n"
diff --git a/README.md b/README.md
index d90a7d6a..30886b70 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,8 @@
-
-
-
-
-
+
- Dataflow engine for distributed systems.
- MCU to Cloud. Same API.
+ Distributed by design. Data-driven by default.
@@ -27,17 +22,155 @@
-Write your data pipeline once. Run it on microcontrollers, edge gateways or Kubernetes — no code changes. AimDB's portable data contracts handle serialization, transforms and schema evolution across all runtimes.
+AimDB turns data contracts into the architecture. Define your schemas once — with built-in versioning, observability and serialization — and deploy them unchanged across microcontrollers, edge gateways, Kubernetes and the browser.
-
-
-
+---
+
+### Vision
+
+A future where every system — from a $2 sensor to a global fleet — shares one data language. Contracts define how data moves, evolves and is observed. Infrastructure adapts to the data, not the other way around.
+
+---
+
+### Design Philosophy
+
+In a data-driven architecture, every design decision starts with the data, not the service that produces it.
+
+**Records declare their own semantics.** When you register a record in AimDB, you choose a buffer type that defines how the data moves:
+
+| Buffer | Semantics | Use Cases |
+|--------|-----------|-----------|
+| **SPMC Ring** | Bounded stream with independent consumers | Sensor telemetry, event logs, interaction streams |
+| **SingleLatest** | Only the current value matters | Feature flags, configuration, UI state |
+| **Mailbox** | Latest instruction wins | Device commands, actuation, RPC |
+
+These are the three universal primitives of data movement — portable, typed and runtime-agnostic.
+
+**Observability becomes automatic.** A record that exists is observable by definition. Every producer and consumer relationship is declared in the builder, not discovered through instrumentation.
+
+**Synchronization becomes declarative.** You don't build a sync layer between your MCU, edge gateway and cloud backend. You declare a record with connector metadata on its key and the same typed data flows across all environments without translation.
+
+**Cross-cutting concerns derive from the schema.** Instead of adding observability libraries, feature flag SDKs and experiment frameworks as separate integrations, they become intrinsic properties of records — declared once, applied everywhere.
+
+---
+
+### How It Works
+
+Define your contracts, choose buffer semantics and wire up connectors — all in one builder block:
+
+```rust
+// A sensor node: produce temperature readings, publish over MQTT
+builder.configure::<Temperature>("sensor::temp", |reg| {
+ reg.buffer(BufferCfg::SpmcRing { capacity: 256 })
+ .source(|ctx, producer| async move {
+ loop {
+ let reading = read_sensor().await;
+ producer.send(Temperature::set(reading, now())).await.ok();
+ ctx.sleep(Duration::from_secs(1)).await;
+ }
+ })
+ .link_to("mqtt://sensors/temperature")
+ .with_serializer(Temperature::to_bytes)
+ .finish();
+});
+
+// An edge gateway: receive from MQTT, observe and forward
+builder.configure::<Temperature>("gateway::temp", |reg| {
+ reg.buffer(BufferCfg::SingleLatest)
+ .link_from("mqtt://sensors/temperature")
+ .with_deserializer(Temperature::from_bytes)
+ .tap(log_tap::<Temperature>("edge")) // [edge] 22.5 °C
+ .finish();
+});
+```
+
+Transport topics can be passed as strings to `link_to` / `link_from`, or declared on key enums with `#[link_address = "mqtt://..."]` and resolved at runtime. No separate instrumentation. No SDK integration. No sync code.
+
+---
+
+### Data Contracts
+
+Data contracts are the heart of AimDB. A contract is a plain Rust struct that carries its own identity, version and capabilities — the single source of truth from sensor firmware to browser UI.
+
+```rust
+use aimdb_data_contracts::{SchemaType, Settable};
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Temperature {
+ pub celsius: f32,
+ pub timestamp: u64,
+}
+
+impl SchemaType for Temperature {
+ const NAME: &'static str = "temperature";
+ const VERSION: u32 = 1;
+}
+
+impl Settable for Temperature {
+ type Value = f32;
+ fn set(value: Self::Value, timestamp: u64) -> Self {
+ Self { celsius: value, timestamp }
+ }
+}
+```
+
+This struct compiles for `no_std` embedded targets and standard Rust alike. `SchemaType` gives the record its identity and version. `Settable` provides a canonical constructor so producers can create records from a raw value — this is the interface used by `producer.send(Temperature::set(reading, now()))` in the builder.
+
+#### Contract Attributes
+
+Contracts gain capabilities through trait implementations. Each trait is a compile-time declaration of what a contract *can do*, not a runtime configuration:
+
+| Attribute | Trait | What It Enables |
+|-----------|-------|-----------------|
+| **Settable** | `Settable` | Canonical constructor from a raw value — the interface behind `producer.send(T::set(value, ts))`. |
+| **Streamable** | `Streamable` | Cross-boundary transport — WASM, WebSocket, CLI. One registry, zero parallel type systems. |
+| **Migratable** | `MigrationStep` | Bidirectional schema evolution with typed up/down transforms and chained version steps. |
+| **Observable** | `Observable` | Signal extraction for thresholds, logging and monitoring. Icon, unit and `format_log()` built in. |
+| **Linkable** | `Linkable` | Wire-format serialization for connectors — MQTT, KNX and any future transport. |
+| **Simulatable** | `Simulatable` | Realistic test data generation with random walks, trends and configurable parameters. |
+
+For example, `Observable` turns a contract into a loggable, monitorable signal:
+
+```rust
+impl Observable for Temperature {
+ type Signal = f32;
+ const ICON: &'static str = "thermometer";
+ const UNIT: &'static str = "°C";
+
+ fn signal(&self) -> f32 { self.celsius }
+
+ fn format_log(&self, node_id: &str) -> String {
+ format!("[{}] {:.1} °C", node_id, self.celsius)
+ }
+}
+```
+
+Each trait you implement unlocks a capability — contracts without `Observable` simply can't be tapped; contracts without `Linkable` can't be wired to connectors. The type system enforces what your data can do.
+
+#### Platform-Agnostic by Design
+
+The same contract works across all runtimes without modification:
+
+```
+┌───────────────────────────────────────────────────────────────────────────────┐
+│ Temperature Contract │
+├───────────────────┬───────────────────┬───────────────────┬───────────────────┤
+│ MCU (Embassy) │ Edge (Tokio) │ Cloud (Tokio) │ Browser (WASM) │
+│ no_std + alloc │ std │ Kubernetes │ wasm32 │
+│ Cortex-M4 │ Linux / RPi │ Full featured │ Single-threaded │
+└───────────────────┴───────────────────┴───────────────────┴───────────────────┘
+```
+
+The Rust type system enforces correctness at compile time. The dataflow engine's buffer semantics enforce flow guarantees at runtime. Connectors wire everything to your infrastructure without an integration layer.
---
### Getting Started
-**See it live** — explore a running sensor mesh at [aimdb.dev](https://aimdb.dev)
+#### 1. See it live
+
+Explore a running sensor mesh — no setup required:
@@ -45,37 +178,56 @@ Write your data pipeline once. Run it on microcontrollers, edge gateways or Kube
-**Run locally** — full MCU → edge → cloud mesh in Docker:
+> **[aimdb.dev](https://aimdb.dev)** — live weather stations streaming typed contracts across MCU, edge and cloud.
+
+#### 2. Run locally
+
+Spin up a full MCU → edge → cloud mesh with one command:
```bash
cd examples/weather-mesh-demo
docker compose up
```
-Then ask VS Code Copilot: *"What's the current temperature from station ...?"* ([MCP setup required](examples/weather-mesh-demo/))
+This starts three weather stations, an MQTT broker and a central hub — all wired together with typed data contracts.
+
+#### 3. Explore with AI
+
+With the mesh running, connect an MCP-compatible editor to query your data in natural language:
-**Learn more:**
-- [Quick Start Guide](https://aimdb.dev/docs/getting-started) — Dependency setup and API basics
-- [Data Contracts](https://aimdb.dev/docs/data-contracts) — Type-safe schemas
-- [Connectors](https://aimdb.dev/docs/connectors) — MQTT, KNX and more
-- [Deployment](https://aimdb.dev/docs/deployment) — Running on MCU, edge and cloud
-- [API Reference](https://docs.rs/aimdb-core) — Full Rust API documentation
+Install the MCP server and add it to your workspace:
----
+```bash
+cargo install aimdb-mcp
+```
-### Why AimDB?
+`.vscode/mcp.json`:
-A real-time data runtime that adapts to your infrastructure, not the other way around.
+```json
+{
+ "servers": {
+ "aimdb": {
+ "type": "stdio",
+ "command": "${userHome}/.cargo/bin/aimdb-mcp"
+ }
+ }
+}
+```
-| Problem | AimDB Solution |
-|---------|----------------|
-| **Runs Where Data Starts** | From $2 MCUs to Kubernetes clusters. Deploy the same code anywhere, process data at the source. |
-| **Same API Everywhere** | Tokio + Embassy compatible, embedded-friendly, `no_std`-ready. One interface across all your runtimes. |
-| **Built for Continuous Change** | Unified data layer with schema evolution built in. Your data pipelines adapt as fast as your business. |
+Then ask: *"What's the current temperature from station alpha?"* — see the [MCP server docs](tools/aimdb-mcp/) for Claude Desktop and other editors.
+
+#### 4. Build your own
+
+- [Quick Start Guide](https://aimdb.dev/docs/getting-started) — Dependencies, platform setup and your first contract
+- [Data Contracts](https://aimdb.dev/docs/data-contracts) — Type-safe schemas with built-in capabilities
+- [Connectors](https://aimdb.dev/docs/connectors) — MQTT, KNX, WebSocket and more
+- [Deployment](https://aimdb.dev/docs/deployment) — Running on MCU, edge, cloud and browser
+- [API Reference](https://docs.rs/aimdb-core) — Full Rust API documentation
+- [Blog](https://aimdb.dev/blog) — News, tutorials and insights from the AimDB team
---
@@ -85,7 +237,7 @@ A real-time data runtime that adapts to your infrastructure, not the other way a
|----------|-------|--------|----------|
| **MQTT** | `aimdb-mqtt-connector` | ✅ Ready | std, no_std |
| **KNX** | `aimdb-knx-connector` | ✅ Ready | std, no_std |
-| **HTTP/REST** | — | 🔨 Building | std |
+| **WebSocket** | `aimdb-websocket-connector` | ✅ Ready | std, wasm |
| **Kafka** | — | 📋 Planned | std |
| **Modbus** | — | 📋 Planned | std, no_std |
@@ -93,12 +245,12 @@ A real-time data runtime that adapts to your infrastructure, not the other way a
### Platform Support
-| Target | Runtime | Features | Footprint |
-|--------|---------|----------|-----------|
-| **ARM Cortex-M** (STM32H5, STM32F4) | Embassy | no_std, async | ~50KB+ |
-| **ARM Cortex-M** (STM32H5, STM32F4) | FreeRTOS | 📋 Planned | — |
-| **Linux Edge Devices** | Tokio | Full std | ~10MB+ |
-| **Containers/K8s** | Tokio | Full std | ~10MB+ |
+| Target | Runtime | Adapter | Features | Footprint |
+|--------|---------|---------|----------|-----------|
+| **ARM Cortex-M** (STM32H5, STM32F4) | Embassy | `aimdb-embassy-adapter` | no_std, async | ~50KB+ |
+| **Linux Edge** (RPi, gateways) | Tokio | `aimdb-tokio-adapter` | Full std | ~10MB+ |
+| **Containers / K8s** | Tokio | `aimdb-tokio-adapter` | Full std | ~10MB+ |
+| **Browser / SPA** | WASM | `aimdb-wasm-adapter` | wasm32, single-threaded | ~2MB+ |
---
@@ -119,5 +271,7 @@ Want to contribute? See the [contributing guide](CONTRIBUTING.md). We have [good
---
- Write once. Deploy anywhere. Pay only where it makes sense.
+ Define once. Deploy anywhere. Observe everything.
+
+ Get started · Live demo · Join the discussion
diff --git a/_external/embassy b/_external/embassy
index 29640b94..e63316a9 160000
--- a/_external/embassy
+++ b/_external/embassy
@@ -1 +1 @@
-Subproject commit 29640b944410cf6903a42c06e4373ff07c13aede
+Subproject commit e63316a9409d15a460a9d2821c344b8eaf93a498
diff --git a/aimdb-codegen/Cargo.toml b/aimdb-codegen/Cargo.toml
new file mode 100644
index 00000000..b0ff2e0a
--- /dev/null
+++ b/aimdb-codegen/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "aimdb-codegen"
+version = "0.1.0"
+edition = "2021"
+authors.workspace = true
+license.workspace = true
+repository.workspace = true
+homepage.workspace = true
+description = "Code generation library for AimDB architecture agent — reads state.toml and emits Mermaid diagrams and Rust source"
+keywords = ["codegen", "architecture", "mermaid", "schema", "aimdb"]
+categories = ["development-tools"]
+
+[dependencies]
+proc-macro2 = "1"
+prettyplease = "0.2"
+quote = "1"
+serde = { version = "1.0", features = ["derive"] }
+syn = "2"
+toml = "0.8"
+
+[dev-dependencies]
+# No external dev deps — integration tests use generated string content
diff --git a/aimdb-codegen/src/lib.rs b/aimdb-codegen/src/lib.rs
new file mode 100644
index 00000000..6711a0d6
--- /dev/null
+++ b/aimdb-codegen/src/lib.rs
@@ -0,0 +1,68 @@
+//! AimDB Codegen — architecture state to Mermaid and Rust source
+//!
+//! This library reads `.aimdb/state.toml` (the architecture agent's decision
+//! record) and emits two artefacts:
+//!
+//! - **Mermaid diagram** — `.aimdb/architecture.mermaid`, a read-only graph
+//! projection of the architecture (see [`generate_mermaid`])
+//! - **Rust source** — `src/generated_schema.rs`, compilable AimDB schema
+//! using the actual 0.5.x API (see [`generate_rust`])
+//!
+//! # Usage
+//!
+//! ```rust
+//! use aimdb_codegen::{ArchitectureState, generate_mermaid, generate_rust, validate};
+//!
+//! let toml = r#"
+//! [meta]
+//! aimdb_version = "0.5.0"
+//! created_at = "2026-02-22T14:00:00Z"
+//! last_modified = "2026-02-22T14:00:00Z"
+//!
+//! [[records]]
+//! name = "Temperature"
+//! buffer = "SpmcRing"
+//! capacity = 128
+//! key_prefix = "sensor."
+//! key_variants = ["room1"]
+//! producers = ["sensor_task"]
+//! consumers = ["dashboard"]
+//!
+//! [[records.fields]]
+//! name = "celsius"
+//! type = "f64"
+//! description = "Temperature in Celsius"
+//! "#;
+//!
+//! let state = ArchitectureState::from_toml(toml).unwrap();
+//!
+//! let errors = validate(&state);
+//! assert!(errors.iter().all(|e| e.severity != validate::Severity::Error));
+//!
+//! let mermaid = generate_mermaid(&state);
+//! assert!(mermaid.contains("flowchart LR"));
+//!
+//! let rust = generate_rust(&state);
+//! assert!(rust.contains("pub struct TemperatureValue"));
+//! ```
+
+pub mod mermaid;
+pub mod rust;
+pub mod state;
+pub mod validate;
+
+// ── Convenience re-exports ────────────────────────────────────────────────────
+
+pub use mermaid::generate_mermaid;
+pub use rust::{
+ generate_binary_cargo_toml, generate_cargo_toml, generate_hub_cargo_toml, generate_hub_main_rs,
+ generate_hub_schema_rs, generate_hub_tasks_rs, generate_lib_rs, generate_main_rs,
+ generate_rust, generate_schema_rs, generate_tasks_rs,
+};
+pub use rust::{to_pascal_case, to_snake_case};
+pub use state::{
+ ArchitectureState, BinaryDef, BufferType, ConnectorDef, ConnectorDirection, DecisionEntry,
+ ExternalConnectorDef, FieldDef, Meta, ObservableDef, ProjectDef, RecordDef, SerializationType,
+ TaskDef, TaskIo, TaskType,
+};
+pub use validate::{is_valid, validate, Severity, ValidationError};
diff --git a/aimdb-codegen/src/mermaid.rs b/aimdb-codegen/src/mermaid.rs
new file mode 100644
index 00000000..4b7c4b01
--- /dev/null
+++ b/aimdb-codegen/src/mermaid.rs
@@ -0,0 +1,286 @@
+//! Mermaid diagram generator
+//!
+//! Converts an [`ArchitectureState`] into a `flowchart LR` Mermaid diagram
+//! following the conventions defined in the architecture conventions document.
+
+use crate::state::{ArchitectureState, BufferType, ConnectorDirection};
+
+/// Generate a Mermaid `flowchart LR` diagram from architecture state.
+///
+/// The returned string can be written directly to `.aimdb/architecture.mermaid`.
+///
+/// # Conventions
+/// - Stadium `(["…"])` = SpmcRing
+/// - Rounded rect `("…")` = SingleLatest
+/// - Diamond `{"…"}` = Mailbox
+/// - Solid arrows → data flow (produce / consume)
+/// - Dashed arrows → connector metadata (link_to / link_from)
+pub fn generate_mermaid(state: &ArchitectureState) -> String {
+ let mut out = String::new();
+
+ out.push_str("flowchart LR\n");
+
+ // ── Record nodes ──────────────────────────────────────────────────────────
+ if !state.records.is_empty() {
+ out.push_str(
+ "\n %% ── Records ────────────────────────────────────────────────────────────\n",
+ );
+ }
+ for rec in &state.records {
+ let node_id = node_id(&rec.name);
+ let label = format!("{}\\n{}", rec.name, rec.buffer.label(rec.capacity));
+ let node_def = match rec.buffer {
+ BufferType::SpmcRing => format!(" {node_id}([\"{label}\"])"),
+ BufferType::SingleLatest => format!(" {node_id}(\"{label}\")"),
+ BufferType::Mailbox => format!(" {node_id}{{\"{label}\"}}"),
+ };
+ out.push_str(&node_def);
+ out.push('\n');
+ }
+
+ // ── Data flow arrows ──────────────────────────────────────────────────────
+ if state
+ .records
+ .iter()
+ .any(|r| !r.producers.is_empty() || !r.consumers.is_empty())
+ {
+ out.push_str(
+ "\n %% ── Data flow (solid arrows) ──────────────────────────────────────────\n",
+ );
+ }
+ for rec in &state.records {
+ let nid = node_id(&rec.name);
+ for producer in &rec.producers {
+ let pid = sanitize_id(producer);
+ out.push_str(&format!(" {pid} -->|produce| {nid}\n"));
+ }
+ for consumer in &rec.consumers {
+ let cid = sanitize_id(consumer);
+ out.push_str(&format!(" {nid} -->|consume| {cid}\n"));
+ }
+ }
+
+ // ── Connector metadata (dashed arrows) ────────────────────────────────────
+ let has_connectors = state.records.iter().any(|r| !r.connectors.is_empty());
+ if has_connectors {
+ out.push_str(
+ "\n %% ── Connector metadata (dashed arrows) ────────────────────────────────\n",
+ );
+ // Collect unique protocol bus node names
+ let mut protocols_seen: Vec<String> = Vec::new();
+ for rec in &state.records {
+ for conn in &rec.connectors {
+ let bus = conn.protocol.to_uppercase();
+ if !protocols_seen.contains(&bus) {
+ protocols_seen.push(bus);
+ }
+ }
+ }
+ for rec in &state.records {
+ let nid = node_id(&rec.name);
+ for conn in &rec.connectors {
+ let bus = conn.protocol.to_uppercase();
+ let url = &conn.url;
+ match conn.direction {
+ ConnectorDirection::Outbound => {
+ out.push_str(&format!(" {nid} -.->|\"link_to {url}\"| {bus}\n"));
+ }
+ ConnectorDirection::Inbound => {
+ out.push_str(&format!(" {bus} -.->|\"link_from {url}\"| {nid}\n"));
+ }
+ }
+ }
+ }
+ }
+
+ out
+}
+
+/// Derive a stable Mermaid node ID from a record name.
+///
+/// Converts PascalCase to SCREAMING_SNAKE_CASE, e.g.
+/// `TemperatureReading` → `TEMPERATURE_READING`.
+pub fn node_id(name: &str) -> String {
+ let mut out = String::new();
+ let chars: Vec<char> = name.chars().collect();
+ for (i, &c) in chars.iter().enumerate() {
+ if c.is_uppercase()
+ && i > 0
+ && (chars[i - 1].is_lowercase() || chars[i - 1].is_ascii_digit())
+ {
+ out.push('_');
+ }
+ out.push(c.to_ascii_uppercase());
+ }
+ out
+}
+
+/// Sanitize an arbitrary identifier for use as a Mermaid node ID.
+///
+/// Replaces hyphens and spaces with underscores, removes other non-alphanumeric chars.
+fn sanitize_id(s: &str) -> String {
+ s.chars()
+ .map(|c| {
+ if c.is_alphanumeric() || c == '_' {
+ c
+ } else {
+ '_'
+ }
+ })
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::state::ArchitectureState;
+
+ const SAMPLE_TOML: &str = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor", "outdoor", "garage"]
+producers = ["sensor_task"]
+consumers = ["dashboard", "anomaly_detector"]
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature in degrees Celsius"
+
+[[records.connectors]]
+protocol = "mqtt"
+direction = "outbound"
+url = "mqtt://sensors/temp/{variant}"
+
+[[records]]
+name = "OtaCommand"
+buffer = "Mailbox"
+key_prefix = "device.ota."
+key_variants = ["gateway-01"]
+producers = ["cloud_ota_service"]
+consumers = ["device_update_task"]
+
+[[records.fields]]
+name = "action"
+type = "String"
+description = "Command action"
+
+[[records.connectors]]
+protocol = "mqtt"
+direction = "inbound"
+url = "mqtt://ota/cmd/{variant}"
+
+[[records]]
+name = "FirmwareVersion"
+buffer = "SingleLatest"
+key_prefix = "device.firmware."
+key_variants = ["gateway-01"]
+producers = ["cloud_service"]
+consumers = ["updater"]
+
+[[records.fields]]
+name = "version"
+type = "String"
+description = "Semantic version"
+"#;
+
+ fn state() -> ArchitectureState {
+ ArchitectureState::from_toml(SAMPLE_TOML).unwrap()
+ }
+
+ #[test]
+ fn contains_flowchart_header() {
+ let out = generate_mermaid(&state());
+ assert!(
+ out.starts_with("flowchart LR\n"),
+ "Must start with flowchart LR"
+ );
+ }
+
+ #[test]
+ fn spmc_ring_uses_stadium_shape() {
+ let out = generate_mermaid(&state());
+ // Stadium: ([" ... "])
+ assert!(
+ out.contains("TEMPERATURE_READING([\"TemperatureReading\\nSpmcRing · 256\"])"),
+ "SpmcRing node should use stadium shape:\n{out}"
+ );
+ }
+
+ #[test]
+ fn mailbox_uses_diamond_shape() {
+ let out = generate_mermaid(&state());
+ assert!(
+ out.contains("OTA_COMMAND{\"OtaCommand\\nMailbox\"}"),
+ "Mailbox node should use diamond shape:\n{out}"
+ );
+ }
+
+ #[test]
+ fn single_latest_uses_rounded_rect() {
+ let out = generate_mermaid(&state());
+ assert!(
+ out.contains("FIRMWARE_VERSION(\"FirmwareVersion\\nSingleLatest\")"),
+ "SingleLatest node should use rounded rect:\n{out}"
+ );
+ }
+
+ #[test]
+ fn produce_arrows_present() {
+ let out = generate_mermaid(&state());
+ assert!(
+ out.contains("sensor_task -->|produce| TEMPERATURE_READING"),
+ "Producer arrow missing:\n{out}"
+ );
+ }
+
+ #[test]
+ fn consume_arrows_present() {
+ let out = generate_mermaid(&state());
+ assert!(
+ out.contains("TEMPERATURE_READING -->|consume| dashboard"),
+ "Consumer arrow missing:\n{out}"
+ );
+ assert!(
+ out.contains("TEMPERATURE_READING -->|consume| anomaly_detector"),
+ "Consumer arrow missing:\n{out}"
+ );
+ }
+
+ #[test]
+ fn outbound_connector_dashed_arrow() {
+ let out = generate_mermaid(&state());
+ assert!(
+ out.contains(
+ "TEMPERATURE_READING -.->|\"link_to mqtt://sensors/temp/{variant}\"| MQTT"
+ ),
+ "Outbound dashed arrow missing:\n{out}"
+ );
+ }
+
+ #[test]
+ fn inbound_connector_dashed_arrow() {
+ let out = generate_mermaid(&state());
+ assert!(
+ out.contains("MQTT -.->|\"link_from mqtt://ota/cmd/{variant}\"| OTA_COMMAND"),
+ "Inbound dashed arrow missing:\n{out}"
+ );
+ }
+
+ #[test]
+ fn node_id_pascal_to_screaming_snake() {
+ assert_eq!(node_id("TemperatureReading"), "TEMPERATURE_READING");
+ assert_eq!(node_id("OtaCommand"), "OTA_COMMAND");
+ assert_eq!(node_id("FirmwareVersion"), "FIRMWARE_VERSION");
+ assert_eq!(node_id("AppConfig"), "APP_CONFIG");
+ assert_eq!(node_id("Temp"), "TEMP");
+ }
+}
diff --git a/aimdb-codegen/src/rust.rs b/aimdb-codegen/src/rust.rs
new file mode 100644
index 00000000..2789a8cc
--- /dev/null
+++ b/aimdb-codegen/src/rust.rs
@@ -0,0 +1,2246 @@
+//! Rust source code generator
+//!
+//! Converts an [`ArchitectureState`] into compilable Rust source that uses the
+//! actual AimDB 0.5.x API: `#[derive(RecordKey)]`, `BufferCfg`, and
+//! `AimDbBuilder::configure()`.
+//!
+//! Uses [`quote`] for quasi-quoting token streams and [`prettyplease`] for
+//! formatting the output into idiomatic Rust.
+
+use proc_macro2::TokenStream;
+use quote::{format_ident, quote};
+
+use crate::state::{
+ ArchitectureState, ConnectorDef, ConnectorDirection, RecordDef, SerializationType, TaskDef,
+ TaskType,
+};
+
+// ── Public API ────────────────────────────────────────────────────────────────
+
+/// Render the complete flat-schema Rust source for `state`.
+///
+/// The returned string is intended to be written to `src/generated_schema.rs`
+/// and contains:
+/// - one `Value` struct per record (with `Serialize` / `Deserialize`)
+/// - one `Key` enum per record (with `#[derive(RecordKey)]`)
+/// - a `configure_schema()` function wiring all records into `AimDbBuilder`
+pub fn generate_rust(state: &ArchitectureState) -> String {
+    // Fixed banner marking the file as machine-generated.
+    const HEADER: &str = "\
+// @generated — do not edit manually.\n\
+// Source: .aimdb/state.toml — edit via `aimdb generate` or the architecture agent.\n\
+// Regenerate: `aimdb generate` or confirm a proposal in the architecture agent.\n\n";
+
+    let body = generate_rust_inner(state);
+    format!("{HEADER}{body}")
+}
+
+/// Generate `schema.rs` for a common crate (no `@generated` header).
+///
+/// Emits only the portable data-contract layer: value structs, key enums,
+/// `SchemaType` and `Linkable` impls. No `configure_schema`, no runtime deps.
+/// This keeps the common crate platform-agnostic (`no_std`-compatible).
+///
+/// Pure function: the caller owns file placement; nothing here touches the
+/// filesystem.
+pub fn generate_schema_rs(state: &ArchitectureState) -> String {
+    // No header is prepended: the common crate's schema.rs is regenerated
+    // wholesale rather than hand-edited.
+    generate_types_inner(state)
+}
+
+/// Types-only inner — value structs + key enums + trait impls, no `configure_schema`.
+fn generate_types_inner(state: &ArchitectureState) -> String {
+    let imports = emit_imports_types_only(state);
+
+    // One cluster of items per record: value struct, key enum, SchemaType impl,
+    // plus the optional Linkable / Observable / Settable impls.
+    let record_items: Vec<TokenStream> = state
+        .records
+        .iter()
+        .flat_map(|rec| {
+            let mut items = vec![emit_value_struct(rec), emit_key_enum(rec)];
+            items.push(emit_schema_type_impl(rec));
+            // Linkable is empty for `custom` serialization — skip it then.
+            let linkable = emit_linkable_impl(rec);
+            if !linkable.is_empty() {
+                items.push(linkable);
+            }
+            if let Some(obs) = emit_observable_impl(rec) {
+                items.push(obs);
+            }
+            if let Some(set) = emit_settable_impl(rec) {
+                items.push(set);
+            }
+            items
+        })
+        .collect();
+
+    let file_tokens = quote! {
+        #imports
+        #(#record_items)*
+    };
+
+    let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust");
+    prettyplease::unparse(&syntax_tree)
+}
+
+/// Full flat-schema inner — record items plus the `configure_schema` function.
+fn generate_rust_inner(state: &ArchitectureState) -> String {
+    let imports = emit_imports(state);
+
+    // One cluster of items per record: value struct, key enum, SchemaType impl,
+    // plus the optional Linkable / Observable / Settable impls.
+    let record_items: Vec<TokenStream> = state
+        .records
+        .iter()
+        .flat_map(|rec| {
+            let mut items = vec![emit_value_struct(rec), emit_key_enum(rec)];
+            items.push(emit_schema_type_impl(rec));
+            // Linkable is empty for `custom` serialization — skip it then.
+            let linkable = emit_linkable_impl(rec);
+            if !linkable.is_empty() {
+                items.push(linkable);
+            }
+            if let Some(obs) = emit_observable_impl(rec) {
+                items.push(obs);
+            }
+            if let Some(set) = emit_settable_impl(rec) {
+                items.push(set);
+            }
+            items
+        })
+        .collect();
+
+    let configure_fn = emit_configure_schema(state);
+
+    let file_tokens = quote! {
+        #imports
+        #(#record_items)*
+        #configure_fn
+    };
+
+    let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust");
+    prettyplease::unparse(&syntax_tree)
+}
+
+/// Generate `Cargo.toml` content for a common crate.
+///
+/// Requires `state.project` to be `Some`. The caller should validate this
+/// before calling.
+///
+/// Dependency/feature wiring is derived from the records:
+/// - any non-`custom` serialization → `aimdb-data-contracts` gains `linkable`
+/// - `postcard` serialization → unconditional `postcard` dep (no_std-friendly)
+/// - JSON without postcard → optional `serde_json`, enabled by the `std` feature
+/// - any observable record → `aimdb-data-contracts/observable` under `std`
+pub fn generate_cargo_toml(state: &ArchitectureState) -> String {
+    let project = state
+        .project
+        .as_ref()
+        .expect("generate_cargo_toml requires [project] block in state.toml");
+    let crate_name = format!("{}-common", project.name);
+    let edition = project.edition.as_deref().unwrap_or("2024");
+
+    // A record with no explicit serialization defaults to JSON, i.e. counts
+    // as non-custom here.
+    let has_non_custom_ser = state.records.iter().any(|r| {
+        r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom
+    });
+    let has_postcard = state
+        .records
+        .iter()
+        .any(|r| r.serialization.as_ref() == Some(&SerializationType::Postcard));
+    let has_observable = state.records.iter().any(|r| r.observable.is_some());
+
+    let mut data_contracts_features = Vec::new();
+    if has_non_custom_ser {
+        data_contracts_features.push("\"linkable\"");
+    }
+
+    // Rendered as `, features = [...]` appended to the dep spec, or empty.
+    let dc_features_str = if data_contracts_features.is_empty() {
+        String::new()
+    } else {
+        format!(", features = [{}]", data_contracts_features.join(", "))
+    };
+
+    // Build std feature deps
+    let mut std_deps = vec!["\"aimdb-data-contracts/std\"".to_string()];
+    if has_non_custom_ser && !has_postcard {
+        std_deps.push("\"serde_json\"".to_string());
+    }
+    if has_observable {
+        std_deps.push("\"aimdb-data-contracts/observable\"".to_string());
+    }
+    let std_features = std_deps.join(", ");
+
+    // Optional dependency lines appended verbatim under [dependencies].
+    let mut optional_deps = String::new();
+    if has_non_custom_ser && !has_postcard {
+        optional_deps.push_str("serde_json = { version = \"1.0\", optional = true }\n");
+    }
+    if has_postcard {
+        optional_deps.push_str(
+            "postcard = { version = \"1.0\", default-features = false, features = [\"alloc\"] }\n",
+        );
+    }
+
+    format!(
+        r#"# Regenerate with `aimdb generate --common-crate`
+[package]
+name = "{crate_name}"
+version = "0.1.0"
+edition = "{edition}"
+
+[features]
+default = ["std"]
+std = [{std_features}]
+alloc = []
+
+[dependencies]
+aimdb-core = {{ version = "0.5", default-features = false, features = ["derive", "alloc"] }}
+aimdb-data-contracts = {{ version = "0.5", default-features = false{dc_features_str} }}
+serde = {{ version = "1.0", default-features = false, features = ["derive", "alloc"] }}
+{optional_deps}"#
+    )
+}
+
+/// Generate `lib.rs` content for a common crate.
+///
+/// The file is small and fixed: it wires up conditional `no_std`, pulls in
+/// `alloc`, and re-exports everything from the generated `schema` module.
+pub fn generate_lib_rs() -> String {
+    String::from(concat!(
+        "// Regenerate with `aimdb generate --common-crate`\n",
+        "#![cfg_attr(not(feature = \"std\"), no_std)]\n",
+        "extern crate alloc;\n",
+        "\n",
+        "mod schema;\n",
+        "\n",
+        "// Re-export all public types for downstream crates\n",
+        "pub use schema::*;\n",
+    ))
+}
+
+// ── Binary crate generators ───────────────────────────────────────────────────
+
+/// Generate `src/main.rs` for the named binary crate.
+///
+/// Uses `quote!` + `prettyplease` for guaranteed idiomatic formatting.
+/// Requires the binary to exist in `state.binaries`. Returns `None` if not found.
+pub fn generate_main_rs(state: &ArchitectureState, binary_name: &str) -> Option<String> {
+    let bin = state.binaries.iter().find(|b| b.name == binary_name)?;
+    let project_name = state
+        .project
+        .as_ref()
+        .map(|p| p.name.as_str())
+        .unwrap_or("project");
+    let common_crate = format_ident!("{}", format!("{}_common", project_name.replace('-', "_")));
+
+    // Collect tasks belonging to this binary
+    let tasks: Vec<&TaskDef> = bin
+        .tasks
+        .iter()
+        .filter_map(|tname| state.tasks.iter().find(|t| &t.name == tname))
+        .collect();
+
+    let task_use_idents: Vec<proc_macro2::Ident> = bin
+        .tasks
+        .iter()
+        .map(|name| format_ident!("{}", name))
+        .collect();
+
+    // ── Connector use statements ─────────────────────────────────────────
+    let connector_use_stmts: Vec<TokenStream> = bin
+        .external_connectors
+        .iter()
+        .filter_map(|c| match c.protocol.as_str() {
+            "mqtt" => Some(quote! { use aimdb_mqtt_connector::MqttConnector; }),
+            "knx" => Some(quote! { use aimdb_knx_connector::KnxConnector; }),
+            "ws" => Some(quote! { use aimdb_websocket_connector::WebSocketConnector; }),
+            _ => None,
+        })
+        .collect();
+
+    // ── Connector env-var bindings + construction ────────────────────────
+    let connector_let_stmts: Vec<TokenStream> = bin
+        .external_connectors
+        .iter()
+        .map(|c| {
+            let var_ident = format_ident!("{}", c.env_var.to_lowercase());
+            let var_name = &c.env_var;
+            let default = &c.default;
+            // NOTE: `#var_ident` must be interpolated inside quote! — a bare
+            // `var_ident` would emit the literal identifier `var_ident` into
+            // the generated main.rs instead of the env-var binding.
+            let ctor: TokenStream = match c.protocol.as_str() {
+                "mqtt" => quote! { MqttConnector::new(#var_ident) },
+                "knx" => quote! { KnxConnector::new(#var_ident) },
+                "ws" => quote! {
+                    WebSocketConnector::new()
+                        .bind(#var_ident.parse::<std::net::SocketAddr>()
+                            .expect("invalid WebSocket bind address"))
+                        .path("/ws")
+                },
+                _ => {
+                    let msg = format!("build connector for protocol '{}'", c.protocol);
+                    quote! { todo!(#msg) }
+                }
+            };
+            let connector_ident = format_ident!("{}_connector", c.protocol);
+            quote! {
+                let #var_ident = std::env::var(#var_name)
+                    .unwrap_or_else(|_| #default.to_string());
+                let #connector_ident = #ctor;
+            }
+        })
+        .collect();
+
+    // ── .with_connector(...) chain calls ─────────────────────────────────
+    let with_connector_calls: Vec<TokenStream> = bin
+        .external_connectors
+        .iter()
+        .map(|c| {
+            let connector_ident = format_ident!("{}_connector", c.protocol);
+            quote! { .with_connector(#connector_ident) }
+        })
+        .collect();
+
+    // ── Task source registrations ────────────────────────────────────────
+    let task_registrations: Vec<TokenStream> = tasks
+        .iter()
+        .flat_map(|task| {
+            task.outputs.iter().flat_map(move |output| {
+                // An empty `variants` list on an output means "all of the
+                // record's key variants".
+                let variants: Vec<String> = if output.variants.is_empty() {
+                    state
+                        .records
+                        .iter()
+                        .find(|r| r.name == output.record)
+                        .map(|r| r.key_variants.clone())
+                        .unwrap_or_default()
+                } else {
+                    output.variants.clone()
+                };
+
+                let value_type = format_ident!("{}Value", output.record);
+                let key_type = format_ident!("{}Key", output.record);
+                let task_fn = format_ident!("{}", task.name);
+
+                variants.into_iter().map(move |variant| {
+                    let variant_ident = format_ident!("{}", to_pascal_case(&variant));
+                    quote! {
+                        builder.configure::<#value_type>(#key_type::#variant_ident, |reg| {
+                            reg.source(#task_fn);
+                        });
+                    }
+                })
+            })
+        })
+        .collect();
+
+    // ── Assemble via quote! ──────────────────────────────────────────────
+    let file_tokens = quote! {
+        use aimdb_core::{AimDbBuilder, DbResult};
+        use aimdb_tokio_adapter::TokioAdapter;
+        #(#connector_use_stmts)*
+        use std::sync::Arc;
+        use #common_crate::configure_schema;
+
+        mod tasks;
+        use tasks::{#(#task_use_idents),*};
+
+        #[tokio::main]
+        async fn main() -> DbResult<()> {
+            tracing_subscriber::fmt::init();
+
+            #(#connector_let_stmts)*
+
+            let runtime = Arc::new(TokioAdapter::new());
+
+            let mut builder = AimDbBuilder::new()
+                .runtime(runtime)
+                #(#with_connector_calls)*
+            ;
+
+            configure_schema(&mut builder);
+
+            #(#task_registrations)*
+
+            builder.run().await
+        }
+    };
+
+    let header = format!(
+        "// @generated — do not edit manually.\n\
+         // Source: .aimdb/state.toml\n\
+         // Regenerate: `aimdb generate --binary {binary_name}`\n\n"
+    );
+
+    let syntax_tree =
+        syn::parse2(file_tokens).expect("generate_main_rs: tokens should be valid Rust");
+    Some(format!("{header}{}", prettyplease::unparse(&syntax_tree)))
+}
+
+/// Generate `src/tasks.rs` scaffold for the named binary crate.
+///
+/// Uses `quote!` + `prettyplease` for guaranteed idiomatic formatting.
+/// This file is generated **once** — it has no `@generated` header and is
+/// then owned by the developer. Signatures must not be changed.
+/// Returns `None` if the binary is not found.
+pub fn generate_tasks_rs(state: &ArchitectureState, binary_name: &str) -> Option<String> {
+    let bin = state.binaries.iter().find(|b| b.name == binary_name)?;
+    let project_name = state
+        .project
+        .as_ref()
+        .map(|p| p.name.as_str())
+        .unwrap_or("project");
+    let common_crate = format_ident!("{}", format!("{}_common", project_name.replace('-', "_")));
+
+    // Collect tasks belonging to this binary
+    let tasks: Vec<&TaskDef> = bin
+        .tasks
+        .iter()
+        .filter_map(|tname| state.tasks.iter().find(|t| &t.name == tname))
+        .collect();
+
+    let task_fns: Vec<TokenStream> = tasks
+        .iter()
+        .map(|task| {
+            let fn_name = format_ident!("{}", task.name);
+
+            // Build parameter list: `ctx` first, then one Consumer per input
+            // and one Producer per output, each named after its record.
+            let mut params: Vec<TokenStream> = vec![quote! { ctx: RuntimeContext }];
+            for input in &task.inputs {
+                let arg_name = format_ident!("{}", to_snake_case(&input.record));
+                let value_type = format_ident!("{}Value", input.record);
+                params.push(quote! { #arg_name: Consumer<#value_type, TokioAdapter> });
+            }
+            for output in &task.outputs {
+                let arg_name = format_ident!("{}", to_snake_case(&output.record));
+                let value_type = format_ident!("{}Value", output.record);
+                params.push(quote! { #arg_name: Producer<#value_type, TokioAdapter> });
+            }
+
+            let todo_msg = match &task.task_type {
+                TaskType::Agent => "LLM agent stub — implement reasoning loop".to_string(),
+                _ => format!("implement: {}", task.description),
+            };
+
+            // Carry the task description over as a doc comment, if any.
+            let doc_attr = if task.description.is_empty() {
+                quote! {}
+            } else {
+                let desc = &task.description;
+                quote! { #[doc = #desc] }
+            };
+
+            quote! {
+                #doc_attr
+                pub async fn #fn_name(#(#params),*) -> DbResult<()> {
+                    todo!(#todo_msg)
+                }
+            }
+        })
+        .collect();
+
+    let file_tokens = quote! {
+        use aimdb_core::{Consumer, DbResult, Producer, RuntimeContext};
+        use aimdb_tokio_adapter::TokioAdapter;
+        use #common_crate::*;
+
+        #(#task_fns)*
+    };
+
+    let header = format!(
+        "// Implement the task bodies; signatures must not change.\n\
+         // Regenerate with `aimdb generate --binary {binary_name} --tasks-scaffold`\n\
+         // (only writes this file if it does not already exist)\n\n"
+    );
+
+    let syntax_tree =
+        syn::parse2(file_tokens).expect("generate_tasks_rs: tokens should be valid Rust");
+    Some(format!("{header}{}", prettyplease::unparse(&syntax_tree)))
+}
+
+/// Generate `Cargo.toml` content for a binary crate.
+///
+/// Derives dependencies from the binary's tasks and external connectors.
+/// Returns `None` if the binary is not found.
+pub fn generate_binary_cargo_toml(state: &ArchitectureState, binary_name: &str) -> Option<String> {
+    let bin = state.binaries.iter().find(|b| b.name == binary_name)?;
+    let project_name = state
+        .project
+        .as_ref()
+        .map(|p| p.name.as_str())
+        .unwrap_or("project");
+    let common_crate_name = format!("{project_name}-common");
+    // Cargo package names use '-', the dependency key uses '_'.
+    let common_crate_dep = common_crate_name.replace('-', "_");
+    let edition = state
+        .project
+        .as_ref()
+        .and_then(|p| p.edition.as_deref())
+        .unwrap_or("2024");
+
+    // One optional connector dependency per protocol actually used.
+    let has_mqtt = bin.external_connectors.iter().any(|c| c.protocol == "mqtt");
+    let has_knx = bin.external_connectors.iter().any(|c| c.protocol == "knx");
+    let has_ws = bin.external_connectors.iter().any(|c| c.protocol == "ws");
+
+    let mut optional_connector_deps = String::new();
+    if has_mqtt {
+        optional_connector_deps.push_str(
+            "aimdb-mqtt-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
+        );
+    }
+    if has_knx {
+        optional_connector_deps.push_str(
+            "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
+        );
+    }
+    if has_ws {
+        optional_connector_deps.push_str(
+            "aimdb-websocket-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
+        );
+    }
+
+    let out = format!(
+        "# @generated — do not edit manually.\n\
+# Source: .aimdb/state.toml — regenerate with `aimdb generate --binary {binary_name}`\n\
+[package]\n\
+name = \"{binary_name}\"\n\
+version = \"0.1.0\"\n\
+edition = \"{edition}\"\n\
+\n\
+[[bin]]\n\
+name = \"{binary_name}\"\n\
+path = \"src/main.rs\"\n\
+\n\
+[dependencies]\n\
+{common_crate_dep} = {{ path = \"../{common_crate_name}\" }}\n\
+aimdb-core = {{ version = \"0.5\" }}\n\
+aimdb-tokio-adapter = {{ version = \"0.5\", features = [\"tokio-runtime\"] }}\n\
+{optional_connector_deps}\
+tokio = {{ version = \"1\", features = [\"full\"] }}\n\
+tracing = \"0.1\"\n\
+tracing-subscriber = {{ version = \"0.3\", features = [\"env-filter\"] }}\n"
+    );
+
+    Some(out)
+}
+
+/// Imports for the types-only common crate schema — no runtime deps.
+fn emit_imports_types_only(state: &ArchitectureState) -> TokenStream {
+    // Trait imports are conditional on what the records actually use, so the
+    // generated file compiles without unused-import warnings.
+    let has_non_custom_ser = state.records.iter().any(|r| {
+        r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom
+    });
+    let has_observable = state.records.iter().any(|r| r.observable.is_some());
+    let has_settable = state
+        .records
+        .iter()
+        .any(|r| r.fields.iter().any(|f| f.settable));
+
+    let mut contract_traits: Vec<TokenStream> = vec![quote! { SchemaType }];
+    if has_non_custom_ser {
+        contract_traits.push(quote! { Linkable });
+    }
+    if has_observable {
+        contract_traits.push(quote! { Observable });
+    }
+    if has_settable {
+        contract_traits.push(quote! { Settable });
+    }
+
+    quote! {
+        use aimdb_core::RecordKey;
+        use aimdb_data_contracts::{#(#contract_traits),*};
+        use serde::{Deserialize, Serialize};
+    }
+}
+
+/// Imports for the full flat schema — includes runtime registration deps.
+fn emit_imports(state: &ArchitectureState) -> TokenStream {
+    let has_non_custom_ser = state.records.iter().any(|r| {
+        r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom
+    });
+    let has_observable = state.records.iter().any(|r| r.observable.is_some());
+    let has_settable = state
+        .records
+        .iter()
+        .any(|r| r.fields.iter().any(|f| f.settable));
+
+    // Build aimdb_data_contracts trait imports
+    let mut contract_traits: Vec<TokenStream> = vec![quote! { SchemaType }];
+    if has_non_custom_ser {
+        contract_traits.push(quote! { Linkable });
+    }
+    if has_observable {
+        contract_traits.push(quote! { Observable });
+    }
+    if has_settable {
+        contract_traits.push(quote! { Settable });
+    }
+
+    quote! {
+        use aimdb_core::buffer::BufferCfg;
+        use aimdb_core::builder::AimDbBuilder;
+        use aimdb_core::RecordKey;
+        use aimdb_data_contracts::{#(#contract_traits),*};
+        use aimdb_executor::Spawn;
+        use serde::{Deserialize, Serialize};
+    }
+}
+
+// ── Value struct ──────────────────────────────────────────────────────────────
+
+/// Emit the `{Record}Value` struct with `Serialize` / `Deserialize` derives.
+fn emit_value_struct(rec: &RecordDef) -> TokenStream {
+    let struct_name = format_ident!("{}Value", rec.name);
+    let doc = format!("Value type for `{}`.", rec.name);
+
+    // Records without fields yet get a unit placeholder so the generated
+    // struct still compiles.
+    let fields: Vec<TokenStream> = if rec.fields.is_empty() {
+        vec![emit_todo_field(
+            "add fields — use `propose_record` to define them via the architecture agent",
+        )]
+    } else {
+        rec.fields
+            .iter()
+            .map(|f| {
+                let fname = format_ident!("{}", f.name);
+                // Field types come straight from state.toml — fail loudly on
+                // invalid type syntax rather than emitting broken code.
+                let ftype: syn::Type = syn::parse_str(&f.field_type).unwrap_or_else(|_| {
+                    panic!("invalid type `{}` for field `{}`", f.field_type, f.name)
+                });
+                if f.description.is_empty() {
+                    quote! { pub #fname: #ftype, }
+                } else {
+                    let desc = &f.description;
+                    quote! {
+                        #[doc = #desc]
+                        pub #fname: #ftype,
+                    }
+                }
+            })
+            .collect()
+    };
+
+    quote! {
+        #[doc = #doc]
+        #[derive(Debug, Clone, Serialize, Deserialize)]
+        pub struct #struct_name {
+            #(#fields)*
+        }
+    }
+}
+
+/// Emit a dummy field with a TODO doc comment (for records with no fields yet).
+///
+/// Keeps the generated struct non-empty and compilable while the architecture
+/// agent has not defined any real fields.
+fn emit_todo_field(msg: &str) -> TokenStream {
+    let todo_doc = format!("TODO: {msg}");
+    quote! {
+        #[doc = #todo_doc]
+        pub _placeholder: (),
+    }
+}
+
+// ── Key enum ──────────────────────────────────────────────────────────────────
+
+/// Emit the `{Record}Key` enum with the `RecordKey` derive.
+fn emit_key_enum(rec: &RecordDef) -> TokenStream {
+    let enum_name = format_ident!("{}Key", rec.name);
+    // The RecordKey derive macro supports a single #[link_address] attribute.
+    // We use the first connector for that; additional connectors are resolved
+    // via standalone helper functions emitted by `emit_connector_address_fns`.
+    let connector = rec.connectors.first();
+
+    let key_prefix_attr = if !rec.key_prefix.is_empty() {
+        let prefix = &rec.key_prefix;
+        quote! { #[key_prefix = #prefix] }
+    } else {
+        quote! {}
+    };
+
+    // No variants yet → emit a single placeholder so the enum compiles.
+    let variants: Vec<TokenStream> = if rec.key_variants.is_empty() {
+        let doc = "TODO: add key variants — use the architecture agent to resolve them";
+        vec![quote! {
+            #[doc = #doc]
+            _Placeholder,
+        }]
+    } else {
+        rec.key_variants
+            .iter()
+            .map(|variant_str| {
+                let variant_name = format_ident!("{}", to_pascal_case(variant_str));
+                // Bake the first connector's expanded URL into #[link_address].
+                let link_attr = connector.map(|conn| {
+                    let addr = conn.url.replace("{variant}", variant_str);
+                    quote! { #[link_address = #addr] }
+                });
+                quote! {
+                    #[key = #variant_str]
+                    #link_attr
+                    #variant_name,
+                }
+            })
+            .collect()
+    };
+
+    let address_fns = emit_connector_address_fns(rec);
+
+    quote! {
+        #[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)]
+        #key_prefix_attr
+        pub enum #enum_name {
+            #(#variants)*
+        }
+
+        #address_fns
+    }
+}
+
+/// Emit standalone address-resolver functions for connectors beyond the first.
+///
+/// The first connector's addresses are baked into `#[link_address]` on the key
+/// enum and exposed via the `RecordKey::link_address()` trait method. Additional
+/// connectors get a `fn {record_snake}_{protocol}_address(key: &{Record}Key) -> Option<&'static str>`
+/// function that the configure block can call.
+fn emit_connector_address_fns(rec: &RecordDef) -> TokenStream {
+    if rec.connectors.len() <= 1 || rec.key_variants.is_empty() {
+        return quote! {};
+    }
+
+    let key_type = format_ident!("{}Key", rec.name);
+    let record_snake = to_snake_case(&rec.name);
+
+    let fns: Vec<TokenStream> = rec
+        .connectors
+        .iter()
+        .skip(1) // first connector uses link_address()
+        .map(|conn| {
+            let fn_name = format_ident!("{}_{}_address", record_snake, conn.protocol);
+            let doc = format!(
+                "Link address for `{}` — {} connector (`{}`).",
+                rec.name,
+                conn.protocol,
+                conn.direction_label(),
+            );
+
+            let arms: Vec<TokenStream> = rec
+                .key_variants
+                .iter()
+                .map(|variant_str| {
+                    let variant_ident = format_ident!("{}", to_pascal_case(variant_str));
+                    let addr = conn.url.replace("{variant}", variant_str);
+                    quote! { #key_type::#variant_ident => Some(#addr), }
+                })
+                .collect();
+
+            // Takes `&{Record}Key` — the configure block calls this with
+            // `#resolver_fn(&key)`; `#key_type` must be interpolated so the
+            // generated signature names the real enum, not `key_type`.
+            quote! {
+                #[doc = #doc]
+                pub fn #fn_name(key: &#key_type) -> Option<&'static str> {
+                    match key {
+                        #(#arms)*
+                    }
+                }
+            }
+        })
+        .collect();
+
+    quote! { #(#fns)* }
+}
+
+// ── configure_schema ──────────────────────────────────────────────────────────
+
+/// Emit the top-level `configure_schema()` function (one block per record).
+fn emit_configure_schema(state: &ArchitectureState) -> TokenStream {
+    let record_blocks: Vec<TokenStream> = state
+        .records
+        .iter()
+        .map(emit_record_configure_block)
+        .collect();
+
+    quote! {
+        /// Register all architecture-agent-defined records on the builder.
+        ///
+        /// Generated from `.aimdb/state.toml`. Configures buffer types and connector
+        /// addresses. Producers, consumers, serializers, and deserializers contain
+        /// business logic and must be provided by application code — they are not
+        /// generated here.
+        pub fn configure_schema(builder: &mut AimDbBuilder) {
+            #(#record_blocks)*
+        }
+    }
+}
+
+/// Emit the `configure_schema` section for a single record: buffer setup plus
+/// connector wiring for every key variant.
+fn emit_record_configure_block(rec: &RecordDef) -> TokenStream {
+    if rec.key_variants.is_empty() {
+        let msg = format!("TODO: {}: no key variants defined yet", rec.name);
+        return quote! {
+            // placeholder — keeps the TODO visible in the generated source
+            let _ = (#msg,);
+        };
+    }
+
+    let value_type = format_ident!("{}Value", rec.name);
+    let key_type = format_ident!("{}Key", rec.name);
+    let buffer_tokens = rec.buffer.to_tokens(rec.capacity);
+
+    let variant_idents: Vec<proc_macro2::Ident> = rec
+        .key_variants
+        .iter()
+        .map(|v| format_ident!("{}", to_pascal_case(v)))
+        .collect();
+
+    let is_custom = rec
+        .serialization
+        .as_ref()
+        .map(|s| s == &SerializationType::Custom)
+        .unwrap_or(false);
+
+    if rec.connectors.is_empty() {
+        // No connectors: just buffer
+        return quote! {
+            for key in [
+                #(#key_type::#variant_idents,)*
+            ] {
+                builder.configure::<#value_type>(key, |reg| {
+                    reg.buffer(#buffer_tokens);
+                });
+            }
+        };
+    }
+
+    // ── Pre-extract addresses ────────────────────────────────────────────
+    // First connector uses `key.link_address()` (from RecordKey derive).
+    // Additional connectors use generated helper functions.
+    let record_snake = to_snake_case(&rec.name);
+
+    let addr_extractions: Vec<TokenStream> = rec
+        .connectors
+        .iter()
+        .enumerate()
+        .map(|(i, conn)| {
+            let addr_var = format_ident!("addr_{}", i);
+            if i == 0 {
+                quote! {
+                    let #addr_var = key.link_address().map(|s| s.to_string());
+                }
+            } else {
+                let resolver_fn = format_ident!("{}_{}_address", record_snake, conn.protocol);
+                quote! {
+                    let #addr_var = #resolver_fn(&key).map(|s| s.to_string());
+                }
+            }
+        })
+        .collect();
+
+    // ── Build the configure closure body ─────────────────────────────────
+    //
+    // `reg.buffer()` consumes the `&mut` borrow and returns a builder, so
+    // everything must be a single fluent chain starting from `reg.buffer(...)`.
+    // We build two branches: one with connectors wired (when all addresses
+    // resolve), one plain buffer fallback.
+    let linked_chain =
+        emit_connector_chain(&rec.connectors, &value_type, &buffer_tokens, is_custom);
+    let addr_conditions: Vec<TokenStream> = (0..rec.connectors.len())
+        .map(|i| {
+            let addr_var = format_ident!("addr_{}", i);
+            quote! { #addr_var.as_deref() }
+        })
+        .collect();
+
+    // For a single connector: `if let Some(addr) = addr_0.as_deref() { chain } else { buffer }`
+    // For multiple connectors: nest or tuple-match the conditions.
+    let body = if rec.connectors.len() == 1 {
+        let cond = &addr_conditions[0];
+        quote! {
+            if let Some(addr_0) = #cond {
+                #linked_chain
+            } else {
+                reg.buffer(#buffer_tokens);
+            }
+        }
+    } else {
+        // Multiple connectors: match a tuple of Options.
+        // When ALL addresses are present, wire the full chain.
+        // Otherwise fall back to buffer-only.
+        let some_bindings: Vec<TokenStream> = (0..rec.connectors.len())
+            .map(|i| {
+                let binding = format_ident!("addr_{}", i);
+                quote! { Some(#binding) }
+            })
+            .collect();
+        quote! {
+            match (#(#addr_conditions),*) {
+                (#(#some_bindings),*) => {
+                    #linked_chain
+                }
+                _ => {
+                    reg.buffer(#buffer_tokens);
+                }
+            }
+        }
+    };
+
+    quote! {
+        for key in [
+            #(#key_type::#variant_idents,)*
+        ] {
+            #(#addr_extractions)*
+            builder.configure::<#value_type>(key, |reg| {
+                #body
+            });
+        }
+    }
+}
+
+/// Build the full fluent chain: `reg.buffer(...).link_X(addr_0)...link_Y(addr_1)...`
+///
+/// All connector links are chained off a single `reg.buffer()` call so there
+/// is only one mutable borrow of `reg`. Address variables `addr_0`, `addr_1`,
+/// etc. are assumed to be in scope as `&str`.
+fn emit_connector_chain(
+    connectors: &[ConnectorDef],
+    value_type: &syn::Ident,
+    buffer_tokens: &TokenStream,
+    is_custom: bool,
+) -> TokenStream {
+    // Start the chain with reg.buffer(...)
+    let mut chain = quote! { reg.buffer(#buffer_tokens) };
+
+    for (i, conn) in connectors.iter().enumerate() {
+        let addr_var = format_ident!("addr_{}", i);
+
+        if is_custom {
+            // Custom serialization: we cannot generate the (de)serializer, so
+            // leave an explicit TODO in the generated source instead.
+            let todo_comment = match conn.direction {
+                ConnectorDirection::Outbound => {
+                    "TODO: chain .link_to(...).with_serializer(...) — serialization = \"custom\""
+                }
+                ConnectorDirection::Inbound => {
+                    "TODO: chain .link_from(...).with_deserializer(...) — serialization = \"custom\""
+                }
+            };
+            // Can't chain a TODO into the builder, so just emit a let-binding comment
+            // after the chain. We'll terminate the chain with `;` below.
+            chain = quote! {
+                #chain;
+                let _ = (#todo_comment, #addr_var)
+            };
+        } else {
+            match conn.direction {
+                ConnectorDirection::Inbound => {
+                    chain = quote! {
+                        #chain
+                            .link_from(#addr_var)
+                            .with_deserializer(#value_type::from_bytes)
+                            .finish()
+                    };
+                }
+                ConnectorDirection::Outbound => {
+                    // `#value_type` must be interpolated here — a bare
+                    // `value_type` would emit a nonexistent type name into the
+                    // generated closure signature.
+                    chain = quote! {
+                        #chain
+                            .link_to(#addr_var)
+                            .with_serializer(|v: #value_type| {
+                                v.to_bytes()
+                                    .map_err(|_| aimdb_core::connector::SerializeError::InvalidData)
+                            })
+                            .finish()
+                    };
+                }
+            }
+        }
+    }
+
+    // Terminate the chain
+    quote! { #chain; }
+}
+
+// ── Trait implementations ────────────────────────────────────────────────────
+
+/// `impl SchemaType` — wire name (snake_case of the record) plus schema version.
+fn emit_schema_type_impl(rec: &RecordDef) -> TokenStream {
+    let value_ident = format_ident!("{}Value", rec.name);
+    let wire_name = to_snake_case(&rec.name);
+    // Records without an explicit schema_version default to version 1.
+    let version_lit = proc_macro2::Literal::u32_unsuffixed(rec.schema_version.unwrap_or(1));
+
+    quote! {
+        impl SchemaType for #value_ident {
+            const NAME: &'static str = #wire_name;
+            const VERSION: u32 = #version_lit;
+        }
+    }
+}
+
+/// Dispatch the `Linkable` impl on the record's serialization choice.
+///
+/// `custom` serialization yields an empty stream — the application provides
+/// its own (de)serializers in that case.
+fn emit_linkable_impl(rec: &RecordDef) -> TokenStream {
+    let serialization = rec
+        .serialization
+        .as_ref()
+        .unwrap_or(&SerializationType::Json);
+
+    if serialization == &SerializationType::Custom {
+        return quote! {};
+    }
+    if serialization == &SerializationType::Postcard {
+        emit_linkable_postcard(rec)
+    } else {
+        emit_linkable_json(rec)
+    }
+}
+
+/// `impl Linkable` via `serde_json` — std-only; the no_std branch returns an
+/// error directing the user to enable `std` or switch to postcard.
+fn emit_linkable_json(rec: &RecordDef) -> TokenStream {
+    let struct_name = format_ident!("{}Value", rec.name);
+    quote! {
+        impl Linkable for #struct_name {
+            fn to_bytes(&self) -> Result<alloc::vec::Vec<u8>, alloc::string::String> {
+                #[cfg(feature = "std")]
+                {
+                    serde_json::to_vec(self)
+                        .map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME))
+                }
+                #[cfg(not(feature = "std"))]
+                {
+                    Err(alloc::string::String::from(
+                        "no_std serialization not available — enable the std feature or use postcard",
+                    ))
+                }
+            }
+
+            fn from_bytes(data: &[u8]) -> Result<Self, alloc::string::String> {
+                #[cfg(feature = "std")]
+                {
+                    serde_json::from_slice(data)
+                        .map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME))
+                }
+                #[cfg(not(feature = "std"))]
+                {
+                    let _ = data;
+                    Err(alloc::string::String::from(
+                        "no_std deserialization not available — enable the std feature or use postcard",
+                    ))
+                }
+            }
+        }
+    }
+}
+
+/// `impl Linkable` via `postcard` — works in both std and no_std builds.
+fn emit_linkable_postcard(rec: &RecordDef) -> TokenStream {
+    let struct_name = format_ident!("{}Value", rec.name);
+    quote! {
+        impl Linkable for #struct_name {
+            fn to_bytes(&self) -> Result<alloc::vec::Vec<u8>, alloc::string::String> {
+                postcard::to_allocvec(self)
+                    .map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME))
+            }
+
+            fn from_bytes(data: &[u8]) -> Result<Self, alloc::string::String> {
+                postcard::from_bytes(data)
+                    .map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME))
+            }
+        }
+    }
+}
+
+/// Emit an `Observable` impl when the record declares one.
+///
+/// Returns `None` when the record has no `observable` block, or when the
+/// declared signal field is missing / has an unparseable type.
+fn emit_observable_impl(rec: &RecordDef) -> Option<TokenStream> {
+    let obs = rec.observable.as_ref()?;
+    let struct_name = format_ident!("{}Value", rec.name);
+
+    // Look up signal field type
+    let signal_field = rec.fields.iter().find(|f| f.name == obs.signal_field)?;
+    let signal_type: syn::Type = syn::parse_str(&signal_field.field_type).ok()?;
+    let signal_ident = format_ident!("{}", obs.signal_field);
+
+    let icon = &obs.icon;
+    let unit = &obs.unit;
+
+    // Timestamp heuristic: first u64 field named timestamp/computed_at/fetched_at
+    let timestamp_names = ["timestamp", "computed_at", "fetched_at"];
+    let timestamp_field = rec
+        .fields
+        .iter()
+        .find(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str()));
+
+    // Include the timestamp in the log line only when the record has one.
+    let format_log_body = if let Some(ts) = timestamp_field {
+        let ts_ident = format_ident!("{}", ts.name);
+        quote! {
+            alloc::format!(
+                "{} [{}] {}: {:.1}{} at {}",
+                Self::ICON,
+                node_id,
+                Self::NAME,
+                self.signal(),
+                Self::UNIT,
+                self.#ts_ident,
+            )
+        }
+    } else {
+        quote! {
+            alloc::format!(
+                "{} [{}] {}: {:.1}{}",
+                Self::ICON,
+                node_id,
+                Self::NAME,
+                self.signal(),
+                Self::UNIT,
+            )
+        }
+    };
+
+    Some(quote! {
+        impl Observable for #struct_name {
+            type Signal = #signal_type;
+            const ICON: &'static str = #icon;
+            const UNIT: &'static str = #unit;
+
+            fn signal(&self) -> #signal_type {
+                self.#signal_ident
+            }
+
+            fn format_log(&self, node_id: &str) -> alloc::string::String {
+                #format_log_body
+            }
+        }
+    })
+}
+
+/// Emit a `Settable` impl when any field is marked `settable`.
+///
+/// `Settable::Value` is the single settable field's type, or a tuple of all
+/// settable field types in declaration order. Returns `None` when nothing is
+/// settable.
+fn emit_settable_impl(rec: &RecordDef) -> Option<TokenStream> {
+    let settable_fields: Vec<_> = rec.fields.iter().filter(|f| f.settable).collect();
+    if settable_fields.is_empty() {
+        return None;
+    }
+
+    let struct_name = format_ident!("{}Value", rec.name);
+
+    // Build the Value type
+    let settable_types: Vec<syn::Type> = settable_fields
+        .iter()
+        .map(|f| {
+            syn::parse_str(&f.field_type).unwrap_or_else(|_| {
+                panic!(
+                    "invalid type `{}` for settable field `{}`",
+                    f.field_type, f.name
+                )
+            })
+        })
+        .collect();
+
+    let value_type: TokenStream = if settable_types.len() == 1 {
+        let t = &settable_types[0];
+        quote! { #t }
+    } else {
+        quote! { (#(#settable_types),*) }
+    };
+
+    // Timestamp heuristic: first u64 field named timestamp/computed_at/fetched_at
+    let timestamp_names = ["timestamp", "computed_at", "fetched_at"];
+    let timestamp_field = rec
+        .fields
+        .iter()
+        .find(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str()));
+
+    // Build field assignments for `set()`
+    let mut settable_idx = 0usize;
+    let field_assignments: Vec<TokenStream> = rec
+        .fields
+        .iter()
+        .map(|f| {
+            let fname = format_ident!("{}", f.name);
+            if timestamp_field.map(|tf| tf.name == f.name).unwrap_or(false) && !f.settable {
+                // This is the timestamp field — fill from parameter
+                quote! { #fname: timestamp, }
+            } else if f.settable {
+                // Single settable field binds `value` directly; multiple bind
+                // by tuple index in declaration order.
+                let assignment = if settable_fields.len() == 1 {
+                    quote! { value }
+                } else {
+                    let idx = syn::Index::from(settable_idx);
+                    quote! { value.#idx }
+                };
+                settable_idx += 1;
+                quote! { #fname: #assignment, }
+            } else {
+                // Non-settable, non-timestamp field: use Default
+                quote! { #fname: Default::default(), }
+            }
+        })
+        .collect();
+
+    Some(quote! {
+        impl Settable for #struct_name {
+            type Value = #value_type;
+
+            fn set(value: Self::Value, timestamp: u64) -> Self {
+                Self {
+                    #(#field_assignments)*
+                }
+            }
+        }
+    })
+}
+
+// ── Utilities ─────────────────────────────────────────────────────────────────
+
/// Convert a PascalCase string to snake_case.
///
/// An underscore is inserted before every uppercase character except the
/// first, and all characters are lowercased.
///
/// # Examples
/// ```
/// # use aimdb_codegen::rust::to_snake_case;
/// assert_eq!(to_snake_case("WeatherObservation"), "weather_observation");
/// assert_eq!(to_snake_case("OtaCommand"), "ota_command");
/// assert_eq!(to_snake_case("Temperature"), "temperature");
/// ```
pub fn to_snake_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len() + 4);
    for (pos, ch) in s.chars().enumerate() {
        if pos > 0 && ch.is_uppercase() {
            out.push('_');
        }
        out.extend(ch.to_lowercase());
    }
    out
}
+
/// Convert a kebab-case or snake_case string to PascalCase.
///
/// Splits on `-` and `_`, uppercases the first character of each segment,
/// and concatenates the results. Empty segments contribute nothing.
///
/// # Examples
/// ```
/// # use aimdb_codegen::rust::to_pascal_case;
/// assert_eq!(to_pascal_case("indoor"), "Indoor");
/// assert_eq!(to_pascal_case("gateway-01"), "Gateway01");
/// assert_eq!(to_pascal_case("sensor-hub-01"), "SensorHub01");
/// assert_eq!(to_pascal_case("sensor_hub_01"), "SensorHub01");
/// ```
pub fn to_pascal_case(s: &str) -> String {
    let mut out = String::new();
    for segment in s.split(['-', '_']) {
        let mut chars = segment.chars();
        if let Some(head) = chars.next() {
            out.extend(head.to_uppercase());
            out.push_str(chars.as_str());
        }
    }
    out
}
+
+// ── Hub crate generators ──────────────────────────────────────────────────────
+//
+// These functions derive a complete hub binary crate scaffold from state.toml
+// without requiring `[[tasks]]` or `[[binaries]]` entries in state.
+//
+// Hub-internal tasks are identified automatically:
+// - Producers of records with INBOUND connectors → external (stations), skipped
+// - Producers of records with OUTBOUND or no connectors → hub-internal tasks
+
+/// Returns the set of hub-internal task names derived from state.
+///
+/// A task is hub-internal if it appears as a producer of any record that has
+/// no inbound connector (i.e. the hub itself writes that record).
+fn hub_task_names(state: &ArchitectureState) -> Vec {
+ // Collect external producer names: those that produce records with inbound connectors
+ use std::collections::HashSet;
+ let external_producers: HashSet<&str> = state
+ .records
+ .iter()
+ .filter(|r| {
+ r.connectors
+ .iter()
+ .any(|c| matches!(c.direction, ConnectorDirection::Inbound))
+ })
+ .flat_map(|r| r.producers.iter().map(|p| p.as_str()))
+ .collect();
+
+ // Hub tasks: appear as producer of a non-inbound record
+ let mut seen = HashSet::new();
+ let mut tasks: Vec = state
+ .records
+ .iter()
+ .filter(|r| {
+ !r.connectors
+ .iter()
+ .any(|c| matches!(c.direction, ConnectorDirection::Inbound))
+ })
+ .flat_map(|r| r.producers.iter().cloned())
+ .filter(|p| !external_producers.contains(p.as_str()))
+ .filter(|p| seen.insert(p.clone()))
+ .collect();
+
+ // Also include any consumer of a record that is not a known external producer
+ for rec in &state.records {
+ for consumer in &rec.consumers {
+ let consumer = consumer.clone();
+ if !external_producers.contains(consumer.as_str()) && seen.insert(consumer.clone()) {
+ tasks.push(consumer);
+ }
+ }
+ }
+
+ tasks
+}
+
+/// Generate `src/schema.rs` for the hub binary crate.
+///
+/// Contains only the `configure_schema` function — no type definitions.
+/// Types are imported from the project's common crate.
+pub fn generate_hub_schema_rs(state: &ArchitectureState) -> String {
+ let project = state
+ .project
+ .as_ref()
+ .expect("generate_hub_schema_rs requires [project] block in state.toml");
+ let common_crate = format_ident!("{}", project.name.replace('-', "_") + "_common");
+
+ let configure_fn = emit_configure_schema(state);
+
+ let file_tokens = quote! {
+ use aimdb_core::buffer::BufferCfg;
+ use aimdb_core::builder::AimDbBuilder;
+ use aimdb_executor::Spawn;
+ use #common_crate::*;
+
+ #configure_fn
+ };
+
+ let header = "// @generated — do not edit manually.\n\
+// Source: .aimdb/state.toml — regenerate with `aimdb generate --hub`.\n\n";
+
+ let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust");
+ format!("{header}{}", prettyplease::unparse(&syntax_tree))
+}
+
+/// Generate `Cargo.toml` for the hub binary crate (`{project.name}-hub`).
+pub fn generate_hub_cargo_toml(state: &ArchitectureState) -> String {
+ let project = state
+ .project
+ .as_ref()
+ .expect("generate_hub_cargo_toml requires [project] block in state.toml");
+ let hub_crate = format!("{}-hub", project.name);
+ let common_crate_name = format!("{}-common", project.name);
+ let edition = project.edition.as_deref().unwrap_or("2024");
+
+ let has_mqtt = state
+ .records
+ .iter()
+ .any(|r| r.connectors.iter().any(|c| c.protocol == "mqtt"));
+ let has_knx = state
+ .records
+ .iter()
+ .any(|r| r.connectors.iter().any(|c| c.protocol == "knx"));
+ let has_ws = state
+ .records
+ .iter()
+ .any(|r| r.connectors.iter().any(|c| c.protocol == "ws"));
+
+ let mut connector_deps = String::new();
+ if has_mqtt {
+ connector_deps.push_str(
+ "aimdb-mqtt-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
+ );
+ }
+ if has_knx {
+ connector_deps.push_str(
+ "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
+ );
+ }
+ if has_ws {
+ connector_deps.push_str(
+ "aimdb-websocket-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
+ );
+ }
+
+ format!(
+ "# @generated — do not edit manually.\n\
+# Source: .aimdb/state.toml — regenerate with `aimdb generate --hub`\n\
+[package]\n\
+name = \"{hub_crate}\"\n\
+version = \"0.1.0\"\n\
+edition = \"{edition}\"\n\
+description = \"Hub binary for {project_name}\"\n\
+publish = false\n\
+\n\
+[[bin]]\n\
+name = \"{hub_crate}\"\n\
+path = \"src/main.rs\"\n\
+\n\
+[dependencies]\n\
+{common_crate_name} = {{ path = \"../{common_crate_name}\" }}\n\
+aimdb-core = {{ version = \"0.5\" }}\n\
+aimdb-data-contracts = {{ version = \"0.5\", features = [\"linkable\"] }}\n\
+aimdb-tokio-adapter = {{ version = \"0.5\", features = [\"tokio-runtime\"] }}\n\
+{connector_deps}\
+tokio = {{ version = \"1\", features = [\"full\"] }}\n\
+tracing = \"0.1\"\n\
+tracing-subscriber = {{ version = \"0.3\", features = [\"env-filter\"] }}\n",
+ project_name = project.name
+ )
+}
+
+/// Generate `src/main.rs` for the hub binary crate.
+///
+/// Uses `quote!` + `prettyplease` for guaranteed idiomatic formatting —
+/// the same pipeline as the rest of the codegen (no raw format strings).
+pub fn generate_hub_main_rs(state: &ArchitectureState) -> String {
+ let project = state
+ .project
+ .as_ref()
+ .expect("generate_hub_main_rs requires [project] block in state.toml");
+ let common_crate = format_ident!("{}", project.name.replace('-', "_") + "_common");
+
+ // ── Protocol detection ────────────────────────────────────────────────
+ let has_mqtt = state
+ .records
+ .iter()
+ .any(|r| r.connectors.iter().any(|c| c.protocol == "mqtt"));
+ let has_knx = state
+ .records
+ .iter()
+ .any(|r| r.connectors.iter().any(|c| c.protocol == "knx"));
+ let has_ws = state
+ .records
+ .iter()
+ .any(|r| r.connectors.iter().any(|c| c.protocol == "ws"));
+
+ // ── Connector use statements ──────────────────────────────────────────
+ let connector_use_stmts: Vec = {
+ let mut v = vec![];
+ if has_mqtt {
+ v.push(quote! { use aimdb_mqtt_connector::MqttConnector; });
+ }
+ if has_knx {
+ v.push(quote! { use aimdb_knx_connector::KnxConnector; });
+ }
+ if has_ws {
+ v.push(quote! { use aimdb_websocket_connector::WebSocketConnector; });
+ }
+ v
+ };
+
+ // ── Connector env-var bindings ────────────────────────────────────────
+ let connector_let_stmts: TokenStream = {
+ let mut ts = TokenStream::new();
+ if has_mqtt {
+ ts.extend(quote! {
+ let mqtt_broker =
+ std::env::var("MQTT_BROKER").unwrap_or_else(|_| "localhost".to_string());
+ let mqtt_url = format!("mqtt://{}", mqtt_broker);
+ });
+ }
+ if has_knx {
+ ts.extend(quote! {
+ let knx_gateway = std::env::var("KNX_GATEWAY")
+ .unwrap_or_else(|_| "224.0.23.12:3671".to_string());
+ });
+ }
+ if has_ws {
+ ts.extend(quote! {
+ let ws_bind: std::net::SocketAddr = std::env::var("WS_BIND")
+ .unwrap_or_else(|_| "0.0.0.0:8080".to_string())
+ .parse()
+ .expect("invalid WS_BIND address");
+ });
+ }
+ ts
+ };
+
+ // ── .with_connector(...) chain entries ────────────────────────────────
+ let with_connector_calls: Vec = {
+ let mut v = vec![];
+ if has_mqtt {
+ v.push(quote! { .with_connector(MqttConnector::new(&mqtt_url)) });
+ }
+ if has_knx {
+ v.push(quote! { .with_connector(KnxConnector::new(&knx_gateway)) });
+ }
+ if has_ws {
+ v.push(quote! { .with_connector(WebSocketConnector::new().bind(ws_bind).path("/ws")) });
+ }
+ v
+ };
+
+ // ── Inline record configure blocks (the node graph) ───────────────────
+ let record_blocks: Vec = state
+ .records
+ .iter()
+ .map(|r| emit_hub_record_configure_block(r, state))
+ .collect();
+
+ // ── String literals ───────────────────────────────────────────────────
+ let log_filter = format!(
+ "{}_hub=info,aimdb_core=info",
+ project.name.replace('-', "_")
+ );
+ let startup_msg = format!("Starting {} hub", project.name);
+
+ // ── Assemble via quote! — prettyplease formats the whole file ─────────
+ let file_tokens = quote! {
+ use aimdb_core::{buffer::BufferCfg, AimDbBuilder, DbResult, RecordKey};
+ use aimdb_data_contracts::Linkable;
+ use aimdb_tokio_adapter::{TokioAdapter, TokioRecordRegistrarExt};
+ #(#connector_use_stmts)*
+ use std::sync::Arc;
+ use #common_crate::*;
+
+ mod tasks;
+ use tasks::*;
+
+ #[tokio::main]
+ async fn main() -> DbResult<()> {
+ tracing_subscriber::fmt()
+ .with_env_filter(
+ tracing_subscriber::EnvFilter::try_from_default_env()
+ .unwrap_or_else(|_| #log_filter.into()),
+ )
+ .init();
+
+ tracing::info!(#startup_msg);
+
+ #connector_let_stmts
+
+ let runtime = Arc::new(TokioAdapter::new()?);
+
+ let mut builder = AimDbBuilder::new()
+ .runtime(runtime)
+ #(#with_connector_calls)*
+ ;
+
+ #(#record_blocks)*
+
+ builder.run().await
+ }
+ };
+
+ let header = "// @generated — do not edit manually.\n\
+// Source: .aimdb/state.toml\n\
+// Regenerate: `aimdb generate --hub`\n\n";
+
+ let syntax_tree =
+ syn::parse2(file_tokens).expect("generate_hub_main_rs: tokens should be valid Rust");
+ format!("{header}{}", prettyplease::unparse(&syntax_tree))
+}
+
+/// Hub-specific record configure block.
+///
+/// For records produced by a `[[tasks]]`-defined hub task, emits per-variant
+/// individual `builder.configure(...)` calls using `.transform()` or
+/// `.transform_join()`. For all other records (inbound connector or external
+/// source) falls back to the regular loop-based configure block.
+fn emit_hub_record_configure_block(rec: &RecordDef, state: &ArchitectureState) -> TokenStream {
+ if rec.key_variants.is_empty() {
+ let msg = format!("TODO: {}: no key variants defined yet", rec.name);
+ return quote! { let _ = (#msg,); };
+ }
+
+ // Find a task in [[tasks]] whose outputs include this record
+ let producing_task = state
+ .tasks
+ .iter()
+ .find(|t| t.outputs.iter().any(|o| o.record == rec.name));
+
+ match producing_task {
+ Some(task) => emit_transform_configure_block(rec, task),
+ None => emit_record_configure_block(rec),
+ }
+}
+
+/// Emit per-variant configure blocks for a hub-task-produced record.
+///
+/// Generates individual (non-loop) `builder.configure(...)` calls so that
+/// each variant can reference its specific input keys for `.transform()` /
+/// `.transform_join()`.
+fn emit_transform_configure_block(rec: &RecordDef, task: &TaskDef) -> TokenStream {
+ let value_type = format_ident!("{}Value", rec.name);
+ let key_type = format_ident!("{}Key", rec.name);
+ let buffer_tokens = rec.buffer.to_tokens(rec.capacity);
+
+ // Only emit connector chain for outbound connectors
+ let has_outbound = rec
+ .connectors
+ .iter()
+ .any(|c| matches!(c.direction, ConnectorDirection::Outbound));
+ let outbound_chain = if has_outbound {
+ quote! {
+ .link_to(addr)
+ .with_serializer(|v: value_type| {
+ v.to_bytes()
+ .map_err(|_| aimdb_core::connector::SerializeError::InvalidData)
+ })
+ .finish()
+ }
+ } else {
+ quote! {}
+ };
+
+ let variant_idents: Vec = rec
+ .key_variants
+ .iter()
+ .map(|v| format_ident!("{}", to_pascal_case(v)))
+ .collect();
+
+ let per_variant: Vec = variant_idents
+ .iter()
+ .map(|variant_ident| {
+ let transform_call = build_transform_call(task, variant_ident);
+
+ if has_outbound {
+ let outbound = outbound_chain.clone();
+ quote! {
+ {
+ let link_addr = #key_type::#variant_ident
+ .link_address()
+ .map(|s| s.to_string());
+ builder.configure::<#value_type>(#key_type::#variant_ident, |reg| {
+ if let Some(addr) = link_addr.as_deref() {
+ reg.buffer(#buffer_tokens)
+ #transform_call
+ #outbound;
+ } else {
+ reg.buffer(#buffer_tokens)
+ #transform_call;
+ }
+ });
+ }
+ }
+ } else {
+ quote! {
+ builder.configure::<#value_type>(#key_type::#variant_ident, |reg| {
+ reg.buffer(#buffer_tokens)
+ #transform_call;
+ });
+ }
+ }
+ })
+ .collect();
+
+ quote! { #(#per_variant)* }
+}
+
+/// Build the `.transform(...)` or `.transform_join(...)` call for one variant.
+///
+/// - 1 input → `.transform::(InputKey::Variant, |b| b.map(task_transform))`
+/// - N inputs → `.transform_join(|j| j.input::<...>(Key::Variant)....on_trigger(task_handler))`
+fn build_transform_call(task: &TaskDef, variant_ident: &syn::Ident) -> TokenStream {
+ if task.inputs.len() != 1 {
+ // Multi-input → transform_join
+ let handler_ident = format_ident!("{}_handler", task.name);
+ let input_calls: Vec = task
+ .inputs
+ .iter()
+ .map(|inp| {
+ let in_val = format_ident!("{}Value", inp.record);
+ let in_key = format_ident!("{}Key", inp.record);
+ quote! { .input::<#in_val>(#in_key::#variant_ident) }
+ })
+ .collect();
+ quote! {
+ .transform_join(|j| {
+ j #(#input_calls)*
+ .with_state(())
+ .on_trigger(#handler_ident)
+ })
+ }
+ } else {
+ // Single-input → transform + map
+ let handler_ident = format_ident!("{}_transform", task.name);
+ let inp = &task.inputs[0];
+ let in_val = format_ident!("{}Value", inp.record);
+ let in_key = format_ident!("{}Key", inp.record);
+ quote! {
+ .transform::<#in_val, _>(#in_key::#variant_ident, |b| b.map(#handler_ident))
+ }
+ }
+}
+
+/// Generate `src/tasks.rs` stub for the hub binary crate.
+///
+/// This file is generated **once** — it is not overwritten if it already exists.
+/// Task handler signatures are derived from `[[tasks]]` in state.toml:
+///
+/// | Inputs | Outputs | API | Generated stub |
+/// |--------|---------|-----------------------|---------------------------|
+/// | N > 1 | ≥ 1 | `.transform_join()` | `fn task_handler(JoinTrigger, &mut (), &Producer)` |
+/// | 1 | ≥ 1 | `.transform().map()` | `fn task_transform(&Input) -> Option` |
+/// | 0 | ≥ 1 | `.source()` | `async fn task(RuntimeContext, Producer)` |
+/// | ≥ 1 | 0 | `.tap()` | `async fn task(RuntimeContext, Consumer)` |
+pub fn generate_hub_tasks_rs(state: &ArchitectureState) -> String {
+ let project = state
+ .project
+ .as_ref()
+ .expect("generate_hub_tasks_rs requires [project] block in state.toml");
+ let common_crate = format!("{}_common", project.name.replace('-', "_"));
+
+ let mut fns = String::new();
+ let mut handled: std::collections::HashSet = std::collections::HashSet::new();
+
+ for task in &state.tasks {
+ handled.insert(task.name.clone());
+ let n_in = task.inputs.len();
+ let n_out = task.outputs.len();
+
+ let out_t = task
+ .outputs
+ .first()
+ .map(|o| format!("{}Value", o.record))
+ .unwrap_or_else(|| "()".to_string());
+ let in_t = task
+ .inputs
+ .first()
+ .map(|i| format!("{}Value", i.record))
+ .unwrap_or_else(|| "()".to_string());
+
+ if !task.description.is_empty() {
+ fns.push_str(&format!("/// {}\n", task.description));
+ }
+
+ if n_in > 1 && n_out >= 1 {
+ // Multi-input → join handler
+ // Returns Pin> — the only concrete return type that satisfies
+ // the for<'a,'b> HRTB on on_trigger. `-> impl Future` does NOT work here.
+ let handler = format!("{}_handler", task.name);
+ let inputs_doc = task
+ .inputs
+ .iter()
+ .enumerate()
+ .map(|(i, inp)| format!(" index {i} = {}", inp.record))
+ .collect::>()
+ .join(", ");
+ fns.push_str(&format!(
+ "/// Join handler — match `trigger.index()` to identify which input fired:\n\
+/// {inputs_doc}\n\
+pub fn {handler}(\n\
+ _trigger: aimdb_core::transform::JoinTrigger,\n\
+ _state: &mut (),\n\
+ _producer: &aimdb_core::Producer<{out_t}, TokioAdapter>,\n\
+) -> std::pin::Pin + Send + 'static>> {{\n\
+ Box::pin(async move {{ todo!(\"implement {handler}\") }})\n\
+}}\n\n"
+ ));
+ } else if n_in == 1 && n_out >= 1 {
+ // Single-input → map transform
+ let handler = format!("{}_transform", task.name);
+ let input_rec = &task.inputs[0].record;
+ let output_rec = task
+ .outputs
+ .first()
+ .map(|o| o.record.as_str())
+ .unwrap_or("?");
+ fns.push_str(&format!(
+ "/// Transform: {input_rec} → {output_rec}\n\
+/// Return `Some(value)` to emit, `None` to skip this input.\n\
+pub fn {handler}(input: &{in_t}) -> Option<{out_t}> {{\n\
+ let _ = input;\n\
+ todo!(\"implement {handler}\")\n\
+}}\n\n"
+ ));
+ } else if n_in == 0 && n_out >= 1 {
+ // Pure source
+ fns.push_str(&format!(
+ "pub async fn {}(\n\
+ _ctx: aimdb_core::RuntimeContext,\n\
+ _producer: aimdb_core::Producer<{out_t}, TokioAdapter>,\n\
+) {{\n\
+ todo!(\"implement {}\")\n\
+}}\n\n",
+ task.name, task.name
+ ));
+ } else if n_in >= 1 && n_out == 0 {
+ // Pure sink / tap
+ fns.push_str(&format!(
+ "pub async fn {}(\n\
+ _ctx: aimdb_core::RuntimeContext,\n\
+ _consumer: aimdb_core::Consumer<{in_t}, TokioAdapter>,\n\
+) {{\n\
+ todo!(\"implement {}\")\n\
+}}\n\n",
+ task.name, task.name
+ ));
+ }
+ }
+
+ // Fallback: any hub tasks NOT in [[tasks]] get a minimal stub
+ for task_name in hub_task_names(state) {
+ if handled.contains(&task_name) {
+ continue;
+ }
+ fns.push_str(&format!(
+ "/// Hub task: add a `[[tasks]]` entry in state.toml for a typed stub.\n\
+pub async fn {task_name}() {{\n\
+ todo!(\"implement {task_name}\")\n\
+}}\n\n"
+ ));
+ }
+
+ format!(
+ "// Implement task bodies; signatures are derived from state.toml [[tasks]].\n\
+// This file is scaffolded once — it will not be overwritten on subsequent runs.\n\
+// Regenerate signatures: delete this file, then run `aimdb generate --hub`.\n\
+\n\
+use aimdb_tokio_adapter::TokioAdapter;\n\
+use {common_crate}::*;\n\
+\n\
+{fns}"
+ )
+}
+
+// ── Tests ─────────────────────────────────────────────────────────────────────
+
#[cfg(test)]
mod tests {
    use super::*;
    use crate::state::ArchitectureState;

    // Baseline fixture: two records exercising outbound (SpmcRing) and
    // inbound (Mailbox) connector paths, plus kebab-case key variants.
    const SAMPLE_TOML: &str = r#"
[meta]
aimdb_version = "0.5.0"
created_at = "2026-02-22T14:00:00Z"
last_modified = "2026-02-22T14:33:00Z"

[[records]]
name = "TemperatureReading"
buffer = "SpmcRing"
capacity = 256
key_prefix = "sensors.temp."
key_variants = ["indoor", "outdoor", "garage"]
producers = ["sensor_task"]
consumers = ["dashboard", "anomaly_detector"]

[[records.fields]]
name = "celsius"
type = "f64"
description = "Temperature in degrees Celsius"

[[records.fields]]
name = "humidity_percent"
type = "f64"
description = "Relative humidity 0-100"

[[records.fields]]
name = "timestamp"
type = "u64"
description = "Unix timestamp in milliseconds"

[[records.connectors]]
protocol = "mqtt"
direction = "outbound"
url = "mqtt://sensors/temp/{variant}"

[[records]]
name = "OtaCommand"
buffer = "Mailbox"
key_prefix = "device.ota."
key_variants = ["gateway-01", "sensor-hub-01"]
producers = ["cloud_ota"]
consumers = ["updater"]

[[records.fields]]
name = "action"
type = "String"
description = "Command: update, rollback, reboot"

[[records.fields]]
name = "target_version"
type = "String"
description = "Target firmware version"

[[records.connectors]]
protocol = "mqtt"
direction = "inbound"
url = "mqtt://ota/cmd/{variant}"
"#;

    // Fixture helpers — parse the sample state and run the full generator.
    fn state() -> ArchitectureState {
        ArchitectureState::from_toml(SAMPLE_TOML).unwrap()
    }

    fn generated() -> String {
        generate_rust(&state())
    }

    #[test]
    fn has_generated_header() {
        let out = generated();
        assert!(
            out.contains("@generated"),
            "Missing @generated header:\n{out}"
        );
    }

    #[test]
    fn has_imports() {
        let out = generated();
        assert!(
            out.contains("use aimdb_core::buffer::BufferCfg;"),
            "Missing BufferCfg import:\n{out}"
        );
        assert!(
            out.contains("use aimdb_core::builder::AimDbBuilder;"),
            "Missing AimDbBuilder import:\n{out}"
        );
        assert!(
            out.contains("use aimdb_core::RecordKey;"),
            "Missing RecordKey import:\n{out}"
        );
        assert!(
            out.contains("use aimdb_executor::Spawn;"),
            "Missing Spawn import:\n{out}"
        );
        assert!(
            out.contains("use serde::{Deserialize, Serialize};"),
            "Missing serde import:\n{out}"
        );
    }

    #[test]
    fn value_struct_generated() {
        let out = generated();
        assert!(
            out.contains("pub struct TemperatureReadingValue"),
            "Missing TemperatureReadingValue struct:\n{out}"
        );
        assert!(
            out.contains("pub celsius: f64,"),
            "Missing celsius field:\n{out}"
        );
        assert!(
            out.contains("pub humidity_percent: f64,"),
            "Missing humidity_percent field:\n{out}"
        );
        assert!(
            out.contains("pub timestamp: u64,"),
            "Missing timestamp field:\n{out}"
        );
        assert!(
            out.contains("#[derive(Debug, Clone, Serialize, Deserialize)]"),
            "Missing derives:\n{out}"
        );
    }

    #[test]
    fn key_enum_generated() {
        let out = generated();
        assert!(
            out.contains("pub enum TemperatureReadingKey"),
            "Missing key enum:\n{out}"
        );
        assert!(
            out.contains("#[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)]"),
            "Missing RecordKey derive:\n{out}"
        );
        assert!(
            out.contains("#[key_prefix = \"sensors.temp.\"]"),
            "Missing key_prefix:\n{out}"
        );
        assert!(
            out.contains("#[key = \"indoor\"]"),
            "Missing indoor key attr:\n{out}"
        );
        assert!(
            out.contains("#[key = \"outdoor\"]"),
            "Missing outdoor key attr:\n{out}"
        );
        assert!(
            out.contains("#[key = \"garage\"]"),
            "Missing garage key attr:\n{out}"
        );
        assert!(out.contains("Indoor,"), "Missing Indoor variant:\n{out}");
        assert!(out.contains("Outdoor,"), "Missing Outdoor variant:\n{out}");
        assert!(out.contains("Garage,"), "Missing Garage variant:\n{out}");
    }

    #[test]
    fn link_address_substituted_per_variant() {
        let out = generated();
        assert!(
            out.contains("#[link_address = \"mqtt://sensors/temp/indoor\"]"),
            "link_address not substituted for indoor:\n{out}"
        );
        assert!(
            out.contains("#[link_address = \"mqtt://sensors/temp/outdoor\"]"),
            "link_address not substituted for outdoor:\n{out}"
        );
        assert!(
            out.contains("#[link_address = \"mqtt://sensors/temp/garage\"]"),
            "link_address not substituted for garage:\n{out}"
        );
    }

    #[test]
    fn kebab_variants_to_pascal_case() {
        let out = generated();
        assert!(
            out.contains("pub enum OtaCommandKey"),
            "Missing OtaCommandKey enum:\n{out}"
        );
        assert!(
            out.contains("Gateway01,"),
            "gateway-01 should become Gateway01:\n{out}"
        );
        assert!(
            out.contains("SensorHub01,"),
            "sensor-hub-01 should become SensorHub01:\n{out}"
        );
    }

    #[test]
    fn configure_schema_function_present() {
        let out = generated();
        assert!(
            out.contains("pub fn configure_schema(builder: &mut AimDbBuilder)"),
            "Missing configure_schema function:\n{out}"
        );
    }

    #[test]
    fn configure_schema_spmc_buffer() {
        let out = generated();
        // prettyplease may split struct literals across lines
        assert!(
            out.contains("BufferCfg::SpmcRing"),
            "Missing SpmcRing buffer call:\n{out}"
        );
        assert!(
            out.contains("capacity: 256"),
            "Missing capacity value:\n{out}"
        );
    }

    #[test]
    fn configure_schema_mailbox_buffer() {
        let out = generated();
        assert!(
            out.contains("BufferCfg::Mailbox"),
            "Missing Mailbox buffer call:\n{out}"
        );
    }

    #[test]
    fn configure_schema_outbound_link_to_with_serializer() {
        let out = generated();
        assert!(
            out.contains("link_to(addr_0)"),
            "Missing link_to call:\n{out}"
        );
        assert!(
            out.contains("with_serializer"),
            "Missing with_serializer call:\n{out}"
        );
        assert!(out.contains(".finish()"), "Missing .finish() call:\n{out}");
    }

    #[test]
    fn configure_schema_inbound_link_from_with_deserializer() {
        let out = generated();
        assert!(
            out.contains("link_from(addr_0)"),
            "Missing link_from call:\n{out}"
        );
        assert!(
            out.contains("with_deserializer(OtaCommandValue::from_bytes)"),
            "Missing with_deserializer call:\n{out}"
        );
    }

    #[test]
    fn configure_schema_key_variants_iterated() {
        let out = generated();
        assert!(
            out.contains("TemperatureReadingKey::Indoor"),
            "Missing Indoor in configure_schema:\n{out}"
        );
        assert!(
            out.contains("TemperatureReadingKey::Outdoor"),
            "Missing Outdoor in configure_schema:\n{out}"
        );
        assert!(
            out.contains("TemperatureReadingKey::Garage"),
            "Missing Garage in configure_schema:\n{out}"
        );
        assert!(
            out.contains("OtaCommandKey::Gateway01"),
            "Missing Gateway01 in configure_schema:\n{out}"
        );
    }

    // ── to_pascal_case ───────────────────────────────────────────────────────

    #[test]
    fn pascal_case_simple() {
        assert_eq!(to_pascal_case("indoor"), "Indoor");
        assert_eq!(to_pascal_case("outdoor"), "Outdoor");
    }

    #[test]
    fn pascal_case_kebab() {
        assert_eq!(to_pascal_case("gateway-01"), "Gateway01");
        assert_eq!(to_pascal_case("sensor-hub-01"), "SensorHub01");
    }

    #[test]
    fn pascal_case_snake() {
        assert_eq!(to_pascal_case("sensor_hub_01"), "SensorHub01");
    }

    #[test]
    fn pascal_case_already_capitalized() {
        assert_eq!(to_pascal_case("Indoor"), "Indoor");
    }

    /// Snapshot: print the full generated output for manual review.
    #[test]
    fn snapshot_full_output() {
        let out = generated();
        // Uncomment to inspect:
        // eprintln!("{out}");
        assert!(!out.is_empty());
    }

    // ── to_snake_case ───────────────────────────────────────────────────────

    #[test]
    fn snake_case_basic() {
        assert_eq!(to_snake_case("WeatherObservation"), "weather_observation");
        assert_eq!(to_snake_case("Temperature"), "temperature");
    }

    #[test]
    fn snake_case_acronym() {
        assert_eq!(to_snake_case("OtaCommand"), "ota_command");
    }

    // ── Extended TOML with new fields ───────────────────────────────────────

    // Extended fixture: exercises [project], schema_version, serialization,
    // [records.observable] and settable fields.
    const EXTENDED_TOML: &str = r#"
[project]
name = "weather-sentinel"

[meta]
aimdb_version = "0.5.0"
created_at = "2026-02-24T21:39:15Z"
last_modified = "2026-02-25T10:00:00Z"

[[records]]
name = "WeatherObservation"
buffer = "SpmcRing"
capacity = 256
key_prefix = "weather.observation."
key_variants = ["Vienna", "Munich"]
schema_version = 2
serialization = "json"

[records.observable]
signal_field = "temperature_celsius"
icon = "🌡️"
unit = "°C"

[[records.fields]]
name = "timestamp"
type = "u64"
description = "Unix timestamp in milliseconds"

[[records.fields]]
name = "temperature_celsius"
type = "f32"
description = "Air temperature"
settable = true

[[records.fields]]
name = "humidity_percent"
type = "f32"
description = "Relative humidity"
settable = true

[[records.connectors]]
protocol = "mqtt"
direction = "inbound"
url = "sensors/{variant}/observation"
"#;

    fn extended_state() -> ArchitectureState {
        ArchitectureState::from_toml(EXTENDED_TOML).unwrap()
    }

    fn extended_generated() -> String {
        generate_rust(&extended_state())
    }

    #[test]
    fn schema_type_impl_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl SchemaType for WeatherObservationValue"),
            "Missing SchemaType impl:\n{out}"
        );
        assert!(
            out.contains("\"weather_observation\""),
            "Missing schema name:\n{out}"
        );
        assert!(
            out.contains("VERSION: u32 = 2"),
            "Missing schema version:\n{out}"
        );
    }

    #[test]
    fn linkable_impl_json_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl Linkable for WeatherObservationValue"),
            "Missing Linkable impl:\n{out}"
        );
        assert!(
            out.contains("serde_json::to_vec"),
            "Missing serde_json::to_vec call:\n{out}"
        );
        assert!(
            out.contains("serde_json::from_slice"),
            "Missing serde_json::from_slice call:\n{out}"
        );
    }

    #[test]
    fn observable_impl_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl Observable for WeatherObservationValue"),
            "Missing Observable impl:\n{out}"
        );
        assert!(
            out.contains("self.temperature_celsius"),
            "Missing signal field access:\n{out}"
        );
        assert!(out.contains("\"°C\""), "Missing unit:\n{out}");
    }

    #[test]
    fn settable_impl_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl Settable for WeatherObservationValue"),
            "Missing Settable impl:\n{out}"
        );
        assert!(
            out.contains("(f32, f32)"),
            "Missing tuple value type:\n{out}"
        );
    }

    #[test]
    fn configure_schema_with_real_deserializer() {
        let out = extended_generated();
        assert!(
            out.contains("with_deserializer(WeatherObservationValue::from_bytes)"),
            "Missing with_deserializer for inbound connector:\n{out}"
        );
    }

    #[test]
    fn data_contracts_import_present() {
        let out = extended_generated();
        assert!(
            out.contains("use aimdb_data_contracts"),
            "Missing aimdb_data_contracts import:\n{out}"
        );
        assert!(
            out.contains("SchemaType"),
            "Missing SchemaType in import:\n{out}"
        );
    }

    #[test]
    fn generate_cargo_toml_output() {
        let state = extended_state();
        let toml = generate_cargo_toml(&state);
        assert!(
            toml.contains("weather-sentinel-common"),
            "Missing crate name:\n{toml}"
        );
        assert!(
            toml.contains("serde_json"),
            "Missing serde_json dep:\n{toml}"
        );
        assert!(
            toml.contains("linkable"),
            "Missing linkable feature:\n{toml}"
        );
    }

    #[test]
    fn generate_lib_rs_output() {
        let lib = generate_lib_rs();
        assert!(lib.contains("no_std"), "Missing no_std attribute:\n{lib}");
        assert!(
            lib.contains("extern crate alloc"),
            "Missing alloc extern:\n{lib}"
        );
        assert!(lib.contains("mod schema"), "Missing schema module:\n{lib}");
        assert!(
            lib.contains("pub use schema::*"),
            "Missing re-export:\n{lib}"
        );
    }

    #[test]
    fn schema_rs_has_no_generated_header() {
        let out = generate_schema_rs(&extended_state());
        assert!(
            !out.contains("@generated"),
            "schema.rs should not have @generated header:\n{out}"
        );
    }
}
diff --git a/aimdb-codegen/src/state.rs b/aimdb-codegen/src/state.rs
new file mode 100644
index 00000000..f69b2e8e
--- /dev/null
+++ b/aimdb-codegen/src/state.rs
@@ -0,0 +1,541 @@
+//! AimDB Codegen — architecture state types and TOML parser
+//!
+//! Deserialises `.aimdb/state.toml` into [`ArchitectureState`].
+
+use serde::{Deserialize, Serialize};
+
+// ── Top-level state ──────────────────────────────────────────────────────────
+
+/// The full contents of `.aimdb/state.toml`.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ArchitectureState {
+    /// Optional project metadata for common crate generation.
+    #[serde(default)]
+    pub project: Option<ProjectDef>,
+    /// `[meta]` block — the only required section.
+    pub meta: Meta,
+    /// `[[records]]` entries; empty when the section is absent.
+    #[serde(default)]
+    pub records: Vec<RecordDef>,
+    /// `[[tasks]]` entries; empty when the section is absent.
+    #[serde(default)]
+    pub tasks: Vec<TaskDef>,
+    /// `[[binaries]]` entries; empty when the section is absent.
+    #[serde(default)]
+    pub binaries: Vec<BinaryDef>,
+    /// `[[decisions]]` entries; empty when the section is absent.
+    #[serde(default)]
+    pub decisions: Vec<DecisionEntry>,
+}
+
+impl ArchitectureState {
+    /// Parse from a TOML string (the contents of `state.toml`).
+    ///
+    /// # Errors
+    /// Returns the `toml` deserialisation error when the input is malformed
+    /// or missing required keys (e.g. the `[meta]` block).
+    pub fn from_toml(s: &str) -> Result<Self, toml::de::Error> {
+        toml::from_str(s)
+    }
+
+    /// Serialise back to a TOML string.
+    ///
+    /// # Errors
+    /// Returns the `toml` serialisation error if the state cannot be
+    /// represented as TOML.
+    pub fn to_toml(&self) -> Result<String, toml::ser::Error> {
+        toml::to_string_pretty(self)
+    }
+}
+
+// ── Meta block ───────────────────────────────────────────────────────────────
+
+/// `[meta]` block — version and timestamps.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Meta {
+    pub aimdb_version: String, // e.g. "0.5.0"
+    pub created_at: String, // e.g. "2026-02-22T14:00:00Z"
+    pub last_modified: String, // e.g. "2026-02-22T14:33:00Z"
+}
+
+// ── Project metadata ─────────────────────────────────────────────────────────
+
+/// `[project]` block — drives common crate naming and Rust edition.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ProjectDef {
+    /// Project name, used for crate naming: `{name}-common`.
+    pub name: String,
+    /// Rust edition for the generated crate (default `"2024"` at codegen time).
+    #[serde(default)]
+    pub edition: Option<String>,
+}
+
+// ── Serialization type ───────────────────────────────────────────────────────
+
+/// Serialization format for `Linkable` trait generation.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
+#[serde(rename_all = "lowercase")] // TOML values: "json" | "postcard" | "custom"
+pub enum SerializationType {
+    /// JSON via `serde_json` (std-only, `no_std` fallback returns error).
+    #[default]
+    Json,
+    /// Binary via `postcard` (works in both std and `no_std`).
+    Postcard,
+    /// No generated `Linkable` impl — user provides their own.
+    Custom,
+}
+
+// ── Observable metadata ─────────────────────────────────────────────────────
+
+/// `[records.observable]` block — metadata for `Observable` trait generation.
+///
+/// Omitting the block in state.toml skips the `Observable` impl entirely.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ObservableDef {
+    /// Field name to use as `Observable::signal()` return value.
+    /// Must name an existing field of the record (checked by the validator).
+    pub signal_field: String,
+    /// Icon/emoji for log output (e.g. `"🌡️"`).
+    pub icon: String,
+    /// Unit label for the signal (e.g. `"°C"`).
+    pub unit: String,
+}
+
+// ── Record definition ────────────────────────────────────────────────────────
+
+/// One `[[records]]` entry.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct RecordDef {
+    /// PascalCase name, e.g. `TemperatureReading`.
+    pub name: String,
+    /// Buffer type selection.
+    pub buffer: BufferType,
+    /// Required when `buffer == SpmcRing`. Ignored otherwise.
+    #[serde(default)]
+    pub capacity: Option<usize>,
+    /// Common key prefix, e.g. `"sensors.temp."`.
+    #[serde(default)]
+    pub key_prefix: String,
+    /// Concrete key variant strings, e.g. `["indoor", "outdoor", "garage"]`.
+    #[serde(default)]
+    pub key_variants: Vec<String>,
+    /// Names of tasks that produce values into this record.
+    #[serde(default)]
+    pub producers: Vec<String>,
+    /// Names of tasks that consume values from this record.
+    #[serde(default)]
+    pub consumers: Vec<String>,
+
+    /// Schema version for `SchemaType::VERSION` (default 1).
+    #[serde(default)]
+    pub schema_version: Option<u32>,
+    /// Serialization format for `Linkable` generation (default `"json"`).
+    #[serde(default)]
+    pub serialization: Option<SerializationType>,
+    /// Observable trait metadata (omit to skip `Observable` impl).
+    #[serde(default)]
+    pub observable: Option<ObservableDef>,
+
+    /// Value struct fields (agent-derived from datasheets / specs / conversation).
+    #[serde(default)]
+    pub fields: Vec<FieldDef>,
+    /// External connector definitions.
+    #[serde(default)]
+    pub connectors: Vec<ConnectorDef>,
+}
+
+// ── Buffer type ──────────────────────────────────────────────────────────────
+
+/// The three AimDB buffer primitives.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub enum BufferType {
+    SpmcRing, // ring buffer; needs a capacity (see RecordDef::capacity)
+    SingleLatest, // keeps only the most recent value
+    Mailbox, // single-slot handoff
+}
+
+impl BufferType {
+    /// Human-readable label used in Mermaid node annotations.
+    ///
+    /// `capacity` is only consulted for [`BufferType::SpmcRing`]; a missing
+    /// capacity falls back to 256 (mirrors `rust_expr`/`to_tokens`).
+    pub fn label(&self, capacity: Option<usize>) -> String {
+        match self {
+            BufferType::SpmcRing => {
+                let cap = capacity.unwrap_or(256);
+                format!("SpmcRing · {cap}")
+            }
+            BufferType::SingleLatest => "SingleLatest".to_string(),
+            BufferType::Mailbox => "Mailbox".to_string(),
+        }
+    }
+
+    /// The `BufferCfg` expression emitted into generated Rust.
+    pub fn rust_expr(&self, capacity: Option<usize>) -> String {
+        match self {
+            BufferType::SpmcRing => {
+                let cap = capacity.unwrap_or(256);
+                format!("BufferCfg::SpmcRing {{ capacity: {cap} }}")
+            }
+            BufferType::SingleLatest => "BufferCfg::SingleLatest".to_string(),
+            BufferType::Mailbox => "BufferCfg::Mailbox".to_string(),
+        }
+    }
+
+    /// The `BufferCfg` expression as a token stream for use with `quote!`.
+    pub fn to_tokens(&self, capacity: Option<usize>) -> proc_macro2::TokenStream {
+        use quote::quote;
+        match self {
+            BufferType::SpmcRing => {
+                // Unsuffixed literal so the generated code reads `capacity: 256`.
+                let cap = proc_macro2::Literal::usize_unsuffixed(capacity.unwrap_or(256));
+                quote! { BufferCfg::SpmcRing { capacity: #cap } }
+            }
+            BufferType::SingleLatest => quote! { BufferCfg::SingleLatest },
+            BufferType::Mailbox => quote! { BufferCfg::Mailbox },
+        }
+    }
+}
+
+// ── Field definition ─────────────────────────────────────────────────────────
+
+/// One `[[records.fields]]` entry — a typed field in the value struct.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FieldDef {
+    pub name: String, // snake_case field name, e.g. "celsius"
+    /// Rust primitive type string, e.g. `"f64"`, `"u64"`, `"String"`, `"bool"`.
+    #[serde(rename = "type")]
+    pub field_type: String,
+    #[serde(default)]
+    pub description: String, // human-readable doc text; empty when omitted
+    /// Include this field in `Settable::Value` tuple (default `false`).
+    #[serde(default)]
+    pub settable: bool,
+}
+
+// ── Connector definition ─────────────────────────────────────────────────────
+
+/// One `[[records.connectors]]` entry.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ConnectorDef {
+    /// Protocol identifier lower-case, e.g. `"mqtt"`, `"knx"`.
+    pub protocol: String,
+    /// `"outbound"` → `link_to`, `"inbound"` → `link_from`.
+    pub direction: ConnectorDirection,
+    /// URL template, may contain `{variant}` placeholder.
+    pub url: String, // e.g. "mqtt://sensors/temp/{variant}"
+}
+
+impl ConnectorDef {
+    /// Human-readable direction label for doc comments.
+    /// Mirrors the serde spelling: `"outbound"` / `"inbound"`.
+    pub fn direction_label(&self) -> &'static str {
+        if self.direction == ConnectorDirection::Inbound {
+            "inbound"
+        } else {
+            "outbound"
+        }
+    }
+}
+
+/// Connector data flow direction.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(rename_all = "lowercase")] // TOML values: "outbound" | "inbound"
+pub enum ConnectorDirection {
+    Outbound, // record → external system (link_to)
+    Inbound, // external system → record (link_from)
+}
+
+// ── Task definition ──────────────────────────────────────────────────────────
+
+/// The functional role of a task — drives stub body generation.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
+#[serde(rename_all = "lowercase")] // TOML values: "transform" | "agent" | "source" | "tap"
+pub enum TaskType {
+    /// Reads one or more records, transforms them, writes to output records.
+    #[default]
+    Transform,
+    /// LLM-driven reasoning loop, flags anomalies, cross-correlates data.
+    Agent,
+    /// Fetches external data and writes values into a record.
+    Source,
+    /// Forwards, stores, or logs values — no output records in the DB.
+    Tap,
+}
+
+/// One `[[tasks.inputs]]` or `[[tasks.outputs]]` entry.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct TaskIo {
+    /// PascalCase record name, e.g. `"HourlyForecastPoint"`.
+    pub record: String,
+    /// Specific variants; empty (`[]`) means all variants of that record.
+    #[serde(default)]
+    pub variants: Vec<String>,
+}
+
+/// One `[[tasks]]` entry — describes an async task function.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct TaskDef {
+    /// snake_case function name, e.g. `"hub_validation_task"`.
+    pub name: String,
+    /// Functional classification — drives stub body.
+    #[serde(default)]
+    pub task_type: TaskType,
+    /// Human-readable description, used in doc comments and todo! msgs.
+    #[serde(default)]
+    pub description: String,
+    /// Records this task reads from.
+    #[serde(default)]
+    pub inputs: Vec<TaskIo>,
+    /// Records this task writes to.
+    #[serde(default)]
+    pub outputs: Vec<TaskIo>,
+}
+
+// ── Binary definition ────────────────────────────────────────────────────────
+
+/// One `[[binaries.external_connectors]]` entry — a runtime broker connection.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ExternalConnectorDef {
+    /// Protocol identifier, e.g. `"mqtt"`.
+    pub protocol: String,
+    /// Environment variable that provides the broker URL at runtime.
+    pub env_var: String,
+    /// Default URL when the env var is not set.
+    #[serde(default)]
+    pub default: String, // empty string when no fallback is configured
+}
+
+/// One `[[binaries]]` entry — a deployable binary crate.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct BinaryDef {
+    /// Directory name of the binary crate, e.g. `"weather-sentinel-hub"`.
+    /// The codegen derives the crate path as `../{name}/`.
+    pub name: String,
+    /// Task names belonging to this binary (must match `[[tasks]]` entries).
+    #[serde(default)]
+    pub tasks: Vec<String>,
+    /// Runtime broker connections needed by this binary.
+    #[serde(default)]
+    pub external_connectors: Vec<ExternalConnectorDef>,
+}
+
+// ── Decision log entry ───────────────────────────────────────────────────────
+
+/// One `[[decisions]]` entry — architectural rationale.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct DecisionEntry {
+    pub record: String, // record the decision applies to, e.g. "TemperatureReading"
+    pub field: String, // record attribute decided on, e.g. "buffer"
+    pub chosen: String, // value that was picked
+    pub alternative: String, // value that was rejected
+    pub reason: String, // free-text rationale
+    pub timestamp: String, // e.g. "2026-02-22T14:20:00Z"
+}
+
+// ── Tests ────────────────────────────────────────────────────────────────────
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Baseline fixture: two records + one decision, no [project] block.
+    const SAMPLE_TOML: &str = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor", "outdoor", "garage"]
+producers = ["sensor_task"]
+consumers = ["dashboard", "anomaly_detector"]
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature in degrees Celsius"
+
+[[records.fields]]
+name = "humidity_percent"
+type = "f64"
+description = "Relative humidity 0-100"
+
+[[records.fields]]
+name = "timestamp"
+type = "u64"
+description = "Unix timestamp in milliseconds"
+
+[[records.connectors]]
+protocol = "mqtt"
+direction = "outbound"
+url = "mqtt://sensors/temp/{variant}"
+
+[[records]]
+name = "OtaCommand"
+buffer = "Mailbox"
+key_prefix = "device.ota."
+key_variants = ["gateway-01"]
+producers = ["cloud_ota_service"]
+consumers = ["device_update_task"]
+
+[[records.fields]]
+name = "action"
+type = "String"
+description = "Command action"
+
+[[decisions]]
+record = "TemperatureReading"
+field = "buffer"
+chosen = "SpmcRing"
+alternative = "SingleLatest"
+reason = "Anomaly detector needs a sample window"
+timestamp = "2026-02-22T14:20:00Z"
+"#;
+
+    #[test]
+    fn parses_meta() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        assert_eq!(state.meta.aimdb_version, "0.5.0");
+        assert_eq!(state.meta.created_at, "2026-02-22T14:00:00Z");
+    }
+
+    #[test]
+    fn parses_records() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        assert_eq!(state.records.len(), 2);
+
+        let r = &state.records[0]; // TemperatureReading
+        assert_eq!(r.name, "TemperatureReading");
+        assert_eq!(r.buffer, BufferType::SpmcRing);
+        assert_eq!(r.capacity, Some(256));
+        assert_eq!(r.key_prefix, "sensors.temp.");
+        assert_eq!(r.key_variants, vec!["indoor", "outdoor", "garage"]);
+        assert_eq!(r.producers, vec!["sensor_task"]);
+        assert_eq!(r.consumers, vec!["dashboard", "anomaly_detector"]);
+    }
+
+    #[test]
+    fn parses_fields() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        let r = &state.records[0];
+        assert_eq!(r.fields.len(), 3);
+        assert_eq!(r.fields[0].name, "celsius");
+        assert_eq!(r.fields[0].field_type, "f64"); // serde-renamed from `type`
+        assert_eq!(r.fields[0].description, "Temperature in degrees Celsius");
+    }
+
+    #[test]
+    fn parses_connectors() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        let r = &state.records[0];
+        assert_eq!(r.connectors.len(), 1);
+        assert_eq!(r.connectors[0].protocol, "mqtt");
+        assert_eq!(r.connectors[0].direction, ConnectorDirection::Outbound);
+        assert_eq!(r.connectors[0].url, "mqtt://sensors/temp/{variant}");
+    }
+
+    #[test]
+    fn parses_decisions() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        assert_eq!(state.decisions.len(), 1);
+        assert_eq!(state.decisions[0].record, "TemperatureReading");
+        assert_eq!(state.decisions[0].chosen, "SpmcRing");
+    }
+
+    #[test]
+    fn buffer_label_spmc() {
+        assert_eq!(BufferType::SpmcRing.label(Some(256)), "SpmcRing · 256");
+    }
+
+    #[test]
+    fn buffer_label_single_latest() {
+        assert_eq!(BufferType::SingleLatest.label(None), "SingleLatest");
+    }
+
+    #[test]
+    fn buffer_rust_expr_mailbox() {
+        assert_eq!(BufferType::Mailbox.rust_expr(None), "BufferCfg::Mailbox");
+    }
+
+    // Extended fixture: exercises [project], observable, and settable fields.
+    const EXTENDED_TOML: &str = r#"
+[project]
+name = "weather-sentinel"
+
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-24T21:39:15Z"
+last_modified = "2026-02-25T10:00:00Z"
+
+[[records]]
+name = "WeatherObservation"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "weather.observation."
+key_variants = ["Vienna", "Munich"]
+schema_version = 2
+serialization = "json"
+
+[records.observable]
+signal_field = "temperature_celsius"
+icon = "🌡️"
+unit = "°C"
+
+[[records.fields]]
+name = "timestamp"
+type = "u64"
+description = "Unix timestamp in milliseconds"
+
+[[records.fields]]
+name = "temperature_celsius"
+type = "f32"
+description = "Air temperature"
+settable = true
+
+[[records.fields]]
+name = "humidity_percent"
+type = "f32"
+description = "Relative humidity"
+settable = true
+"#;
+
+    #[test]
+    fn parses_project_block() {
+        let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap();
+        let project = state.project.as_ref().unwrap();
+        assert_eq!(project.name, "weather-sentinel");
+        assert!(project.edition.is_none()); // edition omitted in fixture
+    }
+
+    #[test]
+    fn parses_schema_version_and_serialization() {
+        let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap();
+        let r = &state.records[0];
+        assert_eq!(r.schema_version, Some(2));
+        assert_eq!(r.serialization, Some(SerializationType::Json));
+    }
+
+    #[test]
+    fn parses_observable_block() {
+        let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap();
+        let obs = state.records[0].observable.as_ref().unwrap();
+        assert_eq!(obs.signal_field, "temperature_celsius");
+        assert_eq!(obs.icon, "🌡️");
+        assert_eq!(obs.unit, "°C");
+    }
+
+    #[test]
+    fn parses_settable_field() {
+        let state = ArchitectureState::from_toml(EXTENDED_TOML).unwrap();
+        let fields = &state.records[0].fields;
+        assert!(!fields[0].settable); // timestamp
+        assert!(fields[1].settable); // temperature_celsius
+        assert!(fields[2].settable); // humidity_percent
+    }
+
+    #[test]
+    fn project_block_is_optional() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        assert!(state.project.is_none());
+    }
+
+    #[test]
+    fn new_fields_default_when_absent() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        let r = &state.records[0];
+        assert!(r.schema_version.is_none());
+        assert!(r.serialization.is_none());
+        assert!(r.observable.is_none());
+        assert!(!r.fields[0].settable);
+    }
+
+    #[test]
+    fn round_trips_toml() {
+        let state = ArchitectureState::from_toml(SAMPLE_TOML).unwrap();
+        let serialised = state.to_toml().unwrap();
+        let state2 = ArchitectureState::from_toml(&serialised).unwrap();
+        assert_eq!(state.records.len(), state2.records.len());
+        assert_eq!(state.decisions.len(), state2.decisions.len());
+    }
+}
diff --git a/aimdb-codegen/src/validate.rs b/aimdb-codegen/src/validate.rs
new file mode 100644
index 00000000..1654d3d9
--- /dev/null
+++ b/aimdb-codegen/src/validate.rs
@@ -0,0 +1,754 @@
+//! Architecture state validator
+//!
+//! Checks an [`ArchitectureState`] for structural and semantic errors before
+//! code is generated or proposals are confirmed.
+
+use crate::state::{ArchitectureState, BufferType};
+
+/// A single validation problem.
+#[derive(Debug, Clone, PartialEq, Eq)] // PartialEq lets tests compare errors directly
+pub struct ValidationError {
+    /// Human-readable description of the problem.
+    pub message: String,
+    /// Location in state.toml that caused the error (e.g. `records[0].fields[1]`).
+    pub location: String,
+    /// Whether this blocks code generation (`Error`) or is advisory (`Warning`).
+    pub severity: Severity,
+}
+
+/// Severity of a [`ValidationError`].
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Severity {
+    /// Blocks code generation — generated code would be invalid or uncompilable.
+    Error, // rendered as "[ERROR]" by Display
+    /// Advisory — generated code may still work but behaviour could be unexpected.
+    Warning, // rendered as "[WARN]" by Display
+}
+
+impl std::fmt::Display for ValidationError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let tag = match self.severity {
+ Severity::Error => "ERROR",
+ Severity::Warning => "WARN",
+ };
+ write!(f, "[{}] {}: {}", tag, self.location, self.message)
+ }
+}
+
+/// Supported Rust field types for `records.fields[*].type`.
+pub const VALID_FIELD_TYPES: &[&str] = &[ // membership-checked in validate_records
+    "f64", "f32", "u8", "u16", "u32", "u64", "i8", "i16", "i32", "i64", "bool", "String",
+];
+
+/// Validate an [`ArchitectureState`] and return all problems found.
+///
+/// An empty `Vec` means the state is valid and codegen may proceed.
+/// Any entry with [`Severity::Error`] should block generation.
+pub fn validate(state: &ArchitectureState) -> Vec<ValidationError> {
+    let mut errors: Vec<ValidationError> = Vec::new();
+
+    validate_meta(state, &mut errors);
+    validate_records(state, &mut errors);
+    validate_tasks_and_binaries(state, &mut errors);
+
+    errors
+}
+
+/// Returns `true` if `validate()` produces no `Error`-severity issues.
+pub fn is_valid(state: &ArchitectureState) -> bool {
+ !validate(state)
+ .iter()
+ .any(|e| e.severity == Severity::Error)
+}
+
+// ── Internal validators ────────────────────────────────────────────────────────
+
+/// `[meta]` sanity checks — currently just a non-empty `aimdb_version`.
+fn validate_meta(state: &ArchitectureState, errors: &mut Vec<ValidationError>) {
+    if state.meta.aimdb_version.is_empty() {
+        errors.push(ValidationError {
+            message: "aimdb_version must not be empty".to_string(),
+            location: "meta.aimdb_version".to_string(),
+            severity: Severity::Error,
+        });
+    }
+}
+
+/// Per-record structural checks: naming, buffer/capacity coherence,
+/// key variants, field types, connectors, and the optional observable block.
+fn validate_records(state: &ArchitectureState, errors: &mut Vec<ValidationError>) {
+    let mut seen_names: Vec<&str> = Vec::new();
+
+    for (idx, rec) in state.records.iter().enumerate() {
+        let loc = format!("records[{idx}]");
+
+        // Name must be non-empty
+        if rec.name.is_empty() {
+            errors.push(ValidationError {
+                message: "record name must not be empty".to_string(),
+                location: loc.clone(),
+                severity: Severity::Error,
+            });
+            continue; // Can't do further checks without a name
+        }
+
+        // Name should start with an uppercase letter (PascalCase convention)
+        if !rec
+            .name
+            .chars()
+            .next()
+            .map(|c| c.is_uppercase())
+            .unwrap_or(false)
+        {
+            errors.push(ValidationError {
+                message: format!(
+                    "record name '{}' should start with an uppercase letter (PascalCase)",
+                    rec.name
+                ),
+                location: format!("{loc}.name"),
+                severity: Severity::Warning,
+            });
+        }
+
+        // Duplicate record names
+        if seen_names.contains(&rec.name.as_str()) {
+            errors.push(ValidationError {
+                message: format!("duplicate record name '{}'", rec.name),
+                location: format!("{loc}.name"),
+                severity: Severity::Error,
+            });
+        } else {
+            seen_names.push(&rec.name);
+        }
+
+        // SpmcRing must have capacity > 0
+        if rec.buffer == BufferType::SpmcRing {
+            match rec.capacity {
+                None => {
+                    errors.push(ValidationError {
+                        message: "SpmcRing requires 'capacity' to be set".to_string(),
+                        location: format!("{loc}.capacity"),
+                        severity: Severity::Error,
+                    });
+                }
+                Some(0) => {
+                    errors.push(ValidationError {
+                        message: "SpmcRing capacity must be > 0".to_string(),
+                        location: format!("{loc}.capacity"),
+                        severity: Severity::Error,
+                    });
+                }
+                _ => {}
+            }
+        }
+
+        // Warn if capacity is set but buffer is not SpmcRing
+        if rec.buffer != BufferType::SpmcRing && rec.capacity.is_some() {
+            errors.push(ValidationError {
+                message: "capacity is only meaningful for SpmcRing; it will be ignored".to_string(),
+                location: format!("{loc}.capacity"),
+                severity: Severity::Warning,
+            });
+        }
+
+        // Warn if no key variants
+        if rec.key_variants.is_empty() {
+            errors.push(ValidationError {
+                message: format!(
+                    "record '{}' has no key_variants — the key enum will be empty and unusable",
+                    rec.name
+                ),
+                location: format!("{loc}.key_variants"),
+                severity: Severity::Warning,
+            });
+        }
+
+        // Duplicate key variants
+        let mut seen_variants: Vec<&str> = Vec::new();
+        for variant in &rec.key_variants {
+            if seen_variants.contains(&variant.as_str()) {
+                errors.push(ValidationError {
+                    message: format!("duplicate key variant '{variant}'"),
+                    location: format!("{loc}.key_variants"),
+                    severity: Severity::Error,
+                });
+            } else {
+                seen_variants.push(variant);
+            }
+        }
+
+        // Warn if no fields
+        if rec.fields.is_empty() {
+            errors.push(ValidationError {
+                message: format!(
+                    "record '{}' has no fields — the value struct will be empty",
+                    rec.name
+                ),
+                location: format!("{loc}.fields"),
+                severity: Severity::Warning,
+            });
+        }
+
+        // schema_version must be >= 1 if specified
+        if rec.schema_version == Some(0) {
+            errors.push(ValidationError {
+                message: format!(
+                    "record '{}' has schema_version = 0; versions must be >= 1",
+                    rec.name
+                ),
+                location: format!("{loc}.schema_version"),
+                severity: Severity::Warning,
+            });
+        }
+
+        // Warn if settable fields exist but no timestamp field is present
+        let has_settable = rec.fields.iter().any(|f| f.settable);
+        if has_settable {
+            let timestamp_names = ["timestamp", "computed_at", "fetched_at"];
+            let has_timestamp = rec
+                .fields
+                .iter()
+                .any(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str()));
+            if !has_timestamp {
+                errors.push(ValidationError {
+                    message: format!(
+                        "record '{}' has settable fields but no timestamp field \
+                         (u64 named timestamp, computed_at, or fetched_at) — \
+                         Settable::set() will use Default::default() for the timestamp slot",
+                        rec.name
+                    ),
+                    location: format!("{loc}.fields"),
+                    severity: Severity::Warning,
+                });
+            }
+        }
+
+        // Validate field types
+        for (fidx, field) in rec.fields.iter().enumerate() {
+            if field.name.is_empty() {
+                errors.push(ValidationError {
+                    message: "field name must not be empty".to_string(),
+                    location: format!("{loc}.fields[{fidx}]"),
+                    severity: Severity::Error,
+                });
+            }
+            if !VALID_FIELD_TYPES.contains(&field.field_type.as_str()) {
+                errors.push(ValidationError {
+                    message: format!(
+                        "unsupported field type '{}' — valid types: {}",
+                        field.field_type,
+                        VALID_FIELD_TYPES.join(", ")
+                    ),
+                    location: format!("{loc}.fields[{fidx}].type"),
+                    severity: Severity::Error,
+                });
+            }
+        }
+
+        // Validate connectors
+        for (cidx, conn) in rec.connectors.iter().enumerate() {
+            if conn.url.is_empty() {
+                errors.push(ValidationError {
+                    message: "connector URL must not be empty".to_string(),
+                    location: format!("{loc}.connectors[{cidx}].url"),
+                    severity: Severity::Error,
+                });
+            }
+            if conn.protocol.is_empty() {
+                errors.push(ValidationError {
+                    message: "connector protocol must not be empty".to_string(),
+                    location: format!("{loc}.connectors[{cidx}].protocol"),
+                    severity: Severity::Error,
+                });
+            }
+        }
+
+        // Validate observable block
+        if let Some(obs) = &rec.observable {
+            let field_exists = rec.fields.iter().any(|f| f.name == obs.signal_field);
+            if !field_exists {
+                errors.push(ValidationError {
+                    message: format!(
+                        "observable signal_field '{}' does not match any field in record '{}'",
+                        obs.signal_field, rec.name
+                    ),
+                    location: format!("{loc}.observable.signal_field"),
+                    severity: Severity::Error,
+                });
+            } else {
+                // Check signal_field type is numeric (Observable::Signal: PartialOrd + Copy)
+                let field = rec
+                    .fields
+                    .iter()
+                    .find(|f| f.name == obs.signal_field)
+                    .unwrap();
+                let numeric_types = [
+                    "f32", "f64", "u8", "u16", "u32", "u64", "i8", "i16", "i32", "i64",
+                ];
+                if !numeric_types.contains(&field.field_type.as_str()) {
+                    errors.push(ValidationError {
+                        message: format!(
+                            "observable signal_field '{}' has type '{}' which is not numeric — \
+                             Observable::Signal must implement PartialOrd + Copy",
+                            obs.signal_field, field.field_type
+                        ),
+                        location: format!("{loc}.observable.signal_field"),
+                        severity: Severity::Warning,
+                    });
+                }
+            }
+        }
+    }
+}
+
+// ── Tasks and binaries validation ─────────────────────────────────────────────
+
+/// Cross-reference checks between records, tasks, and binaries
+/// (rules 1–5 annotated inline).
+fn validate_tasks_and_binaries(state: &ArchitectureState, errors: &mut Vec<ValidationError>) {
+    let record_names: Vec<&str> = state.records.iter().map(|r| r.name.as_str()).collect();
+    let task_names: Vec<&str> = state.tasks.iter().map(|t| t.name.as_str()).collect();
+
+    // Rule 1: task name in producers/consumers has no [[tasks]] entry → Warning
+    for (ridx, rec) in state.records.iter().enumerate() {
+        for producer in &rec.producers {
+            if !task_names.contains(&producer.as_str()) {
+                errors.push(ValidationError {
+                    message: format!(
+                        "producer '{producer}' in record '{}' has no [[tasks]] entry",
+                        rec.name
+                    ),
+                    location: format!("records[{ridx}].producers"),
+                    severity: Severity::Warning,
+                });
+            }
+        }
+        for consumer in &rec.consumers {
+            if !task_names.contains(&consumer.as_str()) {
+                errors.push(ValidationError {
+                    message: format!(
+                        "consumer '{consumer}' in record '{}' has no [[tasks]] entry",
+                        rec.name
+                    ),
+                    location: format!("records[{ridx}].consumers"),
+                    severity: Severity::Warning,
+                });
+            }
+        }
+    }
+
+    // Rules 2, 3, 5: task I/O references
+    for (tidx, task) in state.tasks.iter().enumerate() {
+        let tloc = format!("tasks[{tidx}]");
+
+        for (iidx, input) in task.inputs.iter().enumerate() {
+            // Rule 2: inputs reference a record not in [[records]]
+            if !record_names.contains(&input.record.as_str()) {
+                errors.push(ValidationError {
+                    message: format!(
+                        "task '{}' input references unknown record '{}'",
+                        task.name, input.record
+                    ),
+                    location: format!("{tloc}.inputs[{iidx}].record"),
+                    severity: Severity::Error,
+                });
+            }
+        }
+
+        for (oidx, output) in task.outputs.iter().enumerate() {
+            // Rule 3: outputs reference a record not in [[records]]
+            if !record_names.contains(&output.record.as_str()) {
+                errors.push(ValidationError {
+                    message: format!(
+                        "task '{}' output references unknown record '{}'",
+                        task.name, output.record
+                    ),
+                    location: format!("{tloc}.outputs[{oidx}].record"),
+                    severity: Severity::Error,
+                });
+                continue; // variant check below needs an existing record
+            }
+
+            // Rule 5: output variant not in that record's key_variants (only when variants is non-empty)
+            if !output.variants.is_empty() {
+                let rec = state.records.iter().find(|r| r.name == output.record);
+                if let Some(rec) = rec {
+                    for variant in &output.variants {
+                        if !rec.key_variants.contains(variant) {
+                            errors.push(ValidationError {
+                                message: format!(
+                                    "task '{}' output variant '{variant}' not found in record '{}' key_variants",
+                                    task.name, output.record
+                                ),
+                                location: format!("{tloc}.outputs[{oidx}].variants"),
+                                severity: Severity::Error,
+                            });
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    // Rule 4: binary task name not found in [[tasks]]
+    for (bidx, bin) in state.binaries.iter().enumerate() {
+        for task_name in &bin.tasks {
+            if !task_names.contains(&task_name.as_str()) {
+                errors.push(ValidationError {
+                    message: format!(
+                        "binary '{}' references task '{task_name}' which has no [[tasks]] entry",
+                        bin.name
+                    ),
+                    location: format!("binaries[{bidx}].tasks"),
+                    severity: Severity::Error,
+                });
+            }
+        }
+    }
+}
+
+// ── Tests ─────────────────────────────────────────────────────────────────────
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::state::ArchitectureState;
+
+    const VALID_TOML: &str = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor", "outdoor"]
+producers = ["sensor_task"]
+consumers = ["dashboard"]
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature"
+
+[[records.connectors]]
+protocol = "mqtt"
+direction = "outbound"
+url = "mqtt://sensors/temp/{variant}"
+"#; // minimal error-free fixture; tests below mutate it via `.replace(...)`
+
+    // Shared fixture: parses VALID_TOML; panics only if the fixture itself is broken.
+    fn valid_state() -> ArchitectureState {
+        ArchitectureState::from_toml(VALID_TOML).unwrap()
+    }
+
+    #[test]
+    fn valid_state_has_no_errors() {
+        let errs = validate(&valid_state());
+        let error_errs: Vec<_> = errs // warnings are tolerated; only Errors fail
+            .iter()
+            .filter(|e| e.severity == Severity::Error)
+            .collect();
+        assert!(error_errs.is_empty(), "Unexpected errors: {error_errs:?}");
+    }
+
+    #[test]
+    fn is_valid_returns_true_for_clean_state() {
+        assert!(is_valid(&valid_state())); // convenience wrapper over validate()
+    }
+
+    #[test]
+    fn detects_spmc_missing_capacity() {
+        let toml = VALID_TOML.replace("capacity = 256\n", ""); // drop the capacity line entirely
+        let state = ArchitectureState::from_toml(&toml).unwrap();
+        let errs = validate(&state);
+        let has_err = errs
+            .iter()
+            .any(|e| e.severity == Severity::Error && e.message.contains("capacity"));
+        assert!(
+            has_err,
+            "Should detect missing SpmcRing capacity:\n{errs:?}"
+        );
+    }
+
+    #[test]
+    fn detects_spmc_zero_capacity() {
+        let toml = VALID_TOML.replace("capacity = 256", "capacity = 0"); // zero is invalid for SpmcRing
+        let state = ArchitectureState::from_toml(&toml).unwrap();
+        let errs = validate(&state);
+        let has_err = errs
+            .iter()
+            .any(|e| e.severity == Severity::Error && e.message.contains("capacity must be > 0"));
+        assert!(has_err, "Should detect zero capacity:\n{errs:?}");
+    }
+
+    #[test]
+    fn detects_duplicate_record_names() {
+        // Append a second record reusing the name "TemperatureReading".
+        let toml = format!(
+            "{VALID_TOML}{}",
+            r#"
+[[records]]
+name = "TemperatureReading"
+buffer = "SingleLatest"
+key_variants = ["a"]
+
+[[records.fields]]
+name = "value"
+type = "f64"
+description = "Value"
+"#
+        );
+        let state = ArchitectureState::from_toml(&toml).unwrap();
+        let errs = validate(&state);
+        let has_err = errs
+            .iter()
+            .any(|e| e.severity == Severity::Error && e.message.contains("duplicate record name"));
+        assert!(has_err, "Should detect duplicate record name:\n{errs:?}");
+    }
+
+    #[test]
+    fn detects_duplicate_key_variants() {
+        let toml = VALID_TOML.replace( // "indoor" appears twice after the swap
+            r#"key_variants = ["indoor", "outdoor"]"#,
+            r#"key_variants = ["indoor", "indoor"]"#,
+        );
+        let state = ArchitectureState::from_toml(&toml).unwrap();
+        let errs = validate(&state);
+        let has_err = errs
+            .iter()
+            .any(|e| e.severity == Severity::Error && e.message.contains("duplicate key variant"));
+        assert!(has_err, "Should detect duplicate key variants:\n{errs:?}");
+    }
+
+    #[test]
+    fn detects_invalid_field_type() {
+        let toml = VALID_TOML.replace(r#"type = "f64""#, r#"type = "float64""#); // not in VALID_FIELD_TYPES
+        let state = ArchitectureState::from_toml(&toml).unwrap();
+        let errs = validate(&state);
+        let has_err = errs
+            .iter()
+            .any(|e| e.severity == Severity::Error && e.message.contains("unsupported field type"));
+        assert!(has_err, "Should detect invalid field type:\n{errs:?}");
+    }
+
+    #[test]
+    fn detects_empty_connector_url() {
+        let toml = VALID_TOML.replace(r#"url = "mqtt://sensors/temp/{variant}""#, r#"url = """#); // blank URL
+        let state = ArchitectureState::from_toml(&toml).unwrap();
+        let errs = validate(&state);
+        let has_err = errs
+            .iter()
+            .any(|e| e.severity == Severity::Error && e.message.contains("URL must not be empty"));
+        assert!(has_err, "Should detect empty connector URL:\n{errs:?}");
+    }
+
+    #[test]
+    fn warning_for_non_pascal_case_name() {
+        let toml = VALID_TOML.replace( // lowercase first letter → advisory only
+            "name = \"TemperatureReading\"",
+            "name = \"temperatureReading\"",
+        );
+        let state = ArchitectureState::from_toml(&toml).unwrap();
+        let errs = validate(&state);
+        let has_warn = errs
+            .iter()
+            .any(|e| e.severity == Severity::Warning && e.message.contains("uppercase"));
+        assert!(has_warn, "Should warn about non-PascalCase name:\n{errs:?}");
+    }
+
+ #[test]
+ fn warning_for_capacity_on_non_spmc() {
+ let toml = VALID_TOML.replace("buffer = \"SpmcRing\"", "buffer = \"SingleLatest\"");
+ let state = ArchitectureState::from_toml(&toml).unwrap();
+ let errs = validate(&state);
+ let has_warn = errs.iter().any(|e| {
+ e.severity == Severity::Warning && e.message.contains("capacity is only meaningful")
+ });
+ assert!(
+ has_warn,
+ "Should warn about capacity on non-SpmcRing:\n{errs:?}"
+ );
+ }
+
+ #[test]
+ fn display_format() {
+ let e = ValidationError {
+ message: "something wrong".to_string(),
+ location: "records[0].name".to_string(),
+ severity: Severity::Error,
+ };
+ let s = format!("{e}");
+ assert!(s.contains("[ERROR]"), "Display should show [ERROR]:\n{s}");
+ assert!(
+ s.contains("records[0].name"),
+ "Display should show location:\n{s}"
+ );
+ }
+
+ #[test]
+ fn detects_observable_missing_signal_field() {
+ let toml = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor"]
+
+[records.observable]
+signal_field = "nonexistent"
+icon = "🌡️"
+unit = "°C"
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature"
+"#;
+ let state = ArchitectureState::from_toml(toml).unwrap();
+ let errs = validate(&state);
+ let has_err = errs.iter().any(|e| {
+ e.severity == Severity::Error && e.message.contains("does not match any field")
+ });
+ assert!(
+ has_err,
+ "Should detect missing observable signal_field:\n{errs:?}"
+ );
+ }
+
+ #[test]
+ fn warns_schema_version_zero() {
+ let toml = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor"]
+schema_version = 0
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature"
+"#;
+ let state = ArchitectureState::from_toml(toml).unwrap();
+ let errs = validate(&state);
+ let has_warn = errs
+ .iter()
+ .any(|e| e.severity == Severity::Warning && e.message.contains("schema_version = 0"));
+ assert!(has_warn, "Should warn about schema_version = 0:\n{errs:?}");
+ }
+
+ #[test]
+ fn warns_settable_fields_without_timestamp() {
+ let toml = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor"]
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature"
+settable = true
+"#;
+ let state = ArchitectureState::from_toml(toml).unwrap();
+ let errs = validate(&state);
+ let has_warn = errs
+ .iter()
+ .any(|e| e.severity == Severity::Warning && e.message.contains("no timestamp field"));
+ assert!(
+ has_warn,
+ "Should warn about settable fields with no timestamp:\n{errs:?}"
+ );
+ }
+
+ #[test]
+ fn no_warn_settable_fields_with_timestamp() {
+ let toml = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor"]
+
+[[records.fields]]
+name = "timestamp"
+type = "u64"
+description = "Unix ms"
+
+[[records.fields]]
+name = "celsius"
+type = "f64"
+description = "Temperature"
+settable = true
+"#;
+ let state = ArchitectureState::from_toml(toml).unwrap();
+ let errs = validate(&state);
+ let has_warn = errs
+ .iter()
+ .any(|e| e.severity == Severity::Warning && e.message.contains("no timestamp field"));
+ assert!(
+ !has_warn,
+ "Should not warn when timestamp field is present:\n{errs:?}"
+ );
+ }
+
+ #[test]
+ fn warns_observable_non_numeric_signal_field() {
+ let toml = r#"
+[meta]
+aimdb_version = "0.5.0"
+created_at = "2026-02-22T14:00:00Z"
+last_modified = "2026-02-22T14:33:00Z"
+
+[[records]]
+name = "TemperatureReading"
+buffer = "SpmcRing"
+capacity = 256
+key_prefix = "sensors.temp."
+key_variants = ["indoor"]
+
+[records.observable]
+signal_field = "label"
+icon = "📊"
+unit = ""
+
+[[records.fields]]
+name = "label"
+type = "String"
+description = "A label"
+"#;
+ let state = ArchitectureState::from_toml(toml).unwrap();
+ let errs = validate(&state);
+ let has_warn = errs
+ .iter()
+ .any(|e| e.severity == Severity::Warning && e.message.contains("not numeric"));
+ assert!(
+ has_warn,
+ "Should warn about non-numeric signal_field:\n{errs:?}"
+ );
+ }
+}
diff --git a/aimdb-core/src/builder.rs b/aimdb-core/src/builder.rs
index faea5263..26da79d6 100644
--- a/aimdb-core/src/builder.rs
+++ b/aimdb-core/src/builder.rs
@@ -1513,6 +1513,32 @@ impl AimDb {
/// connector.spawn_publisher(topic, consumer, serializer, config)?;
/// }
/// ```
+ /// Collect `(topic, TypeId)` pairs for all outbound routes matching `scheme`.
+ ///
+ /// Complements [`collect_outbound_routes`](Self::collect_outbound_routes) when
+ /// callers need to know the concrete record type behind each outbound topic
+ /// (e.g. to resolve a schema name for discovery responses).
+ ///
+ /// The returned `TypeId` is the `TypeId::of::<T>()` for the record type `T`
+ /// that was used in the corresponding `configure::<T>()` call.
+ #[cfg(feature = "alloc")]
+ pub fn collect_outbound_topic_type_ids(&self, scheme: &str) -> Vec<(String, TypeId)> {
+ let mut result = Vec::new();
+
+ for (idx, record) in self.inner.storages.iter().enumerate() {
+ let type_id = self.inner.types[idx];
+
+ for link in record.outbound_connectors() {
+ if link.url.scheme() != scheme {
+ continue;
+ }
+ result.push((link.url.resource_id(), type_id));
+ }
+ }
+
+ result
+ }
+
#[cfg(feature = "alloc")]
pub fn collect_outbound_routes(&self, scheme: &str) -> Vec {
let mut routes = Vec::new();
diff --git a/aimdb-data-contracts/.gitignore b/aimdb-data-contracts/.gitignore
index 81f62ce1..de01a5e4 100644
--- a/aimdb-data-contracts/.gitignore
+++ b/aimdb-data-contracts/.gitignore
@@ -3,6 +3,3 @@ Cargo.lock
**/*.rs.bk
*.pdb
.DS_Store
-
-# Generated TypeScript bindings (run ./scripts/gen-ts-bindings.sh)
-/bindings/
diff --git a/aimdb-data-contracts/src/contracts/temperature.rs b/aimdb-data-contracts/src/contracts/temperature.rs
index deb9322f..34c02a1d 100644
--- a/aimdb-data-contracts/src/contracts/temperature.rs
+++ b/aimdb-data-contracts/src/contracts/temperature.rs
@@ -8,8 +8,8 @@
//! - **v1** (legacy): `{ "schema_version": 1, "temp": f32, "timestamp": u64, "unit": "C"|"F"|"K" }`
//! - **v2** (current): `{ "schema_version": 2, "celsius": f32, "timestamp": u64 }`
//!
-//! The `from_bytes_versioned()` function reads the `schema_version` from the payload
-//! and migrates automatically, allowing nodes and hubs to be updated independently.
+//! The `MigrationChain` impl (via `migration_chain!`) reads the `schema_version`
+//! from the payload and migrates automatically, allowing nodes and hubs to be updated independently.
extern crate alloc;
@@ -23,7 +23,7 @@ use crate::Linkable;
use crate::{Simulatable, SimulationConfig};
#[cfg(feature = "migratable")]
-use crate::{Migratable, MigrationError};
+use crate::{MigrationError, MigrationStep};
#[cfg(feature = "ts")]
use ts_rs::TS;
@@ -132,30 +132,40 @@ impl Linkable for TemperatureV1 {
}
// ═══════════════════════════════════════════════════════════════════
-// MIGRATABLE IMPLEMENTATION
+// TYPE-SAFE MIGRATION (v1 → v2)
// ═══════════════════════════════════════════════════════════════════
+/// Migration step: Temperature v1 (temp + unit) → v2 (celsius only)
#[cfg(feature = "migratable")]
-impl Migratable for Temperature {
- /// Migrate raw JSON from v1 to v2 format.
- ///
- /// # v1 → v2 Migration
- /// - Rename: `temp` → `celsius`
- /// - Convert: Apply unit conversion if unit is "F" or "K"
- /// - Remove: Drop the `unit` field
- ///
- /// Delegates to `TemperatureV1::to_v2()` to keep conversion logic DRY.
- fn migrate(raw: &mut serde_json::Value, from_version: u32) -> Result<(), MigrationError> {
- if from_version < 2 {
- // Parse as v1, convert via to_v2(), then serialize back
- let v1: TemperatureV1 = serde_json::from_value(raw.clone())
- .map_err(|_| MigrationError::MissingField("temp or unit"))?;
- let v2 = v1.to_v2();
- *raw = serde_json::to_value(v2)
- .map_err(|_| MigrationError::Custom("failed to serialize migrated value"))?;
- }
+pub struct TemperatureV1ToV2;
+
+#[cfg(feature = "migratable")]
+impl MigrationStep for TemperatureV1ToV2 {
+ type Older = TemperatureV1;
+ type Newer = Temperature;
+ const FROM_VERSION: u32 = 1;
+ const TO_VERSION: u32 = 2;
+
+ fn up(v1: TemperatureV1) -> Result<Temperature, MigrationError> {
+ Ok(v1.to_v2())
+ }
- Ok(())
+ fn down(v2: Temperature) -> Result<TemperatureV1, MigrationError> {
+ Ok(TemperatureV1 {
+ schema_version: 1,
+ temp: v2.celsius,
+ timestamp: v2.timestamp,
+ unit: alloc::string::String::from("C"),
+ })
+ }
+}
+
+#[cfg(feature = "migratable")]
+crate::migration_chain! {
+ type Current = Temperature;
+ version_field = "schema_version";
+ steps {
+ TemperatureV1ToV2: TemperatureV1 => Temperature,
}
}
@@ -230,46 +240,14 @@ impl Settable for Temperature {
}
// ═══════════════════════════════════════════════════════════════════
-// VERSIONED DESERIALIZATION
+// LINKABLE WITH MIGRATION SUPPORT
// ═══════════════════════════════════════════════════════════════════
-#[cfg(feature = "linkable")]
-impl Temperature {
- /// Deserialize from bytes with automatic migration based on `schema_version` field.
- ///
- /// This function enables **decoupled deployment**: nodes and hubs can be
- /// updated independently because the hub reads the schema version from the payload.
- ///
- /// Delegates to `Migratable::deserialize_versioned` for the actual migration.
- ///
- /// # Example
- /// ```ignore
- /// let v1 = r#"{"schema_version":1,"temp":68.0,"timestamp":123,"unit":"F"}"#;
- /// let v2 = r#"{"schema_version":2,"celsius":20.0,"timestamp":123}"#;
- /// ```
- #[cfg(feature = "migratable")]
- pub fn from_bytes_versioned(data: &[u8]) -> Result<Self, alloc::string::String> {
- use crate::Migratable;
-
- let mut value: serde_json::Value =
- serde_json::from_slice(data).map_err(|e| alloc::format!("JSON parse error: {}", e))?;
-
- let version = value
- .get("schema_version")
- .and_then(|v| v.as_u64())
- .ok_or_else(|| alloc::string::String::from("Missing schema_version field"))?
- as u32;
-
- Self::deserialize_versioned(&mut value, version)
- .map_err(|e| alloc::format!("Migration error: {:?}", e))
- }
-}
-
#[cfg(all(feature = "linkable", feature = "migratable"))]
impl Linkable for Temperature {
fn from_bytes(data: &[u8]) -> Result<Self, alloc::string::String> {
- // Use versioned deserializer for automatic migration
- Self::from_bytes_versioned(data)
+ use crate::MigrationChain;
+ Self::migrate_from_bytes(data).map_err(|e| alloc::format!("Migration error: {}", e))
}
fn to_bytes(&self) -> Result<alloc::vec::Vec<u8>, alloc::string::String> {
@@ -343,105 +321,192 @@ mod tests {
assert_eq!(v2.celsius, 22.5);
}
+ // ═══════════════════════════════════════════════════════════════════
+ // TYPE-SAFE MIGRATION STEP TESTS
+ // ═══════════════════════════════════════════════════════════════════
+
#[cfg(feature = "migratable")]
#[test]
- fn test_migratable_trait_celsius() {
- use crate::Migratable;
+ fn test_migration_step_up_celsius() {
+ use crate::MigrationStep;
- let mut raw = serde_json::json!({
- "temp": 22.5,
- "timestamp": 1704326400000_u64,
- "unit": "C"
- });
-
- Temperature::migrate(&mut raw, 1).unwrap();
-
- assert_eq!(raw["celsius"], 22.5);
- assert!(raw.get("temp").is_none(), "temp field should be removed");
- assert!(raw.get("unit").is_none(), "unit field should be removed");
+ let v1 = TemperatureV1::new(22.5, 1704326400000, "C");
+ let v2 = TemperatureV1ToV2::up(v1).unwrap();
+ assert_eq!(v2.celsius, 22.5);
+ assert_eq!(v2.timestamp, 1704326400000);
}
#[cfg(feature = "migratable")]
#[test]
- fn test_migratable_trait_fahrenheit() {
- use crate::Migratable;
-
- let mut raw = serde_json::json!({
- "temp": 68.0,
- "timestamp": 1704326400000_u64,
- "unit": "F"
- });
-
- Temperature::migrate(&mut raw, 1).unwrap();
+ fn test_migration_step_up_fahrenheit() {
+ use crate::MigrationStep;
- let celsius = raw["celsius"].as_f64().unwrap();
+ let v1 = TemperatureV1::new(68.0, 1704326400000, "F");
+ let v2 = TemperatureV1ToV2::up(v1).unwrap();
assert!(
- (celsius - 20.0).abs() < 0.01,
+ (v2.celsius - 20.0).abs() < 0.01,
"Expected ~20°C, got {}",
- celsius
+ v2.celsius
);
}
#[cfg(feature = "migratable")]
#[test]
- fn test_migratable_trait_kelvin() {
- use crate::Migratable;
+ fn test_migration_step_up_kelvin() {
+ use crate::MigrationStep;
- let mut raw = serde_json::json!({
- "temp": 293.15,
- "timestamp": 1704326400000_u64,
- "unit": "K"
- });
-
- Temperature::migrate(&mut raw, 1).unwrap();
-
- let celsius = raw["celsius"].as_f64().unwrap();
+ let v1 = TemperatureV1::new(293.15, 1704326400000, "K");
+ let v2 = TemperatureV1ToV2::up(v1).unwrap();
assert!(
- (celsius - 20.0).abs() < 0.01,
+ (v2.celsius - 20.0).abs() < 0.01,
"Expected ~20°C, got {}",
- celsius
+ v2.celsius
);
}
#[cfg(feature = "migratable")]
#[test]
- fn test_deserialize_versioned_v1() {
- use crate::Migratable;
+ fn test_migration_step_down() {
+ use crate::MigrationStep;
- let mut raw = serde_json::json!({
- "temp": 22.5,
- "timestamp": 1704326400000_u64,
- "unit": "C"
- });
+ let v2 = Temperature::new(22.5, 1704326400000);
+ let v1 = TemperatureV1ToV2::down(v2).unwrap();
+ assert_eq!(v1.temp, 22.5);
+ assert_eq!(v1.timestamp, 1704326400000);
+ assert_eq!(v1.unit, "C");
+ assert_eq!(v1.schema_version, 1);
+ }
- let temp: Temperature = Temperature::deserialize_versioned(&mut raw, 1).unwrap();
+ // ═══════════════════════════════════════════════════════════════════
+ // MIGRATION CHAIN TESTS (upgrade from bytes)
+ // ═══════════════════════════════════════════════════════════════════
+
+ #[cfg(feature = "migratable")]
+ #[test]
+ fn test_migrate_from_bytes_v1() {
+ use crate::MigrationChain;
+
+ let json = r#"{"schema_version":1,"temp":22.5,"timestamp":1704326400000,"unit":"C"}"#;
+ let temp = Temperature::migrate_from_bytes(json.as_bytes()).unwrap();
assert_eq!(temp.celsius, 22.5);
assert_eq!(temp.timestamp, 1704326400000);
}
#[cfg(feature = "migratable")]
#[test]
- fn test_deserialize_versioned_v2_no_migration() {
- use crate::Migratable;
+ fn test_migrate_from_bytes_v2_no_migration() {
+ use crate::MigrationChain;
- let mut raw = serde_json::json!({
- "celsius": 22.5,
- "timestamp": 1704326400000_u64
- });
-
- let temp: Temperature = Temperature::deserialize_versioned(&mut raw, 2).unwrap();
+ let json = r#"{"schema_version":2,"celsius":22.5,"timestamp":1704326400000}"#;
+ let temp = Temperature::migrate_from_bytes(json.as_bytes()).unwrap();
assert_eq!(temp.celsius, 22.5);
}
+ #[cfg(feature = "migratable")]
+ #[test]
+ fn test_migrate_from_bytes_version_too_new() {
+ use crate::MigrationChain;
+
+ let json = r#"{"schema_version":99,"celsius":22.5,"timestamp":100}"#;
+ let err = Temperature::migrate_from_bytes(json.as_bytes()).unwrap_err();
+ assert_eq!(
+ err,
+ crate::MigrationError::VersionTooNew {
+ source: 99,
+ current: 2
+ }
+ );
+ }
+
+ #[cfg(feature = "migratable")]
+ #[test]
+ fn test_migrate_from_bytes_missing_version() {
+ use crate::MigrationChain;
+
+ let json = r#"{"celsius":22.5,"timestamp":100}"#;
+ let err = Temperature::migrate_from_bytes(json.as_bytes()).unwrap_err();
+ assert_eq!(err, crate::MigrationError::MissingVersion);
+ }
+
// ═══════════════════════════════════════════════════════════════════
- // VERSIONED DESERIALIZATION TESTS (auto-detect version)
+ // DOWNGRADE TESTS
// ═══════════════════════════════════════════════════════════════════
- #[cfg(feature = "linkable")]
+ #[cfg(feature = "migratable")]
+ #[test]
+ fn test_downgrade_to_v1() {
+ use crate::MigrationChain;
+
+ let temp = Temperature::new(22.5, 1704326400000);
+ let v1_bytes = temp.migrate_to_version(1).unwrap();
+ let v1: serde_json::Value = serde_json::from_slice(&v1_bytes).unwrap();
+
+ assert_eq!(v1["temp"], 22.5);
+ assert_eq!(v1["unit"], "C");
+ assert_eq!(v1["schema_version"], 1);
+ }
+
+ #[cfg(feature = "migratable")]
+ #[test]
+ fn test_downgrade_to_current_version() {
+ use crate::MigrationChain;
+
+ let temp = Temperature::new(22.5, 1704326400000);
+ let v2_bytes = temp.migrate_to_version(2).unwrap();
+ let v2: Temperature = serde_json::from_slice(&v2_bytes).unwrap();
+ assert_eq!(v2.celsius, 22.5);
+ }
+
+ #[cfg(feature = "migratable")]
+ #[test]
+ fn test_downgrade_version_too_old() {
+ use crate::MigrationChain;
+
+ let temp = Temperature::new(22.5, 100);
+ let err = temp.migrate_to_version(0).unwrap_err();
+ assert_eq!(
+ err,
+ crate::MigrationError::VersionTooOld {
+ target: 0,
+ minimum: 1
+ }
+ );
+ }
+
+ #[cfg(feature = "migratable")]
+ #[test]
+ fn test_roundtrip_v1_upgrade_downgrade() {
+ use crate::MigrationChain;
+
+ // Start with v1 JSON
+ let v1_json = r#"{"schema_version":1,"temp":22.5,"timestamp":1704326400000,"unit":"C"}"#;
+
+ // Upgrade to v2
+ let v2 = Temperature::migrate_from_bytes(v1_json.as_bytes()).unwrap();
+ assert_eq!(v2.celsius, 22.5);
+
+ // Downgrade back to v1
+ let v1_bytes = v2.migrate_to_version(1).unwrap();
+ let v1: TemperatureV1 = serde_json::from_slice(&v1_bytes).unwrap();
+ assert_eq!(v1.temp, 22.5);
+ assert_eq!(v1.unit, "C");
+
+ // Upgrade again — should round-trip
+ let v2_again = Temperature::migrate_from_bytes(&v1_bytes).unwrap();
+ assert_eq!(v2_again.celsius, 22.5);
+ }
+
+ // ═══════════════════════════════════════════════════════════════════
+ // LINKABLE TRAIT TESTS (with auto-migration)
+ // ═══════════════════════════════════════════════════════════════════
+
+ #[cfg(all(feature = "linkable", feature = "migratable"))]
#[test]
fn test_from_bytes_v1_with_version_marker() {
+ use crate::Linkable;
+
let json = r#"{"schema_version":1,"temp":68.0,"timestamp":1704326400000,"unit":"F"}"#;
- let temp = Temperature::from_bytes_versioned(json.as_bytes()).unwrap();
+ let temp = Temperature::from_bytes(json.as_bytes()).unwrap();
assert!(
(temp.celsius - 20.0).abs() < 0.01,
"Expected ~20°C from 68°F"
@@ -449,24 +514,28 @@ mod tests {
assert_eq!(temp.timestamp, 1704326400000);
}
- #[cfg(feature = "linkable")]
+ #[cfg(all(feature = "linkable", feature = "migratable"))]
#[test]
fn test_from_bytes_v2_with_version_marker() {
+ use crate::Linkable;
+
let json = r#"{"schema_version":2,"celsius":22.5,"timestamp":1704326400000}"#;
- let temp = Temperature::from_bytes_versioned(json.as_bytes()).unwrap();
+ let temp = Temperature::from_bytes(json.as_bytes()).unwrap();
assert_eq!(temp.celsius, 22.5);
assert_eq!(temp.timestamp, 1704326400000);
}
- #[cfg(feature = "linkable")]
+ #[cfg(all(feature = "linkable", feature = "migratable"))]
#[test]
fn test_from_bytes_v1_celsius_unit() {
+ use crate::Linkable;
+
let json = r#"{"schema_version":1,"temp":22.5,"timestamp":1704326400000,"unit":"C"}"#;
- let temp = Temperature::from_bytes_versioned(json.as_bytes()).unwrap();
+ let temp = Temperature::from_bytes(json.as_bytes()).unwrap();
assert_eq!(temp.celsius, 22.5);
}
- #[cfg(feature = "linkable")]
+ #[cfg(all(feature = "linkable", feature = "migratable"))]
#[test]
fn test_from_bytes_via_linkable_trait() {
use crate::Linkable;
@@ -482,14 +551,14 @@ mod tests {
assert_eq!(temp.celsius, 22.5);
}
- #[cfg(feature = "linkable")]
+ #[cfg(all(feature = "linkable", feature = "migratable"))]
#[test]
fn test_from_bytes_missing_version_fails() {
- // Payloads without schema_version should fail
+ use crate::Linkable;
+
let json = r#"{"celsius":22.5,"timestamp":1704326400000}"#;
- let result = Temperature::from_bytes_versioned(json.as_bytes());
+ let result = Temperature::from_bytes(json.as_bytes());
assert!(result.is_err());
- assert!(result.unwrap_err().contains("schema_version"));
}
#[cfg(feature = "linkable")]
diff --git a/aimdb-data-contracts/src/lib.rs b/aimdb-data-contracts/src/lib.rs
index 7f7dbcc0..3b80394a 100644
--- a/aimdb-data-contracts/src/lib.rs
+++ b/aimdb-data-contracts/src/lib.rs
@@ -36,6 +36,9 @@ extern crate alloc;
pub mod contracts;
+mod streamable;
+pub use streamable::{for_each_streamable, Streamable, StreamableVisitor};
+
#[cfg(feature = "linkable")]
mod linkable;
@@ -55,7 +58,7 @@ mod migratable;
pub use simulatable::{SimulationConfig, SimulationParams};
#[cfg(feature = "migratable")]
-pub use migratable::{Migratable, MigrationError};
+pub use migratable::{MigrationChain, MigrationError, MigrationStep};
// ═══════════════════════════════════════════════════════════════════
// SCHEMA TRAITS (Implementation-defined)
@@ -82,11 +85,11 @@ pub use migratable::{Migratable, MigrationError};
/// | Add optional field | ✅ Yes | `#[serde(default)]` new field |
/// | Add field with default | ✅ Yes | New field deserializes to default |
/// | Remove unused field | ✅ Yes | Old data with field still parses |
-/// | Rename field | ⚠️ Migration | Use `Migratable` trait |
-/// | Change field type | ⚠️ Migration | Use `Migratable` trait |
-/// | Add required field | ⚠️ Migration | Use `Migratable` trait |
+/// | Rename field | ⚠️ Migration | Use `MigrationStep` + `migration_chain!` |
+/// | Change field type | ⚠️ Migration | Use `MigrationStep` + `migration_chain!` |
+/// | Add required field | ⚠️ Migration | Use `MigrationStep` + `migration_chain!` |
///
-/// For breaking changes, implement the `Migratable` trait (requires `migration` feature)
+/// For breaking changes, implement `MigrationStep` and use `migration_chain!` (requires `migratable` feature)
/// to provide runtime transformation of older data formats.
pub trait SchemaType: Sized {
/// Unique identifier for this schema (e.g., "temperature", "humidity")
diff --git a/aimdb-data-contracts/src/migratable.rs b/aimdb-data-contracts/src/migratable.rs
index c310fcb9..ecca8a96 100644
--- a/aimdb-data-contracts/src/migratable.rs
+++ b/aimdb-data-contracts/src/migratable.rs
@@ -1,24 +1,110 @@
-//! Runtime schema migration support.
+//! Type-safe, bidirectional schema migration with compile-time chain validation.
//!
-//! This module provides the `Migratable` trait for handling breaking schema
-//! changes at runtime through JSON transformation.
+//! This module provides an Alembic-inspired migration system where every version
+//! transition is a typed, bidirectional step between concrete Rust structs.
+//!
+//! # Architecture
+//!
+//! - [`MigrationStep`] — a single upgrade/downgrade between two concrete types
+//! - [`MigrationChain`] — runtime dispatch generated by [`migration_chain!`] macro
+//! - [`migration_chain!`] — declarative macro that validates the chain at compile time
+//!
+//! # How It Works
+//!
+//! Each schema version is a concrete Rust struct. Migration steps convert between
+//! adjacent versions with full type safety — no raw JSON manipulation.
+//!
+//! The `migration_chain!` macro generates:
+//! 1. **Const assertions** — version sequence validated at compile time
+//! 2. **Type-checked dispatch** — compiler rejects mismatched type chains
+//! 3. **`MigrationChain` impl** — runtime upgrade/downgrade with version detection
+//!
+//! # Example
+//!
+//! ```rust
+//! extern crate alloc;
+//!
+//! use aimdb_data_contracts::{SchemaType, MigrationStep, MigrationChain, MigrationError};
+//! use aimdb_data_contracts::migration_chain;
+//! use serde::{Deserialize, Serialize};
+//!
+//! // v1 schema
+//! #[derive(Clone, Debug, Serialize, Deserialize)]
+//! struct SensorV1 {
+//! schema_version: u32,
+//! temp: f32,
+//! timestamp: u64,
+//! }
+//! impl SchemaType for SensorV1 {
+//! const NAME: &'static str = "sensor_v1";
+//! const VERSION: u32 = 1;
+//! }
+//!
+//! // v2 schema (current)
+//! #[derive(Clone, Debug, Serialize, Deserialize)]
+//! struct Sensor {
+//! schema_version: u32,
+//! celsius: f32,
+//! timestamp: u64,
+//! }
+//! impl SchemaType for Sensor {
+//! const NAME: &'static str = "sensor";
+//! const VERSION: u32 = 2;
+//! }
+//!
+//! // Migration step: v1 -> v2
+//! struct SensorV1ToV2;
+//! impl MigrationStep for SensorV1ToV2 {
+//! type Older = SensorV1;
+//! type Newer = Sensor;
+//! const FROM_VERSION: u32 = 1;
+//! const TO_VERSION: u32 = 2;
+//!
+//! fn up(v1: SensorV1) -> Result<Sensor, MigrationError> {
+//! Ok(Sensor { schema_version: 2, celsius: v1.temp, timestamp: v1.timestamp })
+//! }
+//! fn down(v2: Sensor) -> Result<SensorV1, MigrationError> {
+//! Ok(SensorV1 { schema_version: 1, temp: v2.celsius, timestamp: v2.timestamp })
+//! }
+//! }
+//!
+//! // Wire up the chain
+//! migration_chain! {
+//! type Current = Sensor;
+//! version_field = "schema_version";
+//! steps {
+//! SensorV1ToV2: SensorV1 => Sensor,
+//! }
+//! }
+//!
+//! // Upgrade from v1 bytes
+//! let v1_json = r#"{"schema_version":1,"temp":22.5,"timestamp":100}"#;
+//! let sensor = Sensor::migrate_from_bytes(v1_json.as_bytes()).unwrap();
+//! assert_eq!(sensor.celsius, 22.5);
+//!
+//! // Downgrade to v1 bytes
+//! let v1_bytes = sensor.migrate_to_version(1).unwrap();
+//! let v1_roundtrip: serde_json::Value = serde_json::from_slice(&v1_bytes).unwrap();
+//! assert_eq!(v1_roundtrip["temp"], 22.5);
+//! ```
use crate::SchemaType;
/// Error returned when schema migration fails.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MigrationError {
- /// The source version is newer than this schema supports
+ /// The source version is newer than this binary supports
VersionTooNew { source: u32, current: u32 },
- /// A required field is missing and has no default
- MissingField(&'static str),
- /// Type conversion failed
- TypeConversion {
- field: &'static str,
- expected: &'static str,
- },
- /// Custom migration error
- Custom(&'static str),
+ /// The target downgrade version is below the minimum supported
+ VersionTooOld { target: u32, minimum: u32 },
+ /// Deserialization of a versioned payload failed
+ DeserializationFailed(&'static str),
+ /// Serialization during downgrade failed
+ SerializationFailed(&'static str),
+ /// A domain-specific conversion error in a MigrationStep
+ ConversionFailed(&'static str),
+ /// Payload is missing the version field
+ MissingVersion,
}
impl core::fmt::Display for MigrationError {
@@ -31,113 +117,541 @@ impl core::fmt::Display for MigrationError {
source, current
)
}
- Self::MissingField(field) => write!(f, "missing required field: {}", field),
- Self::TypeConversion { field, expected } => {
+ Self::VersionTooOld { target, minimum } => {
write!(
f,
- "type conversion failed for '{}', expected {}",
- field, expected
+ "target version {} is below minimum supported {}",
+ target, minimum
)
}
- Self::Custom(msg) => write!(f, "{}", msg),
+ Self::DeserializationFailed(msg) => write!(f, "deserialization failed: {}", msg),
+ Self::SerializationFailed(msg) => write!(f, "serialization failed: {}", msg),
+ Self::ConversionFailed(msg) => write!(f, "conversion failed: {}", msg),
+ Self::MissingVersion => write!(f, "payload missing version field"),
}
}
}
-/// Runtime schema migration support.
-///
-/// Implement this trait to handle breaking schema changes at runtime.
-/// The `migrate` function transforms raw JSON data from older versions
-/// to the current schema format before deserialization.
-///
-/// # When to Use
-///
-/// Use `Migratable` when you need to:
-/// - Rename fields while maintaining backward compatibility
-/// - Change field types (e.g., int to float)
-/// - Add required fields with computed defaults
-/// - Handle complex structural changes
+/// A single, typed, bidirectional migration step between two schema versions.
///
-/// For simple additive changes (new optional fields), just use
-/// `#[serde(default)]` - no migration needed.
+/// Each step converts between two concrete Rust types with full type safety.
+/// The compiler enforces that `up()` and `down()` operate on the correct types.
///
/// # Example
///
-/// ```rust
-/// use aimdb_data_contracts::{SchemaType, Migratable, MigrationError};
-/// use serde::{Deserialize, Serialize};
-/// use serde_json::Value;
+/// ```rust,ignore
+/// struct TempV1ToV2;
+/// impl MigrationStep for TempV1ToV2 {
+/// type Older = TemperatureV1;
+/// type Newer = Temperature;
+/// const FROM_VERSION: u32 = 1;
+/// const TO_VERSION: u32 = 2;
///
-/// #[derive(Serialize, Deserialize)]
-/// struct Temperature {
-/// celsius: f32, // Was "temp" in v1
-/// timestamp: u64,
-/// unit: String, // Added in v3 as required field
+/// fn up(v1: TemperatureV1) -> Result<Temperature, MigrationError> {
+/// Ok(v1.to_v2())
+/// }
+/// fn down(v2: Temperature) -> Result<TemperatureV1, MigrationError> {
+/// Ok(TemperatureV1 { temp: v2.celsius, .. })
+/// }
/// }
+/// ```
+pub trait MigrationStep {
+ /// The older schema type (input to `up`, output of `down`)
+ type Older;
+ /// The newer schema type (output of `up`, input to `down`)
+ type Newer;
+ /// The version number of the Older type
+ const FROM_VERSION: u32;
+ /// The version number of the Newer type
+ const TO_VERSION: u32;
+
+ /// Upgrade: convert from older to newer representation.
+ fn up(older: Self::Older) -> Result;
+ /// Downgrade: convert from newer to older representation.
+ fn down(newer: Self::Newer) -> Result;
+}
+
+/// A complete, validated migration chain for a schema type.
///
-/// impl SchemaType for Temperature {
-/// const NAME: &'static str = "temperature";
-/// const VERSION: u32 = 3;
-/// }
+/// Generated by the [`migration_chain!`] macro. Provides runtime dispatch
+/// for upgrading from any historical version to the current version,
+/// and downgrading from the current version to any historical version.
///
-/// impl Migratable for Temperature {
-/// fn migrate(raw: &mut Value, from_version: u32) -> Result<(), MigrationError> {
-/// // v1 -> v2: "temp" was renamed to "celsius"
-/// if from_version < 2 {
-/// if let Some(v) = raw.get("temp").cloned() {
-/// raw["celsius"] = v;
-/// raw.as_object_mut().unwrap().remove("temp");
-/// }
-/// }
+/// All chain validation (sequential versions, type chaining) happens
+/// at compile time via const assertions and type checking in the macro expansion.
+pub trait MigrationChain: SchemaType + serde::de::DeserializeOwned + serde::Serialize {
+ /// The minimum version this chain can upgrade from.
+ const MIN_VERSION: u32;
+
+ /// Deserialize from bytes, auto-detecting version and upgrading to current.
+ ///
+ /// Reads the version field from the JSON payload and walks the migration
+ /// chain upward to produce the current schema version.
+ fn migrate_from_bytes(data: &[u8]) -> Result<Self, MigrationError>;
+
+ /// Downgrade to a target version and serialize to bytes.
+ ///
+ /// Walks the migration chain downward from the current version to produce
+ /// the serialized representation of an older schema version.
+ fn migrate_to_version(
+ &self,
+ target_version: u32,
+ ) -> Result<alloc::vec::Vec<u8>, MigrationError>;
+}
+
+// ═══════════════════════════════════════════════════════════════════
+// MIGRATION CHAIN MACRO
+// ═══════════════════════════════════════════════════════════════════
+
+/// Validate the migration chain at compile time and generate `MigrationChain` impl.
///
-/// // v2 -> v3: added required "unit" field
-/// if from_version < 3 {
-/// if raw.get("unit").is_none() {
-/// raw["unit"] = Value::String("celsius".into());
-/// }
-/// }
+/// # Syntax
///
-/// Ok(())
+/// ```rust,ignore
+/// migration_chain! {
+/// type Current = MyType;
+/// version_field = "schema_version";
+/// steps {
+/// StepV1ToV2: TypeV1 => TypeV2,
+/// StepV2ToV3: TypeV2 => MyType,
/// }
/// }
/// ```
-pub trait Migratable: SchemaType {
- /// Migrate raw JSON data from an older version to the current schema.
- ///
- /// Called during deserialization when `from_version < VERSION`.
- /// Mutate `raw` in place to transform it to the current schema format.
- ///
- /// # Parameters
- /// - `raw`: Mutable reference to the JSON value to transform
- /// - `from_version`: The version of the incoming data
- ///
- /// # Returns
- /// - `Ok(())` if migration succeeded
- /// - `Err(MigrationError)` if migration failed
- fn migrate(raw: &mut serde_json::Value, from_version: u32) -> Result<(), MigrationError>;
+///
+/// # What it generates
+///
+/// 1. Const assertions validating version sequence and chain continuity
+/// 2. `impl MigrationChain for Current` with upgrade/downgrade dispatch
+/// 3. Type-checked match arms (compiler rejects broken chains)
+#[macro_export]
+macro_rules! migration_chain {
+ // ── Single-step chain ──────────────────────────────────────────
+ (
+ type Current = $current:ty;
+ version_field = $version_field:literal;
+ steps {
+ $step1:ty : $older1:ty => $newer1:ty $(,)?
+ }
+ ) => {
+ // Compile-time validation
+ const _: () = {
+ // Step must increment by exactly 1
+ assert!(
+ <$step1 as $crate::MigrationStep>::TO_VERSION
+ == <$step1 as $crate::MigrationStep>::FROM_VERSION + 1,
+ "migration step must increment version by exactly 1"
+ );
+ // First step starts at version 1
+ assert!(
+ <$step1 as $crate::MigrationStep>::FROM_VERSION == 1,
+ "first migration step must start at version 1"
+ );
+ // Last step ends at current VERSION
+ assert!(
+ <$step1 as $crate::MigrationStep>::TO_VERSION
+ == <$current as $crate::SchemaType>::VERSION,
+ "last migration step must end at current VERSION"
+ );
+ };
- /// Deserialize with automatic migration from older versions.
- ///
- /// This is a convenience method that handles version checking and migration.
- fn deserialize_versioned(
- raw: &mut serde_json::Value,
- from_version: u32,
- ) -> Result
- where
- Self: serde::de::DeserializeOwned,
- {
- if from_version > Self::VERSION {
- return Err(MigrationError::VersionTooNew {
- source: from_version,
- current: Self::VERSION,
- });
+ impl $crate::MigrationChain for $current {
+ const MIN_VERSION: u32 = 1;
+
+ fn migrate_from_bytes(data: &[u8]) -> Result {
+ let raw: serde_json::Value = serde_json::from_slice(data)
+ .map_err(|_| $crate::MigrationError::DeserializationFailed("invalid JSON"))?;
+
+ let version = raw
+ .get($version_field)
+ .and_then(|v| v.as_u64())
+ .ok_or($crate::MigrationError::MissingVersion)? as u32;
+
+ if version > <$current as $crate::SchemaType>::VERSION {
+ return Err($crate::MigrationError::VersionTooNew {
+ source: version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ });
+ }
+
+ match version {
+ 1 => {
+ let older: $older1 = serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($older1)
+ ))
+ })?;
+ <$step1 as $crate::MigrationStep>::up(older)
+ }
+ v if v == <$current as $crate::SchemaType>::VERSION => {
+ serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($current)
+ ))
+ })
+ }
+ _ => Err($crate::MigrationError::VersionTooNew {
+ source: version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ }),
+ }
+ }
+
+ fn migrate_to_version(
+ &self,
+ target_version: u32,
+ ) -> Result, $crate::MigrationError> {
+ if target_version < Self::MIN_VERSION {
+ return Err($crate::MigrationError::VersionTooOld {
+ target: target_version,
+ minimum: Self::MIN_VERSION,
+ });
+ }
+ if target_version > <$current as $crate::SchemaType>::VERSION {
+ return Err($crate::MigrationError::VersionTooNew {
+ source: target_version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ });
+ }
+
+ match target_version {
+ 1 => {
+ let older = <$step1 as $crate::MigrationStep>::down(self.clone())?;
+ serde_json::to_vec(&older).map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($older1)
+ ))
+ })
+ }
+ v if v == <$current as $crate::SchemaType>::VERSION => serde_json::to_vec(self)
+ .map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($current)
+ ))
+ }),
+ _ => unreachable!(),
+ }
+ }
}
+ };
- if from_version < Self::VERSION {
- Self::migrate(raw, from_version)?;
+ // ── Two-step chain ─────────────────────────────────────────────
+ (
+ type Current = $current:ty;
+ version_field = $version_field:literal;
+ steps {
+ $step1:ty : $older1:ty => $newer1:ty,
+ $step2:ty : $older2:ty => $newer2:ty $(,)?
}
+ ) => {
+ const _: () = {
+ // Each step increments by exactly 1
+ assert!(
+ <$step1 as $crate::MigrationStep>::TO_VERSION
+ == <$step1 as $crate::MigrationStep>::FROM_VERSION + 1,
+ "migration step must increment version by exactly 1"
+ );
+ assert!(
+ <$step2 as $crate::MigrationStep>::TO_VERSION
+ == <$step2 as $crate::MigrationStep>::FROM_VERSION + 1,
+ "migration step must increment version by exactly 1"
+ );
+ // First step starts at 1
+ assert!(
+ <$step1 as $crate::MigrationStep>::FROM_VERSION == 1,
+ "first migration step must start at version 1"
+ );
+ // Steps are sequential
+ assert!(
+ <$step1 as $crate::MigrationStep>::TO_VERSION
+ == <$step2 as $crate::MigrationStep>::FROM_VERSION,
+ "migration steps must be sequential"
+ );
+ // Last step ends at current VERSION
+ assert!(
+ <$step2 as $crate::MigrationStep>::TO_VERSION
+ == <$current as $crate::SchemaType>::VERSION,
+ "last migration step must end at current VERSION"
+ );
+ };
- serde_json::from_value(raw.clone())
- .map_err(|_| MigrationError::Custom("deserialization failed after migration"))
- }
+ impl $crate::MigrationChain for $current {
+ const MIN_VERSION: u32 = 1;
+
+ fn migrate_from_bytes(data: &[u8]) -> Result {
+ let raw: serde_json::Value = serde_json::from_slice(data)
+ .map_err(|_| $crate::MigrationError::DeserializationFailed("invalid JSON"))?;
+
+ let version = raw
+ .get($version_field)
+ .and_then(|v| v.as_u64())
+ .ok_or($crate::MigrationError::MissingVersion)? as u32;
+
+ if version > <$current as $crate::SchemaType>::VERSION {
+ return Err($crate::MigrationError::VersionTooNew {
+ source: version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ });
+ }
+
+ match version {
+ 1 => {
+ let v1: $older1 = serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($older1)
+ ))
+ })?;
+ let v2 = <$step1 as $crate::MigrationStep>::up(v1)?;
+ <$step2 as $crate::MigrationStep>::up(v2)
+ }
+ 2 => {
+ let v2: $older2 = serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($older2)
+ ))
+ })?;
+ <$step2 as $crate::MigrationStep>::up(v2)
+ }
+ v if v == <$current as $crate::SchemaType>::VERSION => {
+ serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($current)
+ ))
+ })
+ }
+ _ => Err($crate::MigrationError::VersionTooNew {
+ source: version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ }),
+ }
+ }
+
+ fn migrate_to_version(
+ &self,
+ target_version: u32,
+ ) -> Result, $crate::MigrationError> {
+ if target_version < Self::MIN_VERSION {
+ return Err($crate::MigrationError::VersionTooOld {
+ target: target_version,
+ minimum: Self::MIN_VERSION,
+ });
+ }
+ if target_version > <$current as $crate::SchemaType>::VERSION {
+ return Err($crate::MigrationError::VersionTooNew {
+ source: target_version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ });
+ }
+
+ match target_version {
+ 1 => {
+ let v2 = <$step2 as $crate::MigrationStep>::down(self.clone())?;
+ let v1 = <$step1 as $crate::MigrationStep>::down(v2)?;
+ serde_json::to_vec(&v1).map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($older1)
+ ))
+ })
+ }
+ 2 => {
+ let v2 = <$step2 as $crate::MigrationStep>::down(self.clone())?;
+ serde_json::to_vec(&v2).map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($older2)
+ ))
+ })
+ }
+ v if v == <$current as $crate::SchemaType>::VERSION => serde_json::to_vec(self)
+ .map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($current)
+ ))
+ }),
+ _ => unreachable!(),
+ }
+ }
+ }
+ };
+
+ // ── Three-step chain ───────────────────────────────────────────
+ (
+ type Current = $current:ty;
+ version_field = $version_field:literal;
+ steps {
+ $step1:ty : $older1:ty => $newer1:ty,
+ $step2:ty : $older2:ty => $newer2:ty,
+ $step3:ty : $older3:ty => $newer3:ty $(,)?
+ }
+ ) => {
+ const _: () = {
+ assert!(
+ <$step1 as $crate::MigrationStep>::TO_VERSION
+ == <$step1 as $crate::MigrationStep>::FROM_VERSION + 1,
+ "migration step must increment version by exactly 1"
+ );
+ assert!(
+ <$step2 as $crate::MigrationStep>::TO_VERSION
+ == <$step2 as $crate::MigrationStep>::FROM_VERSION + 1,
+ "migration step must increment version by exactly 1"
+ );
+ assert!(
+ <$step3 as $crate::MigrationStep>::TO_VERSION
+ == <$step3 as $crate::MigrationStep>::FROM_VERSION + 1,
+ "migration step must increment version by exactly 1"
+ );
+ assert!(
+ <$step1 as $crate::MigrationStep>::FROM_VERSION == 1,
+ "first migration step must start at version 1"
+ );
+ assert!(
+ <$step1 as $crate::MigrationStep>::TO_VERSION
+ == <$step2 as $crate::MigrationStep>::FROM_VERSION,
+ "migration steps must be sequential"
+ );
+ assert!(
+ <$step2 as $crate::MigrationStep>::TO_VERSION
+ == <$step3 as $crate::MigrationStep>::FROM_VERSION,
+ "migration steps must be sequential"
+ );
+ assert!(
+ <$step3 as $crate::MigrationStep>::TO_VERSION
+ == <$current as $crate::SchemaType>::VERSION,
+ "last migration step must end at current VERSION"
+ );
+ };
+
+ impl $crate::MigrationChain for $current {
+ const MIN_VERSION: u32 = 1;
+
+ fn migrate_from_bytes(data: &[u8]) -> Result {
+ let raw: serde_json::Value = serde_json::from_slice(data)
+ .map_err(|_| $crate::MigrationError::DeserializationFailed("invalid JSON"))?;
+
+ let version = raw
+ .get($version_field)
+ .and_then(|v| v.as_u64())
+ .ok_or($crate::MigrationError::MissingVersion)? as u32;
+
+ if version > <$current as $crate::SchemaType>::VERSION {
+ return Err($crate::MigrationError::VersionTooNew {
+ source: version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ });
+ }
+
+ match version {
+ 1 => {
+ let v1: $older1 = serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($older1)
+ ))
+ })?;
+ let v2 = <$step1 as $crate::MigrationStep>::up(v1)?;
+ let v3 = <$step2 as $crate::MigrationStep>::up(v2)?;
+ <$step3 as $crate::MigrationStep>::up(v3)
+ }
+ 2 => {
+ let v2: $older2 = serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($older2)
+ ))
+ })?;
+ let v3 = <$step2 as $crate::MigrationStep>::up(v2)?;
+ <$step3 as $crate::MigrationStep>::up(v3)
+ }
+ 3 => {
+ let v3: $older3 = serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($older3)
+ ))
+ })?;
+ <$step3 as $crate::MigrationStep>::up(v3)
+ }
+ v if v == <$current as $crate::SchemaType>::VERSION => {
+ serde_json::from_value(raw).map_err(|_| {
+ $crate::MigrationError::DeserializationFailed(concat!(
+ "failed to parse as ",
+ stringify!($current)
+ ))
+ })
+ }
+ _ => Err($crate::MigrationError::VersionTooNew {
+ source: version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ }),
+ }
+ }
+
+ fn migrate_to_version(
+ &self,
+ target_version: u32,
+ ) -> Result, $crate::MigrationError> {
+ if target_version < Self::MIN_VERSION {
+ return Err($crate::MigrationError::VersionTooOld {
+ target: target_version,
+ minimum: Self::MIN_VERSION,
+ });
+ }
+ if target_version > <$current as $crate::SchemaType>::VERSION {
+ return Err($crate::MigrationError::VersionTooNew {
+ source: target_version,
+ current: <$current as $crate::SchemaType>::VERSION,
+ });
+ }
+
+ match target_version {
+ 1 => {
+ let v3 = <$step3 as $crate::MigrationStep>::down(self.clone())?;
+ let v2 = <$step2 as $crate::MigrationStep>::down(v3)?;
+ let v1 = <$step1 as $crate::MigrationStep>::down(v2)?;
+ serde_json::to_vec(&v1).map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($older1)
+ ))
+ })
+ }
+ 2 => {
+ let v3 = <$step3 as $crate::MigrationStep>::down(self.clone())?;
+ let v2 = <$step2 as $crate::MigrationStep>::down(v3)?;
+ serde_json::to_vec(&v2).map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($older2)
+ ))
+ })
+ }
+ 3 => {
+ let v3 = <$step3 as $crate::MigrationStep>::down(self.clone())?;
+ serde_json::to_vec(&v3).map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($older3)
+ ))
+ })
+ }
+ v if v == <$current as $crate::SchemaType>::VERSION => serde_json::to_vec(self)
+ .map_err(|_| {
+ $crate::MigrationError::SerializationFailed(concat!(
+ "failed to serialize as ",
+ stringify!($current)
+ ))
+ }),
+ _ => unreachable!(),
+ }
+ }
+ }
+ };
}
diff --git a/aimdb-data-contracts/src/streamable.rs b/aimdb-data-contracts/src/streamable.rs
new file mode 100644
index 00000000..cb9591ed
--- /dev/null
+++ b/aimdb-data-contracts/src/streamable.rs
@@ -0,0 +1,184 @@
+//! Streamable trait for data contracts that can cross serialization boundaries.
+//!
+//! Types implementing [`Streamable`] can be transported across WebSocket, WASM,
+//! and other wire boundaries with full contract enforcement (Rust serde
+//! deserialization at the receiving end).
+//!
+//! # Design
+//!
+//! `Streamable` is a *capability marker* — it combines [`SchemaType`] identity
+//! with the `serde` bounds needed for type-erased dispatch at serialization
+//! boundaries. The companion [`for_each_streamable`] function is the single
+//! source of truth for which types are streamable — consumers implement
+//! [`StreamableVisitor`] to build whatever dispatch tables they need.
+//!
+//! # Adding a new streamable contract
+//!
+//! 1. Define your struct with `Serialize + Deserialize` in `contracts/`.
+//! 2. Implement `SchemaType` (unique `NAME`).
+//! 3. `impl Streamable for MyType {}` below.
+//! 4. Add `visitor.visit::();` in [`for_each_streamable`].
+//!
+//! That's it — every consumer that uses the visitor picks up the new type
+//! automatically.
+
+use crate::SchemaType;
+use core::fmt::Debug;
+use serde::{de::DeserializeOwned, Serialize};
+
+/// Data contracts that can be transported across serialization boundaries.
+///
+/// Implementing this trait signals that a contract type supports:
+/// - Type-erased JSON/`JsValue` serialization and deserialization
+/// - Registration in AimDB builders by schema name string
+/// - Cross-boundary dispatch (WASM bindings, WebSocket bridge, CLI)
+///
+/// # Bounds
+///
+/// The super-trait bounds mirror what AimDB's typed record APIs require:
+/// `Send + Sync + Clone + Debug + 'static` plus serde `Serialize` and
+/// `DeserializeOwned`. All standard data contracts satisfy these.
+///
+/// # Example
+///
+/// ```rust
+/// use aimdb_data_contracts::{SchemaType, Streamable};
+/// use aimdb_data_contracts::contracts::Temperature;
+///
+/// // Temperature implements Streamable — it can be used across boundaries
+/// fn assert_streamable() {}
+/// assert_streamable::();
+/// ```
+pub trait Streamable:
+ SchemaType + Serialize + DeserializeOwned + Send + Sync + Clone + Debug + 'static
+{
+}
+
+/// Visitor trait for iterating over all registered [`Streamable`] types.
+///
+/// Implement this trait to build type-erased dispatch tables, registries,
+/// or any other structure that needs to know about all streamable types.
+///
+/// # Example
+///
+/// ```rust
+/// use std::any::TypeId;
+/// use aimdb_data_contracts::{SchemaType, Streamable, StreamableVisitor, for_each_streamable};
+///
+/// struct TypeIdCollector {
+/// entries: Vec<(TypeId, &'static str)>,
+/// }
+///
+/// impl StreamableVisitor for TypeIdCollector {
+/// fn visit(&mut self) {
+/// self.entries.push((TypeId::of::(), T::NAME));
+/// }
+/// }
+///
+/// let mut collector = TypeIdCollector { entries: Vec::new() };
+/// for_each_streamable(&mut collector);
+/// assert_eq!(collector.entries.len(), 3);
+/// ```
+pub trait StreamableVisitor {
+ /// Called once for each registered [`Streamable`] type.
+ fn visit(&mut self);
+}
+
+// ═══════════════════════════════════════════════════════════════════
+// Implementations for built-in contracts
+// ═══════════════════════════════════════════════════════════════════
+
+use crate::contracts::{GpsLocation, Humidity, Temperature};
+
+impl Streamable for Temperature {}
+impl Streamable for Humidity {}
+impl Streamable for GpsLocation {}
+
+/// Iterate over every registered [`Streamable`] type via the visitor pattern.
+///
+/// This is the **single source of truth** for which types are streamable.
+/// All consumers (WASM adapter, WebSocket connector, CLI) use this function
+/// to discover streamable types instead of maintaining their own lists.
+///
+/// # Adding a new contract
+///
+/// 1. `impl Streamable for NewType {}` (above)
+/// 2. Add `visitor.visit::();` here.
+pub fn for_each_streamable(visitor: &mut impl StreamableVisitor) {
+ visitor.visit::();
+ visitor.visit::();
+ visitor.visit::();
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use core::any::TypeId;
+
+ struct NameCollector {
+ names: alloc::vec::Vec<&'static str>,
+ }
+
+ impl StreamableVisitor for NameCollector {
+ fn visit(&mut self) {
+ self.names.push(T::NAME);
+ }
+ }
+
+ struct TypeIdResolver {
+ target: TypeId,
+ result: Option<&'static str>,
+ }
+
+ impl StreamableVisitor for TypeIdResolver {
+ fn visit(&mut self) {
+ if TypeId::of::() == self.target {
+ self.result = Some(T::NAME);
+ }
+ }
+ }
+
+ #[test]
+ fn visitor_discovers_all_types() {
+ let mut c = NameCollector {
+ names: alloc::vec::Vec::new(),
+ };
+ for_each_streamable(&mut c);
+ assert!(c.names.contains(&"temperature"));
+ assert!(c.names.contains(&"humidity"));
+ assert!(c.names.contains(&"gps_location"));
+ assert_eq!(c.names.len(), 3);
+ }
+
+ #[test]
+ fn visitor_resolves_type_id() {
+ let mut r = TypeIdResolver {
+ target: TypeId::of::(),
+ result: None,
+ };
+ for_each_streamable(&mut r);
+ assert_eq!(r.result, Some("temperature"));
+ }
+
+ #[test]
+ fn visitor_returns_none_for_unknown() {
+ let mut r = TypeIdResolver {
+ target: TypeId::of::(),
+ result: None,
+ };
+ for_each_streamable(&mut r);
+ assert_eq!(r.result, None);
+ }
+
+ #[test]
+ fn known_schemas_are_discoverable() {
+ let mut c = NameCollector {
+ names: alloc::vec::Vec::new(),
+ };
+ for_each_streamable(&mut c);
+ assert!(c.names.contains(&"temperature"));
+ assert!(c.names.contains(&"humidity"));
+ assert!(c.names.contains(&"gps_location"));
+ assert!(!c.names.contains(&"unknown"));
+ }
+}
diff --git a/aimdb-data-contracts/tests/export_ts.rs b/aimdb-data-contracts/tests/export_ts.rs
deleted file mode 100644
index 94ca7700..00000000
--- a/aimdb-data-contracts/tests/export_ts.rs
+++ /dev/null
@@ -1,269 +0,0 @@
-//! TypeScript export test
-//!
-//! Run with: cargo test --features ts,observable export_typescript -- --ignored
-//!
-//! This generates TypeScript type definitions to the bindings/ directory,
-//! plus a schema-registry.ts with full metadata (fields, units, icons).
-
-#![cfg(feature = "ts")]
-
-use aimdb_data_contracts::contracts::{GpsLocation, Humidity, Temperature};
-use aimdb_data_contracts::{Observable, SchemaType};
-use std::fs;
-use std::path::Path;
-use ts_rs::TS;
-
-/// Schema metadata for export to TypeScript - derived automatically from traits
-struct SchemaMeta {
- name: &'static str,
- icon: &'static str,
- unit: &'static str,
- rust_schema: String,
- attributes: SchemaAttributes,
-}
-
-/// Trait implementations for a schema
-#[derive(Clone)]
-struct SchemaAttributes {
- observable: bool,
- simulatable: bool,
- linkable: bool,
- settable: bool,
- migratable: bool,
- no_std: bool,
-}
-
-impl SchemaMeta {
- /// Create metadata from any type implementing Observable + TS
- fn from_type(
- source_file: &str,
- struct_name: &str,
- attributes: SchemaAttributes,
- ) -> Self {
- let contracts_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("src/contracts");
- let rust_schema = extract_struct_definition(&contracts_dir.join(source_file), struct_name)
- .unwrap_or_else(|| {
- format!("// Could not extract {} from {}", struct_name, source_file)
- });
-
- SchemaMeta {
- name: T::NAME,
- icon: T::ICON,
- unit: T::UNIT,
- rust_schema,
- attributes,
- }
- }
-
- /// Derive label from name (temperature -> Temperature, gps_location -> GPS Location)
- fn label(&self) -> String {
- self.name
- .split('_')
- .map(|word| {
- let mut chars = word.chars();
- match chars.next() {
- None => String::new(),
- Some(c) => {
- // Handle known acronyms
- if word == "gps" {
- "GPS".to_string()
- } else {
- c.to_uppercase().chain(chars).collect()
- }
- }
- }
- })
- .collect::>()
- .join(" ")
- }
-}
-
-/// Extract a struct definition from a Rust source file
-fn extract_struct_definition(file_path: &Path, struct_name: &str) -> Option {
- let content = fs::read_to_string(file_path).ok()?;
-
- // Find the struct definition with doc comments
- let struct_marker = format!("pub struct {}", struct_name);
- let struct_start = content.find(&struct_marker)?;
-
- // Walk backwards to find doc comments (/// lines)
- let before_struct = &content[..struct_start];
- let doc_start = before_struct
- .rfind("\n\n")
- .or_else(|| before_struct.rfind("*/\n"))
- .map(|i| i + 1)
- .unwrap_or(struct_start);
-
- // Find the closing brace of the struct
- let after_struct = &content[struct_start..];
- let struct_end = after_struct
- .find("\n}\n")
- .or_else(|| after_struct.find("\n}"))?;
-
- let full_definition = &content[doc_start..struct_start + struct_end + 2];
-
- // Clean up: remove #[derive(...)] and #[cfg_attr(...)] lines, keep doc comments and struct
- let cleaned: String = full_definition
- .lines()
- .filter(|line| {
- let trimmed = line.trim();
- !trimmed.starts_with("#[derive")
- && !trimmed.starts_with("#[cfg_attr")
- && !trimmed.starts_with("#[serde")
- })
- .collect::>()
- .join("\n");
-
- Some(cleaned.trim().to_string())
-}
-
-/// Export all contract types to TypeScript.
-///
-/// By default, ts-rs exports to `./bindings/` relative to the crate root.
-/// Run this test with --ignored flag to generate the files:
-///
-/// ```sh
-/// cargo test -p aimdb-data-contracts --features ts,observable export_typescript -- --ignored
-/// ```
-#[test]
-#[ignore = "Run manually to generate TypeScript bindings"]
-fn export_typescript() {
- // Export each contract type via ts-rs
- Temperature::export_all().expect("Failed to export Temperature");
- Humidity::export_all().expect("Failed to export Humidity");
- GpsLocation::export_all().expect("Failed to export GpsLocation");
-
- println!("✅ TypeScript bindings exported to bindings/");
-
- // Generate schema registry with metadata - all derived from traits + source
- // Attributes reflect actual trait implementations in Rust source
- let base_traits = SchemaAttributes {
- observable: true,
- simulatable: true,
- linkable: true,
- settable: true,
- migratable: false,
- no_std: true,
- };
-
- // Temperature has Migratable trait (v1 → v2 migration)
- let temp_traits = SchemaAttributes {
- migratable: true,
- ..base_traits.clone()
- };
-
- let schemas = vec![
- SchemaMeta::from_type::("temperature.rs", "Temperature", temp_traits),
- SchemaMeta::from_type::("humidity.rs", "Humidity", base_traits.clone()),
- SchemaMeta::from_type::("location.rs", "GpsLocation", base_traits),
- ];
-
- generate_schema_registry(&schemas);
-}
-
-fn generate_schema_registry(schemas: &[SchemaMeta]) {
- let mut output = String::from(
- r#"// AUTO-GENERATED from aimdb-data-contracts - DO NOT EDIT
-// Run: cargo test -p aimdb-data-contracts --features ts,observable export_typescript -- --ignored
-
-export interface SchemaAttributes {
- observable: boolean;
- simulatable: boolean;
- linkable: boolean;
- settable: boolean;
- migratable: boolean;
- noStd: boolean;
-}
-
-export interface SchemaMeta {
- name: string;
- label: string;
- icon: string;
- unit: string;
- rustSchema: string;
- attributes: SchemaAttributes;
-}
-
-/**
- * Schema registry with metadata extracted from Rust Observable traits.
- * Keys match WebSocket message `type` field.
- */
-export const SCHEMA_REGISTRY: Record = {
-"#,
- );
-
- for schema in schemas {
- output.push_str(&format!(
- r#" "{}": {{
- name: "{}",
- label: "{}",
- icon: "{}",
- unit: "{}",
- rustSchema: `{}`,
- attributes: {{
- observable: {},
- simulatable: {},
- linkable: {},
- settable: {},
- migratable: {},
- noStd: {},
- }},
- }},
-"#,
- schema.name,
- schema.name,
- schema.label(),
- schema.icon,
- schema.unit,
- schema.rust_schema,
- schema.attributes.observable,
- schema.attributes.simulatable,
- schema.attributes.linkable,
- schema.attributes.settable,
- schema.attributes.migratable,
- schema.attributes.no_std,
- ));
- }
-
- output.push_str(
- r#"};
-
-/**
- * Get schema metadata by WebSocket message type.
- */
-export function getSchema(messageType: string): SchemaMeta | undefined {
- return SCHEMA_REGISTRY[messageType];
-}
-
-/**
- * List all available schema types.
- */
-export const SCHEMA_TYPES = Object.keys(SCHEMA_REGISTRY);
-"#,
- );
-
- let bindings_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("bindings");
- fs::create_dir_all(&bindings_dir).expect("Failed to create bindings dir");
-
- let output_path = bindings_dir.join("schema-registry.ts");
- fs::write(&output_path, output).expect("Failed to write schema registry");
-
- println!("✅ Schema registry exported to {:?}", output_path);
-}
-
-/// Verify that all types can be exported (doesn't write files)
-#[test]
-fn verify_ts_definitions() {
- // Just verify the definitions are valid
- let temp_ts = Temperature::decl();
- let humidity_ts = Humidity::decl();
- let location_ts = GpsLocation::decl();
-
- assert!(temp_ts.contains("Temperature"));
- assert!(humidity_ts.contains("Humidity"));
- assert!(location_ts.contains("GpsLocation"));
-
- println!("Temperature:\n{}\n", temp_ts);
- println!("Humidity:\n{}\n", humidity_ts);
- println!("GpsLocation:\n{}\n", location_ts);
-}
diff --git a/aimdb-persistence/src/ext.rs b/aimdb-persistence/src/ext.rs
index 35e28c4c..b8dfe381 100644
--- a/aimdb-persistence/src/ext.rs
+++ b/aimdb-persistence/src/ext.rs
@@ -67,7 +67,13 @@ where
}
};
- while let Ok(value) = reader.recv().await {
+ loop {
+ let value = match reader.recv().await {
+ Ok(v) => v,
+ Err(aimdb_core::DbError::BufferLagged { .. }) => continue,
+ Err(_) => break,
+ };
+
// T is known here — serialize directly, no with_remote_access() needed.
let json = match serde_json::to_value(&value) {
Ok(v) => v,
diff --git a/aimdb-wasm-adapter/Cargo.toml b/aimdb-wasm-adapter/Cargo.toml
new file mode 100644
index 00000000..464edbdd
--- /dev/null
+++ b/aimdb-wasm-adapter/Cargo.toml
@@ -0,0 +1,76 @@
+[package]
+name = "aimdb-wasm-adapter"
+version = "0.1.1"
+edition = "2021"
+authors.workspace = true
+license.workspace = true
+repository.workspace = true
+homepage.workspace = true
+description = "WebAssembly runtime adapter for AimDB - browser-native async runtime support"
+keywords = ["wasm", "webassembly", "browser", "async", "database"]
+categories = ["wasm", "asynchronous", "database-implementations"]
+
+[lib]
+crate-type = ["cdylib", "rlib"]
+
+[features]
+default = ["wasm-runtime"]
+
+# Runtime features
+wasm-runtime = [
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "js-sys",
+ "web-sys",
+ "serde-wasm-bindgen",
+]
+
+[dependencies]
+# Executor traits (no_std compatible)
+aimdb-executor = { version = "0.1.0", path = "../aimdb-executor", default-features = false }
+
+# Core AimDB types (alloc only — no std, no tokio)
+aimdb-core = { version = "0.5.0", path = "../aimdb-core", default-features = false, features = [
+ "alloc",
+] }
+
+# Shared WebSocket wire protocol
+aimdb-ws-protocol = { version = "0.1.0", path = "../aimdb-ws-protocol" }
+
+# Data contracts (alloc only — no std)
+aimdb-data-contracts = { version = "0.5.0", path = "../aimdb-data-contracts", default-features = false, features = [
+ "alloc",
+] }
+
+# WASM bindings
+wasm-bindgen = { version = "0.2", optional = true }
+wasm-bindgen-futures = { version = "0.4", optional = true }
+js-sys = { version = "0.3", optional = true }
+web-sys = { version = "0.3", optional = true, features = [
+ "console",
+ "CustomEvent",
+ "CustomEventInit",
+ "EventTarget",
+ "MessageEvent",
+ "Performance",
+ "WebSocket",
+ "Window",
+] }
+
+# Serialization (no_std + alloc)
+serde = { workspace = true }
+serde_json = { workspace = true }
+serde-wasm-bindgen = { version = "0.6", optional = true }
+
+# Async utilities (minimal, no_std compatible)
+futures-util = { version = "0.3", default-features = false, features = [
+ "alloc",
+] }
+
+[lints.rust]
+unexpected_cfgs = { level = "warn", check-cfg = [
+ 'cfg(feature, values("std"))',
+] }
+
+[dev-dependencies]
+wasm-bindgen-test = "0.3"
diff --git a/aimdb-wasm-adapter/LICENSE b/aimdb-wasm-adapter/LICENSE
new file mode 100644
index 00000000..261eeb9e
--- /dev/null
+++ b/aimdb-wasm-adapter/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/aimdb-wasm-adapter/README.md b/aimdb-wasm-adapter/README.md
new file mode 100644
index 00000000..a5d09d05
--- /dev/null
+++ b/aimdb-wasm-adapter/README.md
@@ -0,0 +1,166 @@
+# aimdb-wasm-adapter
+
+WebAssembly runtime adapter for AimDB — browser-native async runtime support.
+
+## Overview
+
+This crate provides a WASM runtime adapter that enables the full AimDB dataflow
+engine to run inside a web browser (or any `wasm32-unknown-unknown` host).
+
+Records, buffers, producers, consumers, and data-contract enforcement all
+execute natively in WASM — eliminating the need for a parallel validation
+layer (Zod, JSON Schema) on the TypeScript side.
+
+## Platform Matrix
+
+| Target | Adapter | Buffer Primitive | Spawn Mechanism |
+|--------|---------|------------------|-----------------|
+| MCU | `aimdb-embassy-adapter` | `embassy-sync` channels | Static task pool |
+| Edge / Cloud | `aimdb-tokio-adapter` | `tokio::sync` channels | `tokio::spawn` |
+| **Browser** | **`aimdb-wasm-adapter`** | **`Rc<RefCell<…>>`** | **`spawn_local`** |
+
+## Architecture
+
+The adapter is split into several focused modules:
+
+| Module | Purpose |
+|--------|---------|
+| `runtime.rs` | `WasmAdapter` — `RuntimeAdapter` + `Spawn` impl using `wasm_bindgen_futures::spawn_local` |
+| `time.rs` | `TimeOps` — `performance.now()` + `setTimeout`-based sleep via `gloo-timers` |
+| `logger.rs` | `Logger` — maps log levels to `console.log / debug / warn / error` |
+| `buffer.rs` | `WasmBuffer` — SPMC Ring, SingleLatest, Mailbox on `Rc<RefCell<…>>` |
+| `bindings.rs` | `WasmDb` — `#[wasm_bindgen]` facade: `configureRecord`, `get`, `set`, `subscribe` |
+| `ws_bridge.rs` | `WsBridge` — WebSocket bridge to remote AimDB server (AimX wire protocol) |
+| `react/` | React hooks — `useRecord`, `useSetRecord`, `useBridge` |
+
+## JavaScript / TypeScript API
+
+### WasmDb
+
+```typescript
+import init, { WasmDb } from '@aimdb/wasm';
+
+await init();
+const db = new WasmDb();
+
+// Configure records with Rust data contracts
+db.configureRecord('sensors.temperature.vienna', {
+ schemaType: 'temperature',
+ buffer: 'SingleLatest',
+});
+
+await db.build();
+
+// Get (returns deserialized JS object validated by Rust serde)
+const temp = db.get('sensors.temperature.vienna');
+console.log(temp.celsius);
+
+// Set (Rust serde validates the payload)
+db.set('sensors.temperature.vienna', { celsius: 22.5, timestamp: Date.now() });
+
+// Subscribe (callback fires on every buffer push)
+const unsub = db.subscribe('sensors.temperature.vienna', (value) => {
+ console.log('New temperature:', value.celsius);
+});
+```
+
+### WsBridge
+
+Connect the browser-local AimDB to a remote server:
+
+```typescript
+import { WsBridge } from '@aimdb/wasm';
+
+const bridge = WsBridge.connect(db, 'wss://api.example.com/ws', {
+ subscribeTopics: ['sensors/#'],
+ autoReconnect: true,
+ lateJoin: true,
+});
+
+bridge.onStatusChange((status) => {
+ console.log('Connection:', status); // 'Connected' | 'Reconnecting' | ...
+});
+
+bridge.write('commands.setpoint', { target: 21.0 });
+bridge.disconnect();
+```
+
+### React Hooks
+
+```tsx
+import { AimDbProvider, useRecord, useSetRecord, useBridge } from '@aimdb/wasm/react';
+
+function App() {
+ return (
+      <AimDbProvider>
+        <Dashboard />
+      </AimDbProvider>
+ );
+}
+
+function Dashboard() {
+ const temp = useRecord('sensors.temperature.vienna');
+  if (!temp) return <div>Loading…</div>;
+  return <div>{temp.celsius.toFixed(1)}°C</div>;
+}
+```
+
+**Available hooks:**
+
+| Hook | Returns | Purpose |
+|------|---------|---------|
+| `useRecord(key)` | `T \| null` | Subscribe to record, re-render on updates |
+| `useSetRecord(key)` | `(value: T) => void` | Write to record with contract validation |
+| `useAimDb()` | `WasmDb \| null` | Raw database access for advanced usage |
+| `useBridge()` | `WsBridge \| null` | Connection status and bridge control |
+
+## Data Contract Enforcement
+
+All `get` / `set` / `subscribe` calls go through the `Streamable` trait
+defined in `aimdb-data-contracts`. The `dispatch_streamable!` macro maps
+schema type names to Rust types and performs serde validation:
+
+```
+TypeScript value → serde_wasm_bindgen → Rust T: Streamable → buffer push
+```
+
+Adding a new contract requires only one change: implement `Streamable` for
+the new type in `aimdb-data-contracts` and add it to `dispatch_streamable!`.
+
+## Build
+
+```bash
+# Install dependencies
+rustup target add wasm32-unknown-unknown
+cargo install wasm-pack
+
+# Compile to WASM
+wasm-pack build --target web --out-dir pkg
+
+# Run headless browser tests
+wasm-pack test --headless --chrome
+```
+
+From the workspace root (`make` targets):
+
+```bash
+make wasm # Build WASM adapter
+make wasm-test # Run WASM tests
+make check # Full workspace check (includes WASM)
+```
+
+## Feature Flags
+
+| Feature | Default | Purpose |
+|---------|---------|---------|
+| `wasm-runtime` | ✅ | Full browser runtime (bindings, WsBridge, web-sys) |
+| `alloc` | ✅ | Core buffer + record support (no_std compatible) |
+
+## License
+
+Apache-2.0
diff --git a/aimdb-wasm-adapter/src/bindings.rs b/aimdb-wasm-adapter/src/bindings.rs
new file mode 100644
index 00000000..3796aad5
--- /dev/null
+++ b/aimdb-wasm-adapter/src/bindings.rs
@@ -0,0 +1,614 @@
+//! `#[wasm_bindgen]` TypeScript-facing API
+//!
+//! Exposes a high-level facade to JavaScript/TypeScript. Users do not interact
+//! with `Arc`, `RecordRegistrar`, or feature flags — all of that is hidden
+//! behind `WasmDb`, `configureRecord`, `get`, `set`, and `subscribe`.
+//!
+//! # Two-Phase Lifecycle
+//!
+//! 1. **Configuration** — `new WasmDb()` + `configureRecord(…)` calls collect
+//! record definitions without building the database.
+//! 2. **Build** — `await db.build()` compiles the configuration into a live
+//! AimDB instance (buffers, records, typed storage).
+//! 3. **Operation** — `get` / `set` / `subscribe` interact with the live
+//! database. Contract enforcement (Rust serde) happens at the WASM boundary.
+
+extern crate alloc;
+
+use alloc::boxed::Box;
+use alloc::collections::BTreeMap;
+use alloc::format;
+use alloc::rc::Rc;
+use alloc::string::{String, ToString};
+use alloc::sync::Arc;
+use alloc::vec::Vec;
+use core::cell::RefCell;
+use core::fmt::Debug;
+
+use serde::de::DeserializeOwned;
+use serde::{Deserialize, Serialize};
+use wasm_bindgen::prelude::*;
+
+use aimdb_core::buffer::BufferCfg;
+use aimdb_core::builder::{AimDb, AimDbBuilder};
+use aimdb_core::record_id::StringKey;
+
+use aimdb_ws_protocol::{ClientMessage, ServerMessage};
+
+use crate::schema_registry::{SchemaOps, SchemaRegistry};
+use crate::ws_bridge::WsBridge;
+use crate::WasmAdapter;
+
+// ─── Option parsing ───────────────────────────────────────────────────────
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct RecordOptions {
+    /// Schema type name; must be known to the `SchemaRegistry`.
+    schema_type: String,
+    /// Buffer selection — bare string or `{ type, capacity }` object.
+    buffer: BufferOption,
+}
+
+#[derive(Deserialize)]
+#[serde(untagged)]
+enum BufferOption {
+    /// Simple string: `"SingleLatest"`, `"Mailbox"`, `"SpmcRing"`
+    Simple(String),
+    /// Object: `{ type: "SpmcRing", capacity: 200 }`
+    Config {
+        r#type: String,
+        // Restored generic argument (stripped during extraction): ring
+        // capacity override; `None` falls back to the default in
+        // `parse_buffer_cfg`.
+        capacity: Option<usize>,
+    },
+}
+
+/// Translate a parsed `BufferOption` into a core `BufferCfg`.
+///
+/// The bare-string form uses a default ring capacity of 1024; the object
+/// form may override it. Unknown buffer type names are rejected with a
+/// `JsError` so the mistake surfaces at `configureRecord` time.
+fn parse_buffer_cfg(opt: &BufferOption) -> Result<BufferCfg, JsError> {
+    match opt {
+        BufferOption::Simple(s) => match s.as_str() {
+            "SingleLatest" => Ok(BufferCfg::SingleLatest),
+            "Mailbox" => Ok(BufferCfg::Mailbox),
+            "SpmcRing" => Ok(BufferCfg::SpmcRing { capacity: 1024 }),
+            _ => Err(JsError::new(&format!("Unknown buffer type: {s}"))),
+        },
+        BufferOption::Config { r#type, capacity } => match r#type.as_str() {
+            "SpmcRing" => Ok(BufferCfg::SpmcRing {
+                capacity: capacity.unwrap_or(1024),
+            }),
+            "SingleLatest" => Ok(BufferCfg::SingleLatest),
+            "Mailbox" => Ok(BufferCfg::Mailbox),
+            _ => Err(JsError::new(&format!("Unknown buffer type: {}", r#type))),
+        },
+    }
+}
+
+/// Returns `true` if `name` is a schema type registered in `registry`.
+///
+/// Thin free-function wrapper so option validation can be called without
+/// borrowing `self` inside `configure_record`.
+fn is_known_schema(registry: &SchemaRegistry, name: &str) -> bool {
+    registry.is_known(name)
+}
+
+// ─── Collected config (pre-build) ─────────────────────────────────────────
+
+/// A single record definition collected before `build()`.
+struct RecordConfig {
+    // Fully-qualified record key, e.g. "sensors.temperature.vienna".
+    key: String,
+    // Schema type name resolved against the SchemaRegistry at build time.
+    schema_type: String,
+    // Parsed buffer configuration for this record.
+    buffer_cfg: BufferCfg,
+}
+
+// ─── WasmDb ───────────────────────────────────────────────────────────────
+
+/// AimDB instance compiled to WebAssembly.
+///
+/// # Example (TypeScript)
+/// ```ts
+/// const db = new WasmDb();
+/// db.configureRecord('sensors.temperature.vienna', {
+/// schemaType: 'temperature',
+/// buffer: 'SingleLatest',
+/// });
+/// await db.build();
+/// db.set('sensors.temperature.vienna', { celsius: 22.5, timestamp: Date.now() });
+/// const t = db.get('sensors.temperature.vienna'); // validated by Rust serde
+/// ```
+#[wasm_bindgen]
+pub struct WasmDb {
+    /// Pre-build record configurations. `None` after `build()`.
+    // Restored generic arguments (stripped during extraction).
+    configs: Option<Vec<RecordConfig>>,
+    /// Live database handle. `None` before `build()`.
+    // NOTE(review): `connect_bridge` clones this as "two Arc pointer
+    // copies", so `AimDb` is assumed to be a cheap-clone handle — confirm.
+    db: Option<AimDb>,
+    /// Maps record key → schema type name (always populated).
+    schema_map: BTreeMap<String, String>,
+    /// Type-erased dispatch registry built from the visitor pattern.
+    registry: SchemaRegistry,
+}
+
+impl Default for WasmDb {
+    /// Equivalent to `WasmDb::new()` — an empty, unbuilt configuration.
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+#[wasm_bindgen]
+impl WasmDb {
+    /// Create a new (unconfigured) AimDB WASM instance.
+    #[wasm_bindgen(constructor)]
+    pub fn new() -> WasmDb {
+        WasmDb {
+            configs: Some(Vec::new()),
+            db: None,
+            schema_map: BTreeMap::new(),
+            registry: SchemaRegistry::build(),
+        }
+    }
+
+    /// Register a record before building the database.
+    ///
+    /// `options` is a JS object:
+    /// ```json
+    /// {
+    ///   "schemaType": "temperature",
+    ///   "buffer": "SingleLatest" // or { "type": "SpmcRing", "capacity": 100 }
+    /// }
+    /// ```
+    #[wasm_bindgen(js_name = "configureRecord")]
+    pub fn configure_record(&mut self, record_key: &str, options: JsValue) -> Result<(), JsError> {
+        let configs = self
+            .configs
+            .as_mut()
+            .ok_or_else(|| JsError::new("Cannot configure records after build()"))?;
+
+        let opts: RecordOptions = serde_wasm_bindgen::from_value(options)
+            .map_err(|e| JsError::new(&format!("Invalid options: {e}")))?;
+
+        // Reject unknown schema names early, before the config is recorded.
+        if !is_known_schema(&self.registry, &opts.schema_type) {
+            return Err(JsError::new(&format!(
+                "Unknown schema type: {}",
+                opts.schema_type
+            )));
+        }
+
+        let buffer_cfg = parse_buffer_cfg(&opts.buffer)?;
+
+        self.schema_map
+            .insert(record_key.to_string(), opts.schema_type.clone());
+
+        configs.push(RecordConfig {
+            key: record_key.to_string(),
+            schema_type: opts.schema_type,
+            buffer_cfg,
+        });
+
+        Ok(())
+    }
+
+    /// Build the database from the collected configuration.
+    ///
+    /// Must be called exactly once, after all `configureRecord()` calls and
+    /// before any `get` / `set` / `subscribe`.
+    pub async fn build(&mut self) -> Result<(), JsError> {
+        // `take()` makes build() single-shot: a second call sees `None`.
+        let configs = self
+            .configs
+            .take()
+            .ok_or_else(|| JsError::new("Database already built"))?;
+
+        let rt = Arc::new(WasmAdapter);
+        let mut builder = AimDbBuilder::new().runtime(rt);
+
+        for config in &configs {
+            apply_record_config(&self.registry, &mut builder, config)?;
+        }
+
+        let db = builder
+            .build()
+            .await
+            .map_err(|e| JsError::new(&format!("Build failed: {e:?}")))?;
+
+        self.db = Some(db);
+        Ok(())
+    }
+
+    /// Get the current value of a record (returns JS object or `undefined`).
+    ///
+    /// The value is the latest snapshot — it does not wait for a new push.
+    /// Returns `undefined` if no value has been produced yet.
+    pub fn get(&self, record_key: &str) -> Result<JsValue, JsError> {
+        let (db, ops) = self.resolve(record_key)?;
+        (ops.get)(db, record_key)
+    }
+
+    /// Set a record value (validates via Rust serde deserialization).
+    ///
+    /// Throws `JsError` if the payload fails contract validation (e.g. missing
+    /// required fields) or the record key is unknown.
+    pub fn set(&mut self, record_key: &str, value: JsValue) -> Result<(), JsError> {
+        let (db, ops) = self.resolve(record_key)?;
+        (ops.set)(db, record_key, value)
+    }
+
+    /// Subscribe to record updates. Returns an unsubscribe function.
+    ///
+    /// `callback` is invoked on every buffer push with the validated value.
+    pub fn subscribe(
+        &self,
+        record_key: &str,
+        callback: &js_sys::Function,
+    ) -> Result<JsValue, JsError> {
+        let (db, ops) = self.resolve(record_key)?;
+        (ops.subscribe)(db, record_key, callback)
+    }
+
+    /// Returns `true` if the database has been built.
+    #[wasm_bindgen(js_name = "isBuilt")]
+    pub fn is_built(&self) -> bool {
+        self.db.is_some()
+    }
+
+    /// Discover topics served at `url` without building a full database.
+    ///
+    /// Opens a one-shot WebSocket, sends `ListTopics`, and resolves with
+    /// `TopicInfo[]` once the server responds. Rejects after 30 s if no
+    /// response arrives, or immediately on connection error.
+    ///
+    /// # Example (TypeScript)
+    /// ```ts
+    /// const wasm = await import("aimdb-wasm-adapter");
+    /// await wasm.default();
+    /// const topics = await wasm.WasmDb.discover("wss://api.example.com/ws");
+    /// topics.forEach(t => db.configureRecord(t.entity, { schemaType: t.schemaType, buffer: "SingleLatest" }));
+    /// ```
+    pub async fn discover(url: &str) -> Result<JsValue, JsError> {
+        wasm_bindgen_futures::JsFuture::from(discover_impl(url.to_string()))
+            .await
+            .map_err(|e| JsError::new(&format!("discover: {e:?}")))
+    }
+
+    /// Returns the list of schema type names known to this WASM adapter.
+    ///
+    /// Use this to filter discovered topics before calling `configureRecord` —
+    /// topics whose `schemaType` is not in this list cannot be handled by the
+    /// WASM runtime and should be skipped.
+    #[wasm_bindgen(js_name = "knownSchemas")]
+    pub fn known_schemas(&self) -> Vec<String> {
+        self.registry
+            .known_names()
+            .iter()
+            .map(|s| s.to_string())
+            .collect()
+    }
+
+    /// Connect a WebSocket bridge to this database for server synchronization.
+    ///
+    /// The database remains usable for local `get()` / `set()` / `subscribe()`
+    /// after the bridge is opened — the bridge gets a cheap clone of the
+    /// internal `AimDb` handle (two `Arc` pointer copies).
+    ///
+    /// # Example (TypeScript)
+    /// ```ts
+    /// const bridge = db.connectBridge('wss://api.example.com/ws', {
+    ///   subscribeTopics: ['sensors/#'],
+    ///   autoReconnect: true,
+    /// });
+    /// bridge.onStatusChange((status) => console.log(status));
+    /// ```
+    #[wasm_bindgen(js_name = "connectBridge")]
+    pub fn connect_bridge(&self, url: &str, options: JsValue) -> Result<WsBridge, JsError> {
+        let db = self
+            .db
+            .as_ref()
+            .ok_or_else(|| JsError::new("Database not built. Call build() first."))?
+            .clone(); // cheap: two Arc pointer copies
+
+        let schema_map = self.schema_map.clone();
+        let registry = SchemaRegistry::build();
+
+        WsBridge::new_internal(db, schema_map, registry, url, options)
+    }
+}
+
+// ─── discover_impl ────────────────────────────────────────────────────────
+
+/// Build a one-shot WebSocket promise that resolves with `TopicInfo[]`.
+///
+/// Each callback pair (resolve, reject) is stored in an
+/// `Rc<RefCell<Option<js_sys::Function>>>` so that whichever event fires
+/// first wins and subsequent events are no-ops.
+fn discover_impl(url: String) -> js_sys::Promise {
+    js_sys::Promise::new(&mut move |resolve, reject| {
+        let ws = match web_sys::WebSocket::new(&url) {
+            Ok(ws) => ws,
+            Err(e) => {
+                let _ = reject.call1(
+                    &JsValue::NULL,
+                    &JsValue::from_str(&format!("WebSocket open failed: {e:?}")),
+                );
+                return;
+            }
+        };
+        let ws = Rc::new(ws);
+        // take()-once cells: first event to fire consumes the callback.
+        let resolve_rc: Rc<RefCell<Option<js_sys::Function>>> =
+            Rc::new(RefCell::new(Some(resolve)));
+        let reject_rc: Rc<RefCell<Option<js_sys::Function>>> = Rc::new(RefCell::new(Some(reject)));
+
+        // on_open: send ListTopics
+        {
+            let ws_clone = ws.clone();
+            let on_open = Closure::wrap(Box::new(move || {
+                let msg = ClientMessage::ListTopics {
+                    id: "discover".to_string(),
+                };
+                if let Ok(json) = serde_json::to_string(&msg) {
+                    let _ = ws_clone.send_with_str(&json);
+                }
+            }) as Box<dyn FnMut()>);
+            ws.set_onopen(Some(on_open.as_ref().unchecked_ref()));
+            // forget(): closure must outlive this scope; the socket owns it.
+            on_open.forget();
+        }
+
+        // on_message: parse TopicList, resolve, close socket
+        {
+            let ws_clone = ws.clone();
+            let resolve_clone = resolve_rc.clone();
+            let reject_clone = reject_rc.clone();
+            let on_message = Closure::wrap(Box::new(move |event: web_sys::MessageEvent| {
+                let _ = ws_clone.close();
+                let Some(text) = event.data().as_string() else {
+                    if let Some(rej) = reject_clone.borrow_mut().take() {
+                        let _ = rej.call1(
+                            &JsValue::NULL,
+                            &JsValue::from_str("Non-text frame from server"),
+                        );
+                    }
+                    return;
+                };
+                match serde_json::from_str::<ServerMessage>(&text) {
+                    Ok(ServerMessage::TopicList { topics, .. }) => {
+                        let serializer = serde_wasm_bindgen::Serializer::json_compatible();
+                        let arr = js_sys::Array::new();
+                        for topic in &topics {
+                            if let Ok(js_val) = topic.serialize(&serializer) {
+                                arr.push(&js_val);
+                            }
+                        }
+                        if let Some(res) = resolve_clone.borrow_mut().take() {
+                            let _ = res.call1(&JsValue::NULL, &arr);
+                        }
+                    }
+                    _ => {
+                        if let Some(rej) = reject_clone.borrow_mut().take() {
+                            let _ = rej.call1(
+                                &JsValue::NULL,
+                                &JsValue::from_str("Unexpected server message"),
+                            );
+                        }
+                    }
+                }
+            }) as Box<dyn FnMut(web_sys::MessageEvent)>);
+            ws.set_onmessage(Some(on_message.as_ref().unchecked_ref()));
+            on_message.forget();
+        }
+
+        // on_error: reject
+        {
+            let reject_clone = reject_rc.clone();
+            let on_error = Closure::wrap(Box::new(move || {
+                if let Some(rej) = reject_clone.borrow_mut().take() {
+                    let _ = rej.call1(
+                        &JsValue::NULL,
+                        &JsValue::from_str("WebSocket error during discover"),
+                    );
+                }
+            }) as Box<dyn FnMut()>);
+            ws.set_onerror(Some(on_error.as_ref().unchecked_ref()));
+            on_error.forget();
+        }
+
+        // on_close: reject if server closed before we got TopicList
+        // (no-op if on_message already resolved)
+        {
+            let reject_clone = reject_rc.clone();
+            let on_close = Closure::wrap(Box::new(move || {
+                if let Some(rej) = reject_clone.borrow_mut().take() {
+                    let _ = rej.call1(
+                        &JsValue::NULL,
+                        &JsValue::from_str("Connection closed before TopicList received"),
+                    );
+                }
+            }) as Box<dyn FnMut()>);
+            ws.set_onclose(Some(on_close.as_ref().unchecked_ref()));
+            on_close.forget();
+        }
+
+        // Timeout: reject after 30 s
+        {
+            let reject_clone = reject_rc.clone();
+            let timeout_cb = Closure::once(move || {
+                if let Some(rej) = reject_clone.borrow_mut().take() {
+                    let _ = rej.call1(&JsValue::NULL, &JsValue::from_str("discover timed out"));
+                }
+            });
+            if let Some(window) = web_sys::window() {
+                let _ = window.set_timeout_with_callback_and_timeout_and_arguments_0(
+                    timeout_cb.as_ref().unchecked_ref(),
+                    30_000,
+                );
+            }
+            timeout_cb.forget();
+        }
+    })
+}
+
+// ─── Private helpers ──────────────────────────────────────────────────────
+
+impl WasmDb {
+    /// Look up the live database handle together with the type-erased
+    /// schema operations registered for `record_key`.
+    ///
+    /// Fails if the database has not been built, the key was never
+    /// configured, or the key's schema type is missing from the registry.
+    fn resolve(&self, record_key: &str) -> Result<(&AimDb, &SchemaOps), JsError> {
+        let Some(db) = self.db.as_ref() else {
+            return Err(JsError::new("Database not built. Call build() first."));
+        };
+
+        let schema = match self.schema_map.get(record_key) {
+            Some(name) => name.as_str(),
+            None => {
+                return Err(JsError::new(&format!("Unknown record key: {record_key}")));
+            }
+        };
+
+        match self.registry.get(schema) {
+            Some(ops) => Ok((db, ops)),
+            None => Err(JsError::new(&format!("Unknown schema type: {schema}"))),
+        }
+    }
+}
+
+// ─── Typed dispatch ───────────────────────────────────────────────────────
+
+/// Apply a single `RecordConfig` to the builder, dispatching on schema type.
+fn apply_record_config(
+ registry: &SchemaRegistry,
+ builder: &mut AimDbBuilder,
+ config: &RecordConfig,
+) -> Result<(), JsError> {
+ let key = StringKey::intern(config.key.clone());
+ let cfg = config.buffer_cfg.clone();
+
+ let ops = registry
+ .get(&config.schema_type)
+ .ok_or_else(|| JsError::new(&format!("Unknown schema type: {}", config.schema_type)))?;
+
+ (ops.configure)(builder, key, cfg);
+ Ok(())
+}
+
+/// Read the latest snapshot for record `key` and convert to `JsValue`.
+///
+/// Returns `JsValue::UNDEFINED` when no value has been produced yet;
+/// errors if `key` does not resolve to a typed record of `T`.
+pub(crate) fn get_typed<T>(db: &AimDb, key: &str) -> Result<JsValue, JsError>
+where
+    T: Send + Sync + 'static + Debug + Clone + Serialize,
+{
+    let inner = db.inner();
+    let typed = inner
+        .get_typed_record_by_key::<T>(key)
+        .map_err(|e| JsError::new(&format!("{e:?}")))?;
+
+    match typed.latest() {
+        Some(val) => {
+            // json_compatible(): produce plain JS objects/arrays (not Maps),
+            // matching what JSON.parse would yield on the JS side.
+            let serializer = serde_wasm_bindgen::Serializer::json_compatible();
+            val.get()
+                .serialize(&serializer)
+                .map_err(|e| JsError::new(&format!("Serialization failed: {e}")))
+        }
+        None => Ok(JsValue::UNDEFINED),
+    }
+}
+
+/// Deserialize `JsValue` → `T` (contract enforcement), then push to buffer.
+///
+/// A failed deserialization is reported as a contract violation; nothing
+/// is written to the buffer in that case.
+pub(crate) fn set_typed<T>(db: &AimDb, key: &str, value: JsValue) -> Result<(), JsError>
+where
+    T: Send + Sync + 'static + Debug + Clone + DeserializeOwned,
+{
+    let val: T = serde_wasm_bindgen::from_value(value)
+        .map_err(|e| JsError::new(&format!("Contract violation: {e}")))?;
+
+    let inner = db.inner();
+    let typed = inner
+        .get_typed_record_by_key::<T>(key)
+        .map_err(|e| JsError::new(&format!("{e:?}")))?;
+
+    // TypedRecord::produce() is declared `async` but its body is synchronous:
+    // it updates `latest_snapshot` and calls `buf.push(val)` — both complete
+    // immediately on WasmBuffer. We poll the future exactly once.
+    poll_sync(typed.produce(val));
+    Ok(())
+}
+
+/// Subscribe to a record's buffer and invoke `callback` on each new value.
+/// Returns a JS function that cancels the subscription when called.
+///
+/// Uses `futures_util::future::select` to race `recv()` against a cancel
+/// future so the unsubscribe closure can break the loop immediately — even
+/// when `recv()` is blocked waiting for the next push.
+pub(crate) fn subscribe_typed<T>(
+    db: &AimDb,
+    key: &str,
+    callback: &js_sys::Function,
+) -> Result<JsValue, JsError>
+where
+    T: Send + Sync + 'static + Debug + Clone + Serialize,
+{
+    let mut reader = db
+        .subscribe::<T>(key)
+        .map_err(|e| JsError::new(&format!("{e:?}")))?;
+
+    let callback = callback.clone();
+    let (cancel_token, cancel_handle) = crate::buffer::cancel_pair();
+
+    wasm_bindgen_futures::spawn_local(async move {
+        use core::task::Poll;
+        use futures_util::future::{select, Either};
+
+        loop {
+            // Future that resolves when cancel() is called.
+            let cancel_fut = core::future::poll_fn(|cx| {
+                if cancel_token.is_cancelled() {
+                    Poll::Ready(())
+                } else {
+                    cancel_token.register_waker(cx.waker());
+                    Poll::Pending
+                }
+            });
+
+            let recv_fut = reader.recv();
+
+            futures_util::pin_mut!(cancel_fut);
+            futures_util::pin_mut!(recv_fut);
+
+            match select(recv_fut, cancel_fut).await {
+                Either::Left((Ok(val), _)) => {
+                    let serializer = serde_wasm_bindgen::Serializer::json_compatible();
+                    if let Ok(js) = val.serialize(&serializer) {
+                        let _ = callback.call1(&JsValue::NULL, &js);
+                    }
+                }
+                Either::Left((Err(_), _)) => break, // buffer error
+                Either::Right(((), _)) => break,    // cancelled
+            }
+        }
+    });
+
+    // Closure::wrap (not once_into_js) so it can be called multiple times
+    // (React StrictMode calls cleanup twice).
+    let unsub = Closure::wrap(Box::new(move || {
+        cancel_handle.cancel();
+    }) as Box<dyn FnMut()>);
+    Ok(unsub.into_js_value())
+}
+
+// ─── Sync future polling ──────────────────────────────────────────────────
+
+/// Poll a future that is known to resolve in a single poll (no real I/O).
+///
+/// Used for `TypedRecord::produce()` whose body is synchronous despite being
+/// declared `async fn` — it just updates a snapshot and pushes to a buffer.
+///
+/// # Panics
+///
+/// Panics if the future returns `Pending`. This should never happen for
+/// operations on `WasmBuffer` (which are single-threaded, non-blocking).
+pub(crate) fn poll_sync<F: core::future::Future>(f: F) -> F::Output {
+    use core::pin::Pin;
+    use core::task::{Context, Poll, Waker};
+
+    // SAFETY: the future is stack-local and will not be moved after pinning.
+    let mut f = f;
+    let f = unsafe { Pin::new_unchecked(&mut f) };
+
+    // Waker::noop(): no task to wake — we never expect Pending anyway.
+    let waker = Waker::noop();
+    let mut cx = Context::from_waker(waker);
+
+    match f.poll(&mut cx) {
+        Poll::Ready(val) => val,
+        Poll::Pending => {
+            panic!("poll_sync: future returned Pending (expected synchronous completion)")
+        }
+    }
+}
diff --git a/aimdb-wasm-adapter/src/buffer.rs b/aimdb-wasm-adapter/src/buffer.rs
new file mode 100644
index 00000000..61d6746d
--- /dev/null
+++ b/aimdb-wasm-adapter/src/buffer.rs
@@ -0,0 +1,396 @@
+//! Single-threaded buffer implementation for the WASM runtime.
+//!
+//! Uses `Rc<RefCell<…>>` instead of atomics or channels — zero overhead for
+//! the browser's single-threaded execution model.
+//!
+//! All three buffer types are supported:
+//! - **SPMC Ring** — bounded `VecDeque` with per-reader cursors
+//! - **SingleLatest** — single slot, version-tracked
+//! - **Mailbox** — single slot, take-on-read semantics
+//!
+//! # Safety
+//!
+//! `WasmBuffer` and `WasmBufferReader` implement `Send + Sync` via
+//! `unsafe impl` because `wasm32-unknown-unknown` is single-threaded.
+
+extern crate alloc;
+
+use alloc::boxed::Box;
+use alloc::collections::VecDeque;
+use alloc::rc::Rc;
+use alloc::vec::Vec;
+use core::cell::{Cell, RefCell};
+use core::future::Future;
+use core::pin::Pin;
+use core::task::{Context, Poll, Waker};
+
+use aimdb_core::buffer::{Buffer, BufferCfg, BufferReader, DynBuffer};
+use aimdb_core::DbError;
+
+// ============================================================================
+// Buffer
+// ============================================================================
+
+/// Single-threaded buffer for the WASM runtime.
+///
+/// Wraps an `Rc>` inner enum that holds the actual buffer state.
+/// All three AimDB buffer types (SPMC Ring, SingleLatest, Mailbox) share
+/// this outer struct — the variant is determined by [`BufferCfg`] at
+/// construction time.
+pub struct WasmBuffer {
+ inner: Rc>>,
+}
+
+// SAFETY: wasm32 is single-threaded — Rc> cannot be accessed concurrently
+unsafe impl Send for WasmBuffer {}
+unsafe impl Sync for WasmBuffer {}
+
/// Internal buffer state — one variant per buffer type.
enum WasmBufferInner<T> {
    /// Bounded ring buffer with independent consumer cursors.
    SpmcRing {
        /// Ring storage (oldest at front, newest at back).
        ring: VecDeque<T>,
        /// Maximum number of items.
        capacity: usize,
        /// Monotonic write counter — each push increments this.
        /// Readers track their own position against this counter.
        write_seq: u64,
        /// Wakers registered by readers waiting for new data.
        wakers: Vec<Waker>,
    },

    /// Only the latest value, skip intermediates.
    SingleLatest {
        /// Current value (None until first push).
        value: Option<T>,
        /// Monotonic version counter — incremented on each push.
        version: u64,
        /// Wakers registered by readers waiting for a new version.
        wakers: Vec<Waker>,
    },

    /// Single slot, overwrite semantics.
    Mailbox {
        /// Current slot value (taken on read).
        slot: Option<T>,
        /// Wakers registered by readers waiting for a value.
        wakers: Vec<Waker>,
    },
}
+
+impl Buffer for WasmBuffer {
+ type Reader = WasmBufferReader;
+
+ fn new(cfg: &BufferCfg) -> Self {
+ let inner = match cfg {
+ BufferCfg::SpmcRing { capacity } => WasmBufferInner::SpmcRing {
+ ring: VecDeque::with_capacity(*capacity),
+ capacity: *capacity,
+ write_seq: 0,
+ wakers: Vec::new(),
+ },
+ BufferCfg::SingleLatest => WasmBufferInner::SingleLatest {
+ value: None,
+ version: 0,
+ wakers: Vec::new(),
+ },
+ BufferCfg::Mailbox => WasmBufferInner::Mailbox {
+ slot: None,
+ wakers: Vec::new(),
+ },
+ };
+
+ WasmBuffer {
+ inner: Rc::new(RefCell::new(inner)),
+ }
+ }
+
+ fn push(&self, value: T) {
+ let mut inner = self.inner.borrow_mut();
+ match &mut *inner {
+ WasmBufferInner::SpmcRing {
+ ring,
+ capacity,
+ write_seq,
+ wakers,
+ } => {
+ if ring.len() >= *capacity {
+ ring.pop_front();
+ }
+ ring.push_back(value);
+ *write_seq += 1;
+ wake_all(wakers);
+ }
+ WasmBufferInner::SingleLatest {
+ value: slot,
+ version,
+ wakers,
+ } => {
+ *slot = Some(value);
+ *version += 1;
+ wake_all(wakers);
+ }
+ WasmBufferInner::Mailbox { slot, wakers } => {
+ *slot = Some(value);
+ wake_all(wakers);
+ }
+ }
+ }
+
+ fn subscribe(&self) -> Self::Reader {
+ let inner = self.inner.borrow();
+ let state = match &*inner {
+ WasmBufferInner::SpmcRing { write_seq, .. } => {
+ // New readers start at the current write position (no backfill).
+ ReaderState::SpmcRing {
+ read_seq: *write_seq,
+ }
+ }
+ WasmBufferInner::SingleLatest { version, .. } => {
+ // Will fire on next push (version change).
+ ReaderState::SingleLatest {
+ last_seen_version: *version,
+ }
+ }
+ WasmBufferInner::Mailbox { .. } => ReaderState::Mailbox,
+ };
+
+ WasmBufferReader {
+ buffer: Rc::clone(&self.inner),
+ state,
+ }
+ }
+}
+
+/// Explicit DynBuffer implementation so WasmBuffer can be stored as a trait object.
+impl DynBuffer for WasmBuffer {
+ fn push(&self, value: T) {
+ >::push(self, value);
+ }
+
+ fn subscribe_boxed(&self) -> Box + Send> {
+ Box::new(self.subscribe())
+ }
+
+ fn as_any(&self) -> &dyn core::any::Any {
+ self
+ }
+}
+
+// ============================================================================
+// Reader
+// ============================================================================
+
+/// Single-threaded buffer reader for the WASM runtime.
+///
+/// Created by [`WasmBuffer::subscribe()`]. Each reader maintains independent
+/// state (cursor position, last-seen version) and can advance at its own pace.
+pub struct WasmBufferReader {
+ buffer: Rc>>,
+ state: ReaderState,
+}
+
+// SAFETY: wasm32 is single-threaded — no concurrent access possible
+unsafe impl Send for WasmBufferReader {}
+unsafe impl Sync for WasmBufferReader {}
+
/// Per-reader tracking state.
///
/// The variant is fixed when the reader is created by `subscribe()` and is
/// expected to match the owning buffer's `WasmBufferInner` variant forever
/// (the buffer never changes variant after construction).
enum ReaderState {
    /// For SPMC Ring: the sequence number of the next item to read.
    SpmcRing { read_seq: u64 },
    /// For SingleLatest: the version we last observed.
    SingleLatest { last_seen_version: u64 },
    /// For Mailbox: no extra state (take-on-read).
    Mailbox,
}
+
+impl BufferReader for WasmBufferReader {
+ fn recv(&mut self) -> Pin> + Send + '_>> {
+ Box::pin(WasmRecvFuture { reader: self })
+ }
+
+ fn try_recv(&mut self) -> Result {
+ let mut inner = self.buffer.borrow_mut();
+ match (&mut *inner, &mut self.state) {
+ (
+ WasmBufferInner::SpmcRing {
+ ring, write_seq, ..
+ },
+ ReaderState::SpmcRing { read_seq },
+ ) => {
+ if *read_seq >= *write_seq {
+ return Err(DbError::BufferEmpty);
+ }
+ // Calculate offset into the ring
+ let ring_len = ring.len() as u64;
+ let oldest_seq = write_seq.saturating_sub(ring_len);
+
+ if *read_seq < oldest_seq {
+ // Reader fell behind — skip to oldest available
+ let lag_count = oldest_seq - *read_seq;
+ *read_seq = oldest_seq;
+ return Err(DbError::BufferLagged {
+ lag_count,
+ _buffer_name: (),
+ });
+ }
+
+ let offset = (*read_seq - oldest_seq) as usize;
+ let value = ring[offset].clone();
+ *read_seq += 1;
+ Ok(value)
+ }
+ (
+ WasmBufferInner::SingleLatest { value, version, .. },
+ ReaderState::SingleLatest { last_seen_version },
+ ) => {
+ if *version == *last_seen_version {
+ return Err(DbError::BufferEmpty);
+ }
+ match value {
+ Some(v) => {
+ *last_seen_version = *version;
+ Ok(v.clone())
+ }
+ None => Err(DbError::BufferEmpty),
+ }
+ }
+ (WasmBufferInner::Mailbox { slot, .. }, ReaderState::Mailbox) => {
+ slot.take().ok_or(DbError::BufferEmpty)
+ }
+ _ => unreachable!("reader state mismatch"),
+ }
+ }
+}
+
+// ============================================================================
+// Async recv future
+// ============================================================================
+
+/// Future returned by `WasmBufferReader::recv()`.
+///
+/// On each poll:
+/// 1. Try to read a value (non-blocking).
+/// 2. If available, return `Poll::Ready(Ok(value))`.
+/// 3. If not, register the waker and return `Poll::Pending`.
+///
+/// The waker is woken when `WasmBuffer::push()` fires.
+struct WasmRecvFuture<'a, T> {
+ reader: &'a mut WasmBufferReader,
+}
+
+// SAFETY: wasm32 is single-threaded
+unsafe impl Send for WasmRecvFuture<'_, T> {}
+
+impl Future for WasmRecvFuture<'_, T> {
+ type Output = Result;
+
+ fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll {
+ let this = self.get_mut();
+
+ // Try non-blocking read first
+ match this.reader.try_recv() {
+ Ok(value) => Poll::Ready(Ok(value)),
+ Err(e @ DbError::BufferLagged { .. }) => Poll::Ready(Err(e)),
+ Err(DbError::BufferEmpty) => {
+ // Register waker so we get woken on next push
+ let mut inner = this.reader.buffer.borrow_mut();
+ let wakers = match &mut *inner {
+ WasmBufferInner::SpmcRing { wakers, .. } => wakers,
+ WasmBufferInner::SingleLatest { wakers, .. } => wakers,
+ WasmBufferInner::Mailbox { wakers, .. } => wakers,
+ };
+ // Deduplicate: only add if no existing waker will wake the same task.
+ // Prevents unbounded growth when a single reader is polled repeatedly.
+ if !wakers.iter().any(|w| w.will_wake(cx.waker())) {
+ wakers.push(cx.waker().clone());
+ }
+ Poll::Pending
+ }
+ Err(e) => Poll::Ready(Err(e)),
+ }
+ }
+}
+
+// ============================================================================
+// Helpers
+// ============================================================================
+
/// Wake all registered wakers and clear the list.
///
/// `drain(..)` keeps the Vec's allocation for reuse by the next round of
/// pending readers.
fn wake_all(wakers: &mut Vec<Waker>) {
    for waker in wakers.drain(..) {
        waker.wake();
    }
}
+
+// ============================================================================
+// Cancellation
+// ============================================================================
+
/// Shared state between [`CancelToken`] and [`CancelHandle`].
struct CancelInner {
    /// Set once by `CancelHandle::cancel()`; read by the subscription task.
    cancelled: Cell<bool>,
    /// Waker of the task blocked on cancellation, if any.
    waker: RefCell<Option<Waker>>,
}
+
+/// Token held by the subscription task (reader side).
+///
+/// Polled in a `futures_util::future::select` alongside `reader.recv()`.
+/// When [`CancelHandle::cancel()`] fires, the stored waker is woken and
+/// `is_cancelled()` returns `true`, causing the select to resolve.
+pub(crate) struct CancelToken {
+ inner: Rc,
+}
+
+/// Handle held by the JS unsubscribe closure.
+///
+/// Calling [`cancel()`](CancelHandle::cancel) sets the flag and wakes the
+/// subscription task so it exits immediately — even if `recv()` is blocked.
+pub(crate) struct CancelHandle {
+ inner: Rc,
+}
+
// SAFETY: wasm32 is single-threaded — no concurrent access possible.
// (Same reasoning as the buffer types above.)
unsafe impl Send for CancelToken {}
unsafe impl Sync for CancelToken {}
unsafe impl Send for CancelHandle {}
unsafe impl Sync for CancelHandle {}
+
+/// Create a linked cancel token/handle pair.
+pub(crate) fn cancel_pair() -> (CancelToken, CancelHandle) {
+ let inner = Rc::new(CancelInner {
+ cancelled: Cell::new(false),
+ waker: RefCell::new(None),
+ });
+ (
+ CancelToken {
+ inner: inner.clone(),
+ },
+ CancelHandle { inner },
+ )
+}
+
+impl CancelToken {
+ /// Returns `true` if [`CancelHandle::cancel()`] has been called.
+ pub(crate) fn is_cancelled(&self) -> bool {
+ self.inner.cancelled.get()
+ }
+
+ /// Store the current task's waker so [`CancelHandle::cancel()`] can wake it.
+ pub(crate) fn register_waker(&self, waker: &Waker) {
+ *self.inner.waker.borrow_mut() = Some(waker.clone());
+ }
+}
+
+impl CancelHandle {
+ /// Signal cancellation and wake the subscription task.
+ ///
+ /// Idempotent — safe to call multiple times (React StrictMode).
+ pub(crate) fn cancel(&self) {
+ self.inner.cancelled.set(true);
+ if let Some(w) = self.inner.waker.borrow_mut().take() {
+ w.wake();
+ }
+ }
+}
diff --git a/aimdb-wasm-adapter/src/lib.rs b/aimdb-wasm-adapter/src/lib.rs
new file mode 100644
index 00000000..cf68a995
--- /dev/null
+++ b/aimdb-wasm-adapter/src/lib.rs
@@ -0,0 +1,72 @@
+//! AimDB WASM Runtime Adapter
+//!
+//! Provides a WebAssembly runtime adapter for AimDB, enabling the full
+//! dataflow engine to run inside a web browser or any WASM host.
+//!
+//! # Architecture
+//!
+//! This crate implements the four executor traits from `aimdb-executor`:
+//!
+//! - [`RuntimeAdapter`] — Platform identity (`"wasm"`)
+//! - [`Spawn`] — Task spawning via `wasm_bindgen_futures::spawn_local`
+//! - [`TimeOps`] — `Performance.now()` + `setTimeout` for async sleep
+//! - [`Logger`] — Maps to `console.log/debug/warn/error`
+//!
+//! # Single-Threaded Safety
+//!
+//! `wasm32-unknown-unknown` is single-threaded by construction. The `Send + Sync`
+//! bounds required by executor traits are satisfied trivially — no concurrent
+//! access is possible. This is the same pattern used by `aimdb-embassy-adapter`
+//! for bare-metal MCUs.
+//!
+//! # Buffer Implementation
+//!
+//! Buffers use `Rc<RefCell<…>>` instead of atomics — zero-overhead for the
+//! single-threaded browser environment. All three buffer types are supported:
+//! SPMC Ring, SingleLatest, and Mailbox.
+//!
+//! # Feature Flags
+//!
+//! - `wasm-runtime` (default) — Enables WASM bindings (`wasm-bindgen`,
+//! `js-sys`, `web-sys`). Disable for native-target unit tests.
+
+#![no_std]
+
+extern crate alloc;
+
+pub mod buffer;
+pub mod logger;
+pub mod runtime;
+pub mod time;
+
+#[cfg(feature = "wasm-runtime")]
+pub mod bindings;
+
+#[cfg(feature = "wasm-runtime")]
+pub(crate) mod schema_registry;
+
+#[cfg(feature = "wasm-runtime")]
+pub mod ws_bridge;
+
+// Re-export the adapter type at crate root
+pub use runtime::WasmAdapter;
+
+// Re-export executor traits for convenience
+pub use aimdb_executor::{
+ ExecutorError, ExecutorResult, Logger as LoggerTrait, Runtime, RuntimeAdapter, Spawn, TimeOps,
+};
+
+// Re-export buffer types
+pub use buffer::{WasmBuffer, WasmBufferReader};
+
+// Re-export time types
+pub use time::{WasmDuration, WasmInstant};
+
+// Generate the extension trait for convenient record configuration
+aimdb_core::impl_record_registrar_ext! {
+ WasmRecordRegistrarExt,
+ WasmAdapter,
+ WasmBuffer,
+ "wasm-runtime",
+ |cfg| WasmBuffer::::new(cfg)
+}
diff --git a/aimdb-wasm-adapter/src/logger.rs b/aimdb-wasm-adapter/src/logger.rs
new file mode 100644
index 00000000..61b2ffab
--- /dev/null
+++ b/aimdb-wasm-adapter/src/logger.rs
@@ -0,0 +1,52 @@
+//! Logger implementation for the WASM runtime.
+//!
+//! Maps AimDB log levels to browser console methods:
+//! - `info` → `console.log`
+//! - `debug` → `console.debug`
+//! - `warn` → `console.warn`
+//! - `error` → `console.error`
+
+use crate::runtime::WasmAdapter;
+use aimdb_executor::Logger;
+
+impl Logger for WasmAdapter {
+ fn info(&self, message: &str) {
+ #[cfg(feature = "wasm-runtime")]
+ web_sys::console::log_1(&message.into());
+
+ #[cfg(not(feature = "wasm-runtime"))]
+ {
+ let _ = message;
+ }
+ }
+
+ fn debug(&self, message: &str) {
+ #[cfg(feature = "wasm-runtime")]
+ web_sys::console::debug_1(&message.into());
+
+ #[cfg(not(feature = "wasm-runtime"))]
+ {
+ let _ = message;
+ }
+ }
+
+ fn warn(&self, message: &str) {
+ #[cfg(feature = "wasm-runtime")]
+ web_sys::console::warn_1(&message.into());
+
+ #[cfg(not(feature = "wasm-runtime"))]
+ {
+ let _ = message;
+ }
+ }
+
+ fn error(&self, message: &str) {
+ #[cfg(feature = "wasm-runtime")]
+ web_sys::console::error_1(&message.into());
+
+ #[cfg(not(feature = "wasm-runtime"))]
+ {
+ let _ = message;
+ }
+ }
+}
diff --git a/aimdb-wasm-adapter/src/react/useAimDb.tsx b/aimdb-wasm-adapter/src/react/useAimDb.tsx
new file mode 100644
index 00000000..cade45c9
--- /dev/null
+++ b/aimdb-wasm-adapter/src/react/useAimDb.tsx
@@ -0,0 +1,300 @@
+/**
+ * React hooks for AimDB WASM integration.
+ *
+ * Drop-in hooks that subscribe to AimDB records backed by a local WASM
+ * database instance with full Rust serde contract enforcement.
+ *
+ * @example
+ * ```tsx
+ * import { AimDbProvider, useRecord } from '@aimdb/wasm/react';
+ *
+ * function App() {
+ * return (
+ *
+ *
+ *
+ * );
+ * }
+ *
+ * function Dashboard() {
+ * const temp = useRecord('sensors.temperature.vienna');
+ * if (!temp) return ;
+ * return {temp.celsius}°C ;
+ * }
+ * ```
+ *
+ * @module
+ */
+
+import {
+ createContext,
+ useCallback,
+ useContext,
+ useEffect,
+ useRef,
+ useState,
+ type ReactNode,
+} from "react";
+
+// ── Types ────────────────────────────────────────────────────────────────
+
+/** Re-export from the WASM module (generated by wasm-pack). */
+import type { WasmDb, WsBridge } from "../pkg/aimdb_wasm_adapter";
+
+/** Record configuration entry for the provider. */
+export interface RecordConfig {
+ /** AimDB record key (e.g. `"sensors.temperature.vienna"`). */
+ key: string;
+ /** Schema type name matching `SchemaType::NAME` (e.g. `"temperature"`). */
+ schemaType: string;
+ /** Buffer type — string shorthand or object with capacity. */
+ buffer: string | { type: string; capacity?: number };
+}
+
+/** Optional WebSocket bridge configuration. */
+export interface BridgeConfig {
+ /** WebSocket endpoint (e.g. `"wss://api.example.com/ws"`). */
+ url: string;
+ /** MQTT-style topic patterns to subscribe to. */
+ subscribeTopics?: string[];
+ /** Auto-reconnect on disconnect (default: `true`). */
+ autoReconnect?: boolean;
+ /** Request snapshots on (re)connect (default: `true`). */
+ lateJoin?: boolean;
+}
+
+/** Full provider configuration. */
+export interface AimDbConfig {
+ /** Records to register before building the database. */
+ records: RecordConfig[];
+ /** Optional WsBridge configuration for server sync. */
+ bridge?: BridgeConfig;
+}
+
+// ── Context ──────────────────────────────────────────────────────────────
+
+interface AimDbContextValue {
+ db: WasmDb | null;
+ bridge: WsBridge | null;
+ ready: boolean;
+}
+
+const AimDbContext = createContext({
+ db: null,
+ bridge: null,
+ ready: false,
+});
+
+// ── Provider ─────────────────────────────────────────────────────────────
+
+interface AimDbProviderProps {
+ config: AimDbConfig;
+ /** Shown while the WASM module initializes and the DB builds. */
+ fallback?: ReactNode;
+ children: ReactNode;
+}
+
+/**
+ * Initializes the WASM AimDB instance and makes it available to descendants.
+ *
+ * - Loads and initializes the WASM module (async).
+ * - Calls `configureRecord()` for each entry in `config.records`.
+ * - Calls `await db.build()`.
+ * - Optionally opens a `WsBridge` via `db.connectBridge()`.
+ * - Renders `children` once the database is ready.
+ */
+export function AimDbProvider({
+ config,
+ fallback,
+ children,
+}: AimDbProviderProps) {
+ const [ctx, setCtx] = useState({
+ db: null,
+ bridge: null,
+ ready: false,
+ });
+
+ // Refs so the cleanup closure always sees the latest handles
+ // (avoids the stale-closure bug where cleanup captures initial null state).
+ const dbRef = useRef(null);
+ const bridgeRef = useRef(null);
+
+ useEffect(() => {
+ let cancelled = false;
+
+ (async () => {
+ // Dynamic import so the WASM module is only loaded when the provider
+ // mounts. Tree-shaking removes this path entirely if unused.
+ const wasm = await import("../pkg/aimdb_wasm_adapter");
+
+ // Initialize the WASM binary (required by wasm-pack --target web
+ // before any constructors can be used). Idempotent on re-mount.
+ await wasm.default();
+ if (cancelled) return;
+
+ const db = new wasm.WasmDb();
+
+ // Configure all records
+ for (const rec of config.records) {
+ db.configureRecord(rec.key, {
+ schemaType: rec.schemaType,
+ buffer: rec.buffer,
+ });
+ }
+
+ // Build the database (runs type registration, buffer allocation, etc.)
+ await db.build();
+ if (cancelled) return;
+
+ dbRef.current = db;
+
+ // Optionally connect WsBridge (db.connectBridge borrows — db stays valid)
+ let bridge: WsBridge | null = null;
+ if (config.bridge) {
+ bridge = db.connectBridge(config.bridge.url, {
+ subscribeTopics: config.bridge.subscribeTopics ?? [],
+ autoReconnect: config.bridge.autoReconnect ?? true,
+ lateJoin: config.bridge.lateJoin ?? true,
+ });
+ bridgeRef.current = bridge;
+ }
+
+ setCtx({ db, bridge, ready: true });
+ })();
+
+ return () => {
+ cancelled = true;
+ // Cleanup uses refs — always sees the latest handles
+ if (bridgeRef.current) {
+ bridgeRef.current.disconnect();
+ bridgeRef.current = null;
+ }
+ if (dbRef.current) {
+ dbRef.current.free();
+ dbRef.current = null;
+ }
+ };
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, []);
+
+ if (!ctx.ready) return <>{fallback ?? null}>;
+
+ return {children} ;
+}
+
+// ── Hooks ────────────────────────────────────────────────────────────────
+
+/**
+ * Subscribe to an AimDB record by its key.
+ *
+ * Returns the latest validated value (deserialized by Rust serde in WASM)
+ * or `null` if no value has been produced yet. Re-renders on every buffer
+ * push.
+ *
+ * @typeParam T - The TypeScript shape of the record (e.g. `Temperature`).
+ * @param recordKey - The AimDB record key (e.g. `"sensors.temperature.vienna"`).
+ *
+ * @example
+ * ```tsx
+ * interface Temperature { celsius: number; timestamp: number }
+ *
+ * function TempCard({ city }: { city: string }) {
+ * const temp = useRecord(`sensors.temperature.${city}`);
+ * if (!temp) return ;
+ * return {temp.celsius.toFixed(1)}°C ;
+ * }
+ * ```
+ */
+export function useRecord(recordKey: string): T | null {
+ const { db, ready } = useContext(AimDbContext);
+ const [value, setValue] = useState(null);
+
+ useEffect(() => {
+ if (!ready || !db) return;
+
+ // Seed with current value (if available)
+ try {
+ const current = db.get(recordKey) as T | undefined;
+ if (current !== undefined) setValue(current);
+ } catch {
+ // Record may not have a value yet — that's fine
+ }
+
+ // Subscribe for live updates
+ const unsub = db.subscribe(recordKey, (v: T) => setValue(v));
+
+ return () => {
+ if (typeof unsub === "function") unsub();
+ };
+ }, [db, ready, recordKey]);
+
+ return value;
+}
+
+/**
+ * Write a value to an AimDB record.
+ *
+ * Returns a setter function that validates the payload through Rust serde.
+ * Throws if contract validation fails (missing fields, wrong types, etc.).
+ *
+ * @typeParam T - The TypeScript shape of the record.
+ * @param recordKey - The AimDB record key.
+ *
+ * @example
+ * ```tsx
+ * const setTarget = useSetRecord('commands.setpoint.room1');
+ * setTarget({ target_celsius: 21.0, timestamp: Date.now() });
+ * ```
+ */
+export function useSetRecord(recordKey: string): (value: T) => void {
+ const { db, ready } = useContext(AimDbContext);
+
+ return useCallback(
+ (value: T) => {
+ if (!ready || !db) {
+ throw new Error(
+ "AimDB not ready. Wrap your app in .",
+ );
+ }
+ db.set(recordKey, value);
+ },
+ [db, ready, recordKey],
+ );
+}
+
+/**
+ * Access the raw `WasmDb` instance for advanced operations.
+ *
+ * Prefer `useRecord` / `useSetRecord` for standard use cases.
+ */
+export function useAimDb(): WasmDb | null {
+ return useContext(AimDbContext).db;
+}
+
+/**
+ * Access the `WsBridge` instance for connection status monitoring.
+ *
+ * Returns `null` if no bridge was configured or the DB is not ready.
+ *
+ * @example
+ * ```tsx
+ * function ConnectionIndicator() {
+ * const bridge = useBridge();
+ * const [status, setStatus] = useState('connecting');
+ * useEffect(() => {
+ * bridge?.onStatusChange(setStatus);
+ * }, [bridge]);
+ * return {status} ;
+ * }
+ * ```
+ */
+export function useBridge(): WsBridge | null {
+ return useContext(AimDbContext).bridge;
+}
diff --git a/aimdb-wasm-adapter/src/runtime.rs b/aimdb-wasm-adapter/src/runtime.rs
new file mode 100644
index 00000000..df1d2cc1
--- /dev/null
+++ b/aimdb-wasm-adapter/src/runtime.rs
@@ -0,0 +1,62 @@
+//! WasmAdapter struct and RuntimeAdapter + Spawn implementations
+//!
+//! Single-threaded WASM runtime — tasks are spawned onto the browser's
+//! microtask queue via `wasm_bindgen_futures::spawn_local`.
+
+use aimdb_executor::{ExecutorResult, RuntimeAdapter, Spawn};
+use core::future::Future;
+
/// WASM runtime adapter for AimDB.
///
/// Implements the four executor traits required by `aimdb-core`:
/// [`RuntimeAdapter`], [`Spawn`], [`TimeOps`](crate::time), and
/// [`Logger`](crate::logger).
///
/// Zero-sized and `Copy` — passing it around is free.
///
/// # Safety
///
/// `WasmAdapter` implements `Send + Sync` via `unsafe impl` because
/// `wasm32-unknown-unknown` is single-threaded — no concurrent access
/// is possible. This is the identical pattern used by `EmbassyAdapter`.
#[derive(Clone, Copy, Debug)]
pub struct WasmAdapter;

// SAFETY: wasm32-unknown-unknown is single-threaded.
// No concurrent access is possible — Send + Sync are trivially satisfied.
// This is the same pattern as aimdb-embassy-adapter/src/runtime.rs.
unsafe impl Send for WasmAdapter {}
unsafe impl Sync for WasmAdapter {}
+
impl RuntimeAdapter for WasmAdapter {
    /// Platform identity string (`"wasm"`) reported to `aimdb-core`.
    fn runtime_name() -> &'static str {
        "wasm"
    }
}
+
+impl Spawn for WasmAdapter {
+ type SpawnToken = (); // Same as Embassy — no join handle
+
+ fn spawn(&self, future: F) -> ExecutorResult
+ where
+ F: Future + Send + 'static,
+ {
+ // spawn_local requires F: 'static but not F: Send.
+ // The Send bound on the trait is satisfied vacuously —
+ // all types are effectively Send in a single-threaded context.
+ #[cfg(feature = "wasm-runtime")]
+ {
+ wasm_bindgen_futures::spawn_local(future);
+ }
+
+ #[cfg(not(feature = "wasm-runtime"))]
+ {
+ let _ = future;
+ // Without wasm-runtime, we can't spawn — this path is only
+ // hit during native-target unit tests.
+ return Err(aimdb_executor::ExecutorError::RuntimeUnavailable {
+ message: "wasm-runtime feature not enabled",
+ });
+ }
+
+ Ok(())
+ }
+}
diff --git a/aimdb-wasm-adapter/src/schema_registry.rs b/aimdb-wasm-adapter/src/schema_registry.rs
new file mode 100644
index 00000000..7f18140e
--- /dev/null
+++ b/aimdb-wasm-adapter/src/schema_registry.rs
@@ -0,0 +1,105 @@
+//! Type-erased dispatch registry for [`Streamable`] types in the WASM adapter.
+//!
+//! Built once via [`SchemaRegistry::build`] using the visitor pattern from
+//! `aimdb-data-contracts`. Each entry stores boxed closures that capture the
+//! concrete type `T` through monomorphization, enabling runtime dispatch by
+//! schema name without a central match macro.
+
+extern crate alloc;
+
+use alloc::boxed::Box;
+use alloc::collections::BTreeMap;
+
+use wasm_bindgen::prelude::*;
+
+use aimdb_core::buffer::BufferCfg;
+use aimdb_core::builder::{AimDb, AimDbBuilder};
+use aimdb_core::record_id::StringKey;
+
+use aimdb_data_contracts::{for_each_streamable, Streamable, StreamableVisitor};
+
+use crate::WasmAdapter;
+
+// ─── Type-erased operations ───────────────────────────────────────────────
+
+type ConfigureFn = Box, StringKey, BufferCfg) + Send + Sync>;
+type GetFn = Box, &str) -> Result + Send + Sync>;
+type SetFn = Box, &str, JsValue) -> Result<(), JsError> + Send + Sync>;
+type SubscribeFn = Box<
+ dyn Fn(&AimDb, &str, &js_sys::Function) -> Result