diff --git a/README.md b/README.md
index 00ee1ab..775fb99 100644
--- a/README.md
+++ b/README.md
@@ -92,6 +92,33 @@ utilitarian design, give Stacks a try-- I'd love to hear your thoughts!
| New note | ⌘ ⇧ + n |
+**Note:** Items deleted through the UI are permanently removed on app restart.
+
+## CLI
+
+Stacks includes a command-line interface for programmatic access. First, symlink the app binary to your PATH:
+
+```bash
+ln -s /Applications/Stacks.app/Contents/MacOS/Stacks ~/.local/bin/stacks
+```
+
+
+ | Get top item from most recent stack | stacks |
+ | Get specific item by ID | stacks {id} |
+ | Delete top item | stacks --delete |
+ | Delete specific item | stacks {id} --delete |
+ | List all stacks (JSONL) | stacks list |
+ | Raw packet stream (JSONL) | stacks stream |
+ | Search content* | stacks search {query} [--limit N] |
+ | View complete structure (JSON) | stacks view |
+ | View current navigation state (JSON) | stacks view nav |
+ | List CAS hashes | stacks cas list |
+ | Get content by hash | stacks cas get {hash} |
+ | Purge content by hash | stacks cas purge {hash} |
+
+
+*`stacks search` supports [Tantivy QueryParser](https://docs.rs/tantivy/latest/tantivy/query/struct.QueryParser.html) syntax: plain terms (`foo`), phrases (`"exact phrase"`), and boolean logic (`foo AND bar NOT baz`).*
+
## Development
```bash
diff --git a/scripts/check.sh b/scripts/check.sh
new file mode 100755
index 0000000..0fad907
--- /dev/null
+++ b/scripts/check.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+set -euo pipefail
+
+cd "$(dirname "$0")/../src-tauri"
+
+echo "🎨 Checking formatting..."
+cargo fmt --check
+
+echo "🔍 Running clippy..."
+cargo clippy -- -D warnings
+
+echo "🧪 Running tests..."
+cargo test
+
+echo "✅ All checks passed!"
\ No newline at end of file
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index d91841b..c9aca83 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -4436,7 +4436,7 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "stacks"
-version = "0.15.13"
+version = "0.15.14-dev"
dependencies = [
"async-openai",
"base64 0.21.7",
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index 48be1ff..074f710 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "stacks"
-version = "0.15.13"
+version = "0.15.14-dev"
description = "Stacks"
authors = ["Andy Gayton "]
license = ""
diff --git a/src-tauri/src/cli.rs b/src-tauri/src/cli.rs
index de520b8..e9786a4 100644
--- a/src-tauri/src/cli.rs
+++ b/src-tauri/src/cli.rs
@@ -4,23 +4,71 @@ use std::path::Path;
use http_body_util::BodyExt;
use hyper_util::rt::TokioIo;
-use clap::{ArgGroup, Parser};
+use clap::{Parser, Subcommand};
#[derive(Parser, Debug, Clone)]
#[clap(author, version, about, long_about = None)]
-#[clap(group(ArgGroup::new("output").args(&["meta", "html"]).required(false)))]
struct Args {
- /// clip id to retrieve
+ #[clap(subcommand)]
+    command: Option<Commands>,
+
+ /// clip id to retrieve (when no subcommand is used; defaults to top of most recent stack)
#[clap(value_parser)]
id: Option,
/// output metadata, instead of content
- #[clap(long, action = clap::ArgAction::SetTrue, group = "output")]
+ #[clap(long, action = clap::ArgAction::SetTrue)]
meta: bool,
/// output in HTML format
- #[clap(long, action = clap::ArgAction::SetTrue, group = "output")]
+ #[clap(long, action = clap::ArgAction::SetTrue)]
html: bool,
+
+ /// delete the item instead of returning it
+ #[clap(long, action = clap::ArgAction::SetTrue)]
+ delete: bool,
+}
+
+#[derive(Subcommand, Debug, Clone)]
+enum Commands {
+ /// List all stacks with full metadata (JSONL format)
+ List,
+ /// Output raw packet stream (JSONL format)
+ Stream,
+ /// Search content using Tantivy QueryParser
+ Search {
+ /// Search query (supports Tantivy syntax: terms, phrases, boolean logic)
+ query: String,
+ /// Maximum number of results to return
+ #[clap(long)]
+        limit: Option<usize>,
+ },
+ /// View complete structure (JSON)
+ View {
+ #[clap(subcommand)]
+        command: Option<ViewCommand>,
+ },
+ /// Content-Addressable Storage operations
+ Cas {
+ #[clap(subcommand)]
+ command: CasCommand,
+ },
+}
+
+#[derive(Subcommand, Debug, Clone)]
+enum CasCommand {
+ /// List all CAS hashes
+ List,
+ /// Get content by hash
+ Get { hash: String },
+ /// Purge content by hash
+ Purge { hash: String },
+}
+
+#[derive(Subcommand, Debug, Clone)]
+enum ViewCommand {
+ /// View current navigation state (JSON)
+ Nav,
}
pub async fn cli(db_path: &str) {
@@ -32,34 +80,372 @@ pub async fn cli(db_path: &str) {
.expect("Failed to connect to server");
let io = TokioIo::new(stream);
- use bytes::Bytes;
- use http_body_util::Empty;
use hyper::client::conn;
- use hyper::{Request, StatusCode};
let (mut request_sender, connection) = conn::http1::handshake(io).await.unwrap();
// spawn a task to poll the connection and drive the HTTP state
tokio::spawn(async move {
if let Err(e) = connection.await {
- eprintln!("Error in connection: {}", e);
+ eprintln!("Error in connection: {e}");
}
});
- // we should just do a HEAD request if --meta is set
+ match args.command {
+ Some(Commands::List) => {
+ handle_list_command(&mut request_sender).await;
+ }
+ Some(Commands::Stream) => {
+ handle_stream_command(&mut request_sender).await;
+ }
+ Some(Commands::Search { query, limit }) => {
+ handle_search_command(query, limit, &mut request_sender).await;
+ }
+ Some(Commands::View { command }) => {
+ handle_view_command(command, &mut request_sender).await;
+ }
+ Some(Commands::Cas { command }) => {
+ handle_cas_command(command, &mut request_sender).await;
+ }
+ None => {
+ // Legacy behavior for backward compatibility
+ handle_legacy_request(args, &mut request_sender).await;
+ }
+ }
+}
+
+async fn handle_list_command(
+ request_sender: &mut hyper::client::conn::http1::SendRequest<
+        http_body_util::Empty<bytes::Bytes>,
+ >,
+) {
+ use bytes::Bytes;
+ use http_body_util::Empty;
+ use hyper::{Method, Request, StatusCode};
+
let request = Request::builder()
- .method("GET")
- .uri(&format!(
- "/{}{}",
- args.id.unwrap_or_default(),
- if args.html { "?as-html" } else { "" }
- ))
+ .method(Method::GET)
+ .uri("/stacks")
.body(Empty::::new())
.unwrap();
let mut res = request_sender.send_request(request).await.unwrap();
- assert!(res.status() == StatusCode::OK);
+ if res.status() != StatusCode::OK {
+ eprintln!("Request failed with status: {}", res.status());
+ return;
+ }
+
+ // Parse JSON response and output each stack as a line (JSONL format)
+ let mut body_bytes = Vec::new();
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ body_bytes.extend_from_slice(chunk);
+ }
+ }
+
+ let body_str = String::from_utf8(body_bytes.clone()).unwrap_or_else(|_| {
+ eprintln!("Server returned invalid UTF-8");
+ String::from_utf8_lossy(&body_bytes).to_string()
+ });
+
+    match serde_json::from_str::<Vec<serde_json::Value>>(&body_str) {
+ Ok(stacks) => {
+ for stack in stacks {
+ println!("{}", serde_json::to_string(&stack).unwrap());
+ }
+ }
+ Err(e) => {
+ eprintln!("Failed to parse JSON response: {e}");
+ eprintln!("Raw response: {body_str}");
+ }
+ }
+}
+
+async fn handle_stream_command(
+ request_sender: &mut hyper::client::conn::http1::SendRequest<
+        http_body_util::Empty<bytes::Bytes>,
+ >,
+) {
+ use bytes::Bytes;
+ use http_body_util::Empty;
+ use hyper::{Method, Request, StatusCode};
+
+ let request = Request::builder()
+ .method(Method::GET)
+ .uri("/stream")
+        .body(Empty::<Bytes>::new())
+ .unwrap();
+
+ let mut res = request_sender.send_request(request).await.unwrap();
+
+ if res.status() != StatusCode::OK {
+ eprintln!("Request failed with status: {}", res.status());
+ return;
+ }
+
+ // Parse JSON response and output each packet as a line (JSONL format)
+ let mut body_bytes = Vec::new();
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ body_bytes.extend_from_slice(chunk);
+ }
+ }
+
+ let body_str = String::from_utf8(body_bytes.clone()).unwrap_or_else(|_| {
+ eprintln!("Server returned invalid UTF-8");
+ String::from_utf8_lossy(&body_bytes).to_string()
+ });
+
+    match serde_json::from_str::<Vec<serde_json::Value>>(&body_str) {
+ Ok(packets) => {
+ for packet in packets {
+ println!("{}", serde_json::to_string(&packet).unwrap());
+ }
+ }
+ Err(e) => {
+ eprintln!("Failed to parse JSON response: {e}");
+ eprintln!("Raw response: {body_str}");
+ }
+ }
+}
+
+async fn handle_search_command(
+ query: String,
+    limit: Option<usize>,
+ request_sender: &mut hyper::client::conn::http1::SendRequest<
+        http_body_util::Empty<bytes::Bytes>,
+ >,
+) {
+ use bytes::Bytes;
+ use http_body_util::Empty;
+ use hyper::{Method, Request, StatusCode};
+
+ let mut uri = format!(
+ "/search?q={}",
+        url::form_urlencoded::byte_serialize(query.as_bytes()).collect::<String>()
+ );
+ if let Some(limit) = limit {
+ uri.push_str(&format!("&limit={limit}"));
+ }
+
+ let request = Request::builder()
+ .method(Method::GET)
+ .uri(uri)
+        .body(Empty::<Bytes>::new())
+ .unwrap();
+
+ let mut res = request_sender.send_request(request).await.unwrap();
+
+ if res.status() != StatusCode::OK {
+ eprintln!("Request failed with status: {}", res.status());
+ return;
+ }
+
+ // Parse JSON response and output each result as a line (JSONL format)
+ let mut body_bytes = Vec::new();
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ body_bytes.extend_from_slice(chunk);
+ }
+ }
+
+ let body_str = String::from_utf8(body_bytes.clone()).unwrap_or_else(|_| {
+ eprintln!("Server returned invalid UTF-8");
+ String::from_utf8_lossy(&body_bytes).to_string()
+ });
+
+    match serde_json::from_str::<Vec<serde_json::Value>>(&body_str) {
+ Ok(results) => {
+ for result in results {
+ println!("{}", serde_json::to_string(&result).unwrap());
+ }
+ }
+ Err(e) => {
+ eprintln!("Failed to parse JSON response: {e}");
+ eprintln!("Raw response: {body_str}");
+ }
+ }
+}
+
+async fn handle_view_command(
+    command: Option<ViewCommand>,
+ request_sender: &mut hyper::client::conn::http1::SendRequest<
+        http_body_util::Empty<bytes::Bytes>,
+ >,
+) {
+ use bytes::Bytes;
+ use http_body_util::Empty;
+ use hyper::{Method, Request, StatusCode};
+
+ let uri = match &command {
+ None => "/view".to_string(),
+ Some(ViewCommand::Nav) => "/view/nav".to_string(),
+ };
+
+ let request = Request::builder()
+ .method(Method::GET)
+ .uri(uri)
+        .body(Empty::<Bytes>::new())
+ .unwrap();
+
+ let mut res = request_sender.send_request(request).await.unwrap();
+
+ if res.status() != StatusCode::OK {
+ eprintln!("Request failed with status: {}", res.status());
+ return;
+ }
+
+ let mut body_bytes = Vec::new();
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ body_bytes.extend_from_slice(chunk);
+ }
+ }
+
+ let body_str = String::from_utf8(body_bytes.clone()).unwrap_or_else(|_| {
+ eprintln!("Server returned invalid UTF-8");
+ String::from_utf8_lossy(&body_bytes).to_string()
+ });
+
+ // All view commands output JSON directly
+ println!("{body_str}");
+}
+
+async fn handle_cas_command(
+ command: CasCommand,
+ request_sender: &mut hyper::client::conn::http1::SendRequest<
+        http_body_util::Empty<bytes::Bytes>,
+ >,
+) {
+ use bytes::Bytes;
+ use http_body_util::Empty;
+ use hyper::{Method, Request, StatusCode};
+
+ let (method, uri) = match &command {
+ CasCommand::List => (Method::GET, "/cas".to_string()),
+ CasCommand::Get { hash } => (Method::GET, format!("/cas/{hash}")),
+ CasCommand::Purge { hash } => (Method::DELETE, format!("/cas/{hash}")),
+ };
+
+ let request = Request::builder()
+ .method(method)
+ .uri(uri)
+        .body(Empty::<Bytes>::new())
+ .unwrap();
+
+ let mut res = request_sender.send_request(request).await.unwrap();
+
+ if res.status() != StatusCode::OK {
+ eprintln!("Request failed with status: {}", res.status());
+ return;
+ }
+
+ match command {
+ CasCommand::List => {
+ // Parse JSON response and output one hash per line
+ let mut body_bytes = Vec::new();
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ body_bytes.extend_from_slice(chunk);
+ }
+ }
+
+ let body_str = String::from_utf8(body_bytes.clone()).unwrap_or_else(|_| {
+ eprintln!("Server returned invalid UTF-8");
+ String::from_utf8_lossy(&body_bytes).to_string()
+ });
+            match serde_json::from_str::<Vec<String>>(&body_str) {
+ Ok(hashes) => {
+ for hash in hashes {
+ println!("{hash}");
+ }
+ }
+ Err(e) => {
+ eprintln!("Failed to parse JSON response: {e}");
+ eprintln!("Raw response: {body_str}");
+ }
+ }
+ }
+ CasCommand::Get { .. } => {
+ // Stream content to stdout
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ std::io::stdout()
+ .write_all(chunk)
+ .expect("Error writing to stdout");
+ }
+ }
+ }
+ CasCommand::Purge { .. } => {
+ // Output success/error message
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ std::io::stdout()
+ .write_all(chunk)
+ .expect("Error writing to stdout");
+ }
+ }
+ }
+ }
+}
+
+async fn handle_legacy_request(
+ args: Args,
+ request_sender: &mut hyper::client::conn::http1::SendRequest<
+        http_body_util::Empty<bytes::Bytes>,
+ >,
+) {
+ use bytes::Bytes;
+ use http_body_util::Empty;
+ use hyper::{Request, StatusCode};
+
+ let request = if args.delete {
+ Request::builder()
+ .method("DELETE")
+ .uri(&format!("/delete/{}", args.id.unwrap_or_default()))
+            .body(Empty::<Bytes>::new())
+ .unwrap()
+ } else {
+ Request::builder()
+ .method("GET")
+ .uri(&format!(
+ "/{}{}",
+ args.id.unwrap_or_default(),
+ if args.html { "?as-html" } else { "" }
+ ))
+            .body(Empty::<Bytes>::new())
+ .unwrap()
+ };
+
+ let mut res = request_sender.send_request(request).await.unwrap();
+
+ if res.status() != StatusCode::OK {
+ eprintln!("Request failed with status: {}", res.status());
+ return;
+ }
+
+ if args.delete {
+ // For delete requests, just output the response message
+ while let Some(next) = res.frame().await {
+ let frame = next.expect("Error reading frame");
+ if let Some(chunk) = frame.data_ref() {
+ std::io::stdout()
+ .write_all(chunk)
+ .expect("Error writing to stdout");
+ }
+ }
+ return;
+ }
+
+ // Handle non-delete responses (existing logic)
if args.meta {
if let Some(metadata) = res.headers().get("X-Stacks-Clip-Metadata") {
println!("{}", metadata.to_str().unwrap());
diff --git a/src-tauri/src/commands.rs b/src-tauri/src/commands.rs
index 1a94f68..8504da2 100644
--- a/src-tauri/src/commands.rs
+++ b/src-tauri/src/commands.rs
@@ -12,8 +12,7 @@ use crate::state::SharedState;
use crate::store::{
InProgressStream, MimeType, Movement, Settings, StackLockStatus, StackSortOrder,
};
-use crate::ui::{with_meta, Item as UIItem, Nav, UI};
-use crate::view::View;
+use crate::ui::{with_meta, Item as UIItem, Nav};
#[derive(Debug, Clone, serde::Serialize)]
struct ExecStatus {
@@ -130,7 +129,7 @@ pub async fn store_pipe_stack_to_shell(
Some("image/png") => (MimeType::ImagePng, "Image".to_string()),
Some("text/html") => (MimeType::TextPlain, "HTML".to_string()),
Some(mime_type) => {
- eprintln!("mime_type: {:?}", mime_type);
+ eprintln!("mime_type: {mime_type:?}");
todo!()
}
};
@@ -365,7 +364,7 @@ pub async fn store_pipe_to_command(
Some("image/png") => (MimeType::ImagePng, "Image".to_string()),
Some("text/html") => (MimeType::TextPlain, "HTML".to_string()),
Some(mime_type) => {
- eprintln!("mime_type: {:?}", mime_type);
+ eprintln!("mime_type: {mime_type:?}");
todo!()
}
};
@@ -914,12 +913,7 @@ pub fn store_undo(app: tauri::AppHandle, state: tauri::State) {
state.with_lock(|state| {
if let Some(item) = state.view.undo.clone() {
state.store.remove_packet(&item.last_touched);
- let mut view = View::new();
- state.store.scan().for_each(|p| view.merge(&p));
- let mut ui = UI::new(&view);
- ui.select(view.get_focus_for_id(&item.id));
- state.view = view;
- state.ui = ui;
+ state.rescan(Some(item.id));
}
});
app.emit_all("refresh-items", true).unwrap();
diff --git a/src-tauri/src/http.rs b/src-tauri/src/http.rs
index 23edf3b..5f38c70 100644
--- a/src-tauri/src/http.rs
+++ b/src-tauri/src/http.rs
@@ -29,10 +29,6 @@ async fn handle(
req: Request,
) -> HTTPResult {
let path = req.uri().path();
- let id_option = match path.strip_prefix('/') {
- Some("") | None => None, // Path is "/" or empty
- Some(id_str) => scru128::Scru128Id::from_str(id_str).ok(),
- };
use std::collections::HashMap;
let params: HashMap = req
@@ -47,6 +43,46 @@ async fn handle(
let as_html = params.contains_key("as-html");
+ // Handle CAS routes
+ if path.starts_with("/cas") {
+ return handle_cas(req.method(), path, state, app_handle).await;
+ }
+
+ // Handle stacks routes
+ if path == "/stacks" && req.method() == Method::GET {
+ return get_stacks_list(state).await;
+ }
+
+ // Handle stream routes
+ if path == "/stream" && req.method() == Method::GET {
+ return get_packet_stream(state).await;
+ }
+
+ // Handle search routes
+ if path == "/search" && req.method() == Method::GET {
+ return handle_search(req.uri().query(), state).await;
+ }
+
+ // Handle view routes
+ if path == "/view" && req.method() == Method::GET {
+ return get_view(state).await;
+ }
+
+ if path == "/view/nav" && req.method() == Method::GET {
+ return get_view_nav(state).await;
+ }
+
+ // Handle delete routes
+ if path.starts_with("/delete") && req.method() == Method::DELETE {
+ return handle_delete(path, state, app_handle).await;
+ }
+
+ // Handle legacy routes
+ let id_option = match path.strip_prefix('/') {
+ Some("") | None => None, // Path is "/" or empty
+ Some(id_str) => scru128::Scru128Id::from_str(id_str).ok(),
+ };
+
match (req.method(), id_option) {
(&Method::GET, id) => get(id, state, as_html).await,
(&Method::POST, None) if path == "/" => post(req, state, app_handle).await,
@@ -54,6 +90,277 @@ async fn handle(
}
}
+async fn handle_cas(
+ method: &Method,
+ path: &str,
+ state: SharedState,
+ app_handle: tauri::AppHandle,
+) -> HTTPResult {
+ match (method, path) {
+ (&Method::GET, "/cas") => get_cas_list(state).await,
+ (&Method::GET, path) if path.starts_with("/cas/") => {
+ let hash_str = &path[5..]; // Remove "/cas/" prefix
+ match ssri::Integrity::from_str(hash_str) {
+ Ok(hash) => get_cas_content(state, hash).await,
+ Err(_) => response_404(),
+ }
+ }
+ (&Method::DELETE, path) if path.starts_with("/cas/") => {
+ let hash_str = &path[5..]; // Remove "/cas/" prefix
+ match ssri::Integrity::from_str(hash_str) {
+ Ok(hash) => delete_cas_content(state, hash, app_handle).await,
+ Err(_) => response_404(),
+ }
+ }
+ _ => response_404(),
+ }
+}
+
+async fn get_cas_list(state: SharedState) -> HTTPResult {
+ let hashes = state.with_lock(|state| state.store.enumerate_cas());
+
+ // Convert Integrity objects to strings for JSON serialization
+    let hash_strings: Vec<String> = hashes.iter().map(|h| h.to_string()).collect();
+ let json_response = serde_json::to_string(&hash_strings).unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "application/json")
+ .body(full(json_response))?)
+}
+
+async fn get_cas_content(state: SharedState, hash: ssri::Integrity) -> HTTPResult {
+ let (content, meta) = state.with_lock(|state| {
+ let content = state.store.cas_read(&hash);
+ let meta = state.store.get_content_meta(&hash);
+ (content, meta)
+ });
+
+ match content {
+ Some(_content_bytes) => {
+ let cache_path = state.with_lock(|state| state.store.cache_path.clone());
+ let reader = cacache::Reader::open_hash(cache_path, hash.clone())
+ .await
+ .unwrap();
+
+ let stream = tokio_util::io::ReaderStream::new(reader);
+ let stream = stream
+ .map_ok(Frame::data)
+ .map_err(|e| Box::new(e) as BoxError);
+ let body = BodyExt::boxed(StreamBody::new(stream));
+
+ let content_type = match meta {
+ Some(ref meta) => match meta.mime_type {
+ MimeType::TextPlain => "text/plain",
+ MimeType::ImagePng => "image/png",
+ },
+ None => "application/octet-stream",
+ };
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", content_type)
+ .header("X-Stacks-CAS-Hash", hash.to_string())
+ .body(body)?)
+ }
+ None => response_404(),
+ }
+}
+
+async fn delete_cas_content(
+ state: SharedState,
+ hash: ssri::Integrity,
+ app_handle: tauri::AppHandle,
+) -> HTTPResult {
+ let result = state.with_lock(|state| {
+ let purge_result = state.store.purge(&hash);
+ if purge_result.is_ok() {
+ // Rescan to clean up any dangling references after purge
+ state.rescan(None);
+ }
+ purge_result
+ });
+
+ // Notify UI to refresh after successful purge and rescan
+ if result.is_ok() {
+ app_handle.emit_all("refresh-items", true).unwrap();
+ }
+
+ match result {
+ Ok(_) => {
+ let response_body = format!("Purged content with hash: {hash}");
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "text/plain")
+ .body(full(response_body))?)
+ }
+ Err(e) => {
+ let error_body = format!("Error purging content: {e}");
+ Ok(Response::builder()
+ .status(StatusCode::INTERNAL_SERVER_ERROR)
+ .header("Content-Type", "text/plain")
+ .body(full(error_body))?)
+ }
+ }
+}
+
+async fn get_stacks_list(state: SharedState) -> HTTPResult {
+ let stacks = state.with_lock(|state| {
+ // Find all items that are stacks (stack_id is None)
+ let stack_items: Vec<_> = state
+ .view
+ .items
+ .values()
+ .filter(|item| item.stack_id.is_none())
+ .cloned()
+ .collect();
+
+ // Convert to UI items with full metadata
+ stack_items
+ .into_iter()
+ .map(|item| crate::ui::with_meta(&state.store, &item))
+            .collect::<Vec<_>>()
+ });
+
+ let json_response = serde_json::to_string(&stacks).unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "application/json")
+ .body(full(json_response))?)
+}
+
+async fn get_packet_stream(state: SharedState) -> HTTPResult {
+ let packets: Vec<_> = state.with_lock(|state| state.store.scan().collect());
+
+ let json_response = serde_json::to_string(&packets).unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "application/json")
+ .body(full(json_response))?)
+}
+
+async fn handle_search(query_str: Option<&str>, state: SharedState) -> HTTPResult {
+ let query_str = query_str.unwrap_or("");
+
+ // Parse query parameters
+    let params: std::collections::HashMap<String, String> =
+ url::form_urlencoded::parse(query_str.as_bytes())
+ .into_owned()
+ .collect();
+
+ let query = match params.get("q") {
+ Some(q) => q,
+ None => {
+ return Ok(Response::builder()
+ .status(StatusCode::BAD_REQUEST)
+ .header("Content-Type", "text/plain")
+ .body(full("Missing 'q' parameter"))?);
+ }
+ };
+
+    let limit = params.get("limit").and_then(|l| l.parse::<usize>().ok());
+
+ let results =
+ state.with_lock(|state| state.store.index.query(query, limit).unwrap_or_default());
+
+ // Convert results to JSON format
+    let json_results: Vec<serde_json::Value> = results
+ .into_iter()
+ .map(|(hash, score)| {
+ serde_json::json!({
+ "hash": hash.to_string(),
+ "score": score
+ })
+ })
+ .collect();
+
+ let json_response = serde_json::to_string(&json_results).unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "application/json")
+ .body(full(json_response))?)
+}
+
+async fn get_view(state: SharedState) -> HTTPResult {
+ let view = state.with_lock(|state| state.view.clone());
+
+ let json_response = serde_json::to_string(&view).unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "application/json")
+ .body(full(json_response))?)
+}
+
+async fn get_view_nav(state: SharedState) -> HTTPResult {
+ let nav = state.with_lock(|state| state.ui.render(&state.store));
+
+ let json_response = serde_json::to_string(&nav).unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "application/json")
+ .body(full(json_response))?)
+}
+
+async fn handle_delete(path: &str, state: SharedState, app_handle: tauri::AppHandle) -> HTTPResult {
+ // Parse the ID from the path: /delete/{id} or /delete/ (empty for default)
+ let id_str = path.strip_prefix("/delete/").unwrap_or("");
+ let id_option = if id_str.is_empty() {
+ None
+ } else {
+ scru128::Scru128Id::from_str(id_str).ok()
+ };
+
+ let result = state.with_lock(|state| {
+ // Find the item to delete (either specific ID or default to first item)
+ let item = if let Some(id) = id_option {
+ state.view.items.get(&id).cloned()
+ } else {
+ state.view.first().map(|focus| focus.item.clone())
+ };
+
+ let Some(item) = item else {
+ return Err("Item not found".to_string());
+ };
+
+ // Get the item's hash for CAS cleanup
+ let hash = item.hash.clone();
+
+ // Delete the item (creates Delete packet)
+ let _packet = state.store.delete(item.id);
+
+ // Purge the CAS content
+ if let Err(e) = state.store.purge(&hash) {
+ tracing::warn!("Failed to purge CAS content for {}: {}", hash, e);
+ }
+
+ // Trigger rescan to clean up dangling references
+ state.rescan(None);
+
+ Ok(format!("Deleted item: {}", item.id))
+ });
+
+ // Notify UI to refresh after successful deletion
+ if result.is_ok() {
+ app_handle.emit_all("refresh-items", true).unwrap();
+ }
+
+ match result {
+ Ok(message) => Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", "text/plain")
+ .body(full(message))?),
+ Err(error) => Ok(Response::builder()
+ .status(StatusCode::NOT_FOUND)
+ .header("Content-Type", "text/plain")
+ .body(full(error))?),
+ }
+}
+
fn get_as_html(state: SharedState, hash: ssri::Integrity) -> HTTPResult {
let preview = state.with_lock(|state| {
let content = state.store.get_content(&hash);
@@ -242,7 +549,7 @@ pub fn start(app_handle: tauri::AppHandle, state: SharedState, db_path: &str) {
// Silently ignore the NotConnected error
} else {
// Handle or log other errors
- println!("Error serving connection: {:?}", err);
+ println!("Error serving connection: {err:?}");
}
}
});
diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs
index a197f5a..a74d1b8 100644
--- a/src-tauri/src/main.rs
+++ b/src-tauri/src/main.rs
@@ -1,6 +1,7 @@
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
#![recursion_limit = "512"]
+#![allow(unexpected_cfgs)]
use tracing::info;
diff --git a/src-tauri/src/serve.rs b/src-tauri/src/serve.rs
index 8ded1d4..8310f57 100644
--- a/src-tauri/src/serve.rs
+++ b/src-tauri/src/serve.rs
@@ -154,7 +154,7 @@ fn init_tracing() {
fn system_tray(version: &str) -> SystemTray {
let menu = SystemTrayMenu::new()
.add_item(CustomMenuItem::new("".to_string(), "Stacks").disabled())
- .add_item(CustomMenuItem::new("".to_string(), format!("Version {}", version)).disabled())
+ .add_item(CustomMenuItem::new("".to_string(), format!("Version {version}")).disabled())
.add_native_item(tauri::SystemTrayMenuItem::Separator)
.add_item(CustomMenuItem::new(
"check-updates".to_string(),
diff --git a/src-tauri/src/spotlight.rs b/src-tauri/src/spotlight.rs
index a974dd5..1c979c7 100644
--- a/src-tauri/src/spotlight.rs
+++ b/src-tauri/src/spotlight.rs
@@ -76,7 +76,7 @@ pub fn register_shortcut(app: AppHandle, shortcut: &str) -> Result<(), Erro
shortcut_manager
.register(shortcut, move || {
let panel = app.get_panel("main").unwrap_or_else(|e| {
- eprintln!("Failed to get panel: {:?}", e);
+ eprintln!("Failed to get panel: {e:?}");
panic!("Panel not found")
});
diff --git a/src-tauri/src/state.rs b/src-tauri/src/state.rs
index b8dce4f..7af6c8c 100644
--- a/src-tauri/src/state.rs
+++ b/src-tauri/src/state.rs
@@ -23,10 +23,48 @@ pub struct State {
}
impl State {
+ fn garbage_collect_delete_packet(store: &mut Store, packet: &Packet) {
+ if let Some(source_id) = packet.source_id {
+ if let Some(original_packet) = store.get_packet(&source_id) {
+ // If original packet has CAS content, purge it
+ if let Some(hash) = &original_packet.hash {
+ if let Err(e) = store.purge(hash) {
+ tracing::warn!("Failed to purge CAS content during GC: {}", e);
+ }
+ }
+ // Remove original packet
+ store.remove_packet(&source_id);
+ }
+ // Remove delete packet
+ store.remove_packet(&packet.id);
+ }
+ }
+
pub fn new(db_path: &str, packet_sender: Sender) -> Self {
- let store = Store::new(db_path);
+ let mut store = Store::new(db_path);
let mut view = View::new();
- store.scan().for_each(|p| view.merge(&p));
+ let mut delete_packets = Vec::new();
+
+ store.scan().for_each(|p| {
+ // Collect delete packets for garbage collection
+ if p.packet_type == crate::store::PacketType::Delete {
+ delete_packets.push(p.clone());
+ }
+
+ view.merge(&p);
+ });
+
+ // Garbage collection: process collected delete packets
+ let had_deletes = !delete_packets.is_empty();
+ for packet in delete_packets {
+ Self::garbage_collect_delete_packet(&mut store, &packet);
+ }
+
+ // Rebuild view after garbage collection to remove dangling references
+ if had_deletes {
+ view = View::new();
+ store.scan().for_each(|p| view.merge(&p));
+ }
let ui = UI::new(&view);
let state = Self {
@@ -87,6 +125,18 @@ impl State {
self.ui.refresh_view(&self.view);
let _ = self.packet_sender.send(self.view.clone());
}
+
+    pub fn rescan(&mut self, focus_item_id: Option<scru128::Scru128Id>) {
+ let mut view = View::new();
+ self.store.scan().for_each(|p| view.merge(&p));
+ let mut ui = UI::new(&view);
+ if let Some(id) = focus_item_id {
+ ui.select(view.get_focus_for_id(&id));
+ }
+ self.view = view;
+ self.ui = ui;
+ let _ = self.packet_sender.send(self.view.clone());
+ }
}
pub type SharedState = Arc>;
diff --git a/src-tauri/src/store.rs b/src-tauri/src/store.rs
index 65fda67..82a07ae 100644
--- a/src-tauri/src/store.rs
+++ b/src-tauri/src/store.rs
@@ -172,6 +172,7 @@ pub struct Index {
hash_field: tantivy::schema::Field,
writer: tantivy::IndexWriter,
reader: tantivy::IndexReader,
+ index: tantivy::Index,
}
impl Index {
@@ -192,6 +193,7 @@ impl Index {
hash_field,
writer,
reader,
+ index,
}
}
@@ -207,26 +209,31 @@ impl Index {
self.reader.reload().unwrap();
}
- #[cfg(test)]
-    pub fn query(&self, query: &str) -> HashSet<ssri::Integrity> {
- use tantivy::schema::Value;
- let term = tantivy::schema::Term::from_field_text(self.content_field, query);
- let query = tantivy::query::FuzzyTermQuery::new_prefix(term, 1, true);
+ pub fn query(
+ &self,
+ q: &str,
+        limit: Option<usize>,
+    ) -> Result<Vec<(ssri::Integrity, f32)>, Box<dyn std::error::Error>> {
+ // Build a QueryParser that targets the `content` field
+ let parser = tantivy::query::QueryParser::for_index(&self.index, vec![self.content_field]);
+ let query = parser.parse_query(q)?;
let searcher = self.reader.searcher();
- let top_docs = searcher
- .search(&query, &tantivy::collector::TopDocs::with_limit(10000))
- .unwrap();
+ let max = limit.unwrap_or(10_000);
+ let top_docs = searcher.search(&query, &tantivy::collector::TopDocs::with_limit(max))?;
- top_docs
+ let results = top_docs
.into_iter()
- .map(|(_, doc_address)| {
+ .map(|(score, doc_address)| {
+ use tantivy::schema::Value;
let doc: tantivy::TantivyDocument = searcher.doc(doc_address).unwrap();
let bytes = doc.get_first(self.hash_field).unwrap().as_bytes().unwrap();
let hash: ssri::Integrity = bincode::deserialize(bytes).unwrap();
- hash
+ (hash, score)
})
- .collect()
+ .collect();
+
+ Ok(results)
}
}
@@ -316,6 +323,12 @@ impl Store {
match (hash, meta) {
(Ok(hash), Ok(meta)) => {
+ // Skip content metadata if CAS content no longer exists
+ if self.cas_read(&hash).is_none() {
+ tracing::warn!("Skipping content metadata for missing CAS entry: {}", hash);
+ continue;
+ }
+
if meta.mime_type == MimeType::TextPlain
&& meta.tiktokens == 0
&& !meta.terse.is_empty()
@@ -403,6 +416,28 @@ impl Store {
cacache::read_hash_sync(&self.cache_path, hash).ok()
}
+ #[tracing::instrument(skip_all)]
+ pub fn purge(&mut self, hash: &Integrity) -> Result<(), Box> {
+ // Remove from CAS storage
+ cacache::remove_hash_sync(&self.cache_path, hash)?;
+
+ // Remove from content metadata
+ let hash_bytes = bincode::serialize(hash)?;
+ self.content_meta.remove(hash_bytes)?;
+
+ // Remove from in-memory cache
+ self.content_meta_cache.remove(hash);
+
+ Ok(())
+ }
+
+ #[tracing::instrument(skip_all)]
+    pub fn enumerate_cas(&self) -> Vec<Integrity> {
+ // Since we use cacache::write_hash_sync (no key), list_sync won't find entries.
+ // Instead, enumerate from our content metadata cache, which tracks all CAS hashes.
+ self.content_meta_cache.keys().cloned().collect()
+ }
+
pub fn update_tiktokens(&mut self, hash: ssri::Integrity, tiktokens: usize) {
if let Some(meta) = self.content_meta_cache.get(&hash) {
let mut meta = meta.clone();
@@ -422,10 +457,20 @@ impl Store {
self.packets.insert(packet.id.to_bytes(), encoded).unwrap();
}
- pub fn scan(&self) -> impl Iterator<Item = Packet> + '_ {
+ pub fn scan(&self) -> impl Iterator<Item = Packet>
+ + use<'_> {
self.packets
.iter()
.filter_map(|item| item.ok().and_then(|(_, value)| deserialize_packet(&value)))
+ .filter(|packet| {
+ // Skip packets with dangling CAS hashes
+ if let Some(hash) = &packet.hash {
+ if self.cas_read(hash).is_none() {
+ tracing::warn!("Skipping packet with missing CAS content: {}", hash);
+ return false;
+ }
+ }
+ true
+ })
}
pub fn add(&mut self, content: &[u8], mime_type: MimeType, stack_id: Scru128Id) -> Packet {
@@ -660,6 +705,11 @@ impl Store {
packet
}
+ pub fn get_packet(&self, id: &Scru128Id) -> Option<Packet> {
+ let value = self.packets.get(id.to_bytes()).unwrap();
+ value.and_then(|value| deserialize_packet(&value))
+ }
+
pub fn remove_packet(&self, id: &Scru128Id) -> Option<Packet> {
let removed = self.packets.remove(id.to_bytes()).unwrap();
removed.and_then(|value| deserialize_packet(&value))
diff --git a/src-tauri/src/store_tests.rs b/src-tauri/src/store_tests.rs
index 0cfaf08..fbd17e5 100644
--- a/src-tauri/src/store_tests.rs
+++ b/src-tauri/src/store_tests.rs
@@ -35,12 +35,7 @@ fn test_update() {
let packet = store.add_stack(content, StackLockStatus::Unlocked);
let updated_content = b"Hello, updated world!";
- let update_packet = store.update(
- packet.id.clone(),
- Some(updated_content),
- MimeType::TextPlain,
- None,
- );
+ let update_packet = store.update(packet.id, Some(updated_content), MimeType::TextPlain, None);
let stored_update_packet = store.scan().last().unwrap();
assert_eq!(update_packet, stored_update_packet);
@@ -69,12 +64,7 @@ fn test_fork() {
let packet = store.add_stack(content, StackLockStatus::Unlocked);
let forked_content = b"Hello, forked world!";
- let forked_packet = store.fork(
- packet.id.clone(),
- Some(forked_content),
- MimeType::TextPlain,
- None,
- );
+ let forked_packet = store.fork(packet.id, Some(forked_content), MimeType::TextPlain, None);
let stored_fork_packet = store.scan().last().unwrap();
assert_eq!(forked_packet, stored_fork_packet);
@@ -99,7 +89,7 @@ fn test_delete() {
let mut store = Store::new(path);
let content = b"Hello, world!";
let packet = store.add_stack(content, StackLockStatus::Unlocked);
- let delete_packet = store.delete(packet.id.clone());
+ let delete_packet = store.delete(packet.id);
let stored_delete_packet = store.scan().last().unwrap();
assert_eq!(delete_packet, stored_delete_packet);
}
@@ -119,10 +109,10 @@ fn test_query() {
store.add_stack(content2, StackLockStatus::Unlocked);
store.add_stack(content3, StackLockStatus::Unlocked);
- let results = store.index.query("fzzy");
+ let results = store.index.query("fuzzy", None).unwrap();
let results: Vec<_> = results
.into_iter()
- .map(|hash| store.cas_read(&hash).unwrap())
+ .map(|(hash, _score)| store.cas_read(&hash).unwrap())
.collect();
assert_eq!(results, vec![b"Hello, fuzzy world!".to_vec()]);
}
@@ -132,3 +122,71 @@ fn test_is_valid_https_url() {
assert!(is_valid_https_url(b"https://www.example.com"));
assert!(!is_valid_https_url(b"Good afternoon"));
}
+
+#[test]
+fn test_purge() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let mut store = Store::new(temp_dir.path().to_str().unwrap());
+
+ let content = b"SECRET_KEY=super_secret_value";
+ let stack_id = scru128::new();
+ let packet = store.add(content, MimeType::TextPlain, stack_id);
+ let hash = packet.hash.clone().unwrap();
+
+ // Verify content exists before purge
+ assert!(store.cas_read(&hash).is_some());
+ assert!(store.get_content_meta(&hash).is_some());
+
+ // Purge the content
+ store.purge(&hash).unwrap();
+
+ // Verify content is gone after purge
+ assert!(store.cas_read(&hash).is_none());
+ assert!(store.get_content_meta(&hash).is_none());
+
+ // Test that scan_content_meta skips missing content
+ let content_meta_cache = store.scan_content_meta();
+ assert!(!content_meta_cache.contains_key(&hash));
+
+ // Add some new content to verify the store still works
+ let new_content = b"This is safe content";
+ let new_packet = store.add(new_content, MimeType::TextPlain, stack_id);
+ let new_hash = new_packet.hash.unwrap();
+
+ assert!(store.cas_read(&new_hash).is_some());
+ assert!(store.get_content_meta(&new_hash).is_some());
+}
+
+#[test]
+fn test_enumerate_cas() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let mut store = Store::new(temp_dir.path().to_str().unwrap());
+
+ let stack_id = scru128::new();
+
+ // Add some content
+ let content1 = b"First content";
+ let packet1 = store.add(content1, MimeType::TextPlain, stack_id);
+ let hash1 = packet1.hash.unwrap();
+
+ let content2 = b"Second content";
+ let packet2 = store.add(content2, MimeType::TextPlain, stack_id);
+ let hash2 = packet2.hash.unwrap();
+
+ // Enumerate CAS entries
+ let cas_hashes = store.enumerate_cas();
+
+ // Should contain both hashes
+ assert!(cas_hashes.contains(&hash1));
+ assert!(cas_hashes.contains(&hash2));
+ assert_eq!(cas_hashes.len(), 2);
+
+ // Purge one entry
+ store.purge(&hash1).unwrap();
+
+ // Enumerate again - should only have one hash now
+ let cas_hashes_after_purge = store.enumerate_cas();
+ assert!(!cas_hashes_after_purge.contains(&hash1));
+ assert!(cas_hashes_after_purge.contains(&hash2));
+ assert_eq!(cas_hashes_after_purge.len(), 1);
+}
diff --git a/src-tauri/src/ui.rs b/src-tauri/src/ui.rs
index 7cf7a0b..b36c76e 100644
--- a/src-tauri/src/ui.rs
+++ b/src-tauri/src/ui.rs
@@ -309,7 +309,7 @@ use comrak::plugins::syntect::SyntectAdapter;
use comrak::{markdown_to_html_with_plugins, ComrakOptions, ComrakPlugins};
pub fn markdown_to_html(theme_mode: &str, input: &[u8]) -> String {
- let adapter = SyntectAdapter::new(&format!("base16-ocean.{}", theme_mode));
+ let adapter = SyntectAdapter::new(&format!("base16-ocean.{theme_mode}"));
let mut options = ComrakOptions::default();
options.extension.tasklist = true;
@@ -340,7 +340,7 @@ pub fn code_to_html(theme_mode: &str, input: &[u8], ext: &str, ps: &SyntaxSet) -
let syntax = ps
.find_syntax_by_extension(ext)
.unwrap_or_else(|| ps.find_syntax_plain_text());
- let theme = &ts.themes[&format!("base16-ocean.{}", theme_mode)];
+ let theme = &ts.themes[&format!("base16-ocean.{theme_mode}")];
let input_str = String::from_utf8(input.to_owned()).unwrap();
let highlighted_html = highlighted_html_for_string(&input_str, ps, syntax, theme);
highlighted_html.unwrap()
diff --git a/src-tauri/src/ui_tests.rs b/src-tauri/src/ui_tests.rs
index 1e9a5cb..5b1a681 100644
--- a/src-tauri/src/ui_tests.rs
+++ b/src-tauri/src/ui_tests.rs
@@ -81,7 +81,7 @@ fn test_ui_render() {
.map(|i| {
state
.store
- .add_stack(format!("Stack {}", i).as_bytes(), StackLockStatus::Unlocked)
+ .add_stack(format!("Stack {i}").as_bytes(), StackLockStatus::Unlocked)
.id
})
.collect();
@@ -105,7 +105,7 @@ fn test_ui_render() {
assert_nav_as_expected!(&state.ui.render(&state.store), (None, None));
// post initial merge state
- state.store.scan().for_each(|p| state.merge(&p));
+ state.rescan(None);
assert_nav_as_expected!(
&state.ui.render(&state.store),
(
@@ -395,7 +395,7 @@ fn test_ui_generate_preview() {
.update_content_type(nushell_packet.hash.clone().unwrap(), "Nushell".to_string());
// Scan and merge to update the state
- state.store.scan().for_each(|p| state.merge(&p));
+ state.rescan(None);
// Test Rust preview generation
let rust_content = state
diff --git a/src-tauri/src/view_tests.rs b/src-tauri/src/view_tests.rs
index b0bcc0b..2980a86 100644
--- a/src-tauri/src/view_tests.rs
+++ b/src-tauri/src/view_tests.rs
@@ -19,7 +19,7 @@ fn assert_view_as_expected(
.iter()
.filter_map(|item| {
let children = view
- .children(&item)
+ .children(item)
.iter()
.filter_map(|child_id| {
view.items
@@ -163,7 +163,7 @@ fn test_no_duplicate_entry_on_same_hash() {
// Add second item with same hash
let id2 = state.store.add(b"Item 1", MimeType::TextPlain, stack_id).id;
- state.store.scan().for_each(|p| state.merge(&p));
+ state.rescan(None);
// Check that the stack item only has one child and that the item has been updated correctly
assert_view_as_expected!(&state.store, &state.view, vec![("Stack 1", vec!["Item 1"])]);
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index 2a26d51..8dc979b 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -1,7 +1,7 @@
{
"package": {
"productName": "Stacks",
- "version": "0.15.13"
+ "version": "0.15.14-dev"
},
"build": {
"beforeDevCommand": "npm run dev",