diff --git a/config/config.docker.yaml b/config/config.docker.yaml index ca06de4f..a0a2db15 100644 --- a/config/config.docker.yaml +++ b/config/config.docker.yaml @@ -136,7 +136,6 @@ komga_api: enabled: true prefix: komga # URL prefix: /{prefix}/api/v1/... - # Rate Limiting (enabled by default) # ================================== # Protects API endpoints from abuse using token bucket algorithm @@ -153,3 +152,6 @@ komga_api: # - /api/v1/books/*/thumbnail # Exempt book thumbnails # cleanup_interval_secs: 60 # How often to clean up stale buckets # bucket_ttl_secs: 300 # Time before a bucket is considered stale + +koreader_api: + enabled: true diff --git a/docs/api/openapi.json b/docs/api/openapi.json index ed748d98..0f2c1e7f 100644 --- a/docs/api/openapi.json +++ b/docs/api/openapi.json @@ -17307,6 +17307,13 @@ "description": "Book unique identifier", "example": "550e8400-e29b-41d4-a716-446655440001" }, + "koreaderHash": { + "type": [ + "string", + "null" + ], + "description": "KOReader-compatible partial MD5 hash for sync" + }, "libraryId": { "type": "string", "format": "uuid", @@ -22701,6 +22708,13 @@ "description": "Book unique identifier", "example": "550e8400-e29b-41d4-a716-446655440001" }, + "koreaderHash": { + "type": [ + "string", + "null" + ], + "description": "KOReader-compatible partial MD5 hash for sync" + }, "libraryId": { "type": "string", "format": "uuid", @@ -27014,6 +27028,13 @@ "description": "Book unique identifier", "example": "550e8400-e29b-41d4-a716-446655440001" }, + "koreaderHash": { + "type": [ + "string", + "null" + ], + "description": "KOReader-compatible partial MD5 hash for sync" + }, "libraryId": { "type": "string", "format": "uuid", diff --git a/docs/docs/third-party-apps.md b/docs/docs/third-party-apps.md index 63256224..ea14a2ad 100644 --- a/docs/docs/third-party-apps.md +++ b/docs/docs/third-party-apps.md @@ -38,6 +38,76 @@ While Codex is primarily tested with Komic, other Komga-compatible apps may also Compatibility with apps other 
than Komic is not officially tested. Your experience may vary. ::: +### KOReader + +[KOReader](https://koreader.rocks/) is an open-source e-book reader for E Ink devices and other platforms. Codex supports the KOReader sync protocol, allowing you to sync reading progress between KOReader and Codex. + +**Supported formats:** EPUB, PDF, CBZ, CBR + +#### Prerequisites + +1. **Enable the KOReader API** in your Codex configuration (see [Enabling the KOReader API](#enabling-the-koreader-api) below) +2. **Create an API key** in Codex (see [API Keys](./users/api-keys)) +3. **Run a deep scan** so Codex computes KOReader-compatible hashes for your books (see [Deep Scan](./libraries#deep-scan)) + +#### Setup in KOReader + +1. Open a book in KOReader +2. Go to **Top Menu** > **Tools** (🔧) > **Progress sync** +3. Select **Custom sync server** +4. Enter the server settings: + - **Server URL**: `http://your-server:8080/koreader` + - **Username**: Your Codex **API key** (e.g., `codex_abc12345_secretpart123456789`) + - **Password**: Any value (ignored by Codex) +5. Tap **Login** to verify the connection + +:::info +KOReader uses the `x-auth-user` header to send the username, which Codex treats as an API key. The password field (`x-auth-key`) is ignored because KOReader MD5-hashes the password before sending it, making direct password verification impossible. +::: + +#### How It Works + +KOReader identifies books by computing an MD5 hash of the first 4096 bytes of the file. When you enable the KOReader API and run a **deep scan**, Codex computes the same hash for each book and stores it. This allows KOReader to look up books and sync progress. 
+ +- **Progress sync is per-user**: Each user's reading progress is tracked independently +- **EPUB progress**: Codex converts between KOReader's DocFragment format and its internal position tracking +- **PDF/CBZ/CBR progress**: Page numbers are synced directly + +#### Troubleshooting KOReader + +**"Login failed" or 401 Unauthorized:** +- Make sure you're using a Codex **API key** as the username, not your regular username/password +- Verify the API key hasn't expired or been revoked +- Check that `koreader_api.enabled` is `true` in your config + +**"Book not found" (404):** +- Run a **deep scan** on your library so Codex computes KOReader hashes +- The book must be in a Codex library; KOReader identifies books by file hash, not filename + +**Progress not syncing:** +- Ensure both devices are using the same Codex server and user account +- Check that the book files are identical (same hash) across devices + +## Enabling the KOReader API + +The KOReader sync API is disabled by default. To enable it: + +### Via Configuration File + +```yaml +# codex.yaml +koreader_api: + enabled: true +``` + +### Via Environment Variables + +```bash +CODEX_KOREADER_API_ENABLED=true +``` + +After enabling, restart Codex and run a **deep scan** on your libraries to compute KOReader-compatible file hashes. + ## Enabling the Komga API The Komga-compatible API is disabled by default for security. 
To enable it: diff --git a/docs/docs/users/api-keys.md b/docs/docs/users/api-keys.md index 5e6c9c7f..02825853 100644 --- a/docs/docs/users/api-keys.md +++ b/docs/docs/users/api-keys.md @@ -141,15 +141,17 @@ The prefix is stored in plaintext for lookup, but the secret is hashed - Codex c ### OPDS / Reader Apps -Minimal permissions for read-only access: +Permissions for e-reader apps, OPDS clients, and KOReader sync: ```json { "name": "OPDS Reader", - "permissions": ["LibrariesRead", "SeriesRead", "BooksRead", "PagesRead"] + "permissions": ["LibrariesRead", "SeriesRead", "BooksRead", "PagesRead", "ProgressRead", "ProgressWrite"] } ``` +`ProgressRead` and `ProgressWrite` are needed for apps that sync reading progress (e.g., KOReader). + ### Automation Script For scripts that trigger scans and monitor progress: diff --git a/migration/src/lib.rs b/migration/src/lib.rs index 161b6820..34b3c0ec 100644 --- a/migration/src/lib.rs +++ b/migration/src/lib.rs @@ -123,6 +123,13 @@ mod m20260222_000059_add_search_title; // Add koreader_hash column for KOReader sync mod m20260309_000060_add_koreader_hash; +// Add r2_progression column for Readium/OPDS 2.0 EPUB progress sync +mod m20260314_000061_add_r2_progression; + +// Add epub_positions column for Readium positions list (cross-app sync) +mod m20260315_000062_add_epub_positions; +mod m20260316_000063_add_epub_spine_items; + pub struct Migrator; #[async_trait::async_trait] @@ -219,6 +226,12 @@ impl MigratorTrait for Migrator { Box::new(m20260222_000059_add_search_title::Migration), // Add koreader_hash for KOReader sync Box::new(m20260309_000060_add_koreader_hash::Migration), + // Add r2_progression for Readium EPUB progress sync + Box::new(m20260314_000061_add_r2_progression::Migration), + // Add epub_positions for Readium positions list (cross-app sync) + Box::new(m20260315_000062_add_epub_positions::Migration), + // Add epub_spine_items for char/byte position normalization (cross-device sync) + 
Box::new(m20260316_000063_add_epub_spine_items::Migration), ] } } diff --git a/migration/src/m20260314_000061_add_r2_progression.rs b/migration/src/m20260314_000061_add_r2_progression.rs new file mode 100644 index 00000000..d96b3d0a --- /dev/null +++ b/migration/src/m20260314_000061_add_r2_progression.rs @@ -0,0 +1,29 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("read_progress")) + .add_column(ColumnDef::new(Alias::new("r2_progression")).text()) + .to_owned(), + ) + .await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("read_progress")) + .drop_column(Alias::new("r2_progression")) + .to_owned(), + ) + .await + } +} diff --git a/migration/src/m20260315_000062_add_epub_positions.rs b/migration/src/m20260315_000062_add_epub_positions.rs new file mode 100644 index 00000000..c0464f91 --- /dev/null +++ b/migration/src/m20260315_000062_add_epub_positions.rs @@ -0,0 +1,29 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("books")) + .add_column(ColumnDef::new(Alias::new("epub_positions")).text()) + .to_owned(), + ) + .await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("books")) + .drop_column(Alias::new("epub_positions")) + .to_owned(), + ) + .await + } +} diff --git a/migration/src/m20260316_000063_add_epub_spine_items.rs b/migration/src/m20260316_000063_add_epub_spine_items.rs new file mode 100644 
index 00000000..90b8be27 --- /dev/null +++ b/migration/src/m20260316_000063_add_epub_spine_items.rs @@ -0,0 +1,29 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("books")) + .add_column(ColumnDef::new(Alias::new("epub_spine_items")).text()) + .to_owned(), + ) + .await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Alias::new("books")) + .drop_column(Alias::new("epub_spine_items")) + .to_owned(), + ) + .await + } +} diff --git a/src/api/extractors/auth.rs b/src/api/extractors/auth.rs index ef5c54c3..949d7820 100644 --- a/src/api/extractors/auth.rs +++ b/src/api/extractors/auth.rs @@ -1,3 +1,5 @@ +use tracing::debug; + use crate::api::error::ApiError; use crate::api::permissions::{Permission, UserRole}; use crate::db::repositories::{ApiKeyRepository, UserRepository}; @@ -256,6 +258,14 @@ impl FromRequestParts> for AuthContext { return extract_from_api_key(api_key, state).await; } + // Try KOReader-style x-auth-user header (value is an API key, x-auth-key is ignored) + if let Some(api_key_header) = parts.headers.get("x-auth-user") + && let Ok(api_key) = api_key_header.to_str() + { + debug!("Attempting KOReader x-auth-user API key authentication"); + return extract_from_api_key(api_key, state).await; + } + Err(ApiError::Unauthorized( "Missing or invalid authentication credentials".to_string(), )) @@ -433,6 +443,17 @@ async fn extract_from_basic_auth( let username = parts[0]; let password = parts[1]; + extract_from_credentials(username, password, state).await +} + +/// Extract auth context from username/password credentials +/// +/// Shared by Basic Auth and KOReader x-auth-user/x-auth-key header authentication. 
+async fn extract_from_credentials( + username: &str, + password: &str, + state: &AppState, +) -> Result { // Look up user by username let user = UserRepository::get_by_username(&state.db, username) .await @@ -516,6 +537,15 @@ impl FromRequestParts> for FlexibleAuthContext { .map(FlexibleAuthContext); } + // Try KOReader-style x-auth-user header (API key) + if let Some(api_key_header) = parts.headers.get("x-auth-user") + && let Ok(api_key) = api_key_header.to_str() + { + return extract_from_api_key(api_key, state) + .await + .map(FlexibleAuthContext); + } + // Try cookie as fallback if let Some(cookie_header) = parts.headers.get(COOKIE) && let Ok(cookie_str) = cookie_header.to_str() diff --git a/src/api/permissions.rs b/src/api/permissions.rs index 5257a7b0..d04d2f0e 100644 --- a/src/api/permissions.rs +++ b/src/api/permissions.rs @@ -94,6 +94,10 @@ pub enum Permission { // Pages (image serving) PagesRead, + // Progress (reading progress tracking) + ProgressRead, + ProgressWrite, + // Users (admin only) UsersRead, UsersWrite, @@ -131,6 +135,8 @@ impl Permission { Permission::BooksWrite => "books:write", Permission::BooksDelete => "books:delete", Permission::PagesRead => "pages:read", + Permission::ProgressRead => "progress:read", + Permission::ProgressWrite => "progress:write", Permission::UsersRead => "users:read", Permission::UsersWrite => "users:write", Permission::UsersDelete => "users:delete", @@ -161,6 +167,8 @@ impl FromStr for Permission { "books:write" => Ok(Permission::BooksWrite), "books:delete" => Ok(Permission::BooksDelete), "pages:read" => Ok(Permission::PagesRead), + "progress:read" => Ok(Permission::ProgressRead), + "progress:write" => Ok(Permission::ProgressWrite), "users:read" => Ok(Permission::UsersRead), "users:write" => Ok(Permission::UsersWrite), "users:delete" => Ok(Permission::UsersDelete), @@ -199,6 +207,8 @@ lazy_static::lazy_static! 
{ set.insert(Permission::SeriesRead); set.insert(Permission::BooksRead); set.insert(Permission::PagesRead); + set.insert(Permission::ProgressRead); + set.insert(Permission::ProgressWrite); set.insert(Permission::SystemHealth); set }; @@ -217,6 +227,9 @@ lazy_static::lazy_static! { set.insert(Permission::SeriesRead); set.insert(Permission::BooksRead); set.insert(Permission::PagesRead); + // Progress tracking + set.insert(Permission::ProgressRead); + set.insert(Permission::ProgressWrite); // Own API keys set.insert(Permission::ApiKeysRead); set.insert(Permission::ApiKeysWrite); @@ -333,7 +346,7 @@ mod tests { assert!(READONLY_PERMISSIONS.contains(&Permission::LibrariesRead)); assert!(READONLY_PERMISSIONS.contains(&Permission::BooksRead)); assert!(!READONLY_PERMISSIONS.contains(&Permission::LibrariesWrite)); - assert_eq!(READONLY_PERMISSIONS.len(), 5); + assert_eq!(READONLY_PERMISSIONS.len(), 7); } // ============== Role permission preset tests ============== @@ -354,6 +367,9 @@ mod tests { // Reader cannot view or manage tasks assert!(!READER_PERMISSIONS.contains(&Permission::TasksRead)); assert!(!READER_PERMISSIONS.contains(&Permission::TasksWrite)); + // Reader can track reading progress + assert!(READER_PERMISSIONS.contains(&Permission::ProgressRead)); + assert!(READER_PERMISSIONS.contains(&Permission::ProgressWrite)); // Reader cannot modify content assert!(!READER_PERMISSIONS.contains(&Permission::BooksWrite)); assert!(!READER_PERMISSIONS.contains(&Permission::SeriesWrite)); @@ -362,7 +378,7 @@ mod tests { assert!(!READER_PERMISSIONS.contains(&Permission::UsersRead)); assert!(!READER_PERMISSIONS.contains(&Permission::SystemAdmin)); - assert_eq!(READER_PERMISSIONS.len(), 8); + assert_eq!(READER_PERMISSIONS.len(), 10); } #[test] @@ -389,7 +405,7 @@ mod tests { assert!(!MAINTAINER_PERMISSIONS.contains(&Permission::UsersRead)); assert!(!MAINTAINER_PERMISSIONS.contains(&Permission::SystemAdmin)); - assert_eq!(MAINTAINER_PERMISSIONS.len(), 15); + 
assert_eq!(MAINTAINER_PERMISSIONS.len(), 17); } #[test] @@ -413,7 +429,7 @@ mod tests { // Admin has system admin assert!(ADMIN_PERMISSIONS.contains(&Permission::SystemAdmin)); - assert_eq!(ADMIN_PERMISSIONS.len(), 21); // All permissions + assert_eq!(ADMIN_PERMISSIONS.len(), 23); // All permissions } // ============== UserRole tests ============== diff --git a/src/api/routes/komga/dto/manifest.rs b/src/api/routes/komga/dto/manifest.rs new file mode 100644 index 00000000..7d470af4 --- /dev/null +++ b/src/api/routes/komga/dto/manifest.rs @@ -0,0 +1,72 @@ +//! Readium WebPub Manifest DTOs for Komga-compatible EPUB reading +//! +//! These structures represent the Readium WebPub Manifest format that Komga returns +//! for EPUB books, enabling streaming EPUB reading in compatible apps like Komic. + +use serde::Serialize; +use utoipa::ToSchema; + +/// Readium WebPub Manifest +/// +/// Root structure for the manifest returned by the EPUB manifest endpoint. +/// Matches Komga's WebPub Manifest output for maximum compatibility. 
+#[allow(dead_code)] +#[derive(Debug, Serialize, ToSchema)] +pub struct WebPubManifest { + pub context: String, + pub metadata: WebPubMetadata, + #[serde(rename = "readingOrder")] + pub reading_order: Vec, + pub resources: Vec, + pub toc: Vec, + pub images: Vec, + pub landmarks: Vec, + pub links: Vec, + #[serde(rename = "pageList")] + pub page_list: Vec, +} + +/// Metadata section of the WebPub Manifest +#[allow(dead_code)] +#[derive(Debug, Serialize, ToSchema)] +pub struct WebPubMetadata { + pub identifier: String, + pub title: String, + #[serde(rename = "conformsTo")] + pub conforms_to: String, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub contributor: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub language: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub modified: Option, + #[serde(rename = "numberOfPages")] + pub number_of_pages: i32, + pub rendition: WebPubRendition, +} + +/// Rendition properties for EPUB layout +#[allow(dead_code)] +#[derive(Debug, Serialize, ToSchema)] +pub struct WebPubRendition { + pub layout: String, +} + +/// A link entry in readingOrder or resources +#[derive(Debug, Serialize, ToSchema)] +pub struct WebPubLink { + pub href: String, + #[serde(rename = "type")] + pub media_type: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub rel: Option, +} + +/// A table of contents entry +#[derive(Debug, Serialize, ToSchema)] +pub struct WebPubTocEntry { + pub href: String, + pub title: String, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub children: Vec, +} diff --git a/src/api/routes/komga/dto/mod.rs b/src/api/routes/komga/dto/mod.rs index a9a821fc..49f6cb05 100644 --- a/src/api/routes/komga/dto/mod.rs +++ b/src/api/routes/komga/dto/mod.rs @@ -5,6 +5,7 @@ pub mod book; pub mod library; +pub mod manifest; pub mod page; pub mod pagination; pub mod series; diff --git a/src/api/routes/komga/handlers/manifest.rs b/src/api/routes/komga/handlers/manifest.rs new file mode 100644 index 
00000000..be484fa1 --- /dev/null +++ b/src/api/routes/komga/handlers/manifest.rs @@ -0,0 +1,916 @@ +//! Komga-compatible EPUB manifest and resource handlers +//! +//! Provides endpoints for streaming EPUB reading via the Readium WebPub Manifest format. +//! This enables apps like Komic to read EPUBs without downloading the entire file. + +use super::super::dto::manifest::{WebPubLink, WebPubManifest, WebPubTocEntry}; +use crate::api::{ + error::ApiError, + extractors::{AuthState, FlexibleAuthContext}, + permissions::Permission, +}; +use crate::db::repositories::{BookMetadataRepository, BookRepository, SeriesRepository}; +use crate::parsers::epub::EpubParser; +use crate::require_permission; +use axum::{ + body::Body, + extract::{OriginalUri, Path, State}, + http::{StatusCode, header}, + response::Response, +}; +use std::collections::HashSet; +use std::io::Read; +use std::sync::Arc; +use uuid::Uuid; +use zip::ZipArchive; + +/// Get EPUB manifest (Readium WebPub Manifest) +/// +/// Returns a Readium WebPub Manifest JSON for an EPUB book, enabling +/// streaming EPUB reading in compatible apps. 
+/// +/// ## Endpoint +/// `GET /{prefix}/api/v1/books/{bookId}/manifest/epub` +/// +/// ## Authentication +/// - Bearer token (JWT) +/// - Basic Auth +/// - API Key +#[utoipa::path( + get, + path = "/{prefix}/api/v1/books/{book_id}/manifest/epub", + responses( + (status = 200, description = "EPUB WebPub Manifest", body = WebPubManifest), + (status = 400, description = "Book is not EPUB format"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Book not found"), + ), + params( + ("prefix" = String, Path, description = "Komga API prefix (default: komga)"), + ("book_id" = Uuid, Path, description = "Book ID") + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Komga" +)] +pub async fn get_epub_manifest( + State(state): State>, + FlexibleAuthContext(auth): FlexibleAuthContext, + OriginalUri(uri): OriginalUri, + headers: axum::http::HeaderMap, + Path(book_id): Path, +) -> Result { + require_permission!(auth, Permission::BooksRead)?; + + let book = BookRepository::get_by_id(&state.db, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch book: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Book not found".to_string()))?; + + if book.format.to_lowercase() != "epub" { + return Err(ApiError::BadRequest( + "Book is not in EPUB format".to_string(), + )); + } + + // Build absolute base URL for resource links (Komga returns absolute URLs). + // Derive scheme from X-Forwarded-Proto or default to http. + // Derive host from X-Forwarded-Host, Host header, or fallback. 
+ let scheme = headers + .get("x-forwarded-proto") + .and_then(|v| v.to_str().ok()) + .unwrap_or("http"); + let host = headers + .get("x-forwarded-host") + .or_else(|| headers.get("host")) + .and_then(|v| v.to_str().ok()) + .unwrap_or("localhost"); + + let uri_path = uri.path().to_string(); + let book_path = uri_path + .rfind("/manifest/epub") + .or_else(|| uri_path.rfind("/manifest")) + .map(|pos| &uri_path[..pos]) + .unwrap_or(&uri_path); + let base_url = format!("{}://{}{}", scheme, host, book_path); + + // Open EPUB as ZIP + let file_path = book.file_path.clone(); + let (manifest_items, spine_order, toc_entries, metadata) = + tokio::task::spawn_blocking(move || -> Result<_, ApiError> { + let file = std::fs::File::open(&file_path) + .map_err(|e| ApiError::Internal(format!("Failed to open EPUB file: {}", e)))?; + let mut archive = ZipArchive::new(file) + .map_err(|e| ApiError::Internal(format!("Failed to read EPUB archive: {}", e)))?; + + let opf_path = EpubParser::find_root_file(&mut archive) + .map_err(|e| ApiError::Internal(format!("Failed to find OPF: {}", e)))?; + + let (manifest, spine) = EpubParser::parse_opf(&mut archive, &opf_path) + .map_err(|e| ApiError::Internal(format!("Failed to parse OPF: {}", e)))?; + + // Parse TOC from NCX file + let toc = parse_toc(&mut archive, &manifest, &opf_path); + + Ok((manifest, spine, toc, opf_path)) + }) + .await + .map_err(|e| ApiError::Internal(format!("Task join error: {}", e)))??; + + let _ = metadata; // opf_path not needed further + + // Get book metadata for title/author + let book_metadata = BookMetadataRepository::get_by_book_id(&state.db, book_id) + .await + .ok() + .flatten(); + + let title = book_metadata + .as_ref() + .and_then(|m| m.title.clone()) + .unwrap_or_else(|| book.file_name.clone()); + + let authors: Vec = book_metadata + .as_ref() + .and_then(|m| m.authors_json.as_ref()) + .and_then(|json| { + // authors_json is stored as JSON array of objects with "name" and "role" + 
serde_json::from_str::>(json) + .ok() + .map(|arr| { + arr.iter() + .filter_map(|v| v.get("name").and_then(|n| n.as_str()).map(String::from)) + .collect() + }) + }) + .unwrap_or_default(); + + // Build spine href set for separating reading_order from resources + let spine_hrefs: HashSet<&str> = spine_order.iter().map(|(href, _)| href.as_str()).collect(); + + // Build readingOrder + let reading_order: Vec = spine_order + .iter() + .map(|(href, media_type)| WebPubLink { + href: format!("{}/resource/{}", base_url, encode_resource_path(href)), + media_type: media_type.clone(), + rel: None, + }) + .collect(); + + // Build resources: thumbnail first (matches Komga), then manifest items not in spine + let mut resources: Vec = Vec::new(); + resources.push(WebPubLink { + href: format!("{}/thumbnail", base_url), + media_type: "image/jpeg".to_string(), + rel: None, + }); + resources.extend( + manifest_items + .values() + .filter(|(href, _)| !spine_hrefs.contains(href.as_str())) + .map(|(href, media_type)| WebPubLink { + href: format!("{}/resource/{}", base_url, encode_resource_path(href)), + media_type: media_type.clone(), + rel: None, + }), + ); + + // Build TOC with rewritten hrefs + let toc: Vec = toc_entries + .into_iter() + .map(|entry| rewrite_toc_hrefs(entry, &base_url)) + .collect(); + + // Build self and acquisition links (matches Komga format) + let manifest_href = format!("{}/manifest", base_url); + let file_href = format!("{}/file", base_url); + let links = vec![ + WebPubLink { + href: manifest_href, + media_type: "application/webpub+json".to_string(), + rel: Some("self".to_string()), + }, + WebPubLink { + href: file_href, + media_type: "application/epub+zip".to_string(), + rel: Some("http://opds-spec.org/acquisition".to_string()), + }, + ]; + + // Fetch series info for belongsTo (matches Komga's output) + let series = SeriesRepository::get_by_id(&state.db, book.series_id) + .await + .ok() + .flatten(); + + // Build manifest as raw JSON to exactly match Komga's 
Jackson output. + // Komga uses @JsonInclude(NON_NULL) on WPPublicationDto, so null fields + // are omitted but empty arrays are kept. Keys are alphabetical (Jackson default). + let mut metadata_obj = serde_json::Map::new(); + + // belongsTo (series info) - Komga includes this + if let Some(ref series) = series { + let position: f64 = book_metadata + .as_ref() + .and_then(|m| m.number) + .and_then(|n| n.to_string().parse::().ok()) + .unwrap_or(0.0); + // Use integer if whole number, to match Komga's output + let position = if position.fract() == 0.0 { + serde_json::json!(position as i64) + } else { + serde_json::json!(position) + }; + metadata_obj.insert( + "belongsTo".to_string(), + serde_json::json!({ + "series": [{ + "name": series.name, + "position": position + }] + }), + ); + } + + metadata_obj.insert( + "conformsTo".to_string(), + serde_json::Value::String("https://readium.org/webpub-manifest/profiles/epub".to_string()), + ); + if !authors.is_empty() { + metadata_obj.insert( + "contributor".to_string(), + serde_json::Value::Array(authors.into_iter().map(serde_json::Value::String).collect()), + ); + } + + // Use ISBN if available, fall back to UUID + let identifier = book_metadata + .as_ref() + .and_then(|m| m.isbns.as_ref()) + .and_then(|isbns| { + // isbns may be comma-separated or JSON array; take first one + let isbn = isbns.trim().trim_start_matches('[').trim_start_matches('"'); + let end = isbn.find(['"', ',', ']']).unwrap_or(isbn.len()); + let isbn = isbn[..end].trim(); + if isbn.is_empty() { + None + } else { + Some(format!("urn:isbn:{}", isbn)) + } + }) + .unwrap_or_else(|| format!("urn:uuid:{}", book_id)); + metadata_obj.insert( + "identifier".to_string(), + serde_json::Value::String(identifier), + ); + + // Language + if let Some(ref lang) = book_metadata.as_ref().and_then(|m| m.language_iso.clone()) { + metadata_obj.insert( + "language".to_string(), + serde_json::Value::String(lang.clone()), + ); + } + + // Modified timestamp + metadata_obj.insert( 
+ "modified".to_string(), + serde_json::Value::String(book.modified_at.to_rfc3339()), + ); + + metadata_obj.insert( + "numberOfPages".to_string(), + serde_json::Value::Number(book.page_count.into()), + ); + metadata_obj.insert( + "rendition".to_string(), + serde_json::json!({"layout": "reflowable"}), + ); + metadata_obj.insert("title".to_string(), serde_json::Value::String(title)); + + fn links_to_json(links: &[WebPubLink]) -> serde_json::Value { + serde_json::Value::Array( + links + .iter() + .map(|l| { + let mut m = serde_json::Map::new(); + m.insert( + "href".to_string(), + serde_json::Value::String(l.href.clone()), + ); + if let Some(ref rel) = l.rel { + m.insert("rel".to_string(), serde_json::Value::String(rel.clone())); + } + m.insert( + "type".to_string(), + serde_json::Value::String(l.media_type.clone()), + ); + serde_json::Value::Object(m) + }) + .collect(), + ) + } + + fn toc_to_json(entries: &[WebPubTocEntry]) -> serde_json::Value { + serde_json::Value::Array( + entries + .iter() + .map(|e| { + let mut m = serde_json::Map::new(); + if !e.children.is_empty() { + m.insert("children".to_string(), toc_to_json(&e.children)); + } + m.insert( + "href".to_string(), + serde_json::Value::String(e.href.clone()), + ); + m.insert( + "title".to_string(), + serde_json::Value::String(e.title.clone()), + ); + serde_json::Value::Object(m) + }) + .collect(), + ) + } + + // Build top-level object with keys in alphabetical order (matching Komga/Jackson) + let mut root = serde_json::Map::new(); + root.insert( + "context".to_string(), + serde_json::Value::String("https://readium.org/webpub-manifest/context.jsonld".to_string()), + ); + root.insert("images".to_string(), serde_json::Value::Array(Vec::new())); + root.insert( + "landmarks".to_string(), + serde_json::Value::Array(Vec::new()), + ); + root.insert("links".to_string(), links_to_json(&links)); + root.insert( + "metadata".to_string(), + serde_json::Value::Object(metadata_obj), + ); + root.insert("pageList".to_string(), 
serde_json::Value::Array(Vec::new())); + root.insert("readingOrder".to_string(), links_to_json(&reading_order)); + root.insert("resources".to_string(), links_to_json(&resources)); + root.insert("toc".to_string(), toc_to_json(&toc)); + + let body = serde_json::to_vec(&serde_json::Value::Object(root)) + .map_err(|e| ApiError::Internal(format!("Failed to serialize manifest: {}", e)))?; + + Ok(Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, "application/webpub+json") + .header(header::CONTENT_LENGTH, body.len()) + .body(Body::from(body)) + .unwrap()) +} + +/// Get a resource file from within an EPUB +/// +/// Serves individual files (XHTML chapters, CSS, images, fonts) from within +/// an EPUB archive. Used by EPUB readers to load content referenced in the manifest. +/// +/// ## Endpoint +/// `GET /{prefix}/api/v1/books/{bookId}/resource/*resource` +/// +/// ## Authentication +/// - Bearer token (JWT) +/// - Basic Auth +/// - API Key +#[utoipa::path( + get, + path = "/{prefix}/api/v1/books/{book_id}/resource/{resource}", + responses( + (status = 200, description = "Resource file content"), + (status = 400, description = "Invalid resource path"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Book or resource not found"), + ), + params( + ("prefix" = String, Path, description = "Komga API prefix (default: komga)"), + ("book_id" = Uuid, Path, description = "Book ID"), + ("resource" = String, Path, description = "Resource path within the EPUB") + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Komga" +)] +pub async fn get_epub_resource( + State(state): State>, + FlexibleAuthContext(auth): FlexibleAuthContext, + Path((book_id, resource)): Path<(Uuid, String)>, +) -> Result { + require_permission!(auth, Permission::BooksRead)?; + + // Decode percent-encoded path; strip leading '/' from wildcard capture + let resource = resource.strip_prefix('/').unwrap_or(&resource); + let resource = 
percent_decode(resource); + + // Security: reject path traversal attempts + if resource.contains("..") || resource.starts_with('/') { + return Err(ApiError::BadRequest("Invalid resource path".to_string())); + } + + let book = BookRepository::get_by_id(&state.db, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch book: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Book not found".to_string()))?; + + if book.format.to_lowercase() != "epub" { + return Err(ApiError::BadRequest( + "Book is not in EPUB format".to_string(), + )); + } + + let file_path = book.file_path.clone(); + let resource_path = resource.clone(); + + let (data, content_type) = + tokio::task::spawn_blocking(move || -> Result<(Vec, String), ApiError> { + let file = std::fs::File::open(&file_path) + .map_err(|e| ApiError::Internal(format!("Failed to open EPUB file: {}", e)))?; + let mut archive = ZipArchive::new(file) + .map_err(|e| ApiError::Internal(format!("Failed to read EPUB archive: {}", e)))?; + + let mut entry = archive.by_name(&resource_path).map_err(|_| { + ApiError::NotFound(format!("Resource not found in EPUB: {}", resource_path)) + })?; + + let mut buf = Vec::with_capacity(entry.size() as usize); + entry + .read_to_end(&mut buf) + .map_err(|e| ApiError::Internal(format!("Failed to read resource: {}", e)))?; + + // Determine content type from file extension + let ct = mime_guess::from_path(&resource_path) + .first_or_octet_stream() + .to_string(); + + Ok((buf, ct)) + }) + .await + .map_err(|e| ApiError::Internal(format!("Task join error: {}", e)))??; + + Ok(Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, content_type) + .header(header::CONTENT_LENGTH, data.len()) + .header(header::CACHE_CONTROL, "public, max-age=86400") + .body(Body::from(data)) + .unwrap()) +} + +// ============================================================================ +// Helper Functions +// 
============================================================================ + +/// Well-known manifest item IDs for NCX files (matches Komga's fallback) +const POSSIBLE_NCX_ITEM_IDS: &[&str] = &["toc", "ncx", "ncxtoc"]; + +/// Parse TOC from NCX (EPUB 2) or nav document (EPUB 3) +fn parse_toc( + archive: &mut ZipArchive, + manifest: &std::collections::HashMap, + _opf_path: &str, +) -> Vec { + // Try EPUB 3 nav document first: check all xhtml files for epub:type="toc" + for (nav_href, _) in manifest + .values() + .filter(|(_, mt)| mt == "application/xhtml+xml") + { + if let Ok(entries) = parse_nav_doc(archive, nav_href) + && !entries.is_empty() + { + return entries; + } + } + + // Try EPUB 2 NCX: first by media type, then by well-known item IDs + let ncx_href = manifest + .values() + .find(|(_, mt)| mt == "application/x-dtbncx+xml") + .map(|(href, _)| href.as_str()) + .or_else(|| { + manifest + .iter() + .find(|(id, _)| POSSIBLE_NCX_ITEM_IDS.contains(&id.to_lowercase().as_str())) + .map(|(_, (href, _))| href.as_str()) + }); + + if let Some(ncx_href) = ncx_href + && let Ok(entries) = parse_ncx(archive, ncx_href) + && !entries.is_empty() + { + return entries; + } + + Vec::new() +} + +/// Parse NCX file for table of contents +fn parse_ncx( + archive: &mut ZipArchive, + ncx_href: &str, +) -> Result, ()> { + let mut ncx_file = archive.by_name(ncx_href).map_err(|_| ())?; + let mut content = String::new(); + ncx_file.read_to_string(&mut content).map_err(|_| ())?; + + // Determine base path from NCX href for resolving relative paths + let base_path = ncx_href + .rfind('/') + .map(|pos| &ncx_href[..pos + 1]) + .unwrap_or(""); + + Ok(parse_nav_points(&content, base_path)) +} + +/// Recursively parse navPoint elements from NCX content +fn parse_nav_points(content: &str, base_path: &str) -> Vec { + let mut entries = Vec::new(); + let mut remaining = content; + + while let Some(np_start) = remaining.find(" tag + let Some(inner_start) = nav_point_content.find('>') else { + 
break; + }; + let inner = &nav_point_content[inner_start + 1..]; + + // Extract navLabel > text + let title = extract_between(inner, "", "") + .or_else(|| extract_between(inner, "", "")) + .unwrap_or_default(); + + // Extract content src + let href = inner + .find(" tag +fn find_closing_nav_point(content: &str) -> Option { + let mut depth = 0; + let mut pos = 0; + + while pos < content.len() { + if content[pos..].starts_with("") { + depth -= 1; + if depth == 0 { + return Some(pos + 11); // skip "" + } + pos += 11; + } else { + pos += 1; + } + } + None +} + +/// Parse EPUB 3 nav document for table of contents +fn parse_nav_doc( + archive: &mut ZipArchive, + nav_href: &str, +) -> Result, ()> { + let mut nav_file = archive.by_name(nav_href).map_err(|_| ())?; + let mut content = String::new(); + nav_file.read_to_string(&mut content).map_err(|_| ())?; + + // Look for + let toc_nav = content + .find("epub:type=\"toc\"") + .or_else(|| content.find("epub:type='toc'")); + + let Some(nav_pos) = toc_nav else { + return Ok(Vec::new()); + }; + + // Find the
    within this nav + let nav_section = &content[nav_pos..]; + let Some(ol_start) = nav_section.find(" element from EPUB 3 nav document +fn parse_nav_ol(content: &str, base_path: &str) -> Vec { + let mut entries = Vec::new(); + let mut remaining = content; + + while let Some(li_start) = remaining.find("Title + if let Some(a_start) = li_content.find("", "") + .map(|t| strip_html_tags(&t)) + .unwrap_or_default(); + + let full_href = if href.is_empty() || href.starts_with('/') { + href + } else { + format!("{}{}", base_path, href) + }; + + // Check for nested
      (children) + let children = if let Some(ol_pos) = li_content.find(" Option { + let close_tag = format!("", tag); + content.find(&close_tag).map(|pos| pos + close_tag.len()) +} + +/// Extract text between two delimiters +fn extract_between(content: &str, start: &str, end: &str) -> Option { + let s = content.find(start)?; + let after = &content[s + start.len()..]; + let e = after.find(end)?; + Some(after[..e].trim().to_string()) +} + +/// Extract an attribute value from an XML/HTML tag +fn extract_attr(tag: &str, attr: &str) -> Option { + let pattern = format!("{}=\"", attr); + let start = tag.find(&pattern)?; + let after = &tag[start + pattern.len()..]; + let end = after.find('"')?; + Some(after[..end].to_string()) +} + +/// Strip HTML tags from a string, leaving only text content +fn strip_html_tags(input: &str) -> String { + let mut result = String::with_capacity(input.len()); + let mut in_tag = false; + for ch in input.chars() { + match ch { + '<' => in_tag = true, + '>' => in_tag = false, + _ if !in_tag => result.push(ch), + _ => {} + } + } + result.trim().to_string() +} + +/// Rewrite TOC entry hrefs to point to the resource endpoint +fn rewrite_toc_hrefs(entry: WebPubTocEntry, base_url: &str) -> WebPubTocEntry { + WebPubTocEntry { + href: format!( + "{}/resource/{}", + base_url, + encode_resource_path(&entry.href) + ), + title: entry.title, + children: entry + .children + .into_iter() + .map(|child| rewrite_toc_hrefs(child, base_url)) + .collect(), + } +} + +/// Percent-encode a resource path for use in URLs, preserving path separators and common chars +fn encode_resource_path(path: &str) -> String { + // For resource paths, we mostly just need to handle spaces and special chars. + // Keep path separators, alphanumeric, dots, hyphens, and underscores as-is. + let mut result = String::with_capacity(path.len()); + for byte in path.bytes() { + match byte { + b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'.' 
| b'_' | b'~' | b'/' | b'#' => { + result.push(byte as char); + } + _ => { + result.push('%'); + result.push_str(&format!("{:02X}", byte)); + } + } + } + result +} + +/// Decode a percent-encoded path +fn percent_decode(path: &str) -> String { + let mut result = Vec::with_capacity(path.len()); + let bytes = path.as_bytes(); + let mut i = 0; + while i < bytes.len() { + if bytes[i] == b'%' + && i + 2 < bytes.len() + && let Ok(byte) = u8::from_str_radix(&path[i + 1..i + 3], 16) + { + result.push(byte); + i += 3; + continue; + } + result.push(bytes[i]); + i += 1; + } + String::from_utf8_lossy(&result).into_owned() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_nav_points_flat() { + let ncx = r#" + + Cover + + + + Chapter 1 + + + "#; + let entries = parse_nav_points(ncx, "OEBPS/"); + assert_eq!(entries.len(), 2); + assert_eq!(entries[0].title, "Cover"); + assert_eq!(entries[0].href, "OEBPS/cover.xhtml"); + assert_eq!(entries[1].title, "Chapter 1"); + assert!(entries[0].children.is_empty()); + } + + #[test] + fn test_parse_nav_points_nested() { + let ncx = r#" + + Prologue + + + + Book One: King + + + Chapter 1 + + + + Chapter 2 + + + + + Epilogue + + + "#; + let entries = parse_nav_points(ncx, "OEBPS/"); + assert_eq!(entries.len(), 3, "Should have 3 top-level entries"); + assert_eq!(entries[0].title, "Prologue"); + assert!(entries[0].children.is_empty()); + + assert_eq!(entries[1].title, "Book One: King"); + assert_eq!(entries[1].children.len(), 2); + assert_eq!(entries[1].children[0].title, "Chapter 1"); + assert_eq!(entries[1].children[0].href, "OEBPS/ch1-1.xhtml"); + assert_eq!(entries[1].children[1].title, "Chapter 2"); + + assert_eq!(entries[2].title, "Epilogue"); + assert!(entries[2].children.is_empty()); + } + + #[test] + fn test_parse_toc_ncx_found_by_item_id() { + // Simulates the case where NCX has media-type="text/xml" (not the standard + // application/x-dtbncx+xml), but the manifest item ID is "ncx" + let epub_path = 
"docker/data/libraries/Books/Stephen R. Lawhead/Merlin/2 - Stephen R. Lawhead - Merlin.epub"; + let Ok(file) = std::fs::File::open(epub_path) else { + return; + }; + let mut archive = zip::ZipArchive::new(file).unwrap(); + + // Build manifest map matching what parse_opf produces for this EPUB: + // id="ncx", href="OEBPS/toc.ncx", media-type="text/xml" + let mut manifest = std::collections::HashMap::new(); + manifest.insert( + "ncx".to_string(), + ("OEBPS/toc.ncx".to_string(), "text/xml".to_string()), + ); + + let entries = parse_toc(&mut archive, &manifest, "OEBPS/content.opf"); + assert!( + !entries.is_empty(), + "TOC should be found via NCX item ID fallback" + ); + + let titles: Vec<&str> = entries.iter().map(|e| e.title.as_str()).collect(); + assert!(titles.contains(&"Cover")); + } + + #[test] + fn test_parse_ncx_real_epub() { + // Test with the actual Merlin EPUB if available + let epub_path = "docker/data/libraries/Books/Stephen R. Lawhead/Merlin/2 - Stephen R. Lawhead - Merlin.epub"; + let Ok(file) = std::fs::File::open(epub_path) else { + // Skip if fixture not available + return; + }; + let mut archive = zip::ZipArchive::new(file).unwrap(); + let entries = parse_ncx(&mut archive, "OEBPS/toc.ncx").unwrap(); + + assert!(!entries.is_empty(), "TOC should not be empty"); + + // Komga produces these top-level entries for this book + let titles: Vec<&str> = entries.iter().map(|e| e.title.as_str()).collect(); + assert!(titles.contains(&"Cover")); + assert!(titles.contains(&"Prologue")); + assert!(titles.contains(&"Epilogue")); + + // "Book One: King" should have chapter children + let book_one = entries + .iter() + .find(|e| e.title.contains("Book One")) + .unwrap(); + assert!( + !book_one.children.is_empty(), + "Book One should have chapter children" + ); + assert_eq!(book_one.children[0].title, "Chapter 1"); + } +} diff --git a/src/api/routes/komga/handlers/mod.rs b/src/api/routes/komga/handlers/mod.rs index b404f671..563acc10 100644 --- 
a/src/api/routes/komga/handlers/mod.rs +++ b/src/api/routes/komga/handlers/mod.rs @@ -5,6 +5,7 @@ pub mod books; pub mod libraries; +pub mod manifest; pub mod pages; pub mod read_progress; pub mod series; @@ -17,9 +18,11 @@ pub use books::{ get_previous_book, search_books, }; pub use libraries::{get_library, get_library_thumbnail, list_libraries}; +pub use manifest::{get_epub_manifest, get_epub_resource}; pub use pages::{get_page, get_page_thumbnail, list_pages}; pub use read_progress::{ - delete_progress, mark_series_as_read, mark_series_as_unread, update_progress, + delete_progress, get_progression, mark_series_as_read, mark_series_as_unread, put_progression, + update_progress, }; pub use series::{ get_series, get_series_books, get_series_new, get_series_thumbnail, get_series_updated, @@ -44,12 +47,15 @@ pub use books::{ pub use libraries::{__path_get_library, __path_get_library_thumbnail, __path_list_libraries}; #[doc(hidden)] #[allow(unused_imports)] +pub use manifest::{__path_get_epub_manifest, __path_get_epub_resource}; +#[doc(hidden)] +#[allow(unused_imports)] pub use pages::{__path_get_page, __path_get_page_thumbnail, __path_list_pages}; #[doc(hidden)] #[allow(unused_imports)] pub use read_progress::{ - __path_delete_progress, __path_mark_series_as_read, __path_mark_series_as_unread, - __path_update_progress, + __path_delete_progress, __path_get_progression, __path_mark_series_as_read, + __path_mark_series_as_unread, __path_put_progression, __path_update_progress, }; #[doc(hidden)] #[allow(unused_imports)] diff --git a/src/api/routes/komga/handlers/read_progress.rs b/src/api/routes/komga/handlers/read_progress.rs index b5c63a2d..8b476840 100644 --- a/src/api/routes/komga/handlers/read_progress.rs +++ b/src/api/routes/komga/handlers/read_progress.rs @@ -15,6 +15,7 @@ use crate::require_permission; use axum::{ extract::{Path, State}, http::StatusCode, + response::{IntoResponse, Response}, }; use std::sync::Arc; use uuid::Uuid; @@ -196,6 +197,195 @@ mod tests 
{ } } +// ============================================================================ +// R2Progression (Readium) Handlers +// ============================================================================ + +/// Get book progression (R2Progression / Readium standard) +/// +/// Returns the stored R2Progression JSON for EPUB reading position sync. +/// Used by Komic and other Readium-compatible readers. +/// +/// ## Endpoint +/// `GET /{prefix}/api/v1/books/{bookId}/progression` +/// +/// ## Response +/// - 200 with R2Progression JSON if progression exists +/// - 204 No Content if no progression exists +#[utoipa::path( + get, + path = "/{prefix}/api/v1/books/{book_id}/progression", + responses( + (status = 200, description = "Progression data", content_type = "application/json"), + (status = 204, description = "No progression exists"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Book not found"), + ), + params( + ("prefix" = String, Path, description = "Komga API prefix (default: komga)"), + ("book_id" = Uuid, Path, description = "Book ID") + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Komga" +)] +pub async fn get_progression( + State(state): State>, + FlexibleAuthContext(auth): FlexibleAuthContext, + Path(book_id): Path, +) -> Result { + require_permission!(auth, Permission::BooksRead)?; + + // Verify book exists + BookRepository::get_by_id(&state.db, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch book: {}", e)))? 
+ .ok_or_else(|| ApiError::NotFound("Book not found".to_string()))?; + + let progress = ReadProgressRepository::get_by_user_and_book(&state.db, auth.user_id, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch progress: {}", e)))?; + + match progress.and_then(|p| p.r2_progression) { + Some(json_str) => { + let json_value: serde_json::Value = serde_json::from_str(&json_str) + .map_err(|e| ApiError::Internal(format!("Invalid R2Progression JSON: {}", e)))?; + Ok(axum::Json(json_value).into_response()) + } + None => Ok(StatusCode::NO_CONTENT.into_response()), + } +} + +/// Update book progression (R2Progression / Readium standard) +/// +/// Stores R2Progression JSON and also updates the underlying read progress +/// (current_page, progress_percentage, completed) for backwards compatibility. +/// +/// ## Endpoint +/// `PUT /{prefix}/api/v1/books/{bookId}/progression` +/// +/// ## Request Body +/// R2Progression JSON with `device`, `locator`, and `modified` fields +/// +/// ## Response +/// - 204 No Content on success +#[utoipa::path( + put, + path = "/{prefix}/api/v1/books/{book_id}/progression", + request_body = serde_json::Value, + responses( + (status = 204, description = "Progression updated successfully"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Book not found"), + ), + params( + ("prefix" = String, Path, description = "Komga API prefix (default: komga)"), + ("book_id" = Uuid, Path, description = "Book ID") + ), + security( + ("jwt_bearer" = []), + ("api_key" = []) + ), + tag = "Komga" +)] +pub async fn put_progression( + State(state): State>, + FlexibleAuthContext(auth): FlexibleAuthContext, + Path(book_id): Path, + axum::Json(body): axum::Json, +) -> Result { + require_permission!(auth, Permission::BooksRead)?; + + let book = BookRepository::get_by_id(&state.db, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch book: {}", e)))? 
+ .ok_or_else(|| ApiError::NotFound("Book not found".to_string()))?; + + // Extract totalProgression and href from the R2Progression locator + let client_total_progression = body + .get("locator") + .and_then(|l| l.get("locations")) + .and_then(|l| l.get("totalProgression")) + .and_then(|v| v.as_f64()) + .unwrap_or(0.0); + + let client_href = body + .get("locator") + .and_then(|l| l.get("href")) + .and_then(|v| v.as_str()) + .unwrap_or(""); + + // Normalize totalProgression using server-side positions if available + let (total_progression, current_page) = if let Some(ref positions_json) = book.epub_positions { + if let Ok(positions) = + serde_json::from_str::>(positions_json) + { + if let Some((normalized, position)) = crate::parsers::normalize_progression( + &positions, + client_href, + client_total_progression, + ) { + (normalized, position) + } else { + // Fallback: no matching position found + let page = if book.page_count > 0 { + (client_total_progression * book.page_count as f64) + .round() + .max(1.0) as i32 + } else { + 1 + }; + (client_total_progression, page) + } + } else { + // Fallback: couldn't parse positions JSON + let page = if book.page_count > 0 { + (client_total_progression * book.page_count as f64) + .round() + .max(1.0) as i32 + } else { + 1 + }; + (client_total_progression, page) + } + } else { + // No positions available, use client value directly + let page = if book.page_count > 0 { + (client_total_progression * book.page_count as f64) + .round() + .max(1.0) as i32 + } else { + 1 + }; + (client_total_progression, page) + }; + + let completed = + total_progression >= 0.98 || (book.page_count > 0 && current_page >= book.page_count); + + // Store the R2Progression as-is from the client. + // Each client uses its own locator (href + progression/CFI) for navigation. + // The normalized values are only used for internal tracking (current_page, percentage). 
+ let json_str = serde_json::to_string(&body) + .map_err(|e| ApiError::Internal(format!("Failed to serialize R2Progression: {}", e)))?; + + ReadProgressRepository::upsert_with_percentage( + &state.db, + auth.user_id, + book_id, + current_page, + Some(total_progression), + completed, + Some(json_str), + ) + .await + .map_err(|e| ApiError::Internal(format!("Failed to update progression: {}", e)))?; + + Ok(StatusCode::NO_CONTENT) +} + // ============================================================================ // Series Read Progress Handlers // ============================================================================ diff --git a/src/api/routes/komga/routes/books.rs b/src/api/routes/komga/routes/books.rs index c5cf237c..e47b3ea7 100644 --- a/src/api/routes/komga/routes/books.rs +++ b/src/api/routes/komga/routes/books.rs @@ -37,4 +37,16 @@ pub fn routes(_state: Arc) -> Router> { "/books/{book_id}/previous", get(handlers::get_previous_book), ) + .route( + "/books/{book_id}/manifest", + get(handlers::get_epub_manifest), + ) + .route( + "/books/{book_id}/manifest/epub", + get(handlers::get_epub_manifest), + ) + .route( + "/books/{book_id}/resource/{*resource}", + get(handlers::get_epub_resource), + ) } diff --git a/src/api/routes/komga/routes/read_progress.rs b/src/api/routes/komga/routes/read_progress.rs index aab23a80..80213797 100644 --- a/src/api/routes/komga/routes/read_progress.rs +++ b/src/api/routes/komga/routes/read_progress.rs @@ -6,7 +6,7 @@ use super::super::handlers; use crate::api::extractors::AppState; use axum::{ Router, - routing::{patch, post}, + routing::{get, patch, post}, }; use std::sync::Arc; @@ -15,6 +15,8 @@ use std::sync::Arc; /// Routes: /// - `PATCH /books/{book_id}/read-progress` - Update reading progress /// - `DELETE /books/{book_id}/read-progress` - Delete reading progress (mark as unread) +/// - `GET /books/{book_id}/progression` - Get R2Progression (Readium) +/// - `PUT /books/{book_id}/progression` - Update R2Progression (Readium) 
/// - `POST /series/{series_id}/read-progress` - Mark all books in series as read /// - `DELETE /series/{series_id}/read-progress` - Mark all books in series as unread pub fn routes(_state: Arc) -> Router> { @@ -23,6 +25,10 @@ pub fn routes(_state: Arc) -> Router> { "/books/{book_id}/read-progress", patch(handlers::update_progress).delete(handlers::delete_progress), ) + .route( + "/books/{book_id}/progression", + get(handlers::get_progression).put(handlers::put_progression), + ) .route( "/series/{series_id}/read-progress", post(handlers::mark_series_as_read).delete(handlers::mark_series_as_unread), diff --git a/src/api/routes/koreader/handlers/sync.rs b/src/api/routes/koreader/handlers/sync.rs index 422f9e97..d87287a6 100644 --- a/src/api/routes/koreader/handlers/sync.rs +++ b/src/api/routes/koreader/handlers/sync.rs @@ -1,9 +1,16 @@ //! KOReader sync progress handlers +//! +//! Converts between KOReader's DocFragment format and Codex's R2Progression +//! (Readium standard) so that progress is shared across all clients (web reader, +//! KOReader, OPDS apps). use crate::api::error::ApiError; use crate::api::extractors::{AuthContext, AuthState}; +use crate::api::permissions::Permission; use crate::api::routes::koreader::dto::progress::DocumentProgressDto; +use crate::db::entities::books; use crate::db::repositories::{BookRepository, ReadProgressRepository}; +use crate::parsers::EpubPosition; use axum::Json; use axum::extract::{Path, State}; use std::sync::Arc; @@ -11,48 +18,35 @@ use std::sync::Arc; /// GET /koreader/syncs/progress/{document} /// /// Get reading progress for a document identified by its KOReader hash. -/// Returns the stored progress if found. +/// Converts stored R2Progression back to KOReader's DocFragment format. 
pub async fn get_progress( State(state): State>, auth: AuthContext, Path(document_hash): Path, ) -> Result, ApiError> { + auth.require_permission(&Permission::ProgressRead)?; let user_id = auth.user_id; - // Find book by koreader_hash - let books = BookRepository::find_by_koreader_hash(&state.db, &document_hash) - .await - .map_err(|e| ApiError::Internal(format!("Failed to find book: {}", e)))?; - - if books.is_empty() { - return Err(ApiError::NotFound( - "No book found with this hash".to_string(), - )); - } - - if books.len() > 1 { - return Err(ApiError::Conflict( - "Multiple books found with the same hash".to_string(), - )); - } + let book = find_book_by_hash(&state, &document_hash).await?; - let book = &books[0]; - - // Get reading progress for this user and book let progress = ReadProgressRepository::get_by_user_and_book(&state.db, user_id, book.id) .await .map_err(|e| ApiError::Internal(format!("Failed to get progress: {}", e)))?; match progress { Some(p) => { - // Convert internal progress to KOReader format - // For PDF/CBZ: progress is the page number as a string - // For EPUB: we store page number but KOReader expects DocFragment format - let progress_str = p.current_page.to_string(); let percentage = p .progress_percentage .unwrap_or_else(|| p.current_page as f64 / book.page_count.max(1) as f64); + // Convert R2Progression back to KOReader format + let progress_str = if book.format == "epub" { + r2_progression_to_koreader(&p.r2_progression, &book) + .unwrap_or_else(|| p.current_page.to_string()) + } else { + p.current_page.to_string() + }; + Ok(Json(DocumentProgressDto { document: document_hash, progress: progress_str, @@ -70,41 +64,39 @@ pub async fn get_progress( /// PUT /koreader/syncs/progress /// /// Update reading progress for a document identified by its KOReader hash. +/// Converts KOReader's DocFragment format to R2Progression for unified storage. 
pub async fn update_progress( State(state): State>, auth: AuthContext, Json(request): Json, ) -> Result, ApiError> { + auth.require_permission(&Permission::ProgressWrite)?; let user_id = auth.user_id; - // Find book by koreader_hash - let books = BookRepository::find_by_koreader_hash(&state.db, &request.document) - .await - .map_err(|e| ApiError::Internal(format!("Failed to find book: {}", e)))?; - - if books.is_empty() { - return Err(ApiError::NotFound( - "No book found with this hash".to_string(), - )); - } - - if books.len() > 1 { - return Err(ApiError::Conflict( - "Multiple books found with the same hash".to_string(), - )); - } + tracing::debug!( + koreader_hash = %request.document, + progress = %request.progress, + percentage = request.percentage, + device = %request.device, + device_id = %request.device_id, + "KOReader progress update request" + ); - let book = &books[0]; + let book = find_book_by_hash(&state, &request.document).await?; // Parse progress string to page number - // For PDF/CBZ: progress is the page number as a string - // For EPUB: progress is a DocFragment XPath string, extract the index let current_page = parse_koreader_progress(&request.progress, &book.format); + // For EPUB: convert KOReader progress to R2Progression JSON + let r2_progression = if book.format == "epub" { + koreader_to_r2_progression(&request.progress, request.percentage, &book) + } else { + None + }; + let completed = request.percentage >= 0.98 || (book.page_count > 0 && current_page >= book.page_count); - // Update progress ReadProgressRepository::upsert_with_percentage( &state.db, user_id, @@ -112,6 +104,7 @@ pub async fn update_progress( current_page, Some(request.percentage), completed, + r2_progression, ) .await .map_err(|e| ApiError::Internal(format!("Failed to update progress: {}", e)))?; @@ -119,11 +112,110 @@ pub async fn update_progress( Ok(Json(request)) } +/// Find a single book by KOReader hash, returning appropriate errors. 
+async fn find_book_by_hash(state: &AuthState, hash: &str) -> Result { + let books = BookRepository::find_by_koreader_hash(&state.db, hash) + .await + .map_err(|e| ApiError::Internal(format!("Failed to find book: {}", e)))?; + + if books.is_empty() { + return Err(ApiError::NotFound( + "No book found with this hash".to_string(), + )); + } + + if books.len() > 1 { + return Err(ApiError::Conflict( + "Multiple books found with the same hash".to_string(), + )); + } + + Ok(books.into_iter().next().unwrap()) +} + +/// Get the unique hrefs (spine items) from epub_positions, preserving order. +fn get_spine_hrefs(positions: &[EpubPosition]) -> Vec<&str> { + let mut hrefs: Vec<&str> = Vec::new(); + for p in positions { + if hrefs.last().is_none_or(|last| *last != p.href.as_str()) { + hrefs.push(&p.href); + } + } + hrefs +} + +/// Parse the book's epub_positions JSON. +fn parse_epub_positions(book: &books::Model) -> Option> { + book.epub_positions + .as_ref() + .and_then(|json| serde_json::from_str::>(json).ok()) +} + +/// Convert KOReader DocFragment progress to R2Progression JSON. +/// +/// Maps DocFragment index (1-based spine item) to the corresponding EPUB href +/// from the book's positions list, then builds an R2Progression object. 
+fn koreader_to_r2_progression( + progress: &str, + percentage: f64, + book: &books::Model, +) -> Option { + let doc_fragment_index = parse_epub_progress(progress); + let positions = parse_epub_positions(book)?; + let hrefs = get_spine_hrefs(&positions); + + // DocFragment index is 1-based, convert to 0-based + let spine_index = (doc_fragment_index - 1).max(0) as usize; + let href = hrefs.get(spine_index)?; + + let r2 = serde_json::json!({ + "locator": { + "href": href, + "type": "application/xhtml+xml", + "locations": { + "totalProgression": percentage + } + }, + "device": { + "id": "koreader", + "name": "KOReader" + }, + "modified": chrono::Utc::now().to_rfc3339() + }); + + serde_json::to_string(&r2).ok() +} + +/// Convert stored R2Progression back to KOReader DocFragment format. +/// +/// Finds which spine index the stored href corresponds to, then returns +/// `/body/DocFragment[N].0` where N is the 1-based index. +fn r2_progression_to_koreader(r2_json: &Option, book: &books::Model) -> Option { + let json_str = r2_json.as_ref()?; + let r2: serde_json::Value = serde_json::from_str(json_str).ok()?; + let href = r2.get("locator")?.get("href")?.as_str()?; + + let positions = parse_epub_positions(book)?; + let hrefs = get_spine_hrefs(&positions); + + // Find which spine index this href corresponds to (with suffix matching) + let href_clean = href.split('#').next().unwrap_or(href); + let href_decoded = urlencoding::decode(href_clean).unwrap_or_else(|_| href_clean.into()); + + let spine_index = hrefs.iter().position(|h| { + *h == href_decoded.as_ref() + || href_decoded.ends_with(*h) + || h.ends_with(href_decoded.as_ref()) + })?; + + // Convert 0-based spine index to 1-based DocFragment index + Some(format!("/body/DocFragment[{}].0", spine_index + 1)) +} + /// Parse KOReader progress string into a page number /// /// For PDF/CBZ (pre-paginated): progress is just a page number string like "42" -/// For EPUB: progress is a DocFragment XPath like 
"/body/DocFragment[10]/body/div/p[1]/text().0" -/// or a TOC-based format like "#_doc_fragment_44_ c37" +/// For EPUB: progress is a DocFragment XPath, extract the spine index fn parse_koreader_progress(progress: &str, format: &str) -> i32 { match format { "epub" => parse_epub_progress(progress), @@ -202,4 +294,191 @@ mod tests { fn test_parse_epub_plain_number() { assert_eq!(parse_koreader_progress("5", "epub"), 5); } + + #[test] + fn test_get_spine_hrefs() { + let positions = vec![ + EpubPosition { + href: "ch1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + progression: 0.0, + position: 1, + total_progression: 0.0, + }, + EpubPosition { + href: "ch1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + progression: 0.5, + position: 2, + total_progression: 0.1, + }, + EpubPosition { + href: "ch2.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + progression: 0.0, + position: 3, + total_progression: 0.5, + }, + ]; + let hrefs = get_spine_hrefs(&positions); + assert_eq!(hrefs, vec!["ch1.xhtml", "ch2.xhtml"]); + } + + /// Helper to create a test book model with given epub_positions. 
+ fn test_book(positions: &[EpubPosition]) -> books::Model { + books::Model { + id: uuid::Uuid::new_v4(), + library_id: uuid::Uuid::new_v4(), + series_id: uuid::Uuid::new_v4(), + file_path: String::new(), + file_name: String::new(), + format: "epub".to_string(), + file_size: 0, + file_hash: String::new(), + partial_hash: String::new(), + koreader_hash: None, + page_count: positions.len() as i32, + deleted: false, + analyzed: true, + analysis_error: None, + analysis_errors: None, + modified_at: chrono::Utc::now(), + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + thumbnail_path: None, + thumbnail_generated_at: None, + epub_positions: Some(serde_json::to_string(positions).unwrap()), + epub_spine_items: None, + } + } + + fn pos(href: &str, position: i32, total_progression: f64) -> EpubPosition { + EpubPosition { + href: href.to_string(), + media_type: "application/xhtml+xml".to_string(), + progression: 0.0, + position, + total_progression, + } + } + + #[test] + fn test_roundtrip_doc_fragment_format() { + // 3 spine items, each with 1 position + let positions = vec![ + pos("OEBPS/ch1.xhtml", 1, 0.0), + pos("OEBPS/ch2.xhtml", 2, 0.33), + pos("OEBPS/ch3.xhtml", 3, 0.66), + ]; + let book = test_book(&positions); + + // Test each DocFragment index roundtrips correctly + for i in 1..=3 { + let input = format!("/body/DocFragment[{}]/body/div/p[1]/text().0", i); + let r2 = koreader_to_r2_progression(&input, i as f64 / 3.0, &book); + assert!(r2.is_some(), "Failed to create R2 for DocFragment[{}]", i); + + let result = r2_progression_to_koreader(&r2, &book); + assert_eq!( + result.unwrap(), + format!("/body/DocFragment[{}].0", i), + "Roundtrip failed for DocFragment[{}]", + i + ); + } + } + + #[test] + fn test_roundtrip_doc_fragment_underscore_format() { + let positions = vec![pos("ch1.xhtml", 1, 0.0), pos("ch2.xhtml", 2, 0.5)]; + let book = test_book(&positions); + + // _doc_fragment_0_ is 0-based -> DocFragment[1] (1-based) + let r2 = 
koreader_to_r2_progression("#_doc_fragment_0_ c37", 0.1, &book); + assert!(r2.is_some()); + let result = r2_progression_to_koreader(&r2, &book); + assert_eq!(result.unwrap(), "/body/DocFragment[1].0"); + + // _doc_fragment_1_ -> DocFragment[2] + let r2 = koreader_to_r2_progression("#_doc_fragment_1_ c10", 0.6, &book); + assert!(r2.is_some()); + let result = r2_progression_to_koreader(&r2, &book); + assert_eq!(result.unwrap(), "/body/DocFragment[2].0"); + } + + #[test] + fn test_roundtrip_multiple_positions_per_spine() { + // ch1 has 3 positions, ch2 has 2 positions (like a real EPUB) + let positions = vec![ + pos("OEBPS/ch1.xhtml", 1, 0.0), + pos("OEBPS/ch1.xhtml", 2, 0.1), + pos("OEBPS/ch1.xhtml", 3, 0.2), + pos("OEBPS/ch2.xhtml", 4, 0.5), + pos("OEBPS/ch2.xhtml", 5, 0.7), + ]; + let book = test_book(&positions); + + // DocFragment[1] -> ch1.xhtml + let r2 = koreader_to_r2_progression("/body/DocFragment[1].0", 0.1, &book); + let result = r2_progression_to_koreader(&r2, &book); + assert_eq!(result.unwrap(), "/body/DocFragment[1].0"); + + // DocFragment[2] -> ch2.xhtml + let r2 = koreader_to_r2_progression("/body/DocFragment[2].0", 0.6, &book); + let result = r2_progression_to_koreader(&r2, &book); + assert_eq!(result.unwrap(), "/body/DocFragment[2].0"); + } + + #[test] + fn test_roundtrip_preserves_percentage_in_r2() { + let positions = vec![pos("ch1.xhtml", 1, 0.0)]; + let book = test_book(&positions); + + let r2_json = koreader_to_r2_progression("/body/DocFragment[1].0", 0.42, &book).unwrap(); + let r2: serde_json::Value = serde_json::from_str(&r2_json).unwrap(); + + let tp = r2["locator"]["locations"]["totalProgression"] + .as_f64() + .unwrap(); + assert!((tp - 0.42).abs() < f64::EPSILON); + } + + #[test] + fn test_no_epub_positions_returns_none() { + let mut book = test_book(&[]); + book.epub_positions = None; + + let r2 = koreader_to_r2_progression("/body/DocFragment[1].0", 0.5, &book); + assert!(r2.is_none()); + + let result = 
r2_progression_to_koreader(&Some("{}".to_string()), &book); + assert!(result.is_none()); + } + + #[test] + fn test_web_reader_r2_to_koreader() { + // Simulates: web reader stores R2Progression, KOReader reads it back + let positions = vec![ + pos("OEBPS/ch1.xhtml", 1, 0.0), + pos("OEBPS/ch2.xhtml", 2, 0.5), + pos("OEBPS/ch3.xhtml", 3, 0.8), + ]; + let book = test_book(&positions); + + // Web reader stores R2Progression with href + let web_r2 = serde_json::to_string(&serde_json::json!({ + "locator": { + "href": "OEBPS/ch2.xhtml", + "type": "application/xhtml+xml", + "locations": { + "totalProgression": 0.55 + } + } + })) + .unwrap(); + + // KOReader should get DocFragment[2] back + let result = r2_progression_to_koreader(&Some(web_r2), &book); + assert_eq!(result.unwrap(), "/body/DocFragment[2].0"); + } } diff --git a/src/api/routes/v1/dto/book.rs b/src/api/routes/v1/dto/book.rs index 1b3d4bad..fd4323d4 100644 --- a/src/api/routes/v1/dto/book.rs +++ b/src/api/routes/v1/dto/book.rs @@ -556,6 +556,10 @@ pub struct BookDto { #[schema(example = "a1b2c3d4e5f6g7h8i9j0")] pub file_hash: String, + /// KOReader-compatible partial MD5 hash for sync + #[serde(skip_serializing_if = "Option::is_none")] + pub koreader_hash: Option, + /// Number of pages in the book #[schema(example = 32)] pub page_count: i32, @@ -2116,6 +2120,10 @@ pub struct FullBookResponse { #[schema(example = "a1b2c3d4e5f6g7h8i9j0")] pub file_hash: String, + /// KOReader-compatible partial MD5 hash for sync + #[serde(skip_serializing_if = "Option::is_none")] + pub koreader_hash: Option, + /// Number of pages in the book #[schema(example = 32)] pub page_count: i32, diff --git a/src/api/routes/v1/handlers/books.rs b/src/api/routes/v1/handlers/books.rs index 846eaeaf..66a6dc5f 100644 --- a/src/api/routes/v1/handlers/books.rs +++ b/src/api/routes/v1/handlers/books.rs @@ -295,6 +295,7 @@ pub async fn books_to_dtos( file_format: book.format, file_size: book.file_size, file_hash: book.file_hash, + koreader_hash: 
book.koreader_hash, page_count: book.page_count, number, created_at: book.created_at, @@ -677,6 +678,7 @@ pub async fn books_to_full_dtos_batched( file_format: book.format, file_size: book.file_size, file_hash: book.file_hash, + koreader_hash: book.koreader_hash, page_count: book.page_count, number, deleted: book.deleted, diff --git a/src/api/routes/v1/handlers/read_progress.rs b/src/api/routes/v1/handlers/read_progress.rs index 92f38c99..1ba179e6 100644 --- a/src/api/routes/v1/handlers/read_progress.rs +++ b/src/api/routes/v1/handlers/read_progress.rs @@ -7,6 +7,7 @@ use axum::{ Json, extract::{Path, State}, http::StatusCode, + response::{IntoResponse, Response}, }; use std::sync::Arc; use utoipa::OpenApi; @@ -21,6 +22,8 @@ use uuid::Uuid; get_user_progress, mark_book_as_read, mark_book_as_unread, + get_progression, + put_progression, ), components(schemas( UpdateProgressRequest, @@ -88,6 +91,7 @@ pub async fn update_reading_progress( request.current_page, request.progress_percentage, completed, + None, ) .await .map_err(|e| ApiError::Internal(format!("Failed to update reading progress: {}", e)))?; @@ -270,3 +274,182 @@ pub async fn mark_book_as_unread( Ok(StatusCode::NO_CONTENT) } + +/// Get book progression (R2Progression / Readium standard) +/// +/// Returns the stored R2Progression JSON for EPUB reading position sync. +/// Returns 200 with the progression data, or 204 if no progression exists. 
+#[utoipa::path( + get, + path = "/api/v1/books/{book_id}/progression", + responses( + (status = 200, description = "Progression data", content_type = "application/json"), + (status = 204, description = "No progression exists"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Book not found"), + ), + security( + ("bearer_auth" = []), + ("api_key" = []) + ), + tag = "Reading Progress" +)] +pub async fn get_progression( + State(state): State>, + auth: AuthContext, + Path(book_id): Path, +) -> Result { + auth.require_permission(&Permission::BooksRead)?; + + BookRepository::get_by_id(&state.db, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch book: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Book not found".to_string()))?; + + let progress = ReadProgressRepository::get_by_user_and_book(&state.db, auth.user_id, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch progress: {}", e)))?; + + match progress.and_then(|p| p.r2_progression) { + Some(json_str) => { + let json_value: serde_json::Value = serde_json::from_str(&json_str) + .map_err(|e| ApiError::Internal(format!("Invalid R2Progression JSON: {}", e)))?; + Ok(Json(json_value).into_response()) + } + None => Ok(StatusCode::NO_CONTENT.into_response()), + } +} + +/// Update book progression (R2Progression / Readium standard) +/// +/// Stores R2Progression JSON and also updates the underlying read progress +/// (current_page, progress_percentage, completed) for backwards compatibility. 
+#[utoipa::path( + put, + path = "/api/v1/books/{book_id}/progression", + request_body = serde_json::Value, + responses( + (status = 204, description = "Progression updated successfully"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Book not found"), + ), + security( + ("bearer_auth" = []), + ("api_key" = []) + ), + tag = "Reading Progress" +)] +pub async fn put_progression( + State(state): State>, + auth: AuthContext, + Path(book_id): Path, + Json(body): Json, +) -> Result { + auth.require_permission(&Permission::BooksRead)?; + + let book = BookRepository::get_by_id(&state.db, book_id) + .await + .map_err(|e| ApiError::Internal(format!("Failed to fetch book: {}", e)))? + .ok_or_else(|| ApiError::NotFound("Book not found".to_string()))?; + + let locations = body.get("locator").and_then(|l| l.get("locations")); + + let client_total_progression = locations + .and_then(|l| l.get("totalProgression")) + .and_then(|v| v.as_f64()) + .unwrap_or(0.0); + + let client_href = body + .get("locator") + .and_then(|l| l.get("href")) + .and_then(|v| v.as_str()) + .unwrap_or(""); + + // Detect if the client is character-based (epub.js sends CFI, Readium clients don't) + let has_cfi = locations + .and_then(|l| l.get("cfi")) + .and_then(|v| v.as_str()) + .is_some_and(|s| !s.is_empty()); + + // Convert char-based progression to byte-based if spine items are available + let canonical_progression = if has_cfi { + if let Some(ref spine_json) = book.epub_spine_items { + if let Ok(spine_items) = + serde_json::from_str::>(spine_json) + { + crate::parsers::char_to_byte_progression(&spine_items, client_total_progression) + } else { + client_total_progression + } + } else { + client_total_progression + } + } else { + client_total_progression + }; + + // Normalize totalProgression using server-side positions if available + let (total_progression, current_page) = if let Some(ref positions_json) = book.epub_positions { + if let Ok(positions) = + 
serde_json::from_str::>(positions_json) + { + if let Some((normalized, position)) = crate::parsers::normalize_progression( + &positions, + client_href, + canonical_progression, + ) { + (normalized, position) + } else { + let page = if book.page_count > 0 { + (canonical_progression * book.page_count as f64) + .round() + .max(1.0) as i32 + } else { + 1 + }; + (canonical_progression, page) + } + } else { + let page = if book.page_count > 0 { + (canonical_progression * book.page_count as f64) + .round() + .max(1.0) as i32 + } else { + 1 + }; + (canonical_progression, page) + } + } else { + let page = if book.page_count > 0 { + (canonical_progression * book.page_count as f64) + .round() + .max(1.0) as i32 + } else { + 1 + }; + (canonical_progression, page) + }; + + let completed = + total_progression >= 0.98 || (book.page_count > 0 && current_page >= book.page_count); + + // Store the R2Progression as-is from the client. + // Each client uses its own locator (href + progression/CFI) for navigation. + // The normalized values are only used for internal tracking (current_page, percentage). 
+ let json_str = serde_json::to_string(&body) + .map_err(|e| ApiError::Internal(format!("Failed to serialize R2Progression: {}", e)))?; + + ReadProgressRepository::upsert_with_percentage( + &state.db, + auth.user_id, + book_id, + current_page, + Some(total_progression), + completed, + Some(json_str), + ) + .await + .map_err(|e| ApiError::Internal(format!("Failed to update progression: {}", e)))?; + + Ok(StatusCode::NO_CONTENT) +} diff --git a/src/api/routes/v1/routes/books.rs b/src/api/routes/v1/routes/books.rs index a572a445..148cbc61 100644 --- a/src/api/routes/v1/routes/books.rs +++ b/src/api/routes/v1/routes/books.rs @@ -159,6 +159,10 @@ pub fn routes(_state: Arc) -> Router> { "/books/{book_id}/progress", delete(handlers::delete_reading_progress), ) + .route( + "/books/{book_id}/progression", + get(handlers::get_progression).put(handlers::put_progression), + ) .route("/progress", get(handlers::get_user_progress)) // Mark as read/unread routes .route("/books/{book_id}/read", post(handlers::mark_book_as_read)) diff --git a/src/db/entities/books.rs b/src/db/entities/books.rs index 9c6c7991..c2acc2fb 100644 --- a/src/db/entities/books.rs +++ b/src/db/entities/books.rs @@ -31,6 +31,10 @@ pub struct Model { pub thumbnail_generated_at: Option>, /// KOReader partial MD5 hash for KOReader sync progress tracking pub koreader_hash: Option, + /// EPUB Readium positions list as JSON (for cross-app progression sync) + pub epub_positions: Option, + /// EPUB spine items with byte/char counts as JSON (for char-to-byte progression conversion) + pub epub_spine_items: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/src/db/entities/read_progress.rs b/src/db/entities/read_progress.rs index 191842bc..bad0d812 100644 --- a/src/db/entities/read_progress.rs +++ b/src/db/entities/read_progress.rs @@ -17,6 +17,9 @@ pub struct Model { pub started_at: DateTime, pub updated_at: DateTime, pub completed_at: Option>, + /// R2Progression JSON (Readium standard) for 
EPUB position sync + #[sea_orm(column_type = "Text", nullable)] + pub r2_progression: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/src/db/repositories/book.rs b/src/db/repositories/book.rs index f2bfe321..90e951f4 100644 --- a/src/db/repositories/book.rs +++ b/src/db/repositories/book.rs @@ -399,6 +399,8 @@ impl BookRepository { thumbnail_path: Set(book_model.thumbnail_path.clone()), thumbnail_generated_at: Set(book_model.thumbnail_generated_at), koreader_hash: Set(book_model.koreader_hash.clone()), + epub_positions: Set(book_model.epub_positions.clone()), + epub_spine_items: Set(book_model.epub_spine_items.clone()), }; let created_book = book.insert(db).await.context("Failed to create book")?; @@ -1484,6 +1486,8 @@ impl BookRepository { thumbnail_path: Set(book_model.thumbnail_path.clone()), thumbnail_generated_at: Set(book_model.thumbnail_generated_at), koreader_hash: Set(book_model.koreader_hash.clone()), + epub_positions: Set(book_model.epub_positions.clone()), + epub_spine_items: Set(book_model.epub_spine_items.clone()), }; active.update(db).await.context("Failed to update book")?; @@ -2201,6 +2205,8 @@ impl BookRepository { thumbnail_path: Set(book_model.thumbnail_path.clone()), thumbnail_generated_at: Set(book_model.thumbnail_generated_at), koreader_hash: Set(book_model.koreader_hash.clone()), + epub_positions: Set(book_model.epub_positions.clone()), + epub_spine_items: Set(book_model.epub_spine_items.clone()), }) .collect(); @@ -2263,6 +2269,8 @@ impl BookRepository { thumbnail_path: Set(book_model.thumbnail_path.clone()), thumbnail_generated_at: Set(book_model.thumbnail_generated_at), koreader_hash: Set(book_model.koreader_hash.clone()), + epub_positions: Set(book_model.epub_positions.clone()), + epub_spine_items: Set(book_model.epub_spine_items.clone()), }; active @@ -2491,6 +2499,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } 
diff --git a/src/db/repositories/book_covers.rs b/src/db/repositories/book_covers.rs index 8d854c04..6b6c3487 100644 --- a/src/db/repositories/book_covers.rs +++ b/src/db/repositories/book_covers.rs @@ -404,6 +404,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let book = BookRepository::create(db, &book_model, None).await.unwrap(); diff --git a/src/db/repositories/book_external_id.rs b/src/db/repositories/book_external_id.rs index dca85317..048251b4 100644 --- a/src/db/repositories/book_external_id.rs +++ b/src/db/repositories/book_external_id.rs @@ -371,6 +371,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let book = BookRepository::create(db, &book_model, None).await.unwrap(); diff --git a/src/db/repositories/book_external_links.rs b/src/db/repositories/book_external_links.rs index d7510f28..35994fe2 100644 --- a/src/db/repositories/book_external_links.rs +++ b/src/db/repositories/book_external_links.rs @@ -228,6 +228,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db, &book_model, None).await.unwrap() @@ -602,6 +604,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let book2_model = books::Model { diff --git a/src/db/repositories/genre.rs b/src/db/repositories/genre.rs index 838398f2..7fe86850 100644 --- a/src/db/repositories/genre.rs +++ b/src/db/repositories/genre.rs @@ -1007,6 +1007,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) diff --git a/src/db/repositories/metadata.rs b/src/db/repositories/metadata.rs 
index 0a975b22..82df861f 100644 --- a/src/db/repositories/metadata.rs +++ b/src/db/repositories/metadata.rs @@ -389,6 +389,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) diff --git a/src/db/repositories/metrics.rs b/src/db/repositories/metrics.rs index 3730d5c7..fb4f69fc 100644 --- a/src/db/repositories/metrics.rs +++ b/src/db/repositories/metrics.rs @@ -371,6 +371,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book_model, None) @@ -440,6 +442,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book_model, None) @@ -512,6 +516,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book_model, None) diff --git a/src/db/repositories/page.rs b/src/db/repositories/page.rs index ba6aade9..e02f0f40 100644 --- a/src/db/repositories/page.rs +++ b/src/db/repositories/page.rs @@ -183,6 +183,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) .await @@ -238,6 +240,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) .await @@ -296,6 +300,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; 
BookRepository::create(db.sea_orm_connection(), &book, None) .await @@ -354,6 +360,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) .await @@ -414,6 +422,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) .await @@ -473,6 +483,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) .await @@ -537,6 +549,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) .await diff --git a/src/db/repositories/read_progress.rs b/src/db/repositories/read_progress.rs index 8f3e5f9a..44a6c516 100644 --- a/src/db/repositories/read_progress.rs +++ b/src/db/repositories/read_progress.rs @@ -48,11 +48,13 @@ impl ReadProgressRepository { current_page: i32, completed: bool, ) -> Result { - Self::upsert_with_percentage(db, user_id, book_id, current_page, None, completed).await + Self::upsert_with_percentage(db, user_id, book_id, current_page, None, completed, None) + .await } /// Create or update reading progress for a user and book with optional percentage - /// The percentage field is primarily used for EPUB books with reflowable content + /// The percentage field is primarily used for EPUB books with reflowable content. + /// The r2_progression field stores the full R2Progression JSON for Readium/OPDS 2.0 sync. 
pub async fn upsert_with_percentage( db: &DatabaseConnection, user_id: Uuid, @@ -60,6 +62,7 @@ impl ReadProgressRepository { current_page: i32, progress_percentage: Option, completed: bool, + r2_progression: Option, ) -> Result { // Check if progress already exists let existing = Self::get_by_user_and_book(db, user_id, book_id).await?; @@ -75,6 +78,7 @@ impl ReadProgressRepository { progress_percentage, completed, now, + r2_progression, ) .await } else { @@ -89,6 +93,7 @@ impl ReadProgressRepository { started_at: Set(now), updated_at: Set(now), completed_at: Set(if completed { Some(now) } else { None }), + r2_progression: Set(r2_progression.clone()), }; match new_progress.insert(db).await { @@ -107,6 +112,7 @@ impl ReadProgressRepository { progress_percentage, completed, now, + r2_progression, ) .await } @@ -123,12 +129,18 @@ impl ReadProgressRepository { progress_percentage: Option, completed: bool, now: chrono::DateTime, + r2_progression: Option, ) -> Result { let mut active_model: read_progress::ActiveModel = existing_model.clone().into(); active_model.current_page = Set(current_page); active_model.progress_percentage = Set(progress_percentage); active_model.completed = Set(completed); active_model.updated_at = Set(now); + // Only update r2_progression if a new value is provided; + // passing None means "don't change", not "clear it" + if r2_progression.is_some() { + active_model.r2_progression = Set(r2_progression); + } // Set completed_at if just marked as completed if completed && existing_model.completed_at.is_none() { @@ -350,6 +362,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db, &book, None).await.unwrap() } diff --git a/src/db/repositories/series.rs b/src/db/repositories/series.rs index 86d49eb6..fbabc858 100644 --- a/src/db/repositories/series.rs +++ b/src/db/repositories/series.rs @@ -2451,6 +2451,8 @@ mod tests { thumbnail_path: 
None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) .await @@ -2546,6 +2548,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let book1: books::Model = BookRepository::create(db.sea_orm_connection(), &book1, None) .await @@ -2572,6 +2576,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let book2: books::Model = BookRepository::create(db.sea_orm_connection(), &book2, None) .await @@ -2598,6 +2604,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let book3: books::Model = BookRepository::create(db.sea_orm_connection(), &book3, None) .await diff --git a/src/db/repositories/tag.rs b/src/db/repositories/tag.rs index 43f346b1..d9726c91 100644 --- a/src/db/repositories/tag.rs +++ b/src/db/repositories/tag.rs @@ -996,6 +996,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None) diff --git a/src/parsers/cbr/parser.rs b/src/parsers/cbr/parser.rs index 82624bb6..5189f800 100644 --- a/src/parsers/cbr/parser.rs +++ b/src/parsers/cbr/parser.rs @@ -167,6 +167,8 @@ impl FormatParser for CbrParser { // RELATED: EPUB and PDF parsers successfully extract ISBNs from metadata // (see epub/parser.rs and pdf/parser.rs for implemented approaches) isbns: Vec::new(), + epub_positions: None, + epub_spine_items: None, }) } } diff --git a/src/parsers/cbz/parser.rs b/src/parsers/cbz/parser.rs index b7806649..b72edd18 100644 --- a/src/parsers/cbz/parser.rs +++ b/src/parsers/cbz/parser.rs @@ -142,6 +142,8 @@ impl FormatParser for CbzParser { // RELATED: EPUB 
and PDF parsers successfully extract ISBNs from metadata // (see epub/parser.rs and pdf/parser.rs for implemented approaches) isbns: Vec::new(), + epub_positions: None, + epub_spine_items: None, }) } } diff --git a/src/parsers/epub/parser.rs b/src/parsers/epub/parser.rs index d80668ad..cef70556 100644 --- a/src/parsers/epub/parser.rs +++ b/src/parsers/epub/parser.rs @@ -1,5 +1,6 @@ use crate::parsers::image_utils::{get_image_format, get_svg_dimensions, is_image_file}; use crate::parsers::isbn_utils::extract_isbns; +use crate::parsers::metadata::{SpineItem, compute_epub_positions}; use crate::parsers::opf; use crate::parsers::traits::FormatParser; use crate::parsers::{BookMetadata, FileFormat, ImageFormat, PageInfo}; @@ -14,13 +15,131 @@ use zip::ZipArchive; pub struct EpubParser; +/// Count text characters in XHTML content, excluding HTML markup, scripts, and styles. +/// +/// Uses a simple state-machine to strip tags and count visible text characters. +/// Returns 0 for empty or non-UTF-8 content. +pub fn count_text_chars(xhtml: &[u8]) -> u64 { + let text = match std::str::from_utf8(xhtml) { + Ok(s) => s, + Err(_) => return 0, + }; + + let mut char_count: u64 = 0; + let mut in_tag = false; + let mut in_script = false; + let mut in_style = false; + let mut tag_buf = String::new(); + + for ch in text.chars() { + if ch == '<' { + in_tag = true; + tag_buf.clear(); + continue; + } + + if in_tag { + if ch == '>' { + in_tag = false; + let tag_lower = tag_buf.to_ascii_lowercase(); + let tag_name = tag_lower + .split(|c: char| c.is_whitespace()) + .next() + .unwrap_or(""); + + match tag_name.trim_start_matches('/') { + "script" => in_script = !tag_name.starts_with('/'), + "style" => in_style = !tag_name.starts_with('/'), + _ => {} + } + } else { + tag_buf.push(ch); + } + continue; + } + + if !in_script && !in_style { + char_count += 1; + } + } + + char_count +} + +/// Find the next occurrence of an XML tag, handling optional namespace prefixes. 
+/// For example, searching for "item" will match both `(haystack: &'a str, local_name: &str) -> Option<(usize, &'a str)> { + let bare_space = format!("<{} ", local_name); + let bare_gt = format!("<{}>", local_name); + let mut search_from = 0; + while search_from < haystack.len() { + let remaining = &haystack[search_from..]; + // Try bare tag first: `` (no attributes) + if let Some(pos) = remaining + .find(bare_space.as_str()) + .or_else(|| remaining.find(bare_gt.as_str())) + { + return Some((search_from + pos, &haystack[search_from + pos..])); + } + // Try namespace-prefixed: look for `: ` or `:` preceded by `<` and a prefix + let prefixed_suffix = format!(":{}", local_name); + if let Some(colon_pos) = remaining.find(prefixed_suffix.as_str()) { + // Check the character after the local_name to ensure it's a complete tag name + let after_pos = colon_pos + prefixed_suffix.len(); + if after_pos < remaining.len() { + let next_char = remaining.as_bytes()[after_pos]; + if next_char == b' ' || next_char == b'>' || next_char == b'/' { + // Walk backwards from colon to find `<` + let before_colon = &remaining[..colon_pos]; + if let Some(lt_pos) = before_colon.rfind('<') { + // Verify the prefix between `<` and `:` is a valid XML name (no spaces) + let prefix = &before_colon[lt_pos + 1..]; + if !prefix.is_empty() && !prefix.contains(' ') && !prefix.contains('>') { + let abs_pos = search_from + lt_pos; + return Some((abs_pos, &haystack[abs_pos..])); + } + } + } + } + search_from += colon_pos + prefixed_suffix.len(); + } else { + break; + } + } + None +} + +/// Find the closing tag for an XML element, handling optional namespace prefixes. +/// For example, searching for "spine" will match both `` and ``. 
+fn find_xml_closing_tag(haystack: &str, local_name: &str) -> Option { + let bare = format!("", local_name); + if let Some(pos) = haystack.find(bare.as_str()) { + return Some(pos); + } + // Try namespace-prefixed closing tags + let suffix = format!(":{}>", local_name); + if let Some(suffix_pos) = haystack.find(suffix.as_str()) { + // Walk backwards to find `') { + return Some(lt_pos); + } + } + } + None +} + impl EpubParser { pub fn new() -> Self { Self } /// Parse the EPUB container.xml to find the root file (usually content.opf) - fn find_root_file(archive: &mut ZipArchive) -> Result { + pub(crate) fn find_root_file(archive: &mut ZipArchive) -> Result { let mut container_file = archive .by_name("META-INF/container.xml") .map_err(|_| CodexError::ParseError("META-INF/container.xml not found".to_string()))?; @@ -89,10 +208,13 @@ impl EpubParser { } /// Parse the OPF file to get metadata and spine (reading order) - fn parse_opf( + /// + /// Returns (manifest: id -> (href, media_type), spine_order: Vec<(href, media_type)>) + #[allow(clippy::type_complexity)] + pub(crate) fn parse_opf( archive: &mut ZipArchive, opf_path: &str, - ) -> Result<(HashMap, Vec)> { + ) -> Result<(HashMap, Vec<(String, String)>)> { let mut opf_file = archive .by_name(opf_path) .map_err(|_| CodexError::ParseError(format!("OPF file not found: {}", opf_path)))?; @@ -107,13 +229,12 @@ impl EpubParser { "" }; - // Parse manifest to get id -> href mapping - let mut manifest: HashMap = HashMap::new(); + // Parse manifest to get id -> (href, media_type) mapping + let mut manifest: HashMap = HashMap::new(); - // Simple XML parsing for manifest items + // Simple XML parsing for manifest items (handles both and ) let mut remaining = &xml_content[..]; - while let Some(item_start) = remaining.find("') { let item_tag = &item_section[..item_end]; @@ -137,10 +258,21 @@ impl EpubParser { None }; + // Extract media-type + let media_type = if let Some(mt_start) = item_tag.find("media-type=\"") { + let 
mt_value_start = mt_start + 12; + item_tag[mt_value_start..] + .find('"') + .map(|mt_end| &item_tag[mt_value_start..mt_value_start + mt_end]) + } else { + None + }; + if let (Some(id), Some(href)) = (id, href) { // Combine base path with href let full_path = format!("{}{}", base_path, href); - manifest.insert(id.to_string(), full_path); + let mt = media_type.unwrap_or("application/octet-stream").to_string(); + manifest.insert(id.to_string(), (full_path, mt)); } remaining = &item_section[item_end..]; @@ -150,37 +282,36 @@ impl EpubParser { } // Parse spine to get reading order (idref list) - let mut spine_order: Vec = Vec::new(); + // Handles both and , and + let mut spine_order: Vec<(String, String)> = Vec::new(); remaining = &xml_content[..]; - if let Some(spine_start) = remaining.find("") { - let spine_content = &spine_section[..spine_end]; - - // Extract itemrefs - let mut itemref_remaining = spine_content; - while let Some(itemref_start) = itemref_remaining.find("') { - let itemref_tag = &itemref_section[..itemref_end]; - - // Extract idref - if let Some(idref_start) = itemref_tag.find("idref=\"") { - let idref_value_start = idref_start + 7; - if let Some(idref_end) = itemref_tag[idref_value_start..].find('"') { - let idref = - &itemref_tag[idref_value_start..idref_value_start + idref_end]; - if let Some(path) = manifest.get(idref) { - spine_order.push(path.clone()); - } + if let Some((_pos, spine_section)) = find_xml_tag(remaining, "spine") + && let Some(spine_end) = find_xml_closing_tag(spine_section, "spine") + { + let spine_content = &spine_section[..spine_end]; + + // Extract itemrefs + let mut itemref_remaining = spine_content; + while let Some((_pos, itemref_section)) = find_xml_tag(itemref_remaining, "itemref") { + if let Some(itemref_end) = itemref_section.find('>') { + let itemref_tag = &itemref_section[..itemref_end]; + + // Extract idref + if let Some(idref_start) = itemref_tag.find("idref=\"") { + let idref_value_start = idref_start + 7; + if let 
Some(idref_end) = itemref_tag[idref_value_start..].find('"') { + let idref = + &itemref_tag[idref_value_start..idref_value_start + idref_end]; + if let Some((path, mt)) = manifest.get(idref) { + spine_order.push((path.clone(), mt.clone())); } } - - itemref_remaining = &itemref_section[itemref_end..]; - } else { - break; } + + itemref_remaining = &itemref_section[itemref_end..]; + } else { + break; } } } @@ -247,6 +378,34 @@ impl FormatParser for EpubParser { // Parse the OPF to get manifest and spine let (_manifest, spine_order) = Self::parse_opf(&mut archive, &opf_path)?; + // Build spine items with file sizes and character counts for position normalization + let spine_items: Vec = spine_order + .iter() + .filter_map(|(href, media_type)| { + let mut entry = archive.by_name(href).ok()?; + let file_size = entry.size(); + + // Count text characters for XHTML spine items + let char_count = if media_type.contains("xhtml") || media_type.contains("html") { + let mut content = Vec::new(); + std::io::Read::read_to_end(&mut entry, &mut content).ok(); + count_text_chars(&content) + } else { + 0 + }; + + Some(SpineItem { + href: href.clone(), + media_type: media_type.clone(), + file_size, + char_count, + }) + }) + .collect(); + + // Compute Readium positions (1 position per 1024 bytes) + let epub_positions = compute_epub_positions(&spine_items); + // Collect and sort image files let mut image_entries: Vec<(usize, String)> = Vec::new(); for i in 0..archive.len() { @@ -306,15 +465,14 @@ impl FormatParser for EpubParser { } // Page count logic for EPUB: - // EPUBs are primarily text-based documents with a spine (reading order) and optional images. - // We use the maximum of: - // - spine_order.len(): Number of content items (chapters/sections) in reading order - // - pages.len(): Number of extracted images (covers, illustrations) - // - // This gives a reasonable page count estimate, though EPUBs don't have fixed "pages" - // like comics do. 
For pure image-based EPUBs (like converted manga), pages.len() - // will be higher. For text-heavy novels, spine_order.len() will be higher. - let page_count = spine_order.len().max(pages.len()); + // Use the Readium positions count if available (the standard way to count EPUB "pages"). + // This matches Komga's approach and provides consistent page counts across apps. + // Fall back to max(spine items, image count) for edge cases. + let page_count = if !epub_positions.is_empty() { + epub_positions.len() + } else { + spine_order.len().max(pages.len()) + }; Ok(BookMetadata { file_path: path.to_string_lossy().to_string(), @@ -326,6 +484,16 @@ impl FormatParser for EpubParser { pages, comic_info, isbns, + epub_positions: if epub_positions.is_empty() { + None + } else { + Some(epub_positions) + }, + epub_spine_items: if spine_items.is_empty() { + None + } else { + Some(spine_items) + }, }) } } @@ -375,12 +543,12 @@ fn find_cover_image_from_opf(archive: &mut ZipArchive) -> Option { }; // Build a map of manifest item IDs to hrefs + // Handles both and namespace-prefixed tags let mut manifest_items: std::collections::HashMap = std::collections::HashMap::new(); let mut remaining = &opf_content[..]; - while let Some(item_start) = remaining.find("') { let item_tag = &item_section[..item_end]; @@ -879,4 +1047,227 @@ mod tests { assert_eq!(isbns.len(), 1); assert_eq!(isbns[0], "9780306406157"); } + + #[test] + fn test_find_xml_tag_bare() { + let xml = r#""#; + let result = find_xml_tag(xml, "item"); + assert!(result.is_some()); + let (pos, _section) = result.unwrap(); + assert_eq!(pos, 0); + } + + #[test] + fn test_find_xml_tag_namespaced() { + let xml = r#""#; + let result = find_xml_tag(xml, "item"); + assert!(result.is_some()); + let (pos, _section) = result.unwrap(); + assert_eq!(pos, 0); + } + + #[test] + fn test_find_xml_tag_no_match() { + let xml = r#""#; + let result = find_xml_tag(xml, "item"); + assert!(result.is_none()); + } + + #[test] + fn 
test_find_xml_closing_tag_bare() { + let xml = r#""#; + let result = find_xml_closing_tag(xml, "spine"); + assert!(result.is_some()); + assert_eq!(&xml[result.unwrap()..], ""); + } + + #[test] + fn test_find_xml_closing_tag_namespaced() { + let xml = r#""#; + let result = find_xml_closing_tag(xml, "spine"); + assert!(result.is_some()); + assert_eq!(&xml[result.unwrap()..], ""); + } + + #[test] + fn test_parse_opf_with_namespace_prefixed_tags() { + // Create a minimal EPUB with namespace-prefixed OPF tags + use std::io::Write; + let temp_dir = tempfile::tempdir().unwrap(); + let epub_path = temp_dir.path().join("test.epub"); + + let mut zip = zip::ZipWriter::new(File::create(&epub_path).unwrap()); + + // mimetype + let options = zip::write::SimpleFileOptions::default() + .compression_method(zip::CompressionMethod::Stored); + zip.start_file("mimetype", options).unwrap(); + zip.write_all(b"application/epub+zip").unwrap(); + + // container.xml + let options = zip::write::SimpleFileOptions::default(); + zip.start_file("META-INF/container.xml", options).unwrap(); + zip.write_all( + br#" + + + + +"#, + ) + .unwrap(); + + // OPF with opf: namespace prefix (like the Merlin EPUB) + zip.start_file("OEBPS/content.opf", options).unwrap(); + zip.write_all(br#" + + + Test Book + Test Author + + + + + + + + + + + +"#).unwrap(); + + // Create dummy XHTML files + for name in &["OEBPS/ch1.xhtml", "OEBPS/ch2.xhtml", "OEBPS/ch3.xhtml"] { + zip.start_file(*name, options).unwrap(); + zip.write_all(b"

      Content

      ") + .unwrap(); + } + + zip.finish().unwrap(); + + // Parse and verify + let parser = EpubParser::new(); + let metadata = parser.parse(&epub_path).unwrap(); + // Should find 3 spine items (not fall back to 0 due to namespace issues) + assert_eq!( + metadata.page_count, 3, + "Should parse 3 spine items from namespace-prefixed OPF" + ); + } + + #[test] + fn test_parse_opf_without_namespace_prefix() { + // Verify bare tags still work + use std::io::Write; + let temp_dir = tempfile::tempdir().unwrap(); + let epub_path = temp_dir.path().join("test.epub"); + + let mut zip = zip::ZipWriter::new(File::create(&epub_path).unwrap()); + + let options = zip::write::SimpleFileOptions::default() + .compression_method(zip::CompressionMethod::Stored); + zip.start_file("mimetype", options).unwrap(); + zip.write_all(b"application/epub+zip").unwrap(); + + let options = zip::write::SimpleFileOptions::default(); + zip.start_file("META-INF/container.xml", options).unwrap(); + zip.write_all( + br#" + + + + +"#, + ) + .unwrap(); + + zip.start_file("content.opf", options).unwrap(); + zip.write_all( + br#" + + + Test + + + + + + + + + +"#, + ) + .unwrap(); + + for name in &["ch1.xhtml", "ch2.xhtml"] { + zip.start_file(*name, options).unwrap(); + zip.write_all(b"

      Content

      ") + .unwrap(); + } + + zip.finish().unwrap(); + + let parser = EpubParser::new(); + let metadata = parser.parse(&epub_path).unwrap(); + assert_eq!( + metadata.page_count, 2, + "Should parse 2 spine items from bare OPF tags" + ); + } + + mod count_text_chars_tests { + use super::*; + + #[test] + fn test_plain_xhtml() { + let xhtml = b"

      Hello world

      "; + assert_eq!(count_text_chars(xhtml), 11); + } + + #[test] + fn test_script_excluded() { + let xhtml = + b"

      Hello

      World

      "; + assert_eq!(count_text_chars(xhtml), 10); // "Hello" + "World" + } + + #[test] + fn test_style_excluded() { + let xhtml = + b"Text"; + assert_eq!(count_text_chars(xhtml), 4); + } + + #[test] + fn test_cjk_characters() { + let xhtml = "

      \u{4F60}\u{597D}\u{4E16}\u{754C}

      "; + assert_eq!(count_text_chars(xhtml.as_bytes()), 4); + } + + #[test] + fn test_empty_content() { + assert_eq!(count_text_chars(b""), 0); + } + + #[test] + fn test_whitespace_counted() { + let xhtml = b"

      Hello World

      "; + // "Hello World" = 11 chars including the space + assert_eq!(count_text_chars(xhtml), 11); + } + + #[test] + fn test_nested_tags() { + let xhtml = b"
      AB
      "; + assert_eq!(count_text_chars(xhtml), 2); + } + + #[test] + fn test_invalid_utf8() { + let invalid = [0xFF, 0xFE, 0x00]; + assert_eq!(count_text_chars(&invalid), 0); + } + } } diff --git a/src/parsers/metadata.rs b/src/parsers/metadata.rs index 44de9122..646c174d 100644 --- a/src/parsers/metadata.rs +++ b/src/parsers/metadata.rs @@ -238,6 +238,199 @@ pub struct ComicInfo { pub manga: Option, } +/// A single position in the Readium positions list for EPUB books. +/// +/// Positions are computed using the Readium algorithm (1 position per 1024 bytes +/// of each spine resource). This provides a canonical coordinate system for +/// cross-app reading position sync, matching Komga's implementation. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct EpubPosition { + /// Resource href within the EPUB (e.g., "OEBPS/chapter1.xhtml") + pub href: String, + /// Media type of the resource (e.g., "application/xhtml+xml") + pub media_type: String, + /// Progression within the resource (0.0-1.0) + pub progression: f64, + /// Sequential position number (1-based) across the entire book + pub position: i32, + /// Overall progression within the entire book (0.0-1.0) + pub total_progression: f64, +} + +/// Spine item extracted from the EPUB OPF manifest +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SpineItem { + /// Full path within the EPUB archive + pub href: String, + /// Media type from the manifest + pub media_type: String, + /// Uncompressed file size in bytes + pub file_size: u64, + /// Number of text characters (excluding HTML markup, scripts, styles) + pub char_count: u64, +} + +/// Compute Readium positions list from spine items. +/// +/// Uses the Readium algorithm: 1 position per 1024 bytes of each spine resource. +/// This matches Komga's implementation for cross-app compatibility. 
+pub fn compute_epub_positions(spine_items: &[SpineItem]) -> Vec { + let mut positions = Vec::new(); + let mut next_position: i32 = 1; + + for item in spine_items { + let position_count = (item.file_size as f64 / 1024.0).ceil().max(1.0) as usize; + + for p in 0..position_count { + let progression = p as f64 / position_count as f64; + positions.push(EpubPosition { + href: item.href.clone(), + media_type: item.media_type.clone(), + progression, + position: next_position, + total_progression: 0.0, // computed below + }); + next_position += 1; + } + } + + // Compute total_progression for each position + let total = positions.len() as f64; + for pos in &mut positions { + pos.total_progression = pos.position as f64 / total; + } + + positions +} + +/// Normalize a client's totalProgression using the server's positions list. +/// +/// Given the client's `href` and `total_progression`, finds the closest matching +/// position in the server's positions list and returns its authoritative +/// `total_progression` value along with the derived page number. +/// +/// Returns `None` if positions is empty or href doesn't match any position. 
+pub fn normalize_progression( + positions: &[EpubPosition], + client_href: &str, + client_total_progression: f64, +) -> Option<(f64, i32)> { + if positions.is_empty() { + return None; + } + + // Strip fragment from href and URL-decode + let href_clean = client_href.split('#').next().unwrap_or(client_href); + let href_decoded = urlencoding::decode(href_clean).unwrap_or_else(|_| href_clean.into()); + + // Find positions matching the href (try exact match, then suffix match) + let matching: Vec<&EpubPosition> = positions + .iter() + .filter(|p| { + p.href == href_decoded.as_ref() + || href_decoded.ends_with(&p.href) + || p.href.ends_with(href_decoded.as_ref()) + }) + .collect(); + + if matching.is_empty() { + // No href match; fall back to closest totalProgression across all positions + let closest = positions.iter().min_by(|a, b| { + let da = (a.total_progression - client_total_progression).abs(); + let db = (b.total_progression - client_total_progression).abs(); + da.partial_cmp(&db).unwrap_or(std::cmp::Ordering::Equal) + })?; + return Some((closest.total_progression, closest.position)); + } + + // Among matching positions, find the one closest to client's totalProgression + let closest = matching.iter().min_by(|a, b| { + let da = (a.total_progression - client_total_progression).abs(); + let db = (b.total_progression - client_total_progression).abs(); + da.partial_cmp(&db).unwrap_or(std::cmp::Ordering::Equal) + })?; + + Some((closest.total_progression, closest.position)) +} + +/// Convert character-based totalProgression (epub.js) to byte-based (Readium canonical). +/// +/// epub.js divides the book into character-weighted chunks, while Readium uses +/// byte-weighted chunks. For multi-byte content (CJK, accented text), these +/// diverge significantly. This function maps a char-based percentage to the +/// equivalent byte-based percentage using per-spine-item char/byte counts. 
+pub fn char_to_byte_progression(spine_items: &[SpineItem], char_prog: f64) -> f64 { + if spine_items.is_empty() { + return char_prog; + } + + let total_chars: u64 = spine_items.iter().map(|s| s.char_count.max(1)).sum(); + let total_bytes: u64 = spine_items.iter().map(|s| s.file_size).sum(); + + if total_chars == 0 || total_bytes == 0 { + return char_prog; + } + + let target_chars = (char_prog * total_chars as f64) as u64; + let mut accumulated_chars: u64 = 0; + let mut accumulated_bytes: u64 = 0; + + for item in spine_items { + let item_chars = item.char_count.max(1); + if accumulated_chars + item_chars >= target_chars { + let within_item_frac = if item_chars > 0 { + (target_chars - accumulated_chars) as f64 / item_chars as f64 + } else { + 0.0 + }; + let byte_offset = accumulated_bytes as f64 + within_item_frac * item.file_size as f64; + return (byte_offset / total_bytes as f64).clamp(0.0, 1.0); + } + accumulated_chars += item_chars; + accumulated_bytes += item.file_size; + } + + 1.0 +} + +/// Convert byte-based totalProgression (Readium/KOReader) to character-based (epub.js). +/// +/// Inverse of `char_to_byte_progression`. Maps a byte-weighted percentage to +/// the equivalent character-weighted percentage. 
+pub fn byte_to_char_progression(spine_items: &[SpineItem], byte_prog: f64) -> f64 { + if spine_items.is_empty() { + return byte_prog; + } + + let total_chars: u64 = spine_items.iter().map(|s| s.char_count.max(1)).sum(); + let total_bytes: u64 = spine_items.iter().map(|s| s.file_size).sum(); + + if total_chars == 0 || total_bytes == 0 { + return byte_prog; + } + + let target_bytes = (byte_prog * total_bytes as f64) as u64; + let mut accumulated_chars: u64 = 0; + let mut accumulated_bytes: u64 = 0; + + for item in spine_items { + if accumulated_bytes + item.file_size >= target_bytes { + let within_item_frac = if item.file_size > 0 { + (target_bytes - accumulated_bytes) as f64 / item.file_size as f64 + } else { + 0.0 + }; + let char_offset = + accumulated_chars as f64 + within_item_frac * item.char_count.max(1) as f64; + return (char_offset / total_chars as f64).clamp(0.0, 1.0); + } + accumulated_chars += item.char_count.max(1); + accumulated_bytes += item.file_size; + } + + 1.0 +} + /// Complete book metadata #[derive(Debug, Clone, Serialize, Deserialize)] pub struct BookMetadata { @@ -259,6 +452,12 @@ pub struct BookMetadata { pub comic_info: Option, /// Detected ISBNs/barcodes pub isbns: Vec, + /// EPUB Readium positions list (only for EPUB format) + #[serde(skip_serializing_if = "Option::is_none")] + pub epub_positions: Option>, + /// EPUB spine items with byte/char counts (for cross-device sync normalization) + #[serde(skip_serializing_if = "Option::is_none")] + pub epub_spine_items: Option>, } #[cfg(test)] @@ -564,4 +763,264 @@ mod tests { ); } } + + mod epub_positions { + use super::*; + + fn sample_spine() -> Vec { + vec![ + SpineItem { + href: "OEBPS/chapter1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 2048, // 2 positions + char_count: 1500, + }, + SpineItem { + href: "OEBPS/chapter2.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 3072, // 3 positions + char_count: 2500, + }, + ] + 
} + + #[test] + fn test_compute_positions_count() { + let positions = compute_epub_positions(&sample_spine()); + assert_eq!(positions.len(), 5); // 2 + 3 + } + + #[test] + fn test_compute_positions_sequential() { + let positions = compute_epub_positions(&sample_spine()); + for (i, pos) in positions.iter().enumerate() { + assert_eq!(pos.position, (i + 1) as i32); + } + } + + #[test] + fn test_compute_positions_total_progression() { + let positions = compute_epub_positions(&sample_spine()); + assert!((positions[0].total_progression - 1.0 / 5.0).abs() < 1e-10); + assert!((positions[4].total_progression - 5.0 / 5.0).abs() < 1e-10); + } + + #[test] + fn test_compute_positions_min_one_per_resource() { + let spine = vec![SpineItem { + href: "tiny.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 100, + char_count: 80, + }]; + let positions = compute_epub_positions(&spine); + assert_eq!(positions.len(), 1); + } + + #[test] + fn test_normalize_exact_match() { + let positions = compute_epub_positions(&sample_spine()); + let (tp, pos) = normalize_progression(&positions, "OEBPS/chapter1.xhtml", 0.2).unwrap(); + assert_eq!(pos, 1); + assert!((tp - 1.0 / 5.0).abs() < 1e-10); + } + + #[test] + fn test_normalize_suffix_match() { + let positions = compute_epub_positions(&sample_spine()); + let result = normalize_progression(&positions, "chapter2.xhtml", 0.7); + assert!(result.is_some()); + let (_, pos) = result.unwrap(); + assert!((3..=5).contains(&pos)); + } + + #[test] + fn test_normalize_with_fragment() { + let positions = compute_epub_positions(&sample_spine()); + let result = normalize_progression(&positions, "OEBPS/chapter1.xhtml#section1", 0.2); + assert!(result.is_some()); + } + + #[test] + fn test_normalize_url_encoded() { + let spine = vec![SpineItem { + href: "OEBPS/chapter 1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 1024, + char_count: 800, + }]; + let positions = compute_epub_positions(&spine); 
+ let result = normalize_progression(&positions, "OEBPS/chapter%201.xhtml", 0.5); + assert!(result.is_some()); + } + + #[test] + fn test_normalize_empty_positions() { + assert!(normalize_progression(&[], "test.xhtml", 0.5).is_none()); + } + + #[test] + fn test_normalize_no_href_match_falls_back() { + let positions = compute_epub_positions(&sample_spine()); + let result = normalize_progression(&positions, "nonexistent.xhtml", 0.6); + assert!(result.is_some()); + } + } + + mod progression_conversion { + use super::*; + + /// ASCII-heavy content: chars ~= bytes minus markup, so conversion is near-identity + fn ascii_spine() -> Vec { + vec![ + SpineItem { + href: "ch1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 1000, + char_count: 800, // ~80% text, 20% markup + }, + SpineItem { + href: "ch2.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 1000, + char_count: 800, + }, + ] + } + + /// CJK content: 3 bytes per char, so char_count << file_size + fn cjk_spine() -> Vec { + vec![ + SpineItem { + href: "ch1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 3000, // 1000 CJK chars * 3 bytes each + char_count: 1000, + }, + SpineItem { + href: "ch2.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 6000, // 2000 CJK chars * 3 bytes each + char_count: 2000, + }, + ] + } + + /// Mixed content: one ASCII chapter, one CJK chapter + fn mixed_spine() -> Vec { + vec![ + SpineItem { + href: "ch1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 1000, // ASCII + char_count: 1000, + }, + SpineItem { + href: "ch2.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 3000, // CJK: 1000 chars * 3 bytes + char_count: 1000, + }, + ] + } + + #[test] + fn test_ascii_near_identity() { + let spine = ascii_spine(); + // For uniform char/byte ratio, conversion should be near-identity + let result 
= char_to_byte_progression(&spine, 0.5); + assert!( + (result - 0.5).abs() < 0.01, + "ASCII content should be near-identity, got {}", + result + ); + } + + #[test] + fn test_cjk_uniform_ratio_near_identity() { + let spine = cjk_spine(); + // Both chapters have same 3:1 byte:char ratio, so conversion is near-identity + let result = char_to_byte_progression(&spine, 0.5); + assert!( + (result - 0.5).abs() < 0.01, + "Uniform CJK ratio should be near-identity, got {}", + result + ); + } + + #[test] + fn test_mixed_content_diverges() { + let spine = mixed_spine(); + // 50% chars = halfway through (1000 of 2000 chars = end of ch1) + // In bytes: ch1 is 1000 bytes, total is 4000 bytes + // So 50% char = 25% bytes + let result = char_to_byte_progression(&spine, 0.5); + assert!( + (result - 0.25).abs() < 0.01, + "Mixed content: 50% chars should map to ~25% bytes, got {}", + result + ); + } + + #[test] + fn test_roundtrip() { + let spine = mixed_spine(); + for prog in [0.0, 0.1, 0.25, 0.5, 0.75, 0.9, 1.0] { + let byte_prog = char_to_byte_progression(&spine, prog); + let back = byte_to_char_progression(&spine, byte_prog); + assert!( + (back - prog).abs() < 0.01, + "Roundtrip failed for {}: got {} -> {} -> {}", + prog, + prog, + byte_prog, + back + ); + } + } + + #[test] + fn test_boundaries() { + let spine = mixed_spine(); + let start = char_to_byte_progression(&spine, 0.0); + let end = char_to_byte_progression(&spine, 1.0); + assert!((start - 0.0).abs() < 0.01); + assert!((end - 1.0).abs() < 0.01); + } + + #[test] + fn test_empty_spine() { + assert!((char_to_byte_progression(&[], 0.5) - 0.5).abs() < f64::EPSILON); + assert!((byte_to_char_progression(&[], 0.5) - 0.5).abs() < f64::EPSILON); + } + + #[test] + fn test_single_item() { + let spine = vec![SpineItem { + href: "ch1.xhtml".to_string(), + media_type: "application/xhtml+xml".to_string(), + file_size: 3000, + char_count: 1000, + }]; + // Single item: conversion should still be identity (all within one resource) + let 
result = char_to_byte_progression(&spine, 0.5); + assert!( + (result - 0.5).abs() < 0.01, + "Single item should be near-identity, got {}", + result + ); + } + + #[test] + fn test_byte_to_char_mixed() { + let spine = mixed_spine(); + // 25% bytes = end of ch1 (1000/4000 bytes) = 50% chars (1000/2000 chars) + let result = byte_to_char_progression(&spine, 0.25); + assert!( + (result - 0.5).abs() < 0.01, + "25% bytes should map to ~50% chars in mixed content, got {}", + result + ); + } + } } diff --git a/src/parsers/pdf/parser.rs b/src/parsers/pdf/parser.rs index c3f27305..32993307 100644 --- a/src/parsers/pdf/parser.rs +++ b/src/parsers/pdf/parser.rs @@ -311,6 +311,8 @@ impl FormatParser for PdfParser { pages, comic_info: None, // PDF doesn't use ComicInfo.xml isbns, + epub_positions: None, + epub_spine_items: None, }) } } diff --git a/src/scanner/analyzer_queue.rs b/src/scanner/analyzer_queue.rs index 776fab3e..2bc35583 100644 --- a/src/scanner/analyzer_queue.rs +++ b/src/scanner/analyzer_queue.rs @@ -288,15 +288,32 @@ async fn analyze_single_book( let resolved_number_decimal = resolved_number.map(|n| Decimal::from_f64_retain(n as f64).unwrap_or_default()); + // Recompute KOReader hash during analysis + let file_path_clone = file_path.clone(); + let koreader_hash = tokio::task::spawn_blocking(move || { + crate::utils::hasher::hash_file_koreader(&file_path_clone).ok() + }) + .await + .unwrap_or(None); + book.file_size = metadata.file_size as i64; book.file_hash = metadata.file_hash.clone(); book.partial_hash = partial_hash; + book.koreader_hash = koreader_hash; book.format = format!("{:?}", metadata.format).to_lowercase(); book.page_count = metadata.page_count as i32; book.modified_at = metadata.modified_at; book.analyzed = true; // Mark as analyzed book.analysis_error = None; // Clear any previous error on successful analysis book.updated_at = now; + book.epub_positions = metadata + .epub_positions + .as_ref() + .map(|positions| 
serde_json::to_string(positions).unwrap_or_default()); + book.epub_spine_items = metadata + .epub_spine_items + .as_ref() + .map(|items| serde_json::to_string(items).unwrap_or_default()); BookRepository::update(db, &book, event_broadcaster).await?; diff --git a/src/scanner/library_scanner.rs b/src/scanner/library_scanner.rs index 48d1f8be..587cd79b 100644 --- a/src/scanner/library_scanner.rs +++ b/src/scanner/library_scanner.rs @@ -973,10 +973,17 @@ async fn process_series_batched( || modified_changed || series_changed; + // Always update koreader_hash (it may have been computed + // with a corrected algorithm or may be newly available) + let koreader_hash_changed = + existing_book.koreader_hash != file_hash.koreader_hash; + let anything_changed = anything_changed || koreader_hash_changed; + if anything_changed { let mut updated_book = existing_book.clone(); updated_book.file_size = file_hash.file_size as i64; updated_book.partial_hash = file_hash.partial_hash; + updated_book.koreader_hash = file_hash.koreader_hash; updated_book.format = file_hash.format; updated_book.modified_at = file_hash.modified_at; updated_book.updated_at = now; @@ -1032,6 +1039,8 @@ async fn process_series_batched( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: file_hash.koreader_hash, + epub_positions: None, + epub_spine_items: None, }; batch.add_create(book_model, true); diff --git a/src/services/read_progress.rs b/src/services/read_progress.rs index bff9eda8..0f7d8ccb 100644 --- a/src/services/read_progress.rs +++ b/src/services/read_progress.rs @@ -275,6 +275,8 @@ mod tests { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db, &book, None).await.unwrap() } diff --git a/src/tasks/handlers/user_plugin_sync/tests.rs b/src/tasks/handlers/user_plugin_sync/tests.rs index eb383eba..33142d20 100644 --- a/src/tasks/handlers/user_plugin_sync/tests.rs +++ 
b/src/tasks/handlers/user_plugin_sync/tests.rs @@ -56,6 +56,8 @@ async fn create_test_book( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db, &book, None).await.unwrap() } diff --git a/src/utils/hasher.rs b/src/utils/hasher.rs index 0459e514..d7323b04 100644 --- a/src/utils/hasher.rs +++ b/src/utils/hasher.rs @@ -62,8 +62,13 @@ pub fn hash_file_partial>(path: P) -> io::Result { /// /// KOReader uses a custom partial hashing algorithm that reads 1024-byte chunks /// at exponentially increasing offsets throughout the file: -/// - For i in -1..=10: seek to (1024 << (2*i)) and read 1024 bytes -/// - Offsets: 256, 1024, 4096, 16384, 65536, ..., 1073741824 +/// - For i in -1..=10: seek to lshift(1024, 2*i) and read 1024 bytes +/// +/// LuaJIT's bit.lshift masks the shift count to lower 5 bits and operates on +/// 32-bit integers. For i=-1: shift count = -2, masked to 30, and +/// lshift(1024, 30) = 2^40 which overflows 32-bit to 0. So offset for i=-1 is 0. +/// +/// Offsets: 0, 1024, 4096, 16384, 65536, ..., 1073741824 /// /// This produces a fast fingerprint without reading the entire file. pub fn hash_file_koreader>(path: P) -> io::Result { @@ -74,13 +79,10 @@ pub fn hash_file_koreader>(path: P) -> io::Result { let mut hasher = Md5::new(); let mut buffer = [0u8; CHUNK_SIZE]; + // Replicate LuaJIT's bit.lshift(1024, 2*i) with 32-bit wrapping semantics. 
for i in -1i32..=10 { - let offset = if i < 0 { - // For i=-1: 1024 >> 2 = 256 - (CHUNK_SIZE as u64) >> ((-i as u32) * 2) - } else { - (CHUNK_SIZE as u64) << ((i as u32) * 2) - }; + let shift = (2 * i).rem_euclid(32) as u32; + let offset = (1024u32.wrapping_shl(shift)) as u64; if offset >= file_size { break; diff --git a/tests/api/books.rs b/tests/api/books.rs index 327bced0..3c88bfd5 100644 --- a/tests/api/books.rs +++ b/tests/api/books.rs @@ -60,6 +60,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/bulk_metadata.rs b/tests/api/bulk_metadata.rs index bb1eaede..08e7a93f 100644 --- a/tests/api/bulk_metadata.rs +++ b/tests/api/bulk_metadata.rs @@ -61,6 +61,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/bulk_operations.rs b/tests/api/bulk_operations.rs index c8f79341..5346a9e5 100644 --- a/tests/api/bulk_operations.rs +++ b/tests/api/bulk_operations.rs @@ -62,6 +62,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/covers.rs b/tests/api/covers.rs index fc3c093f..fd4085ad 100644 --- a/tests/api/covers.rs +++ b/tests/api/covers.rs @@ -695,6 +695,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/genres.rs b/tests/api/genres.rs index 3c55569d..6436a1d4 100644 --- a/tests/api/genres.rs +++ b/tests/api/genres.rs @@ -601,6 +601,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/komga.rs b/tests/api/komga.rs index 
1a5cfde1..4d464805 100644 --- a/tests/api/komga.rs +++ b/tests/api/komga.rs @@ -4648,3 +4648,255 @@ async fn test_komga_search_series_sort_by_title_mixed_null_and_set() { titles ); } + +// ============================================================================ +// R2Progression (Readium) Tests +// ============================================================================ + +#[tokio::test] +async fn test_komga_get_progression_returns_204_when_no_progression() { + let (db, temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Batman", None) + .await + .unwrap(); + let book = create_test_book( + series.id, + library.id, + "/comics/Batman/issue1.epub", + "issue1.epub", + "hash1", + "epub", + 50, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = create_test_router_with_komga(state); + + let uri = format!("/komga/api/v1/books/{}/progression", created_book.id); + let request = get_request_with_auth(&uri, &token); + let (status, _) = make_raw_request(app, request).await; + + assert_eq!(status, StatusCode::NO_CONTENT); +} + +#[tokio::test] +async fn test_komga_put_and_get_progression_round_trip() { + let (db, temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Batman", None) + .await + .unwrap(); + let book = create_test_book( + series.id, + library.id, + "/comics/Batman/issue1.epub", + "issue1.epub", + "hash1", + "epub", + 50, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = 
create_admin_and_token(&db, &state).await; + + // PUT progression + let progression_json = serde_json::json!({ + "device": { "id": "komic", "name": "Komic" }, + "locator": { + "href": "OEBPS/chapter1.xhtml", + "locations": { + "position": 10, + "progression": 0.3, + "totalProgression": 0.5 + }, + "type": "application/xhtml+xml" + }, + "modified": "2026-03-14T21:44:34.922Z" + }); + + let app = create_test_router_with_komga(state.clone()); + let uri = format!("/komga/api/v1/books/{}/progression", created_book.id); + let request = put_request_with_auth(&uri, &progression_json.to_string(), &token); + let (status, _) = make_raw_request(app, request).await; + assert_eq!(status, StatusCode::NO_CONTENT); + + // GET progression - should return what we stored + let app = create_test_router_with_komga(state.clone()); + let request = get_request_with_auth(&uri, &token); + let (status, response): (StatusCode, Option) = + make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let response = response.unwrap(); + assert_eq!(response["device"]["id"], "komic"); + assert_eq!(response["locator"]["href"], "OEBPS/chapter1.xhtml"); + assert_eq!(response["locator"]["locations"]["totalProgression"], 0.5); +} + +#[tokio::test] +async fn test_komga_put_progression_updates_read_progress() { + let (db, temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Batman", None) + .await + .unwrap(); + let book = create_test_book( + series.id, + library.id, + "/comics/Batman/issue1.epub", + "issue1.epub", + "hash1", + "epub", + 100, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let admin = UserRepository::get_by_username(&db, "admin") + .await + .unwrap() + .unwrap(); + 
+ // PUT progression with 50% totalProgression + let progression_json = serde_json::json!({ + "device": { "id": "komic", "name": "Komic" }, + "locator": { + "href": "OEBPS/chapter5.xhtml", + "locations": { "totalProgression": 0.5 }, + "type": "application/xhtml+xml" + }, + "modified": "2026-03-14T21:44:34.922Z" + }); + + let app = create_test_router_with_komga(state.clone()); + let uri = format!("/komga/api/v1/books/{}/progression", created_book.id); + let request = put_request_with_auth(&uri, &progression_json.to_string(), &token); + let (status, _) = make_raw_request(app, request).await; + assert_eq!(status, StatusCode::NO_CONTENT); + + // Verify read_progress was also updated + let progress = ReadProgressRepository::get_by_user_and_book(&db, admin.id, created_book.id) + .await + .unwrap() + .unwrap(); + assert_eq!(progress.current_page, 50); // 0.5 * 100 pages + assert_eq!(progress.progress_percentage, Some(0.5)); + assert!(!progress.completed); +} + +#[tokio::test] +async fn test_komga_put_progression_auto_completes_at_98_percent() { + let (db, temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Batman", None) + .await + .unwrap(); + let book = create_test_book( + series.id, + library.id, + "/comics/Batman/issue1.epub", + "issue1.epub", + "hash1", + "epub", + 100, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let admin = UserRepository::get_by_username(&db, "admin") + .await + .unwrap() + .unwrap(); + + // PUT progression with 99% totalProgression + let progression_json = serde_json::json!({ + "device": { "id": "komic", "name": "Komic" }, + "locator": { + "href": "OEBPS/last_chapter.xhtml", + "locations": { "totalProgression": 0.99 }, + "type": 
"application/xhtml+xml" + }, + "modified": "2026-03-14T22:00:00.000Z" + }); + + let app = create_test_router_with_komga(state.clone()); + let uri = format!("/komga/api/v1/books/{}/progression", created_book.id); + let request = put_request_with_auth(&uri, &progression_json.to_string(), &token); + let (status, _) = make_raw_request(app, request).await; + assert_eq!(status, StatusCode::NO_CONTENT); + + // Should be marked as completed + let progress = ReadProgressRepository::get_by_user_and_book(&db, admin.id, created_book.id) + .await + .unwrap() + .unwrap(); + assert!(progress.completed); + assert!(progress.completed_at.is_some()); +} + +#[tokio::test] +async fn test_komga_put_progression_without_auth() { + let (db, temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Batman", None) + .await + .unwrap(); + let book = create_test_book( + series.id, + library.id, + "/comics/Batman/issue1.epub", + "issue1.epub", + "hash1", + "epub", + 50, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let state = create_test_auth_state(db.clone()).await; + let app = create_test_router_with_komga(state); + + let uri = format!("/komga/api/v1/books/{}/progression", created_book.id); + let body = r#"{"device":{"id":"test","name":"Test"},"locator":{"href":"x","locations":{"totalProgression":0.1},"type":"text/html"},"modified":"2026-01-01T00:00:00Z"}"#; + let request = put_request_with_auth(&uri, body, "invalid-token"); + let (status, _) = make_raw_request(app, request).await; + + assert_eq!(status, StatusCode::UNAUTHORIZED); +} + +#[tokio::test] +async fn test_komga_put_progression_book_not_found() { + let (db, temp_dir) = setup_test_db().await; + + let state = create_test_auth_state(db.clone()).await; + let token = create_admin_and_token(&db, &state).await; + let app = 
create_test_router_with_komga(state); + + let fake_id = uuid::Uuid::new_v4(); + let uri = format!("/komga/api/v1/books/{}/progression", fake_id); + let body = r#"{"device":{"id":"test","name":"Test"},"locator":{"href":"x","locations":{"totalProgression":0.1},"type":"text/html"},"modified":"2026-01-01T00:00:00Z"}"#; + let request = put_request_with_auth(&uri, body, &token); + let (status, _) = make_raw_request(app, request).await; + + assert_eq!(status, StatusCode::NOT_FOUND); +} diff --git a/tests/api/metadata_locks.rs b/tests/api/metadata_locks.rs index de3d8a10..7719d097 100644 --- a/tests/api/metadata_locks.rs +++ b/tests/api/metadata_locks.rs @@ -1020,6 +1020,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/opds.rs b/tests/api/opds.rs index 76864f52..6dc4fe1a 100644 --- a/tests/api/opds.rs +++ b/tests/api/opds.rs @@ -535,6 +535,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/opds2.rs b/tests/api/opds2.rs index 11e91b94..729e56b0 100644 --- a/tests/api/opds2.rs +++ b/tests/api/opds2.rs @@ -916,6 +916,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/pages.rs b/tests/api/pages.rs index 1f9a5dff..6fe58812 100644 --- a/tests/api/pages.rs +++ b/tests/api/pages.rs @@ -63,6 +63,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/read_progress.rs b/tests/api/read_progress.rs index 302d3cd1..9396cbc2 100644 --- a/tests/api/read_progress.rs +++ b/tests/api/read_progress.rs @@ -58,6 +58,8 @@ fn create_test_book_model( thumbnail_path: None, 
thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } @@ -694,3 +696,144 @@ async fn test_update_progress_returns_not_found_for_missing_book() { error.message ); } + +// ============================================================================ +// R2Progression (Readium) Tests - V1 API +// ============================================================================ + +#[tokio::test] +async fn test_v1_get_progression_returns_204_when_no_progression() { + let (db, _temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Test Series", None) + .await + .unwrap(); + let book = create_test_book_model( + series.id, + library.id, + "/comics/test.epub", + "test.epub", + None, + 50, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let (state, app) = setup_test_app(db.clone()).await; + let (_user_id, token) = create_admin_and_token(&db, &state).await; + + let uri = format!("/api/v1/books/{}/progression", created_book.id); + let request = get_request_with_auth(&uri, &token); + let (status, _) = make_raw_request(app, request).await; + + assert_eq!(status, StatusCode::NO_CONTENT); +} + +#[tokio::test] +async fn test_v1_put_and_get_progression_round_trip() { + let (db, _temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Test Series", None) + .await + .unwrap(); + let book = create_test_book_model( + series.id, + library.id, + "/comics/test.epub", + "test.epub", + None, + 50, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let (state, _app) = setup_test_app(db.clone()).await; + let (_user_id, token) = create_admin_and_token(&db, 
&state).await; + + let progression_json = serde_json::json!({ + "device": { "id": "codex-web", "name": "Codex Web Reader" }, + "locator": { + "href": "OEBPS/chapter3.xhtml", + "locations": { + "position": 15, + "totalProgression": 0.6, + "cfi": "/6/14!/4/2/1:0" + }, + "type": "application/xhtml+xml" + }, + "modified": "2026-03-14T21:44:34.922Z" + }); + + // PUT progression + let (_, app) = setup_test_app(db.clone()).await; + let uri = format!("/api/v1/books/{}/progression", created_book.id); + let request = put_request_with_auth(&uri, &progression_json.to_string(), &token); + let (status, _) = make_raw_request(app, request).await; + assert_eq!(status, StatusCode::NO_CONTENT); + + // GET progression + let (_, app) = setup_test_app(db.clone()).await; + let request = get_request_with_auth(&uri, &token); + let (status, response): (StatusCode, Option<serde_json::Value>) = + make_json_request(app, request).await; + + assert_eq!(status, StatusCode::OK); + let response = response.unwrap(); + assert_eq!(response["device"]["id"], "codex-web"); + assert_eq!(response["locator"]["href"], "OEBPS/chapter3.xhtml"); + assert_eq!(response["locator"]["locations"]["totalProgression"], 0.6); + assert_eq!(response["locator"]["locations"]["cfi"], "/6/14!/4/2/1:0"); +} + +#[tokio::test] +async fn test_v1_put_progression_updates_legacy_progress() { + let (db, _temp_dir) = setup_test_db().await; + + let library = LibraryRepository::create(&db, "Comics", "/comics", ScanningStrategy::Default) + .await + .unwrap(); + let series = SeriesRepository::create(&db, library.id, "Test Series", None) + .await + .unwrap(); + let book = create_test_book_model( + series.id, + library.id, + "/comics/test.epub", + "test.epub", + None, + 200, + ); + let created_book = BookRepository::create(&db, &book, None).await.unwrap(); + + let (state, app) = setup_test_app(db.clone()).await; + let (user_id, token) = create_admin_and_token(&db, &state).await; + + let progression_json = serde_json::json!({ + "device": { "id": "codex-web",
"name": "Codex Web Reader" }, + "locator": { + "href": "OEBPS/chapter5.xhtml", + "locations": { "totalProgression": 0.75 }, + "type": "application/xhtml+xml" + }, + "modified": "2026-03-14T22:00:00.000Z" + }); + + let uri = format!("/api/v1/books/{}/progression", created_book.id); + let request = put_request_with_auth(&uri, &progression_json.to_string(), &token); + let (status, _) = make_raw_request(app, request).await; + assert_eq!(status, StatusCode::NO_CONTENT); + + // Verify legacy read_progress fields + let progress = ReadProgressRepository::get_by_user_and_book(&db, user_id, created_book.id) + .await + .unwrap() + .unwrap(); + assert_eq!(progress.current_page, 150); // 0.75 * 200 + assert_eq!(progress.progress_percentage, Some(0.75)); + assert!(!progress.completed); + assert!(progress.r2_progression.is_some()); +} diff --git a/tests/api/series.rs b/tests/api/series.rs index 1ee5d870..43812591 100644 --- a/tests/api/series.rs +++ b/tests/api/series.rs @@ -1326,6 +1326,8 @@ fn create_test_book( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/api/tags.rs b/tests/api/tags.rs index 332e08f6..4d498b36 100644 --- a/tests/api/tags.rs +++ b/tests/api/tags.rs @@ -577,6 +577,8 @@ fn create_test_book_model( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } diff --git a/tests/common/fixtures.rs b/tests/common/fixtures.rs index 3ed58284..205b21fb 100644 --- a/tests/common/fixtures.rs +++ b/tests/common/fixtures.rs @@ -85,6 +85,8 @@ pub fn create_test_book( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, } } @@ -206,6 +208,8 @@ pub async fn create_test_book_with_hash( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, updated_at: Utc::now(), }; diff --git 
a/tests/db/auth.rs b/tests/db/auth.rs index 7f5b8e54..a8c3c2c9 100644 --- a/tests/db/auth.rs +++ b/tests/db/auth.rs @@ -158,13 +158,13 @@ async fn test_permission_sets() { assert!(READONLY_PERMISSIONS.contains(&Permission::LibrariesRead)); assert!(READONLY_PERMISSIONS.contains(&Permission::BooksRead)); assert!(!READONLY_PERMISSIONS.contains(&Permission::LibrariesWrite)); - assert_eq!(READONLY_PERMISSIONS.len(), 5); + assert_eq!(READONLY_PERMISSIONS.len(), 7); // Test ADMIN permissions assert!(ADMIN_PERMISSIONS.contains(&Permission::SystemAdmin)); assert!(ADMIN_PERMISSIONS.contains(&Permission::UsersWrite)); assert!(ADMIN_PERMISSIONS.contains(&Permission::LibrariesDelete)); - assert_eq!(ADMIN_PERMISSIONS.len(), 21); + assert_eq!(ADMIN_PERMISSIONS.len(), 23); // Test permission serialization roundtrip let perms = READONLY_PERMISSIONS.clone(); @@ -217,9 +217,9 @@ async fn test_user_with_multiple_api_keys() { for key in &user_keys { let perms: HashSet<Permission> = serde_json::from_value(key.permissions.clone()).unwrap(); if key.name == "Mobile App" { - assert_eq!(perms.len(), 5); // READONLY + assert_eq!(perms.len(), 7); // READONLY } else if key.name == "Admin Tool" { - assert_eq!(perms.len(), 21); // ADMIN + assert_eq!(perms.len(), 23); // ADMIN } else if key.name == "CI/CD" { assert_eq!(perms.len(), 1); assert!(perms.contains(&Permission::BooksRead)); diff --git a/tests/db/postgres.rs b/tests/db/postgres.rs index 8e9ff12d..b6e66f26 100644 --- a/tests/db/postgres.rs +++ b/tests/db/postgres.rs @@ -146,6 +146,8 @@ async fn test_postgres_series_book_relationship() { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let book = BookRepository::create(conn, &book_model, None) .await .unwrap() @@ -293,6 +295,8 @@ async fn test_postgres_metrics_repository() { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(conn, &book_model,
None) diff --git a/tests/db/repositories.rs b/tests/db/repositories.rs index 0f47fc61..90823715 100644 --- a/tests/db/repositories.rs +++ b/tests/db/repositories.rs @@ -234,6 +234,7 @@ async fn test_user_read_progress() { started_at: Set(Utc::now()), updated_at: Set(Utc::now()), completed_at: Set(None), + r2_progression: Set(None), }; let progress = progress.insert(conn).await.unwrap(); diff --git a/tests/scanner/book_analysis_metadata.rs b/tests/scanner/book_analysis_metadata.rs index efd258e1..419c2555 100644 --- a/tests/scanner/book_analysis_metadata.rs +++ b/tests/scanner/book_analysis_metadata.rs @@ -62,6 +62,8 @@ async fn create_test_book( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let created_book = BookRepository::create(db.sea_orm_connection(), &book, None).await?; @@ -107,6 +109,8 @@ async fn create_test_book_with_strategy( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; let created_book = BookRepository::create(db.sea_orm_connection(), &book, None).await?; @@ -869,6 +873,8 @@ async fn test_series_metadata_populated_from_first_book() -> Result<()> { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book1, None).await?; @@ -928,6 +934,8 @@ async fn test_series_metadata_populated_from_first_book() -> Result<()> { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book2, None).await?; @@ -1097,6 +1105,8 @@ async fn test_series_title_sort_populated_from_title() -> Result<()> { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, 
None).await?; @@ -1194,6 +1204,8 @@ async fn test_series_title_sort_respects_lock() -> Result<()> { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None).await?; @@ -1298,6 +1310,8 @@ async fn test_series_title_sort_not_overwritten_if_already_set() -> Result<()> { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None).await?; @@ -1376,6 +1390,8 @@ async fn test_series_title_sort_populated_without_comic_info() -> Result<()> { thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db.sea_orm_connection(), &book, None).await?; diff --git a/tests/scanner/force_analysis.rs b/tests/scanner/force_analysis.rs index dc89ccea..d56dcbb7 100644 --- a/tests/scanner/force_analysis.rs +++ b/tests/scanner/force_analysis.rs @@ -44,6 +44,8 @@ async fn create_analyzed_book( thumbnail_path: None, thumbnail_generated_at: None, koreader_hash: None, + epub_positions: None, + epub_spine_items: None, }; BookRepository::create(db_conn, &book, None).await?; diff --git a/web/openapi.json b/web/openapi.json index ed748d98..0f2c1e7f 100644 --- a/web/openapi.json +++ b/web/openapi.json @@ -17307,6 +17307,13 @@ "description": "Book unique identifier", "example": "550e8400-e29b-41d4-a716-446655440001" }, + "koreaderHash": { + "type": [ + "string", + "null" + ], + "description": "KOReader-compatible partial MD5 hash for sync" + }, "libraryId": { "type": "string", "format": "uuid", @@ -22701,6 +22708,13 @@ "description": "Book unique identifier", "example": "550e8400-e29b-41d4-a716-446655440001" }, + "koreaderHash": { + "type": [ + "string", + "null" + ], + "description": "KOReader-compatible partial MD5 hash for sync" + }, "libraryId": { "type": 
"string", "format": "uuid", @@ -27014,6 +27028,13 @@ "description": "Book unique identifier", "example": "550e8400-e29b-41d4-a716-446655440001" }, + "koreaderHash": { + "type": [ + "string", + "null" + ], + "description": "KOReader-compatible partial MD5 hash for sync" + }, "libraryId": { "type": "string", "format": "uuid", diff --git a/web/package-lock.json b/web/package-lock.json index f364b33f..a6459aa6 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1133,60 +1133,6 @@ "react": "^18.x || ^19.x" } }, - "node_modules/@mapbox/node-pre-gyp": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", - "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", - "dev": true, - "license": "BSD-3-Clause", - "optional": true, - "peer": true, - "dependencies": { - "detect-libc": "^2.0.0", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.7", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "bin": { - "node-pre-gyp": "bin/node-pre-gyp" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { 
- "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/@mswjs/interceptors": { "version": "0.41.2", "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.41.2.tgz", @@ -2783,15 +2729,6 @@ "node": ">=10.0.0" } }, - "node_modules/abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/agent-base": { "version": "7.1.4", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", @@ -2838,49 +2775,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/aproba": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.1.0.tgz", - "integrity": "sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, - "node_modules/are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "deprecated": "This package is no longer supported.", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/are-we-there-yet/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - 
"inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -2997,24 +2891,6 @@ "node": ">= 6" } }, - "node_modules/canvas": { - "version": "2.11.2", - "resolved": "https://registry.npmjs.org/canvas/-/canvas-2.11.2.tgz", - "integrity": "sha512-ItanGBMrmRV7Py2Z+Xhs7cT+FNt5K0vPL4p9EZ/UX/Mu7hFbkxSjKF2KVtPwX7UYWp7dRKnrTvReflgrItJbdw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.0", - "nan": "^2.17.0", - "simple-get": "^3.0.3" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/ccount": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", @@ -3099,18 +2975,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "engines": { - "node": ">=10" - } - }, "node_modules/ci-info": { "version": "3.9.0", "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", @@ -3199,18 +3063,6 @@ "dev": true, "license": "MIT" }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "bin": { - "color-support": "bin.js" - } - }, "node_modules/colorette": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz", @@ -3240,24 +3092,6 @@ "url": 
"https://github.com/sponsors/wooorm" } }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true - }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/cookie": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", @@ -3446,21 +3280,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/decompress-response": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", - "integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "mimic-response": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -3470,15 +3289,6 @@ "node": ">=0.4.0" } }, - "node_modules/delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true - }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -3488,18 +3298,6 @@ "node": ">=6" } }, - 
"node_modules/detect-libc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", - "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", - "dev": true, - "license": "Apache-2.0", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, "node_modules/detect-node-es": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", @@ -3947,45 +3745,6 @@ "node": ">= 6" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -4010,39 +3769,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gauge": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "deprecated": "This package is no longer supported.", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/gauge/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -4099,58 +3825,6 @@ "node": ">= 0.4" } }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/globals": { "version": "16.5.0", "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", @@ -4272,15 +3946,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -4547,20 +4212,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - 
"node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -5085,36 +4736,6 @@ "url": "https://github.com/wojtekmaj/make-cancellable-promise?sponsor=1" } }, - "node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/make-dir/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/make-event-props": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/make-event-props/-/make-event-props-2.0.0.tgz", @@ -6034,21 +5655,6 @@ "node": ">= 0.6" } }, - "node_modules/mimic-response": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz", - 
"integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/min-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", @@ -6081,64 +5687,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "bin": { - 
"mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/mrmime": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", @@ -6259,15 +5807,6 @@ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/nan": { - "version": "2.25.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.25.0.tgz", - "integrity": "sha512-0M90Ag7Xn5KMLLZ7zliPWP3rT90P6PN+IzVFS0VqmnPktBk3700xUVv8Ikm9EUaUE5SDWdp/BIxdENzVznpm1g==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true - }, "node_modules/nanoid": { "version": "3.3.11", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", @@ -6299,94 +5838,6 @@ "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==", "license": "ISC" }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/node-fetch/node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true - }, - "node_modules/node-fetch/node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "dev": true, - "license": 
"BSD-2-Clause", - "optional": true, - "peer": true - }, - "node_modules/node-fetch/node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "node_modules/nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "abbrev": "1" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "deprecated": "This package is no longer supported.", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, "node_modules/nwsapi": { "version": "2.2.23", "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.23.tgz", @@ -6426,18 +5877,6 @@ ], "license": "MIT" }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "wrappy": "1" - } - }, "node_modules/openapi-typescript": { "version": "7.13.0", "resolved": 
"https://registry.npmjs.org/openapi-typescript/-/openapi-typescript-7.13.0.tgz", @@ -6540,18 +5979,6 @@ "url": "https://github.com/inikulin/parse5?sponsor=1" } }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/path-to-regexp": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz", @@ -7272,25 +6699,6 @@ "dev": true, "license": "MIT" }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/rollup": { "version": "4.57.1", "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", @@ -7375,21 +6783,6 @@ "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", "license": "MIT" }, - "node_modules/semver": { - "version": "7.7.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", - "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/seroval": { "version": "1.5.0", "resolved": 
"https://registry.npmjs.org/seroval/-/seroval-1.5.0.tgz", @@ -7411,15 +6804,6 @@ "seroval": "^1.0" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/set-cookie-parser": { "version": "2.7.2", "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", @@ -7452,43 +6836,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/simple-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", - "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "optional": true, - "peer": true - }, - "node_modules/simple-get": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.1.tgz", - "integrity": "sha512-CQ5LTKGfCpvE1K0n2us+kuMPbk/q0EKl82s4aheV9oXjFEz6W/Y7oQFVJuU6QG77hRT4Ghb5RURteF5vnWjupA==", - "dev": true, - "license": "MIT", - "optional": true, - "peer": true, - "dependencies": { - "decompress-response": "^4.2.0", - "once": "^1.3.1", - "simple-concat": "^1.0.0" - } - }, "node_modules/sirv": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", @@ -7741,27 +7088,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": 
"sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "deprecated": "Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/tiny-invariant": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", @@ -8568,18 +7894,6 @@ "node": ">=8" } }, - "node_modules/wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true, - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, "node_modules/wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", @@ -8601,15 +7915,6 @@ "node": ">=8" } }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/ws": { "version": "8.19.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", @@ -8659,15 +7964,6 @@ "node": ">=10" } }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true, - "license": "ISC", - "optional": true, - "peer": true - }, "node_modules/yaml-ast-parser": { "version": "0.0.43", "resolved": "https://registry.npmjs.org/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz", diff --git a/web/src/api/readProgress.ts b/web/src/api/readProgress.ts index 8e296ea0..38631a6f 100644 --- a/web/src/api/readProgress.ts +++ b/web/src/api/readProgress.ts @@ -6,6 +6,23 @@ export type ReadProgressResponse = export type UpdateProgressRequest = components["schemas"]["UpdateProgressRequest"]; +/** Readium R2Progression format for EPUB position sync */ +export interface R2Progression { + device: { id: string; name: string }; + locator: { + href: string; + locations: { + position?: number; + progression?: number; + totalProgression: number; + /** Codex extension: epub.js CFI for precise position restoration */ + cfi?: string; + }; + type: string; + }; + modified: string; +} + export const readProgressApi = { /** * Get reading progress for a book @@ -38,4 +55,28 @@ export const readProgressApi = { delete: async (bookId: string): Promise => { await api.delete(`/books/${bookId}/progress`); }, + + /** + * Get R2Progression for a book (Readium standard) + * Returns null if no progression exists (204 response) + */ + getProgression: async (bookId: string): Promise => { + const response = await api.get( + `/books/${bookId}/progression`, + { + validateStatus: (status) => status === 200 || status === 204, + }, + ); + return response.status === 204 ? 
null : response.data; + }, + + /** + * Update R2Progression for a book (Readium standard) + */ + updateProgression: async ( + bookId: string, + progression: R2Progression, + ): Promise => { + await api.put(`/books/${bookId}/progression`, progression); + }, }; diff --git a/web/src/components/book/BookInfoModal.tsx b/web/src/components/book/BookInfoModal.tsx index 4f13c41e..bdaf89b3 100644 --- a/web/src/components/book/BookInfoModal.tsx +++ b/web/src/components/book/BookInfoModal.tsx @@ -165,6 +165,12 @@ export function BookInfoModal({ opened, onClose, book }: BookInfoModalProps) { + diff --git a/web/src/components/library/MediaCard.tsx b/web/src/components/library/MediaCard.tsx index 19680d28..cfffe38f 100644 --- a/web/src/components/library/MediaCard.tsx +++ b/web/src/components/library/MediaCard.tsx @@ -155,10 +155,14 @@ export const MediaCard = memo(function MediaCard({ }; // Calculate progress percentage for books + // Prefer progressPercentage (from R2Progression) for EPUBs where page_count + // is spine items, not actual pages. const progressPercentage = - book?.readProgress && book.pageCount - ? (book.readProgress.currentPage / book.pageCount) * 100 - : 0; + book?.readProgress?.progressPercentage != null + ? book.readProgress.progressPercentage * 100 + : book?.readProgress && book.pageCount + ? 
(book.readProgress.currentPage / book.pageCount) * 100 + : 0; // Book analysis mutation const bookAnalyzeMutation = useMutation({ diff --git a/web/src/components/reader/EpubReader.test.tsx b/web/src/components/reader/EpubReader.test.tsx index ed20ba2f..ffde0547 100644 --- a/web/src/components/reader/EpubReader.test.tsx +++ b/web/src/components/reader/EpubReader.test.tsx @@ -280,29 +280,6 @@ describe("EpubReader", () => { }); }); - describe("URL parameters", () => { - it("should handle startPercent parameter", () => { - renderWithProviders(); - - // Reader should render with the start percent - expect(screen.getByTestId("react-reader-mock")).toBeInTheDocument(); - }); - - it("should ignore invalid startPercent (negative)", () => { - renderWithProviders(); - - // Should still render without error - expect(screen.getByTestId("react-reader-mock")).toBeInTheDocument(); - }); - - it("should ignore invalid startPercent (greater than 1)", () => { - renderWithProviders(); - - // Should still render without error - expect(screen.getByTestId("react-reader-mock")).toBeInTheDocument(); - }); - }); - describe("fullscreen", () => { it("should not be fullscreen by default", () => { renderWithProviders(); diff --git a/web/src/components/reader/EpubReader.tsx b/web/src/components/reader/EpubReader.tsx index 080b130d..d3bf630d 100644 --- a/web/src/components/reader/EpubReader.tsx +++ b/web/src/components/reader/EpubReader.tsx @@ -134,8 +134,6 @@ interface EpubReaderProps { title: string; /** Total pages in the book (for progress calculation) */ totalPages: number; - /** Starting percentage from URL parameter (0.0-1.0, overrides saved progress) */ - startPercent?: number; /** Incognito mode - when true, progress tracking is disabled */ incognito?: boolean; /** Callback when reader should close */ @@ -158,7 +156,6 @@ export function EpubReader({ seriesId, title, totalPages, - startPercent, incognito, onClose, }: EpubReaderProps) { @@ -171,7 +168,11 @@ export function EpubReader({ // 
CFI-based progress tracking (also syncs to backend, disabled in incognito mode) const { getSavedLocation, - initialPercentage, + getLocalTimestamp, + initialCfi, + initialHref, + initialProgression, + apiTimestamp, isLoadingProgress, saveLocation, } = useEpubProgress({ @@ -228,12 +229,9 @@ export function EpubReader({ totalPagesRef.current = totalPages; // Local state - initialize with saved CFI location from localStorage - // Note: startPercent from URL is handled after locations are generated + // Note: This provides instant restore, but the cross-device sync effect + // below may override it if the API has newer progress. const [location, setLocation] = useState(() => { - // If startPercent is provided, don't load from localStorage - we'll navigate after locations are ready - if (startPercent != null && startPercent >= 0 && startPercent <= 1) { - return 0; // Start at 0, will navigate to startPercent after locations are generated - } const saved = getSavedLocation(); if (saved) { initialLocationLoadedRef.current = true; @@ -241,7 +239,6 @@ export function EpubReader({ } return 0; }); - const [hasAppliedStartPercent, setHasAppliedStartPercent] = useState(false); const [hasAppliedApiProgress, setHasAppliedApiProgress] = useState(false); const [locationsReady, setLocationsReady] = useState(false); const [isLoading, setIsLoading] = useState(true); @@ -266,6 +263,7 @@ export function EpubReader({ (state) => state.settings.epubLineHeight, ); const epubMargin = useReaderStore((state) => state.settings.epubMargin); + const epubSpread = useReaderStore((state) => state.settings.epubSpread); // Use refs for initial styles to avoid re-creating handleGetRendition const epubThemeRef = useRef(epubTheme); @@ -300,11 +298,6 @@ export function EpubReader({ // Generate EPUB file URL const epubUrl = `/api/v1/books/${bookId}/file`; - // Track if we need to wait for startPercent navigation before showing content - const needsStartPercentNavigation = - startPercent != null && 
startPercent >= 0 && startPercent <= 1; - const startPercentAppliedRef = useRef(!needsStartPercentNavigation); - // Handle location change (CFI-based progress) // Note: Progress is saved in the 'relocated' event handler below, // where we have access to the accurate percentage value @@ -353,60 +346,127 @@ export function EpubReader({ } }, [epubMargin]); - // Apply startPercent from URL (highest priority - overrides saved progress) + // Apply spread mode to rendition + useEffect(() => { + if (renditionRef.current) { + // epub.js spread() accepts "none" (single), "always" (double), or "auto" (responsive) + // For "always", set minSpreadWidth to 0 so it never collapses to single page + const minWidth = epubSpread === "always" ? 0 : 800; + renditionRef.current.spread(epubSpread, minWidth); + } + }, [epubSpread]); + + // Helper: check if API progress is newer than localStorage + const isApiNewer = useCallback(() => { + if (!initialLocationLoadedRef.current) return true; // No local data, always apply + if (!apiTimestamp) return false; + const localTs = getLocalTimestamp(); + if (!localTs) return true; // No local timestamp, prefer API + return new Date(apiTimestamp).getTime() > new Date(localTs).getTime(); + }, [apiTimestamp, getLocalTimestamp]); + + // Apply CFI-based API progress immediately (no need to wait for locations generation). + // This handles cross-device sync when the R2Progression was saved by another Codex web + // instance (which includes a precise CFI). 
useEffect(() => { if ( - locationsReady && - !hasAppliedStartPercent && - startPercent != null && - startPercent >= 0 && - startPercent <= 1 && - renditionRef.current + !isLoadingProgress && + initialCfi !== null && + !hasAppliedApiProgress && + renditionRef.current && + isApiNewer() ) { - // Navigate directly to percentage - const book = renditionRef.current.book; - if (book?.locations?.length()) { - const cfi = book.locations.cfiFromPercentage(startPercent); - if (cfi) { - setLocation(cfi); - } - } - setHasAppliedStartPercent(true); - startPercentAppliedRef.current = true; - // Clear loading now that we've navigated to the correct position - setIsLoading(false); + setLocation(initialCfi); + setHasAppliedApiProgress(true); } - }, [locationsReady, hasAppliedStartPercent, startPercent]); + }, [isLoadingProgress, initialCfi, hasAppliedApiProgress, isApiNewer]); - // Apply API progress for cross-device sync (only if no localStorage CFI and no startPercent) + // Whether we need cross-app sync (Komic/Readium): no CFI, but has href, and API is newer. + // When true, we show a loading spinner until locations are ready for precise positioning. + const needsCrossAppSync = useMemo(() => { + if (isLoadingProgress) return false; + return initialCfi === null && initialHref !== null && isApiNewer(); + }, [isLoadingProgress, initialCfi, initialHref, isApiNewer]); + + // Ref so the relocated callback can check if cross-app sync is pending + const pendingCrossAppSyncRef = useRef(false); + useEffect(() => { + pendingCrossAppSyncRef.current = + needsCrossAppSync && !hasAppliedApiProgress; + }, [needsCrossAppSync, hasAppliedApiProgress]); + + // Cross-app sync: navigate precisely using href + within-resource progression. + // Waits for locations to be generated so we can position accurately within the chapter. + // The loading spinner stays visible until this completes. 
useEffect(() => { if ( locationsReady && - !isLoadingProgress && - initialPercentage !== null && + needsCrossAppSync && !hasAppliedApiProgress && - !hasAppliedStartPercent && - !initialLocationLoadedRef.current && - renditionRef.current && - startPercent == null // Don't apply API progress if startPercent is provided + renditionRef.current ) { - // Navigate to percentage-based location from API const book = renditionRef.current.book; if (book?.locations?.length()) { - const cfi = book.locations.cfiFromPercentage(initialPercentage); - if (cfi) { - setLocation(cfi); + const spine = book.spine as { + items?: Array<{ href: string; cfiBase: string }>; + }; + const spineItem = spine.items?.find( + (item) => + item.href === initialHref || + item.href.endsWith(initialHref!) || + initialHref!.endsWith(item.href), + ); + + if ( + spineItem && + initialProgression !== null && + initialProgression > 0 + ) { + // Interpolate within the section's book-level percentage range + const locations = book.locations; + const total = locations.length(); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const allLocs: string[] = (locations as any)._locations ?? 
[]; + + let firstIdx = -1; + let lastIdx = -1; + for (let i = 0; i < allLocs.length; i++) { + if (allLocs[i].includes(spineItem.cfiBase)) { + if (firstIdx === -1) firstIdx = i; + lastIdx = i; + } + } + + if (firstIdx >= 0 && total > 0) { + const sectionStart = firstIdx / total; + const sectionEnd = (lastIdx + 1) / total; + const targetPct = + sectionStart + initialProgression * (sectionEnd - sectionStart); + const cfi = locations.cfiFromPercentage( + Math.min(targetPct, 0.9999), + ); + if (cfi) { + setLocation(cfi); + } + } else { + // No locations for this section, navigate to href start + setLocation(initialHref!); + } + } else { + // progression is 0/null, navigate to start of chapter + setLocation(initialHref!); } } setHasAppliedApiProgress(true); + // Clear the loading spinner now that we've navigated to the right spot + setIsLoading(false); } }, [ locationsReady, - isLoadingProgress, - initialPercentage, + needsCrossAppSync, + initialHref, + initialProgression, hasAppliedApiProgress, - hasAppliedStartPercent, - startPercent, ]); // Ref for onClose to keep handleGetRendition stable @@ -464,8 +524,8 @@ export function EpubReader({ // Track current chapter for TOC highlighting and save progress rendition.on("relocated", (location: Location) => { setCurrentHref(location.start.href); - // Only clear loading if we don't need to wait for startPercent navigation - if (startPercentAppliedRef.current) { + // Keep spinner visible while waiting for cross-app position sync + if (!pendingCrossAppSyncRef.current) { setIsLoading(false); } @@ -511,7 +571,19 @@ export function EpubReader({ // Save progress - the hook handles debouncing and duplicate detection // Note: percentage can be 0 at the start of the book, which is valid - saveLocationRef.current(cfi, percentage); + // Resolve href to full EPUB-internal path (e.g., "OEBPS/chapter1.xhtml") + // epub.js returns href relative to the OPF directory, but Readium-based + // apps (like Komic) expect the full path within the 
EPUB archive. + const bookDir = + (rendition.book.path as { directory?: string })?.directory ?? ""; + const stripped = bookDir === "/" ? "" : bookDir; + const normalizedDir = stripped.startsWith("/") + ? stripped.slice(1) + : stripped; + const fullHref = normalizedDir + ? `${normalizedDir}${location.start.href}` + : location.start.href; + saveLocationRef.current(cfi, percentage, fullHref); }); }, []); @@ -899,6 +971,7 @@ export function EpubReader({ }} epubOptions={{ allowScriptedContent: false, + spread: epubSpread, }} /> diff --git a/web/src/components/reader/EpubReaderSettings.tsx b/web/src/components/reader/EpubReaderSettings.tsx index dabf16e8..2ddcd0f6 100644 --- a/web/src/components/reader/EpubReaderSettings.tsx +++ b/web/src/components/reader/EpubReaderSettings.tsx @@ -12,6 +12,7 @@ import { } from "@mantine/core"; import { type EpubFontFamily, + type EpubSpread, type EpubTheme, useReaderStore, } from "@/store/readerStore"; @@ -39,6 +40,13 @@ const THEME_OPTIONS = [ { value: "forest", label: "Forest" }, ]; +/** Page layout (spread) options */ +const SPREAD_OPTIONS = [ + { value: "auto", label: "Auto (responsive)" }, + { value: "none", label: "Single page" }, + { value: "always", label: "Double page" }, +]; + /** Font family options for display in select */ const FONT_FAMILY_OPTIONS = [ { value: "default", label: "Default" }, @@ -69,6 +77,7 @@ export function EpubReaderSettings({ const setEpubFontFamily = useReaderStore((state) => state.setEpubFontFamily); const setEpubLineHeight = useReaderStore((state) => state.setEpubLineHeight); const setEpubMargin = useReaderStore((state) => state.setEpubMargin); + const setEpubSpread = useReaderStore((state) => state.setEpubSpread); const setAutoHideToolbar = useReaderStore( (state) => state.setAutoHideToolbar, ); @@ -190,6 +199,21 @@ export function EpubReaderSettings({ ]} /> + + {/* Page Layout */} + + + Page Layout + +