diff --git a/.specify/feature.json b/.specify/feature.json index b0aa5b0..3fd9950 100644 --- a/.specify/feature.json +++ b/.specify/feature.json @@ -1,3 +1,3 @@ { - "feature_directory": "specs/015-bike-expense-tracking" + "feature_directory": "specs/016-csv-expense-import" } diff --git a/specs/016-csv-expense-import/contracts/api-contracts.md b/specs/016-csv-expense-import/contracts/api-contracts.md new file mode 100644 index 0000000..6016ccb --- /dev/null +++ b/specs/016-csv-expense-import/contracts/api-contracts.md @@ -0,0 +1,264 @@ +# API Contracts: CSV Expense Import + +**Feature**: 016-csv-expense-import +**Date**: 2026-04-20 +**Clarified**: 2026-04-20 +**Base path**: `/api/expense-imports` + +--- + +## New Endpoints + +### POST `/api/expense-imports/preview` + +Uploads and validates a CSV expense file. Creates an `ExpenseImportJob` + `ExpenseImportRow` records in `awaiting-confirmation` state. Returns preview data including validation errors and duplicate conflicts. + +**Request**: `multipart/form-data` +- `file`: CSV file (`.csv`, max 5 MB) + +**Response**: `ExpenseImportPreviewResponse` + +```csharp +public sealed record ExpenseImportPreviewResponse( + long JobId, + string FileName, + int TotalRows, + int ValidRows, + int InvalidRows, + int DuplicateCount, + IReadOnlyList Errors, + IReadOnlyList Duplicates, + bool CanConfirmImport +); + +public sealed record ExpenseImportRowErrorView( + int RowNumber, + string Field, + string Message +); + +public sealed record ExpenseImportDuplicateView( + int RowNumber, + DateOnly ExpenseDate, + decimal Amount, + string? Note, + IReadOnlyList ExistingMatches +); + +public sealed record ExistingExpenseMatchView( + long ExpenseId, + DateOnly ExpenseDate, + decimal Amount, + string? 
Note +); +``` + +**Error responses**: +- `400 Bad Request` — missing or invalid file, file is not a CSV, file exceeds 5 MB, missing required columns (Date or Amount) +- `401 Unauthorized` — unauthenticated rider + +**Notes**: +- `CanConfirmImport` is `true` when `ValidRows > 0`. +- `DuplicateCount` counts valid rows that have at least one date+amount match in existing expenses. +- Errors are reported per-row. A row may have multiple errors (e.g., bad date AND invalid amount). +- Fully blank rows are silently skipped and not included in `TotalRows`, `ValidRows`, or `InvalidRows`. + +--- + +### POST `/api/expense-imports/{jobId}/confirm` + +Confirms and executes a previously previewed import job. Applies duplicate resolutions and creates (or updates) expense records. Returns a completion summary. + +**Path parameters**: +- `jobId` (long) — ID returned by the preview endpoint + +**Request body**: `application/json` → `ConfirmExpenseImportRequest` + +```csharp +public sealed record ConfirmExpenseImportRequest( + bool OverrideAllDuplicates, + IReadOnlyList DuplicateChoices +); + +public sealed record ExpenseDuplicateResolutionChoice( + int RowNumber, + string Resolution // "keep-existing" | "replace-with-import" +); +``` + +**Response**: `ExpenseImportSummaryResponse` + +```csharp +public sealed record ExpenseImportSummaryResponse( + long JobId, + int TotalRows, + int ImportedRows, + int SkippedRows, + int FailedRows +); +``` + +**Error responses**: +- `400 Bad Request` — job is not in `awaiting-confirmation` status, or invalid resolution values +- `401 Unauthorized` — unauthenticated rider +- `403 Forbidden` — `jobId` belongs to a different rider +- `404 Not Found` — `jobId` does not exist +- `409 Conflict` — job has already been confirmed (status is `completed` or `processing`) + +**Notes**: +- If `OverrideAllDuplicates = true`, `DuplicateChoices` is ignored — all valid rows are imported. 
+- If `OverrideAllDuplicates = false`, any duplicate rows without a corresponding `DuplicateChoices` entry default to `keep-existing`. +- Rows created by this endpoint are created via `RecordExpenseService`, which applies all domain validation and event sourcing rules. +- Rows with `Resolution = replace-with-import` update the matching existing expense via `EditExpenseService` with **partial-update note semantics**: the note is only overwritten when the incoming CSV row provides a non-blank note value; a blank CSV note preserves the existing note unchanged. +- All imported expenses have `ReceiptPath = null` (receipts cannot be imported). + +--- + +### GET `/api/expense-imports/{jobId}/status` + +Returns the current status of an import job. Used for page reload recovery if the rider navigates away after confirming. + +**Path parameters**: +- `jobId` (long) + +**Response**: `ExpenseImportStatusResponse` + +```csharp +public sealed record ExpenseImportStatusResponse( + long JobId, + string Status, // "previewing" | "awaiting-confirmation" | "processing" | "completed" | "failed" + int TotalRows, + int ValidRows, + int InvalidRows, + int DuplicateCount, + ExpenseImportSummaryResponse? Summary // non-null when Status = "completed" +); +``` + +**Error responses**: +- `401 Unauthorized` — unauthenticated rider +- `403 Forbidden` — `jobId` belongs to a different rider +- `404 Not Found` — `jobId` does not exist + +--- + +### DELETE `/api/expense-imports/{jobId}` + +Deletes an import job and all associated import row records. Called client-side when the rider navigates away from the summary page. Import jobs are session-scoped and do not persist beyond the current import session. 
+ +**Path parameters**: +- `jobId` (long) + +**Response**: `204 No Content` + +**Error responses**: +- `401 Unauthorized` — unauthenticated rider +- `403 Forbidden` — `jobId` belongs to a different rider +- `404 Not Found` — `jobId` does not exist (idempotent; clients may safely re-call) + +**Notes**: +- Deletion is cascade: the job and all child `ExpenseImportRow` records are removed in one operation. +- Already-imported `ExpenseEntity` records are **not** affected — only the import job metadata is removed. +- The frontend calls this endpoint via `useEffect` cleanup and `beforeunload` event handler. +- If the delete call fails silently (e.g., network drop), the orphaned job row has no functional impact; a safety-net cleanup of jobs older than 24 hours may be added in a future phase. + +--- + +## TypeScript Client Types + +```typescript +// Matches ExpenseImportPreviewResponse +export interface ExpenseImportPreviewResponse { + jobId: number; + fileName: string; + totalRows: number; + validRows: number; + invalidRows: number; + duplicateCount: number; + errors: ExpenseImportRowError[]; + duplicates: ExpenseImportDuplicateConflict[]; + canConfirmImport: boolean; +} + +export interface ExpenseImportRowError { + rowNumber: number; + field: string; + message: string; +} + +export interface ExpenseImportDuplicateConflict { + rowNumber: number; + expenseDate: string; // ISO date string + amount: number; + note: string | null; + existingMatches: ExistingExpenseMatch[]; +} + +export interface ExistingExpenseMatch { + expenseId: number; + expenseDate: string; // ISO date string + amount: number; + note: string | null; +} + +// Matches ConfirmExpenseImportRequest +export interface ConfirmExpenseImportRequest { + overrideAllDuplicates: boolean; + duplicateChoices: ExpenseDuplicateResolutionChoice[]; +} + +export interface ExpenseDuplicateResolutionChoice { + rowNumber: number; + resolution: 'keep-existing' | 'replace-with-import'; +} + +// Matches ExpenseImportSummaryResponse 
+export interface ExpenseImportSummaryResponse { + jobId: number; + totalRows: number; + importedRows: number; + skippedRows: number; + failedRows: number; +} + +// Matches ExpenseImportStatusResponse +export interface ExpenseImportStatusResponse { + jobId: number; + status: 'previewing' | 'awaiting-confirmation' | 'processing' | 'completed' | 'failed'; + totalRows: number; + validRows: number; + invalidRows: number; + duplicateCount: number; + summary: ExpenseImportSummaryResponse | null; +} +``` + +--- + +## Validation Rules (enforced at endpoint layer) + +| Field | Rule | HTTP status | Error message | +|-------|------|-------------|---------------| +| `file` | Required | 400 | "A CSV file is required." | +| `file` | Extension must be `.csv` | 400 | "Please upload a .csv file." | +| `file` | Size ≤ 5 MB | 400 | "File size must not exceed 5 MB." | +| CSV columns | Must contain `Date` and `Amount` | 400 | "Missing required columns: {list}" | +| `jobId` | Must exist and belong to authenticated rider | 403/404 | — | +| `Resolution` | Must be `keep-existing` or `replace-with-import` | 400 | "Invalid resolution value: {value}" | + +--- + +## Modified Endpoints + +No existing endpoints are modified by this feature. All new endpoints are additive. 
diff --git a/specs/016-csv-expense-import/data-model.md b/specs/016-csv-expense-import/data-model.md new file mode 100644 index 0000000..51847a9 --- /dev/null +++ b/specs/016-csv-expense-import/data-model.md @@ -0,0 +1,165 @@ +# Data Model: CSV Expense Import (Spec 016) + +**Feature**: 016-csv-expense-import +**Date**: 2026-04-20 +**Clarified**: 2026-04-20 +**Status**: Complete + +## Overview + +This feature introduces two new lightweight persistence entities to track the two-phase import workflow (preview → confirm). Existing expense entities from spec 015 are reused for the final persisted expense records. + +1. `ExpenseImportJob` tracks one CSV import lifecycle for a rider. +2. `ExpenseImportRow` tracks parsed row state, validation, and duplicate metadata. +3. Existing `ExpenseEntity` (spec 015) is used for final persisted expenses. + +--- + +## Entity: ExpenseImportJob + +Represents one uploaded CSV expense import request. + +| Column | Type | Nullable | Constraints / Notes | +|--------|------|----------|----------------------| +| `Id` | `long` | No | PK, auto-increment | +| `RiderId` | `long` | No | Required; rider-scoped ownership | +| `FileName` | `string` | No | MaxLength(255); stored for traceability | +| `TotalRows` | `int` | No | ≥ 0 | +| `ValidRows` | `int` | No | ≥ 0; excludes fully blank rows | +| `InvalidRows` | `int` | No | ≥ 0 | +| `ImportedRows` | `int` | No | ≥ 0; set after confirm | +| `SkippedRows` | `int` | No | ≥ 0; duplicates kept | +| `OverrideAllDuplicates` | `bool` | No | Default `false` | +| `Status` | `string` | No | Enum: `previewing`, `awaiting-confirmation`, `processing`, `completed`, `failed` | +| `LastError` | `string` | Yes | MaxLength(1000); failure summary if any | +| `CreatedAtUtc` | `DateTime` | No | Set on initial preview upload | +| `CompletedAtUtc` | `DateTime` | Yes | Set on completed or failed | + +### Validation invariants + +- `ValidRows + InvalidRows <= TotalRows` (blank rows are excluded from both counts) +- 
`ImportedRows + SkippedRows <= ValidRows` +- `Status = completed | failed` requires `CompletedAtUtc` to be set + +### State transitions + +``` +previewing → awaiting-confirmation → processing → completed + ↘ failed +[any state] → deleted (triggered by client-side navigation away from summary)``` + +- `previewing`: CSV has been received and is being parsed (transient; resolves quickly). +- `awaiting-confirmation`: Parse and validation complete; preview data available; waiting for rider to confirm or cancel. +- `processing`: Rider confirmed; expenses are being written. +- `completed`: All valid rows processed; summary available. +- `failed`: Unrecoverable parse or infrastructure error. + +--- + +## Entity: ExpenseImportRow + +Represents one parsed CSV row and its processing state. + +| Column | Type | Nullable | Constraints / Notes | +|--------|------|----------|----------------------| +| `Id` | `long` | No | PK, auto-increment | +| `ImportJobId` | `long` | No | FK → `ExpenseImportJob.Id` | +| `RowNumber` | `int` | No | 1-based CSV row index (excluding header) | +| `ExpenseDateLocal` | `DateOnly` | Yes | Null if date is unparseable | +| `Amount` | `decimal(10,2)` | Yes | Null if amount is invalid | +| `Notes` | `string` | Yes | MaxLength(500); raw value from CSV | +| `ValidationStatus` | `string` | No | Enum: `valid`, `invalid` | +| `ValidationErrorsJson` | `string` | Yes | Structured error array; set when `ValidationStatus = invalid` | +| `DuplicateStatus` | `string` | No | Enum: `none`, `duplicate` | +| `DuplicateResolution` | `string` | Yes | Enum: `keep-existing`, `replace-with-import`, `override-all`; null until rider resolves | +| `ProcessingStatus` | `string` | No | Enum: `pending`, `processed`, `skipped`, `failed` | +| `ExistingExpenseIdsJson` | `string` | Yes | JSON array of matching existing expense IDs for duplicate dialog context | +| `CreatedExpenseId` | `long` | Yes | Expense ID created or updated when `ProcessingStatus = processed` | + +### Validation 
invariants + +- `ValidationStatus = invalid` implies `ValidationErrorsJson` is not null +- `DuplicateStatus = duplicate` implies `ExistingExpenseIdsJson` is not null and contains at least one ID +- `ProcessingStatus = processed` implies `ValidationStatus = valid` + +--- + +## Derived model: DuplicateKey + +Duplicate detection key per incoming row. + +| Component | Source | +|-----------|--------| +| `Date` | `ExpenseImportRow.ExpenseDateLocal` | +| `Amount` | `ExpenseImportRow.Amount` (rounded to 2 decimal places) | + +A row is a duplicate when an existing active (non-deleted) rider expense matches both the date and the amount (to 2 decimal places). Intra-file rows are never compared against each other — only against existing history records. + +## Derived model: AmountNormalizationPipeline + +Applied in `CsvExpenseParser.NormalizeAmount` before decimal parsing: + +1. Trim leading/trailing whitespace +2. Strip leading currency symbol: `$`, `£`, `€`, `¥` +3. Remove commas (thousands separators) +4. Strip trailing ISO currency code via regex `\s*[A-Z]{3}$` (e.g., ` USD`, ` GBP`, ` EUR`) +5. Parse result as `decimal`; must be > 0 or row is invalid + +--- + +## Derived model: ImportPreviewSummary + +Returned to the client after preview completes. 
+ +| Field | Type | Notes | +|-------|------|-------| +| `JobId` | `long` | Correlates with the persisted import job | +| `FileName` | `string` | Original uploaded file name | +| `TotalRows` | `int` | Total non-blank rows parsed | +| `ValidRows` | `int` | Rows that passed all validation rules | +| `InvalidRows` | `int` | Rows that failed validation | +| `DuplicateCount` | `int` | Count of valid rows flagged as duplicates | +| `Errors` | `ImportRowError[]` | Field-level error details per invalid row | +| `Duplicates` | `ExpenseDuplicateConflict[]` | Duplicate details per flagged row | +| `CanConfirmImport` | `bool` | `true` when `ValidRows > 0` | + +--- + +## Derived model: ImportCompletionSummary + +Returned to the client after confirm + execute completes. + +| Field | Type | Notes | +|-------|------|-------| +| `JobId` | `long` | Correlates with the import job | +| `TotalRows` | `int` | Total non-blank rows processed | +| `ImportedRows` | `int` | Rows successfully created as expenses | +| `SkippedRows` | `int` | Rows skipped (duplicate kept) | +| `FailedRows` | `int` | Rows that could not be imported (validation or write error) | + +--- + +## Relationship map + +- One rider has many `ExpenseImportJob` records (session-scoped; deleted after summary is dismissed). +- One `ExpenseImportJob` has many `ExpenseImportRow` records (cascade-deleted with the job). +- One `ExpenseImportRow` may create one `ExpenseEntity` through the existing expense write service. + +--- + +## Database Indexes + +`ExpenseImportJob`: +- `IX_ExpenseImportJobs_RiderId` — (RiderId) for rider-scoped job queries + +`ExpenseImportRow`: +- `IX_ExpenseImportRows_ImportJobId` — (ImportJobId) for row lookup per job; cascade delete when job is deleted + +--- + +## EF Core notes + +- Both entities are added to `BikeTrackingDbContext` under `DbSet` and `DbSet`. +- A single EF Core migration (`AddExpenseImportTables`) creates both tables. 
+- Cascade delete: deleting an `ExpenseImportJob` deletes its `ExpenseImportRow` children. +- JSON columns (`ValidationErrorsJson`, `ExistingExpenseIdsJson`) are stored as `TEXT` in SQLite and serialized/deserialized in the application layer (no EF JSON column mapping required for SQLite compatibility). diff --git a/specs/016-csv-expense-import/plan.md b/specs/016-csv-expense-import/plan.md new file mode 100644 index 0000000..1fa6263 --- /dev/null +++ b/specs/016-csv-expense-import/plan.md @@ -0,0 +1,237 @@ +# Implementation Plan: CSV Expense Import + +**Branch**: `016-csv-expense-import` | **Date**: 2026-04-20 | **Clarified**: 2026-04-20 | **Spec**: [spec.md](./spec.md) +**Input**: Feature specification from `specs/016-csv-expense-import/spec.md` + +--- + +## Summary + +Add a CSV expense import workflow linked from the Expenses history page. Riders upload a CSV containing Date, Amount, and Note, review a validation preview with duplicate detection, resolve any conflicts, then confirm to import expense records. The import is synchronous (no enrichment, no real-time progress). Receipts cannot be imported — a UI note directs riders to attach receipts individually via the expense history edit flow. + +**Technical approach**: Two-phase synchronous import (preview → confirm). Lightweight `ExpenseImportJob` and `ExpenseImportRow` EF entities persist the import state between phases. New `CsvExpenseParser`, `ExpenseDuplicateDetector`, and `CsvExpenseImportService` application-layer classes. Four new Minimal API endpoints under `/api/expense-imports` (preview, confirm, status, delete). New frontend page at `/expenses/import` with upload, preview, duplicate resolution, and summary views. Link added to the Expenses history page. 
+ +--- + +## Technical Context + +**Language/Version**: C# 13 / .NET 10 (API), F# (domain unchanged), TypeScript 5 / React 19 (frontend) +**Primary Dependencies**: ASP.NET Core Minimal API, EF Core 9 (SQLite), xUnit, Vitest, Playwright, React Router v7, Vite +**Storage**: SQLite local file; additive `ExpenseImportJob` and `ExpenseImportRow` tables; existing `Expenses` table from spec 015 +**Testing**: xUnit (backend unit + integration), Vitest (frontend unit), Playwright (E2E) +**Target Platform**: Local user machine (Windows/macOS/Linux); devcontainer for development +**Project Type**: Local-first desktop web application (Aspire-orchestrated) +**Performance Goals**: Preview response < 2s for typical files (< 500 rows); confirm response < 2s; no background processing required +**Constraints**: No receipts in import; no SignalR; no enrichment; synchronous confirm+execute; existing spec 015 expense write path reused for creating imported expenses +**Scale/Scope**: Single-user local deployment; import files expected in tens to low hundreds of rows for typical use + +--- + +## Constitution Check + +| Principle | Check | Status | +|-----------|-------|--------| +| I — Clean Architecture / Ports-and-Adapters | Import application services isolated from endpoints; no file I/O in domain layer | PASS | +| I — No god services | Three focused services: parser, duplicate detector, import orchestrator | PASS | +| II — Pure/Impure Sandwich | CSV parsing and duplicate key computation are pure helpers; DB writes remain at application service edges | PASS | +| III — Event Sourcing | Imported expenses created via existing `RecordExpenseService` which emits `ExpenseRecorded` events | PASS | +| IV — TDD | Red-Green-Refactor mandatory; test plan in quickstart.md; failing tests before implementation | PASS | +| V — UX Consistency | Import page follows existing page/component structure; preview/error states follow ride import (spec 013) patterns | PASS | +| VI — Performance | Preview and 
confirm both synchronous and < 2s for expected data volumes | PASS | +| VII — Three-layer validation | React form + DataAnnotations DTOs + DB check constraints on Amount (from spec 015) | PASS | +| VIII — Security | Uploaded CSV content is parsed as data only; file name is sanitized; rider ownership validated on all endpoints | PASS | +| IX — Contract-first | API contracts in `contracts/api-contracts.md` defined before implementation | PASS | +| X — Additive | New import page, new endpoints, two new tables; no breaking changes to spec 015 expense API | PASS | +| TDD mandatory gate | PASS | Plan requires user confirmation on failing tests before code implementation | +| Migration test coverage policy | PASS | New EF migration must include migration policy test entry | +| Spec completion gate | PASS | Completion requires migration apply + backend tests + frontend lint/build/unit + E2E | + +--- + +## Project Structure + +### Documentation (this feature) + +```text +specs/016-csv-expense-import/ +├── plan.md ← this file +├── spec.md +├── research.md +├── data-model.md +├── quickstart.md +├── contracts/ +│ └── api-contracts.md +└── tasks.md +``` + +### Source Code — New Files + +```text +src/BikeTracking.Api/ +├── Application/ +│ └── ExpenseImports/ +│ ├── CsvExpenseParser.cs ← NEW: CSV parsing, header normalization, row validation +│ ├── ExpenseDuplicateDetector.cs ← NEW: duplicate key computation, match lookup +│ └── CsvExpenseImportService.cs ← NEW: orchestrates preview and confirm flows +├── Contracts/ +│ └── ExpenseImportContracts.cs ← NEW: all import request/response DTOs +├── Endpoints/ +│ └── ExpenseImportEndpoints.cs ← NEW: POST preview, POST confirm, GET status +└── Infrastructure/ + └── Persistence/ + ├── Entities/ + │ ├── ExpenseImportJobEntity.cs ← NEW + │ └── ExpenseImportRowEntity.cs ← NEW + └── Migrations/ + └── {timestamp}_AddExpenseImportTables.cs ← NEW + +src/BikeTracking.Api.Tests/ +└── Application/ + └── ExpenseImports/ + ├── 
CsvExpenseParserTests.cs ← NEW + ├── ExpenseDuplicateDetectorTests.cs ← NEW + └── CsvExpenseImportServiceTests.cs ← NEW + +src/BikeTracking.Frontend/src/ +├── pages/ +│ └── expenses/ +│ ├── ExpenseImportPage.tsx ← NEW +│ └── ExpenseImportPage.css ← NEW +├── components/ +│ └── expense-import/ +│ ├── ExpenseDuplicateResolutionPanel.tsx ← NEW: inline duplicate conflict UI +│ └── ExpenseDuplicateResolutionPanel.test.tsx ← NEW +└── services/ + └── expense-import-api.ts ← NEW: preview and confirm API calls +``` + +### Source Code — Modified Files + +```text +src/BikeTracking.Api/Infrastructure/Persistence/BikeTrackingDbContext.cs + # Add DbSet + DbSet + model config + +src/BikeTracking.Api/Program.cs + # Register CsvExpenseImportService, ExpenseDuplicateDetector, CsvExpenseParser + # Map ExpenseImportEndpoints + +src/BikeTracking.Frontend/src/App.tsx + # Add route: /expenses/import → ExpenseImportPage + +src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.tsx + # Add "Import Expenses" button/link to header area + +src/BikeTracking.Api.Tests/Infrastructure/MigrationTestCoveragePolicyTests.cs + # Add AddExpenseImportTables migration entry +``` + +--- + +## Architecture Decisions + +### 1. Two-Phase Synchronous Import (No Background Job) + +Preview (phase 1) parses the CSV and detects duplicates, persisting results as `ExpenseImportJob` + `ExpenseImportRow` records in `awaiting-confirmation` status. Confirm (phase 2) reads the persisted rows, applies duplicate resolutions, and creates expenses via `RecordExpenseService`. Both phases return in < 2s for expected data volumes. No background threads, no polling, no SignalR required. + +### 2. Reuse Existing Expense Write Path; Partial Update for Replace-with-Import + +Confirmed import rows are created via the existing `RecordExpenseService` from spec 015. 
`Replace with Import` rows use `EditExpenseService`, but with **partial-update semantics for the note field**: the note is updated only when the incoming CSV row provides a non-blank note value. A blank CSV note preserves the existing expense's note unchanged. This prevents silently erasing notes that were entered manually but omitted from the CSV export. + +### 3. Duplicate Key: Date + Amount (2dp) — History-Only Scope + +Duplicate detection compares `(ExpenseDateLocal, Amount)` against active (non-deleted) rider expenses in history. Amount comparison uses 2 decimal places (`Math.Round(amount, 2)`) to avoid floating-point drift. + +**Intra-file scope**: Rows within the same CSV are never compared against each other. Two rows in the same file with identical date+amount are both treated as distinct import candidates and are both imported (each may independently match a history record and be flagged). This avoids silently dropping legitimate repeated expenses (e.g., two $5 coffee purchases on the same day) that happen to appear in the same CSV export. + +### 4. Session-Scoped Import Job Cleanup + +Import job records (`ExpenseImportJob` + child `ExpenseImportRow` rows) are session-scoped. They are deleted when the rider navigates away from the summary page. Cleanup is triggered client-side: `ExpenseImportPage` calls `DELETE /api/expense-imports/{jobId}` in its `useEffect` cleanup function (and on `beforeunload`). The server-side handler deletes the job + rows immediately (cascade delete) regardless of status. This keeps the database lean and requires no background cleanup job. + +If the delete call fails (e.g., network error), the orphaned job row has no impact on the expense history — the imported expenses were already committed. A periodic admin-level cleanup can purge orphaned jobs older than 24 hours as a safety net, but is not required for MVP. + +### 5. Receipt Exclusion + +Imported expenses are created with `ReceiptPath = null`. 
The import UI shows a persistent informational note: "Receipts cannot be imported. To add a receipt, find the expense in your history and use the edit option." No UI surface for receipt upload exists on the import page. + +### 6. Amount Normalization (Currency Symbols, Trailing Codes, Commas) + +Before decimal parsing, `CsvExpenseParser.NormalizeAmount` applies this pipeline in order: +1. Trim leading/trailing whitespace +2. Strip leading currency symbols: `$`, `£`, `€`, `¥` +3. Remove commas used as thousands separators (e.g., `1,250.00` → `1250.00`) +4. Strip trailing ISO currency codes via regex `\s*[A-Z]{3}$` (e.g., `25.00 USD` → `25.00`, `12.50 GBP` → `12.50`) + +If the resulting string does not parse to a positive decimal, the row is flagged as invalid. Per-cell size limits are not enforced separately; the 5 MB file cap and field-level validation (Note ≤ 500 chars, Amount must parse) are sufficient. + +--- + +## Test Plan (TDD Gates) + +### Backend Unit Tests + +**CsvExpenseParserTests** +- Valid CSV with Date, Amount, Note → returns 1 valid row +- Header matching case-insensitive ("AMOUNT" → Amount) +- Missing Date column → returns parse error +- Missing Amount column → returns parse error +- Amount with `$` prefix stripped and parsed correctly +- Amount with comma thousands separator (1,250.00) parsed correctly +- Amount with trailing currency code ("25.00 USD") stripped and parsed correctly +- Amount with trailing currency code ("12.50 GBP") stripped and parsed correctly +- Amount with unrecognized trailing text → row invalid +- Amount of 0 → row invalid +- Amount of -5 → row invalid +- Note exceeding 500 chars → row invalid +- Unparseable date → row invalid +- Blank row → row skipped +- Extra columns beyond Date/Amount/Note → silently ignored +- CSV with BOM prefix → parsed correctly + +**ExpenseDuplicateDetectorTests** +- Row with date+amount matching existing expense → flagged as duplicate +- Row with same date but different amount → not a duplicate +- 
Row with same amount but different date → not a duplicate +- Row matching a deleted expense → not a duplicate (IsDeleted=true excluded) +- Multiple rows with same date+amount → each flagged independently +- Two CSV rows with same date+amount (intra-file) → both imported; no intra-file duplicate check performed + +**CsvExpenseImportServiceTests** +- Preview with all valid rows → returns correct `ValidRows`, `InvalidRows`, `DuplicateCount` +- Preview with mixed valid/invalid → only valid rows in import candidates +- Confirm with `KeepExisting` resolution → duplicate row skipped, `SkippedRows` incremented +- Confirm with `ReplaceWithImport` + non-blank CSV note → existing expense note updated +- Confirm with `ReplaceWithImport` + blank CSV note → existing expense note preserved unchanged +- Confirm with `ReplaceWithImport` → date and amount always updated from CSV +- Confirm with `OverrideAllDuplicates=true` → all valid rows imported including duplicates +- Confirm happy path (no duplicates) → `ImportedRows` matches `ValidRows` + +### API Endpoint Integration Tests (xUnit) + +- `POST /api/expense-imports/preview` with valid CSV → 200 with preview response +- `POST /api/expense-imports/preview` with non-CSV file → 400 +- `POST /api/expense-imports/preview` with file > 5 MB → 400 +- `POST /api/expense-imports/preview` with missing required column → 400 +- `POST /api/expense-imports/{jobId}/confirm` with valid job ID → 200 with summary +- `POST /api/expense-imports/{jobId}/confirm` with wrong rider → 403 +- `POST /api/expense-imports/{jobId}/confirm` with expired/completed job → 409 +- `DELETE /api/expense-imports/{jobId}` with valid job ID → 204 and job+rows deleted +- `DELETE /api/expense-imports/{jobId}` with wrong rider → 403 + +### Frontend Unit Tests (Vitest) + +- `ExpenseImportPage` renders file upload control +- `ExpenseImportPage` shows preview table after upload +- `ExpenseImportPage` shows receipt-exclusion note +- `ExpenseImportPage` shows completion summary 
after confirm +- `ExpenseDuplicateResolutionPanel` renders both existing and incoming expense details +- `ExpenseDuplicateResolutionPanel` emits correct resolution choice on selection +- `ExpenseHistoryPage` renders "Import Expenses" button/link + +### E2E Tests (Playwright) + +- Upload valid CSV → preview shows correct row count → confirm → expenses appear in history +- Upload CSV with duplicate → preview shows conflict → choose Keep Existing → duplicate row absent from history +- Upload CSV with duplicate → preview shows conflict → choose Replace → expense updated in history +- Upload CSV with Override All Duplicates → all rows imported including duplicates +- Upload non-CSV → error message shown +- Navigate to import page without authentication → redirect to login diff --git a/specs/016-csv-expense-import/quickstart.md b/specs/016-csv-expense-import/quickstart.md new file mode 100644 index 0000000..a17323e --- /dev/null +++ b/specs/016-csv-expense-import/quickstart.md @@ -0,0 +1,229 @@ +# Developer Quickstart: CSV Expense Import + +**Feature**: 016-csv-expense-import +**Branch**: `016-csv-expense-import` +**Date**: 2026-04-20 + +## Overview + +This feature adds an Expenses-page-linked CSV import flow that supports upload, validation preview, duplicate conflict resolution, and synchronous confirm+import. Unlike the ride import (spec 013), there is no enrichment, no SignalR, and no background processing — the import completes synchronously in two HTTP requests. + +## Prerequisites + +- DevContainer running +- App launch: `dotnet run --project src/BikeTracking.AppHost` +- Spec 015 (Bike Expense Tracking) must be fully implemented — `RecordExpenseService`, `EditExpenseService`, `ExpenseEntity`, and `Expenses` table are required dependencies +- Follow strict TDD gate: write failing tests first, confirm failures before writing implementation + +## Implementation Order + +### Step 1: Define contracts first + +Create backend DTOs before any service code. 
```text +src/BikeTracking.Api/Contracts/ + ExpenseImportContracts.cs +``` + +Define all request and response types: +- `ExpenseImportPreviewResponse` +- `ExpenseImportRowErrorView` +- `ExpenseImportDuplicateView` +- `ExistingExpenseMatchView` +- `ConfirmExpenseImportRequest` +- `ExpenseDuplicateResolutionChoice` +- `ExpenseImportSummaryResponse` +- `ExpenseImportStatusResponse` + +### Step 2: Persistence and migration + +Add import job/row entities and EF mappings. + +```text +src/BikeTracking.Api/Infrastructure/Persistence/Entities/ + ExpenseImportJobEntity.cs + ExpenseImportRowEntity.cs +src/BikeTracking.Api/Infrastructure/Persistence/BikeTrackingDbContext.cs + # Add DbSet + # Add DbSet + # Configure cascade delete, indexes +src/BikeTracking.Api/Infrastructure/Persistence/Migrations/ + {timestamp}_AddExpenseImportTables.cs +``` + +Update migration policy test in `MigrationTestCoveragePolicyTests.cs`. + +### Step 3: CSV parser + +Build CSV parse/normalize/validate helpers. **Write tests before implementation (TDD gate).** + +```text +src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseParserTests.cs ← WRITE FIRST +src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseParser.cs +``` + +Parsing rules to implement: +- Case-insensitive header matching for Date, Amount, Note +- Skip fully blank rows +- Amount normalization pipeline: trim → strip leading `$£€¥` → remove commas → strip trailing ISO code (`USD`, `GBP`, `EUR`, etc.) 
via regex `\s*[A-Z]{3}$` → parse decimal +- Accept common date formats: YYYY-MM-DD, MM/DD/YYYY, M/D/YYYY, DD-MMM-YYYY, MMM DD YYYY +- Validate: Amount > 0, parseable date, Note ≤ 500 chars when present +- Extra columns silently ignored +- BOM-aware UTF-8/UTF-16 reading + +### Step 4: Duplicate detector + +**Write tests before implementation (TDD gate).** + +```text +src/BikeTracking.Api.Tests/Application/ExpenseImports/ExpenseDuplicateDetectorTests.cs ← WRITE FIRST +src/BikeTracking.Api/Application/ExpenseImports/ExpenseDuplicateDetector.cs +``` + +Key behavior: +- Duplicate key: `(ExpenseDateLocal, Math.Round(Amount, 2))` +- Load active (non-deleted) rider expenses from DB; build lookup dictionary keyed by `(date, amount)` +- Return matching existing expense IDs for each duplicate row +- Deleted expenses are excluded from duplicate matching +- **Intra-file rows are never compared against each other** — only against existing history records; two identical rows in the same CSV are both treated as independent import candidates + +### Step 5: Import orchestration service + +**Write tests before implementation (TDD gate).** + +```text +src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseImportServiceTests.cs ← WRITE FIRST +src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs +``` + +`CsvExpenseImportService` responsibilities: +- **Preview**: Parse CSV, validate rows, detect duplicates, persist `ExpenseImportJob` + `ExpenseImportRow` entities, return `ExpenseImportPreviewResponse` +- **Confirm**: Load persisted job + rows, apply duplicate resolutions, create expenses via `RecordExpenseService` (or update via `EditExpenseService` for `replace-with-import`), update job status to `completed`, return `ExpenseImportSummaryResponse` +- **Replace-with-Import partial note update**: when `EditExpenseService` is called for a `replace-with-import` row, pass the incoming note only when it is non-blank; omit the note field from the update payload 
when CSV note is blank so the existing note is preserved unchanged. + +### Step 6: Minimal API endpoints + +```text +src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs +``` + +Four endpoints: +- `POST /api/expense-imports/preview` — multipart form-data, returns preview +- `POST /api/expense-imports/{jobId}/confirm` — JSON body, returns summary +- `GET /api/expense-imports/{jobId}/status` — returns current job status (for page reload recovery) +- `DELETE /api/expense-imports/{jobId}` — deletes job + rows (called client-side on navigation away from summary) + +Register in `Program.cs`. + +### Step 7: Frontend API service + +```text +src/BikeTracking.Frontend/src/services/expense-import-api.ts +``` + +Typed functions: +- `previewExpenseImport(file: File): Promise` +- `confirmExpenseImport(jobId: number, request: ConfirmExpenseImportRequest): Promise` +- `getExpenseImportStatus(jobId: number): Promise` +- `deleteExpenseImport(jobId: number): Promise` — called on summary page unmount and beforeunload + +### Step 8: Frontend import page + +```text +src/BikeTracking.Frontend/src/pages/expenses/ + ExpenseImportPage.tsx + ExpenseImportPage.css +``` + +UI states: +1. **Upload** — file picker, receipts-excluded notice, upload button +2. **Preview** — row counts (valid, invalid, duplicates), error table, duplicate resolution panel, confirm/cancel buttons +3. **Processing** — brief loading indicator (synchronous, typically < 1s) +4. **Summary** — imported/skipped/failed counts, link to expense history + +> **Session cleanup**: On the Summary state, wire a `useEffect` cleanup function and a `beforeunload` listener that call `deleteExpenseImport(jobId)` so the import job is removed from the database when the rider navigates away or closes the tab. 
+ +Add `ExpenseDuplicateResolutionPanel` component: + +```text +src/BikeTracking.Frontend/src/components/expense-import/ + ExpenseDuplicateResolutionPanel.tsx + ExpenseDuplicateResolutionPanel.test.tsx +``` + +Shows each duplicate conflict with: +- Existing expense: date, amount, note +- Incoming row: date, amount, note +- Resolution options: "Keep Existing" | "Replace with Import" +- Global "Override All Duplicates" checkbox + +### Step 9: Wire route and navigation link + +```text +src/BikeTracking.Frontend/src/App.tsx + # Add route: /expenses/import → ExpenseImportPage + +src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.tsx + # Add "Import Expenses" button near existing expense entry controls +``` + +--- + +## TDD Red-Green Gate (mandatory) + +For each implementation step involving tests: + +1. **Write the test(s)** — target the specific behavior; tests must be meaningful +2. **Run tests** — confirm they fail (`dotnet test` or `npm run test:unit`) +3. **Show failure output to user** — confirm tests fail for the right reason +4. **Implement the code** — minimal code to make tests pass +5. **Run tests again** — confirm all pass +6. **Proceed to next step** + +Never write implementation before the corresponding test is red. 
+
+---
+
+## Key Validation Rules (implement in CsvExpenseParser)
+
+| Field | Rule | Error Message |
+|-------|------|---------------|
+| Date | Required; parseable date | "Date is required" / "Date is not a valid date" |
+| Amount | Required; > 0 after full normalization | "Amount is required" / "Amount must be greater than zero" |
+| Note | Optional; ≤ 500 characters | "Note must be 500 characters or fewer" |
+| Amount format | Strip leading `$£€¥`, commas, trailing ISO code, then parse | "Amount is not a valid number" |
+
+---
+
+## Sample Test CSV
+
+```csv
+Date,Amount,Note
+2026-01-10,25.00,Tube replacement
+2026-01-15,$12.50,Lube
+2026-01-15,$12.50,Second lube (same date+amount as above; both imported)
+2026-02-01,0,Zero amount should fail
+2026-02-05,-5.00,Negative should fail
+bad-date,10.00,Bad date row
+2026-03-01,"1,250.00","Wheel rebuild, labor"
+2026-03-15,"15.00 USD",Trailing currency code stripped
+2026-03-20,20.00,
+```
+
+Expected preview: 6 valid rows, 3 invalid rows (zero amount, negative amount, bad date). The final row (2026-03-20) has a blank Note and is a valid import candidate; its blank note only matters in the replace-with-import flow, where it preserves the existing note. The two identical rows (2026-01-15, $12.50) are both valid import candidates with no intra-file duplicate flagging.
+
+---
+
+## File Size and Format Notes
+
+- Maximum file size: 5 MB
+- Accepted extension: `.csv` only (validate both extension and MIME type `text/csv`, `application/csv`, `text/plain`)
+- Encoding: BOM-aware (UTF-8, UTF-16); default UTF-8 without BOM
+
+---
+
+## Receipts Notice (UI copy)
+
+> Receipts cannot be imported from CSV. To attach a receipt to an imported expense, find the expense in your Expense History and use the edit option.
+
+This notice MUST appear on the import page regardless of import state (upload, preview, summary). 
diff --git a/specs/016-csv-expense-import/research.md b/specs/016-csv-expense-import/research.md new file mode 100644 index 0000000..49be6b3 --- /dev/null +++ b/specs/016-csv-expense-import/research.md @@ -0,0 +1,89 @@ +# Research: CSV Expense Import + +**Feature**: 016-csv-expense-import +**Date**: 2026-04-20 +**Status**: Complete + +## Decision 1: Import processing model + +**Decision**: Use a two-phase synchronous import model (preview then confirm) without a persisted background job. Phase 1 uploads and validates the CSV, returning a preview response with a short-lived import token. Phase 2 accepts the token plus duplicate resolutions and executes the import synchronously, returning a completion summary. + +**Rationale**: +- Expense import has no enrichment (no gas price or weather lookups), so the processing time for typical imports (hundreds of rows) is well under one second. +- A persisted job with polling or real-time progress (as in spec 013) adds significant complexity — import job entities, status endpoints, and reconnect logic — that provides no user benefit when the import completes faster than the user can react. +- A short-lived server-side preview state (cached by job ID, expires in 30 minutes) allows the rider to review duplicates and make resolution choices before confirming, maintaining the same two-step UX as spec 013 without the background job overhead. + +**Alternatives considered**: +- Persisted import job with polling (spec 013 model): rejected because expense enrichment does not exist; there is no long-running work to track. +- Single-request upload+import (no preview): rejected because preview is required by spec for duplicate detection and validation feedback before committing. +- In-memory job keyed by session: rejected because server restarts between preview and confirm would lose the parsed state; a lightweight EF-persisted import record (status: `awaiting-confirmation`) avoids this. 
+
+**Resolution**: Use a lightweight persisted `ExpenseImportJob` entity (single table, minimal columns) to survive server restarts between preview and confirm. Once confirmed, the job status is set to `completed`; consistent with the session-only retention policy (FR-019), the job record and its rows are deleted when the rider navigates away from the summary page — only the imported expense records are retained. No background processing thread or SignalR hub is introduced.
+
+---
+
+## Decision 2: CSV schema and validation strategy
+
+**Decision**: Parse headers case-insensitively. Require `Date` and `Amount`; treat `Note` as optional. Validate each row independently and allow valid rows to proceed while reporting invalid rows.
+
+**Rationale**:
+- Matches existing expense validation expectations (date required, amount > 0, note ≤ 500 chars).
+- Independent row validation prevents one bad row from blocking the entire import.
+- Maintains user trust by showing specific field-level errors per row.
+- Stripping currency symbols and commas from Amount follows user expectation when exporting from spreadsheet tools.
+
+**Alternatives considered**:
+- Strict all-or-nothing file validation: rejected because it blocks valid data unnecessarily.
+- Flexible free-form column mapping UI in v1: rejected as out of scope.
+
+---
+
+## Decision 3: Duplicate detection policy
+
+**Decision**: Duplicate key is `(date, amount)` against existing active (non-deleted) rider expenses. Provide per-conflict choices (`keep existing`, `replace with import`) shown in the preview response. Also provide an `override-all-duplicates` bypass option.
+
+**Rationale**:
+- Date-only matching creates false positives when a rider has multiple expenses on the same day (e.g., fuel + parking).
+- Date+amount provides a low-false-positive key for personal expense tracking with small datasets.
+- Explicit resolution choices in the preview flow satisfy data-integrity and user override requirements (mirrors spec 013's approach).
+
+**Alternatives considered**:
+- Date-only duplicate key: rejected due to false positives for multi-expense days. 
+- Date+amount+note key: rejected for being too strict when source CSV omits notes. + +--- + +## Decision 4: Receipt handling in import + +**Decision**: Receipts are excluded from the CSV import flow entirely. Imported expenses are created without a receipt. The import UI displays a persistent note informing riders that receipts must be attached individually via the expense history edit page. + +**Rationale**: +- Bulk CSV import of binary receipt files would require multipart upload of potentially hundreds of files, complex path management, and significant error surface. +- Receipt attachment is low-frequency (most expenses don't have digital receipts); forcing batch import capability is over-engineering for v1. +- The spec requirement explicitly excludes receipts from import. + +--- + +## Decision 5: Currency symbol and format normalization + +**Decision**: Before parsing Amount to a decimal, strip leading/trailing whitespace, remove common currency symbols (`$`, `£`, `€`, `¥`), and remove thousands-separator commas. The resulting string must parse to a positive decimal or the row is flagged as invalid. + +**Rationale**: +- Users exporting from spreadsheet tools (Excel, Google Sheets) frequently include currency formatting in amount columns. +- Stripping these characters silently reduces friction without loss of data integrity, since the semantic value (numeric amount) is preserved. +- Negative values and zero are always invalid for expense amounts per spec 015. + +--- + +## Decision 6: Import page entry point + +**Decision**: Link "Import Expenses" from the Expenses history page (not the Settings page). + +**Rationale**: +- Expense import is contextually related to the expense history view, not general application settings. +- The ride CSV import (spec 013) is linked from Settings because it is a one-time historical data migration; expense import is expected to be used more regularly as riders maintain financial records from external tools. 
+- Proximity to the expense history list allows the rider to immediately verify imported expenses after the import completes. + +**Alternatives considered**: +- Link from Settings page: considered but rejected as the expense import is contextually closer to the expense history page. +- Link from both pages: deferred to a later phase if discoverability is identified as an issue. diff --git a/specs/016-csv-expense-import/spec.md b/specs/016-csv-expense-import/spec.md new file mode 100644 index 0000000..0c6bfcf --- /dev/null +++ b/specs/016-csv-expense-import/spec.md @@ -0,0 +1,153 @@ +# Feature Specification: CSV Expense Import + +**Feature Branch**: `016-csv-expense-import` +**Created**: 2026-04-20 +**Status**: Draft +**Input**: User description: "Allow importing of expenses. Follow the same overall approach as spec 013 (CSV Ride Import). Columns needed are Date, Amount, Note. Receipts can not be imported, only added in the expense history editing." + +## Clarifications + +### Session 2026-04-20 + +- Q: Should duplicate detection match on date only, or date + amount? → A: Date + Amount — flag as duplicate only when both date and amount match an existing expense. +- Q: Should the import be linked from the Expenses history page or from Settings? → A: Link from the Expenses history page (same vicinity as the existing expense entry button). +- Q: Is real-time progress (SignalR) needed since there is no enrichment? → A: No SignalR required — expenses have no gas price or weather enrichment, so import completes fast. Simple confirmation + synchronous execution with a summary on completion is sufficient. +- Q: What is the maximum file size? → A: 5 MB, matching the ride import limit. +- Q: Can the rider cancel a running import? → A: Import is synchronous (no enrichment); cancel is not applicable. Rider may navigate away and return — import result is available immediately upon completion. +- Q: What is the Note column max length? 
→ A: 500 characters, matching the expense note limit from spec 015. +- Q: When "Replace with Import" is chosen and the incoming CSV row has a blank Note, what happens to the existing note? → A: Preserve existing note — only date and amount are updated; a blank CSV Note never overwrites an existing note. +- Q: If a CSV contains two rows with the same date and amount (intra-file), is the second treated as a duplicate of the first? → A: No — intra-file rows are never compared against each other. Duplicate detection only compares incoming rows against already-saved expenses in history. +- Q: What should happen when an Amount value has trailing non-numeric text (e.g. "25.00 USD" or "12.50 GBP")? → A: Strip trailing currency codes — extract the leading numeric portion; if the result is a valid positive number, accept it. Unexpected formats that don't yield a valid positive decimal are still flagged as invalid. +- Q: How long should a completed import job and its parsed row data be retained? → A: Session only — the import job record and its rows are deleted once the rider navigates away from the summary page. There is no long-term import history. +- Q: Should the system enforce a per-cell size limit in addition to the 5 MB file cap? → A: File cap only — rely on the 5 MB file size limit and existing field-level validation (Note ≤ 500 chars, Amount must parse). No separate per-cell byte limit is needed. + +--- + +## User Scenarios & Testing *(mandatory)* + +### User Story 1 - Upload and Import an Expense CSV File (Priority: P1) + +A rider navigates to the Expenses page and selects "Import Expenses." They are presented with a dedicated import page where they can upload a CSV file containing historical expense data. The system reads the file, validates its structure and content, and — once the rider confirms — imports the expense records into their account. + +**Why this priority**: This is the core capability of the feature. 
Without the ability to upload, parse, and persist expense data from a CSV, no other functionality (duplicate handling, preview) matters. Receipts are intentionally excluded from import; they must be attached individually via the expense history edit flow. + +**Independent Test**: Can be fully tested by uploading a valid CSV with 5–10 expense rows and confirming each row appears in the rider's expense history with the correct date, amount, and note. + +**Acceptance Scenarios**: + +1. **Given** a logged-in rider on the Expenses page, **When** they click "Import Expenses," **Then** the system navigates to a dedicated CSV expense import page. +2. **Given** the import page is displayed, **When** the rider selects a valid CSV file with columns Date and Amount (Note is optional), **Then** the system parses the file and displays a preview of the rows to be imported (showing count and a sample). +3. **Given** a valid CSV has been parsed and previewed, **When** the rider confirms the import, **Then** expense records are created for each valid row, associated with the authenticated rider. +4. **Given** a CSV file with header variations (e.g., "amount" vs "Amount" vs "AMOUNT"), **When** uploaded, **Then** column matching is case-insensitive and succeeds. +5. **Given** a CSV with missing required columns (Date or Amount), **When** uploaded, **Then** the system rejects the file with a clear error message naming the missing columns. +6. **Given** a CSV with rows that fail validation (e.g., amount ≤ 0, unparseable date, note exceeding 500 characters), **When** parsed, **Then** the system highlights invalid rows with specific error messages and excludes them from import while allowing valid rows to proceed. +7. **Given** a completed import, **When** the rider views the summary, **Then** they see total rows processed, expenses imported, expenses skipped (duplicates kept), and rows failed (validation errors). +8. 
**Given** a CSV row with a Note value that exceeds 500 characters, **When** parsed, **Then** the row is flagged as invalid with a message indicating the note is too long. + +--- + +### User Story 2 - Duplicate Detection and Resolution (Priority: P1) + +During import, the system checks each row's date and amount against the rider's existing expense history. If a duplicate is found (an existing expense with the same date and the same amount), the rider is prompted with details showing the conflicting rows side-by-side. The rider can resolve each duplicate individually or use an "Override All Duplicates" option to bypass duplicate checking and import everything. + +**Why this priority**: Without duplicate handling, re-importing the same file could create repeated expense entries and corrupt financial totals. This is a data-integrity concern co-equal with the core import. + +**Independent Test**: Can be tested by first recording an expense for a specific date and amount, then importing a CSV containing a row for that same date and amount. Verify the duplicate conflict is surfaced with correct details. + +**Acceptance Scenarios**: + +1. **Given** the rider has an existing expense on 2026-03-15 with amount $25.00 and the CSV contains a row dated 2026-03-15 with amount 25.00, **When** the preview is processed, **Then** the system flags that row as a duplicate and displays the conflicting details (date, amount, note) for both the existing expense and the incoming row. +2. **Given** a duplicate conflict is displayed in the preview, **When** the rider chooses "Keep Existing" for that row, **Then** the CSV row is skipped and the import continues with remaining rows. +3. **Given** a duplicate conflict is displayed in the preview, **When** the rider chooses "Replace with Import," **Then** the existing expense is updated with the imported row's values (via a new edit event) and the import continues. +4. 
**Given** a CSV with multiple duplicate rows, **When** conflicts are displayed in the preview, **Then** the rider may also select "Override All Duplicates" to import all rows without further prompts (creating new expense records alongside existing ones).
+5. **Given** the rider enables "Override All Duplicates" before confirming the import, **When** duplicates are encountered, **Then** all rows are imported without any duplicate prompts, creating new expense records.
+6. **Given** two expenses on the same date but with different amounts (e.g., $25.00 and $40.00), **When** one is in the existing history and the other is in the CSV, **Then** no duplicate is flagged — the two amounts represent distinct expenses, and the CSV row is imported without prompting.
+
+---
+
+### User Story 3 - Navigation and Access from Expenses Page (Priority: P2)
+
+The import functionality is discoverable from the Expenses page via a clearly labeled link or button. The import page is only accessible to authenticated riders. Receipts cannot be imported; a note in the UI informs riders that receipts must be attached individually via the expense history edit flow.
+
+**Why this priority**: Navigation and access are low-complexity but necessary for the feature to be usable and discoverable in the app.
+
+**Independent Test**: Can be tested by logging in, navigating to the Expenses page, clicking "Import Expenses," and confirming the import page loads. Also verify unauthenticated access redirects to login.
+
+**Acceptance Scenarios**:
+
+1. **Given** a logged-in rider on the Expenses page, **When** they look for import functionality, **Then** a clearly labeled "Import Expenses" link or button is visible near the existing expense entry controls.
+2. **Given** the import page is displayed, **When** the rider reads the page instructions, **Then** a note is visible stating that receipts cannot be imported and must be attached individually via the expense history edit page.
+3. 
**Given** an unauthenticated user, **When** they attempt to access the import page directly via URL, **Then** they are redirected to the login screen. + +--- + +## Edge Cases + +- What happens when the CSV file is empty (header row only, no data rows)? → System shows a message: "No expense data found in the uploaded file." +- What happens when the CSV is extremely large (e.g., 10,000+ rows)? → System accepts the file but warns the rider before import that it may take a moment. The maximum accepted file size is 5 MB. +- What happens when the CSV uses different date formats (MM/DD/YYYY vs YYYY-MM-DD vs DD-MMM-YYYY)? → System attempts common date formats and rejects rows with completely unparseable dates, listing them as errors. +- What happens when the rider uploads a non-CSV file (e.g., .xlsx, .pdf)? → System rejects the file with an error: "Please upload a .csv file." +- What happens when a CSV row has extra columns beyond the expected three? → Extra columns are ignored silently. +- What happens when a CSV row has fewer columns than expected? → Note is optional and treated as empty when absent. Missing required columns (Date or Amount) cause that row to be flagged as invalid. +- What happens when a CSV row is fully empty (all columns blank)? → The row is skipped during parsing and excluded from preview/import totals. +- What happens when a CSV Amount value is formatted with currency symbols (e.g., "$25.00" or "£10")? → Currency symbols are stripped during parsing and the numeric value is extracted. If the result is not a valid positive decimal, the row is flagged as invalid. +- What happens when an Amount value has trailing text such as "25.00 USD" or "12.50 GBP"? → The system extracts the leading numeric portion by stripping the trailing currency code. If the result is a valid positive decimal, the row is accepted. Any trailing text that does not match a recognized currency code pattern causes the row to be flagged as invalid. 
+- What happens when a CSV Amount value is zero or negative? → The row is flagged as invalid with a message: "Amount must be greater than zero." +- What happens when the rider starts a second import while one is already in progress? → Import is synchronous and completes before the user can trigger a second. No concurrent-import guard is required. +- What happens when multiple expenses exist on the same date in the rider's history? → Duplicate detection flags a match only when both date and amount are identical. A rider with two expenses on the same date but different amounts (e.g., $10.00 and $25.00) would not trigger a duplicate for either if the CSV rows have different amounts. If a CSV row matches on date+amount with any existing expense, the preview flags all matching existing expenses alongside the incoming row. +- What happens when a CSV contains two rows with the same date and amount (intra-file)? → Both rows are treated as distinct import candidates. Duplicate detection only compares incoming rows against already-saved history, not against other rows within the same CSV. +- What happens to import job data after the rider leaves the summary page? → The import job record and all associated row records are deleted. Only the actual imported expense records are retained. The rider can review their imported expenses in expense history. + +--- + +## Known Limitations + +**Receipt Attachment**: Receipts cannot be imported via CSV. Riders who have scanned receipt images must attach them manually via the expense history edit page after import. This is a deliberate design decision to avoid file upload complexity during bulk import. + +**Preview Performance**: The preview phase (CSV parsing + duplicate detection) loads all rider expenses into memory and checks each CSV row against the full history. For riders with very large expense histories (1000+ expenses) or large CSV uploads (500+ rows), the preview operation may take a few seconds. 
**Mitigation**: A preview spinner is displayed during the preview phase. + +**No Progress Updates**: Because expense import has no enrichment (no gas price or weather lookups), the import is expected to complete within seconds for typical data volumes and does not require real-time progress notifications. + +--- + +## Requirements *(mandatory)* + +### Functional Requirements + +- **FR-001**: System MUST provide a dedicated "Import Expenses" page accessible from the Expenses page. +- **FR-002**: System MUST accept CSV file uploads with the following columns: Date, Amount, Note (Note is optional). +- **FR-003**: System MUST match CSV column headers case-insensitively (e.g., "date," "DATE," and "Date" all map to the Date column). +- **FR-004**: System MUST validate that the CSV contains at least the required columns: Date and Amount. +- **FR-005**: System MUST validate each row: parseable date, amount greater than zero, note ≤ 500 characters when provided. +- **FR-006**: System MUST display a preview of parsed CSV data (row count, valid count, invalid count, sample rows, any validation errors, and any detected duplicates) before the rider confirms the import. +- **FR-007**: System MUST create expense records for each valid non-skipped row, associated with the authenticated rider, preserving all provided fields (date, amount, note). +- **FR-008**: System MUST NOT import receipts. A note in the UI must inform the rider that receipts must be attached individually via the expense history edit page. +- **FR-009**: System MUST detect duplicate expenses by comparing each CSV row's date and amount against the rider's existing expense history. A duplicate is flagged only when both date and amount match. +- **FR-010**: System MUST present duplicate conflicts in the preview response, showing the existing expense and incoming row details (date, amount, note) for each conflict, with options to "Keep Existing" or "Replace with Import." 
When "Replace with Import" is chosen, the existing expense's date and amount are updated with the CSV row values; the note is updated only when the CSV row provides a non-blank note value — a blank CSV note preserves the existing note unchanged. +- **FR-011**: System MUST provide an "Override All Duplicates" option that bypasses all per-row duplicate checks and imports all valid rows (creating new expense records alongside existing ones). +- **FR-012**: System MUST display a completion summary after import finishes: total rows processed, expenses imported, expenses skipped (duplicates kept), and rows failed (validation errors). +- **FR-013**: System MUST reject non-CSV files with a clear error message. +- **FR-014**: System MUST enforce a maximum file size of 5 MB for uploaded CSV files. +- **FR-015**: System MUST restrict import page access to authenticated riders only. +- **FR-016**: System MUST skip fully empty CSV rows (all mapped fields blank) during parsing, excluding those rows from preview/import totals and validation counts. +- **FR-017**: System MUST normalize Amount values before parsing: strip leading/trailing whitespace, remove leading currency symbols (`$`, `£`, `€`, `¥`), remove trailing ISO currency codes (e.g., `USD`, `GBP`, `EUR`), and remove thousands-separator commas. If the resulting string does not parse to a valid positive decimal, the row is flagged as invalid. +- **FR-018**: System MUST handle Amount values with commas as thousands separators (e.g., "1,250.00" → 1250.00). +- **FR-019**: System MUST delete the import job record and all associated import row records when the rider navigates away from the import summary page. Import job data is session-scoped and is not retained beyond the current import session. + +### Key Entities + +- **Expense Import Job**: Represents a single CSV expense import operation. 
Attributes: rider identity, upload timestamp, file name, total row count, valid row count, invalid row count, imported row count, skipped row count, status (previewing, awaiting-confirmation, processing, completed, failed). +- **Expense Import Row**: A single parsed row from the CSV. Attributes: row number, date, amount, note, validation status (valid/invalid with error details), duplicate status (none/duplicate), resolution (import/skip/override). +- **Expense Import Summary**: The outcome of a completed import. Attributes: total rows, expenses imported count, expenses skipped count, rows failed count. +- **Duplicate Conflict**: Represents a date+amount conflict between an incoming CSV row and an existing expense. Attributes: conflicting date, conflicting amount, existing expense details (date, amount, note), incoming row details. + +## Assumptions + +- The rider's CSV decoding is BOM-aware. UTF-8 and UTF-16 BOM-encoded files are supported; files without BOM default to UTF-8 decoding. +- Date formats attempted during parsing include: YYYY-MM-DD, MM/DD/YYYY, M/D/YYYY, DD-MMM-YYYY, and MMM DD YYYY. Dates that don't match any recognized format are flagged as invalid. +- The Note column may contain free-text of up to 500 characters. +- Amount values may include a leading currency symbol (`$`, `£`, `€`, `¥`) and thousands separators (commas). These are stripped before numeric parsing. +- A 5 MB file size limit accommodates roughly 50,000+ expense rows, which far exceeds expected usage for a personal commute tracker. +- The import is synchronous (completes in a single request/response cycle for preview, then a second for confirm+execute). No background job or SignalR is required. +- The "Override All Duplicates" option creates new expense records alongside existing ones (does not replace or merge). +- Imported expenses have no receipt attached. Receipts must be added individually via the expense history edit page. 
diff --git a/specs/016-csv-expense-import/tasks.md b/specs/016-csv-expense-import/tasks.md new file mode 100644 index 0000000..1a12089 --- /dev/null +++ b/specs/016-csv-expense-import/tasks.md @@ -0,0 +1,150 @@ +# Tasks: CSV Expense Import + +**Input**: Design documents from `/specs/016-csv-expense-import/` +**Prerequisites**: `plan.md` (required), `spec.md` (required for user stories), `research.md`, `data-model.md`, `contracts/api-contracts.md`, `quickstart.md` +**Dependency**: Spec 015 (Bike Expense Tracking) must be fully implemented — `RecordExpenseService`, `EditExpenseService`, `ExpenseEntity`, and `Expenses` table are required. + +**Tests**: Tests are required for this feature. Strict TDD gate applies: write failing tests before each implementation step. + +**Organization**: Tasks are grouped by user story so each story can be implemented and tested independently. + +--- + +## Phase 1: Setup (Shared Infrastructure) + +**Purpose**: Prepare base folders, service registration scaffolding, and frontend route placeholders. + +- [X] T001 Create `src/BikeTracking.Api/Application/ExpenseImports/` folder +- [X] T002 Create `src/BikeTracking.Frontend/src/pages/expenses/` import page placeholder (`ExpenseImportPage.tsx`, `ExpenseImportPage.css`) +- [ ] T003 Create `src/BikeTracking.Frontend/src/components/expense-import/` folder +- [X] T004 Create `src/BikeTracking.Frontend/src/services/expense-import-api.ts` placeholder + +--- + +## Phase 2: Foundational (Blocking Prerequisites) + +**Purpose**: Define contracts, persistence model, and endpoint wiring that all stories depend on. + +**CRITICAL**: No user story work can begin until this phase is complete. 
+ +- [X] T005 Create import DTO contracts in `src/BikeTracking.Api/Contracts/ExpenseImportContracts.cs` +- [X] T006 [P] Create `ExpenseImportJobEntity` in `src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportJobEntity.cs` +- [X] T007 [P] Create `ExpenseImportRowEntity` in `src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportRowEntity.cs` +- [X] T008 Configure import entity mappings (DbSet, indexes, cascade delete) in `src/BikeTracking.Api/Infrastructure/Persistence/BikeTrackingDbContext.cs` +- [X] T009 Create endpoint shell in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` +- [X] T010 Register import services and endpoint mapping in `src/BikeTracking.Api/Program.cs` +- [X] T011 Create EF migration `AddExpenseImportTables` in `src/BikeTracking.Api/Infrastructure/Persistence/Migrations/` +- [X] T012 Add `AddExpenseImportTables` migration entry to `src/BikeTracking.Api.Tests/Infrastructure/MigrationTestCoveragePolicyTests.cs` + +**Checkpoint**: Foundation ready. User story implementation can begin. + +--- + +## Phase 3: User Story 1 — Upload and Import CSV File (Priority: P1) + +**Goal**: Allow rider to upload a CSV with Date, Amount, Note; validate rows; preview results; confirm to import valid rows. Receipts excluded. + +**Independent Test**: Upload a CSV with valid and invalid rows; confirm preview shows errors and only valid non-skipped rows are imported after confirmation. 
+ +### Tests for User Story 1 + +- [X] T013 [P] [US1] Add parser header/required-column tests in `src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseParserTests.cs` +- [X] T014 [P] [US1] Add row validation tests (date, amount, note rules; full amount normalization pipeline including trailing ISO currency codes) in `src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseParserTests.cs` +- [X] T015 [P] [US1] Add preview endpoint contract tests in `src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs` +- [X] T016 [P] [US1] Add frontend preview rendering unit tests in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.test.tsx` +- [ ] T017 [US1] Add E2E happy-path upload-preview-confirm-import test in `src/BikeTracking.Frontend/tests/e2e/expense-import.spec.ts` +- [ ] T018 [US1] Add non-CSV upload rejection integration test in `src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs` +- [ ] T019 [US1] Add frontend non-CSV error rendering unit test in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.test.tsx` +- [ ] T020 [US1] Add oversized upload (> 5 MB) endpoint integration test in `src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs` +- [ ] T021 [US1] Add oversized-file frontend error unit test in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.test.tsx` +- [X] T022 [US1] Add receipts-excluded notice unit test (notice present in all page states) in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.test.tsx` + +### Implementation for User Story 1 + +- [X] T023 [US1] Implement CSV parsing, header normalization, and `NormalizeAmount` pipeline (leading symbol strip → comma removal → trailing ISO code strip → decimal parse) in `src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseParser.cs` +- [X] T024 [US1] Implement row validation rules (date parsing, amount > 0, currency stripping, note length) in 
`src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseParser.cs` +- [X] T025 [US1] Implement preview orchestration (parse → validate → persist job + rows) in `src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs` +- [X] T026 [US1] Implement confirm + execute orchestration (load rows → apply resolutions → create expenses → update job) in `src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs` +- [X] T027 [US1] Implement preview endpoint (`POST /api/expense-imports/preview`) in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` +- [X] T028 [US1] Implement confirm endpoint (`POST /api/expense-imports/{jobId}/confirm`) in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` +- [X] T029 [US1] Implement status endpoint (`GET /api/expense-imports/{jobId}/status`) in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` +- [X] T030 [US1] Implement frontend import API service functions in `src/BikeTracking.Frontend/src/services/expense-import-api.ts` +- [X] T031 [US1] Build import page upload and preview UI in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx` +- [X] T032 [US1] Build import page completion summary UI in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx` +- [X] T033 [US1] Add receipts-excluded notice (persistent across all page states) in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx` +- [X] T034 [US1] Add import page styles in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.css` +- [X] T035 [US1] Wire `/expenses/import` route in `src/BikeTracking.Frontend/src/App.tsx` +- [X] T036 [US1] Enforce non-CSV file validation (extension + MIME type) in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` +- [X] T037 [US1] Enforce 5 MB upload limit in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` + +**Checkpoint**: User Story 1 is independently functional (upload, validate, preview, confirm, import, summary). 
+ +--- + +## Phase 4: User Story 2 — Duplicate Detection and Resolution (Priority: P1) + +**Goal**: Detect duplicates by date+amount; allow keep existing, replace with import, or override-all behavior. + +**Independent Test**: Import a CSV containing at least one date+amount duplicate; verify duplicate conflict is shown in preview; verify resolution choice controls final import outcome. + +### Tests for User Story 2 + +- [ ] T038 [P] [US2] Add duplicate key and match tests (including: intra-file rows never compared against each other; deleted expenses not matched) in `src/BikeTracking.Api.Tests/Application/ExpenseImports/ExpenseDuplicateDetectorTests.cs` +- [ ] T039 [P] [US2] Add duplicate resolution orchestration tests (including: `replace-with-import` + blank CSV note → existing note preserved; `replace-with-import` + non-blank note → note updated) in `src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseImportServiceTests.cs` +- [X] T040 [P] [US2] Add confirm endpoint duplicate resolution tests in `src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs` +- [ ] T041 [P] [US2] Add duplicate resolution panel unit tests in `src/BikeTracking.Frontend/src/components/expense-import/ExpenseDuplicateResolutionPanel.test.tsx` +- [ ] T042 [US2] Add E2E duplicate keep-existing test in `src/BikeTracking.Frontend/tests/e2e/expense-import.spec.ts` +- [ ] T043 [US2] Add E2E duplicate replace-with-import test in `src/BikeTracking.Frontend/tests/e2e/expense-import.spec.ts` +- [ ] T044 [US2] Add E2E override-all-duplicates test in `src/BikeTracking.Frontend/tests/e2e/expense-import.spec.ts` + +### Implementation for User Story 2 + +- [X] T045 [US2] Implement duplicate detection by date+amount in `src/BikeTracking.Api/Application/ExpenseImports/ExpenseDuplicateDetector.cs` +- [X] T046 [US2] Integrate duplicate detection into preview orchestration in `src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs` +- [X] T047 [US2] Implement 
`keep-existing` resolution (skip row, increment `SkippedRows`) in `src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs` +- [X] T048 [US2] Implement `replace-with-import` resolution with **partial-note semantics** via `EditExpenseService`: update date and amount always; update note only when CSV note is non-blank; preserve existing note when CSV note is blank in `src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs` +- [X] T049 [US2] Implement `override-all-duplicates` bypass (import all valid rows including duplicates) in `src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs` +- [X] T050 [US2] Return duplicate conflict details in preview endpoint response in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` +- [ ] T051 [US2] Build `ExpenseDuplicateResolutionPanel` component with per-row and override-all controls in `src/BikeTracking.Frontend/src/components/expense-import/ExpenseDuplicateResolutionPanel.tsx` +- [ ] T052 [US2] Integrate `ExpenseDuplicateResolutionPanel` into `ExpenseImportPage` preview state in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx` + +**Checkpoint**: User Story 2 is independently functional (duplicate detection, resolution, override-all). + +--- + +## Phase 5: User Story 3 — Navigation and Access from Expenses Page (Priority: P2) + +**Goal**: "Import Expenses" is discoverable from the Expenses history page; import page is auth-guarded; receipts-excluded notice is visible. + +**Independent Test**: Log in, navigate to Expenses history page, click "Import Expenses," confirm import page loads; attempt unauthenticated access, confirm redirect to login. 
+ +### Tests for User Story 3 + +- [X] T053 [P] [US3] Add unit test confirming "Import Expenses" button/link renders in `ExpenseHistoryPage` in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.test.tsx` (or existing test file) +- [X] T054 [P] [US3] Add DELETE endpoint integration tests in `src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs` — valid job → 204 and job+rows deleted; wrong rider → 403; job not found → 404 (idempotent) +- [ ] T055 [US3] Add E2E test: navigation from Expenses page to import page in `src/BikeTracking.Frontend/tests/e2e/expense-import.spec.ts` +- [ ] T056 [US3] Add E2E test: unauthenticated access to `/expenses/import` redirects to login in `src/BikeTracking.Frontend/tests/e2e/expense-import.spec.ts` + +### Implementation for User Story 3 + +- [X] T057 [US3] Implement `DELETE /api/expense-imports/{jobId}` endpoint (ownership check, cascade delete of job + rows, 204 response) in `src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs` +- [X] T058 [US3] Add `deleteExpenseImport(jobId: number): Promise<void>` function to `src/BikeTracking.Frontend/src/services/expense-import-api.ts` +- [X] T059 [US3] Wire `useEffect` cleanup and `beforeunload` listener on summary state to call `deleteExpenseImport(jobId)` in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx` +- [X] T060 [US3] Add "Import Expenses" button/link to `ExpenseHistoryPage` header area in `src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.tsx` +- [X] T061 [US3] Ensure `/expenses/import` route is covered by the existing auth guard in `src/BikeTracking.Frontend/src/App.tsx` + +**Checkpoint**: User Story 3 is independently functional (navigation link, auth guard, receipts notice, session cleanup on summary dismiss). + +--- + +## Phase 6: Completion Gate + +**Purpose**: Verify all quality gates before the feature branch is considered complete.
+ +- [ ] T062 Run `dotnet test BikeTracking.slnx` and confirm all backend tests pass +- [ ] T063 Run EF migration (`dotnet ef database update`) and confirm schema applies cleanly +- [ ] T064 Run `npm run lint` from `src/BikeTracking.Frontend` and confirm no lint errors +- [ ] T065 Run `npm run build` from `src/BikeTracking.Frontend` and confirm clean build +- [ ] T066 Run `npm run test:unit` from `src/BikeTracking.Frontend` and confirm all unit tests pass +- [ ] T067 Run `npm run test:e2e` from `src/BikeTracking.Frontend` against live Aspire stack and confirm all E2E tests pass +- [ ] T068 Run `csharpier format .` from repo root and confirm no formatting changes required diff --git a/specs/017-create-feature-branch/plan.md b/specs/017-create-feature-branch/plan.md new file mode 100644 index 0000000..5a2fafe --- /dev/null +++ b/specs/017-create-feature-branch/plan.md @@ -0,0 +1,104 @@ +# Implementation Plan: [FEATURE] + +**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link] +**Input**: Feature specification from `/specs/[###-feature-name]/spec.md` + +**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/plan-template.md` for the execution workflow. 
+ +## Summary + +[Extract from feature spec: primary requirement + technical approach from research] + +## Technical Context + + + +**Language/Version**: [e.g., Python 3.11, Swift 5.9, Rust 1.75 or NEEDS CLARIFICATION] +**Primary Dependencies**: [e.g., FastAPI, UIKit, LLVM or NEEDS CLARIFICATION] +**Storage**: [if applicable, e.g., PostgreSQL, CoreData, files or N/A] +**Testing**: [e.g., pytest, XCTest, cargo test or NEEDS CLARIFICATION] +**Target Platform**: [e.g., Linux server, iOS 15+, WASM or NEEDS CLARIFICATION] +**Project Type**: [e.g., library/cli/web-service/mobile-app/compiler/desktop-app or NEEDS CLARIFICATION] +**Performance Goals**: [domain-specific, e.g., 1000 req/s, 10k lines/sec, 60 fps or NEEDS CLARIFICATION] +**Constraints**: [domain-specific, e.g., <200ms p95, <100MB memory, offline-capable or NEEDS CLARIFICATION] +**Scale/Scope**: [domain-specific, e.g., 10k users, 1M LOC, 50 screens or NEEDS CLARIFICATION] + +## Constitution Check + +*GATE: Must pass before Phase 0 research. 
Re-check after Phase 1 design.* + +[Gates determined based on constitution file] + +## Project Structure + +### Documentation (this feature) + +```text +specs/[###-feature]/ +├── plan.md # This file (/speckit.plan command output) +├── research.md # Phase 0 output (/speckit.plan command) +├── data-model.md # Phase 1 output (/speckit.plan command) +├── quickstart.md # Phase 1 output (/speckit.plan command) +├── contracts/ # Phase 1 output (/speckit.plan command) +└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan) +``` + +### Source Code (repository root) + + +```text +# [REMOVE IF UNUSED] Option 1: Single project (DEFAULT) +src/ +├── models/ +├── services/ +├── cli/ +└── lib/ + +tests/ +├── contract/ +├── integration/ +└── unit/ + +# [REMOVE IF UNUSED] Option 2: Web application (when "frontend" + "backend" detected) +backend/ +├── src/ +│ ├── models/ +│ ├── services/ +│ └── api/ +└── tests/ + +frontend/ +├── src/ +│ ├── components/ +│ ├── pages/ +│ └── services/ +└── tests/ + +# [REMOVE IF UNUSED] Option 3: Mobile + API (when "iOS/Android" detected) +api/ +└── [same as backend above] + +ios/ or android/ +└── [platform-specific structure: feature modules, UI flows, platform tests] +``` + +**Structure Decision**: [Document the selected structure and reference the real +directories captured above] + +## Complexity Tracking + +> **Fill ONLY if Constitution Check has violations that must be justified** + +| Violation | Why Needed | Simpler Alternative Rejected Because | +|-----------|------------|-------------------------------------| +| [e.g., 4th project] | [current need] | [why 3 projects insufficient] | +| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] | diff --git a/src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseParserTests.cs b/src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseParserTests.cs new file mode 100644 index 0000000..add6008 --- /dev/null +++ 
b/src/BikeTracking.Api.Tests/Application/ExpenseImports/CsvExpenseParserTests.cs @@ -0,0 +1,54 @@ +using BikeTracking.Api.Application.ExpenseImports; + +namespace BikeTracking.Api.Tests.Application.ExpenseImports; + +public sealed class CsvExpenseParserTests +{ + private readonly CsvExpenseParser parser = new(); + + [Fact] + public void Parse_WithUtf8BomAndCaseInsensitiveHeaders_ParsesRows() + { + var csv = "\uFEFFDATE,amount,nOtE\r\n2026-04-01,$25.00,Chain lube\r\n"; + + var result = parser.Parse(csv); + + var row = Assert.Single(result.Rows); + Assert.Equal(1, row.RowNumber); + Assert.Equal("2026-04-01", row.Date); + Assert.Equal("$25.00", row.Amount); + Assert.Equal("Chain lube", row.Note); + } + + [Fact] + public void Parse_WithMissingAmountHeader_Throws() + { + var csv = "Date,Note\n2026-04-01,Missing amount"; + + var exception = Assert.Throws(() => parser.Parse(csv)); + + Assert.Contains("Amount", exception.Message); + } + + [Theory] + [InlineData("$1,250.00 USD", "1250.00")] + [InlineData("£12.50 GBP", "12.50")] + [InlineData(" 25.00 EUR ", "25.00")] + public void NormalizeAmount_StripsCurrencyFormatting(string raw, string expected) + { + var normalized = parser.NormalizeAmount(raw); + + Assert.Equal(expected, normalized); + } + + [Fact] + public void ValidateRow_WithTooLongNote_ReturnsNoteError() + { + var row = new ParsedExpenseCsvRow(3, "2026-04-01", "15.50", new string('n', 501)); + + var errors = parser.ValidateRow(row); + + var error = Assert.Single(errors); + Assert.Equal("Note", error.Field); + } +} diff --git a/src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs b/src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs new file mode 100644 index 0000000..7f45ffe --- /dev/null +++ b/src/BikeTracking.Api.Tests/Endpoints/ExpenseImportEndpointsTests.cs @@ -0,0 +1,307 @@ +using System.Net; +using System.Net.Http.Json; +using BikeTracking.Api.Application.ExpenseImports; +using BikeTracking.Api.Application.Expenses; +using 
BikeTracking.Api.Contracts; +using BikeTracking.Api.Endpoints; +using BikeTracking.Api.Infrastructure.Persistence; +using BikeTracking.Api.Infrastructure.Persistence.Entities; +using BikeTracking.Api.Infrastructure.Security; +using Microsoft.AspNetCore.TestHost; +using Microsoft.EntityFrameworkCore; + +namespace BikeTracking.Api.Tests.Endpoints; + +public sealed class ExpenseImportEndpointsTests +{ + [Fact] + public async Task PostPreview_WithValidCsv_ReturnsPreviewSummary() + { + await using var host = await ExpenseImportApiHost.StartAsync(); + var userId = await host.SeedUserAsync("expense-import-preview"); + + using var form = BuildCsvForm( + "expenses.csv", + "Date,Amount,Note\n2026-04-01,12.50,Coffee\n2026-04-02,0,Invalid" + ); + + var response = await PostMultipartAsAuthAsync( + host.Client, + "/api/expense-imports/preview", + form, + userId + ); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var payload = await response.Content.ReadFromJsonAsync(); + Assert.NotNull(payload); + Assert.Equal(2, payload.TotalRows); + Assert.Equal(1, payload.ValidRows); + Assert.Equal(1, payload.InvalidRows); + Assert.Single(payload.Errors); + } + + [Fact] + public async Task PostConfirm_WithDuplicateKeepExisting_ReturnsSummaryAndSkipsRow() + { + await using var host = await ExpenseImportApiHost.StartAsync(); + var userId = await host.SeedUserAsync("expense-import-confirm"); + await host.SeedExpenseAsync(userId, new DateTime(2026, 4, 1), 12.50m, "Original note"); + + using var form = BuildCsvForm( + "expenses.csv", + "Date,Amount,Note\n2026-04-01,12.50,Imported note" + ); + var previewResponse = await PostMultipartAsAuthAsync( + host.Client, + "/api/expense-imports/preview", + form, + userId + ); + var previewPayload = + await previewResponse.Content.ReadFromJsonAsync(); + Assert.NotNull(previewPayload); + + var confirmResponse = await PostJsonAsAuthAsync( + host.Client, + $"/api/expense-imports/{previewPayload.JobId}/confirm", + new 
ConfirmExpenseImportRequest(false, []), + userId + ); + + Assert.Equal(HttpStatusCode.OK, confirmResponse.StatusCode); + var summary = + await confirmResponse.Content.ReadFromJsonAsync<ExpenseImportSummaryResponse>(); + Assert.NotNull(summary); + Assert.Equal(0, summary.ImportedRows); + Assert.Equal(1, summary.SkippedRows); + } + + [Fact] + public async Task PostConfirm_WithReplaceWithImportAndBlankNote_PreservesExistingNote() + { + await using var host = await ExpenseImportApiHost.StartAsync(); + var userId = await host.SeedUserAsync("expense-import-replace"); + var expenseId = await host.SeedExpenseAsync( + userId, + new DateTime(2026, 4, 1), + 12.50m, + "Keep me" + ); + + using var form = BuildCsvForm("expenses.csv", "Date,Amount,Note\n2026-04-01,12.50,"); + var previewResponse = await PostMultipartAsAuthAsync( + host.Client, + "/api/expense-imports/preview", + form, + userId + ); + var previewPayload = + await previewResponse.Content.ReadFromJsonAsync<ExpenseImportPreviewResponse>(); + Assert.NotNull(previewPayload); + + var confirmResponse = await PostJsonAsAuthAsync( + host.Client, + $"/api/expense-imports/{previewPayload.JobId}/confirm", + new ConfirmExpenseImportRequest( + false, + [new ExpenseDuplicateResolutionChoice(1, "replace-with-import")] + ), + userId + ); + + Assert.Equal(HttpStatusCode.OK, confirmResponse.StatusCode); + + await using var scope = host.App.Services.CreateAsyncScope(); + var dbContext = scope.ServiceProvider.GetRequiredService<BikeTrackingDbContext>(); + var expense = await dbContext.Expenses.SingleAsync(current => current.Id == expenseId); + Assert.Equal(12.50m, expense.Amount); + Assert.Equal("Keep me", expense.Notes); + Assert.Equal(2, expense.Version); + } + + [Fact] + public async Task Delete_RemovesJobAndRows() + { + await using var host = await ExpenseImportApiHost.StartAsync(); + var userId = await host.SeedUserAsync("expense-import-delete"); + + using var form = BuildCsvForm("expenses.csv", "Date,Amount,Note\n2026-04-01,12.50,Coffee"); + var previewResponse = await PostMultipartAsAuthAsync( + host.Client,
+ "/api/expense-imports/preview", + form, + userId + ); + var previewPayload = + await previewResponse.Content.ReadFromJsonAsync(); + Assert.NotNull(previewPayload); + + var deleteResponse = await DeleteAsAuthAsync( + host.Client, + $"/api/expense-imports/{previewPayload.JobId}", + userId + ); + Assert.Equal(HttpStatusCode.NoContent, deleteResponse.StatusCode); + + await using var scope = host.App.Services.CreateAsyncScope(); + var dbContext = scope.ServiceProvider.GetRequiredService(); + Assert.False(await dbContext.ExpenseImportJobs.AnyAsync()); + Assert.False(await dbContext.ExpenseImportRows.AnyAsync()); + } + + private static MultipartFormDataContent BuildCsvForm(string fileName, string csvContent) + { + var form = new MultipartFormDataContent(); + var fileContent = new ByteArrayContent(System.Text.Encoding.UTF8.GetBytes(csvContent)); + form.Add(fileContent, "file", fileName); + return form; + } + + private static async Task PostMultipartAsAuthAsync( + HttpClient client, + string uri, + MultipartFormDataContent form, + long userId + ) + { + using var request = new HttpRequestMessage(HttpMethod.Post, uri) { Content = form }; + request.Headers.Add("X-User-Id", userId.ToString()); + return await client.SendAsync(request); + } + + private static async Task PostJsonAsAuthAsync( + HttpClient client, + string uri, + T payload, + long userId + ) + { + using var request = new HttpRequestMessage(HttpMethod.Post, uri) + { + Content = JsonContent.Create(payload), + }; + request.Headers.Add("X-User-Id", userId.ToString()); + return await client.SendAsync(request); + } + + private static async Task DeleteAsAuthAsync( + HttpClient client, + string uri, + long userId + ) + { + using var request = new HttpRequestMessage(HttpMethod.Delete, uri); + request.Headers.Add("X-User-Id", userId.ToString()); + return await client.SendAsync(request); + } + + private sealed class ExpenseImportApiHost(WebApplication app) : IAsyncDisposable + { + public WebApplication App { get; } = app; + 
+ public HttpClient Client { get; } = app.GetTestClient(); + + public static async Task<ExpenseImportApiHost> StartAsync() + { + var builder = WebApplication.CreateBuilder(); + builder.WebHost.UseTestServer(); + var dbName = Guid.NewGuid().ToString(); + + builder + .Services.AddAuthentication(UserIdHeaderAuthenticationHandler.SchemeName) + .AddScheme< + UserIdHeaderAuthenticationSchemeOptions, + UserIdHeaderAuthenticationHandler + >(UserIdHeaderAuthenticationHandler.SchemeName, _ => { }); + builder.Services.AddAuthorization(); + builder.Services.AddDbContext<BikeTrackingDbContext>(options => + options.UseInMemoryDatabase(dbName) + ); + builder.Services.AddScoped<CsvExpenseParser>(); + builder.Services.AddScoped<ExpenseDuplicateDetector>(); + builder.Services.AddScoped<CsvExpenseImportService>(); + builder.Services.AddScoped<RecordExpenseService>(); + builder.Services.AddScoped<EditExpenseService>(); + builder.Services.AddScoped<IReceiptStorage, ExpenseImportStubReceiptStorage>(); + + var app = builder.Build(); + app.UseAuthentication(); + app.UseAuthorization(); + app.MapExpenseImportEndpoints(); + await app.StartAsync(); + + return new ExpenseImportApiHost(app); + } + + public async Task<long> SeedUserAsync(string displayName) + { + using var scope = App.Services.CreateScope(); + var dbContext = scope.ServiceProvider.GetRequiredService<BikeTrackingDbContext>(); + + var user = new UserEntity + { + DisplayName = displayName, + NormalizedName = displayName.ToLowerInvariant(), + CreatedAtUtc = DateTime.UtcNow, + IsActive = true, + }; + + dbContext.Users.Add(user); + await dbContext.SaveChangesAsync(); + return user.UserId; + } + + public async Task<long> SeedExpenseAsync( + long riderId, + DateTime expenseDate, + decimal amount, + string?
notes + ) + { + using var scope = App.Services.CreateScope(); + var dbContext = scope.ServiceProvider.GetRequiredService(); + + var expense = new ExpenseEntity + { + RiderId = riderId, + ExpenseDate = expenseDate, + Amount = amount, + Notes = notes, + IsDeleted = false, + Version = 1, + CreatedAtUtc = DateTime.UtcNow, + UpdatedAtUtc = DateTime.UtcNow, + }; + + dbContext.Expenses.Add(expense); + await dbContext.SaveChangesAsync(); + return expense.Id; + } + + public async ValueTask DisposeAsync() + { + Client.Dispose(); + await App.StopAsync(); + await App.DisposeAsync(); + } + } +} + +internal sealed class ExpenseImportStubReceiptStorage : IReceiptStorage +{ + public Task SaveAsync(long riderId, long expenseId, string filename, Stream stream) + { + return Task.FromResult($"{riderId}/{expenseId}/stub.bin"); + } + + public Task DeleteAsync(string relativePath) + { + return Task.CompletedTask; + } + + public Task GetAsync(string relativePath) + { + return Task.FromResult(new MemoryStream()); + } +} diff --git a/src/BikeTracking.Api.Tests/Infrastructure/MigrationTestCoveragePolicyTests.cs b/src/BikeTracking.Api.Tests/Infrastructure/MigrationTestCoveragePolicyTests.cs index f1365e5..1887deb 100644 --- a/src/BikeTracking.Api.Tests/Infrastructure/MigrationTestCoveragePolicyTests.cs +++ b/src/BikeTracking.Api.Tests/Infrastructure/MigrationTestCoveragePolicyTests.cs @@ -38,6 +38,8 @@ public sealed class MigrationTestCoveragePolicyTests "Added test: rides service and history projection coverage validates note persistence and retrieval after schema migration.", ["20260417194545_AddExpensesTable"] = "Added test: expense endpoint integration tests validate expense creation, editing, deletion, and receipt handling after schema migration.", + ["20260420155250_AddExpenseImportTables"] = + "Added test: expense import endpoint integration tests validate ExpenseImportJobs and ExpenseImportRows schema behavior after migration.", }; [Fact] diff --git 
a/src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs b/src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs new file mode 100644 index 0000000..03624f9 --- /dev/null +++ b/src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseImportService.cs @@ -0,0 +1,423 @@ +using BikeTracking.Api.Application.Expenses; +using BikeTracking.Api.Contracts; +using BikeTracking.Api.Infrastructure.Persistence; +using BikeTracking.Api.Infrastructure.Persistence.Entities; +using Microsoft.EntityFrameworkCore; + +namespace BikeTracking.Api.Application.ExpenseImports; + +public sealed class CsvExpenseImportService( + BikeTrackingDbContext dbContext, + CsvExpenseParser parser, + ExpenseDuplicateDetector duplicateDetector, + RecordExpenseService recordExpenseService, + EditExpenseService editExpenseService +) +{ + public sealed record OperationError(string Code, string Message, int StatusCode); + + public sealed record OperationResult<T>(T? Value, OperationError? Error) + { + public bool IsSuccess => Error is null; + + public static OperationResult<T> Success(T value) => new(value, null); + + public static OperationResult<T> Failure(string code, string message, int statusCode) => + new(default, new OperationError(code, message, statusCode)); + } + + public async Task<ExpenseImportPreviewResponse> PreviewAsync( + long riderId, + string fileName, + string csvText, + CancellationToken cancellationToken = default + ) + { + var parsedDocument = parser.Parse(csvText); + var persistedRows = new List<ExpenseImportRowEntity>(); + var errors = new List<ExpenseImportRowErrorView>(); + var candidates = new List<ExpenseImportCandidate>(); + + foreach (var row in parsedDocument.Rows) + { + var validationErrors = parser.ValidateRow(row); + var isValid = validationErrors.Count == 0; + DateOnly? parsedDate = null; + decimal?
parsedAmount = null; + + if (parser.TryParseDate(row.Date, out var dateValue)) + { + parsedDate = dateValue; + } + + if (parser.TryParseAmount(row.Amount, out var amountValue)) + { + parsedAmount = decimal.Round(amountValue, 2, MidpointRounding.AwayFromZero); + } + + if (!isValid) + { + errors.AddRange( + validationErrors.Select(error => new ExpenseImportRowErrorView( + row.RowNumber, + error.Field, + error.Message + )) + ); + } + + if (isValid && parsedDate.HasValue && parsedAmount.HasValue) + { + candidates.Add( + new ExpenseImportCandidate(row.RowNumber, parsedDate.Value, parsedAmount.Value) + ); + } + + persistedRows.Add( + new ExpenseImportRowEntity + { + RowNumber = row.RowNumber, + ExpenseDateLocal = parsedDate, + Amount = parsedAmount, + Notes = string.IsNullOrWhiteSpace(row.Note) ? null : row.Note, + ValidationStatus = isValid ? "valid" : "invalid", + ValidationErrorsJson = + validationErrors.Count == 0 + ? null + : System.Text.Json.JsonSerializer.Serialize(validationErrors), + DuplicateStatus = "none", + DuplicateResolution = null, + ProcessingStatus = isValid ? 
"pending" : "failed", + ExistingExpenseIdsJson = null, + CreatedExpenseId = null, + } + ); + } + + var duplicateLookup = await duplicateDetector.GetDuplicateMatchesAsync( + riderId, + candidates, + cancellationToken + ); + var duplicateViews = new List<ExpenseImportDuplicateView>(); + foreach (var row in persistedRows.Where(static row => row.ValidationStatus == "valid")) + { + if (!duplicateLookup.TryGetValue(row.RowNumber, out var matches) || matches.Count == 0) + { + continue; + } + + row.DuplicateStatus = "duplicate"; + row.ExistingExpenseIdsJson = ExpenseDuplicateDetector.SerializeExistingExpenseIds( + matches + ); + duplicateViews.Add( + new ExpenseImportDuplicateView( + row.RowNumber, + row.ExpenseDateLocal!.Value, + row.Amount!.Value, + row.Notes, + matches + .Select(match => new ExistingExpenseMatchView( + match.Id, + DateOnly.FromDateTime(match.ExpenseDate), + match.Amount, + match.Notes + )) + .ToArray() + ) + ); + } + + var job = new ExpenseImportJobEntity + { + RiderId = riderId, + FileName = fileName, + TotalRows = persistedRows.Count, + ValidRows = persistedRows.Count(static row => row.ValidationStatus == "valid"), + InvalidRows = persistedRows.Count(static row => row.ValidationStatus == "invalid"), + ImportedRows = 0, + SkippedRows = 0, + OverrideAllDuplicates = false, + Status = "awaiting-confirmation", + LastError = null, + CreatedAtUtc = DateTime.UtcNow, + CompletedAtUtc = null, + Rows = persistedRows, + }; + + dbContext.ExpenseImportJobs.Add(job); + await dbContext.SaveChangesAsync(cancellationToken); + + return new ExpenseImportPreviewResponse( + job.Id, + fileName, + job.TotalRows, + job.ValidRows, + job.InvalidRows, + duplicateViews.Count, + errors, + duplicateViews, + job.ValidRows > 0 + ); + } + + public async Task<OperationResult<ExpenseImportSummaryResponse>> ConfirmAsync( + long riderId, + long jobId, + ConfirmExpenseImportRequest request, + CancellationToken cancellationToken = default + ) + { + var job = await dbContext + .ExpenseImportJobs.Include(static current => current.Rows) +
.SingleOrDefaultAsync(current => current.Id == jobId, cancellationToken); + + if (job is null) + { + return OperationResult.Failure( + "NOT_FOUND", + "Import job was not found.", + StatusCodes.Status404NotFound + ); + } + + if (job.RiderId != riderId) + { + return OperationResult.Failure( + "FORBIDDEN", + "Import job belongs to a different rider.", + StatusCodes.Status403Forbidden + ); + } + + if (job.Status == "completed" || job.Status == "processing") + { + return OperationResult.Failure( + "CONFLICT", + "Import job has already been confirmed.", + StatusCodes.Status409Conflict + ); + } + + if (job.Status != "awaiting-confirmation") + { + return OperationResult.Failure( + "VALIDATION_FAILED", + "Import job is not awaiting confirmation.", + StatusCodes.Status400BadRequest + ); + } + + job.Status = "processing"; + job.OverrideAllDuplicates = request.OverrideAllDuplicates; + + var choiceLookup = request.DuplicateChoices.ToDictionary( + static choice => choice.RowNumber, + static choice => choice.Resolution, + EqualityComparer.Default + ); + + var processingFailures = 0; + foreach (var row in job.Rows.OrderBy(static current => current.RowNumber)) + { + if (row.ValidationStatus != "valid") + { + continue; + } + + var isDuplicate = row.DuplicateStatus == "duplicate"; + var resolution = request.OverrideAllDuplicates + ? 
"override-all" + : choiceLookup.GetValueOrDefault(row.RowNumber, "keep-existing"); + + try + { + if (isDuplicate && !request.OverrideAllDuplicates && resolution == "keep-existing") + { + row.DuplicateResolution = "keep-existing"; + row.ProcessingStatus = "skipped"; + job.SkippedRows += 1; + continue; + } + + if ( + isDuplicate + && !request.OverrideAllDuplicates + && resolution == "replace-with-import" + ) + { + var existingExpenseIds = ExpenseDuplicateDetector.DeserializeExistingExpenseIds( + row.ExistingExpenseIdsJson + ); + var existingExpenseId = existingExpenseIds.FirstOrDefault(); + var existingExpense = await dbContext.Expenses.SingleOrDefaultAsync( + expense => + expense.Id == existingExpenseId + && expense.RiderId == riderId + && !expense.IsDeleted, + cancellationToken + ); + + if (existingExpense is null) + { + row.ProcessingStatus = "failed"; + processingFailures += 1; + continue; + } + + var notes = string.IsNullOrWhiteSpace(row.Notes) + ? existingExpense.Notes + : row.Notes; + var editResult = await editExpenseService.ExecuteAsync( + riderId, + existingExpense.Id, + new EditExpenseRequest( + row.ExpenseDateLocal!.Value.ToDateTime(TimeOnly.MinValue), + row.Amount!.Value, + notes, + existingExpense.Version + ), + cancellationToken + ); + + if (!editResult.IsSuccess || editResult.Response is null) + { + row.ProcessingStatus = "failed"; + processingFailures += 1; + continue; + } + + row.DuplicateResolution = "replace-with-import"; + row.ProcessingStatus = "processed"; + row.CreatedExpenseId = existingExpense.Id; + job.ImportedRows += 1; + continue; + } + + var recordResponse = await recordExpenseService.ExecuteAsync( + riderId, + new RecordExpenseRequest( + row.ExpenseDateLocal!.Value.ToDateTime(TimeOnly.MinValue), + row.Amount!.Value, + row.Notes + ), + cancellationToken: cancellationToken + ); + + row.DuplicateResolution = isDuplicate ? 
"override-all" : null; + row.ProcessingStatus = "processed"; + row.CreatedExpenseId = recordResponse.ExpenseId; + job.ImportedRows += 1; + } + catch + { + row.ProcessingStatus = "failed"; + processingFailures += 1; + } + } + + job.Status = "completed"; + job.CompletedAtUtc = DateTime.UtcNow; + await dbContext.SaveChangesAsync(cancellationToken); + + return OperationResult.Success( + new ExpenseImportSummaryResponse( + job.Id, + job.TotalRows, + job.ImportedRows, + job.SkippedRows, + job.InvalidRows + processingFailures + ) + ); + } + + public async Task> GetStatusAsync( + long riderId, + long jobId, + CancellationToken cancellationToken = default + ) + { + var job = await dbContext + .ExpenseImportJobs.AsNoTracking() + .Include(static current => current.Rows) + .SingleOrDefaultAsync(current => current.Id == jobId, cancellationToken); + + if (job is null) + { + return OperationResult.Failure( + "NOT_FOUND", + "Import job was not found.", + StatusCodes.Status404NotFound + ); + } + + if (job.RiderId != riderId) + { + return OperationResult.Failure( + "FORBIDDEN", + "Import job belongs to a different rider.", + StatusCodes.Status403Forbidden + ); + } + + ExpenseImportSummaryResponse? 
summary = null; + if (job.Status == "completed") + { + summary = new ExpenseImportSummaryResponse( + job.Id, + job.TotalRows, + job.ImportedRows, + job.SkippedRows, + job.Rows.Count(row => + row.ValidationStatus == "invalid" || row.ProcessingStatus == "failed" + ) + ); + } + + return OperationResult.Success( + new ExpenseImportStatusResponse( + job.Id, + job.Status, + job.TotalRows, + job.ValidRows, + job.InvalidRows, + job.Rows.Count(row => row.DuplicateStatus == "duplicate"), + summary + ) + ); + } + + public async Task> DeleteAsync( + long riderId, + long jobId, + CancellationToken cancellationToken = default + ) + { + var job = await dbContext + .ExpenseImportJobs.Include(static current => current.Rows) + .SingleOrDefaultAsync(current => current.Id == jobId, cancellationToken); + + if (job is null) + { + return OperationResult.Failure( + "NOT_FOUND", + "Import job was not found.", + StatusCodes.Status404NotFound + ); + } + + if (job.RiderId != riderId) + { + return OperationResult.Failure( + "FORBIDDEN", + "Import job belongs to a different rider.", + StatusCodes.Status403Forbidden + ); + } + + dbContext.ExpenseImportJobs.Remove(job); + await dbContext.SaveChangesAsync(cancellationToken); + return OperationResult.Success(true); + } +} diff --git a/src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseParser.cs b/src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseParser.cs new file mode 100644 index 0000000..f2f820c --- /dev/null +++ b/src/BikeTracking.Api/Application/ExpenseImports/CsvExpenseParser.cs @@ -0,0 +1,226 @@ +using System.Globalization; +using System.Text; +using System.Text.RegularExpressions; + +namespace BikeTracking.Api.Application.ExpenseImports; + +public sealed class CsvExpenseParser +{ + private static readonly string[] RequiredColumns = ["DATE", "AMOUNT"]; + + private static readonly string[] SupportedDateFormats = + [ + "yyyy-MM-dd", + "MM/dd/yyyy", + "M/d/yyyy", + "dd-MMM-yyyy", + "d-MMM-yyyy", + "MMM dd yyyy", + ]; + + public 
ParsedExpenseCsvDocument Parse(string csvText) + { + ArgumentException.ThrowIfNullOrWhiteSpace(csvText); + + var lines = csvText + .Replace("\r\n", "\n", StringComparison.Ordinal) + .Replace('\r', '\n') + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + + if (lines.Length == 0) + { + return new ParsedExpenseCsvDocument([]); + } + + var headers = SplitCsvLine(lines[0]) + .Select(static value => NormalizeHeader(value)) + .ToArray(); + var missingRequired = RequiredColumns + .Where(required => !headers.Contains(required)) + .ToArray(); + if (missingRequired.Length > 0) + { + var displayNames = missingRequired.Select(static required => + required[..1] + required[1..].ToLowerInvariant() + ); + throw new ArgumentException( + $"Missing required columns: {string.Join(", ", displayNames)}" + ); + } + + var columnIndex = headers + .Select((header, index) => new { header, index }) + .ToDictionary( + static value => value.header, + static value => value.index, + StringComparer.Ordinal + ); + + var rows = new List(); + for (var lineIndex = 1; lineIndex < lines.Length; lineIndex++) + { + var values = SplitCsvLine(lines[lineIndex]); + + string? GetValue(string header) + { + if (!columnIndex.TryGetValue(header, out var index) || index >= values.Count) + { + return null; + } + + var value = values[index].Trim(); + return string.IsNullOrWhiteSpace(value) ? 
null : value; + } + + var date = GetValue("DATE"); + var amount = GetValue("AMOUNT"); + var note = GetValue("NOTE"); + + if (date is null && amount is null && note is null) + { + continue; + } + + rows.Add(new ParsedExpenseCsvRow(lineIndex, date, amount, note)); + } + + return new ParsedExpenseCsvDocument(rows); + } + + public IReadOnlyList ValidateRow(ParsedExpenseCsvRow row) + { + ArgumentNullException.ThrowIfNull(row); + + var errors = new List(); + if (!TryParseDate(row.Date, out _)) + { + errors.Add( + new ExpenseImportValidationError( + "INVALID_DATE", + "Date", + "Date is required and must be parseable." + ) + ); + } + + if (!TryParseAmount(row.Amount, out var parsedAmount) || parsedAmount <= 0m) + { + errors.Add( + new ExpenseImportValidationError( + "INVALID_AMOUNT", + "Amount", + "Amount must be greater than zero." + ) + ); + } + + if (row.Note is not null && row.Note.Length > 500) + { + errors.Add( + new ExpenseImportValidationError( + "NOTE_TOO_LONG", + "Note", + "Note must be 500 characters or fewer." + ) + ); + } + + return errors; + } + + public string NormalizeAmount(string amount) + { + ArgumentNullException.ThrowIfNull(amount); + + var normalized = amount.Trim(); + normalized = normalized.TrimStart('$', '£', '€', '¥'); + normalized = normalized.Replace(",", string.Empty, StringComparison.Ordinal); + normalized = Regex.Replace( + normalized, + "\\s*[A-Z]{3}$", + string.Empty, + RegexOptions.CultureInvariant + ); + return normalized.Trim(); + } + + public bool TryParseAmount(string? amount, out decimal parsedAmount) + { + if (string.IsNullOrWhiteSpace(amount)) + { + parsedAmount = default; + return false; + } + + return decimal.TryParse( + NormalizeAmount(amount), + NumberStyles.Number, + CultureInfo.InvariantCulture, + out parsedAmount + ); + } + + public bool TryParseDate(string? 
rawDate, out DateOnly parsedDate) + { + if (string.IsNullOrWhiteSpace(rawDate)) + { + parsedDate = default; + return false; + } + + return DateOnly.TryParseExact( + rawDate.Trim(), + SupportedDateFormats, + CultureInfo.InvariantCulture, + DateTimeStyles.AllowWhiteSpaces, + out parsedDate + ); + } + + private static string NormalizeHeader(string value) + { + return value.Trim().Trim('\uFEFF').ToUpperInvariant(); + } + + private static List SplitCsvLine(string line) + { + var values = new List(); + var current = new StringBuilder(); + var inQuotes = false; + + for (var index = 0; index < line.Length; index++) + { + var character = line[index]; + if (character == '"') + { + if (inQuotes && index + 1 < line.Length && line[index + 1] == '"') + { + current.Append('"'); + index += 1; + continue; + } + + inQuotes = !inQuotes; + continue; + } + + if (character == ',' && !inQuotes) + { + values.Add(current.ToString()); + current.Clear(); + continue; + } + + current.Append(character); + } + + values.Add(current.ToString()); + return values; + } +} + +public sealed record ParsedExpenseCsvDocument(IReadOnlyList Rows); + +public sealed record ParsedExpenseCsvRow(int RowNumber, string? Date, string? Amount, string? 
Note); + +public sealed record ExpenseImportValidationError(string Code, string Field, string Message); diff --git a/src/BikeTracking.Api/Application/ExpenseImports/ExpenseDuplicateDetector.cs b/src/BikeTracking.Api/Application/ExpenseImports/ExpenseDuplicateDetector.cs new file mode 100644 index 0000000..1bf82be --- /dev/null +++ b/src/BikeTracking.Api/Application/ExpenseImports/ExpenseDuplicateDetector.cs @@ -0,0 +1,78 @@ +using System.Text.Json; +using BikeTracking.Api.Infrastructure.Persistence; +using BikeTracking.Api.Infrastructure.Persistence.Entities; +using Microsoft.EntityFrameworkCore; + +namespace BikeTracking.Api.Application.ExpenseImports; + +public sealed class ExpenseDuplicateDetector(BikeTrackingDbContext dbContext) +{ + public async Task< + IReadOnlyDictionary> + > GetDuplicateMatchesAsync( + long riderId, + IReadOnlyList candidates, + CancellationToken cancellationToken + ) + { + if (candidates.Count == 0) + { + return new Dictionary>(); + } + + var activeExpenses = await dbContext + .Expenses.AsNoTracking() + .Where(expense => expense.RiderId == riderId && !expense.IsDeleted) + .OrderBy(expense => expense.ExpenseDate) + .ThenBy(expense => expense.Id) + .ToListAsync(cancellationToken); + + var lookup = activeExpenses + .GroupBy(static expense => + BuildKey(DateOnly.FromDateTime(expense.ExpenseDate), expense.Amount) + ) + .ToDictionary( + static group => group.Key, + static group => (IReadOnlyList)group.ToList(), + StringComparer.Ordinal + ); + + var results = new Dictionary>(); + foreach (var candidate in candidates) + { + var key = BuildKey(candidate.ExpenseDateLocal, candidate.Amount); + if (lookup.TryGetValue(key, out var matches)) + { + results[candidate.RowNumber] = matches; + } + } + + return results; + } + + public static string SerializeExistingExpenseIds(IReadOnlyList matches) + { + return JsonSerializer.Serialize(matches.Select(static expense => expense.Id).ToArray()); + } + + public static IReadOnlyList 
DeserializeExistingExpenseIds(string? json) + { + if (string.IsNullOrWhiteSpace(json)) + { + return []; + } + + return JsonSerializer.Deserialize(json) ?? []; + } + + private static string BuildKey(DateOnly expenseDateLocal, decimal amount) + { + return $"{expenseDateLocal:yyyy-MM-dd}|{decimal.Round(amount, 2, MidpointRounding.AwayFromZero):0.00}"; + } +} + +public sealed record ExpenseImportCandidate( + int RowNumber, + DateOnly ExpenseDateLocal, + decimal Amount +); diff --git a/src/BikeTracking.Api/Contracts/ExpenseImportContracts.cs b/src/BikeTracking.Api/Contracts/ExpenseImportContracts.cs new file mode 100644 index 0000000..5652bdd --- /dev/null +++ b/src/BikeTracking.Api/Contracts/ExpenseImportContracts.cs @@ -0,0 +1,62 @@ +using System.ComponentModel.DataAnnotations; + +namespace BikeTracking.Api.Contracts; + +public sealed record ExpenseImportPreviewRequest( + [property: Required] string FileName, + [property: Required] string ContentBase64 +); + +public sealed record ExpenseImportRowErrorView(int RowNumber, string Field, string Message); + +public sealed record ExistingExpenseMatchView( + long ExpenseId, + DateOnly ExpenseDate, + decimal Amount, + string? Note +); + +public sealed record ExpenseImportDuplicateView( + int RowNumber, + DateOnly ExpenseDate, + decimal Amount, + string? 
Note, + IReadOnlyList ExistingMatches +); + +public sealed record ExpenseImportPreviewResponse( + long JobId, + string FileName, + int TotalRows, + int ValidRows, + int InvalidRows, + int DuplicateCount, + IReadOnlyList Errors, + IReadOnlyList Duplicates, + bool CanConfirmImport +); + +public sealed record ExpenseDuplicateResolutionChoice(int RowNumber, string Resolution); + +public sealed record ConfirmExpenseImportRequest( + bool OverrideAllDuplicates, + IReadOnlyList DuplicateChoices +); + +public sealed record ExpenseImportSummaryResponse( + long JobId, + int TotalRows, + int ImportedRows, + int SkippedRows, + int FailedRows +); + +public sealed record ExpenseImportStatusResponse( + long JobId, + string Status, + int TotalRows, + int ValidRows, + int InvalidRows, + int DuplicateCount, + ExpenseImportSummaryResponse? Summary +); diff --git a/src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs b/src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs new file mode 100644 index 0000000..97fa635 --- /dev/null +++ b/src/BikeTracking.Api/Endpoints/ExpenseImportEndpoints.cs @@ -0,0 +1,232 @@ +using System.Text; +using BikeTracking.Api.Application.ExpenseImports; +using BikeTracking.Api.Contracts; +using Microsoft.AspNetCore.Mvc; + +namespace BikeTracking.Api.Endpoints; + +public static class ExpenseImportEndpoints +{ + private const int MaxUploadBytes = 5 * 1024 * 1024; + + public static IEndpointRouteBuilder MapExpenseImportEndpoints( + this IEndpointRouteBuilder endpoints + ) + { + var group = endpoints.MapGroup("/api/expense-imports"); + + group + .MapPost("/preview", PostPreview) + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status401Unauthorized) + .RequireAuthorization(); + + group + .MapPost("/{jobId:long}/confirm", PostConfirm) + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status401Unauthorized) + 
.Produces(StatusCodes.Status403Forbidden) + .Produces(StatusCodes.Status404NotFound) + .Produces(StatusCodes.Status409Conflict) + .RequireAuthorization(); + + group + .MapGet("/{jobId:long}/status", GetStatus) + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status401Unauthorized) + .Produces(StatusCodes.Status403Forbidden) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(); + + group + .MapDelete("/{jobId:long}", Delete) + .Produces(StatusCodes.Status204NoContent) + .Produces(StatusCodes.Status401Unauthorized) + .Produces(StatusCodes.Status403Forbidden) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(); + + return endpoints; + } + + private static Task PostPreview( + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + return PostPreviewCore(context, importService, cancellationToken); + } + + private static Task PostConfirm( + long jobId, + [FromBody] ConfirmExpenseImportRequest request, + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + return PostConfirmCore(jobId, request, context, importService, cancellationToken); + } + + private static Task GetStatus( + long jobId, + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + return GetStatusCore(jobId, context, importService, cancellationToken); + } + + private static Task Delete( + long jobId, + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + return DeleteCore(jobId, context, importService, cancellationToken); + } + + private static async Task PostPreviewCore( + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + if (!TryGetRiderId(context, out var riderId)) + { + return Results.Unauthorized(); + } + + var form = await context.Request.ReadFormAsync(cancellationToken); + var 
file = form.Files.GetFile("file"); + if (file is null || file.Length == 0) + { + return Results.BadRequest( + new ErrorResponse("VALIDATION_FAILED", "A CSV file is required.") + ); + } + + if (!file.FileName.EndsWith(".csv", StringComparison.OrdinalIgnoreCase)) + { + return Results.BadRequest( + new ErrorResponse("VALIDATION_FAILED", "Please upload a .csv file.") + ); + } + + if (file.Length > MaxUploadBytes) + { + return Results.BadRequest( + new ErrorResponse("VALIDATION_FAILED", "CSV file must be 5 MB or smaller.") + ); + } + + using var stream = file.OpenReadStream(); + using var reader = new StreamReader( + stream, + Encoding.UTF8, + detectEncodingFromByteOrderMarks: true + ); + var csvText = await reader.ReadToEndAsync(cancellationToken); + + try + { + var response = await importService.PreviewAsync( + riderId, + file.FileName, + csvText, + cancellationToken + ); + return Results.Ok(response); + } + catch (ArgumentException exception) + { + return Results.BadRequest(new ErrorResponse("VALIDATION_FAILED", exception.Message)); + } + } + + private static async Task PostConfirmCore( + long jobId, + ConfirmExpenseImportRequest request, + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + if (!TryGetRiderId(context, out var riderId)) + { + return Results.Unauthorized(); + } + + return ToResult( + await importService.ConfirmAsync(riderId, jobId, request, cancellationToken) + ); + } + + private static async Task GetStatusCore( + long jobId, + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + if (!TryGetRiderId(context, out var riderId)) + { + return Results.Unauthorized(); + } + + return ToResult(await importService.GetStatusAsync(riderId, jobId, cancellationToken)); + } + + private static async Task DeleteCore( + long jobId, + HttpContext context, + CsvExpenseImportService importService, + CancellationToken cancellationToken + ) + { + if 
(!TryGetRiderId(context, out var riderId)) + { + return Results.Unauthorized(); + } + + var result = await importService.DeleteAsync(riderId, jobId, cancellationToken); + return result.IsSuccess ? Results.NoContent() : ErrorToResult(result.Error!); + } + + private static bool TryGetRiderId(HttpContext context, out long riderId) + { + var userIdString = context.User.FindFirst("sub")?.Value; + return long.TryParse(userIdString, out riderId) && riderId > 0; + } + + private static IResult ToResult(CsvExpenseImportService.OperationResult result) + { + return result.IsSuccess ? Results.Ok(result.Value) : ErrorToResult(result.Error!); + } + + private static IResult ErrorToResult(CsvExpenseImportService.OperationError error) + { + return error.StatusCode switch + { + StatusCodes.Status400BadRequest => Results.BadRequest( + new ErrorResponse(error.Code, error.Message) + ), + StatusCodes.Status403Forbidden => Results.Json( + new ErrorResponse(error.Code, error.Message), + statusCode: StatusCodes.Status403Forbidden + ), + StatusCodes.Status404NotFound => Results.NotFound( + new ErrorResponse(error.Code, error.Message) + ), + StatusCodes.Status409Conflict => Results.Conflict( + new ErrorResponse(error.Code, error.Message) + ), + _ => Results.BadRequest(new ErrorResponse(error.Code, error.Message)), + }; + } +} diff --git a/src/BikeTracking.Api/Infrastructure/Persistence/BikeTrackingDbContext.cs b/src/BikeTracking.Api/Infrastructure/Persistence/BikeTrackingDbContext.cs index 65bf1b1..3733436 100644 --- a/src/BikeTracking.Api/Infrastructure/Persistence/BikeTrackingDbContext.cs +++ b/src/BikeTracking.Api/Infrastructure/Persistence/BikeTrackingDbContext.cs @@ -12,6 +12,8 @@ public sealed class BikeTrackingDbContext(DbContextOptions OutboxEvents => Set(); public DbSet Rides => Set(); public DbSet Expenses => Set(); + public DbSet ExpenseImportJobs => Set(); + public DbSet ExpenseImportRows => Set(); public DbSet ImportJobs => Set(); public DbSet ImportRows => Set(); public DbSet 
GasPriceLookups => Set(); @@ -196,6 +198,61 @@ protected override void OnModelCreating(ModelBuilder modelBuilder) .OnDelete(DeleteBehavior.Cascade); }); + modelBuilder.Entity(static entity => + { + entity.ToTable("ExpenseImportJobs"); + entity.HasKey(static x => x.Id); + entity.Property(static x => x.RiderId).IsRequired(); + entity.Property(static x => x.FileName).IsRequired().HasMaxLength(255); + entity.Property(static x => x.TotalRows).HasDefaultValue(0); + entity.Property(static x => x.ValidRows).HasDefaultValue(0); + entity.Property(static x => x.InvalidRows).HasDefaultValue(0); + entity.Property(static x => x.ImportedRows).HasDefaultValue(0); + entity.Property(static x => x.SkippedRows).HasDefaultValue(0); + entity.Property(static x => x.OverrideAllDuplicates).HasDefaultValue(false); + entity.Property(static x => x.Status).IsRequired().HasMaxLength(50); + entity.Property(static x => x.LastError).HasMaxLength(1000); + entity.Property(static x => x.CreatedAtUtc).IsRequired(); + entity.Property(static x => x.CompletedAtUtc); + + entity.HasIndex(static x => x.RiderId).HasDatabaseName("IX_ExpenseImportJobs_RiderId"); + + entity + .HasOne() + .WithMany() + .HasForeignKey(static x => x.RiderId) + .OnDelete(DeleteBehavior.Cascade); + }); + + modelBuilder.Entity(static entity => + { + entity.ToTable("ExpenseImportRows"); + entity.HasKey(static x => x.Id); + entity.Property(static x => x.ImportJobId).IsRequired(); + entity.Property(static x => x.RowNumber).IsRequired(); + entity.Property(static x => x.ExpenseDateLocal); + entity.Property(static x => x.Amount).HasPrecision(10, 2); + entity.Property(static x => x.Notes).HasMaxLength(500); + entity.Property(static x => x.ValidationStatus).IsRequired().HasMaxLength(30); + entity.Property(static x => x.ValidationErrorsJson); + entity.Property(static x => x.DuplicateStatus).IsRequired().HasMaxLength(30); + entity.Property(static x => x.DuplicateResolution).HasMaxLength(30); + entity.Property(static x => 
x.ProcessingStatus).IsRequired().HasMaxLength(30); + entity.Property(static x => x.ExistingExpenseIdsJson); + entity.Property(static x => x.CreatedExpenseId); + + entity + .HasIndex(static x => x.ImportJobId) + .HasDatabaseName("IX_ExpenseImportRows_ImportJobId"); + entity.HasIndex(static x => new { x.ImportJobId, x.RowNumber }).IsUnique(); + + entity + .HasOne(static x => x.ImportJob) + .WithMany(static x => x.Rows) + .HasForeignKey(static x => x.ImportJobId) + .OnDelete(DeleteBehavior.Cascade); + }); + modelBuilder.Entity(static entity => { entity.ToTable( diff --git a/src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportJobEntity.cs b/src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportJobEntity.cs new file mode 100644 index 0000000..58d4fc5 --- /dev/null +++ b/src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportJobEntity.cs @@ -0,0 +1,20 @@ +namespace BikeTracking.Api.Infrastructure.Persistence.Entities; + +public sealed class ExpenseImportJobEntity +{ + public long Id { get; set; } + public long RiderId { get; set; } + public required string FileName { get; set; } + public int TotalRows { get; set; } + public int ValidRows { get; set; } + public int InvalidRows { get; set; } + public int ImportedRows { get; set; } + public int SkippedRows { get; set; } + public bool OverrideAllDuplicates { get; set; } + public required string Status { get; set; } + public string? LastError { get; set; } + public DateTime CreatedAtUtc { get; set; } + public DateTime? 
CompletedAtUtc { get; set; } + + public ICollection Rows { get; set; } = []; +} diff --git a/src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportRowEntity.cs b/src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportRowEntity.cs new file mode 100644 index 0000000..17e5e3b --- /dev/null +++ b/src/BikeTracking.Api/Infrastructure/Persistence/Entities/ExpenseImportRowEntity.cs @@ -0,0 +1,20 @@ +namespace BikeTracking.Api.Infrastructure.Persistence.Entities; + +public sealed class ExpenseImportRowEntity +{ + public long Id { get; set; } + public long ImportJobId { get; set; } + public int RowNumber { get; set; } + public DateOnly? ExpenseDateLocal { get; set; } + public decimal? Amount { get; set; } + public string? Notes { get; set; } + public required string ValidationStatus { get; set; } + public string? ValidationErrorsJson { get; set; } + public required string DuplicateStatus { get; set; } + public string? DuplicateResolution { get; set; } + public required string ProcessingStatus { get; set; } + public string? ExistingExpenseIdsJson { get; set; } + public long? 
CreatedExpenseId { get; set; } + + public ExpenseImportJobEntity ImportJob { get; set; } = null!; +} diff --git a/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/20260420155250_AddExpenseImportTables.Designer.cs b/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/20260420155250_AddExpenseImportTables.Designer.cs new file mode 100644 index 0000000..12cdaa3 --- /dev/null +++ b/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/20260420155250_AddExpenseImportTables.Designer.cs @@ -0,0 +1,876 @@ +// +using System; +using BikeTracking.Api.Infrastructure.Persistence; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +#nullable disable + +namespace BikeTracking.Api.Infrastructure.Persistence.Migrations +{ + [DbContext(typeof(BikeTrackingDbContext))] + [Migration("20260420155250_AddExpenseImportTables")] + partial class AddExpenseImportTables + { + /// + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "10.0.5"); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.AuthAttemptStateEntity", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("ConsecutiveWrongCount") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("DelayUntilUtc") + .HasColumnType("TEXT"); + + b.Property("LastSuccessfulAuthUtc") + .HasColumnType("TEXT"); + + b.Property("LastWrongAttemptUtc") + .HasColumnType("TEXT"); + + b.HasKey("UserId"); + + b.ToTable("AuthAttemptStates", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Amount") + .HasPrecision(10, 2) + .HasColumnType("TEXT"); + + 
b.Property("CreatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("ExpenseDate") + .HasColumnType("TEXT"); + + b.Property("IsDeleted") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(false); + + b.Property("Notes") + .HasMaxLength(500) + .HasColumnType("TEXT"); + + b.Property("ReceiptPath") + .HasMaxLength(500) + .HasColumnType("TEXT"); + + b.Property("RiderId") + .HasColumnType("INTEGER"); + + b.Property("UpdatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("Version") + .IsConcurrencyToken() + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(1); + + b.HasKey("Id"); + + b.HasIndex("RiderId", "ExpenseDate") + .IsDescending(false, true) + .HasDatabaseName("IX_Expenses_RiderId_ExpenseDate_Desc"); + + b.HasIndex("RiderId", "IsDeleted") + .HasDatabaseName("IX_Expenses_RiderId_IsDeleted"); + + b.ToTable("Expenses", null, t => + { + t.HasCheckConstraint("CK_Expenses_Amount_Positive", "CAST(\"Amount\" AS REAL) > 0"); + }); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CompletedAtUtc") + .HasColumnType("TEXT"); + + b.Property("CreatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("FileName") + .IsRequired() + .HasMaxLength(255) + .HasColumnType("TEXT"); + + b.Property("ImportedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("InvalidRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LastError") + .HasMaxLength(1000) + .HasColumnType("TEXT"); + + b.Property("OverrideAllDuplicates") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(false); + + b.Property("RiderId") + .HasColumnType("INTEGER"); + + b.Property("SkippedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("Status") + .IsRequired() + 
.HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("TotalRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("ValidRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.HasKey("Id"); + + b.HasIndex("RiderId") + .HasDatabaseName("IX_ExpenseImportJobs_RiderId"); + + b.ToTable("ExpenseImportJobs", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportRowEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Amount") + .HasPrecision(10, 2) + .HasColumnType("TEXT"); + + b.Property("CreatedExpenseId") + .HasColumnType("INTEGER"); + + b.Property("DuplicateResolution") + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("DuplicateStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("ExistingExpenseIdsJson") + .HasColumnType("TEXT"); + + b.Property("ExpenseDateLocal") + .HasColumnType("TEXT"); + + b.Property("ImportJobId") + .HasColumnType("INTEGER"); + + b.Property("Notes") + .HasMaxLength(500) + .HasColumnType("TEXT"); + + b.Property("ProcessingStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("RowNumber") + .HasColumnType("INTEGER"); + + b.Property("ValidationErrorsJson") + .HasColumnType("TEXT"); + + b.Property("ValidationStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("ImportJobId") + .HasDatabaseName("IX_ExpenseImportRows_ImportJobId"); + + b.HasIndex("ImportJobId", "RowNumber") + .IsUnique(); + + b.ToTable("ExpenseImportRows", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.GasPriceLookupEntity", b => + { + b.Property("GasPriceLookupId") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("DataSource") + .IsRequired() + .HasMaxLength(64) + .HasColumnType("TEXT"); + + 
b.Property("EiaPeriodDate") + .HasColumnType("TEXT"); + + b.Property("PriceDate") + .HasColumnType("TEXT"); + + b.Property("PricePerGallon") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("RetrievedAtUtc") + .HasColumnType("TEXT"); + + b.Property("WeekStartDate") + .HasColumnType("TEXT"); + + b.HasKey("GasPriceLookupId"); + + b.HasIndex("PriceDate") + .IsUnique(); + + b.HasIndex("WeekStartDate") + .IsUnique(); + + b.ToTable("GasPriceLookups", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportJobEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CompletedAtUtc") + .HasColumnType("TEXT"); + + b.Property("CreatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("EtaMinutesRounded") + .HasColumnType("INTEGER"); + + b.Property("FailedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("FileName") + .IsRequired() + .HasMaxLength(255) + .HasColumnType("TEXT"); + + b.Property("ImportedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LastError") + .HasMaxLength(2048) + .HasColumnType("TEXT"); + + b.Property("OverrideAllDuplicates") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(false); + + b.Property("ProcessedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("RiderId") + .HasColumnType("INTEGER"); + + b.Property("SkippedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("StartedAtUtc") + .HasColumnType("TEXT"); + + b.Property("Status") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("TotalRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.HasKey("Id"); + + b.HasIndex("RiderId", "CreatedAtUtc"); + + b.ToTable("ImportJobs", null, t => + { + 
t.HasCheckConstraint("CK_ImportJobs_FailedRows_NonNegative", "\"FailedRows\" >= 0"); + + t.HasCheckConstraint("CK_ImportJobs_ImportedRows_NonNegative", "\"ImportedRows\" >= 0"); + + t.HasCheckConstraint("CK_ImportJobs_ProcessedRows_Lte_TotalRows", "\"ProcessedRows\" <= \"TotalRows\""); + + t.HasCheckConstraint("CK_ImportJobs_ProcessedRows_NonNegative", "\"ProcessedRows\" >= 0"); + + t.HasCheckConstraint("CK_ImportJobs_SkippedRows_NonNegative", "\"SkippedRows\" >= 0"); + + t.HasCheckConstraint("CK_ImportJobs_TotalRows_NonNegative", "\"TotalRows\" >= 0"); + }); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportRowEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CreatedRideId") + .HasColumnType("INTEGER"); + + b.Property("DuplicateResolution") + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("DuplicateStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("ExistingRideIdsJson") + .HasColumnType("TEXT"); + + b.Property("ImportJobId") + .HasColumnType("INTEGER"); + + b.Property("Miles") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("Notes") + .HasMaxLength(2000) + .HasColumnType("TEXT"); + + b.Property("ProcessingStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("RideDateLocal") + .HasColumnType("TEXT"); + + b.Property("RideMinutes") + .HasColumnType("INTEGER"); + + b.Property("RowNumber") + .HasColumnType("INTEGER"); + + b.Property("TagsRaw") + .HasMaxLength(512) + .HasColumnType("TEXT"); + + b.Property("Temperature") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("ValidationErrorsJson") + .HasColumnType("TEXT"); + + b.Property("ValidationStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("ImportJobId", "RowNumber") + .IsUnique(); + + b.ToTable("ImportRows", null, t => + { + 
t.HasCheckConstraint("CK_ImportRows_Miles_Range", "\"Miles\" IS NULL OR (CAST(\"Miles\" AS REAL) > 0 AND CAST(\"Miles\" AS REAL) <= 200)"); + + t.HasCheckConstraint("CK_ImportRows_RideMinutes_Positive", "\"RideMinutes\" IS NULL OR \"RideMinutes\" > 0"); + + t.HasCheckConstraint("CK_ImportRows_RowNumber_Positive", "\"RowNumber\" > 0"); + }); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.RideEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CloudCoverPercent") + .HasColumnType("INTEGER"); + + b.Property("CreatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("GasPricePerGallon") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("Miles") + .HasColumnType("TEXT"); + + b.Property("Notes") + .HasMaxLength(500) + .HasColumnType("TEXT"); + + b.Property("PrecipitationType") + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("RelativeHumidityPercent") + .HasColumnType("INTEGER"); + + b.Property("RideDateTimeLocal") + .HasColumnType("TEXT"); + + b.Property("RideMinutes") + .HasColumnType("INTEGER"); + + b.Property("RiderId") + .HasColumnType("INTEGER"); + + b.Property("SnapshotAverageCarMpg") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("SnapshotMileageRateCents") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("SnapshotOilChangePrice") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("SnapshotYearlyGoalMiles") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("Temperature") + .HasColumnType("TEXT"); + + b.Property("Version") + .IsConcurrencyToken() + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(1); + + b.Property("WeatherUserOverridden") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(false); + + b.Property("WindDirectionDeg") + .HasColumnType("INTEGER"); + + b.Property("WindSpeedMph") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + 
b.HasKey("Id"); + + b.HasIndex("RiderId", "CreatedAtUtc") + .IsDescending(false, true) + .HasDatabaseName("IX_Rides_RiderId_CreatedAtUtc_Desc"); + + b.ToTable("Rides", null, t => + { + t.HasCheckConstraint("CK_Rides_Miles_GreaterThanZero", "CAST(\"Miles\" AS REAL) > 0 AND CAST(\"Miles\" AS REAL) <= 200"); + + t.HasCheckConstraint("CK_Rides_RideMinutes_GreaterThanZero", "\"RideMinutes\" IS NULL OR \"RideMinutes\" > 0"); + + t.HasCheckConstraint("CK_Rides_SnapshotAverageCarMpg_Positive", "\"SnapshotAverageCarMpg\" IS NULL OR CAST(\"SnapshotAverageCarMpg\" AS REAL) > 0"); + + t.HasCheckConstraint("CK_Rides_SnapshotMileageRateCents_Positive", "\"SnapshotMileageRateCents\" IS NULL OR CAST(\"SnapshotMileageRateCents\" AS REAL) > 0"); + + t.HasCheckConstraint("CK_Rides_SnapshotOilChangePrice_Positive", "\"SnapshotOilChangePrice\" IS NULL OR CAST(\"SnapshotOilChangePrice\" AS REAL) > 0"); + + t.HasCheckConstraint("CK_Rides_SnapshotYearlyGoalMiles_Positive", "\"SnapshotYearlyGoalMiles\" IS NULL OR CAST(\"SnapshotYearlyGoalMiles\" AS REAL) > 0"); + }); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.UserSettingsEntity", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("AverageCarMpg") + .HasColumnType("TEXT"); + + b.Property("DashboardGallonsAvoidedEnabled") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(false); + + b.Property("DashboardGoalProgressEnabled") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(false); + + b.Property("Latitude") + .HasColumnType("TEXT"); + + b.Property("LocationLabel") + .HasMaxLength(200) + .HasColumnType("TEXT"); + + b.Property("Longitude") + .HasColumnType("TEXT"); + + b.Property("MileageRateCents") + .HasColumnType("TEXT"); + + b.Property("OilChangePrice") + .HasColumnType("TEXT"); + + b.Property("UpdatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("YearlyGoalMiles") + .HasColumnType("TEXT"); + + b.HasKey("UserId"); + + 
b.ToTable("UserSettings", null, t => + { + t.HasCheckConstraint("CK_UserSettings_AverageCarMpg_Positive", "\"AverageCarMpg\" IS NULL OR CAST(\"AverageCarMpg\" AS REAL) > 0"); + + t.HasCheckConstraint("CK_UserSettings_Latitude_Range", "\"Latitude\" IS NULL OR (CAST(\"Latitude\" AS REAL) >= -90 AND CAST(\"Latitude\" AS REAL) <= 90)"); + + t.HasCheckConstraint("CK_UserSettings_Longitude_Range", "\"Longitude\" IS NULL OR (CAST(\"Longitude\" AS REAL) >= -180 AND CAST(\"Longitude\" AS REAL) <= 180)"); + + t.HasCheckConstraint("CK_UserSettings_MileageRateCents_Positive", "\"MileageRateCents\" IS NULL OR CAST(\"MileageRateCents\" AS REAL) > 0"); + + t.HasCheckConstraint("CK_UserSettings_OilChangePrice_Positive", "\"OilChangePrice\" IS NULL OR CAST(\"OilChangePrice\" AS REAL) > 0"); + + t.HasCheckConstraint("CK_UserSettings_YearlyGoalMiles_Positive", "\"YearlyGoalMiles\" IS NULL OR CAST(\"YearlyGoalMiles\" AS REAL) > 0"); + }); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.WeatherLookupEntity", b => + { + b.Property("WeatherLookupId") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CloudCoverPercent") + .HasColumnType("INTEGER"); + + b.Property("DataSource") + .IsRequired() + .HasMaxLength(100) + .HasColumnType("TEXT"); + + b.Property("LatitudeRounded") + .HasPrecision(8, 2) + .HasColumnType("TEXT"); + + b.Property("LongitudeRounded") + .HasPrecision(8, 2) + .HasColumnType("TEXT"); + + b.Property("LookupHourUtc") + .HasColumnType("TEXT"); + + b.Property("PrecipitationType") + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("RelativeHumidityPercent") + .HasColumnType("INTEGER"); + + b.Property("RetrievedAtUtc") + .HasColumnType("TEXT"); + + b.Property("Status") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("Temperature") + .HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.Property("WindDirectionDeg") + .HasColumnType("INTEGER"); + + b.Property("WindSpeedMph") + 
.HasPrecision(10, 4) + .HasColumnType("TEXT"); + + b.HasKey("WeatherLookupId"); + + b.HasIndex("LookupHourUtc", "LatitudeRounded", "LongitudeRounded") + .IsUnique(); + + b.ToTable("WeatherLookups", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.OutboxEventEntity", b => + { + b.Property("OutboxEventId") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AggregateId") + .HasColumnType("INTEGER"); + + b.Property("AggregateType") + .IsRequired() + .HasMaxLength(64) + .HasColumnType("TEXT"); + + b.Property("EventPayloadJson") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("EventType") + .IsRequired() + .HasMaxLength(128) + .HasColumnType("TEXT"); + + b.Property("LastError") + .HasMaxLength(2048) + .HasColumnType("TEXT"); + + b.Property("NextAttemptUtc") + .HasColumnType("TEXT"); + + b.Property("OccurredAtUtc") + .HasColumnType("TEXT"); + + b.Property("PublishedAtUtc") + .HasColumnType("TEXT"); + + b.Property("RetryCount") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.HasKey("OutboxEventId"); + + b.HasIndex("AggregateType", "AggregateId"); + + b.HasIndex("PublishedAtUtc", "NextAttemptUtc"); + + b.ToTable("OutboxEvents", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.UserCredentialEntity", b => + { + b.Property("UserCredentialId") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CredentialVersion") + .HasColumnType("INTEGER"); + + b.Property("HashAlgorithm") + .IsRequired() + .HasMaxLength(64) + .HasColumnType("TEXT"); + + b.Property("IterationCount") + .HasColumnType("INTEGER"); + + b.Property("PinHash") + .IsRequired() + .HasColumnType("BLOB"); + + b.Property("PinSalt") + .IsRequired() + .HasColumnType("BLOB"); + + b.Property("UpdatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("UserCredentialId"); + + b.HasIndex("UserId") + .IsUnique(); + + 
b.ToTable("UserCredentials", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.UserEntity", b => + { + b.Property("UserId") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CreatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("DisplayName") + .IsRequired() + .HasMaxLength(120) + .HasColumnType("TEXT"); + + b.Property("IsActive") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(true); + + b.Property("NormalizedName") + .IsRequired() + .HasMaxLength(120) + .HasColumnType("TEXT"); + + b.HasKey("UserId"); + + b.HasIndex("NormalizedName") + .IsUnique(); + + b.ToTable("Users", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.AuthAttemptStateEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", "User") + .WithOne("AuthAttemptState") + .HasForeignKey("BikeTracking.Api.Infrastructure.Persistence.AuthAttemptStateEntity", "UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("User"); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", null) + .WithMany() + .HasForeignKey("RiderId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", null) + .WithMany() + .HasForeignKey("RiderId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportRowEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", "ImportJob") + .WithMany("Rows") + .HasForeignKey("ImportJobId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("ImportJob"); + 
}); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportJobEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", null) + .WithMany() + .HasForeignKey("RiderId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportRowEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportJobEntity", "ImportJob") + .WithMany("Rows") + .HasForeignKey("ImportJobId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("ImportJob"); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.RideEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", null) + .WithMany() + .HasForeignKey("RiderId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.UserSettingsEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.UserCredentialEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", "User") + .WithOne("Credential") + .HasForeignKey("BikeTracking.Api.Infrastructure.Persistence.UserCredentialEntity", "UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("User"); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", b => + { + b.Navigation("Rows"); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportJobEntity", b => + { + b.Navigation("Rows"); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.UserEntity", b => + { + b.Navigation("AuthAttemptState"); + + 
b.Navigation("Credential"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/20260420155250_AddExpenseImportTables.cs b/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/20260420155250_AddExpenseImportTables.cs new file mode 100644 index 0000000..92ac2db --- /dev/null +++ b/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/20260420155250_AddExpenseImportTables.cs @@ -0,0 +1,101 @@ +using System; +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace BikeTracking.Api.Infrastructure.Persistence.Migrations +{ + /// + public partial class AddExpenseImportTables : Migration + { + /// + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.CreateTable( + name: "ExpenseImportJobs", + columns: table => new + { + Id = table.Column(type: "INTEGER", nullable: false) + .Annotation("Sqlite:Autoincrement", true), + RiderId = table.Column(type: "INTEGER", nullable: false), + FileName = table.Column(type: "TEXT", maxLength: 255, nullable: false), + TotalRows = table.Column(type: "INTEGER", nullable: false, defaultValue: 0), + ValidRows = table.Column(type: "INTEGER", nullable: false, defaultValue: 0), + InvalidRows = table.Column(type: "INTEGER", nullable: false, defaultValue: 0), + ImportedRows = table.Column(type: "INTEGER", nullable: false, defaultValue: 0), + SkippedRows = table.Column(type: "INTEGER", nullable: false, defaultValue: 0), + OverrideAllDuplicates = table.Column(type: "INTEGER", nullable: false, defaultValue: false), + Status = table.Column(type: "TEXT", maxLength: 50, nullable: false), + LastError = table.Column(type: "TEXT", maxLength: 1000, nullable: true), + CreatedAtUtc = table.Column(type: "TEXT", nullable: false), + CompletedAtUtc = table.Column(type: "TEXT", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_ExpenseImportJobs", x => x.Id); + table.ForeignKey( + name: 
"FK_ExpenseImportJobs_Users_RiderId", + column: x => x.RiderId, + principalTable: "Users", + principalColumn: "UserId", + onDelete: ReferentialAction.Cascade); + }); + + migrationBuilder.CreateTable( + name: "ExpenseImportRows", + columns: table => new + { + Id = table.Column(type: "INTEGER", nullable: false) + .Annotation("Sqlite:Autoincrement", true), + ImportJobId = table.Column(type: "INTEGER", nullable: false), + RowNumber = table.Column(type: "INTEGER", nullable: false), + ExpenseDateLocal = table.Column(type: "TEXT", nullable: true), + Amount = table.Column(type: "TEXT", precision: 10, scale: 2, nullable: true), + Notes = table.Column(type: "TEXT", maxLength: 500, nullable: true), + ValidationStatus = table.Column(type: "TEXT", maxLength: 30, nullable: false), + ValidationErrorsJson = table.Column(type: "TEXT", nullable: true), + DuplicateStatus = table.Column(type: "TEXT", maxLength: 30, nullable: false), + DuplicateResolution = table.Column(type: "TEXT", maxLength: 30, nullable: true), + ProcessingStatus = table.Column(type: "TEXT", maxLength: 30, nullable: false), + ExistingExpenseIdsJson = table.Column(type: "TEXT", nullable: true), + CreatedExpenseId = table.Column(type: "INTEGER", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_ExpenseImportRows", x => x.Id); + table.ForeignKey( + name: "FK_ExpenseImportRows_ExpenseImportJobs_ImportJobId", + column: x => x.ImportJobId, + principalTable: "ExpenseImportJobs", + principalColumn: "Id", + onDelete: ReferentialAction.Cascade); + }); + + migrationBuilder.CreateIndex( + name: "IX_ExpenseImportJobs_RiderId", + table: "ExpenseImportJobs", + column: "RiderId"); + + migrationBuilder.CreateIndex( + name: "IX_ExpenseImportRows_ImportJobId", + table: "ExpenseImportRows", + column: "ImportJobId"); + + migrationBuilder.CreateIndex( + name: "IX_ExpenseImportRows_ImportJobId_RowNumber", + table: "ExpenseImportRows", + columns: new[] { "ImportJobId", "RowNumber" }, + unique: true); + } + + /// + 
protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropTable( + name: "ExpenseImportRows"); + + migrationBuilder.DropTable( + name: "ExpenseImportJobs"); + } + } +} diff --git a/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/BikeTrackingDbContextModelSnapshot.cs b/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/BikeTrackingDbContextModelSnapshot.cs index e7b3778..0ebe970 100644 --- a/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/BikeTrackingDbContextModelSnapshot.cs +++ b/src/BikeTracking.Api/Infrastructure/Persistence/Migrations/BikeTrackingDbContextModelSnapshot.cs @@ -97,6 +97,135 @@ protected override void BuildModel(ModelBuilder modelBuilder) }); }); + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CompletedAtUtc") + .HasColumnType("TEXT"); + + b.Property("CreatedAtUtc") + .HasColumnType("TEXT"); + + b.Property("FileName") + .IsRequired() + .HasMaxLength(255) + .HasColumnType("TEXT"); + + b.Property("ImportedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("InvalidRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LastError") + .HasMaxLength(1000) + .HasColumnType("TEXT"); + + b.Property("OverrideAllDuplicates") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(false); + + b.Property("RiderId") + .HasColumnType("INTEGER"); + + b.Property("SkippedRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("Status") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("TotalRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("ValidRows") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + 
b.HasKey("Id"); + + b.HasIndex("RiderId") + .HasDatabaseName("IX_ExpenseImportJobs_RiderId"); + + b.ToTable("ExpenseImportJobs", (string)null); + }); + + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportRowEntity", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Amount") + .HasPrecision(10, 2) + .HasColumnType("TEXT"); + + b.Property("CreatedExpenseId") + .HasColumnType("INTEGER"); + + b.Property("DuplicateResolution") + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("DuplicateStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("ExistingExpenseIdsJson") + .HasColumnType("TEXT"); + + b.Property("ExpenseDateLocal") + .HasColumnType("TEXT"); + + b.Property("ImportJobId") + .HasColumnType("INTEGER"); + + b.Property("Notes") + .HasMaxLength(500) + .HasColumnType("TEXT"); + + b.Property("ProcessingStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.Property("RowNumber") + .HasColumnType("INTEGER"); + + b.Property("ValidationErrorsJson") + .HasColumnType("TEXT"); + + b.Property("ValidationStatus") + .IsRequired() + .HasMaxLength(30) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("ImportJobId") + .HasDatabaseName("IX_ExpenseImportRows_ImportJobId"); + + b.HasIndex("ImportJobId", "RowNumber") + .IsUnique(); + + b.ToTable("ExpenseImportRows", (string)null); + }); + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.GasPriceLookupEntity", b => { b.Property("GasPriceLookupId") @@ -653,6 +782,26 @@ protected override void BuildModel(ModelBuilder modelBuilder) .IsRequired(); }); + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", null) + .WithMany() + .HasForeignKey("RiderId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + 
modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportRowEntity", b => + { + b.HasOne("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", "ImportJob") + .WithMany("Rows") + .HasForeignKey("ImportJobId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("ImportJob"); + }); + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportJobEntity", b => { b.HasOne("BikeTracking.Api.Infrastructure.Persistence.UserEntity", null) @@ -702,6 +851,11 @@ protected override void BuildModel(ModelBuilder modelBuilder) b.Navigation("User"); }); + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ExpenseImportJobEntity", b => + { + b.Navigation("Rows"); + }); + modelBuilder.Entity("BikeTracking.Api.Infrastructure.Persistence.Entities.ImportJobEntity", b => { b.Navigation("Rows"); diff --git a/src/BikeTracking.Api/Program.cs b/src/BikeTracking.Api/Program.cs index 91cb713..0a7e4ff 100644 --- a/src/BikeTracking.Api/Program.cs +++ b/src/BikeTracking.Api/Program.cs @@ -1,5 +1,6 @@ using BikeTracking.Api.Application.Dashboard; using BikeTracking.Api.Application.Events; +using BikeTracking.Api.Application.ExpenseImports; using BikeTracking.Api.Application.Expenses; using BikeTracking.Api.Application.Imports; using BikeTracking.Api.Application.Notifications; @@ -58,6 +59,9 @@ builder.Services.AddScoped(); builder.Services.AddScoped(); builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); builder.Services.AddScoped(); builder.Services.AddScoped(); builder.Services.AddScoped(); @@ -163,6 +167,7 @@ app.MapUsersEndpoints(); app.MapRidesEndpoints(); app.MapExpensesEndpoints(); +app.MapExpenseImportEndpoints(); app.MapImportEndpoints(); app.MapHub("/hubs/import-progress").RequireAuthorization(); app.MapDefaultEndpoints(); diff --git a/src/BikeTracking.Frontend/src/App.test.tsx 
b/src/BikeTracking.Frontend/src/App.test.tsx index d7ef3ee..698e0d7 100644 --- a/src/BikeTracking.Frontend/src/App.test.tsx +++ b/src/BikeTracking.Frontend/src/App.test.tsx @@ -26,6 +26,9 @@ vi.mock('./pages/settings/SettingsPage', () => ({ vi.mock('./pages/import-rides/ImportRidesPage', () => ({ ImportRidesPage: () =>
Import Rides Page
, })) +vi.mock('./pages/expenses/ExpenseImportPage', () => ({ + ExpenseImportPage: () =>
Expense Import Page
, +})) vi.mock('./components/app-header/app-header', () => ({ AppHeader: () =>
App Header
, })) diff --git a/src/BikeTracking.Frontend/src/App.tsx b/src/BikeTracking.Frontend/src/App.tsx index 48fac83..d4619de 100644 --- a/src/BikeTracking.Frontend/src/App.tsx +++ b/src/BikeTracking.Frontend/src/App.tsx @@ -10,6 +10,7 @@ import { HistoryPage } from './pages/HistoryPage' import { SettingsPage } from './pages/settings/SettingsPage' import { ImportRidesPage } from './pages/import-rides/ImportRidesPage' import { ExpenseEntryPage } from './pages/expenses/ExpenseEntryPage' +import { ExpenseImportPage } from './pages/expenses/ExpenseImportPage' import { ExpenseHistoryPage } from './pages/expenses/ExpenseHistoryPage' function App() { @@ -27,6 +28,7 @@ function App() { } /> } /> } /> + } /> } /> } /> diff --git a/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.test.tsx b/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.test.tsx index 3d327ea..3631136 100644 --- a/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.test.tsx +++ b/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.test.tsx @@ -77,6 +77,21 @@ describe('ExpenseHistoryPage', () => { }) }) + it('renders an import expenses link near the history controls', async () => { + render( + + + + ) + + await waitFor(() => { + expect(screen.getByRole('link', { name: /import expenses/i })).toHaveAttribute( + 'href', + '/expenses/import', + ) + }) + }) + it('applies date range filter', async () => { render( diff --git a/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.tsx b/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.tsx index 933c07f..a46762d 100644 --- a/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.tsx +++ b/src/BikeTracking.Frontend/src/pages/expenses/ExpenseHistoryPage.tsx @@ -1,4 +1,5 @@ import { useEffect, useState } from 'react' +import { Link } from 'react-router-dom' import type { ExpenseHistoryRow } from '../../services/expenses-api' import { deleteExpense, @@ -128,6 +129,12 @@ export function 
ExpenseHistoryPage() {

Expense History

+
+ + Import Expenses + +
+
({ + previewExpenseImport: vi.fn(), + confirmExpenseImport: vi.fn(), + getExpenseImportStatus: vi.fn(), + deleteExpenseImport: vi.fn(), +})) + +import * as expenseImportApi from '../../services/expense-import-api' + +const mockPreviewExpenseImport = vi.mocked(expenseImportApi.previewExpenseImport) + +describe('ExpenseImportPage', () => { + beforeEach(() => { + vi.clearAllMocks() + mockPreviewExpenseImport.mockResolvedValue({ + ok: true, + status: 200, + data: { + jobId: 44, + fileName: 'expenses.csv', + totalRows: 3, + validRows: 2, + invalidRows: 1, + duplicateCount: 0, + errors: [{ rowNumber: 2, field: 'Amount', message: 'Amount must be greater than zero.' }], + duplicates: [], + canConfirmImport: true, + }, + }) + }) + + it('renders upload controls and receipt exclusion notice', () => { + render( + + + , + ) + + expect(screen.getByRole('heading', { name: /import expenses/i })).toBeInTheDocument() + expect(screen.getByLabelText(/select csv file/i)).toBeInTheDocument() + expect(screen.getByText(/receipts cannot be imported/i)).toBeInTheDocument() + }) + + it('shows preview counts and row validation errors after preview', async () => { + render( + + + , + ) + + const file = new File(['Date,Amount,Note\n2026-04-01,12.50,Coffee'], 'expenses.csv', { + type: 'text/csv', + }) + + fireEvent.change(screen.getByLabelText(/select csv file/i), { + target: { files: [file] }, + }) + + fireEvent.click(screen.getByRole('button', { name: /preview import/i })) + + await waitFor(() => { + expect(screen.getByText(/total rows: 3/i)).toBeInTheDocument() + expect(screen.getByText(/valid rows: 2/i)).toBeInTheDocument() + expect(screen.getByText(/invalid rows: 1/i)).toBeInTheDocument() + expect(screen.getByText(/row 2/i)).toBeInTheDocument() + expect(screen.getByText(/amount must be greater than zero/i)).toBeInTheDocument() + }) + }) +}) \ No newline at end of file diff --git a/src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx 
b/src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx new file mode 100644 index 0000000..ea68ba0 --- /dev/null +++ b/src/BikeTracking.Frontend/src/pages/expenses/ExpenseImportPage.tsx @@ -0,0 +1,253 @@ +import { useEffect, useRef, useState } from 'react' +import { Link } from 'react-router-dom' +import { + confirmExpenseImport, + deleteExpenseImport, + previewExpenseImport, + type ConfirmExpenseImportRequest, + type ExpenseImportPreviewResponse, + type ExpenseImportSummaryResponse, +} from '../../services/expense-import-api' +import './ExpenseImportPage.css' + +const MAX_UPLOAD_BYTES = 5 * 1024 * 1024 + +export function ExpenseImportPage() { + const [selectedFile, setSelectedFile] = useState(null) + const [errorMessage, setErrorMessage] = useState('') + const [preview, setPreview] = useState(null) + const [summary, setSummary] = useState(null) + const [isPreviewing, setIsPreviewing] = useState(false) + const [isConfirming, setIsConfirming] = useState(false) + const [overrideAllDuplicates, setOverrideAllDuplicates] = useState(false) + const [duplicateChoices, setDuplicateChoices] = useState>({}) + const summaryJobIdRef = useRef(null) + + useEffect(() => { + if (summary === null) { + return + } + + summaryJobIdRef.current = summary.jobId + + const handleBeforeUnload = (): void => { + const jobId = summaryJobIdRef.current + if (jobId !== null) { + void deleteExpenseImport(jobId) + } + } + + window.addEventListener('beforeunload', handleBeforeUnload) + + return () => { + window.removeEventListener('beforeunload', handleBeforeUnload) + const jobId = summaryJobIdRef.current + if (jobId !== null) { + void deleteExpenseImport(jobId) + } + summaryJobIdRef.current = null + } + }, [summary]) + + function onSelectFile(event: React.ChangeEvent): void { + const file = event.target.files?.[0] ?? 
null + setPreview(null) + setSummary(null) + setDuplicateChoices({}) + setOverrideAllDuplicates(false) + setErrorMessage('') + + if (file === null) { + setSelectedFile(null) + return + } + + if (!file.name.toLowerCase().endsWith('.csv')) { + setSelectedFile(null) + setErrorMessage('Please upload a .csv file.') + return + } + + if (file.size > MAX_UPLOAD_BYTES) { + setSelectedFile(null) + setErrorMessage('CSV file must be 5 MB or smaller.') + return + } + + setSelectedFile(file) + } + + async function onPreview(event: React.FormEvent): Promise { + event.preventDefault() + if (selectedFile === null) { + setErrorMessage('Select a CSV file before previewing import results.') + return + } + + setErrorMessage('') + setIsPreviewing(true) + try { + const result = await previewExpenseImport({ file: selectedFile }) + if (result.ok && result.data) { + setPreview(result.data) + return + } + + setErrorMessage(result.error?.message ?? 'Unable to preview this CSV import.') + } finally { + setIsPreviewing(false) + } + } + + async function onConfirm(): Promise { + if (preview === null) { + return + } + + const request: ConfirmExpenseImportRequest = { + overrideAllDuplicates: overrideAllDuplicates, + duplicateChoices: Object.entries(duplicateChoices).map(([rowNumber, resolution]) => ({ + rowNumber: Number.parseInt(rowNumber, 10), + resolution, + })), + } + + setErrorMessage('') + setIsConfirming(true) + try { + const result = await confirmExpenseImport(preview.jobId, request) + if (result.ok && result.data) { + setSummary(result.data) + return + } + + setErrorMessage(result.error?.message ?? 'Unable to confirm this CSV import.') + } finally { + setIsConfirming(false) + } + } + + function setDuplicateResolution( + rowNumber: number, + resolution: 'keep-existing' | 'replace-with-import', + ): void { + setDuplicateChoices((current) => ({ ...current, [rowNumber]: resolution })) + } + + return ( +
+
+

Import Expenses

+

+ Upload a CSV with Date, Amount, and Note columns to preview and import historical + expenses. +

+

+ Receipts cannot be imported. To add a receipt, find the expense in your history and use + the edit option. +

+ +
void onPreview(event)}> + + + {selectedFile ?

Selected: {selectedFile.name}

: null} +
+ +
+
+ + {errorMessage ? ( +

+ {errorMessage} +

+ ) : null} + + {preview ? ( +
+

Preview

+

Total rows: {preview.totalRows}

+

Valid rows: {preview.validRows}

+

Invalid rows: {preview.invalidRows}

+

Duplicate rows: {preview.duplicateCount}

+ + {preview.errors.length > 0 ? ( +
    + {preview.errors.map((error) => ( +
  • + Row {error.rowNumber}: {error.message} +
  • + ))} +
+ ) : null} + + {preview.duplicates.length > 0 ? ( +
+ +
    + {preview.duplicates.map((duplicate) => ( +
  • +

    + Row {duplicate.rowNumber}: {duplicate.expenseDate} · ${duplicate.amount.toFixed(2)} +

    + + +
  • + ))} +
+
+ ) : null} + +
+ +
+
+ ) : null} + + {summary ? ( +
+

Import complete

+

Imported rows: {summary.importedRows}

+

Skipped rows: {summary.skippedRows}

+

Failed rows: {summary.failedRows}

+ Back to Expense History +
+ ) : null} +
+
+ ) +} \ No newline at end of file diff --git a/src/BikeTracking.Frontend/src/services/expense-import-api.ts b/src/BikeTracking.Frontend/src/services/expense-import-api.ts new file mode 100644 index 0000000..7ef0ff2 --- /dev/null +++ b/src/BikeTracking.Frontend/src/services/expense-import-api.ts @@ -0,0 +1,207 @@ +import type { ApiResult, ErrorResponse } from "./users-api"; + +const API_BASE_URL = + (import.meta.env.VITE_API_BASE_URL as string | undefined)?.replace( + /\/$/, + "", + ) ?? "http://localhost:5436"; + +const SESSION_KEY = "bike_tracking_auth_session"; + +export interface ExpenseImportPreviewResponse { + jobId: number; + fileName: string; + totalRows: number; + validRows: number; + invalidRows: number; + duplicateCount: number; + errors: ExpenseImportRowError[]; + duplicates: ExpenseImportDuplicateConflict[]; + canConfirmImport: boolean; +} + +export interface ExpenseImportRowError { + rowNumber: number; + field: string; + message: string; +} + +export interface ExistingExpenseMatch { + expenseId: number; + expenseDate: string; + amount: number; + note: string | null; +} + +export interface ExpenseImportDuplicateConflict { + rowNumber: number; + expenseDate: string; + amount: number; + note: string | null; + existingMatches: ExistingExpenseMatch[]; +} + +export interface ExpenseImportPreviewRequest { + file: File; +} + +export interface ConfirmExpenseImportRequest { + overrideAllDuplicates: boolean; + duplicateChoices: Array<{ + rowNumber: number; + resolution: "keep-existing" | "replace-with-import"; + }>; +} + +export interface ExpenseImportSummaryResponse { + jobId: number; + totalRows: number; + importedRows: number; + skippedRows: number; + failedRows: number; +} + +export interface ExpenseImportStatusResponse { + jobId: number; + status: string; + totalRows: number; + validRows: number; + invalidRows: number; + duplicateCount: number; + summary: ExpenseImportSummaryResponse | null; +} + +function getAuthHeaders(contentTypeJson: boolean): Record { 
+ const headers: Record = {}; + if (contentTypeJson) { + headers["Content-Type"] = "application/json"; + } + + try { + const raw = sessionStorage.getItem(SESSION_KEY); + if (!raw) { + return headers; + } + + const parsed = JSON.parse(raw) as { userId?: number }; + if (typeof parsed.userId === "number" && parsed.userId > 0) { + headers["X-User-Id"] = parsed.userId.toString(); + } + } catch { + return headers; + } + + return headers; +} + +async function parseError( + response: Response, +): Promise { + try { + return (await response.json()) as ErrorResponse; + } catch { + return undefined; + } +} + +export async function previewExpenseImport( + request: ExpenseImportPreviewRequest, +): Promise> { + const formData = new FormData(); + formData.append("file", request.file); + + const response = await fetch(`${API_BASE_URL}/api/expense-imports/preview`, { + method: "POST", + headers: getAuthHeaders(false), + body: formData, + }); + + if (response.ok) { + return { + ok: true, + status: response.status, + data: (await response.json()) as ExpenseImportPreviewResponse, + }; + } + + return { + ok: false, + status: response.status, + error: await parseError(response), + }; +} + +export async function confirmExpenseImport( + jobId: number, + request: ConfirmExpenseImportRequest, +): Promise> { + const response = await fetch( + `${API_BASE_URL}/api/expense-imports/${jobId}/confirm`, + { + method: "POST", + headers: getAuthHeaders(true), + body: JSON.stringify(request), + }, + ); + + if (response.ok) { + return { + ok: true, + status: response.status, + data: (await response.json()) as ExpenseImportSummaryResponse, + }; + } + + return { + ok: false, + status: response.status, + error: await parseError(response), + }; +} + +export async function getExpenseImportStatus( + jobId: number, +): Promise> { + const response = await fetch( + `${API_BASE_URL}/api/expense-imports/${jobId}/status`, + { + headers: getAuthHeaders(false), + }, + ); + + if (response.ok) { + return { + ok: true, 
+ status: response.status, + data: (await response.json()) as ExpenseImportStatusResponse, + }; + } + + return { + ok: false, + status: response.status, + error: await parseError(response), + }; +} + +export async function deleteExpenseImport( + jobId: number, +): Promise> { + const response = await fetch(`${API_BASE_URL}/api/expense-imports/${jobId}`, { + method: "DELETE", + headers: getAuthHeaders(false), + }); + + if (response.ok) { + return { + ok: true, + status: response.status, + }; + } + + return { + ok: false, + status: response.status, + error: await parseError(response), + }; +}