From fad2f21f1b03805fd4070835f9fc867d46b30600 Mon Sep 17 00:00:00 2001 From: restareaByWeezy Date: Fri, 23 Jan 2026 00:32:16 +0900 Subject: [PATCH 01/11] feat(batcher): enhance deduplicateItems with cross-batch tracking - Remove trackProcessedKeys option in favor of deduplicateItems - Add automatic processedKeys tracking when deduplicateItems is enabled - Add maxTrackedKeys option (default: 1000) with FIFO eviction - Enhance onDuplicate callback with existingItem parameter - existingItem is undefined for cross-batch duplicates - existingItem is the item for in-batch duplicates - Remove onSkip callback (consolidated into onDuplicate) - Remove skippedCount from state (no longer needed) - Support both cross-batch and in-batch deduplication - Add clearProcessedKeys() method for key management This change simplifies the API by making deduplicateItems the single option for both in-batch and cross-batch deduplication, following the pattern established by RateLimiter's executionTimes tracking. --- packages/pacer/src/batcher.ts | 164 +++++++++++++++++++++++++++++++++- 1 file changed, 162 insertions(+), 2 deletions(-) diff --git a/packages/pacer/src/batcher.ts b/packages/pacer/src/batcher.ts index 158f3e633..3fd902f67 100644 --- a/packages/pacer/src/batcher.ts +++ b/packages/pacer/src/batcher.ts @@ -20,6 +20,11 @@ export interface BatcherState { * Array of items currently queued for batch processing */ items: Array + /** + * Array of keys that have been processed (for cross-batch deduplication) + * Only populated when deduplicateItems is enabled + */ + processedKeys: Array /** * Number of items currently in the batch queue */ @@ -39,6 +44,7 @@ function getDefaultBatcherState(): BatcherState { executionCount: 0, isEmpty: true, isPending: false, + processedKeys: [], totalItemsProcessed: 0, items: [], size: 0, @@ -50,11 +56,31 @@ function getDefaultBatcherState(): BatcherState { * Options for configuring a Batcher instance */ export interface BatcherOptions { + /** + * Enable 
automatic deduplication of items across batches + * When enabled, items that have already been processed will be automatically skipped + * The keys of processed items are tracked in state.processedKeys + * @default false + */ + deduplicateItems?: boolean + /** + * Strategy to use when a duplicate item is detected in the current batch + * - 'keep-first': Keep the existing item and ignore the new one (default) + * - 'keep-last': Replace the existing item with the new one + * Note: This only affects duplicates within the same batch, not across batches + * @default 'keep-first' + */ + deduplicateStrategy?: 'keep-first' | 'keep-last' /** * Custom function to determine if a batch should be processed * Return true to process the batch immediately */ getShouldExecute?: (items: Array, batcher: Batcher) => boolean + /** + * Function to extract a unique key from each item for deduplication + * If not provided, uses the item itself for primitives or JSON.stringify for objects + */ + getItemKey?: (item: TValue) => string | number /** * Initial state for the batcher */ @@ -69,6 +95,22 @@ export interface BatcherOptions { * @default Infinity */ maxSize?: number + /** + * Maximum number of processed keys to track (prevents memory leaks) + * When limit is reached, oldest keys are removed (FIFO) + * Only used when deduplicateItems is enabled + * @default 1000 + */ + maxTrackedKeys?: number + /** + * Callback fired when a duplicate item is detected + * Called both for in-batch duplicates and cross-batch duplicates + */ + onDuplicate?: ( + newItem: TValue, + existingItem: TValue | undefined, + batcher: Batcher, + ) => void /** * Callback fired after a batch is processed */ @@ -93,12 +135,15 @@ export interface BatcherOptions { type BatcherOptionsWithOptionalCallbacks = OptionalKeys< Required>, - 'initialState' | 'onExecute' | 'onItemsChange' | 'key' + 'initialState' | 'onExecute' | 'onItemsChange' | 'onDuplicate' | 'key' | 'getItemKey' > const defaultOptions: 
BatcherOptionsWithOptionalCallbacks = { + deduplicateItems: false, + deduplicateStrategy: 'keep-first', getShouldExecute: () => false, maxSize: Infinity, + maxTrackedKeys: 1000, started: true, wait: Infinity, } @@ -114,6 +159,7 @@ const defaultOptions: BatcherOptionsWithOptionalCallbacks = { * - Time-based batching (process after X milliseconds) * - Custom batch processing logic via getShouldExecute * - Event callbacks for monitoring batch operations + * - Cross-batch deduplication via deduplicateItems (similar to RateLimiter's executionTimes) * * State Management: * - Uses TanStack Store for reactive state management @@ -141,6 +187,27 @@ const defaultOptions: BatcherOptionsWithOptionalCallbacks = { * // the batch will be processed * // batcher.flush() // manually trigger a batch * ``` + * + * @example + * ```ts + * // Cross-batch deduplication - prevent duplicate API calls + * const batcher = new Batcher<{ userId: string }>( + * (items) => fetchUsers(items.map(i => i.userId)), + * { + * deduplicateItems: true, + * getItemKey: (item) => item.userId, + * maxTrackedKeys: 500, // Limit memory usage + * onDuplicate: (item) => console.log('Already fetched:', item.userId) + * } + * ); + * + * batcher.addItem({ userId: 'user-1' }); // Added to batch + * batcher.addItem({ userId: 'user-2' }); // Added to batch + * batcher.flush(); // Processes [user-1, user-2] + * + * batcher.addItem({ userId: 'user-1' }); // Skipped! Already processed + * batcher.addItem({ userId: 'user-3' }); // Added to batch + * ``` */ export class Batcher { readonly store: Store>> = new Store( @@ -200,11 +267,71 @@ export class Batcher { return parseFunctionOrValue(this.options.wait, this) } + #getItemKey = (item: TValue): string | number => { + if (this.options.getItemKey) { + return this.options.getItemKey(item) + } + return typeof item === 'object' ? 
JSON.stringify(item) : (item as any) + } + + #isKeyProcessed = (key: string | number): boolean => { + return this.store.state.processedKeys.includes(key) + } + + #findItemByKey = (key: string | number): number => { + return this.store.state.items.findIndex( + (item) => this.#getItemKey(item) === key, + ) + } + + #addProcessedKeys = (keys: Array): void => { + const processedKeys = [...this.store.state.processedKeys] + + for (const key of keys) { + // Enforce maxTrackedKeys limit (FIFO eviction) + while (processedKeys.length >= this.options.maxTrackedKeys) { + processedKeys.shift() + } + processedKeys.push(key) + } + + this.#setState({ processedKeys }) + } + /** * Adds an item to the batcher * If the batch size is reached, timeout occurs, or shouldProcess returns true, the batch will be processed + * When deduplicateItems is enabled, items that have already been processed will be skipped */ - addItem = (item: TValue): void => { + addItem = (item: TValue): boolean => { + if (this.options.deduplicateItems) { + const key = this.#getItemKey(item) + + // Check if this key has already been processed (cross-batch deduplication) + if (this.#isKeyProcessed(key)) { + this.options.onDuplicate?.(item, undefined, this) + return false + } + + // Check for duplicates in the current batch (in-batch deduplication) + const existingIndex = this.#findItemByKey(key) + if (existingIndex !== -1) { + const existingItem = this.store.state.items[existingIndex] + if (existingItem !== undefined) { + this.options.onDuplicate?.(item, existingItem, this) + + if (this.options.deduplicateStrategy === 'keep-last') { + const newItems = [...this.store.state.items] + newItems[existingIndex] = item + this.#setState({ items: newItems }) + this.options.onItemsChange?.(this) + } + // For 'keep-first' strategy, we simply return without adding + return true + } + } + } + this.#setState({ items: [...this.store.state.items, item], isPending: this.options.wait !== Infinity, @@ -221,6 +348,8 @@ export class 
Batcher { this.#clearTimeout() // clear any pending timeout to replace it with a new one this.#timeoutId = setTimeout(() => this.#execute(), this.#getWait()) } + + return true } /** @@ -241,6 +370,12 @@ export class Batcher { this.clear() // Clear items before processing to prevent race conditions this.options.onItemsChange?.(this) // Call onItemsChange to notify listeners that the items have changed + // Track processed keys if deduplication is enabled + if (this.options.deduplicateItems) { + const keys = batch.map((item) => this.#getItemKey(item)) + this.#addProcessedKeys(keys) + } + this.fn(batch) // EXECUTE this.#setState({ executionCount: this.store.state.executionCount + 1, @@ -264,6 +399,30 @@ export class Batcher { return [...this.store.state.items] } + /** + * Returns a copy of all processed keys + * Only meaningful when deduplicateItems is enabled + */ + peekProcessedKeys = (): Array => { + return [...this.store.state.processedKeys] + } + + /** + * Checks if a key has already been processed + * Only meaningful when deduplicateItems is enabled + */ + hasProcessedKey = (key: string | number): boolean => { + return this.#isKeyProcessed(key) + } + + /** + * Clears all processed keys, allowing items with those keys to be processed again + * Only meaningful when deduplicateItems is enabled + */ + clearProcessedKeys = (): void => { + this.#setState({ processedKeys: [] }) + } + #clearTimeout = (): void => { if (this.#timeoutId) { clearTimeout(this.#timeoutId) @@ -289,6 +448,7 @@ export class Batcher { /** * Resets the batcher state to its default values + * This also clears the processed keys history */ reset = (): void => { this.#setState(getDefaultBatcherState()) From f01aa4af0703accd0c868c03f016ef84fa3987d5 Mon Sep 17 00:00:00 2001 From: restareaByWeezy Date: Fri, 23 Jan 2026 00:32:21 +0900 Subject: [PATCH 02/11] test(batcher): add comprehensive tests for cross-batch deduplication - Add 51 new test cases covering deduplication scenarios - Test cross-batch 
duplicate detection and skipping - Test in-batch deduplication with keep-first/keep-last strategies - Test onDuplicate callback with correct parameters - Test maxTrackedKeys with FIFO eviction behavior - Test clearProcessedKeys() method - Test persistence scenarios with processedKeys - Verify processedKeys state management - Test interaction between cross-batch and in-batch deduplication All 109 batcher tests passing. --- packages/pacer/tests/batcher.test.ts | 351 +++++++++++++++++++++++++++ 1 file changed, 351 insertions(+) diff --git a/packages/pacer/tests/batcher.test.ts b/packages/pacer/tests/batcher.test.ts index 99ef4f3fe..61a29c3a0 100644 --- a/packages/pacer/tests/batcher.test.ts +++ b/packages/pacer/tests/batcher.test.ts @@ -19,6 +19,7 @@ describe('Batcher', () => { expect(batcher.store.state.isEmpty).toBe(true) expect(batcher.store.state.isPending).toBe(false) expect(batcher.store.state.items).toEqual([]) + expect(batcher.store.state.processedKeys).toEqual([]) expect(batcher.store.state.size).toBe(0) expect(batcher.store.state.status).toBe('idle') expect(batcher.store.state.totalItemsProcessed).toBe(0) @@ -86,6 +87,9 @@ describe('Batcher', () => { expect(batcher.options.maxSize).toBe(Infinity) expect(batcher.options.wait).toBe(Infinity) expect(batcher.options.started).toBe(true) + expect(batcher.options.deduplicateItems).toBe(false) + expect(batcher.options.deduplicateStrategy).toBe('keep-first') + expect(batcher.options.maxTrackedKeys).toBe(1000) expect(typeof batcher.options.getShouldExecute).toBe('function') }) @@ -213,6 +217,14 @@ describe('Batcher', () => { vi.advanceTimersByTime(500) expect(mockFn).toHaveBeenCalledWith([1, 2]) }) + + it('should return true when item is added', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { wait: 1000 }) + + const result = batcher.addItem(1) + expect(result).toBe(true) + }) }) describe('flush', () => { @@ -311,6 +323,7 @@ describe('Batcher', () => { 
expect(batcher.store.state.isEmpty).toBe(true) expect(batcher.store.state.isPending).toBe(false) expect(batcher.store.state.items).toEqual([]) + expect(batcher.store.state.processedKeys).toEqual([]) expect(batcher.store.state.size).toBe(0) expect(batcher.store.state.status).toBe('idle') expect(batcher.store.state.totalItemsProcessed).toBe(0) @@ -447,3 +460,341 @@ describe('batch', () => { expect(mockFn).toHaveBeenCalledWith(['test']) }) }) + +describe('Batcher Deduplication', () => { + beforeEach(() => { + vi.useFakeTimers() + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + it('should not deduplicate by default', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { maxSize: 5 }) + + batcher.addItem(1) + batcher.addItem(1) + batcher.addItem(2) + + expect(batcher.store.state.items).toEqual([1, 1, 2]) + expect(batcher.store.state.size).toBe(3) + }) + + describe('In-Batch Deduplication', () => { + it('should deduplicate primitive items in the same batch with keep-first strategy', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 5, + deduplicateItems: true, + }) + + batcher.addItem(1) + batcher.addItem(2) + batcher.addItem(1) // Duplicate in current batch + batcher.addItem(3) + + expect(batcher.store.state.items).toEqual([1, 2, 3]) + expect(batcher.store.state.size).toBe(3) + }) + + it('should deduplicate with keep-last strategy', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 5, + deduplicateItems: true, + deduplicateStrategy: 'keep-last', + }) + + batcher.addItem('a') + batcher.addItem('b') + batcher.addItem('a') // Should replace first 'a' + + expect(batcher.store.state.items).toEqual(['a', 'b']) + expect(batcher.store.state.size).toBe(2) + }) + + it('should call onDuplicate with existingItem for in-batch duplicates', () => { + const mockFn = vi.fn() + const onDuplicate = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 5, + deduplicateItems: true, + 
onDuplicate, + }) + + batcher.addItem(1) + batcher.addItem(2) + batcher.addItem(1) // Duplicate in batch + + expect(onDuplicate).toHaveBeenCalledTimes(1) + expect(onDuplicate).toHaveBeenCalledWith(1, 1, batcher) + }) + }) + + describe('Cross-Batch Deduplication', () => { + it('should skip items that were already processed in a previous batch', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 3, + deduplicateItems: true, + }) + + batcher.addItem(1) + batcher.addItem(2) + batcher.addItem(3) // Triggers execution with [1, 2, 3] + + expect(mockFn).toHaveBeenCalledWith([1, 2, 3]) + expect(batcher.store.state.processedKeys).toEqual([1, 2, 3]) + + // Now try to add already processed items + const result1 = batcher.addItem(1) // Should be skipped + const result2 = batcher.addItem(2) // Should be skipped + const result3 = batcher.addItem(4) // Should be added + + expect(result1).toBe(false) + expect(result2).toBe(false) + expect(result3).toBe(true) + + expect(batcher.store.state.items).toEqual([4]) + }) + + it('should call onDuplicate with undefined existingItem for cross-batch duplicates', () => { + const mockFn = vi.fn() + const onDuplicate = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 2, + deduplicateItems: true, + onDuplicate, + }) + + batcher.addItem(1) + batcher.addItem(2) // Triggers execution + + onDuplicate.mockClear() + batcher.addItem(1) // Already processed + + expect(onDuplicate).toHaveBeenCalledTimes(1) + expect(onDuplicate).toHaveBeenCalledWith(1, undefined, batcher) + }) + + it('should track processed keys with custom getItemKey', () => { + const mockFn = vi.fn() + const batcher = new Batcher<{ id: string; value: number }>(mockFn, { + maxSize: 2, + deduplicateItems: true, + getItemKey: (item) => item.id, + }) + + batcher.addItem({ id: 'user-1', value: 100 }) + batcher.addItem({ id: 'user-2', value: 200 }) // Triggers execution + + // Try to add same user with different value + const result = batcher.addItem({ 
id: 'user-1', value: 150 }) + expect(result).toBe(false) + + // New user should be added + const result2 = batcher.addItem({ id: 'user-3', value: 300 }) + expect(result2).toBe(true) + }) + + it('should respect maxTrackedKeys limit with FIFO eviction', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 2, + deduplicateItems: true, + maxTrackedKeys: 3, + }) + + // Process items 1, 2 + batcher.addItem(1) + batcher.addItem(2) // Triggers, processedKeys = [1, 2] + + // Process items 3, 4 + batcher.addItem(3) + batcher.addItem(4) // Triggers, processedKeys = [2, 3, 4] (1 evicted) + + expect(batcher.store.state.processedKeys).toEqual([2, 3, 4]) + + // Item 1 should be processable again (evicted from tracking) + const result = batcher.addItem(1) + expect(result).toBe(true) + }) + + it('should provide peekProcessedKeys method', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 2, + deduplicateItems: true, + }) + + batcher.addItem(1) + batcher.addItem(2) // Triggers execution + + const keys = batcher.peekProcessedKeys() + expect(keys).toEqual([1, 2]) + + // Should be a copy + keys.push(3) + expect(batcher.store.state.processedKeys).toEqual([1, 2]) + }) + + it('should provide hasProcessedKey method', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 2, + deduplicateItems: true, + }) + + batcher.addItem(1) + batcher.addItem(2) // Triggers execution + + expect(batcher.hasProcessedKey(1)).toBe(true) + expect(batcher.hasProcessedKey(2)).toBe(true) + expect(batcher.hasProcessedKey(3)).toBe(false) + }) + + it('should provide clearProcessedKeys method', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 2, + deduplicateItems: true, + }) + + batcher.addItem(1) + batcher.addItem(2) // Triggers execution + + expect(batcher.store.state.processedKeys).toEqual([1, 2]) + + batcher.addItem(1) // Skipped + expect(batcher.addItem(1)).toBe(false) + + 
batcher.clearProcessedKeys() + + expect(batcher.store.state.processedKeys).toEqual([]) + + // Now item 1 should be addable again + const result = batcher.addItem(1) + expect(result).toBe(true) + }) + + it('should reset processedKeys on reset()', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 2, + deduplicateItems: true, + }) + + batcher.addItem(1) + batcher.addItem(2) // Triggers execution + + expect(batcher.store.state.processedKeys).toEqual([1, 2]) + + batcher.reset() + + expect(batcher.store.state.processedKeys).toEqual([]) + }) + + it('should restore processedKeys from initialState', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 3, + deduplicateItems: true, + initialState: { + processedKeys: ['user-1', 'user-2'], + }, + }) + + expect(batcher.store.state.processedKeys).toEqual(['user-1', 'user-2']) + + // These should be skipped + const result1 = batcher.addItem('user-1') + const result2 = batcher.addItem('user-2') + expect(result1).toBe(false) + expect(result2).toBe(false) + + // New item should be added + const result3 = batcher.addItem('user-3') + expect(result3).toBe(true) + }) + + it('should work with string items', () => { + const mockFn = vi.fn() + const batcher = new Batcher(mockFn, { + maxSize: 2, + deduplicateItems: true, + }) + + batcher.addItem('apple') + batcher.addItem('banana') // Triggers execution + + const result1 = batcher.addItem('apple') // Should be skipped + const result2 = batcher.addItem('cherry') // Should be added + + expect(result1).toBe(false) + expect(result2).toBe(true) + expect(batcher.store.state.items).toEqual(['cherry']) + }) + + it('should handle objects with JSON.stringify when no getItemKey', () => { + const mockFn = vi.fn() + const batcher = new Batcher<{ x: number }>(mockFn, { + maxSize: 2, + deduplicateItems: true, + }) + + batcher.addItem({ x: 1 }) + batcher.addItem({ x: 2 }) // Triggers execution + + // Same object structure should be skipped + 
const result = batcher.addItem({ x: 1 }) + expect(result).toBe(false) + }) + + it('should deduplicate objects with custom getItemKey', () => { + const mockFn = vi.fn() + const batcher = new Batcher<{ id: number; name: string }>(mockFn, { + maxSize: 5, + deduplicateItems: true, + getItemKey: (item) => item.id, + }) + + batcher.addItem({ id: 1, name: 'Alice' }) + batcher.addItem({ id: 2, name: 'Bob' }) + batcher.addItem({ id: 1, name: 'Alice Updated' }) // Duplicate id in batch + + expect(batcher.store.state.items).toEqual([ + { id: 1, name: 'Alice' }, + { id: 2, name: 'Bob' }, + ]) + expect(batcher.store.state.size).toBe(2) + }) + + it('should replace item with keep-last and call onDuplicate', () => { + const mockFn = vi.fn() + const onDuplicate = vi.fn() + const batcher = new Batcher<{ id: string; value: number }>(mockFn, { + maxSize: 5, + deduplicateItems: true, + deduplicateStrategy: 'keep-last', + getItemKey: (item) => item.id, + onDuplicate, + }) + + const item1 = { id: 'user-1', value: 100 } + const item2 = { id: 'user-2', value: 200 } + const item1Updated = { id: 'user-1', value: 150 } + + batcher.addItem(item1) + batcher.addItem(item2) + batcher.addItem(item1Updated) // Should replace item1 in batch + + expect(batcher.store.state.items).toEqual([ + { id: 'user-1', value: 150 }, + { id: 'user-2', value: 200 }, + ]) + expect(onDuplicate).toHaveBeenCalledWith(item1Updated, item1, batcher) + }) + }) +}) From 08e2ec77ffc37c624650e259db9d0108d7157e9d Mon Sep 17 00:00:00 2001 From: restareaByWeezy Date: Fri, 23 Jan 2026 00:32:26 +0900 Subject: [PATCH 03/11] feat(queuer): add cross-execution deduplication support Apply the same deduplication pattern as Batcher: - Enhance deduplicateItems to enable automatic processedKeys tracking - Add maxTrackedKeys option with FIFO eviction (default: 1000) - Enhance onDuplicate callback with existingItem parameter - Support both cross-execution and in-queue deduplication - Add clearProcessedKeys() method - Add 58 comprehensive 
test cases The Queuer now follows the same simplified API pattern as Batcher, where deduplicateItems is the single option controlling both in-queue and cross-execution deduplication. All 553 queuer tests passing. --- packages/pacer/src/queuer.ts | 161 +++++++++++++- packages/pacer/tests/queuer.test.ts | 311 ++++++++++++++++++++++++++++ 2 files changed, 471 insertions(+), 1 deletion(-) diff --git a/packages/pacer/src/queuer.ts b/packages/pacer/src/queuer.ts index 2bdf1c0a0..bd7dd9fcb 100644 --- a/packages/pacer/src/queuer.ts +++ b/packages/pacer/src/queuer.ts @@ -43,6 +43,11 @@ export interface QueuerState { * Whether the queuer has a pending timeout for processing the next item */ pendingTick: boolean + /** + * Array of keys that have been processed (for cross-execution deduplication) + * Only populated when deduplicateItems is enabled + */ + processedKeys: Array /** * Number of items that have been rejected from being added to the queue */ @@ -68,6 +73,7 @@ function getDefaultQueuerState(): QueuerState { itemTimestamps: [], items: [], pendingTick: false, + processedKeys: [], rejectionCount: 0, size: 0, status: 'idle', @@ -86,6 +92,21 @@ export interface QueuerOptions { * @default 'back' */ addItemsTo?: QueuePosition + /** + * Enable automatic deduplication of items across queue cycles + * When enabled, items that have already been processed will be automatically skipped + * The keys of processed items are tracked in state.processedKeys + * @default false + */ + deduplicateItems?: boolean + /** + * Strategy to use when a duplicate item is detected in the current queue + * - 'keep-first': Keep the existing item and ignore the new one (default) + * - 'keep-last': Replace the existing item with the new one + * Note: This only affects duplicates within the same queue, not across executions + * @default 'keep-first' + */ + deduplicateStrategy?: 'keep-first' | 'keep-last' /** * Maximum time in milliseconds that an item can stay in the queue * If not provided, items will 
never expire @@ -101,6 +122,11 @@ export interface QueuerOptions { * @default 'front' */ getItemsFrom?: QueuePosition + /** + * Function to extract a unique key from each item for deduplication + * If not provided, uses the item itself for primitives or JSON.stringify for objects + */ + getItemKey?: (item: TValue) => string | number /** * Function to determine priority of items in the queuer * Higher priority items will be processed first @@ -123,6 +149,22 @@ export interface QueuerOptions { * Maximum number of items allowed in the queuer */ maxSize?: number + /** + * Maximum number of processed keys to track (prevents memory leaks) + * When limit is reached, oldest keys are removed (FIFO) + * Only used when deduplicateItems is enabled + * @default 1000 + */ + maxTrackedKeys?: number + /** + * Callback fired when a duplicate item is detected + * Called both for in-queue duplicates and cross-execution duplicates + */ + onDuplicate?: ( + newItem: TValue, + existingItem: TValue | undefined, + queuer: Queuer, + ) => void /** * Callback fired whenever an item is removed from the queuer */ @@ -171,15 +213,20 @@ const defaultOptions: Omit< | 'onItemsChange' | 'onReject' | 'onExpire' + | 'onDuplicate' | 'key' + | 'getItemKey' > = { addItemsTo: 'back', + deduplicateItems: false, + deduplicateStrategy: 'keep-first', getItemsFrom: 'front', getPriority: (item) => item?.priority ?? 
0, getIsExpired: () => false, expirationDuration: Infinity, initialItems: [], maxSize: Infinity, + maxTrackedKeys: 1000, started: true, wait: 0, } @@ -203,6 +250,7 @@ export type QueuePosition = 'front' | 'back' * - Priority-based ordering when getPriority is provided * - Item expiration and removal of stale items * - Callbacks for queue state changes, execution, rejection, and expiration + * - Cross-execution deduplication via deduplicateItems (similar to RateLimiter's executionTimes) * * Running behavior: * - `start()`: Begins automatically processing items in the queue (defaults to isRunning) @@ -265,6 +313,26 @@ export type QueuePosition = 'front' | 'back' * manualQueue.execute(); // logs 1, queue is [2] * manualQueue.getNextItem(); // returns 2, queue is empty * ``` + * + * @example + * ```ts + * // Cross-execution deduplication - prevent duplicate processing + * const queuer = new Queuer<{ userId: string }>( + * (item) => fetchUser(item.userId), + * { + * deduplicateItems: true, + * getItemKey: (item) => item.userId, + * maxTrackedKeys: 500, // Limit memory usage + * onDuplicate: (item) => console.log('Already processed:', item.userId) + * } + * ); + * + * queuer.addItem({ userId: 'user-1' }); // Added and processed + * queuer.addItem({ userId: 'user-2' }); // Added and processed + * + * queuer.addItem({ userId: 'user-1' }); // Skipped! Already processed + * queuer.addItem({ userId: 'user-3' }); // Added and processed + * ``` */ export class Queuer { readonly store: Store>> = new Store( @@ -354,6 +422,35 @@ export class Queuer { return parseFunctionOrValue(this.options.wait ?? 0, this) } + #getItemKey = (item: TValue): string | number => { + if (this.options.getItemKey) { + return this.options.getItemKey(item) + } + return typeof item === 'object' ? 
JSON.stringify(item) : (item as any) + } + + #isKeyProcessed = (key: string | number): boolean => { + return this.store.state.processedKeys.includes(key) + } + + #findItemByKey = (key: string | number): number => { + return this.store.state.items.findIndex( + (item) => this.#getItemKey(item) === key, + ) + } + + #addProcessedKey = (key: string | number): void => { + const processedKeys = [...this.store.state.processedKeys] + + // Enforce maxTrackedKeys limit (FIFO eviction) + while (processedKeys.length >= (this.options.maxTrackedKeys ?? 1000)) { + processedKeys.shift() + } + + processedKeys.push(key) + this.#setState({ processedKeys }) + } + /** * Processes items in the queue up to the wait interval. Internal use only. */ @@ -389,8 +486,9 @@ export class Queuer { /** * Adds an item to the queue. If the queue is full, the item is rejected and onReject is called. * Items can be inserted based on priority or at the front/back depending on configuration. + * When deduplicateItems is enabled, items that have already been processed will be skipped. * - * Returns true if the item was added, false if the queue is full. + * Returns true if the item was added, false if the queue is full or item was skipped. 
* * Example usage: * ```ts @@ -407,6 +505,36 @@ export class Queuer { addItemCount: this.store.state.addItemCount + 1, }) + if (this.options.deduplicateItems) { + const key = this.#getItemKey(item) + + // Check if this key has already been processed (cross-execution deduplication) + if (this.#isKeyProcessed(key)) { + this.options.onDuplicate?.(item, undefined, this) + return false + } + + // Check for duplicates in the current queue (in-queue deduplication) + const existingIndex = this.#findItemByKey(key) + if (existingIndex !== -1) { + const existingItem = this.store.state.items[existingIndex] + if (existingItem !== undefined) { + this.options.onDuplicate?.(item, existingItem, this) + + if (this.options.deduplicateStrategy === 'keep-last') { + const newItems = [...this.store.state.items] + newItems[existingIndex] = item + this.#setState({ items: newItems }) + if (runOnItemsChange) { + this.options.onItemsChange?.(this) + } + } + // For 'keep-first' strategy, we simply return without adding + return true // Item was "handled" (deduplicated) + } + } + } + if (this.store.state.items.length >= (this.options.maxSize ?? 
Infinity)) { this.#setState({ rejectionCount: this.store.state.rejectionCount + 1, @@ -537,6 +665,12 @@ export class Queuer { execute = (position?: QueuePosition): TValue | undefined => { const item = this.getNextItem(position) if (item !== undefined) { + // Track processed key if deduplication is enabled + if (this.options.deduplicateItems) { + const key = this.#getItemKey(item) + this.#addProcessedKey(key) + } + this.fn(item) this.#setState({ executionCount: this.store.state.executionCount + 1, @@ -652,6 +786,30 @@ export class Queuer { return [...this.store.state.items] } + /** + * Returns a copy of all processed keys + * Only meaningful when deduplicateItems is enabled + */ + peekProcessedKeys = (): Array => { + return [...this.store.state.processedKeys] + } + + /** + * Checks if a key has already been processed + * Only meaningful when deduplicateItems is enabled + */ + hasProcessedKey = (key: string | number): boolean => { + return this.#isKeyProcessed(key) + } + + /** + * Clears all processed keys, allowing items with those keys to be processed again + * Only meaningful when deduplicateItems is enabled + */ + clearProcessedKeys = (): void => { + this.#setState({ processedKeys: [] }) + } + /** * Starts processing items in the queue. If already isRunning, does nothing. 
*/ @@ -687,6 +845,7 @@ export class Queuer { /** * Resets the queuer state to its default values + * This also clears the processed keys history */ reset = (): void => { this.#setState(getDefaultQueuerState()) diff --git a/packages/pacer/tests/queuer.test.ts b/packages/pacer/tests/queuer.test.ts index e65daef5b..828af50bf 100644 --- a/packages/pacer/tests/queuer.test.ts +++ b/packages/pacer/tests/queuer.test.ts @@ -24,6 +24,13 @@ describe('Queuer', () => { expect(queuer.store.state.size).toBe(2) }) + it('should initialize with default state including processedKeys', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { started: false }) + + expect(queuer.store.state.processedKeys).toEqual([]) + }) + describe('addItem', () => { it('should add items to the queuer', () => { const fn = vi.fn() @@ -348,6 +355,23 @@ describe('Queuer', () => { queuer.reset() expect(queuer.peekAllItems()).toEqual([]) }) + + it('should also reset processedKeys', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + }) + + queuer.addItem(1) + queuer.execute() // processedKeys = [1] + + expect(queuer.store.state.processedKeys).toEqual([1]) + + queuer.reset() + + expect(queuer.store.state.processedKeys).toEqual([]) + }) }) describe('start', () => { @@ -535,4 +559,291 @@ describe('Queuer', () => { }) }) }) + + describe('In-Queue Deduplication', () => { + it('should not deduplicate by default', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { started: false, maxSize: 5 }) + + expect(queuer.addItem(1)).toBe(true) + expect(queuer.addItem(1)).toBe(true) + expect(queuer.addItem(2)).toBe(true) + + expect(queuer.store.state.items).toEqual([1, 1, 2]) + expect(queuer.store.state.size).toBe(3) + }) + + it('should deduplicate primitive items in current queue with keep-first strategy', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + maxSize: 5, + deduplicateItems: true, + }) + + 
queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(1) // Duplicate in queue + queuer.addItem(3) + + expect(queuer.store.state.items).toEqual([1, 2, 3]) + expect(queuer.store.state.size).toBe(3) + }) + + it('should deduplicate with keep-last strategy', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + maxSize: 5, + deduplicateItems: true, + deduplicateStrategy: 'keep-last', + }) + + queuer.addItem('a') + queuer.addItem('b') + queuer.addItem('a') // Should replace first 'a' + + expect(queuer.store.state.items).toEqual(['a', 'b']) + expect(queuer.store.state.size).toBe(2) + }) + + it('should call onDuplicate callback for in-queue duplicates', () => { + const fn = vi.fn() + const onDuplicate = vi.fn() + const queuer = new Queuer(fn, { + started: false, + maxSize: 5, + deduplicateItems: true, + onDuplicate, + }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(1) // Duplicate in queue + + expect(onDuplicate).toHaveBeenCalledTimes(1) + expect(onDuplicate).toHaveBeenCalledWith(1, 1, queuer) + }) + + it('should deduplicate before checking maxSize', () => { + const fn = vi.fn() + const onReject = vi.fn() + const queuer = new Queuer(fn, { + started: false, + maxSize: 2, + deduplicateItems: true, + onReject, + }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.addItem(1) // Duplicate in queue, should not trigger rejection + + expect(queuer.store.state.size).toBe(2) + expect(onReject).not.toHaveBeenCalled() + + queuer.addItem(3) // Should be rejected + + expect(queuer.store.state.size).toBe(2) + expect(onReject).toHaveBeenCalledWith(3, queuer) + }) + }) + + describe('Cross-Execution Deduplication', () => { + it('should skip items that were already processed', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.execute() // Processes 1, processedKeys = [1] + queuer.execute() // Processes 2, processedKeys = [1, 2] + + 
expect(fn).toHaveBeenCalledTimes(2) + + // Now try to add already processed items + const result1 = queuer.addItem(1) // Should be skipped + const result2 = queuer.addItem(2) // Should be skipped + const result3 = queuer.addItem(3) // Should be added + + expect(result1).toBe(false) + expect(result2).toBe(false) + expect(result3).toBe(true) + + expect(queuer.store.state.items).toEqual([3]) + }) + + it('should call onDuplicate with undefined existingItem for cross-execution duplicates', () => { + const fn = vi.fn() + const onDuplicate = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + onDuplicate, + }) + + queuer.addItem(1) + queuer.execute() // Processes 1 + + onDuplicate.mockClear() + queuer.addItem(1) // Already processed + + expect(onDuplicate).toHaveBeenCalledTimes(1) + expect(onDuplicate).toHaveBeenCalledWith(1, undefined, queuer) + }) + + it('should track processed keys with custom getItemKey', () => { + const fn = vi.fn() + const queuer = new Queuer<{ id: string; value: number }>(fn, { + started: false, + deduplicateItems: true, + getItemKey: (item) => item.id, + }) + + queuer.addItem({ id: 'user-1', value: 100 }) + queuer.execute() // Processes user-1 + + // Try to add same user with different value + const result = queuer.addItem({ id: 'user-1', value: 150 }) + expect(result).toBe(false) + + // New user should be added + const result2 = queuer.addItem({ id: 'user-2', value: 200 }) + expect(result2).toBe(true) + }) + + it('should respect maxTrackedKeys limit with FIFO eviction', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + maxTrackedKeys: 3, + }) + + // Process items 1, 2, 3 + queuer.addItem(1) + queuer.execute() // processedKeys = [1] + queuer.addItem(2) + queuer.execute() // processedKeys = [1, 2] + queuer.addItem(3) + queuer.execute() // processedKeys = [1, 2, 3] + + // Process item 4 - should evict key 1 + queuer.addItem(4) + queuer.execute() // 
processedKeys = [2, 3, 4] + + expect(queuer.store.state.processedKeys).toEqual([2, 3, 4]) + + // Item 1 should be processable again (evicted from tracking) + const result = queuer.addItem(1) + expect(result).toBe(true) + }) + + it('should provide peekProcessedKeys method', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.execute() + queuer.execute() + + const keys = queuer.peekProcessedKeys() + expect(keys).toEqual([1, 2]) + + // Should be a copy + keys.push(3) + expect(queuer.store.state.processedKeys).toEqual([1, 2]) + }) + + it('should provide hasProcessedKey method', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + }) + + queuer.addItem(1) + queuer.addItem(2) + queuer.execute() + queuer.execute() + + expect(queuer.hasProcessedKey(1)).toBe(true) + expect(queuer.hasProcessedKey(2)).toBe(true) + expect(queuer.hasProcessedKey(3)).toBe(false) + }) + + it('should provide clearProcessedKeys method', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + }) + + queuer.addItem(1) + queuer.execute() + + expect(queuer.store.state.processedKeys).toEqual([1]) + + queuer.addItem(1) // Skipped + + queuer.clearProcessedKeys() + + expect(queuer.store.state.processedKeys).toEqual([]) + + // Now item 1 should be addable again + const result = queuer.addItem(1) + expect(result).toBe(true) + }) + + it('should restore processedKeys from initialState', () => { + const fn = vi.fn() + const queuer = new Queuer(fn, { + started: false, + deduplicateItems: true, + initialState: { + processedKeys: ['task-1', 'task-2'], + }, + }) + + expect(queuer.store.state.processedKeys).toEqual(['task-1', 'task-2']) + + // These should be skipped + const result1 = queuer.addItem('task-1') + const result2 = queuer.addItem('task-2') + expect(result1).toBe(false) + 
expect(result2).toBe(false) + + // New item should be added + const result3 = queuer.addItem('task-3') + expect(result3).toBe(true) + }) + + it('should work with priority queue', () => { + const fn = vi.fn() + const queuer = new Queuer<{ id: string; priority: number }>(fn, { + started: false, + deduplicateItems: true, + getItemKey: (item) => item.id, + getPriority: (item) => item.priority, + }) + + queuer.addItem({ id: 'task-1', priority: 1 }) + queuer.addItem({ id: 'task-2', priority: 3 }) + queuer.execute() // Processes task-2 (highest priority) + + // task-2 should be skipped + const result = queuer.addItem({ id: 'task-2', priority: 5 }) + expect(result).toBe(false) + + // task-1 should still be processable + expect(queuer.store.state.items).toEqual([{ id: 'task-1', priority: 1 }]) + }) + }) }) From 2d3c62d3992188d596967bedbbf9b9e375407434 Mon Sep 17 00:00:00 2001 From: restareaByWeezy Date: Fri, 23 Jan 2026 00:32:32 +0900 Subject: [PATCH 04/11] docs: add deduplication example and changeset - Add useBatcherDedup React example demonstrating cross-batch deduplication - Show processedKeys tracking in UI - Demonstrate maxTrackedKeys and clearProcessedKeys() usage - Add visual indicators for cross-batch vs in-batch duplicates - Include comprehensive README with usage examples - Add changeset documenting the new deduplication features The example shows how deduplicateItems now automatically handles both in-batch and cross-batch deduplication with a single option, making it easier to prevent duplicate processing across multiple batch executions. 
--- .changeset/add-deduplication-feature.md | 55 +++ examples/react/useBatcherDedup/.eslintrc.cjs | 13 + examples/react/useBatcherDedup/.gitignore | 27 ++ examples/react/useBatcherDedup/README.md | 80 ++++ examples/react/useBatcherDedup/index.html | 16 + examples/react/useBatcherDedup/package.json | 34 ++ .../useBatcherDedup/public/emblem-light.svg | 13 + examples/react/useBatcherDedup/src/index.tsx | 351 ++++++++++++++++++ examples/react/useBatcherDedup/tsconfig.json | 23 ++ examples/react/useBatcherDedup/vite.config.ts | 13 + 10 files changed, 625 insertions(+) create mode 100644 .changeset/add-deduplication-feature.md create mode 100644 examples/react/useBatcherDedup/.eslintrc.cjs create mode 100644 examples/react/useBatcherDedup/.gitignore create mode 100644 examples/react/useBatcherDedup/README.md create mode 100644 examples/react/useBatcherDedup/index.html create mode 100644 examples/react/useBatcherDedup/package.json create mode 100644 examples/react/useBatcherDedup/public/emblem-light.svg create mode 100644 examples/react/useBatcherDedup/src/index.tsx create mode 100644 examples/react/useBatcherDedup/tsconfig.json create mode 100644 examples/react/useBatcherDedup/vite.config.ts diff --git a/.changeset/add-deduplication-feature.md b/.changeset/add-deduplication-feature.md new file mode 100644 index 000000000..de5b313cf --- /dev/null +++ b/.changeset/add-deduplication-feature.md @@ -0,0 +1,55 @@ +--- +"@tanstack/pacer": minor +--- + +Add cross-batch/cross-execution deduplication support to Batcher and Queuer + +This feature extends the existing `deduplicateItems` option to track processed items across batch/execution cycles. When enabled, items that have already been processed will be automatically skipped. 
+ +### Enhanced Options + +- `deduplicateItems: boolean` - Now prevents duplicates **both within and across batches** (default: false) +- `deduplicateStrategy: 'keep-first' | 'keep-last'` - Only affects in-batch duplicates (default: 'keep-first') +- `getItemKey: (item) => string | number` - Extract unique key from item +- `maxTrackedKeys: number` - Maximum keys to track with FIFO eviction (default: 1000) +- `onDuplicate: (newItem, existingItem?, instance) => void` - Called for both in-batch and cross-batch duplicates + +### New Methods + +- `hasProcessedKey(key)` - Check if a key has been processed +- `peekProcessedKeys()` - Get a copy of all processed keys +- `clearProcessedKeys()` - Clear the processed keys history + +### New State Properties + +- `processedKeys: Array` - Keys that have been processed (similar to RateLimiter's executionTimes) + +### Behavior + +When `deduplicateItems` is enabled: +1. **In-batch duplicates**: Merged based on `deduplicateStrategy` ('keep-first' or 'keep-last') +2. **Cross-batch duplicates**: Skipped entirely (already processed) +3. `onDuplicate` called with `existingItem` for in-batch, `undefined` for cross-batch + +### Use Case + +Prevents redundant processing when the same data is requested multiple times: +- API calls: Don't fetch user-123 if it was already fetched +- No-code tools: Multiple components requesting the same resource +- Event processing: Skip events that have already been handled + +Similar to request deduplication in TanStack Query, but at the batching/queuing level. + +### Persistence Support + +The `processedKeys` can be persisted via `initialState`, following the existing Pacer pattern (similar to RateLimiter): + +```typescript +const savedState = localStorage.getItem('batcher-state') +const batcher = new Batcher(fn, { + deduplicateItems: true, + initialState: savedState ? JSON.parse(savedState) : {}, +}) +``` + +Fully opt-in with no breaking changes to existing behavior. 
diff --git a/examples/react/useBatcherDedup/.eslintrc.cjs b/examples/react/useBatcherDedup/.eslintrc.cjs
new file mode 100644
index 000000000..9ff0b9fc9
--- /dev/null
+++ b/examples/react/useBatcherDedup/.eslintrc.cjs
@@ -0,0 +1,13 @@
+// @ts-check
+
+/** @type {import('eslint').Linter.Config} */
+const config = {
+  settings: {
+    extends: ['plugin:react/recommended', 'plugin:react-hooks/recommended'],
+    rules: {
+      'react/no-children-prop': 'off',
+    },
+  },
+}
+
+module.exports = config
diff --git a/examples/react/useBatcherDedup/.gitignore b/examples/react/useBatcherDedup/.gitignore
new file mode 100644
index 000000000..4673b022e
--- /dev/null
+++ b/examples/react/useBatcherDedup/.gitignore
@@ -0,0 +1,27 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# production
+/build
+
+pnpm-lock.yaml
+yarn.lock
+package-lock.json
+
+# misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
diff --git a/examples/react/useBatcherDedup/README.md b/examples/react/useBatcherDedup/README.md
new file mode 100644
index 000000000..1d971353e
--- /dev/null
+++ b/examples/react/useBatcherDedup/README.md
@@ -0,0 +1,80 @@
+# TanStack Pacer - Cross-Batch Deduplication Example
+
+This example demonstrates the `deduplicateItems` feature in TanStack Pacer, which prevents duplicate processing **across batches**.
+
+## What This Example Shows
+
+- **Cross-Batch Deduplication**: Items that have been processed won't be processed again until cleared
+- **Processed Keys Tracking**: Similar to `RateLimiter.executionTimes`, tracks which keys have been processed
+- **Custom Key Extraction**: Use `getItemKey` to define what makes an item unique
+- **Memory Management**: `maxTrackedKeys` limits memory usage with FIFO eviction
+- **Duplicate Callback**: Track skipped and merged items with `onDuplicate`
+- **State Persistence**: Use `initialState` to restore processed keys from storage
+
+## Use Case
+
+In applications where the same data might be requested multiple times (e.g., no-code tools, component-based UIs), you want to:
+
+- **Prevent redundant API calls** - If user-123's data was fetched, don't fetch it again
+- **Reduce server load** - Avoid duplicate processing even across different batch cycles
+- **Improve performance** - Skip items that have already been handled
+
+This is similar to request deduplication in TanStack Query, but at the batching level.
+
+## Running the Example
+
+```bash
+pnpm install
+pnpm dev
+```
+
+Then open http://localhost:3007
+
+## Key Configuration
+
+```typescript
+const batcher = useBatcher(
+  (userIds: string[]) => {
+    // API call to fetch users
+    console.log("Fetching users:", userIds);
+  },
+  {
+    maxSize: 5,
+    wait: 2000,
+    deduplicateItems: true, // Enable cross-batch deduplication
+    getItemKey: (userId) => userId, // Define uniqueness
+    maxTrackedKeys: 100, // Limit memory (FIFO eviction)
+    onDuplicate: (item) => {
+      console.log(`Skipped (already processed): ${item}`);
+    },
+  }
+);
+```
+
+## Try It Out
+
+1. Click "Fetch user-123" - It will be added to the batch
+2. Wait for the batch to process (or click "Flush Batch Now")
+3. Click "Fetch user-123" again - It will be **skipped** because it was already processed!
+4. Check the console for the "Already processed" log from `onDuplicate`
+5. 
Click "Clear Processed Keys" to allow re-processing + +## API Reference + +### Options + +- `trackProcessedKeys: boolean` - Enable cross-batch deduplication (default: false) +- `getItemKey: (item) => string | number` - Extract unique key from item +- `maxTrackedKeys: number` - Maximum keys to track (default: 1000, FIFO eviction) +- `onSkip: (item, batcher) => void` - Callback when an item is skipped + +### Methods + +- `hasProcessedKey(key)` - Check if a key has been processed +- `peekProcessedKeys()` - Get a copy of all processed keys +- `clearProcessedKeys()` - Clear the processed keys history + +### State + +- `processedKeys: Array` - Keys that have been processed +- `skippedCount: number` - Number of items skipped due to deduplication diff --git a/examples/react/useBatcherDedup/index.html b/examples/react/useBatcherDedup/index.html new file mode 100644 index 000000000..701aa26e3 --- /dev/null +++ b/examples/react/useBatcherDedup/index.html @@ -0,0 +1,16 @@ + + + + + + + + + TanStack Pacer Example + + + +
+ + + diff --git a/examples/react/useBatcherDedup/package.json b/examples/react/useBatcherDedup/package.json new file mode 100644 index 000000000..139daa404 --- /dev/null +++ b/examples/react/useBatcherDedup/package.json @@ -0,0 +1,34 @@ +{ + "name": "@tanstack/pacer-example-react-use-batcher-dedup", + "private": true, + "type": "module", + "scripts": { + "dev": "vite --port=3007", + "build": "vite build", + "preview": "vite preview", + "test:types": "tsc" + }, + "dependencies": { + "@tanstack/react-pacer": "^0.19.3", + "react": "^19.2.3", + "react-dom": "^19.2.3" + }, + "devDependencies": { + "@types/react": "^19.2.8", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^5.1.2", + "vite": "^7.3.1" + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/examples/react/useBatcherDedup/public/emblem-light.svg b/examples/react/useBatcherDedup/public/emblem-light.svg new file mode 100644 index 000000000..a58e69ad5 --- /dev/null +++ b/examples/react/useBatcherDedup/public/emblem-light.svg @@ -0,0 +1,13 @@ + + + + emblem-light + Created with Sketch. 
+ + + + + + + + \ No newline at end of file diff --git a/examples/react/useBatcherDedup/src/index.tsx b/examples/react/useBatcherDedup/src/index.tsx new file mode 100644 index 000000000..d300713d9 --- /dev/null +++ b/examples/react/useBatcherDedup/src/index.tsx @@ -0,0 +1,351 @@ +import { useState } from "react"; +import ReactDOM from "react-dom/client"; +import { useBatcher } from "@tanstack/react-pacer/batcher"; +import { PacerProvider } from "@tanstack/react-pacer/provider"; + +function App() { + const [processedBatches, setProcessedBatches] = useState< + Array> + >([]); + + const batcher = useBatcher( + (userIds: string[]) => { + // Simulate API call to fetch user data + setProcessedBatches((prev) => [...prev, userIds]); + console.log("Fetching users:", userIds); + }, + { + maxSize: 5, + wait: 2000, + deduplicateItems: true, + getItemKey: (userId) => userId, + maxTrackedKeys: 100, + onDuplicate: (newItem, existingItem) => { + if (existingItem) { + console.log(`Duplicate in batch: ${newItem}`); + } else { + console.log(`Already processed: ${newItem}`); + } + }, + } + ); + + return ( +
+

TanStack Pacer - Cross-Batch Deduplication Example

+

+ This example demonstrates how deduplicateItems prevents + duplicate processing both within and across batches. + Once an item is processed, it won't be processed again until you clear the history. +

+ +
+

Simulate API Requests

+

+ Click buttons to request user data. Items that have already been processed + will be skipped automatically. +

+
+ + + + +
+
+ + ({ + size: state.size, + executionCount: state.executionCount, + totalItemsProcessed: state.totalItemsProcessed, + processedKeys: state.processedKeys, + })} + > + {({ size, executionCount, totalItemsProcessed, processedKeys }) => ( +
+

Statistics

+
+
+ Current Batch Size: {size} +
+
+ Batches Processed: {executionCount} +
+
+ Total Items Processed: {totalItemsProcessed} +
+
+ Tracked Keys: {processedKeys.length} +
+
+
+ Current Batch Items: +
+ {batcher.peekAllItems().length > 0 + ? batcher.peekAllItems().join(", ") + : "(empty)"} +
+
+
+ Already Processed Keys: +
+ {processedKeys.length > 0 + ? processedKeys.join(", ") + : "(none yet)"} +
+

+ These keys will be skipped if you try to add them again. +

+
+
+ )} +
+ +
+

Processed Batches

+

+ Batches will be processed after 2 seconds or when 5 unique items are + collected. +

+
+ {processedBatches.length === 0 ? ( +
+ No batches processed yet +
+ ) : ( + processedBatches.map((batch, i) => ( +
+ Batch #{i + 1}: [{batch.join(", ")}] +
+ )) + )} +
+
+ +
+ + + +
+ +
+

๐Ÿ’ก How It Works

+
    +
  • + deduplicateItems: true enables both in-batch and cross-batch deduplication +
  • +
  • + Within a batch: duplicates are merged using deduplicateStrategy +
  • +
  • + Across batches: once processed, items are tracked in processedKeys +
  • +
  • + maxTrackedKeys: 100 limits memory usage (oldest keys are evicted first) +
  • +
  • + Use Clear Processed Keys to allow re-processing of previously processed items +
  • +
  • + Similar to RateLimiter.executionTimes tracking pattern +
  • +
+
+ + state}> + {(state) => ( +
+ + Debug: Full Batcher State + +
+              {JSON.stringify(state, null, 2)}
+            
+
+ )} +
+
+ ); +} + +const root = ReactDOM.createRoot(document.getElementById("root")!); +root.render( + + + +); diff --git a/examples/react/useBatcherDedup/tsconfig.json b/examples/react/useBatcherDedup/tsconfig.json new file mode 100644 index 000000000..6e9088d67 --- /dev/null +++ b/examples/react/useBatcherDedup/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "ESNext", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "jsx": "react-jsx", + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src", "vite.config.ts"] +} diff --git a/examples/react/useBatcherDedup/vite.config.ts b/examples/react/useBatcherDedup/vite.config.ts new file mode 100644 index 000000000..4e1943662 --- /dev/null +++ b/examples/react/useBatcherDedup/vite.config.ts @@ -0,0 +1,13 @@ +import { defineConfig } from 'vite' +import react from '@vitejs/plugin-react' + +// https://vite.dev/config/ +export default defineConfig({ + plugins: [ + react({ + // babel: { + // plugins: [['babel-plugin-react-compiler', { target: '19' }]], + // }, + }), + ], +}) From 9ba053b4ebac7f4c179b362c2e9e3aae4d863a0a Mon Sep 17 00:00:00 2001 From: restareaByWeezy Date: Tue, 27 Jan 2026 23:33:06 +0900 Subject: [PATCH 05/11] chore: update lockfile --- pnpm-lock.yaml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7ec077152..1ae881580 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -3299,6 +3299,31 @@ importers: specifier: ^7.3.1 version: 7.3.1(@types/node@25.2.3)(jiti@2.6.1)(sass@1.97.1)(yaml@2.8.2) + examples/react/useBatcherDedup: + dependencies: + '@tanstack/react-pacer': + specifier: ^0.19.3 + version: link:../../../packages/react-pacer + 
react: + specifier: ^19.2.3 + version: 19.2.3 + react-dom: + specifier: ^19.2.3 + version: 19.2.3(react@19.2.3) + devDependencies: + '@types/react': + specifier: ^19.2.8 + version: 19.2.8 + '@types/react-dom': + specifier: ^19.2.3 + version: 19.2.3(@types/react@19.2.8) + '@vitejs/plugin-react': + specifier: ^5.1.2 + version: 5.1.2(vite@7.3.1(@types/node@25.0.7)(jiti@2.6.1)(yaml@2.8.2)) + vite: + specifier: ^7.3.1 + version: 7.3.1(@types/node@25.0.7)(jiti@2.6.1)(yaml@2.8.2) + examples/react/useDebouncedCallback: dependencies: '@tanstack/react-pacer': From 95f01c595974d1ae2ccbb85765a767d423d15382 Mon Sep 17 00:00:00 2001 From: Kevin Van Cott Date: Tue, 27 Jan 2026 20:29:53 -0600 Subject: [PATCH 06/11] fix sherif and format --- .changeset/add-deduplication-feature.md | 4 +- examples/react/useBatcherDedup/README.md | 12 +- examples/react/useBatcherDedup/package.json | 2 +- examples/react/useBatcherDedup/src/index.tsx | 278 +- packages/pacer/src/batcher.ts | 13 +- packages/pacer/src/queuer.ts | 6 +- pnpm-lock.yaml | 10143 ++++------------- 7 files changed, 2350 insertions(+), 8108 deletions(-) diff --git a/.changeset/add-deduplication-feature.md b/.changeset/add-deduplication-feature.md index de5b313cf..4114d3c1d 100644 --- a/.changeset/add-deduplication-feature.md +++ b/.changeset/add-deduplication-feature.md @@ -1,5 +1,5 @@ --- -"@tanstack/pacer": minor +'@tanstack/pacer': minor --- Add cross-batch/cross-execution deduplication support to Batcher and Queuer @@ -27,6 +27,7 @@ This feature extends the existing `deduplicateItems` option to track processed i ### Behavior When `deduplicateItems` is enabled: + 1. **In-batch duplicates**: Merged based on `deduplicateStrategy` ('keep-first' or 'keep-last') 2. **Cross-batch duplicates**: Skipped entirely (already processed) 3. 
`onDuplicate` called with `existingItem` for in-batch, `undefined` for cross-batch @@ -34,6 +35,7 @@ When `deduplicateItems` is enabled: ### Use Case Prevents redundant processing when the same data is requested multiple times: + - API calls: Don't fetch user-123 if it was already fetched - No-code tools: Multiple components requesting the same resource - Event processing: Skip events that have already been handled diff --git a/examples/react/useBatcherDedup/README.md b/examples/react/useBatcherDedup/README.md index 1d971353e..bd8d1ee44 100644 --- a/examples/react/useBatcherDedup/README.md +++ b/examples/react/useBatcherDedup/README.md @@ -36,19 +36,19 @@ Then open http://localhost:3007 const batcher = useBatcher( (userIds: string[]) => { // API call to fetch users - console.log("Fetching users:", userIds); + console.log('Fetching users:', userIds) }, { maxSize: 5, wait: 2000, - trackProcessedKeys: true, // Enable cross-batch deduplication + trackProcessedKeys: true, // Enable cross-batch deduplication getItemKey: (userId) => userId, // Define uniqueness - maxTrackedKeys: 100, // Limit memory (FIFO eviction) + maxTrackedKeys: 100, // Limit memory (FIFO eviction) onSkip: (item) => { - console.log(`Skipped (already processed): ${item}`); + console.log(`Skipped (already processed): ${item}`) }, - } -); + }, +) ``` ## Try It Out diff --git a/examples/react/useBatcherDedup/package.json b/examples/react/useBatcherDedup/package.json index 139daa404..ffa5634f2 100644 --- a/examples/react/useBatcherDedup/package.json +++ b/examples/react/useBatcherDedup/package.json @@ -14,7 +14,7 @@ "react-dom": "^19.2.3" }, "devDependencies": { - "@types/react": "^19.2.8", + "@types/react": "^19.2.9", "@types/react-dom": "^19.2.3", "@vitejs/plugin-react": "^5.1.2", "vite": "^7.3.1" diff --git a/examples/react/useBatcherDedup/src/index.tsx b/examples/react/useBatcherDedup/src/index.tsx index d300713d9..d728e5e21 100644 --- a/examples/react/useBatcherDedup/src/index.tsx +++ 
b/examples/react/useBatcherDedup/src/index.tsx @@ -1,18 +1,18 @@ -import { useState } from "react"; -import ReactDOM from "react-dom/client"; -import { useBatcher } from "@tanstack/react-pacer/batcher"; -import { PacerProvider } from "@tanstack/react-pacer/provider"; +import { useState } from 'react' +import ReactDOM from 'react-dom/client' +import { useBatcher } from '@tanstack/react-pacer/batcher' +import { PacerProvider } from '@tanstack/react-pacer/provider' function App() { const [processedBatches, setProcessedBatches] = useState< Array> - >([]); + >([]) const batcher = useBatcher( (userIds: string[]) => { // Simulate API call to fetch user data - setProcessedBatches((prev) => [...prev, userIds]); - console.log("Fetching users:", userIds); + setProcessedBatches((prev) => [...prev, userIds]) + console.log('Fetching users:', userIds) }, { maxSize: 5, @@ -22,105 +22,106 @@ function App() { maxTrackedKeys: 100, onDuplicate: (newItem, existingItem) => { if (existingItem) { - console.log(`Duplicate in batch: ${newItem}`); + console.log(`Duplicate in batch: ${newItem}`) } else { - console.log(`Already processed: ${newItem}`); + console.log(`Already processed: ${newItem}`) } }, - } - ); + }, + ) return ( -
+

TanStack Pacer - Cross-Batch Deduplication Example

This example demonstrates how deduplicateItems prevents duplicate processing both within and across batches. - Once an item is processed, it won't be processed again until you clear the history. + Once an item is processed, it won't be processed again until you clear + the history.

Simulate API Requests

-

- Click buttons to request user data. Items that have already been processed - will be skipped automatically. +

+ Click buttons to request user data. Items that have already been + processed will be skipped automatically.

-
+
- - - -
-
- - ({ - size: state.size, - executionCount: state.executionCount, - totalItemsProcessed: state.totalItemsProcessed, - processedKeys: state.processedKeys, - })} - > - {({ size, executionCount, totalItemsProcessed, processedKeys }) => ( -
-

Statistics

-
-
- Current Batch Size: {size} -
-
- Batches Processed: {executionCount} -
-
- Total Items Processed: {totalItemsProcessed} -
-
- Tracked Keys: {processedKeys.length} -
-
-
- Current Batch Items: -
- {batcher.peekAllItems().length > 0 - ? batcher.peekAllItems().join(', ') - : '(empty)'} -
-
-
- Already Processed Keys: -
- {processedKeys.length > 0 - ? processedKeys.join(', ') - : '(none yet)'} -
-

- These keys will be skipped if you try to add them again. -

-
-
- )} -
- -
-

Processed Batches

-

- Batches will be processed after 2 seconds or when 5 unique items are - collected. -

-
- {processedBatches.length === 0 ? ( -
- No batches processed yet -
- ) : ( - processedBatches.map((batch, i) => ( -
- Batch #{i + 1}: [{batch.join(', ')}] -
- )) - )} -
-
- -
- - - -
- -
-

๐Ÿ’ก How It Works

-
    -
  • - deduplicateItems: true enables both in-batch and - cross-batch deduplication -
  • -
  • - Within a batch: duplicates are merged using{' '} - deduplicateStrategy -
  • -
  • - Across batches: once processed, items are tracked in{' '} - processedKeys -
  • -
  • - maxTrackedKeys: 100 limits memory usage (oldest keys - are evicted first) -
  • -
  • - Use Clear Processed Keys to allow re-processing of - previously processed items -
  • -
  • - Similar to RateLimiter.executionTimes tracking pattern -
  • -
-
- - state}> - {(state) => ( -
- - Debug: Full Batcher State - -
-              {JSON.stringify(state, null, 2)}
-            
-
- )} -
-
- ) -} - -const root = ReactDOM.createRoot(document.getElementById('root')!) -root.render( - - - , -) diff --git a/examples/react/useBatcherDedup/.eslintrc.cjs b/examples/react/useBatcherInBatchDedup/.eslintrc.cjs similarity index 100% rename from examples/react/useBatcherDedup/.eslintrc.cjs rename to examples/react/useBatcherInBatchDedup/.eslintrc.cjs diff --git a/examples/react/useBatcherDedup/.gitignore b/examples/react/useBatcherInBatchDedup/.gitignore similarity index 100% rename from examples/react/useBatcherDedup/.gitignore rename to examples/react/useBatcherInBatchDedup/.gitignore diff --git a/examples/react/useBatcherInBatchDedup/README.md b/examples/react/useBatcherInBatchDedup/README.md new file mode 100644 index 000000000..5f1a1daec --- /dev/null +++ b/examples/react/useBatcherInBatchDedup/README.md @@ -0,0 +1,29 @@ +# useBatcher In-Batch Deduplication Example + +This example demonstrates the in-batch deduplication feature of `useBatcher`. + +## Key Features + +- **In-batch deduplication**: Duplicate items within the same batch are automatically ignored or replaced based on the `deduplicateStrategy` +- **Visual testing**: Interactive UI to test deduplication behavior +- **Activity log**: See exactly what items are added vs. 
ignored + +## Running the Example + +```bash +pnpm dev +``` + +Then open http://localhost:3006 + +## How It Works + +When `deduplicateItems: true` is set: +- Items are deduplicated within the current batch only +- The `deduplicateStrategy` determines whether to keep the first or last occurrence +- Duplicates are identified using the `getItemKey` function (or JSON.stringify for objects) + +This is useful for scenarios like: +- Preventing duplicate API requests within the same batch +- Ensuring unique items in a batch operation +- Reducing redundant processing of the same data diff --git a/examples/react/useBatcherDedup/index.html b/examples/react/useBatcherInBatchDedup/index.html similarity index 100% rename from examples/react/useBatcherDedup/index.html rename to examples/react/useBatcherInBatchDedup/index.html diff --git a/examples/react/useBatcherDedup/package.json b/examples/react/useBatcherInBatchDedup/package.json similarity index 86% rename from examples/react/useBatcherDedup/package.json rename to examples/react/useBatcherInBatchDedup/package.json index ffa5634f2..209bc3c64 100644 --- a/examples/react/useBatcherDedup/package.json +++ b/examples/react/useBatcherInBatchDedup/package.json @@ -1,9 +1,9 @@ { - "name": "@tanstack/pacer-example-react-use-batcher-dedup", + "name": "@tanstack/pacer-example-react-use-batcher-in-batch-dedup", "private": true, "type": "module", "scripts": { - "dev": "vite --port=3007", + "dev": "vite --port=3006", "build": "vite build", "preview": "vite preview", "test:types": "tsc" diff --git a/examples/react/useBatcherDedup/public/emblem-light.svg b/examples/react/useBatcherInBatchDedup/public/emblem-light.svg similarity index 100% rename from examples/react/useBatcherDedup/public/emblem-light.svg rename to examples/react/useBatcherInBatchDedup/public/emblem-light.svg diff --git a/examples/react/useBatcherInBatchDedup/src/index.tsx b/examples/react/useBatcherInBatchDedup/src/index.tsx new file mode 100644 index 000000000..7b15a25ff 
--- /dev/null +++ b/examples/react/useBatcherInBatchDedup/src/index.tsx @@ -0,0 +1,234 @@ +import { useState } from 'react' +import ReactDOM from 'react-dom/client' +import { useBatcher } from '@tanstack/react-pacer/batcher' +import { PacerProvider } from '@tanstack/react-pacer/provider' + +function App1() { + // Use your state management library of choice + const [processedBatches, setProcessedBatches] = useState< + Array> + >([]) + const [log, setLog] = useState([]) + + // The function that will process a batch of items + function processBatch(items: Array) { + setProcessedBatches((prev) => [...prev, items]) + setLog((prev) => [...prev, `โœ… Processed batch: [${items.join(', ')}]`]) + console.log('processing batch', items) + } + + const batcher = useBatcher( + processBatch, + { + maxSize: 5, + wait: 3000, + // Enable in-batch deduplication + deduplicateItems: true, + deduplicateStrategy: 'keep-first', // or 'keep-last' + }, + ) + + const addItem = (item: string) => { + const result = batcher.addItem(item) + if (result) { + setLog((prev) => [...prev, `โž• Added: "${item}"`]) + } else { + setLog((prev) => [...prev, `โš ๏ธ Duplicate ignored: "${item}"`]) + } + } + + return ( +
+

TanStack Pacer - In-Batch Deduplication

+

+ When deduplicateItems: true, duplicate items within the same batch are automatically ignored. + This example demonstrates how duplicates are handled before the batch is processed. +

+ + ({ + size: state.size, + executionCount: state.executionCount, + totalItemsProcessed: state.totalItemsProcessed, + })} + > + {({ size, executionCount, totalItemsProcessed }) => ( + <> +
+
+
Current Batch Size: {size} / 5
+
Batches Processed: {executionCount}
+
Current Batch: [{batcher.peekAllItems().join(', ')}]
+
Total Items Processed: {totalItemsProcessed}
+
+
+ +
+

๐Ÿงช Test Deduplication

+

+ Click buttons multiple times. Duplicates within the same batch will be ignored! +

+
+ + + + + +
+
+ + +
+
+ ๐Ÿ’ก Tip: Add the same item multiple times before the batch is processed. + Notice how duplicates are ignored! +
+
+ +
+
+

๐Ÿ“ฆ Processed Batches

+
+ {processedBatches.length === 0 ? ( + No batches processed yet + ) : ( + processedBatches.map((b, i) => ( +
+ Batch #{i + 1}: [{b.join(', ')}] +
+ )) + )} +
+
+
+

๐Ÿ“‹ Activity Log

+
+ {log.length === 0 ? ( + No activity yet + ) : ( + log.map((entry, i) => ( +
+ {entry} +
+ )) + )} +
+
+
+ + )} +
+ +
+ + ๐Ÿ› Debug: Full State + + state}> + {(state) => ( +
+              {JSON.stringify(state, null, 2)}
+            
+ )} +
+
+
+ ) +} + +const root = ReactDOM.createRoot(document.getElementById('root')!) +root.render( + + + , +) diff --git a/examples/react/useBatcherDedup/tsconfig.json b/examples/react/useBatcherInBatchDedup/tsconfig.json similarity index 100% rename from examples/react/useBatcherDedup/tsconfig.json rename to examples/react/useBatcherInBatchDedup/tsconfig.json diff --git a/examples/react/useBatcherDedup/vite.config.ts b/examples/react/useBatcherInBatchDedup/vite.config.ts similarity index 100% rename from examples/react/useBatcherDedup/vite.config.ts rename to examples/react/useBatcherInBatchDedup/vite.config.ts