@@ -1417,4 +1417,108 @@ describe("RedisRealtimeStreams", () => {
       await redis.quit();
     }
   );
+
+  redisTest(
+    "Should handle chunks split mid-line (regression test)",
+    { timeout: 30_000 },
+    async ({ redisOptions }) => {
+      const redis = new Redis(redisOptions);
+      const redisRealtimeStreams = new RedisRealtimeStreams({
+        redis: redisOptions,
+      });
+
+      const runId = "run_split_test";
+      const streamId = "test-split-stream";
+
+      // Simulate what happens in production: a JSON line split across multiple network chunks.
+      // This reproduces the issue where we see partial chunks like:
+      //   - "{\"timestamp\":"
+      //   - "1762880245493,\"chunkIndex\":780,\"data\":\"Chunk 781/1000\"}"
+      const fullLine = JSON.stringify({
+        timestamp: 1762880245493,
+        chunkIndex: 780,
+        data: "Chunk 781/1000",
+      });
+
+      // Split the line at an arbitrary position (in the middle of the JSON)
+      const splitPoint = 16; // Splits mid-number, after '{"timestamp":176'
+      const chunk1 = fullLine.substring(0, splitPoint);
+      const chunk2 = fullLine.substring(splitPoint);
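+      // Neither fragment parses as JSON on its own:
+      //   chunk1 === '{"timestamp":176'
+      //   chunk2 === '2880245493,"chunkIndex":780,"data":"Chunk 781/1000"}'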
+
+      // Create a ReadableStream that sends split chunks
+      const encoder = new TextEncoder();
+      const stream = new ReadableStream({
+        start(controller) {
+          controller.enqueue(encoder.encode(chunk1));
+          controller.enqueue(encoder.encode(chunk2 + "\n")); // Add newline at end
+          controller.close();
+        },
+      });
+
+      // Ingest the split data
+      await redisRealtimeStreams.ingestData(stream, runId, streamId, "client1");
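+      // ingestData should buffer the partial first chunk and persist one
+      // complete line, rather than storing each fragment separately.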
+
+      // Now consume the stream and verify we get the complete line, not split chunks
+      const abortController = new AbortController();
+      const response = await redisRealtimeStreams.streamResponse(
+        new Request("http://localhost/test"),
+        runId,
+        streamId,
+        abortController.signal
+      );
+
+      const reader = response.body!.getReader();
+      const decoder = new TextDecoder();
+      let receivedData = "";
+
+      // Abort if reading takes more than 5 seconds so the test cannot hang
+      const readTimeout = setTimeout(() => {
+        abortController.abort();
+      }, 5000);
+
+      try {
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) break;
+
+          receivedData += decoder.decode(value, { stream: true });
+
+          // Once we have at least one SSE data frame, we can stop reading
+          if (receivedData.includes("data: ")) {
+            break;
+          }
+        }
+      } finally {
+        clearTimeout(readTimeout);
+        abortController.abort();
+        reader.releaseLock();
+      }
+
+      // Parse the SSE data
+      const lines = receivedData.split("\n").filter((line) => line.startsWith("data: "));
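+      // SSE delivers each payload on its own "data: " line; two such lines
+      // here would mean the fragments were stored and re-emitted separately.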
+
+      // We should receive exactly ONE complete line, not two partial lines
+      expect(lines.length).toBe(1);
+
+      // Extract the data (remove the 6-character "data: " prefix)
+      const dataLine = lines[0].substring(6);
+
+      // Verify it's the complete, valid JSON
+      expect(dataLine).toBe(fullLine);
+
+      // Verify it parses correctly as JSON
+      const parsed = JSON.parse(dataLine) as {
+        timestamp: number;
+        chunkIndex: number;
+        data: string;
+      };
+      expect(parsed.timestamp).toBe(1762880245493);
+      expect(parsed.chunkIndex).toBe(780);
+      expect(parsed.data).toBe("Chunk 781/1000");
+
+      // Cleanup
+      await redis.del(`stream:${runId}:${streamId}`);
+      await redis.quit();
+    }
+  );
 });
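For reference, the buffering behavior this regression test locks in is the standard carry-the-partial-line pattern: accumulate incoming chunks in a string buffer, emit only newline-terminated lines, and hold the trailing remainder until the next chunk arrives. A minimal sketch follows; the `createLineSplitter` name and `TransformStream` shape are illustrative assumptions, not the actual implementation inside `RedisRealtimeStreams`.

```ts
// Hypothetical sketch of newline buffering; not this repo's actual code.
function createLineSplitter(): TransformStream<string, string> {
  let buffer = "";
  return new TransformStream<string, string>({
    transform(chunk, controller) {
      buffer += chunk;
      const lines = buffer.split("\n");
      // The last element is "" when the chunk ended on a newline, or a
      // partial line that must wait for the next chunk.
      buffer = lines.pop() ?? "";
      for (const line of lines) {
        if (line.length > 0) controller.enqueue(line); // Emit complete lines only
      }
    },
    flush(controller) {
      // Emit any trailing data that never received a newline.
      if (buffer.length > 0) controller.enqueue(buffer);
    },
  });
}
```

Placed in the ingest path (for example, `stream.pipeThrough(new TextDecoderStream()).pipeThrough(createLineSplitter())`), the two chunks above collapse into a single complete line, which is exactly what the assertions check.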