From e557583a33a2c485b7187675de3a6045a9499c75 Mon Sep 17 00:00:00 2001
From: Alexander Dinauer
Date: Mon, 10 Nov 2025 14:57:52 +0100
Subject: [PATCH 1/5] Drop log events once buffer hits hard limit

---
 .../src/main/java/io/sentry/logger/LoggerBatchProcessor.java | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java b/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
index 369f24f75de..84a1b4234a1 100644
--- a/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
+++ b/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
@@ -24,6 +24,7 @@ public final class LoggerBatchProcessor implements ILoggerBatchProcessor {
 
   public static final int FLUSH_AFTER_MS = 5000;
   public static final int MAX_BATCH_SIZE = 100;
+  public static final int MAX_QUEUE_SIZE = 1000;
 
   private final @NotNull SentryOptions options;
   private final @NotNull ISentryClient client;
@@ -46,6 +47,9 @@ public LoggerBatchProcessor(
 
   @Override
   public void add(final @NotNull SentryLogEvent logEvent) {
+    if (pendingCount.getCount() >= MAX_QUEUE_SIZE) {
+      return;
+    }
     pendingCount.increment();
     queue.offer(logEvent);
     maybeSchedule(false, false);

From a39be4d7d2e9bff73862d2366554693608819a81 Mon Sep 17 00:00:00 2001
From: Alexander Dinauer
Date: Tue, 11 Nov 2025 16:09:52 +0100
Subject: [PATCH 2/5] changelog

---
 CHANGELOG.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index dcdc3870eea..cc3254d3544 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,8 @@
 - For feature flag evaluations tracked on spans:
   - Only 10 evaluations are tracked per span, existing flags are updated but new ones exceeding the limit are ignored
   - Spans do not inherit evaluations from their parent
+- Drop log events once buffer hits hard limit ([#4889](https://github.com/getsentry/sentry-java/pull/4889))
+  - If we have 1000 log events queued up, we drop any new logs coming in to prevent OOM
 
 ### Fixes
 

From 004bf19f1f837f6cc65abc985781c1c1a404e783 Mon Sep 17 00:00:00 2001
From: Alexander Dinauer
Date: Tue, 11 Nov 2025 09:43:48 +0100
Subject: [PATCH 3/5] test hard limit

---
 sentry/api/sentry.api                         |  1 +
 .../sentry/logger/LoggerBatchProcessorTest.kt | 57 +++++++++++++++++++
 2 files changed, 58 insertions(+)
 create mode 100644 sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt

diff --git a/sentry/api/sentry.api b/sentry/api/sentry.api
index 9c5438fac2a..fb241c9b4ef 100644
--- a/sentry/api/sentry.api
+++ b/sentry/api/sentry.api
@@ -5034,6 +5034,7 @@ public final class io/sentry/logger/LoggerApi : io/sentry/logger/ILoggerApi {
 public final class io/sentry/logger/LoggerBatchProcessor : io/sentry/logger/ILoggerBatchProcessor {
 	public static final field FLUSH_AFTER_MS I
 	public static final field MAX_BATCH_SIZE I
+	public static final field MAX_QUEUE_SIZE I
 	public fun <init> (Lio/sentry/SentryOptions;Lio/sentry/ISentryClient;)V
 	public fun add (Lio/sentry/SentryLogEvent;)V
 	public fun close (Z)V

diff --git a/sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt b/sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt
new file mode 100644
index 00000000000..c4afefa820a
--- /dev/null
+++ b/sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt
@@ -0,0 +1,57 @@
+package io.sentry.logger
+
+import io.sentry.ISentryClient
+import io.sentry.SentryLogEvent
+import io.sentry.SentryLogEvents
+import io.sentry.SentryLogLevel
+import io.sentry.SentryNanotimeDate
+import io.sentry.SentryOptions
+import io.sentry.protocol.SentryId
+import io.sentry.test.DeferredExecutorService
+import io.sentry.test.injectForField
+import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.test.assertFalse
+import kotlin.test.assertTrue
+import org.mockito.kotlin.argumentCaptor
+import org.mockito.kotlin.atLeast
+import org.mockito.kotlin.mock
+import org.mockito.kotlin.verify
+
+class LoggerBatchProcessorTest {
+  @Test
+  fun `drops log events after reaching MAX_QUEUE_SIZE limit`() {
+    // given
+    val mockClient = mock<ISentryClient>()
+    val mockExecutor = DeferredExecutorService()
+    val options = SentryOptions()
+    val processor = LoggerBatchProcessor(options, mockClient)
+    processor.injectForField("executorService", mockExecutor)
+
+    for (i in 1..1001) {
+      val logEvent =
+        SentryLogEvent(SentryId(), SentryNanotimeDate(), "log message $i", SentryLogLevel.INFO)
+      processor.add(logEvent)
+    }
+
+    // run twice since a non full batch would be scheduled at the end
+    mockExecutor.runAll()
+    mockExecutor.runAll()
+
+    // assert that the transport received 1000 log events
+    val captor = argumentCaptor<SentryLogEvents>()
+    verify(mockClient, atLeast(1)).captureBatchedLogEvents(captor.capture())
+
+    val allCapturedEvents = mutableListOf<SentryLogEvent>()
+    captor.allValues.forEach { logEvents -> allCapturedEvents.addAll(logEvents.items) }
+
+    assertEquals(1000, allCapturedEvents.size)
+
+    // assert that log 1001 did not make it but log 1000 did get sent
+    val log1000Found = allCapturedEvents.any { it.body == "log message 1000" }
+    val log1001Found = allCapturedEvents.any { it.body == "log message 1001" }
+
+    assertTrue(log1000Found, "Log 1000 should have been sent")
+    assertFalse(log1001Found, "Log 1001 should not have been sent")
+  }
+}

From 54a5d677e5de919b7cd1a03c756bd62901c01f46 Mon Sep 17 00:00:00 2001
From: Alexander Dinauer
Date: Tue, 11 Nov 2025 10:22:41 +0100
Subject: [PATCH 4/5] record client report

---
 .../sentry/logger/LoggerBatchProcessor.java   | 12 +++++
 .../sentry/logger/LoggerBatchProcessorTest.kt | 44 +++++++++++++++++++
 2 files changed, 56 insertions(+)

diff --git a/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java b/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
index 84a1b4234a1..e8943a2982b 100644
--- a/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
+++ b/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
@@ -1,5 +1,6 @@
 package io.sentry.logger;
 
+import io.sentry.DataCategory;
 import io.sentry.ISentryClient;
 import io.sentry.ISentryExecutorService;
 import io.sentry.ISentryLifecycleToken;
@@ -8,8 +9,10 @@
 import io.sentry.SentryLogEvent;
 import io.sentry.SentryLogEvents;
 import io.sentry.SentryOptions;
+import io.sentry.clientreport.DiscardReason;
 import io.sentry.transport.ReusableCountLatch;
 import io.sentry.util.AutoClosableReentrantLock;
+import io.sentry.util.JsonSerializationUtils;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Queue;
@@ -48,6 +51,15 @@ public LoggerBatchProcessor(
   @Override
   public void add(final @NotNull SentryLogEvent logEvent) {
     if (pendingCount.getCount() >= MAX_QUEUE_SIZE) {
+      options
+          .getClientReportRecorder()
+          .recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.LogItem);
+      final long lostBytes =
+          JsonSerializationUtils.byteSizeOf(
+              options.getSerializer(), options.getLogger(), logEvent);
+      options
+          .getClientReportRecorder()
+          .recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.Attachment, lostBytes);
       return;
     }
     pendingCount.increment();
diff --git a/sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt b/sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt
index c4afefa820a..432cd666404 100644
--- a/sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt
+++ b/sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt
@@ -1,14 +1,19 @@
 package io.sentry.logger
 
+import io.sentry.DataCategory
 import io.sentry.ISentryClient
 import io.sentry.SentryLogEvent
 import io.sentry.SentryLogEvents
 import io.sentry.SentryLogLevel
 import io.sentry.SentryNanotimeDate
 import io.sentry.SentryOptions
+import io.sentry.clientreport.ClientReportTestHelper
+import io.sentry.clientreport.DiscardReason
+import io.sentry.clientreport.DiscardedEvent
 import io.sentry.protocol.SentryId
 import io.sentry.test.DeferredExecutorService
 import io.sentry.test.injectForField
+import io.sentry.util.JsonSerializationUtils
 import kotlin.test.Test
 import kotlin.test.assertEquals
 import kotlin.test.assertFalse
@@ -54,4 +59,43 @@ class LoggerBatchProcessorTest {
     assertTrue(log1000Found, "Log 1000 should have been sent")
     assertFalse(log1001Found, "Log 1001 should not have been sent")
   }
+
+  @Test
+  fun `records client report when log event is dropped due to queue overflow`() {
+    // given
+    val mockClient = mock<ISentryClient>()
+    val mockExecutor = DeferredExecutorService()
+    val options = SentryOptions()
+    val processor = LoggerBatchProcessor(options, mockClient)
+    processor.injectForField("executorService", mockExecutor)
+
+    // fill the queue to MAX_QUEUE_SIZE
+    for (i in 1..1000) {
+      val logEvent =
+        SentryLogEvent(SentryId(), SentryNanotimeDate(), "log message $i", SentryLogLevel.INFO)
+      processor.add(logEvent)
+    }
+
+    // add one more log event that should be dropped
+    val droppedLogEvent =
+      SentryLogEvent(SentryId(), SentryNanotimeDate(), "dropped log", SentryLogLevel.INFO)
+    processor.add(droppedLogEvent)
+
+    // calculate expected bytes for the dropped log event
+    val expectedBytes =
+      JsonSerializationUtils.byteSizeOf(options.serializer, options.logger, droppedLogEvent)
+
+    // verify that a client report was recorded for the dropped log item and bytes
+    val expectedEvents =
+      mutableListOf(
+        DiscardedEvent(DiscardReason.QUEUE_OVERFLOW.reason, DataCategory.LogItem.category, 1),
+        DiscardedEvent(
+          DiscardReason.QUEUE_OVERFLOW.reason,
+          DataCategory.Attachment.category,
+          expectedBytes,
+        ),
+      )
+
+    ClientReportTestHelper.assertClientReport(options.clientReportRecorder, expectedEvents)
+  }
 }

From ff0b6afa960cd17411fd4ed05ea021945cc86017 Mon Sep 17 00:00:00 2001
From: Alexander Dinauer
Date: Tue, 11 Nov 2025 16:10:32 +0100
Subject: [PATCH 5/5] format

---
 .../src/main/java/io/sentry/logger/LoggerBatchProcessor.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java b/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
index e8943a2982b..48a73400f51 100644
--- a/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
+++ b/sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
@@ -55,8 +55,7 @@ public void add(final @NotNull SentryLogEvent logEvent) {
           .getClientReportRecorder()
           .recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.LogItem);
       final long lostBytes =
-          JsonSerializationUtils.byteSizeOf(
-              options.getSerializer(), options.getLogger(), logEvent);
+          JsonSerializationUtils.byteSizeOf(options.getSerializer(), options.getLogger(), logEvent);
       options
           .getClientReportRecorder()
          .recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.Attachment, lostBytes);