2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -13,6 +13,8 @@
- For feature flag evaluations tracked on spans:
- Only 10 evaluations are tracked per span, existing flags are updated but new ones exceeding the limit are ignored
- Spans do not inherit evaluations from their parent
- Drop log events once buffer hits hard limit ([#4889](https://github.com/getsentry/sentry-java/pull/4889))
- If we have 1000 log events queued up, we drop any new logs coming in to prevent OOM
- Remove vendored code and upgrade to async profiler 4.2 ([#4856](https://github.com/getsentry/sentry-java/pull/4856))
- This adds support for JDK 23+

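The changelog entry above describes a simple bounded-buffer policy: once 1000 log events are pending, new events are dropped instead of queued so the buffer cannot grow without bound. Below is a minimal sketch of that check-before-enqueue pattern using only JDK types; the class and method names are illustrative, not part of the PR (the real processor, shown further down, uses Sentry's `ReusableCountLatch` for the pending count and records a client report on drop).

```java
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;

/** Illustrative bounded buffer: drops new items once a hard limit of pending items is reached. */
final class BoundedLogBuffer<T> {
  static final int MAX_QUEUE_SIZE = 1000;

  private final Queue<T> queue = new ConcurrentLinkedQueue<>();
  private final AtomicInteger pending = new AtomicInteger();

  /** Returns false when the item was dropped because the buffer is already full. */
  boolean add(T item) {
    if (pending.get() >= MAX_QUEUE_SIZE) {
      // The real processor records a QUEUE_OVERFLOW client report here instead of just returning.
      return false;
    }
    // The check and the increment are not atomic, so the cap is approximate under heavy
    // concurrency; that is acceptable because the goal is to bound memory, not to enforce
    // an exact queue size.
    pending.incrementAndGet();
    queue.offer(item);
    return true;
  }

  /** The flush path decrements the pending count after an item has been handed to the client. */
  void onFlushed() {
    pending.decrementAndGet();
  }
}
```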
1 change: 1 addition & 0 deletions sentry/api/sentry.api
@@ -5034,6 +5034,7 @@ public final class io/sentry/logger/LoggerApi : io/sentry/logger/ILoggerApi {
public final class io/sentry/logger/LoggerBatchProcessor : io/sentry/logger/ILoggerBatchProcessor {
public static final field FLUSH_AFTER_MS I
public static final field MAX_BATCH_SIZE I
public static final field MAX_QUEUE_SIZE I
public fun <init> (Lio/sentry/SentryOptions;Lio/sentry/ISentryClient;)V
public fun add (Lio/sentry/SentryLogEvent;)V
public fun close (Z)V
15 changes: 15 additions & 0 deletions sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java
@@ -1,5 +1,6 @@
package io.sentry.logger;

import io.sentry.DataCategory;
import io.sentry.ISentryClient;
import io.sentry.ISentryExecutorService;
import io.sentry.ISentryLifecycleToken;
@@ -8,8 +9,10 @@
import io.sentry.SentryLogEvent;
import io.sentry.SentryLogEvents;
import io.sentry.SentryOptions;
import io.sentry.clientreport.DiscardReason;
import io.sentry.transport.ReusableCountLatch;
import io.sentry.util.AutoClosableReentrantLock;
import io.sentry.util.JsonSerializationUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
@@ -24,6 +27,7 @@ public final class LoggerBatchProcessor implements ILoggerBatchProcessor {

public static final int FLUSH_AFTER_MS = 5000;
public static final int MAX_BATCH_SIZE = 100;
public static final int MAX_QUEUE_SIZE = 1000;

private final @NotNull SentryOptions options;
private final @NotNull ISentryClient client;
@@ -46,6 +50,17 @@ public LoggerBatchProcessor(

@Override
public void add(final @NotNull SentryLogEvent logEvent) {
if (pendingCount.getCount() >= MAX_QUEUE_SIZE) {
options
.getClientReportRecorder()
.recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.LogItem);
final long lostBytes =
JsonSerializationUtils.byteSizeOf(options.getSerializer(), options.getLogger(), logEvent);
options
.getClientReportRecorder()
.recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.Attachment, lostBytes);
return;
}
pendingCount.increment();
queue.offer(logEvent);
maybeSchedule(false, false);
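When an event is dropped, the new code in `add` records two client-report entries: an item count under `DataCategory.LogItem`, and the serialized payload size, measured with `JsonSerializationUtils.byteSizeOf`, under `DataCategory.Attachment`. The sketch below illustrates that count-plus-bytes accounting in a self-contained way; the `recordLost` helper and the category strings are hypothetical stand-ins for Sentry's client-report recorder, and the byte size here is simply the UTF-8 length of a hand-written JSON string rather than the SDK's serializer output.

```java
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

/** Hypothetical accumulator standing in for Sentry's client-report recorder. */
final class DroppedBytesSketch {
  static final Map<String, Long> lostByCategory = new LinkedHashMap<>();

  static void recordLost(String category, long quantity) {
    lostByCategory.merge(category, quantity, Long::sum);
  }

  public static void main(String[] args) {
    // Pretend this is the JSON the serializer would have produced for the dropped event.
    String serializedLogEvent = "{\"body\":\"log message 1001\",\"level\":\"info\"}";
    long lostBytes = serializedLogEvent.getBytes(StandardCharsets.UTF_8).length;

    // Mirrors the two entries recorded on overflow: one item count for the log itself,
    // and one byte total for the payload that never reached the transport.
    recordLost("log_item", 1L);
    recordLost("attachment", lostBytes);

    System.out.println(lostByCategory); // {log_item=1, attachment=42}
  }
}
```

The test file below asserts exactly this pairing of discarded-event entries via `ClientReportTestHelper.assertClientReport`.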
101 changes: 101 additions & 0 deletions sentry/src/test/java/io/sentry/logger/LoggerBatchProcessorTest.kt
@@ -0,0 +1,101 @@
package io.sentry.logger

import io.sentry.DataCategory
import io.sentry.ISentryClient
import io.sentry.SentryLogEvent
import io.sentry.SentryLogEvents
import io.sentry.SentryLogLevel
import io.sentry.SentryNanotimeDate
import io.sentry.SentryOptions
import io.sentry.clientreport.ClientReportTestHelper
import io.sentry.clientreport.DiscardReason
import io.sentry.clientreport.DiscardedEvent
import io.sentry.protocol.SentryId
import io.sentry.test.DeferredExecutorService
import io.sentry.test.injectForField
import io.sentry.util.JsonSerializationUtils
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertFalse
import kotlin.test.assertTrue
import org.mockito.kotlin.argumentCaptor
import org.mockito.kotlin.atLeast
import org.mockito.kotlin.mock
import org.mockito.kotlin.verify

class LoggerBatchProcessorTest {
@Test
fun `drops log events after reaching MAX_QUEUE_SIZE limit`() {
// given
val mockClient = mock<ISentryClient>()
val mockExecutor = DeferredExecutorService()
val options = SentryOptions()
val processor = LoggerBatchProcessor(options, mockClient)
processor.injectForField("executorService", mockExecutor)

for (i in 1..1001) {
val logEvent =
SentryLogEvent(SentryId(), SentryNanotimeDate(), "log message $i", SentryLogLevel.INFO)
processor.add(logEvent)
}

// run twice since a non full batch would be scheduled at the end
mockExecutor.runAll()
mockExecutor.runAll()

// assert that the transport received 1000 log events
val captor = argumentCaptor<SentryLogEvents>()
verify(mockClient, atLeast(1)).captureBatchedLogEvents(captor.capture())

val allCapturedEvents = mutableListOf<SentryLogEvent>()
captor.allValues.forEach { logEvents -> allCapturedEvents.addAll(logEvents.items) }

assertEquals(1000, allCapturedEvents.size)

// assert that log 1001 did not make it but log 1000 did get sent
val log1000Found = allCapturedEvents.any { it.body == "log message 1000" }
val log1001Found = allCapturedEvents.any { it.body == "log message 1001" }

assertTrue(log1000Found, "Log 1000 should have been sent")
assertFalse(log1001Found, "Log 1001 should not have been sent")
}

@Test
fun `records client report when log event is dropped due to queue overflow`() {
// given
val mockClient = mock<ISentryClient>()
val mockExecutor = DeferredExecutorService()
val options = SentryOptions()
val processor = LoggerBatchProcessor(options, mockClient)
processor.injectForField("executorService", mockExecutor)

// fill the queue to MAX_QUEUE_SIZE
for (i in 1..1000) {
val logEvent =
SentryLogEvent(SentryId(), SentryNanotimeDate(), "log message $i", SentryLogLevel.INFO)
processor.add(logEvent)
}

// add one more log event that should be dropped
val droppedLogEvent =
SentryLogEvent(SentryId(), SentryNanotimeDate(), "dropped log", SentryLogLevel.INFO)
processor.add(droppedLogEvent)

// calculate expected bytes for the dropped log event
val expectedBytes =
JsonSerializationUtils.byteSizeOf(options.serializer, options.logger, droppedLogEvent)

// verify that a client report was recorded for the dropped log item and bytes
val expectedEvents =
mutableListOf(
DiscardedEvent(DiscardReason.QUEUE_OVERFLOW.reason, DataCategory.LogItem.category, 1),
DiscardedEvent(
DiscardReason.QUEUE_OVERFLOW.reason,
DataCategory.Attachment.category,
expectedBytes,
),
)

ClientReportTestHelper.assertClientReport(options.clientReportRecorder, expectedEvents)
}
}