Commit ae19b87

feat(ACI): Send updated data to Seer on all snuba query changes (#103332)
Follow-up to #102934 (comment): update Seer when anything on the snuba query changes. We were missing some cases where the data changed in a way Seer would want to know about, but we weren't sending those updates.
1 parent 47ba7ab commit ae19b87
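
The shape of the change, as a minimal self-contained sketch (every name below is a simplified stand-in for illustration, not the actual Sentry code): update() records whether it has already pushed the detector's data to Seer, and update_data_source() now forwards every snuba query edit on a dynamic detector unless that already happened, rather than only forwarding query/aggregate changes.

from typing import Any


def send_to_seer(detector_id: int, payload: dict[str, Any]) -> None:
    # Stand-in for update_detector_data(); the real call raises if Seer rejects it.
    print(f"Seer update for detector {detector_id}: {payload}")


def update_data_source(
    detector_id: int, is_dynamic: bool, data_source: dict[str, Any], seer_updated: bool = False
) -> None:
    # Any snuba query edit on a dynamic detector is forwarded, unless update()
    # already pushed the data as part of a larger change.
    if is_dynamic and not seer_updated:
        send_to_seer(detector_id, {"data_sources": [data_source]})
    # ...then apply the snuba query changes locally...


def update(detector_id: int, is_dynamic: bool, validated_data: dict[str, Any]) -> None:
    seer_updated = False
    if not is_dynamic and validated_data.get("detection_type") == "dynamic":
        # Converting to a dynamic detector already sends the full payload to Seer.
        send_to_seer(detector_id, validated_data)
        seer_updated = True
        is_dynamic = True
    data_sources = validated_data.pop("data_sources", [])
    if data_sources:
        update_data_source(detector_id, is_dynamic, data_sources[0], seer_updated)


update(1, True, {"data_sources": [{"query": "updated_query", "aggregate": "count()"}]})

The flag only suppresses a duplicate Seer push; applying the snuba query changes themselves is unaffected.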

File tree

3 files changed: +164 -17 lines changed

src/sentry/incidents/metric_issue_detector.py
src/sentry/seer/anomaly_detection/store_data_workflow_engine.py
tests/sentry/incidents/endpoints/validators/test_validators.py

src/sentry/incidents/metric_issue_detector.py

Lines changed: 16 additions & 13 deletions
@@ -1,5 +1,5 @@
 from datetime import timedelta
-from typing import Any, cast
+from typing import Any
 
 from rest_framework import serializers
 
@@ -241,7 +241,9 @@ def is_editing_transaction_dataset(
                 return True
         return False
 
-    def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSourceType):
+    def update_data_source(
+        self, instance: Detector, data_source: SnubaQueryDataSourceType, seer_updated: bool = False
+    ):
         try:
             source_instance = DataSource.objects.get(detector=instance)
         except DataSource.DoesNotExist:
@@ -276,17 +278,15 @@ def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSour
 
         # Handle a dynamic detector's snuba query changing
         if instance.config.get("detection_type") == AlertRuleDetectionType.DYNAMIC:
-            if snuba_query.query != data_source.get(
-                "query"
-            ) or snuba_query.aggregate != data_source.get("aggregate"):
-                try:
-                    validated_data_source = cast(dict[str, Any], data_source)
+            try:
+                validated_data_source: dict[str, Any] = {"data_sources": [data_source]}
+                if not seer_updated:
                     update_detector_data(instance, validated_data_source)
-                except Exception:
-                    # don't update the snuba query if we failed to send data to Seer
-                    raise serializers.ValidationError(
-                        "Failed to send data to Seer, cannot update detector"
-                    )
+            except Exception:
+                # don't update the snuba query if we failed to send data to Seer
+                raise serializers.ValidationError(
+                    "Failed to send data to Seer, cannot update detector"
+                )
 
         update_snuba_query(
             snuba_query=snuba_query,
@@ -304,6 +304,7 @@ def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSour
         )
 
     def update(self, instance: Detector, validated_data: dict[str, Any]):
+        seer_updated = False
         # Handle anomaly detection changes first in case we need to exit before saving
         if (
             not instance.config.get("detection_type") == AlertRuleDetectionType.DYNAMIC
@@ -313,6 +314,8 @@ def update(self, instance: Detector, validated_data: dict[str, Any]):
             # Detector has been changed to become a dynamic detector
             try:
                 update_detector_data(instance, validated_data)
+                seer_updated = True
+
             except Exception:
                 # Don't update if we failed to send data to Seer
                 raise serializers.ValidationError(
@@ -339,7 +342,7 @@ def update(self, instance: Detector, validated_data: dict[str, Any]):
         data_source = validated_data.pop("data_sources")[0]
 
         if data_source is not None:
-            self.update_data_source(instance, data_source)
+            self.update_data_source(instance, data_source, seer_updated)
 
         instance.save()
 
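One detail from the hunks above worth isolating: the Seer push stays wrapped in a try/except that converts any failure into a validation error, so the snuba query is never modified when Seer could not be told about the change. A condensed, self-contained sketch of that guard, with stand-ins for the real Seer call and for rest_framework's ValidationError:

from typing import Any


class ValidationError(Exception):
    # Stand-in for rest_framework.serializers.ValidationError.
    pass


def update_detector_data_stub(payload: dict[str, Any]) -> None:
    # Stand-in for the real Seer call, which raises when the request fails.
    raise ConnectionError("Seer unavailable")


def push_to_seer_or_abort(data_source: dict[str, Any], seer_updated: bool) -> None:
    # Build the wrapped payload, push it only if update() has not already done so,
    # and abort before any snuba query mutation if the push fails.
    try:
        validated_data_source: dict[str, Any] = {"data_sources": [data_source]}
        if not seer_updated:
            update_detector_data_stub(validated_data_source)
    except Exception:
        raise ValidationError("Failed to send data to Seer, cannot update detector")


try:
    push_to_seer_or_abort({"query": "updated_query"}, seer_updated=False)
except ValidationError as err:
    print(err)  # Failed to send data to Seer, cannot update detector
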
src/sentry/seer/anomaly_detection/store_data_workflow_engine.py

Lines changed: 1 addition & 1 deletion
@@ -100,7 +100,7 @@ def update_detector_data(
     updated_data_source_data = updated_fields.get("data_sources")
     if updated_data_source_data:
         data_source_data = updated_data_source_data[0]
-        event_types = data_source_data.get("eventTypes")
+        event_types = data_source_data.get("event_types")
 
         for k, v in data_source_data.items():
             if k == "dataset":

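This one-line change pairs with the new payload shape in update_data_source above: the validated data source is now passed wrapped under a "data_sources" key, and this reader picks out the first entry and its snake_case event_types field. That the validated dict carries snake_case keys (unlike the camelCase eventTypes in the API request body) is inferred from the rename rather than stated in the diff. A small self-contained illustration of the lookup, with made-up field values:

from typing import Any

# Shape assumed from the diffs above: a single validated data source wrapped
# under "data_sources", whose first entry carries an "event_types" key.
updated_fields: dict[str, Any] = {
    "data_sources": [
        {
            "query": "updated_query",
            "aggregate": "count()",
            "event_types": ["transaction"],
        }
    ]
}

updated_data_source_data = updated_fields.get("data_sources")
if updated_data_source_data:
    data_source_data = updated_data_source_data[0]
    # .get("eventTypes") would return None here; the snake_case key finds the value.
    event_types = data_source_data.get("event_types")
    assert event_types == ["transaction"]
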
tests/sentry/incidents/endpoints/validators/test_validators.py

Lines changed: 147 additions & 3 deletions
@@ -709,12 +709,11 @@ def test_update_anomaly_detection_from_static(
         "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
     )
     @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
-    def test_update_anomaly_detection_snuba_query(
+    def test_update_anomaly_detection_snuba_query_query(
         self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
     ) -> None:
         """
-        Test that when we update the snuba query for a dynamic detector
-        we make a call to Seer with the changes
+        Test that when we update the snuba query query for a dynamic detector we make a call to Seer with the changes
         """
         seer_return_value: StoreDataResponse = {"success": True}
         mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
@@ -775,6 +774,36 @@ def test_update_anomaly_detection_snuba_query(
             event=audit_log.get_event_id("DETECTOR_EDIT"),
             data=dynamic_detector.get_audit_log_data(),
         )
+
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
+    def test_update_anomaly_detection_snuba_query_aggregate(
+        self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
+        """
+        Test that when we update the snuba query aggregate for a dynamic detector we make a call to Seer with the changes
+        """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+
+        # Verify detector in DB
+        self.assert_validated(detector)
+
+        assert mock_seer_request.call_count == 1
+        mock_seer_request.reset_mock()
+
+        # Verify audit log
+        mock_audit.assert_called_once_with(
+            request=self.context["request"],
+            organization=self.project.organization,
+            target_object=detector.id,
+            event=audit_log.get_event_id("DETECTOR_ADD"),
+            data=detector.get_audit_log_data(),
+        )
         mock_audit.reset_mock()
 
         # Change the aggregate which should call Seer
@@ -815,6 +844,121 @@
             data=dynamic_detector.get_audit_log_data(),
         )
 
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
+    def test_update_anomaly_detection_snuba_query_to_perf(
+        self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
+        """
+        Test that when we update the snuba query for a dynamic detector
+        to become a performance query we make a call to Seer with the changes
+        """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+        assert mock_seer_request.call_count == 1
+        mock_seer_request.reset_mock()
+        mock_audit.reset_mock()
+
+        # Change the dataset, queryType, and aggregate to perf stuff
+        update_data = {
+            **self.valid_anomaly_detection_data,
+            "dataSources": [
+                {
+                    "queryType": SnubaQuery.Type.PERFORMANCE.value,
+                    "dataset": Dataset.EventsAnalyticsPlatform.value,
+                    "query": "updated_query",
+                    "aggregate": "count()",
+                    "timeWindow": 3600,
+                    "environment": self.environment.name,
+                    "eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
+                }
+            ],
+        }
+        update_validator = MetricIssueDetectorValidator(
+            instance=detector, data=update_data, context=self.context, partial=True
+        )
+        assert update_validator.is_valid(), update_validator.errors
+        dynamic_detector = update_validator.save()
+
+        assert mock_seer_request.call_count == 1
+
+        # Verify snuba query changes
+        data_source = DataSource.objects.get(detector=dynamic_detector)
+        query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
+        snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
+        assert snuba_query.aggregate == "count()"
+        assert snuba_query.type == SnubaQuery.Type.PERFORMANCE.value
+        assert snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
+        assert snuba_query.query == "updated_query"
+
+        mock_audit.assert_called_once_with(
+            request=self.context["request"],
+            organization=self.project.organization,
+            target_object=dynamic_detector.id,
+            event=audit_log.get_event_id("DETECTOR_EDIT"),
+            data=dynamic_detector.get_audit_log_data(),
+        )
+
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.handle_send_historical_data_to_seer"
+    )
+    def test_update_anomaly_detection_event_types(
+        self, mock_send_historical_data: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
+        """
+        Test that when we update the eventTypes for a dynamic detector it gets sent through as expected
+        """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+        assert mock_send_historical_data.call_count == 1
+        mock_send_historical_data.reset_mock()
+
+        # Change the dataset, queryType, aggregate, and eventTypes to performance data
+        data_source_data = {
+            "queryType": SnubaQuery.Type.PERFORMANCE.value,
+            "dataset": Dataset.EventsAnalyticsPlatform.value,
+            "query": "updated_query",
+            "aggregate": "count()",
+            "timeWindow": 3600,
+            "environment": self.environment.name,
+            "eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
+        }
+        update_data = {
+            **self.valid_anomaly_detection_data,
+            "dataSources": [data_source_data],
+        }
+        update_validator = MetricIssueDetectorValidator(
+            instance=detector, data=update_data, context=self.context, partial=True
+        )
+        assert update_validator.is_valid(), update_validator.errors
+        detector = update_validator.save()
+
+        # Verify snuba query changes
+        data_source = DataSource.objects.get(detector=detector)
+        query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
+        snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
+        condition_group = DataConditionGroup.objects.get(id=detector.workflow_condition_group_id)
+        data_condition = DataCondition.objects.get(condition_group=condition_group)
+
+        mock_send_historical_data.assert_called_once_with(
+            detector,
+            data_source,
+            data_condition,
+            snuba_query,
+            detector.project,
+            "update",
+            [SnubaQueryEventType.EventType.TRANSACTION],
+        )
+
     @mock.patch(
         "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
     )

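The last new test asserts that the lowercase "transaction" string sent in the request comes back out as the EventType.TRANSACTION enum member in the call to Seer. A self-contained illustration of that name-based round trip; the enum here is a stand-in for SnubaQueryEventType.EventType and its numeric values are illustrative:

from enum import Enum


class EventType(Enum):
    # Stand-in for SnubaQueryEventType.EventType; values are illustrative.
    ERROR = 0
    DEFAULT = 1
    TRANSACTION = 2


requested = ["transaction"]  # what the API payload sends under eventTypes
resolved = [EventType[name.upper()] for name in requested]
assert resolved == [EventType.TRANSACTION]
assert [member.name.lower() for member in resolved] == requested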