@@ -709,12 +709,11 @@ def test_update_anomaly_detection_from_static(
709709 "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
710710 )
711711 @mock .patch ("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry" )
712- def test_update_anomaly_detection_snuba_query (
712+ def test_update_anomaly_detection_snuba_query_query (
713713 self , mock_audit : mock .MagicMock , mock_seer_request : mock .MagicMock
714714 ) -> None :
715715 """
-        Test that when we update the snuba query for a dynamic detector
-        we make a call to Seer with the changes
+        Test that when we update the snuba query's query string for a dynamic detector we make a call to Seer with the changes
718717 """
719718 seer_return_value : StoreDataResponse = {"success" : True }
720719 mock_seer_request .return_value = HTTPResponse (orjson .dumps (seer_return_value ), status = 200 )
@@ -775,6 +774,36 @@ def test_update_anomaly_detection_snuba_query(
             event=audit_log.get_event_id("DETECTOR_EDIT"),
             data=dynamic_detector.get_audit_log_data(),
         )
+
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
+    def test_update_anomaly_detection_snuba_query_aggregate(
+        self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
785+ """
786+ Test that when we update the snuba query aggregate for a dynamic detector we make a call to Seer with the changes
787+ """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+
+        # Verify detector in DB
+        self.assert_validated(detector)
+
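+        # Creating the dynamic detector should have made exactly one Seer store-data request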
+        assert mock_seer_request.call_count == 1
+        mock_seer_request.reset_mock()
+
+        # Verify audit log
+        mock_audit.assert_called_once_with(
+            request=self.context["request"],
+            organization=self.project.organization,
+            target_object=detector.id,
+            event=audit_log.get_event_id("DETECTOR_ADD"),
+            data=detector.get_audit_log_data(),
+        )
         mock_audit.reset_mock()
 
         # Change the aggregate which should call Seer
@@ -815,6 +844,121 @@ def test_update_anomaly_detection_snuba_query(
             data=dynamic_detector.get_audit_log_data(),
         )
 
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch("sentry.workflow_engine.endpoints.validators.base.detector.create_audit_entry")
+    def test_update_anomaly_detection_snuba_query_to_perf(
+        self, mock_audit: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
+        """
+        Test that when we update the snuba query for a dynamic detector
+        to become a performance query we make a call to Seer with the changes
+        """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+        assert mock_seer_request.call_count == 1
+        mock_seer_request.reset_mock()
+        mock_audit.reset_mock()
+
+        # Change the dataset, queryType, and aggregate to performance values
+        update_data = {
+            **self.valid_anomaly_detection_data,
+            "dataSources": [
+                {
+                    "queryType": SnubaQuery.Type.PERFORMANCE.value,
+                    "dataset": Dataset.EventsAnalyticsPlatform.value,
+                    "query": "updated_query",
+                    "aggregate": "count()",
+                    "timeWindow": 3600,
+                    "environment": self.environment.name,
+                    "eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
+                }
+            ],
+        }
+        update_validator = MetricIssueDetectorValidator(
+            instance=detector, data=update_data, context=self.context, partial=True
+        )
+        assert update_validator.is_valid(), update_validator.errors
+        dynamic_detector = update_validator.save()
+
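+        # Switching the query to the performance dataset should trigger another Seer store-data request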
+        assert mock_seer_request.call_count == 1
+
+        # Verify snuba query changes
+        data_source = DataSource.objects.get(detector=dynamic_detector)
+        query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
+        snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
+        assert snuba_query.aggregate == "count()"
+        assert snuba_query.type == SnubaQuery.Type.PERFORMANCE.value
+        assert snuba_query.dataset == Dataset.EventsAnalyticsPlatform.value
+        assert snuba_query.query == "updated_query"
+
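+        # The update should be recorded as a DETECTOR_EDIT audit log entry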
+        mock_audit.assert_called_once_with(
+            request=self.context["request"],
+            organization=self.project.organization,
+            target_object=dynamic_detector.id,
+            event=audit_log.get_event_id("DETECTOR_EDIT"),
+            data=dynamic_detector.get_audit_log_data(),
+        )
+
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
+    )
+    @mock.patch(
+        "sentry.seer.anomaly_detection.store_data_workflow_engine.handle_send_historical_data_to_seer"
+    )
+    def test_update_anomaly_detection_event_types(
+        self, mock_send_historical_data: mock.MagicMock, mock_seer_request: mock.MagicMock
+    ) -> None:
915+ """
916+ Test that when we update the eventTypes for a dynamic detector it gets sent through as expected
917+ """
+        seer_return_value: StoreDataResponse = {"success": True}
+        mock_seer_request.return_value = HTTPResponse(orjson.dumps(seer_return_value), status=200)
+
+        detector = self.create_dynamic_detector()
+        assert mock_send_historical_data.call_count == 1
+        mock_send_historical_data.reset_mock()
+
+        # Change the dataset, queryType, aggregate, and eventTypes to performance data
+        data_source_data = {
+            "queryType": SnubaQuery.Type.PERFORMANCE.value,
+            "dataset": Dataset.EventsAnalyticsPlatform.value,
+            "query": "updated_query",
+            "aggregate": "count()",
+            "timeWindow": 3600,
+            "environment": self.environment.name,
+            "eventTypes": [SnubaQueryEventType.EventType.TRANSACTION.name.lower()],
+        }
+        update_data = {
+            **self.valid_anomaly_detection_data,
+            "dataSources": [data_source_data],
+        }
+        update_validator = MetricIssueDetectorValidator(
+            instance=detector, data=update_data, context=self.context, partial=True
+        )
+        assert update_validator.is_valid(), update_validator.errors
+        detector = update_validator.save()
+
+        # Verify snuba query changes
+        data_source = DataSource.objects.get(detector=detector)
+        query_subscription = QuerySubscription.objects.get(id=data_source.source_id)
+        snuba_query = SnubaQuery.objects.get(id=query_subscription.snuba_query_id)
+        condition_group = DataConditionGroup.objects.get(id=detector.workflow_condition_group_id)
+        data_condition = DataCondition.objects.get(condition_group=condition_group)
+
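+        # The updated event types should be forwarded to the Seer historical data handler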
+        mock_send_historical_data.assert_called_once_with(
+            detector,
+            data_source,
+            data_condition,
+            snuba_query,
+            detector.project,
+            "update",
+            [SnubaQueryEventType.EventType.TRANSACTION],
+        )
+
     @mock.patch(
         "sentry.seer.anomaly_detection.store_data_workflow_engine.seer_anomaly_detection_connection_pool.urlopen"
     )