Commit a1580bc

fix(typing): Type tests.sentry.workflow_engine.processors (#103174)
Some EZPZ missing test types.
1 parent 7f72a70 commit a1580bc

6 files changed: +55 −31 lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion

@@ -866,7 +866,7 @@ module = [
     "tests.sentry.workflow_engine.endpoints.*",
     "tests.sentry.workflow_engine.handlers.action.*",
     "tests.sentry.workflow_engine.models.*",
-    "tests.sentry.workflow_engine.processors.contexts.*",
+    "tests.sentry.workflow_engine.processors.*",
     "tests.sentry.workflow_engine.service.*",
     "tests.sentry.workflow_engine.utils.*",
     "tests.sentry_plugins.*",

tests/sentry/workflow_engine/processors/test_data_sources.py

Lines changed: 4 additions & 2 deletions
@@ -37,8 +37,10 @@ def setUp(self) -> None:
         self.ds2 = self.create_data_source(source_id=source_id_2, type="test")
         self.ds2.detectors.set([self.detector_one, self.detector_two])

-        self.packet = DataPacket[dict](source_id_1, {"source_id": source_id_1, "foo": "bar"})
-        self.two_detector_packet = DataPacket[dict](
+        self.packet = DataPacket[dict[str, str]](
+            source_id_1, {"source_id": source_id_1, "foo": "bar"}
+        )
+        self.two_detector_packet = DataPacket[dict[str, str]](
             source_id_2, {"source_id": source_id_2, "foo": "baz"}
         )
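
The packets are now parametrized with a concrete payload type rather than a bare dict. DataPacket's definition is not part of this diff; a minimal stand-in with the assumed shape shows what the parametrization buys under stricter mypy settings:

from dataclasses import dataclass
from typing import Generic, TypeVar

T = TypeVar("T")

@dataclass
class DataPacket(Generic[T]):
    # Hypothetical stand-in for sentry.workflow_engine's DataPacket.
    source_id: str
    packet: T

packet = DataPacket[dict[str, str]]("1", {"source_id": "1", "foo": "bar"})
value: str = packet.packet["foo"]  # checked as str rather than Any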

tests/sentry/workflow_engine/processors/test_delayed_workflow.py

Lines changed: 3 additions & 1 deletion
@@ -175,7 +175,9 @@ def create_project_event_freq_workflow(

         return workflow, [workflow_action_slow_filter_group, workflow_action_filter_group]

-    def setup_event(self, project, environment, name) -> tuple[Event, Group]:
+    def setup_event(
+        self, project: Project, environment: Environment, name: str
+    ) -> tuple[Event, Group]:
         event = self.create_event(project.id, FROZEN_TIME, name, environment.name)
         assert event.group
         return event, event.group
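
The return type was already annotated here; only the parameters were bare. Assuming the pyproject override enables disallow_untyped_defs for this package, mypy flags a def with any unannotated parameter even when the return type is present. A minimal demo, not from the PR:

def before(project, environment, name) -> tuple[str, str]:
    # mypy: "Function is missing a type annotation for one or more arguments"
    return name, environment

def after(project: int, environment: str, name: str) -> tuple[str, str]:  # ok
    return name, environment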

tests/sentry/workflow_engine/processors/test_detector.py

Lines changed: 16 additions & 9 deletions
@@ -1,5 +1,6 @@
 import unittest
 import uuid
+from typing import Any
 from unittest import mock
 from unittest.mock import MagicMock, call

@@ -72,9 +73,9 @@ class TestProcessDetectors(BaseDetectorHandlerTest):
     def setUp(self) -> None:
         super().setUp()

-    def build_data_packet(self, **kwargs):
+    def build_data_packet(self, **kwargs: Any) -> DataPacket[dict[str, Any]]:
         source_id = "1234"
-        return DataPacket[dict](
+        return DataPacket[dict[str, Any]](
             source_id, {"source_id": source_id, "group_vals": {"group_1": 6}, **kwargs}
         )

@@ -228,8 +229,11 @@ def test_sending_metric_before_evaluating(self) -> None:
     @mock.patch("sentry.workflow_engine.processors.detector.metrics")
     @mock.patch("sentry.workflow_engine.processors.detector.logger")
     def test_metrics_and_logs_fire(
-        self, mock_logger, mock_metrics, mock_produce_occurrence_to_kafka
-    ):
+        self,
+        mock_logger: mock.MagicMock,
+        mock_metrics: mock.MagicMock,
+        mock_produce_occurrence_to_kafka: mock.MagicMock,
+    ) -> None:
         detector, _ = self.create_detector_and_condition(type=self.handler_state_type.slug)
         data_packet = DataPacket("1", {"dedupe": 2, "group_vals": {None: 6}})
         results = process_detectors(data_packet, [detector])

@@ -281,8 +285,11 @@ def test_metrics_and_logs_fire(
     @mock.patch("sentry.workflow_engine.processors.detector.metrics")
     @mock.patch("sentry.workflow_engine.processors.detector.logger")
     def test_metrics_and_logs_resolve(
-        self, mock_logger, mock_metrics, mock_produce_occurrence_to_kafka
-    ):
+        self,
+        mock_logger: mock.MagicMock,
+        mock_metrics: mock.MagicMock,
+        mock_produce_occurrence_to_kafka: mock.MagicMock,
+    ) -> None:
         detector, _ = self.create_detector_and_condition(type=self.handler_state_type.slug)
         data_packet = DataPacket("1", {"dedupe": 2, "group_vals": {None: 6}})
         process_detectors(data_packet, [detector])

@@ -751,7 +758,7 @@ def test_dedupe(self) -> None:
         handler.state_manager.enqueue_dedupe_update("group_key", 99)
         handler.state_manager.commit_state_updates()

-        data_packet = DataPacket[dict](
+        data_packet = DataPacket[dict[str, Any]](
             source_id="1234",
             packet={"id": "1234", "group_vals": {"group_key": 10}, "dedupe": 100},
         )

@@ -778,7 +785,7 @@ def test_dedupe__already_processed(self) -> None:
         handler.state_manager.commit_state_updates()

         handler.evaluate(
-            DataPacket[dict](
+            DataPacket[dict[str, Any]](
                 source_id="1234",
                 packet={"id": "1234", "group_vals": {"group_key": 10}, "dedupe": 100},
             ),

@@ -789,7 +796,7 @@ def test_dedupe__already_processed(self) -> None:

     def test_status_change(self) -> None:
         handler = self.build_handler()
-        data_packet = DataPacket[dict](
+        data_packet = DataPacket[dict[str, Any]](
             source_id="1234", packet={"id": "1234", "group_vals": {"group_key": 10}, "dedupe": 100}
         )
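
Two patterns recur in this file: DataPacket[dict[str, Any]] for heterogeneous payloads, and annotating decorator-injected mocks. Stacked @mock.patch decorators inject mocks bottom-up, so the decorator nearest the def supplies the first argument after self, which is why mock_logger leads the parameter list above. A runnable illustration against stdlib targets chosen only for the demo:

from unittest import mock

@mock.patch("json.dumps")          # outermost patch -> last mock argument
@mock.patch("os.getcwd")
@mock.patch("logging.getLogger")   # innermost patch -> first mock argument
def check(
    mock_get_logger: mock.MagicMock,
    mock_getcwd: mock.MagicMock,
    mock_dumps: mock.MagicMock,
) -> None:
    for m in (mock_get_logger, mock_getcwd, mock_dumps):
        assert isinstance(m, mock.MagicMock)

check()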

tests/sentry/workflow_engine/processors/test_schedule.py

Lines changed: 28 additions & 15 deletions
@@ -35,7 +35,14 @@ def setUp(self) -> None:
         super().setUp()
         self.batch_client = DelayedWorkflowClient()

-    def push_to_hash(self, project_id, rule_id, group_id, event_id=None, occurrence_id=None):
+    def push_to_hash(
+        self,
+        project_id: int,
+        rule_id: str | int,
+        group_id: str | int,
+        event_id: str | int | None = None,
+        occurrence_id: str | int | None = None,
+    ) -> None:
         value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id})
         self.batch_client.for_project(project_id).push_to_hash(
             batch_key=None,

@@ -81,7 +88,7 @@ def test_fetches_from_buffer_and_executes_with_conditional_delete(

     @patch("sentry.workflow_engine.processors.schedule.process_in_batches")
     @override_options({"delayed_workflow.rollout": False})
-    def test_skips_processing_with_option(self, mock_process_in_batches) -> None:
+    def test_skips_processing_with_option(self, mock_process_in_batches: MagicMock) -> None:
         project = self.create_project()
         self.batch_client.add_project_ids([project.id])

@@ -197,12 +204,12 @@ def run_to_timestamp(run: int, interval_sec: int, jitter: bool = True) -> float:

 class TestProjectChooser:
     @pytest.fixture
-    def mock_buffer(self):
+    def mock_buffer(self) -> Mock:
         mock_buffer = Mock(spec=DelayedWorkflowClient)
         return mock_buffer

     @pytest.fixture
-    def project_chooser(self, mock_buffer):
+    def project_chooser(self, mock_buffer: Mock) -> ProjectChooser:
         return ProjectChooser(mock_buffer, num_cohorts=6, min_scheduling_age=timedelta(seconds=50))

     def _find_projects_for_cohorts(self, chooser: ProjectChooser, num_cohorts: int) -> list[int]:

@@ -218,7 +225,7 @@ def _find_projects_for_cohorts(self, chooser: ProjectChooser, num_cohorts: int)
             project_id += 1
         return all_project_ids

-    def test_project_id_to_cohort_distribution(self, project_chooser):
+    def test_project_id_to_cohort_distribution(self, project_chooser: ProjectChooser) -> None:
         project_ids = list(range(1, 1001))  # 1000 project IDs
         cohorts = [project_chooser._project_id_to_cohort(pid) for pid in project_ids]

@@ -230,7 +237,7 @@ def test_project_id_to_cohort_distribution(self, project_chooser):
         assert all(count > 0 for count in cohort_counts)
         assert all(count < 1000 for count in cohort_counts)

-    def test_project_id_to_cohort_consistent(self, project_chooser):
+    def test_project_id_to_cohort_consistent(self, project_chooser: ProjectChooser) -> None:
         for project_id in [123, 999, 4, 193848493]:
             cohort1 = project_chooser._project_id_to_cohort(project_id)
             cohort2 = project_chooser._project_id_to_cohort(project_id)

@@ -239,7 +246,9 @@ def test_project_id_to_cohort_consistent(self, project_chooser):
         assert cohort1 == cohort2 == cohort3
         assert 0 <= cohort1 < 6

-    def test_project_ids_to_process_must_process_over_minute(self, project_chooser):
+    def test_project_ids_to_process_must_process_over_minute(
+        self, project_chooser: ProjectChooser
+    ) -> None:
         fetch_time = 1000.0
         cohort_updates = CohortUpdates(
             values={

@@ -260,7 +269,9 @@ def test_project_ids_to_process_must_process_over_minute(self, project_chooser):
         # cohort_updates should be updated with fetch_time for processed cohorts
         assert 0 in cohort_updates.values

-    def test_project_ids_to_process_may_process_fallback(self, project_chooser):
+    def test_project_ids_to_process_may_process_fallback(
+        self, project_chooser: ProjectChooser
+    ) -> None:
         fetch_time = 1000.0
         cohort_updates = CohortUpdates(
             values={

@@ -282,7 +293,9 @@ def test_project_ids_to_process_may_process_fallback(self, project_chooser):
         for cohort in processed_cohorts:
             assert cohort_updates.values[cohort] == fetch_time

-    def test_project_ids_to_process_no_processing_needed(self, project_chooser):
+    def test_project_ids_to_process_no_processing_needed(
+        self, project_chooser: ProjectChooser
+    ) -> None:
         fetch_time = 1000.0
         cohort_updates = CohortUpdates(
             values={

@@ -369,7 +382,7 @@ def test_scenario_six_times_per_minute(self, project_chooser: ProjectChooser) ->
             ), f"Run {run} should process all cohorts, got {processed_in_last_cycle}"
             processed_cohorts_over_time.append(processed_cohorts)

-    def test_scenario_once_per_minute_cohort_count_1(self, mock_buffer) -> None:
+    def test_scenario_once_per_minute_cohort_count_1(self, mock_buffer: Mock) -> None:
         """
         Scenario test: Running once per minute with cohort count of 1 (production default).
         This demonstrates that all projects are processed together every minute.

@@ -411,7 +424,7 @@ def test_scenario_once_per_minute_cohort_count_1(self, mock_buffer) -> None:
             f"but got {len(processed_projects)}: {sorted(processed_projects)}"
         )

-    def test_cohort_count_change_uses_eldest_freshness(self, mock_buffer) -> None:
+    def test_cohort_count_change_uses_eldest_freshness(self, mock_buffer: Mock) -> None:
         """
         Test that when num_cohorts changes, all new cohorts use the eldest stored cohort freshness,
         then cohorts that need processing are scheduled and updated to current time.

@@ -440,11 +453,11 @@ def test_cohort_count_change_uses_eldest_freshness(self, mock_buffer) -> None:

 class TestChosenProjects:
     @pytest.fixture
-    def mock_project_chooser(self):
+    def mock_project_chooser(self) -> Mock:
         """Create a mock ProjectChooser."""
         return Mock(spec=ProjectChooser)

-    def test_chosen_projects_context_manager(self, mock_project_chooser):
+    def test_chosen_projects_context_manager(self, mock_project_chooser: Mock) -> None:
         """Test chosen_projects as a context manager."""
         # Setup mocks
         mock_cohort_updates = Mock(spec=CohortUpdates)

@@ -476,7 +489,7 @@ def test_chosen_projects_context_manager(self, mock_project_chooser):
         # Verify persist_updates was called after context exit
         mock_buffer_client.persist_updates.assert_called_once_with(mock_cohort_updates)

-    def test_chosen_projects_fetch_updates_exception(self, mock_project_chooser):
+    def test_chosen_projects_fetch_updates_exception(self, mock_project_chooser: Mock) -> None:
         """Test that exception during fetch_updates is properly handled."""
         # Setup mocks
         mock_buffer_client = Mock(spec=DelayedWorkflowClient)

@@ -495,7 +508,7 @@ def test_chosen_projects_fetch_updates_exception(self, mock_project_chooser):
         # persist_updates should not be called if fetch_updates fails
         mock_buffer_client.persist_updates.assert_not_called()

-    def test_chosen_projects_exception_during_processing(self, mock_project_chooser):
+    def test_chosen_projects_exception_during_processing(self, mock_project_chooser: Mock) -> None:
         mock_buffer_client = Mock(spec=DelayedWorkflowClient)
         mock_project_chooser.client = mock_buffer_client
         mock_buffer_client.fetch_updates.return_value = Mock(spec=CohortUpdates)
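
These changes annotate both the fixtures' return types and the test parameters that receive them; under disallow_untyped_defs, both ends need annotations. A self-contained sketch of the same pattern, with a hypothetical Client class standing in for DelayedWorkflowClient:

from unittest.mock import Mock

import pytest

class Client:
    # Hypothetical stand-in for DelayedWorkflowClient.
    def persist_updates(self, updates: object) -> None: ...

@pytest.fixture
def mock_buffer() -> Mock:
    # Annotated fixture: mypy now checks this body too.
    return Mock(spec=Client)

def test_uses_fixture(mock_buffer: Mock) -> None:
    mock_buffer.persist_updates({})
    mock_buffer.persist_updates.assert_called_once_with({})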

tests/sentry/workflow_engine/processors/test_workflow.py

Lines changed: 3 additions & 3 deletions
@@ -439,7 +439,7 @@ def test_many_workflows(self) -> None:
         assert triggered_workflows == {self.workflow, workflow_two}

     @patch.object(get_data_conditions_for_group, "batch")
-    def test_batched_data_condition_lookup_is_used(self, mock_batch):
+    def test_batched_data_condition_lookup_is_used(self, mock_batch: MagicMock) -> None:
         """Test that batch lookup is used when evaluating multiple workflows."""
         workflow_two, _, _, _ = self.create_detector_and_workflow(name_prefix="two")

@@ -816,7 +816,7 @@ def test_with_slow_conditions(self) -> None:
         assert not triggered_action_filters

     @patch.object(get_data_conditions_for_group, "batch")
-    def test_batched_data_condition_lookup_for_action_filters(self, mock_batch):
+    def test_batched_data_condition_lookup_for_action_filters(self, mock_batch: MagicMock) -> None:
         """Test that batch lookup is used when evaluating action filters."""
         # Create a second workflow with action filters
         workflow_two, _, _, _ = self.create_detector_and_workflow(name_prefix="two")

@@ -878,7 +878,7 @@ def test_activity__with_slow_conditions(self) -> None:
         # ensure we do not enqueue slow condition evaluation
         assert not queue_items

-    def test_enqueues_when_slow_conditions(self):
+    def test_enqueues_when_slow_conditions(self) -> None:
         assert isinstance(self.event_data.event, GroupEvent)
         queue_items_by_workflow_id = {
             self.workflow: DelayedWorkflowItem(
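
patch.object here swaps the batch attribute on the get_data_conditions_for_group object itself, which appears to be a batching wrapper rather than a plain function. A runnable sketch of the same mechanism using a hypothetical class:

from unittest.mock import MagicMock, patch

class Lookup:
    # Hypothetical stand-in for a batched lookup helper.
    def batch(self, keys: list[int]) -> dict[int, str]:
        return {k: "real" for k in keys}

lookup = Lookup()

@patch.object(Lookup, "batch")
def check(mock_batch: MagicMock) -> None:
    mock_batch.return_value = {1: "stubbed"}
    assert lookup.batch([1]) == {1: "stubbed"}
    mock_batch.assert_called_once_with([1])

check()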
