Commit 823b561

int wrap of CUMULATIVE_TEST_TOKENS_PER_SEQUENCE
Signed-off-by: Abhishek <maurya.abhishek@ibm.com>
1 parent: 1374fd2

1 file changed: tests/models/test_decoders.py (3 additions, 3 deletions)
@@ -69,8 +69,8 @@
 USE_MICRO_MODELS = os.environ.get("FMS_TEST_SHAPES_USE_MICRO_MODELS", "1") == "1"
 USE_DISTRIBUTED = os.environ.get("FMS_TEST_SHAPES_DISTRIBUTED", "0") == "1"
 TIMING = os.environ.get("TIMING", "")
-CUMULATIVE_TEST_TOKENS_PER_SEQUENCE = os.environ.get(
-    "FMS_TEST_SHAPES_CUMULATIVE_TEST_TOKENS_PER_SEQUENCE", "1024"
+CUMULATIVE_TEST_TOKENS_PER_SEQUENCE = int(
+    os.environ.get("FMS_TEST_SHAPES_CUMULATIVE_TEST_TOKENS_PER_SEQUENCE", "1024")
 )
 ATTN_TYPE = os.environ.get("FMS_TEST_SHAPES_ATTN_TYPE", "sdpa")
 attention_map = {
@@ -610,7 +610,7 @@ def _metric_calculator(r: torch.Tensor, t: torch.Tensor):
         )
         return (cross_entropy, diff)
 
-    iters = CUMULATIVE_TEST_TOKENS_PER_SEQUENCE // max_new_tokens
+    iters = int(CUMULATIVE_TEST_TOKENS_PER_SEQUENCE) // max_new_tokens
     ce_fail_responses_list = []
     diff_fail_responses_list = []
     total_tokens = 0
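
For context, the pattern this commit applies is parsing an environment variable into an int at the point of definition so that integer arithmetic such as floor division works downstream. A minimal standalone sketch of the same idea follows; the variable and environment-variable names mirror the diff, while the max_new_tokens value is only an illustrative assumption, not taken from the commit.

import os

# Parse the per-sequence token budget from the environment once, as an int.
# Defaults to 1024 when the variable is unset (same default as in the diff).
CUMULATIVE_TEST_TOKENS_PER_SEQUENCE = int(
    os.environ.get("FMS_TEST_SHAPES_CUMULATIVE_TEST_TOKENS_PER_SEQUENCE", "1024")
)

# Illustrative value only; in the test file this comes from the test parameters.
max_new_tokens = 128

# Without the int() conversion above, the env var would be a str and this floor
# division would raise:
# TypeError: unsupported operand type(s) for //: 'str' and 'int'
iters = CUMULATIVE_TEST_TOKENS_PER_SEQUENCE // max_new_tokens
print(iters)  # 8 with the default budget of 1024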
