diff --git a/.circleci/config.yml b/.circleci/config.yml
index 486a1c7f5..f17f9bace 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -46,6 +46,7 @@ jobs:
           name: "Docker: Pull required images" # not available without "not e2e" tests as they pull ahead of time
           command: |
+            docker pull codalab/codalab-legacy:py312
             docker pull codalab/codalab-legacy:py37
             docker pull codalab/codalab-legacy:py3
             docker pull vergilgxw/autotable:v2
diff --git a/documentation/docs/Developers_and_Administrators/Robot-submissions.md b/documentation/docs/Developers_and_Administrators/Robot-submissions.md
index efa029389..edb4fda3e 100644
--- a/documentation/docs/Developers_and_Administrators/Robot-submissions.md
+++ b/documentation/docs/Developers_and_Administrators/Robot-submissions.md
@@ -127,7 +127,7 @@ On the Leaderboard, you can see the score details of each of your tasks.
 
 * Create a competition with robot submissions enabled
 
-  [Example competition bundle](https://github.com/codalab/competitions-v2/blob/develop/src/tests/functional/test_files/competition.zip)
+  [Example competition bundle](https://github.com/codalab/competitions-v2/blob/develop/src/tests/functional/test_files/competition_v2_wheat_code.zip)
 
 ![Edit Competition Page Allow Bots Checkbox](../_attachments/87486437-3037af00-c5f0-11ea-8edf-e758c969ab84_1752851308355029.jpeg)
diff --git a/documentation/docs/Developers_and_Administrators/Running-tests.md b/documentation/docs/Developers_and_Administrators/Running-tests.md
index 687e8aba9..dab7395b3 100644
--- a/documentation/docs/Developers_and_Administrators/Running-tests.md
+++ b/documentation/docs/Developers_and_Administrators/Running-tests.md
@@ -23,13 +23,13 @@ The repo comes with a couple examples that are used during tests:
 ### v2 test data
 
 ```
-    src/tests/functional/test_files/submission.zip
-    src/tests/functional/test_files/competition.zip
+    src/tests/functional/test_files/submission_v2_wheat_code.zip
+    src/tests/functional/test_files/competition_v2_wheat_code.zip
 ```
 
 ### v1.5 legacy test data
 
 ```
-    src/tests/functional/test_files/submission15.zip
-    src/tests/functional/test_files/competition15.zip
+    src/tests/functional/test_files/submission15_sncf.zip
+    src/tests/functional/test_files/competition15_sncf.zip
 ```
 
 ### Other Codalab Competition examples
diff --git a/tests/test_competition.py b/tests/test_competition.py
index 6a3951417..9d4d35600 100644
--- a/tests/test_competition.py
+++ b/tests/test_competition.py
@@ -25,7 +25,7 @@ def test_competition_upload(page: Page):
     with page.expect_file_chooser() as fc_info:
         page.get_by_role("button", name="").click()
     file_chooser = fc_info.value
-    file_chooser.set_files("test_files/competitions/competition.zip")
+    file_chooser.set_files("test_files/competitions/competition_v2_wheat_code.zip")
 
     expect(page.get_by_text("Competition created!")).to_be_visible()
@@ -75,7 +75,7 @@ def test_manual_competition_creation(page: Page):
     with page.expect_file_chooser() as fc_info:
         page.get_by_role("button", name="").click()
     file_chooser = fc_info.value
-    file_chooser.set_files("test_files/competition/test_logo.png")
+    file_chooser.set_files("test_files/competitions/test_logo.png")
     page.locator(".CodeMirror-scroll").first.click()
     page.get_by_role("application").get_by_role("textbox").fill("Test Description ")
     page.get_by_role("textbox", name="Example: $1000 for the top").click()
diff --git a/tests/test_files/competitions/competition.zip b/tests/test_files/competitions/competition.zip
deleted file mode 100644
index 005fba612..000000000
Binary files a/tests/test_files/competitions/competition.zip and /dev/null differ
diff --git a/tests/test_files/competitions/competition_15_iris.zip b/tests/test_files/competitions/competition_v15_iris.zip
similarity index 100%
rename from tests/test_files/competitions/competition_15_iris.zip
rename to tests/test_files/competitions/competition_v15_iris.zip
diff --git a/tests/test_files/competitions/competition_15.zip b/tests/test_files/competitions/competition_v15_sncf.zip
similarity index 100%
rename from tests/test_files/competitions/competition_15.zip
rename to tests/test_files/competitions/competition_v15_sncf.zip
diff --git a/tests/test_files/competitions/competition_18.zip b/tests/test_files/competitions/competition_v18_autowsl.zip
similarity index 100%
rename from tests/test_files/competitions/competition_18.zip
rename to tests/test_files/competitions/competition_v18_autowsl.zip
diff --git a/tests/test_files/competitions/competition_v2_miniautoml.zip b/tests/test_files/competitions/competition_v2_miniautoml.zip
new file mode 100644
index 000000000..6f09b2b6c
Binary files /dev/null and b/tests/test_files/competitions/competition_v2_miniautoml.zip differ
diff --git a/tests/test_files/competitions/competition_v2_wheat_code.zip b/tests/test_files/competitions/competition_v2_wheat_code.zip
new file mode 100644
index 000000000..fe4d09160
Binary files /dev/null and b/tests/test_files/competitions/competition_v2_wheat_code.zip differ
diff --git a/tests/test_files/competitions/competition_v2_wheat_results.zip b/tests/test_files/competitions/competition_v2_wheat_results.zip
new file mode 100644
index 000000000..2504ded93
Binary files /dev/null and b/tests/test_files/competitions/competition_v2_wheat_results.zip differ
diff --git a/tests/test_files/competition/test_logo.png b/tests/test_files/competitions/test_logo.png
similarity index 100%
rename from tests/test_files/competition/test_logo.png
rename to tests/test_files/competitions/test_logo.png
diff --git a/tests/test_files/iris_code/iris3.zip b/tests/test_files/iris_code/iris3.zip
deleted file mode 100644
index 335b08cfd..000000000
Binary files a/tests/test_files/iris_code/iris3.zip and /dev/null differ
diff --git a/tests/test_files/iris_code/sample_code_submission.zip b/tests/test_files/iris_code/sample_code_submission.zip
deleted file mode 100644
index c054e5eae..000000000
Binary files a/tests/test_files/iris_code/sample_code_submission.zip and /dev/null differ
diff --git a/tests/test_files/submissions/submission.zip b/tests/test_files/submissions/submission.zip
deleted file mode 100644
index 4863303ff..000000000
Binary files a/tests/test_files/submissions/submission.zip and /dev/null differ
diff --git a/tests/test_files/submissions/submission_15_iris_code.zip b/tests/test_files/submissions/submission_v15_iris_code.zip
similarity index 100%
rename from tests/test_files/submissions/submission_15_iris_code.zip
rename to tests/test_files/submissions/submission_v15_iris_code.zip
diff --git a/tests/test_files/submissions/submission_15_iris_result.zip b/tests/test_files/submissions/submission_v15_iris_results.zip
similarity index 100%
rename from tests/test_files/submissions/submission_15_iris_result.zip
rename to tests/test_files/submissions/submission_v15_iris_results.zip
diff --git a/tests/test_files/submissions/submission_15.zip b/tests/test_files/submissions/submission_v15_sncf.zip
similarity index 100%
rename from tests/test_files/submissions/submission_15.zip
rename to tests/test_files/submissions/submission_v15_sncf.zip
diff --git a/tests/test_files/submissions/submission_18.zip b/tests/test_files/submissions/submission_v18_autowsl.zip
similarity index 100%
rename from tests/test_files/submissions/submission_18.zip
rename to tests/test_files/submissions/submission_v18_autowsl.zip
diff --git a/tests/test_files/submissions/submission_v2_miniautoml.zip b/tests/test_files/submissions/submission_v2_miniautoml.zip
new file mode 100644
index 000000000..720ae00bc
Binary files /dev/null and b/tests/test_files/submissions/submission_v2_miniautoml.zip differ
diff --git a/tests/test_files/submissions/submission_v2_wheat_code.zip b/tests/test_files/submissions/submission_v2_wheat_code.zip
new file mode 100644
index 000000000..b703f2b5b
Binary files /dev/null and b/tests/test_files/submissions/submission_v2_wheat_code.zip differ
diff --git a/tests/test_files/submissions/submission_v2_wheat_results.zip b/tests/test_files/submissions/submission_v2_wheat_results.zip
new file mode 100644
index 000000000..481ed0dee
Binary files /dev/null and b/tests/test_files/submissions/submission_v2_wheat_results.zip differ
diff --git a/tests/test_files/submissions/submission_v2_wheat_results_failure.zip b/tests/test_files/submissions/submission_v2_wheat_results_failure.zip
new file mode 100644
index 000000000..c88bf52b6
Binary files /dev/null and b/tests/test_files/submissions/submission_v2_wheat_results_failure.zip differ
diff --git a/tests/test_submission.py b/tests/test_submission.py
index eb99d4f4f..e23f1a6fb 100644
--- a/tests/test_submission.py
+++ b/tests/test_submission.py
@@ -22,7 +22,7 @@ def browser_context_args(browser_context_args):
     return browser_context_args
 
 
-def run_tests(page, competition, submission) -> None:
+def run_tests(page, competition, submission, expected_result="Finished") -> None:
     page.goto("/")
     page.get_by_role("link", name=" Benchmarks/Competitions").click()
     page.get_by_role("link", name=" Upload").click()
@@ -38,65 +38,103 @@ def run_tests(page, competition, submission) -> None:
         file_chooser.set_files(submission)
     expect(page.locator(".ui.indicating")).to_be_visible()
     expect(page.locator(".ui.indicating")).not_to_be_visible()
-    # Wait for Finished to show. If it does not, catch the error and reload the page in case the page didn't update automatically
+    # Wait for the run status (Finished or Failed) to show.
+    # The "Failed" regex is more flexible because the cell also contains a question mark
+    finished_or_failed = re.compile(r"^(Finished|Failed.*)$")
     try:
-        expect(page.get_by_role("cell", name="Finished")).to_be_visible(timeout=25000)
+        expect(page.get_by_role("cell", name=finished_or_failed)).to_be_visible(timeout=25000)
+    # If it does not, catch the error and reload the page in case the page didn't update automatically
     except:
         page.reload()
-        expect(page.get_by_role("cell", name="Finished")).to_be_visible(timeout=2000)
-    # Add to leaderboard and see if shows
-    text = page.locator(".submission_row").first.inner_text()
-    submission_Id = text.split(None, 1)
-    try:
-        page.locator("td:nth-child(6) > span > .icon").first.click(timeout=300)
-    except:
-        page.locator("td:nth-child(7) > span > .icon").first.click(timeout=300)
-    page.locator("div").filter(has_text=re.compile(r"^Results$")).click()
-    expect(
-        page.locator("#leaderboardTable").get_by_role(
-            "link", name=data["default_user"]["username"]
+        expect(page.get_by_role("cell", name=finished_or_failed)).to_be_visible(timeout=2000)
+    # Then we actually check if we got the expected result
+    if expected_result == "Failed":
+        expect(page.get_by_role("cell", name=re.compile(r"^Failed.*$"))).to_be_visible()
+    elif expected_result == "Finished":
+        expect(page.get_by_role("cell", name=expected_result)).to_be_visible()
+        # Add to leaderboard and see if shows
+        text = page.locator(".submission_row").first.inner_text()
+        submission_Id = text.split(None, 1)
+        try:
+            page.locator("td:nth-child(6) > span > .icon").first.click(timeout=300)
+        except:
+            page.locator("td:nth-child(7) > span > .icon").first.click(timeout=300)
+        page.locator("div").filter(has_text=re.compile(r"^Results$")).click()
+        expect(
+            page.locator("#leaderboardTable").get_by_role(
+                "link", name=data["default_user"]["username"]
+            )
+        ).to_be_visible()
+        expect(page.get_by_role("cell", name=submission_Id[0], exact=True)).to_be_visible()
+    else:
+        raise ValueError(
+            f"Unsupported expected_result={expected_result!r}. "
+            "Expected 'Finished' or 'Failed'."
         )
-    ).to_be_visible()
-    expect(page.get_by_role("cell", name=submission_Id[0], exact=True)).to_be_visible()
 
 
-def test_basic(page: Page):
+def test_v2_code(page: Page):
+    run_tests(
+        page,
+        competition="test_files/competitions/competition_v2_wheat_code.zip",
+        submission="test_files/submissions/submission_v2_wheat_code.zip",
+    )
+
+
+def test_v2_results(page: Page):
+    run_tests(
+        page,
+        competition="test_files/competitions/competition_v2_wheat_results.zip",
+        submission="test_files/submissions/submission_v2_wheat_results.zip",
+    )
+
+
+def test_v2_results_failure(page: Page):
+    run_tests(
+        page,
+        competition="test_files/competitions/competition_v2_wheat_results.zip",
+        submission="test_files/submissions/submission_v2_wheat_results_failure.zip",
+        expected_result="Failed",
+    )
+
+
+def test_v2_miniautoml(page: Page):
     run_tests(
         page,
-        competition="test_files/competitions/competition.zip",
-        submission="test_files/submissions/submission.zip",
+        competition="test_files/competitions/competition_v2_miniautoml.zip",
+        submission="test_files/submissions/submission_v2_miniautoml.zip",
     )
 
 
-def test_v15(page: Page):
+def test_v15_sncf(page: Page):
     run_tests(
         page,
-        competition="test_files/competitions/competition_15.zip",
-        submission="test_files/submissions/submission_15.zip",
+        competition="test_files/competitions/competition_v15_sncf.zip",
+        submission="test_files/submissions/submission_v15_sncf.zip",
    )
 
 
-def test_irisV15_code(page: Page):
+def test_v15_iris_code(page: Page):
     run_tests(
         page,
-        competition="test_files/competitions/competition_15_iris.zip",
-        submission="test_files/submissions/submission_15_iris_code.zip",
+        competition="test_files/competitions/competition_v15_iris.zip",
+        submission="test_files/submissions/submission_v15_iris_code.zip",
     )
 
 
-def test_irisV15_result(page: Page):
+def test_v15_iris_results(page: Page):
     run_tests(
         page,
-        competition="test_files/competitions/competition_15_iris.zip",
-        submission="test_files/submissions/submission_15_iris_result.zip",
+        competition="test_files/competitions/competition_v15_iris.zip",
+        submission="test_files/submissions/submission_v15_iris_results.zip",
     )
 
 
-def test_v18(page: Page):
+def test_v18_autowsl(page: Page):
     run_tests(
         page,
-        competition="test_files/competitions/competition_18.zip",
-        submission="test_files/submissions/submission_18.zip",
+        competition="test_files/competitions/competition_v18_autowsl.zip",
+        submission="test_files/submissions/submission_v18_autowsl.zip",
     )
@@ -115,7 +153,7 @@ def test_v2_multiTask(page: Page) -> None:
     with page.expect_file_chooser() as fc_info:
         page.get_by_role("button", name="").click()
     file_chooser = fc_info.value
-    file_chooser.set_files("test_files/submissions/submission.zip")
+    file_chooser.set_files("test_files/submissions/submission_v2_wheat_code.zip")
     expect(page.locator(".ui.indicating")).to_be_visible()
     expect(page.locator(".ui.indicating")).not_to_be_visible()
     # Wait for Finished to show. If it does not, catch the error and reload the page in case the page didn't update automatically
@@ -175,7 +213,7 @@ def test_v2_multiTaskFactSheet(page: Page) -> None:
     with page.expect_file_chooser() as fc_info:
         page.get_by_role("button", name="").click()
     file_chooser = fc_info.value
-    file_chooser.set_files("test_files/submissions/submission.zip")
+    file_chooser.set_files("test_files/submissions/submission_v2_wheat_code.zip")
     expect(page.locator(".ui.indicating")).to_be_visible()
     expect(page.locator(".ui.indicating")).not_to_be_visible()
     # Wait for Finished to show. If it does not, catch the error and reload the page in case the page didn't update automatically