From abbc6b2c96958f4bd512d37a3f667fdd8bdadbc2 Mon Sep 17 00:00:00 2001 From: Eleanor Boyd Date: Mon, 14 Aug 2023 17:22:53 -0700 Subject: [PATCH] Feature branch testing overflow bug fix (#21812) This merges in two PRs that were reverted because of a bug introduced that caused subprocess overflow. reverted PRs: https://github.com/microsoft/vscode-python/pull/21667, https://github.com/microsoft/vscode-python/pull/21682 This now implements these two PRs allowing for absolute testIds and an execObservable for the subprocess. This PR also adds a bug fix and functional tests to ensure this doesn't happen again. Since this PR is large, all items in it have already been reviewed as they were merged into the feature branch. --- .../pytestadapter/.data/root/tests/pytest.ini | 0 .../pytestadapter/.data/root/tests/test_a.py | 6 + .../pytestadapter/.data/root/tests/test_b.py | 6 + .../expected_discovery_test_output.py | 516 +++++++++++++----- .../expected_execution_test_output.py | 365 ++++++++++--- pythonFiles/tests/pytestadapter/helpers.py | 16 +- .../tests/pytestadapter/test_discovery.py | 52 +- .../tests/pytestadapter/test_execution.py | 45 +- pythonFiles/vscode_pytest/__init__.py | 58 +- .../testing/testController/common/server.ts | 36 +- .../testing/testController/common/types.ts | 7 +- .../pytest/pytestDiscoveryAdapter.ts | 48 +- .../pytest/pytestExecutionAdapter.ts | 64 ++- .../unittest/testDiscoveryAdapter.ts | 20 +- .../unittest/testExecutionAdapter.ts | 22 +- src/test/linters/lint.functional.test.ts | 6 - src/test/mocks/helper.ts | 11 + src/test/mocks/mockChildProcess.ts | 239 ++++++++ .../testing/common/testingAdapter.test.ts | 428 +++++++++++++++ .../pytestDiscoveryAdapter.unit.test.ts | 76 ++- .../pytestExecutionAdapter.unit.test.ts | 118 +++- .../testController/server.unit.test.ts | 248 ++++++--- .../workspaceTestAdapter.unit.test.ts | 3 +- .../test_parameterized_subtest.py | 16 + .../smallWorkspace/test_simple.py | 12 + 25 files changed, 1993 insertions(+), 425 deletions(-) create mode 100644 pythonFiles/tests/pytestadapter/.data/root/tests/pytest.ini create mode 100644 pythonFiles/tests/pytestadapter/.data/root/tests/test_a.py create mode 100644 pythonFiles/tests/pytestadapter/.data/root/tests/test_b.py create mode 100644 src/test/mocks/helper.ts create mode 100644 src/test/mocks/mockChildProcess.ts create mode 100644 src/test/testing/common/testingAdapter.test.ts create mode 100644 src/testTestingRootWkspc/largeWorkspace/test_parameterized_subtest.py create mode 100644 src/testTestingRootWkspc/smallWorkspace/test_simple.py diff --git a/pythonFiles/tests/pytestadapter/.data/root/tests/pytest.ini b/pythonFiles/tests/pytestadapter/.data/root/tests/pytest.ini new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/tests/pytestadapter/.data/root/tests/test_a.py b/pythonFiles/tests/pytestadapter/.data/root/tests/test_a.py new file mode 100644 index 0000000000000..3ec3dd9626cb6 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/root/tests/test_a.py @@ -0,0 +1,6 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + +def test_a_function(): # test_marker--test_a_function + assert True diff --git a/pythonFiles/tests/pytestadapter/.data/root/tests/test_b.py b/pythonFiles/tests/pytestadapter/.data/root/tests/test_b.py new file mode 100644 index 0000000000000..0d3148641f852 --- /dev/null +++ b/pythonFiles/tests/pytestadapter/.data/root/tests/test_b.py @@ -0,0 +1,6 @@ +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. + + +def test_b_function(): # test_marker--test_b_function + assert True diff --git a/pythonFiles/tests/pytestadapter/expected_discovery_test_output.py b/pythonFiles/tests/pytestadapter/expected_discovery_test_output.py index 91c1453dfc77f..2b2c07ab8ea72 100644 --- a/pythonFiles/tests/pytestadapter/expected_discovery_test_output.py +++ b/pythonFiles/tests/pytestadapter/expected_discovery_test_output.py @@ -1,6 +1,7 @@ import os -from .helpers import TEST_DATA_PATH, find_test_line_number + +from .helpers import TEST_DATA_PATH, find_test_line_number, get_absolute_test_id # This file contains the expected output dictionaries for tests discovery and is used in test_discovery.py. @@ -18,7 +19,7 @@ # This is the expected output for the simple_pytest.py file. # └── simple_pytest.py # └── test_function -simple_test_file_path = os.fspath(TEST_DATA_PATH / "simple_pytest.py") +simple_test_file_path = TEST_DATA_PATH / "simple_pytest.py" simple_discovery_pytest_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -26,20 +27,24 @@ "children": [ { "name": "simple_pytest.py", - "path": simple_test_file_path, + "path": os.fspath(simple_test_file_path), "type_": "file", - "id_": simple_test_file_path, + "id_": os.fspath(simple_test_file_path), "children": [ { "name": "test_function", - "path": simple_test_file_path, + "path": os.fspath(simple_test_file_path), "lineno": find_test_line_number( "test_function", simple_test_file_path, ), "type_": "test", - "id_": "simple_pytest.py::test_function", - "runID": "simple_pytest.py::test_function", + "id_": get_absolute_test_id( + "simple_pytest.py::test_function", simple_test_file_path + ), + "runID": get_absolute_test_id( + "simple_pytest.py::test_function", simple_test_file_path + ), } ], } @@ -52,7 +57,7 @@ # ├── TestExample # │ └── test_true_unittest # └── test_true_pytest -unit_pytest_same_file_path = os.fspath(TEST_DATA_PATH / "unittest_pytest_same_file.py") +unit_pytest_same_file_path = TEST_DATA_PATH / "unittest_pytest_same_file.py" unit_pytest_same_file_discovery_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -60,39 +65,51 @@ "children": [ { "name": "unittest_pytest_same_file.py", - "path": unit_pytest_same_file_path, + "path": os.fspath(unit_pytest_same_file_path), "type_": "file", - "id_": unit_pytest_same_file_path, + "id_": os.fspath(unit_pytest_same_file_path), "children": [ { "name": "TestExample", - "path": unit_pytest_same_file_path, + "path": os.fspath(unit_pytest_same_file_path), "type_": "class", "children": [ { "name": "test_true_unittest", - "path": unit_pytest_same_file_path, + "path": os.fspath(unit_pytest_same_file_path), "lineno": find_test_line_number( "test_true_unittest", - unit_pytest_same_file_path, + os.fspath(unit_pytest_same_file_path), ), "type_": "test", - "id_": "unittest_pytest_same_file.py::TestExample::test_true_unittest", - "runID": "unittest_pytest_same_file.py::TestExample::test_true_unittest", + "id_": get_absolute_test_id( + "unittest_pytest_same_file.py::TestExample::test_true_unittest", + unit_pytest_same_file_path, + ), + "runID": get_absolute_test_id( + "unittest_pytest_same_file.py::TestExample::test_true_unittest", + unit_pytest_same_file_path, + ), } ], "id_": "unittest_pytest_same_file.py::TestExample", }, { "name": "test_true_pytest", - "path": unit_pytest_same_file_path, + "path": os.fspath(unit_pytest_same_file_path), "lineno": find_test_line_number( "test_true_pytest", unit_pytest_same_file_path, ), "type_": "test", - 
"id_": "unittest_pytest_same_file.py::test_true_pytest", - "runID": "unittest_pytest_same_file.py::test_true_pytest", + "id_": get_absolute_test_id( + "unittest_pytest_same_file.py::test_true_pytest", + unit_pytest_same_file_path, + ), + "runID": get_absolute_test_id( + "unittest_pytest_same_file.py::test_true_pytest", + unit_pytest_same_file_path, + ), }, ], } @@ -124,9 +141,9 @@ # └── test_subtract_positive_numbers # │ └── TestDuplicateFunction # │ └── test_dup_s -unittest_folder_path = os.fspath(TEST_DATA_PATH / "unittest_folder") -test_add_path = os.fspath(TEST_DATA_PATH / "unittest_folder" / "test_add.py") -test_subtract_path = os.fspath(TEST_DATA_PATH / "unittest_folder" / "test_subtract.py") +unittest_folder_path = TEST_DATA_PATH / "unittest_folder" +test_add_path = TEST_DATA_PATH / "unittest_folder" / "test_add.py" +test_subtract_path = TEST_DATA_PATH / "unittest_folder" / "test_subtract.py" unittest_folder_discovery_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -134,61 +151,79 @@ "children": [ { "name": "unittest_folder", - "path": unittest_folder_path, + "path": os.fspath(unittest_folder_path), "type_": "folder", - "id_": unittest_folder_path, + "id_": os.fspath(unittest_folder_path), "children": [ { "name": "test_add.py", - "path": test_add_path, + "path": os.fspath(test_add_path), "type_": "file", - "id_": test_add_path, + "id_": os.fspath(test_add_path), "children": [ { "name": "TestAddFunction", - "path": test_add_path, + "path": os.fspath(test_add_path), "type_": "class", "children": [ { "name": "test_add_negative_numbers", - "path": test_add_path, + "path": os.fspath(test_add_path), "lineno": find_test_line_number( "test_add_negative_numbers", - test_add_path, + os.fspath(test_add_path), ), "type_": "test", - "id_": "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers", - "runID": "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers", + "id_": get_absolute_test_id( + "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers", + test_add_path, + ), + "runID": get_absolute_test_id( + "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers", + test_add_path, + ), }, { "name": "test_add_positive_numbers", - "path": test_add_path, + "path": os.fspath(test_add_path), "lineno": find_test_line_number( "test_add_positive_numbers", - test_add_path, + os.fspath(test_add_path), ), "type_": "test", - "id_": "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers", - "runID": "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers", + "id_": get_absolute_test_id( + "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers", + test_add_path, + ), + "runID": get_absolute_test_id( + "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers", + test_add_path, + ), }, ], "id_": "unittest_folder/test_add.py::TestAddFunction", }, { "name": "TestDuplicateFunction", - "path": test_add_path, + "path": os.fspath(test_add_path), "type_": "class", "children": [ { "name": "test_dup_a", - "path": test_add_path, + "path": os.fspath(test_add_path), "lineno": find_test_line_number( "test_dup_a", - test_add_path, + os.fspath(test_add_path), ), "type_": "test", - "id_": "unittest_folder/test_add.py::TestDuplicateFunction::test_dup_a", - "runID": "unittest_folder/test_add.py::TestDuplicateFunction::test_dup_a", + "id_": get_absolute_test_id( + "unittest_folder/test_add.py::TestDuplicateFunction::test_dup_a", + test_add_path, + ), + "runID": 
get_absolute_test_id( + "unittest_folder/test_add.py::TestDuplicateFunction::test_dup_a", + test_add_path, + ), }, ], "id_": "unittest_folder/test_add.py::TestDuplicateFunction", @@ -197,55 +232,73 @@ }, { "name": "test_subtract.py", - "path": test_subtract_path, + "path": os.fspath(test_subtract_path), "type_": "file", - "id_": test_subtract_path, + "id_": os.fspath(test_subtract_path), "children": [ { "name": "TestSubtractFunction", - "path": test_subtract_path, + "path": os.fspath(test_subtract_path), "type_": "class", "children": [ { "name": "test_subtract_negative_numbers", - "path": test_subtract_path, + "path": os.fspath(test_subtract_path), "lineno": find_test_line_number( "test_subtract_negative_numbers", - test_subtract_path, + os.fspath(test_subtract_path), ), "type_": "test", - "id_": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers", - "runID": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers", + "id_": get_absolute_test_id( + "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers", + test_subtract_path, + ), + "runID": get_absolute_test_id( + "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers", + test_subtract_path, + ), }, { "name": "test_subtract_positive_numbers", - "path": test_subtract_path, + "path": os.fspath(test_subtract_path), "lineno": find_test_line_number( "test_subtract_positive_numbers", - test_subtract_path, + os.fspath(test_subtract_path), ), "type_": "test", - "id_": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers", - "runID": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers", + "id_": get_absolute_test_id( + "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers", + test_subtract_path, + ), + "runID": get_absolute_test_id( + "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers", + test_subtract_path, + ), }, ], "id_": "unittest_folder/test_subtract.py::TestSubtractFunction", }, { "name": "TestDuplicateFunction", - "path": test_subtract_path, + "path": os.fspath(test_subtract_path), "type_": "class", "children": [ { "name": "test_dup_s", - "path": test_subtract_path, + "path": os.fspath(test_subtract_path), "lineno": find_test_line_number( "test_dup_s", - test_subtract_path, + os.fspath(test_subtract_path), ), "type_": "test", - "id_": "unittest_folder/test_subtract.py::TestDuplicateFunction::test_dup_s", - "runID": "unittest_folder/test_subtract.py::TestDuplicateFunction::test_dup_s", + "id_": get_absolute_test_id( + "unittest_folder/test_subtract.py::TestDuplicateFunction::test_dup_s", + test_subtract_path, + ), + "runID": get_absolute_test_id( + "unittest_folder/test_subtract.py::TestDuplicateFunction::test_dup_s", + test_subtract_path, + ), }, ], "id_": "unittest_folder/test_subtract.py::TestDuplicateFunction", @@ -268,20 +321,23 @@ # └── test_bottom_folder.py # └── test_bottom_function_t # └── test_bottom_function_f -dual_level_nested_folder_path = os.fspath(TEST_DATA_PATH / "dual_level_nested_folder") -test_top_folder_path = os.fspath( +dual_level_nested_folder_path = TEST_DATA_PATH / "dual_level_nested_folder" +test_top_folder_path = ( TEST_DATA_PATH / "dual_level_nested_folder" / "test_top_folder.py" ) -test_nested_folder_one_path = os.fspath( + +test_nested_folder_one_path = ( TEST_DATA_PATH / "dual_level_nested_folder" / "nested_folder_one" ) 
-test_bottom_folder_path = os.fspath( + +test_bottom_folder_path = ( TEST_DATA_PATH / "dual_level_nested_folder" / "nested_folder_one" / "test_bottom_folder.py" ) + dual_level_nested_folder_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -289,73 +345,97 @@ "children": [ { "name": "dual_level_nested_folder", - "path": dual_level_nested_folder_path, + "path": os.fspath(dual_level_nested_folder_path), "type_": "folder", - "id_": dual_level_nested_folder_path, + "id_": os.fspath(dual_level_nested_folder_path), "children": [ { "name": "test_top_folder.py", - "path": test_top_folder_path, + "path": os.fspath(test_top_folder_path), "type_": "file", - "id_": test_top_folder_path, + "id_": os.fspath(test_top_folder_path), "children": [ { "name": "test_top_function_t", - "path": test_top_folder_path, + "path": os.fspath(test_top_folder_path), "lineno": find_test_line_number( "test_top_function_t", test_top_folder_path, ), "type_": "test", - "id_": "dual_level_nested_folder/test_top_folder.py::test_top_function_t", - "runID": "dual_level_nested_folder/test_top_folder.py::test_top_function_t", + "id_": get_absolute_test_id( + "dual_level_nested_folder/test_top_folder.py::test_top_function_t", + test_top_folder_path, + ), + "runID": get_absolute_test_id( + "dual_level_nested_folder/test_top_folder.py::test_top_function_t", + test_top_folder_path, + ), }, { "name": "test_top_function_f", - "path": test_top_folder_path, + "path": os.fspath(test_top_folder_path), "lineno": find_test_line_number( "test_top_function_f", test_top_folder_path, ), "type_": "test", - "id_": "dual_level_nested_folder/test_top_folder.py::test_top_function_f", - "runID": "dual_level_nested_folder/test_top_folder.py::test_top_function_f", + "id_": get_absolute_test_id( + "dual_level_nested_folder/test_top_folder.py::test_top_function_f", + test_top_folder_path, + ), + "runID": get_absolute_test_id( + "dual_level_nested_folder/test_top_folder.py::test_top_function_f", + test_top_folder_path, + ), }, ], }, { "name": "nested_folder_one", - "path": test_nested_folder_one_path, + "path": os.fspath(test_nested_folder_one_path), "type_": "folder", - "id_": test_nested_folder_one_path, + "id_": os.fspath(test_nested_folder_one_path), "children": [ { "name": "test_bottom_folder.py", - "path": test_bottom_folder_path, + "path": os.fspath(test_bottom_folder_path), "type_": "file", - "id_": test_bottom_folder_path, + "id_": os.fspath(test_bottom_folder_path), "children": [ { "name": "test_bottom_function_t", - "path": test_bottom_folder_path, + "path": os.fspath(test_bottom_folder_path), "lineno": find_test_line_number( "test_bottom_function_t", test_bottom_folder_path, ), "type_": "test", - "id_": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", - "runID": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + "id_": get_absolute_test_id( + "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + test_bottom_folder_path, + ), + "runID": get_absolute_test_id( + "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + test_bottom_folder_path, + ), }, { "name": "test_bottom_function_f", - "path": test_bottom_folder_path, + "path": os.fspath(test_bottom_folder_path), "lineno": find_test_line_number( "test_bottom_function_f", test_bottom_folder_path, ), "type_": "test", - "id_": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", - 
"runID": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + "id_": get_absolute_test_id( + "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + test_bottom_folder_path, + ), + "runID": get_absolute_test_id( + "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + test_bottom_folder_path, + ), }, ], } @@ -374,12 +454,10 @@ # └── test_nest.py # └── test_function -folder_a_path = os.fspath(TEST_DATA_PATH / "folder_a") -folder_b_path = os.fspath(TEST_DATA_PATH / "folder_a" / "folder_b") -folder_a_nested_path = os.fspath(TEST_DATA_PATH / "folder_a" / "folder_b" / "folder_a") -test_nest_path = os.fspath( - TEST_DATA_PATH / "folder_a" / "folder_b" / "folder_a" / "test_nest.py" -) +folder_a_path = TEST_DATA_PATH / "folder_a" +folder_b_path = TEST_DATA_PATH / "folder_a" / "folder_b" +folder_a_nested_path = TEST_DATA_PATH / "folder_a" / "folder_b" / "folder_a" +test_nest_path = TEST_DATA_PATH / "folder_a" / "folder_b" / "folder_a" / "test_nest.py" double_nested_folder_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -387,38 +465,44 @@ "children": [ { "name": "folder_a", - "path": folder_a_path, + "path": os.fspath(folder_a_path), "type_": "folder", - "id_": folder_a_path, + "id_": os.fspath(folder_a_path), "children": [ { "name": "folder_b", - "path": folder_b_path, + "path": os.fspath(folder_b_path), "type_": "folder", - "id_": folder_b_path, + "id_": os.fspath(folder_b_path), "children": [ { "name": "folder_a", - "path": folder_a_nested_path, + "path": os.fspath(folder_a_nested_path), "type_": "folder", - "id_": folder_a_nested_path, + "id_": os.fspath(folder_a_nested_path), "children": [ { "name": "test_nest.py", - "path": test_nest_path, + "path": os.fspath(test_nest_path), "type_": "file", - "id_": test_nest_path, + "id_": os.fspath(test_nest_path), "children": [ { "name": "test_function", - "path": test_nest_path, + "path": os.fspath(test_nest_path), "lineno": find_test_line_number( "test_function", test_nest_path, ), "type_": "test", - "id_": "folder_a/folder_b/folder_a/test_nest.py::test_function", - "runID": "folder_a/folder_b/folder_a/test_nest.py::test_function", + "id_": get_absolute_test_id( + "folder_a/folder_b/folder_a/test_nest.py::test_function", + test_nest_path, + ), + "runID": get_absolute_test_id( + "folder_a/folder_b/folder_a/test_nest.py::test_function", + test_nest_path, + ), } ], } @@ -438,7 +522,7 @@ # └── [3+5-8] # └── [2+4-6] # └── [6+9-16] -parameterize_tests_path = os.fspath(TEST_DATA_PATH / "parametrize_tests.py") +parameterize_tests_path = TEST_DATA_PATH / "parametrize_tests.py" parametrize_tests_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -446,77 +530,107 @@ "children": [ { "name": "parametrize_tests.py", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "type_": "file", - "id_": parameterize_tests_path, + "id_": os.fspath(parameterize_tests_path), "children": [ { "name": "test_adding", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "type_": "function", "id_": "parametrize_tests.py::test_adding", "children": [ { "name": "[3+5-8]", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "lineno": find_test_line_number( "test_adding[3+5-8]", parameterize_tests_path, ), "type_": "test", - "id_": "parametrize_tests.py::test_adding[3+5-8]", - "runID": "parametrize_tests.py::test_adding[3+5-8]", + "id_": 
get_absolute_test_id( + "parametrize_tests.py::test_adding[3+5-8]", + parameterize_tests_path, + ), + "runID": get_absolute_test_id( + "parametrize_tests.py::test_adding[3+5-8]", + parameterize_tests_path, + ), }, { "name": "[2+4-6]", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "lineno": find_test_line_number( "test_adding[2+4-6]", parameterize_tests_path, ), "type_": "test", - "id_": "parametrize_tests.py::test_adding[2+4-6]", - "runID": "parametrize_tests.py::test_adding[2+4-6]", + "id_": get_absolute_test_id( + "parametrize_tests.py::test_adding[2+4-6]", + parameterize_tests_path, + ), + "runID": get_absolute_test_id( + "parametrize_tests.py::test_adding[2+4-6]", + parameterize_tests_path, + ), }, { "name": "[6+9-16]", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "lineno": find_test_line_number( "test_adding[6+9-16]", parameterize_tests_path, ), "type_": "test", - "id_": "parametrize_tests.py::test_adding[6+9-16]", - "runID": "parametrize_tests.py::test_adding[6+9-16]", + "id_": get_absolute_test_id( + "parametrize_tests.py::test_adding[6+9-16]", + parameterize_tests_path, + ), + "runID": get_absolute_test_id( + "parametrize_tests.py::test_adding[6+9-16]", + parameterize_tests_path, + ), }, ], }, { "name": "test_under_ten", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "type_": "function", "children": [ { "name": "[1]", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "lineno": find_test_line_number( "test_under_ten[1]", parameterize_tests_path, ), "type_": "test", - "id_": "parametrize_tests.py::test_under_ten[1]", - "runID": "parametrize_tests.py::test_under_ten[1]", + "id_": get_absolute_test_id( + "parametrize_tests.py::test_under_ten[1]", + parameterize_tests_path, + ), + "runID": get_absolute_test_id( + "parametrize_tests.py::test_under_ten[1]", + parameterize_tests_path, + ), }, { "name": "[2]", - "path": parameterize_tests_path, + "path": os.fspath(parameterize_tests_path), "lineno": find_test_line_number( "test_under_ten[2]", parameterize_tests_path, ), "type_": "test", - "id_": "parametrize_tests.py::test_under_ten[2]", - "runID": "parametrize_tests.py::test_under_ten[2]", + "id_": get_absolute_test_id( + "parametrize_tests.py::test_under_ten[2]", + parameterize_tests_path, + ), + "runID": get_absolute_test_id( + "parametrize_tests.py::test_under_ten[2]", + parameterize_tests_path, + ), }, ], "id_": "parametrize_tests.py::test_under_ten", @@ -529,7 +643,7 @@ # This is the expected output for the text_docstring.txt tests. 
# └── text_docstring.txt -text_docstring_path = os.fspath(TEST_DATA_PATH / "text_docstring.txt") +text_docstring_path = TEST_DATA_PATH / "text_docstring.txt" doctest_pytest_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -537,20 +651,24 @@ "children": [ { "name": "text_docstring.txt", - "path": text_docstring_path, + "path": os.fspath(text_docstring_path), "type_": "file", - "id_": text_docstring_path, + "id_": os.fspath(text_docstring_path), "children": [ { "name": "text_docstring.txt", - "path": text_docstring_path, + "path": os.fspath(text_docstring_path), "lineno": find_test_line_number( "text_docstring.txt", - text_docstring_path, + os.fspath(text_docstring_path), ), "type_": "test", - "id_": "text_docstring.txt::text_docstring.txt", - "runID": "text_docstring.txt::text_docstring.txt", + "id_": get_absolute_test_id( + "text_docstring.txt::text_docstring.txt", text_docstring_path + ), + "runID": get_absolute_test_id( + "text_docstring.txt::text_docstring.txt", text_docstring_path + ), } ], } @@ -570,8 +688,8 @@ # └── [1] # └── [2] # └── [3] -param1_path = os.fspath(TEST_DATA_PATH / "param_same_name" / "test_param1.py") -param2_path = os.fspath(TEST_DATA_PATH / "param_same_name" / "test_param2.py") +param1_path = TEST_DATA_PATH / "param_same_name" / "test_param1.py" +param2_path = TEST_DATA_PATH / "param_same_name" / "test_param2.py" param_same_name_expected_output = { "name": ".data", "path": TEST_DATA_PATH_STR, @@ -585,38 +703,56 @@ "children": [ { "name": "test_param1.py", - "path": param1_path, + "path": os.fspath(param1_path), "type_": "file", - "id_": param1_path, + "id_": os.fspath(param1_path), "children": [ { "name": "test_odd_even", - "path": param1_path, + "path": os.fspath(param1_path), "type_": "function", "children": [ { "name": "[a]", - "path": param1_path, + "path": os.fspath(param1_path), "lineno": "6", "type_": "test", - "id_": "param_same_name/test_param1.py::test_odd_even[a]", - "runID": "param_same_name/test_param1.py::test_odd_even[a]", + "id_": get_absolute_test_id( + "param_same_name/test_param1.py::test_odd_even[a]", + param1_path, + ), + "runID": get_absolute_test_id( + "param_same_name/test_param1.py::test_odd_even[a]", + param1_path, + ), }, { "name": "[b]", - "path": param1_path, + "path": os.fspath(param1_path), "lineno": "6", "type_": "test", - "id_": "param_same_name/test_param1.py::test_odd_even[b]", - "runID": "param_same_name/test_param1.py::test_odd_even[b]", + "id_": get_absolute_test_id( + "param_same_name/test_param1.py::test_odd_even[b]", + param1_path, + ), + "runID": get_absolute_test_id( + "param_same_name/test_param1.py::test_odd_even[b]", + param1_path, + ), }, { "name": "[c]", - "path": param1_path, + "path": os.fspath(param1_path), "lineno": "6", "type_": "test", - "id_": "param_same_name/test_param1.py::test_odd_even[c]", - "runID": "param_same_name/test_param1.py::test_odd_even[c]", + "id_": get_absolute_test_id( + "param_same_name/test_param1.py::test_odd_even[c]", + param1_path, + ), + "runID": get_absolute_test_id( + "param_same_name/test_param1.py::test_odd_even[c]", + param1_path, + ), }, ], "id_": "param_same_name/test_param1.py::test_odd_even", @@ -625,38 +761,56 @@ }, { "name": "test_param2.py", - "path": param2_path, + "path": os.fspath(param2_path), "type_": "file", - "id_": param2_path, + "id_": os.fspath(param2_path), "children": [ { "name": "test_odd_even", - "path": param2_path, + "path": os.fspath(param2_path), "type_": "function", "children": [ { "name": "[1]", - "path": param2_path, + "path": 
os.fspath(param2_path), "lineno": "6", "type_": "test", - "id_": "param_same_name/test_param2.py::test_odd_even[1]", - "runID": "param_same_name/test_param2.py::test_odd_even[1]", + "id_": get_absolute_test_id( + "param_same_name/test_param2.py::test_odd_even[1]", + param2_path, + ), + "runID": get_absolute_test_id( + "param_same_name/test_param2.py::test_odd_even[1]", + param2_path, + ), }, { "name": "[2]", - "path": param2_path, + "path": os.fspath(param2_path), "lineno": "6", "type_": "test", - "id_": "param_same_name/test_param2.py::test_odd_even[2]", - "runID": "param_same_name/test_param2.py::test_odd_even[2]", + "id_": get_absolute_test_id( + "param_same_name/test_param2.py::test_odd_even[2]", + param2_path, + ), + "runID": get_absolute_test_id( + "param_same_name/test_param2.py::test_odd_even[2]", + param2_path, + ), }, { "name": "[3]", - "path": param2_path, + "path": os.fspath(param2_path), "lineno": "6", "type_": "test", - "id_": "param_same_name/test_param2.py::test_odd_even[3]", - "runID": "param_same_name/test_param2.py::test_odd_even[3]", + "id_": get_absolute_test_id( + "param_same_name/test_param2.py::test_odd_even[3]", + param2_path, + ), + "runID": get_absolute_test_id( + "param_same_name/test_param2.py::test_odd_even[3]", + param2_path, + ), }, ], "id_": "param_same_name/test_param2.py::test_odd_even", @@ -668,3 +822,67 @@ ], "id_": TEST_DATA_PATH_STR, } + +tests_path = TEST_DATA_PATH / "root" / "tests" +tests_a_path = TEST_DATA_PATH / "root" / "tests" / "test_a.py" +tests_b_path = TEST_DATA_PATH / "root" / "tests" / "test_b.py" +# This is the expected output for the root folder tests. +# └── tests +# └── test_a.py +# └── test_a_function +# └── test_b.py +# └── test_b_function +root_with_config_expected_output = { + "name": "tests", + "path": os.fspath(tests_path), + "type_": "folder", + "children": [ + { + "name": "test_a.py", + "path": os.fspath(tests_a_path), + "type_": "file", + "id_": os.fspath(tests_a_path), + "children": [ + { + "name": "test_a_function", + "path": os.fspath(os.path.join(tests_path, "test_a.py")), + "lineno": find_test_line_number( + "test_a_function", + os.path.join(tests_path, "test_a.py"), + ), + "type_": "test", + "id_": get_absolute_test_id( + "tests/test_a.py::test_a_function", tests_a_path + ), + "runID": get_absolute_test_id( + "tests/test_a.py::test_a_function", tests_a_path + ), + } + ], + }, + { + "name": "test_b.py", + "path": os.fspath(tests_b_path), + "type_": "file", + "id_": os.fspath(tests_b_path), + "children": [ + { + "name": "test_b_function", + "path": os.fspath(os.path.join(tests_path, "test_b.py")), + "lineno": find_test_line_number( + "test_b_function", + os.path.join(tests_path, "test_b.py"), + ), + "type_": "test", + "id_": get_absolute_test_id( + "tests/test_b.py::test_b_function", tests_b_path + ), + "runID": get_absolute_test_id( + "tests/test_b.py::test_b_function", tests_b_path + ), + } + ], + }, + ], + "id_": os.fspath(tests_path), +} diff --git a/pythonFiles/tests/pytestadapter/expected_execution_test_output.py b/pythonFiles/tests/pytestadapter/expected_execution_test_output.py index dd8f458d792e1..76d21b3e25188 100644 --- a/pythonFiles/tests/pytestadapter/expected_execution_test_output.py +++ b/pythonFiles/tests/pytestadapter/expected_execution_test_output.py @@ -1,14 +1,12 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
+from .helpers import TEST_DATA_PATH, get_absolute_test_id TEST_SUBTRACT_FUNCTION = "unittest_folder/test_subtract.py::TestSubtractFunction::" TEST_ADD_FUNCTION = "unittest_folder/test_add.py::TestAddFunction::" SUCCESS = "success" FAILURE = "failure" -TEST_SUBTRACT_FUNCTION_NEGATIVE_NUMBERS_ERROR = "self = \n\n def test_subtract_negative_numbers( # test_marker--test_subtract_negative_numbers\n self,\n ):\n result = subtract(-2, -3)\n> self.assertEqual(result, 100000)\nE AssertionError: 1 != 100000\n\nunittest_folder/test_subtract.py:25: AssertionError" # noqa: E501 - - # This is the expected output for the unittest_folder execute tests # └── unittest_folder # ├── test_add.py @@ -19,30 +17,52 @@ # └── TestSubtractFunction # ├── test_subtract_negative_numbers: failure # └── test_subtract_positive_numbers: success +test_add_path = TEST_DATA_PATH / "unittest_folder" / "test_add.py" +test_subtract_path = TEST_DATA_PATH / "unittest_folder" / "test_subtract.py" uf_execution_expected_output = { - f"{TEST_ADD_FUNCTION}test_add_negative_numbers": { - "test": f"{TEST_ADD_FUNCTION}test_add_negative_numbers", + get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_negative_numbers", test_add_path + ): { + "test": get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_negative_numbers", test_add_path + ), "outcome": SUCCESS, "message": None, "traceback": None, "subtest": None, }, - f"{TEST_ADD_FUNCTION}test_add_positive_numbers": { - "test": f"{TEST_ADD_FUNCTION}test_add_positive_numbers", + get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ): { + "test": get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ), "outcome": SUCCESS, "message": None, "traceback": None, "subtest": None, }, - f"{TEST_SUBTRACT_FUNCTION}test_subtract_negative_numbers": { - "test": f"{TEST_SUBTRACT_FUNCTION}test_subtract_negative_numbers", + get_absolute_test_id( + f"{TEST_SUBTRACT_FUNCTION}test_subtract_negative_numbers", + test_subtract_path, + ): { + "test": get_absolute_test_id( + f"{TEST_SUBTRACT_FUNCTION}test_subtract_negative_numbers", + test_subtract_path, + ), "outcome": FAILURE, "message": "ERROR MESSAGE", "traceback": None, "subtest": None, }, - f"{TEST_SUBTRACT_FUNCTION}test_subtract_positive_numbers": { - "test": f"{TEST_SUBTRACT_FUNCTION}test_subtract_positive_numbers", + get_absolute_test_id( + f"{TEST_SUBTRACT_FUNCTION}test_subtract_positive_numbers", + test_subtract_path, + ): { + "test": get_absolute_test_id( + f"{TEST_SUBTRACT_FUNCTION}test_subtract_positive_numbers", + test_subtract_path, + ), "outcome": SUCCESS, "message": None, "traceback": None, @@ -57,16 +77,26 @@ # │ └── TestAddFunction # │ ├── test_add_negative_numbers: success # │ └── test_add_positive_numbers: success +test_add_path = TEST_DATA_PATH / "unittest_folder" / "test_add.py" + uf_single_file_expected_output = { - f"{TEST_ADD_FUNCTION}test_add_negative_numbers": { - "test": f"{TEST_ADD_FUNCTION}test_add_negative_numbers", + get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_negative_numbers", test_add_path + ): { + "test": get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_negative_numbers", test_add_path + ), "outcome": SUCCESS, "message": None, "traceback": None, "subtest": None, }, - f"{TEST_ADD_FUNCTION}test_add_positive_numbers": { - "test": f"{TEST_ADD_FUNCTION}test_add_positive_numbers", + get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ): { + "test": get_absolute_test_id( + 
f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ), "outcome": SUCCESS, "message": None, "traceback": None, @@ -74,19 +104,24 @@ }, } + # This is the expected output for the unittest_folder execute only signle method # └── unittest_folder # ├── test_add.py # │ └── TestAddFunction # │ └── test_add_positive_numbers: success uf_single_method_execution_expected_output = { - f"{TEST_ADD_FUNCTION}test_add_positive_numbers": { - "test": f"{TEST_ADD_FUNCTION}test_add_positive_numbers", + get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ): { + "test": get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ), "outcome": SUCCESS, "message": None, "traceback": None, "subtest": None, - } + }, } # This is the expected output for the unittest_folder tests run where two tests @@ -98,18 +133,28 @@ # └── test_subtract.py # └── TestSubtractFunction # └── test_subtract_positive_numbers: success +test_subtract_path = TEST_DATA_PATH / "unittest_folder" / "test_subtract.py" +test_add_path = TEST_DATA_PATH / "unittest_folder" / "test_add.py" + uf_non_adjacent_tests_execution_expected_output = { - TEST_SUBTRACT_FUNCTION - + "test_subtract_positive_numbers": { - "test": TEST_SUBTRACT_FUNCTION + "test_subtract_positive_numbers", + get_absolute_test_id( + f"{TEST_SUBTRACT_FUNCTION}test_subtract_positive_numbers", test_subtract_path + ): { + "test": get_absolute_test_id( + f"{TEST_SUBTRACT_FUNCTION}test_subtract_positive_numbers", + test_subtract_path, + ), "outcome": SUCCESS, "message": None, "traceback": None, "subtest": None, }, - TEST_ADD_FUNCTION - + "test_add_positive_numbers": { - "test": TEST_ADD_FUNCTION + "test_add_positive_numbers", + get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ): { + "test": get_absolute_test_id( + f"{TEST_ADD_FUNCTION}test_add_positive_numbers", test_add_path + ), "outcome": SUCCESS, "message": None, "traceback": None, @@ -117,12 +162,15 @@ }, } + # This is the expected output for the simple_pytest.py file. # └── simple_pytest.py # └── test_function: success +simple_pytest_path = TEST_DATA_PATH / "unittest_folder" / "simple_pytest.py" + simple_execution_pytest_expected_output = { - "simple_pytest.py::test_function": { - "test": "simple_pytest.py::test_function", + get_absolute_test_id("test_function", simple_pytest_path): { + "test": get_absolute_test_id("test_function", simple_pytest_path), "outcome": "success", "message": None, "traceback": None, @@ -130,21 +178,34 @@ } } + # This is the expected output for the unittest_pytest_same_file.py file. 
# ├── unittest_pytest_same_file.py # ├── TestExample # │ └── test_true_unittest: success # └── test_true_pytest: success +unit_pytest_same_file_path = TEST_DATA_PATH / "unittest_pytest_same_file.py" unit_pytest_same_file_execution_expected_output = { - "unittest_pytest_same_file.py::TestExample::test_true_unittest": { - "test": "unittest_pytest_same_file.py::TestExample::test_true_unittest", + get_absolute_test_id( + "unittest_pytest_same_file.py::TestExample::test_true_unittest", + unit_pytest_same_file_path, + ): { + "test": get_absolute_test_id( + "unittest_pytest_same_file.py::TestExample::test_true_unittest", + unit_pytest_same_file_path, + ), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "unittest_pytest_same_file.py::test_true_pytest": { - "test": "unittest_pytest_same_file.py::test_true_pytest", + get_absolute_test_id( + "unittest_pytest_same_file.py::test_true_pytest", unit_pytest_same_file_path + ): { + "test": get_absolute_test_id( + "unittest_pytest_same_file.py::test_true_pytest", + unit_pytest_same_file_path, + ), "outcome": "success", "message": None, "traceback": None, @@ -156,9 +217,15 @@ # └── error_raise_exception.py # ├── TestSomething # │ └── test_a: failure +error_raised_exception_path = TEST_DATA_PATH / "error_raise_exception.py" error_raised_exception_execution_expected_output = { - "error_raise_exception.py::TestSomething::test_a": { - "test": "error_raise_exception.py::TestSomething::test_a", + get_absolute_test_id( + "error_raise_exception.py::TestSomething::test_a", error_raised_exception_path + ): { + "test": get_absolute_test_id( + "error_raise_exception.py::TestSomething::test_a", + error_raised_exception_path, + ), "outcome": "error", "message": "ERROR MESSAGE", "traceback": "TRACEBACK", @@ -174,44 +241,60 @@ # ├── TestClass # │ └── test_class_function_a: skipped # │ └── test_class_function_b: skipped + +skip_tests_path = TEST_DATA_PATH / "skip_tests.py" skip_tests_execution_expected_output = { - "skip_tests.py::test_something": { - "test": "skip_tests.py::test_something", + get_absolute_test_id("skip_tests.py::test_something", skip_tests_path): { + "test": get_absolute_test_id("skip_tests.py::test_something", skip_tests_path), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "skip_tests.py::test_another_thing": { - "test": "skip_tests.py::test_another_thing", + get_absolute_test_id("skip_tests.py::test_another_thing", skip_tests_path): { + "test": get_absolute_test_id( + "skip_tests.py::test_another_thing", skip_tests_path + ), "outcome": "skipped", "message": None, "traceback": None, "subtest": None, }, - "skip_tests.py::test_decorator_thing": { - "test": "skip_tests.py::test_decorator_thing", + get_absolute_test_id("skip_tests.py::test_decorator_thing", skip_tests_path): { + "test": get_absolute_test_id( + "skip_tests.py::test_decorator_thing", skip_tests_path + ), "outcome": "skipped", "message": None, "traceback": None, "subtest": None, }, - "skip_tests.py::test_decorator_thing_2": { - "test": "skip_tests.py::test_decorator_thing_2", + get_absolute_test_id("skip_tests.py::test_decorator_thing_2", skip_tests_path): { + "test": get_absolute_test_id( + "skip_tests.py::test_decorator_thing_2", skip_tests_path + ), "outcome": "skipped", "message": None, "traceback": None, "subtest": None, }, - "skip_tests.py::TestClass::test_class_function_a": { - "test": "skip_tests.py::TestClass::test_class_function_a", + get_absolute_test_id( + "skip_tests.py::TestClass::test_class_function_a", 
skip_tests_path + ): { + "test": get_absolute_test_id( + "skip_tests.py::TestClass::test_class_function_a", skip_tests_path + ), "outcome": "skipped", "message": None, "traceback": None, "subtest": None, }, - "skip_tests.py::TestClass::test_class_function_b": { - "test": "skip_tests.py::TestClass::test_class_function_b", + get_absolute_test_id( + "skip_tests.py::TestClass::test_class_function_b", skip_tests_path + ): { + "test": get_absolute_test_id( + "skip_tests.py::TestClass::test_class_function_b", skip_tests_path + ), "outcome": "skipped", "message": None, "traceback": None, @@ -229,30 +312,59 @@ # └── test_bottom_folder.py # └── test_bottom_function_t: success # └── test_bottom_function_f: failure +dual_level_nested_folder_top_path = ( + TEST_DATA_PATH / "dual_level_nested_folder" / "test_top_folder.py" +) +dual_level_nested_folder_bottom_path = ( + TEST_DATA_PATH + / "dual_level_nested_folder" + / "nested_folder_one" + / "test_bottom_folder.py" +) dual_level_nested_folder_execution_expected_output = { - "dual_level_nested_folder/test_top_folder.py::test_top_function_t": { - "test": "dual_level_nested_folder/test_top_folder.py::test_top_function_t", + get_absolute_test_id( + "test_top_folder.py::test_top_function_t", dual_level_nested_folder_top_path + ): { + "test": get_absolute_test_id( + "test_top_folder.py::test_top_function_t", dual_level_nested_folder_top_path + ), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "dual_level_nested_folder/test_top_folder.py::test_top_function_f": { - "test": "dual_level_nested_folder/test_top_folder.py::test_top_function_f", + get_absolute_test_id( + "test_top_folder.py::test_top_function_f", dual_level_nested_folder_top_path + ): { + "test": get_absolute_test_id( + "test_top_folder.py::test_top_function_f", dual_level_nested_folder_top_path + ), "outcome": "failure", "message": "ERROR MESSAGE", "traceback": None, "subtest": None, }, - "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t": { - "test": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + get_absolute_test_id( + "nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + dual_level_nested_folder_bottom_path, + ): { + "test": get_absolute_test_id( + "nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + dual_level_nested_folder_bottom_path, + ), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f": { - "test": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + get_absolute_test_id( + "nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + dual_level_nested_folder_bottom_path, + ): { + "test": get_absolute_test_id( + "nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + dual_level_nested_folder_bottom_path, + ), "outcome": "failure", "message": "ERROR MESSAGE", "traceback": None, @@ -266,38 +378,59 @@ # └── folder_a # └── test_nest.py # └── test_function: success + +nested_folder_path = ( + TEST_DATA_PATH / "folder_a" / "folder_b" / "folder_a" / "test_nest.py" +) double_nested_folder_expected_execution_output = { - "folder_a/folder_b/folder_a/test_nest.py::test_function": { - "test": "folder_a/folder_b/folder_a/test_nest.py::test_function", + get_absolute_test_id( + "folder_a/folder_b/folder_a/test_nest.py::test_function", nested_folder_path + ): { + "test": 
get_absolute_test_id(
+            "folder_a/folder_b/folder_a/test_nest.py::test_function", nested_folder_path
+        ),
        "outcome": "success",
        "message": None,
        "traceback": None,
        "subtest": None,
    }
}
-
# This is the expected output for the parametrize_tests.py tests.
# └── parametrize_tests.py
#    └── test_adding[3+5-8]: success
#    └── test_adding[2+4-6]: success
#    └── test_adding[6+9-16]: failure
+parametrize_tests_path = TEST_DATA_PATH / "parametrize_tests.py"
+
parametrize_tests_expected_execution_output = {
-    "parametrize_tests.py::test_adding[3+5-8]": {
-        "test": "parametrize_tests.py::test_adding[3+5-8]",
+    get_absolute_test_id(
+        "parametrize_tests.py::test_adding[3+5-8]", parametrize_tests_path
+    ): {
+        "test": get_absolute_test_id(
+            "parametrize_tests.py::test_adding[3+5-8]", parametrize_tests_path
+        ),
        "outcome": "success",
        "message": None,
        "traceback": None,
        "subtest": None,
    },
-    "parametrize_tests.py::test_adding[2+4-6]": {
-        "test": "parametrize_tests.py::test_adding[2+4-6]",
+    get_absolute_test_id(
+        "parametrize_tests.py::test_adding[2+4-6]", parametrize_tests_path
+    ): {
+        "test": get_absolute_test_id(
+            "parametrize_tests.py::test_adding[2+4-6]", parametrize_tests_path
+        ),
        "outcome": "success",
        "message": None,
        "traceback": None,
        "subtest": None,
    },
-    "parametrize_tests.py::test_adding[6+9-16]": {
-        "test": "parametrize_tests.py::test_adding[6+9-16]",
+    get_absolute_test_id(
+        "parametrize_tests.py::test_adding[6+9-16]", parametrize_tests_path
+    ): {
+        "test": get_absolute_test_id(
+            "parametrize_tests.py::test_adding[6+9-16]", parametrize_tests_path
+        ),
        "outcome": "failure",
        "message": "ERROR MESSAGE",
        "traceback": None,
@@ -309,8 +442,12 @@
# └── parametrize_tests.py
#    └── test_adding[3+5-8]: success
single_parametrize_tests_expected_execution_output = {
-    "parametrize_tests.py::test_adding[3+5-8]": {
-        "test": "parametrize_tests.py::test_adding[3+5-8]",
+    get_absolute_test_id(
+        "parametrize_tests.py::test_adding[3+5-8]", parametrize_tests_path
+    ): {
+        "test": get_absolute_test_id(
+            "parametrize_tests.py::test_adding[3+5-8]", parametrize_tests_path
+        ),
        "outcome": "success",
        "message": None,
        "traceback": None,
@@ -321,9 +458,12 @@
# This is the expected output for the doctest tests.
# └── text_docstring.txt
#    └── text_docstring: success
+doc_test_path = TEST_DATA_PATH / "text_docstring.txt"
doctest_pytest_expected_execution_output = {
-    "text_docstring.txt::text_docstring.txt": {
-        "test": "text_docstring.txt::text_docstring.txt",
+    get_absolute_test_id("text_docstring.txt::text_docstring.txt", doc_test_path): {
+        "test": get_absolute_test_id(
+            "text_docstring.txt::text_docstring.txt", doc_test_path
+        ),
        "outcome": "success",
        "message": None,
        "traceback": None,
@@ -332,68 +472,127 @@
    }
}
# Will run all tests in the cwd that fit the test file naming pattern.
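# That is, the runner is invoked without any test ids, so pytest collects and runs
# everything under the cwd (the "no test ids" expected output defined below).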
+folder_a_path = TEST_DATA_PATH / "folder_a" / "folder_b" / "folder_a" / "test_nest.py" +dual_level_nested_folder_top_path = ( + TEST_DATA_PATH / "dual_level_nested_folder" / "test_top_folder.py" +) +dual_level_nested_folder_bottom_path = ( + TEST_DATA_PATH + / "dual_level_nested_folder" + / "nested_folder_one" + / "test_bottom_folder.py" +) +unittest_folder_add_path = TEST_DATA_PATH / "unittest_folder" / "test_add.py" +unittest_folder_subtract_path = TEST_DATA_PATH / "unittest_folder" / "test_subtract.py" + no_test_ids_pytest_execution_expected_output = { - "folder_a/folder_b/folder_a/test_nest.py::test_function": { - "test": "folder_a/folder_b/folder_a/test_nest.py::test_function", + get_absolute_test_id("test_function", folder_a_path): { + "test": get_absolute_test_id("test_function", folder_a_path), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "dual_level_nested_folder/test_top_folder.py::test_top_function_t": { - "test": "dual_level_nested_folder/test_top_folder.py::test_top_function_t", + get_absolute_test_id("test_top_function_t", dual_level_nested_folder_top_path): { + "test": get_absolute_test_id( + "test_top_function_t", dual_level_nested_folder_top_path + ), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "dual_level_nested_folder/test_top_folder.py::test_top_function_f": { - "test": "dual_level_nested_folder/test_top_folder.py::test_top_function_f", + get_absolute_test_id("test_top_function_f", dual_level_nested_folder_top_path): { + "test": get_absolute_test_id( + "test_top_function_f", dual_level_nested_folder_top_path + ), "outcome": "failure", "message": "ERROR MESSAGE", "traceback": None, "subtest": None, }, - "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t": { - "test": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_t", + get_absolute_test_id( + "test_bottom_function_t", dual_level_nested_folder_bottom_path + ): { + "test": get_absolute_test_id( + "test_bottom_function_t", dual_level_nested_folder_bottom_path + ), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f": { - "test": "dual_level_nested_folder/nested_folder_one/test_bottom_folder.py::test_bottom_function_f", + get_absolute_test_id( + "test_bottom_function_f", dual_level_nested_folder_bottom_path + ): { + "test": get_absolute_test_id( + "test_bottom_function_f", dual_level_nested_folder_bottom_path + ), "outcome": "failure", "message": "ERROR MESSAGE", "traceback": None, "subtest": None, }, - "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers": { - "test": "unittest_folder/test_add.py::TestAddFunction::test_add_negative_numbers", + get_absolute_test_id( + "TestAddFunction::test_add_negative_numbers", unittest_folder_add_path + ): { + "test": get_absolute_test_id( + "TestAddFunction::test_add_negative_numbers", unittest_folder_add_path + ), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, - "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers": { - "test": "unittest_folder/test_add.py::TestAddFunction::test_add_positive_numbers", + get_absolute_test_id( + "TestAddFunction::test_add_positive_numbers", unittest_folder_add_path + ): { + "test": get_absolute_test_id( + "TestAddFunction::test_add_positive_numbers", unittest_folder_add_path + ), "outcome": "success", "message": None, 
"traceback": None, "subtest": None, }, - "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers": { - "test": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_negative_numbers", + get_absolute_test_id( + "TestSubtractFunction::test_subtract_negative_numbers", + unittest_folder_subtract_path, + ): { + "test": get_absolute_test_id( + "TestSubtractFunction::test_subtract_negative_numbers", + unittest_folder_subtract_path, + ), "outcome": "failure", "message": "ERROR MESSAGE", "traceback": None, "subtest": None, }, - "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers": { - "test": "unittest_folder/test_subtract.py::TestSubtractFunction::test_subtract_positive_numbers", + get_absolute_test_id( + "TestSubtractFunction::test_subtract_positive_numbers", + unittest_folder_subtract_path, + ): { + "test": get_absolute_test_id( + "TestSubtractFunction::test_subtract_positive_numbers", + unittest_folder_subtract_path, + ), "outcome": "success", "message": None, "traceback": None, "subtest": None, }, } + +# This is the expected output for the root folder with the config file referenced. +# └── test_a.py +# └── test_a_function: success +test_add_path = TEST_DATA_PATH / "root" / "tests" / "test_a.py" +config_file_pytest_expected_execution_output = { + get_absolute_test_id("tests/test_a.py::test_a_function", test_add_path): { + "test": get_absolute_test_id("tests/test_a.py::test_a_function", test_add_path), + "outcome": "success", + "message": None, + "traceback": None, + "subtest": None, + } +} diff --git a/pythonFiles/tests/pytestadapter/helpers.py b/pythonFiles/tests/pytestadapter/helpers.py index 47b4f75d6d609..7195cfe43ea5b 100644 --- a/pythonFiles/tests/pytestadapter/helpers.py +++ b/pythonFiles/tests/pytestadapter/helpers.py @@ -16,6 +16,13 @@ from typing_extensions import TypedDict +def get_absolute_test_id(test_id: str, testPath: pathlib.Path) -> str: + split_id = test_id.split("::")[1:] + absolute_test_id = "::".join([str(testPath), *split_id]) + print("absolute path", absolute_test_id) + return absolute_test_id + + def create_server( host: str = "127.0.0.1", port: int = 0, @@ -104,6 +111,13 @@ def process_rpc_json(data: str) -> List[Dict[str, Any]]: def runner(args: List[str]) -> Optional[List[Dict[str, Any]]]: + """Run the pytest discovery and return the JSON data from the server.""" + return runner_with_cwd(args, TEST_DATA_PATH) + + +def runner_with_cwd( + args: List[str], path: pathlib.Path +) -> Optional[List[Dict[str, Any]]]: """Run the pytest discovery and return the JSON data from the server.""" process_args: List[str] = [ sys.executable, @@ -134,7 +148,7 @@ def runner(args: List[str]) -> Optional[List[Dict[str, Any]]]: t2 = threading.Thread( target=_run_test_code, - args=(process_args, env, TEST_DATA_PATH, completed), + args=(process_args, env, path, completed), ) t2.start() diff --git a/pythonFiles/tests/pytestadapter/test_discovery.py b/pythonFiles/tests/pytestadapter/test_discovery.py index 5288c7ad769e9..8d785be27c8b8 100644 --- a/pythonFiles/tests/pytestadapter/test_discovery.py +++ b/pythonFiles/tests/pytestadapter/test_discovery.py @@ -7,7 +7,7 @@ import pytest from . 
import expected_discovery_test_output
-from .helpers import TEST_DATA_PATH, runner
+from .helpers import TEST_DATA_PATH, runner, runner_with_cwd


def test_import_error(tmp_path):
@@ -153,3 +153,53 @@ def test_pytest_collect(file, expected_const):
    assert actual["status"] == "success"
    assert actual["cwd"] == os.fspath(TEST_DATA_PATH)
    assert actual["tests"] == expected_const
+
+
+def test_pytest_root_dir():
+    """
+    Test pytest discovery with the command line arg --rootdir specified as a subfolder
+    of the workspace root. Discovery should succeed and test IDs should be relative to
+    the workspace root.
+    """
+    rd = f"--rootdir={TEST_DATA_PATH / 'root' / 'tests'}"
+    actual = runner_with_cwd(
+        [
+            "--collect-only",
+            rd,
+        ],
+        TEST_DATA_PATH / "root",
+    )
+    if actual:
+        actual = actual[0]
+    assert actual
+    assert all(item in actual for item in ("status", "cwd", "tests"))
+    assert actual["status"] == "success"
+    assert actual["cwd"] == os.fspath(TEST_DATA_PATH / "root")
+    assert (
+        actual["tests"]
+        == expected_discovery_test_output.root_with_config_expected_output
+    )
+
+
+def test_pytest_config_file():
+    """
+    Test pytest discovery with the command line arg -c specifying a config file that
+    changes the workspace root. Discovery should succeed and test IDs should be relative
+    to the workspace root.
+    """
+    actual = runner_with_cwd(
+        [
+            "--collect-only",
+            "-c",
+            "tests/pytest.ini",
+        ],
+        TEST_DATA_PATH / "root",
+    )
+    if actual:
+        actual = actual[0]
+    assert actual
+    assert all(item in actual for item in ("status", "cwd", "tests"))
+    assert actual["status"] == "success"
+    assert actual["cwd"] == os.fspath(TEST_DATA_PATH / "root")
+    assert (
+        actual["tests"]
+        == expected_discovery_test_output.root_with_config_expected_output
+    )
diff --git a/pythonFiles/tests/pytestadapter/test_execution.py b/pythonFiles/tests/pytestadapter/test_execution.py
index e3b00386882d3..07354b01709bc 100644
--- a/pythonFiles/tests/pytestadapter/test_execution.py
+++ b/pythonFiles/tests/pytestadapter/test_execution.py
@@ -7,7 +7,50 @@
from tests.pytestadapter import expected_execution_test_output

-from .helpers import TEST_DATA_PATH, runner
+from .helpers import TEST_DATA_PATH, runner, runner_with_cwd
+
+
+def test_config_file():
+    """Test pytest execution when a config file is specified."""
+    args = [
+        "-c",
+        "tests/pytest.ini",
+        str(TEST_DATA_PATH / "root" / "tests" / "test_a.py::test_a_function"),
+    ]
+    new_cwd = TEST_DATA_PATH / "root"
+    actual = runner_with_cwd(args, new_cwd)
+    expected_const = (
+        expected_execution_test_output.config_file_pytest_expected_execution_output
+    )
+    assert actual
+    assert len(actual) == len(expected_const)
+    actual_result_dict = dict()
+    for a in actual:
+        assert all(item in a for item in ("status", "cwd", "result"))
+        assert a["status"] == "success"
+        assert a["cwd"] == os.fspath(new_cwd)
+        actual_result_dict.update(a["result"])
+    assert actual_result_dict == expected_const
+
+
+def test_rootdir_specified():
+    """Test pytest execution when a --rootdir is specified."""
+    rd = f"--rootdir={TEST_DATA_PATH / 'root' / 'tests'}"
+    args = [rd, "tests/test_a.py::test_a_function"]
+    new_cwd = TEST_DATA_PATH / "root"
+    actual = runner_with_cwd(args, new_cwd)
+    expected_const = (
+        expected_execution_test_output.config_file_pytest_expected_execution_output
+    )
+    assert actual
+    assert len(actual) == len(expected_const)
+    actual_result_dict = dict()
+    for a in actual:
+        assert all(item in a for item in ("status", "cwd", "result"))
+        assert a["status"] == "success"
+        assert a["cwd"] == os.fspath(new_cwd)
a["cwd"] == os.fspath(new_cwd) + actual_result_dict.update(a["result"]) + assert actual_result_dict == expected_const def test_syntax_error_execution(tmp_path): diff --git a/pythonFiles/vscode_pytest/__init__.py b/pythonFiles/vscode_pytest/__init__.py index 1ac287a8410a7..49d429662e3a6 100644 --- a/pythonFiles/vscode_pytest/__init__.py +++ b/pythonFiles/vscode_pytest/__init__.py @@ -69,8 +69,7 @@ def pytest_exception_interact(node, call, report): """ # call.excinfo is the captured exception of the call, if it raised as type ExceptionInfo. # call.excinfo.exconly() returns the exception as a string. - # See if it is during discovery or execution. - # if discovery, then add the error to error logs. + # If it is during discovery, then add the error to error logs. if type(report) == pytest.CollectReport: if call.excinfo and call.excinfo.typename != "AssertionError": if report.outcome == "skipped" and "SkipTest" in str(call): @@ -83,11 +82,11 @@ def pytest_exception_interact(node, call, report): report.longreprtext + "\n Check Python Test Logs for more details." ) else: - # if execution, send this data that the given node failed. + # If during execution, send this data that the given node failed. report_value = "error" if call.excinfo.typename == "AssertionError": report_value = "failure" - node_id = str(node.nodeid) + node_id = get_absolute_test_id(node.nodeid, get_node_path(node)) if node_id not in collected_tests_so_far: collected_tests_so_far.append(node_id) item_result = create_test_outcome( @@ -106,6 +105,22 @@ def pytest_exception_interact(node, call, report): ) +def get_absolute_test_id(test_id: str, testPath: pathlib.Path) -> str: + """A function that returns the absolute test id. This is necessary because testIds are relative to the rootdir. + This does not work for our case since testIds when referenced during run time are relative to the instantiation + location. Absolute paths for testIds are necessary for the test tree ensures configurations that change the rootdir + of pytest are handled correctly. + + Keyword arguments: + test_id -- the pytest id of the test which is relative to the rootdir. + testPath -- the path to the file the test is located in, as a pathlib.Path object. + """ + split_id = test_id.split("::")[1:] + absolute_test_id = "::".join([str(testPath), *split_id]) + print("absolute path", absolute_test_id) + return absolute_test_id + + def pytest_keyboard_interrupt(excinfo): """A pytest hook that is called when a keyboard interrupt is raised. @@ -130,7 +145,7 @@ class TestOutcome(Dict): def create_test_outcome( - test: str, + testid: str, outcome: str, message: Union[str, None], traceback: Union[str, None], @@ -138,7 +153,7 @@ def create_test_outcome( ) -> TestOutcome: """A function that creates a TestOutcome object.""" return TestOutcome( - test=test, + test=testid, outcome=outcome, message=message, traceback=traceback, # TODO: traceback @@ -154,6 +169,7 @@ class testRunResultDict(Dict[str, Dict[str, TestOutcome]]): IS_DISCOVERY = False +map_id_to_path = dict() def pytest_load_initial_conftests(early_config, parser, args): @@ -184,17 +200,21 @@ def pytest_report_teststatus(report, config): elif report.failed: report_value = "failure" message = report.longreprtext - node_id = str(report.nodeid) - if node_id not in collected_tests_so_far: - collected_tests_so_far.append(node_id) + node_path = map_id_to_path[report.nodeid] + if not node_path: + node_path = cwd + # Calculate the absolute test id and use this as the ID moving forward. 
+ absolute_node_id = get_absolute_test_id(report.nodeid, node_path) + if absolute_node_id not in collected_tests_so_far: + collected_tests_so_far.append(absolute_node_id) item_result = create_test_outcome( - node_id, + absolute_node_id, report_value, message, traceback, ) collected_test = testRunResultDict() - collected_test[node_id] = item_result + collected_test[absolute_node_id] = item_result execution_post( os.fsdecode(cwd), "success", @@ -211,21 +231,22 @@ def pytest_report_teststatus(report, config): def pytest_runtest_protocol(item, nextitem): + map_id_to_path[item.nodeid] = get_node_path(item) skipped = check_skipped_wrapper(item) if skipped: - node_id = str(item.nodeid) + absolute_node_id = get_absolute_test_id(item.nodeid, get_node_path(item)) report_value = "skipped" cwd = pathlib.Path.cwd() - if node_id not in collected_tests_so_far: - collected_tests_so_far.append(node_id) + if absolute_node_id not in collected_tests_so_far: + collected_tests_so_far.append(absolute_node_id) item_result = create_test_outcome( - node_id, + absolute_node_id, report_value, None, None, ) collected_test = testRunResultDict() - collected_test[node_id] = item_result + collected_test[absolute_node_id] = item_result execution_post( os.fsdecode(cwd), "success", @@ -471,13 +492,14 @@ def create_test_node( test_case_loc: str = ( str(test_case.location[1] + 1) if (test_case.location[1] is not None) else "" ) + absolute_test_id = get_absolute_test_id(test_case.nodeid, get_node_path(test_case)) return { "name": test_case.name, "path": get_node_path(test_case), "lineno": test_case_loc, "type_": "test", - "id_": test_case.nodeid, - "runID": test_case.nodeid, + "id_": absolute_test_id, + "runID": absolute_test_id, } diff --git a/src/client/testing/testController/common/server.ts b/src/client/testing/testController/common/server.ts index f854371ffc35a..661290bf49885 100644 --- a/src/client/testing/testController/common/server.ts +++ b/src/client/testing/testController/common/server.ts @@ -3,10 +3,11 @@ import * as net from 'net'; import * as crypto from 'crypto'; -import { Disposable, Event, EventEmitter } from 'vscode'; +import { Disposable, Event, EventEmitter, TestRun } from 'vscode'; import * as path from 'path'; import { ExecutionFactoryCreateWithEnvironmentOptions, + ExecutionResult, IPythonExecutionFactory, SpawnOptions, } from '../../../common/process/types'; @@ -15,6 +16,7 @@ import { DataReceivedEvent, ITestServer, TestCommandOptions } from './types'; import { ITestDebugLauncher, LaunchOptions } from '../../common/types'; import { UNITTEST_PROVIDER } from '../../common/constants'; import { jsonRPCHeaders, jsonRPCContent, JSONRPC_UUID_HEADER } from './utils'; +import { createDeferred } from '../../../common/utils/async'; export class PythonTestServer implements ITestServer, Disposable { private _onDataReceived: EventEmitter = new EventEmitter(); @@ -140,7 +142,12 @@ export class PythonTestServer implements ITestServer, Disposable { return this._onDataReceived.event; } - async sendCommand(options: TestCommandOptions, runTestIdPort?: string, callback?: () => void): Promise { + async sendCommand( + options: TestCommandOptions, + runTestIdPort?: string, + runInstance?: TestRun, + callback?: () => void, + ): Promise { const { uuid } = options; const pythonPathParts: string[] = process.env.PYTHONPATH?.split(path.delimiter) ?? 
[]; @@ -154,7 +161,7 @@ export class PythonTestServer implements ITestServer, Disposable { }; if (spawnOptions.extraVariables) spawnOptions.extraVariables.RUN_TEST_IDS_PORT = runTestIdPort; - const isRun = !options.testIds; + const isRun = runTestIdPort !== undefined; // Create the Python environment in which to execute the command. const creationOptions: ExecutionFactoryCreateWithEnvironmentOptions = { allowEnvironmentFetchExceptions: false, @@ -195,7 +202,28 @@ export class PythonTestServer implements ITestServer, Disposable { // This means it is running discovery traceLog(`Discovering unittest tests with arguments: ${args}\r\n`); } - await execService.exec(args, spawnOptions); + const deferred = createDeferred>(); + + const result = execService.execObservable(args, spawnOptions); + + runInstance?.token.onCancellationRequested(() => { + result?.proc?.kill(); + }); + + // Take all output from the subprocess and add it to the test output channel. This will be the pytest output. + // Displays output to user and ensure the subprocess doesn't run into buffer overflow. + result?.proc?.stdout?.on('data', (data) => { + spawnOptions?.outputChannel?.append(data); + }); + result?.proc?.stderr?.on('data', (data) => { + spawnOptions?.outputChannel?.append(data); + }); + result?.proc?.on('exit', () => { + traceLog('Exec server closed.', uuid); + deferred.resolve({ stdout: '', stderr: '' }); + callback?.(); + }); + await deferred.promise; } } catch (ex) { this.uuids = this.uuids.filter((u) => u !== uuid); diff --git a/src/client/testing/testController/common/types.ts b/src/client/testing/testController/common/types.ts index d4e54951bfd72..16c0bd0e3cee0 100644 --- a/src/client/testing/testController/common/types.ts +++ b/src/client/testing/testController/common/types.ts @@ -174,7 +174,12 @@ export interface ITestServer { readonly onDataReceived: Event; readonly onRunDataReceived: Event; readonly onDiscoveryDataReceived: Event; - sendCommand(options: TestCommandOptions, runTestIdsPort?: string, callback?: () => void): Promise; + sendCommand( + options: TestCommandOptions, + runTestIdsPort?: string, + runInstance?: TestRun, + callback?: () => void, + ): Promise; serverReady(): Promise; getPort(): number; createUUID(cwd: string): string; diff --git a/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts b/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts index b83224d4161bf..44ab3746dde49 100644 --- a/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts +++ b/src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts @@ -4,13 +4,14 @@ import * as path from 'path'; import { Uri } from 'vscode'; import { ExecutionFactoryCreateWithEnvironmentOptions, + ExecutionResult, IPythonExecutionFactory, SpawnOptions, } from '../../../common/process/types'; import { IConfigurationService, ITestOutputChannel } from '../../../common/types'; import { createDeferred } from '../../../common/utils/async'; import { EXTENSION_ROOT_DIR } from '../../../constants'; -import { traceError, traceVerbose } from '../../../logging'; +import { traceVerbose } from '../../../logging'; import { DataReceivedEvent, DiscoveredTestPayload, @@ -32,27 +33,30 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { async discoverTests(uri: Uri, executionFactory?: IPythonExecutionFactory): Promise { const settings = this.configSettings.getSettings(uri); + const uuid = this.testServer.createUUID(uri.fsPath); const { pytestArgs } = settings.testing; traceVerbose(pytestArgs); - 
const disposable = this.testServer.onDiscoveryDataReceived((e: DataReceivedEvent) => { - // cancelation token ? + const dataReceivedDisposable = this.testServer.onDiscoveryDataReceived((e: DataReceivedEvent) => { this.resultResolver?.resolveDiscovery(JSON.parse(e.data)); }); + const disposeDataReceiver = function (testServer: ITestServer) { + testServer.deleteUUID(uuid); + dataReceivedDisposable.dispose(); + }; try { - await this.runPytestDiscovery(uri, executionFactory); + await this.runPytestDiscovery(uri, uuid, executionFactory); } finally { - disposable.dispose(); + disposeDataReceiver(this.testServer); } // this is only a placeholder to handle function overloading until rewrite is finished const discoveryPayload: DiscoveredTestPayload = { cwd: uri.fsPath, status: 'success' }; return discoveryPayload; } - async runPytestDiscovery(uri: Uri, executionFactory?: IPythonExecutionFactory): Promise { + async runPytestDiscovery(uri: Uri, uuid: string, executionFactory?: IPythonExecutionFactory): Promise { const deferred = createDeferred(); const relativePathToPytest = 'pythonFiles'; const fullPluginPath = path.join(EXTENSION_ROOT_DIR, relativePathToPytest); - const uuid = this.testServer.createUUID(uri.fsPath); const settings = this.configSettings.getSettings(uri); const { pytestArgs } = settings.testing; const cwd = settings.testing.cwd && settings.testing.cwd.length > 0 ? settings.testing.cwd : uri.fsPath; @@ -78,17 +82,23 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter { }; const execService = await executionFactory?.createActivatedEnvironment(creationOptions); // delete UUID following entire discovery finishing. - execService - ?.exec(['-m', 'pytest', '-p', 'vscode_pytest', '--collect-only'].concat(pytestArgs), spawnOptions) - .then(() => { - this.testServer.deleteUUID(uuid); - return deferred.resolve(); - }) - .catch((err) => { - traceError(`Error while trying to run pytest discovery, \n${err}\r\n\r\n`); - this.testServer.deleteUUID(uuid); - return deferred.reject(err); - }); - return deferred.promise; + const deferredExec = createDeferred>(); + const execArgs = ['-m', 'pytest', '-p', 'vscode_pytest', '--collect-only'].concat(pytestArgs); + const result = execService?.execObservable(execArgs, spawnOptions); + + // Take all output from the subprocess and add it to the test output channel. This will be the pytest output. + // Displays output to user and ensure the subprocess doesn't run into buffer overflow. 
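// NOTE (editor): rationale, with an illustrative sketch; not part of this
// patch. A promise-style exec() holds the child's entire output in memory
// (Node's child_process.exec, for example, caps this at a ~1 MB maxBuffer by
// default and errors once it is exceeded), which is the subprocess overflow
// this PR fixes. Streaming via execObservable drains output as it arrives:
//
//     const proc = spawn(pythonPath, args); // hypothetical spawn call
//     proc.stdout.on('data', (d) => channel.append(d.toString()));
//     proc.on('exit', () => deferred.resolve());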
+ result?.proc?.stdout?.on('data', (data) => { + spawnOptions.outputChannel?.append(data); + }); + result?.proc?.stderr?.on('data', (data) => { + spawnOptions.outputChannel?.append(data); + }); + result?.proc?.on('exit', () => { + deferredExec.resolve({ stdout: '', stderr: '' }); + deferred.resolve(); + }); + + await deferredExec.promise; } } diff --git a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts index a75a6089627cd..4a9a57b16fede 100644 --- a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts +++ b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts @@ -15,6 +15,7 @@ import { } from '../common/types'; import { ExecutionFactoryCreateWithEnvironmentOptions, + ExecutionResult, IPythonExecutionFactory, SpawnOptions, } from '../../../common/process/types'; @@ -22,13 +23,7 @@ import { removePositionalFoldersAndFiles } from './arguments'; import { ITestDebugLauncher, LaunchOptions } from '../../common/types'; import { PYTEST_PROVIDER } from '../../common/constants'; import { EXTENSION_ROOT_DIR } from '../../../common/constants'; -import { startTestIdServer } from '../common/utils'; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -// (global as any).EXTENSION_ROOT_DIR = EXTENSION_ROOT_DIR; -/** - * Wrapper Class for pytest test execution.. - */ +import * as utils from '../common/utils'; export class PytestTestExecutionAdapter implements ITestExecutionAdapter { constructor( @@ -48,18 +43,29 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { ): Promise { const uuid = this.testServer.createUUID(uri.fsPath); traceVerbose(uri, testIds, debugBool); - const disposable = this.testServer.onRunDataReceived((e: DataReceivedEvent) => { + const dataReceivedDisposable = this.testServer.onRunDataReceived((e: DataReceivedEvent) => { if (runInstance) { this.resultResolver?.resolveExecution(JSON.parse(e.data), runInstance); } }); - try { - await this.runTestsNew(uri, testIds, uuid, debugBool, executionFactory, debugLauncher); - } finally { - this.testServer.deleteUUID(uuid); - disposable.dispose(); - // confirm with testing that this gets called (it must clean this up) - } + const disposeDataReceiver = function (testServer: ITestServer) { + testServer.deleteUUID(uuid); + dataReceivedDisposable.dispose(); + }; + runInstance?.token.onCancellationRequested(() => { + disposeDataReceiver(this.testServer); + }); + await this.runTestsNew( + uri, + testIds, + uuid, + runInstance, + debugBool, + executionFactory, + debugLauncher, + disposeDataReceiver, + ); + // placeholder until after the rewrite is adopted // TODO: remove after adoption. 
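// NOTE (editor): summary sketch, not part of this patch. Cancellation is now
// wired in two places: here in runTests the run token disposes the data
// receiver, and inside runTestsNew the same token kills the subprocess:
//
//     runInstance?.token.onCancellationRequested(() => {
//         disposeDataReceiver(this.testServer); // stop routing result events
//         result?.proc?.kill();                 // end the pytest subprocess
//     });
//
// (The two handlers live in different scopes in the patch; they are shown
// together here only to make the flow visible.)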
const executionPayload: ExecutionTestPayload = { @@ -74,9 +80,11 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { uri: Uri, testIds: string[], uuid: string, + runInstance?: TestRun, debugBool?: boolean, executionFactory?: IPythonExecutionFactory, debugLauncher?: ITestDebugLauncher, + disposeDataReceiver?: (testServer: ITestServer) => void, ): Promise { const deferred = createDeferred(); const relativePathToPytest = 'pythonFiles'; @@ -124,7 +132,7 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { } traceLog(`Running PYTEST execution for the following test ids: ${testIds}`); - const pytestRunTestIdsPort = await startTestIdServer(testIds); + const pytestRunTestIdsPort = await utils.startTestIdServer(testIds); if (spawnOptions.extraVariables) spawnOptions.extraVariables.RUN_TEST_IDS_PORT = pytestRunTestIdsPort.toString(); @@ -143,6 +151,7 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { traceInfo(`Running DEBUG pytest with arguments: ${testArgs.join(' ')}\r\n`); await debugLauncher!.launchDebugger(launchOptions, () => { deferred.resolve(); + this.testServer.deleteUUID(uuid); }); } else { // combine path to run script with run args @@ -150,7 +159,28 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { const runArgs = [scriptPath, ...testArgs]; traceInfo(`Running pytests with arguments: ${runArgs.join(' ')}\r\n`); - await execService?.exec(runArgs, spawnOptions); + const deferredExec = createDeferred>(); + const result = execService?.execObservable(runArgs, spawnOptions); + + runInstance?.token.onCancellationRequested(() => { + result?.proc?.kill(); + }); + + // Take all output from the subprocess and add it to the test output channel. This will be the pytest output. + // Displays output to user and ensure the subprocess doesn't run into buffer overflow. + result?.proc?.stdout?.on('data', (data) => { + this.outputChannel?.append(data); + }); + result?.proc?.stderr?.on('data', (data) => { + this.outputChannel?.append(data); + }); + + result?.proc?.on('exit', () => { + deferredExec.resolve({ stdout: '', stderr: '' }); + deferred.resolve(); + disposeDataReceiver?.(this.testServer); + }); + await deferredExec.promise; } } catch (ex) { traceError(`Error while running tests: ${testIds}\r\n${ex}\r\n\r\n`); diff --git a/src/client/testing/testController/unittest/testDiscoveryAdapter.ts b/src/client/testing/testController/unittest/testDiscoveryAdapter.ts index 6deca55117ea1..1cbad7ef65ef3 100644 --- a/src/client/testing/testController/unittest/testDiscoveryAdapter.ts +++ b/src/client/testing/testController/unittest/testDiscoveryAdapter.ts @@ -43,15 +43,17 @@ export class UnittestTestDiscoveryAdapter implements ITestDiscoveryAdapter { outChannel: this.outputChannel, }; - const disposable = this.testServer.onDiscoveryDataReceived((e: DataReceivedEvent) => { + const dataReceivedDisposable = this.testServer.onDiscoveryDataReceived((e: DataReceivedEvent) => { this.resultResolver?.resolveDiscovery(JSON.parse(e.data)); }); - try { - await this.callSendCommand(options); - } finally { - this.testServer.deleteUUID(uuid); - disposable.dispose(); - } + const disposeDataReceiver = function (testServer: ITestServer) { + testServer.deleteUUID(uuid); + dataReceivedDisposable.dispose(); + }; + + await this.callSendCommand(options, () => { + disposeDataReceiver(this.testServer); + }); // placeholder until after the rewrite is adopted // TODO: remove after adoption. 
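// NOTE (editor): sketch of the disposal change above, not part of this patch.
// Cleanup moves from a try/finally around callSendCommand to a callback that
// sendCommand invokes once the subprocess has actually exited:
//
//     const disposeDataReceiver = (testServer: ITestServer) => {
//         testServer.deleteUUID(uuid);      // stop routing events for this session
//         dataReceivedDisposable.dispose(); // drop the discovery-data listener
//     };
//     await this.callSendCommand(options, () => disposeDataReceiver(this.testServer));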
const discoveryPayload: DiscoveredTestPayload = { @@ -61,8 +63,8 @@ export class UnittestTestDiscoveryAdapter implements ITestDiscoveryAdapter { return discoveryPayload; } - private async callSendCommand(options: TestCommandOptions): Promise { - await this.testServer.sendCommand(options); + private async callSendCommand(options: TestCommandOptions, callback: () => void): Promise { + await this.testServer.sendCommand(options, undefined, undefined, callback); const discoveryPayload: DiscoveredTestPayload = { cwd: '', status: 'success' }; return discoveryPayload; } diff --git a/src/client/testing/testController/unittest/testExecutionAdapter.ts b/src/client/testing/testController/unittest/testExecutionAdapter.ts index 4cab941c26087..9af9e593c246b 100644 --- a/src/client/testing/testController/unittest/testExecutionAdapter.ts +++ b/src/client/testing/testController/unittest/testExecutionAdapter.ts @@ -37,18 +37,19 @@ export class UnittestTestExecutionAdapter implements ITestExecutionAdapter { runInstance?: TestRun, ): Promise { const uuid = this.testServer.createUUID(uri.fsPath); - const disposable = this.testServer.onRunDataReceived((e: DataReceivedEvent) => { + const disposedDataReceived = this.testServer.onRunDataReceived((e: DataReceivedEvent) => { if (runInstance) { this.resultResolver?.resolveExecution(JSON.parse(e.data), runInstance); } }); - try { - await this.runTestsNew(uri, testIds, uuid, debugBool); - } finally { - this.testServer.deleteUUID(uuid); - disposable.dispose(); - // confirm with testing that this gets called (it must clean this up) - } + const disposeDataReceiver = function (testServer: ITestServer) { + testServer.deleteUUID(uuid); + disposedDataReceived.dispose(); + }; + runInstance?.token.onCancellationRequested(() => { + disposeDataReceiver(this.testServer); + }); + await this.runTestsNew(uri, testIds, uuid, runInstance, debugBool, disposeDataReceiver); const executionPayload: ExecutionTestPayload = { cwd: uri.fsPath, status: 'success', error: '' }; return executionPayload; } @@ -57,7 +58,9 @@ export class UnittestTestExecutionAdapter implements ITestExecutionAdapter { uri: Uri, testIds: string[], uuid: string, + runInstance?: TestRun, debugBool?: boolean, + disposeDataReceiver?: (testServer: ITestServer) => void, ): Promise { const settings = this.configSettings.getSettings(uri); const { unittestArgs } = settings.testing; @@ -80,8 +83,9 @@ export class UnittestTestExecutionAdapter implements ITestExecutionAdapter { const runTestIdsPort = await startTestIdServer(testIds); - await this.testServer.sendCommand(options, runTestIdsPort.toString(), () => { + await this.testServer.sendCommand(options, runTestIdsPort.toString(), runInstance, () => { deferred.resolve(); + disposeDataReceiver?.(this.testServer); }); // placeholder until after the rewrite is adopted // TODO: remove after adoption. 
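NOTE (editor): the adapters above hand startTestIdServer's port to the
subprocess through the RUN_TEST_IDS_PORT environment variable. As a mental
model only (the helper name and payload shape below are assumptions, not this
repo's implementation), the handshake looks roughly like this:

    import * as net from 'net';

    // Listen on an OS-assigned local port and hand the test ids to whichever
    // subprocess connects; the port travels to the child via an env variable.
    function startTestIdServerSketch(testIds: string[]): Promise<number> {
        return new Promise((resolve) => {
            const server = net.createServer((socket) => {
                socket.write(JSON.stringify(testIds)); // hypothetical payload shape
                socket.end();
            });
            server.listen(0, '127.0.0.1', () => {
                resolve((server.address() as net.AddressInfo).port);
            });
        });
    }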
diff --git a/src/test/linters/lint.functional.test.ts b/src/test/linters/lint.functional.test.ts index a3dc70b7c21e2..9887cbc5605af 100644 --- a/src/test/linters/lint.functional.test.ts +++ b/src/test/linters/lint.functional.test.ts @@ -4,7 +4,6 @@ 'use strict'; import * as assert from 'assert'; -import * as childProcess from 'child_process'; import * as fs from 'fs-extra'; import * as os from 'os'; import * as path from 'path'; @@ -780,11 +779,6 @@ suite('Linting Functional Tests', () => { teardown(() => { sinon.restore(); }); - - const pythonPath = childProcess.execSync(`"${PYTHON_PATH}" -c "import sys;print(sys.executable)"`); - - console.log(`Testing linter with python ${pythonPath}`); - // These are integration tests that mock out everything except // the filesystem and process execution. diff --git a/src/test/mocks/helper.ts b/src/test/mocks/helper.ts new file mode 100644 index 0000000000000..24d7a8290b18f --- /dev/null +++ b/src/test/mocks/helper.ts @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +import { Readable } from 'stream'; + +export class FakeReadableStream extends Readable { + _read(_size: unknown): void | null { + // custom reading logic here + this.push(null); // end the stream + } +} diff --git a/src/test/mocks/mockChildProcess.ts b/src/test/mocks/mockChildProcess.ts new file mode 100644 index 0000000000000..a46d66d79ca07 --- /dev/null +++ b/src/test/mocks/mockChildProcess.ts @@ -0,0 +1,239 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { Serializable, SendHandle, MessageOptions } from 'child_process'; +import { EventEmitter } from 'node:events'; +import { Writable, Readable, Pipe } from 'stream'; +import { FakeReadableStream } from './helper'; + +export class MockChildProcess extends EventEmitter { + constructor(spawnfile: string, spawnargs: string[]) { + super(); + this.spawnfile = spawnfile; + this.spawnargs = spawnargs; + this.stdin = new Writable(); + this.stdout = new FakeReadableStream(); + this.stderr = new FakeReadableStream(); + this.channel = null; + this.stdio = [this.stdin, this.stdout, this.stdout, this.stderr, null]; + this.killed = false; + this.connected = false; + this.exitCode = null; + this.signalCode = null; + this.eventMap = new Map(); + } + + stdin: Writable | null; + + stdout: Readable | null; + + stderr: Readable | null; + + eventMap: Map; + + readonly channel?: Pipe | null | undefined; + + readonly stdio: [ + Writable | null, + // stdin + Readable | null, + // stdout + Readable | null, + // stderr + Readable | Writable | null | undefined, + // extra + Readable | Writable | null | undefined, // extra + ]; + + readonly killed: boolean; + + readonly pid?: number | undefined; + + readonly connected: boolean; + + readonly exitCode: number | null; + + readonly signalCode: NodeJS.Signals | null; + + readonly spawnargs: string[]; + + readonly spawnfile: string; + + signal?: NodeJS.Signals | number; + + send(message: Serializable, callback?: (error: Error | null) => void): boolean; + + send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean; + + send( + message: Serializable, + sendHandle?: SendHandle, + options?: MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + + send( + message: Serializable, + _sendHandleOrCallback?: SendHandle | ((error: Error | null) => void), + _optionsOrCallback?: MessageOptions | ((error: Error | null) => void), 
+ _callback?: (error: Error | null) => void, + ): boolean { + // Implementation of the send method + // For example, you might want to emit a 'message' event + this.stdout?.push(message.toString()); + return true; + } + + // eslint-disable-next-line class-methods-use-this + disconnect(): void { + /* noop */ + } + + // eslint-disable-next-line class-methods-use-this + unref(): void { + /* noop */ + } + + // eslint-disable-next-line class-methods-use-this + ref(): void { + /* noop */ + } + + addListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + addListener(event: 'disconnect', listener: () => void): this; + + addListener(event: 'error', listener: (err: Error) => void): this; + + addListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + addListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + + addListener(event: 'spawn', listener: () => void): this; + + addListener(event: string, listener: (...args: any[]) => void): this { + if (this.eventMap.has(event)) { + this.eventMap.get(event).push(listener); + } else { + this.eventMap.set(event, [listener]); + } + return this; + } + + emit(event: 'close', code: number | null, signal: NodeJS.Signals | null): boolean; + + emit(event: 'disconnect'): boolean; + + emit(event: 'error', err: Error): boolean; + + emit(event: 'exit', code: number | null, signal: NodeJS.Signals | null): boolean; + + emit(event: 'message', message: Serializable, sendHandle: SendHandle): boolean; + + emit(event: 'spawn', listener: () => void): boolean; + + emit(event: string | symbol, ...args: unknown[]): boolean { + if (this.eventMap.has(event.toString())) { + this.eventMap.get(event.toString()).forEach((listener: (arg0: unknown) => void) => { + const argsArray = Array.isArray(args) ? 
args : [args]; + listener(argsArray); + }); + } + return true; + } + + on(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + on(event: 'disconnect', listener: () => void): this; + + on(event: 'error', listener: (err: Error) => void): this; + + on(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + on(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + + on(event: 'spawn', listener: () => void): this; + + on(event: string, listener: (...args: any[]) => void): this { + if (this.eventMap.has(event)) { + this.eventMap.get(event).push(listener); + } else { + this.eventMap.set(event, [listener]); + } + return this; + } + + once(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + once(event: 'disconnect', listener: () => void): this; + + once(event: 'error', listener: (err: Error) => void): this; + + once(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + once(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + + once(event: 'spawn', listener: () => void): this; + + once(event: string, listener: (...args: any[]) => void): this { + if (this.eventMap.has(event)) { + this.eventMap.get(event).push(listener); + } else { + this.eventMap.set(event, [listener]); + } + return this; + } + + prependListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + prependListener(event: 'disconnect', listener: () => void): this; + + prependListener(event: 'error', listener: (err: Error) => void): this; + + prependListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + prependListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + + prependListener(event: 'spawn', listener: () => void): this; + + prependListener(event: string, listener: (...args: any[]) => void): this { + if (this.eventMap.has(event)) { + this.eventMap.get(event).push(listener); + } else { + this.eventMap.set(event, [listener]); + } + return this; + } + + prependOnceListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + prependOnceListener(event: 'disconnect', listener: () => void): this; + + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + + prependOnceListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + + prependOnceListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + + prependOnceListener(event: 'spawn', listener: () => void): this; + + prependOnceListener(event: string, listener: (...args: any[]) => void): this { + if (this.eventMap.has(event)) { + this.eventMap.get(event).push(listener); + } else { + this.eventMap.set(event, [listener]); + } + return this; + } + + trigger(event: string): Array { + if (this.eventMap.has(event)) { + return this.eventMap.get(event); + } + return []; + } + + kill(_signal?: NodeJS.Signals | number): boolean { + this.stdout?.destroy(); + return true; + } +} diff --git a/src/test/testing/common/testingAdapter.test.ts b/src/test/testing/common/testingAdapter.test.ts new file mode 100644 index 0000000000000..5c92c7cf3941b --- /dev/null +++ b/src/test/testing/common/testingAdapter.test.ts @@ -0,0 +1,428 @@ +/* eslint-disable 
@typescript-eslint/no-explicit-any */ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. +import { TestRun, Uri } from 'vscode'; +import * as typeMoq from 'typemoq'; +import * as path from 'path'; +import * as assert from 'assert'; +import { PytestTestDiscoveryAdapter } from '../../../client/testing/testController/pytest/pytestDiscoveryAdapter'; +import { ITestResultResolver, ITestServer } from '../../../client/testing/testController/common/types'; +import { PythonTestServer } from '../../../client/testing/testController/common/server'; +import { IPythonExecutionFactory } from '../../../client/common/process/types'; +import { ITestDebugLauncher } from '../../../client/testing/common/types'; +import { IConfigurationService, ITestOutputChannel } from '../../../client/common/types'; +import { IServiceContainer } from '../../../client/ioc/types'; +import { EXTENSION_ROOT_DIR_FOR_TESTS, initialize } from '../../initialize'; +import { traceError, traceLog } from '../../../client/logging'; +import { PytestTestExecutionAdapter } from '../../../client/testing/testController/pytest/pytestExecutionAdapter'; +import { UnittestTestDiscoveryAdapter } from '../../../client/testing/testController/unittest/testDiscoveryAdapter'; +import { UnittestTestExecutionAdapter } from '../../../client/testing/testController/unittest/testExecutionAdapter'; + +suite('End to End Tests: test adapters', () => { + let resultResolver: typeMoq.IMock; + let pythonTestServer: ITestServer; + let pythonExecFactory: IPythonExecutionFactory; + let debugLauncher: ITestDebugLauncher; + let configService: IConfigurationService; + let testOutputChannel: ITestOutputChannel; + let serviceContainer: IServiceContainer; + let workspaceUri: Uri; + const rootPathSmallWorkspace = path.join( + EXTENSION_ROOT_DIR_FOR_TESTS, + 'src', + 'testTestingRootWkspc', + 'smallWorkspace', + ); + const rootPathLargeWorkspace = path.join( + EXTENSION_ROOT_DIR_FOR_TESTS, + 'src', + 'testTestingRootWkspc', + 'largeWorkspace', + ); + suiteSetup(async () => { + serviceContainer = (await initialize()).serviceContainer; + }); + + setup(async () => { + // create objects that were injected + configService = serviceContainer.get(IConfigurationService); + pythonExecFactory = serviceContainer.get(IPythonExecutionFactory); + debugLauncher = serviceContainer.get(ITestDebugLauncher); + testOutputChannel = serviceContainer.get(ITestOutputChannel); + + // create mock resultResolver object + resultResolver = typeMoq.Mock.ofType(); + + // create objects that were not injected + pythonTestServer = new PythonTestServer(pythonExecFactory, debugLauncher); + await pythonTestServer.serverReady(); + }); + test('unittest discovery adapter small workspace', async () => { + // result resolver and saved data for assertions + let actualData: { + status: unknown; + error: string | any[]; + tests: unknown; + }; + resultResolver + .setup((x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceLog(`resolveDiscovery ${data}`); + actualData = data; + return Promise.resolve(); + }); + + // set workspace to test workspace folder and set up settings + workspaceUri = Uri.parse(rootPathSmallWorkspace); + configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py']; + + // run unittest discovery + const discoveryAdapter = new UnittestTestDiscoveryAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + + await 
discoveryAdapter.discoverTests(workspaceUri).finally(() => { + // verification after discovery is complete + resultResolver.verify( + (x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.once(), + ); + + // 1. Check the status is "success" + assert.strictEqual(actualData.status, 'success', "Expected status to be 'success'"); + // 2. Confirm no errors + assert.strictEqual(actualData.error, undefined, "Expected no errors in 'error' field"); + // 3. Confirm tests are found + assert.ok(actualData.tests, 'Expected tests to be present'); + }); + }); + + test('unittest discovery adapter large workspace', async () => { + // result resolver and saved data for assertions + let actualData: { + status: unknown; + error: string | any[]; + tests: unknown; + }; + resultResolver + .setup((x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceLog(`resolveDiscovery ${data}`); + actualData = data; + return Promise.resolve(); + }); + + // set settings to work for the given workspace + workspaceUri = Uri.parse(rootPathLargeWorkspace); + configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py']; + // run discovery + const discoveryAdapter = new UnittestTestDiscoveryAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + + await discoveryAdapter.discoverTests(workspaceUri).finally(() => { + // verification after discovery is complete + resultResolver.verify( + (x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.once(), + ); + + // 1. Check the status is "success" + assert.strictEqual(actualData.status, 'success', "Expected status to be 'success'"); + // 2. Confirm no errors + assert.strictEqual(actualData.error, undefined, "Expected no errors in 'error' field"); + // 3. Confirm tests are found + assert.ok(actualData.tests, 'Expected tests to be present'); + }); + }); + test('pytest discovery adapter small workspace', async () => { + // result resolver and saved data for assertions + let actualData: { + status: unknown; + error: string | any[]; + tests: unknown; + }; + resultResolver + .setup((x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceLog(`resolveDiscovery ${data}`); + actualData = data; + return Promise.resolve(); + }); + // run pytest discovery + const discoveryAdapter = new PytestTestDiscoveryAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + + // set workspace to test workspace folder + workspaceUri = Uri.parse(rootPathSmallWorkspace); + + await discoveryAdapter.discoverTests(workspaceUri, pythonExecFactory).finally(() => { + // verification after discovery is complete + resultResolver.verify( + (x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.once(), + ); + + // 1. Check the status is "success" + assert.strictEqual(actualData.status, 'success', "Expected status to be 'success'"); + // 2. Confirm no errors + assert.strictEqual(actualData.error.length, 0, "Expected no errors in 'error' field"); + // 3. 
Confirm tests are found + assert.ok(actualData.tests, 'Expected tests to be present'); + }); + }); + test('pytest discovery adapter large workspace', async () => { + // result resolver and saved data for assertions + let actualData: { + status: unknown; + error: string | any[]; + tests: unknown; + }; + resultResolver + .setup((x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceLog(`resolveDiscovery ${data}`); + actualData = data; + return Promise.resolve(); + }); + // run pytest discovery + const discoveryAdapter = new PytestTestDiscoveryAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + + // set workspace to test workspace folder + workspaceUri = Uri.parse(rootPathLargeWorkspace); + + await discoveryAdapter.discoverTests(workspaceUri, pythonExecFactory).finally(() => { + // verification after discovery is complete + resultResolver.verify( + (x) => x.resolveDiscovery(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.once(), + ); + + // 1. Check the status is "success" + assert.strictEqual(actualData.status, 'success', "Expected status to be 'success'"); + // 2. Confirm no errors + assert.strictEqual(actualData.error.length, 0, "Expected no errors in 'error' field"); + // 3. Confirm tests are found + assert.ok(actualData.tests, 'Expected tests to be present'); + }); + }); + test('unittest execution adapter small workspace', async () => { + // result resolver and saved data for assertions + let actualData: { + status: unknown; + error: string | any[]; + result: unknown; + }; + resultResolver + .setup((x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceLog(`resolveExecution ${data}`); + actualData = data; + return Promise.resolve(); + }); + + // set workspace to test workspace folder + workspaceUri = Uri.parse(rootPathSmallWorkspace); + configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py']; + // run execution + const executionAdapter = new UnittestTestExecutionAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + const testRun = typeMoq.Mock.ofType(); + testRun + .setup((t) => t.token) + .returns( + () => + ({ + onCancellationRequested: () => undefined, + } as any), + ); + await executionAdapter + .runTests(workspaceUri, ['test_simple.SimpleClass.test_simple_unit'], false, testRun.object) + .finally(() => { + // verification after execution is complete + resultResolver.verify( + (x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.once(), + ); + + // 1. Check the status is "success" + assert.strictEqual(actualData.status, 'success', "Expected status to be 'success'"); + // 2. Confirm tests are found + assert.ok(actualData.result, 'Expected results to be present'); + }); + }); + test('unittest execution adapter large workspace', async () => { + // result resolver and saved data for assertions + resultResolver + .setup((x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceError(`resolveExecution ${data}`); + console.log(`resolveExecution ${data}`); + traceLog(`resolveExecution ${data}`); + // do the following asserts for each time resolveExecution is called, should be called once per test. + // 1. 
Check the status, can be subtest success or failure + assert( + data.status === 'subtest-success' || data.status === 'subtest-failure', + "Expected status to be 'subtest-success' or 'subtest-failure'", + ); + // 2. Confirm tests are found + assert.ok(data.result, 'Expected results to be present'); + return Promise.resolve(); + }); + + // set workspace to test workspace folder + workspaceUri = Uri.parse(rootPathLargeWorkspace); + configService.getSettings(workspaceUri).testing.unittestArgs = ['-s', '.', '-p', '*test*.py']; + + // run unittest execution + const executionAdapter = new UnittestTestExecutionAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + const testRun = typeMoq.Mock.ofType(); + testRun + .setup((t) => t.token) + .returns( + () => + ({ + onCancellationRequested: () => undefined, + } as any), + ); + await executionAdapter + .runTests(workspaceUri, ['test_parameterized_subtest.NumbersTest.test_even'], false, testRun.object) + .finally(() => { + // verification after discovery is complete + resultResolver.verify( + (x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.exactly(200), + ); + }); + }); + test('pytest execution adapter small workspace', async () => { + // result resolver and saved data for assertions + let actualData: { + status: unknown; + error: string | any[]; + result: unknown; + }; + resultResolver + .setup((x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceLog(`resolveExecution ${data}`); + actualData = data; + return Promise.resolve(); + }); + + // set workspace to test workspace folder + workspaceUri = Uri.parse(rootPathSmallWorkspace); + + // run pytest execution + const executionAdapter = new PytestTestExecutionAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + const testRun = typeMoq.Mock.ofType(); + testRun + .setup((t) => t.token) + .returns( + () => + ({ + onCancellationRequested: () => undefined, + } as any), + ); + await executionAdapter + .runTests( + workspaceUri, + [`${rootPathSmallWorkspace}/test_simple.py::test_a`], + false, + testRun.object, + pythonExecFactory, + ) + .finally(() => { + // verification after discovery is complete + resultResolver.verify( + (x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.once(), + ); + + // 1. Check the status is "success" + assert.strictEqual(actualData.status, 'success', "Expected status to be 'success'"); + // 2. Confirm no errors + assert.strictEqual(actualData.error, null, "Expected no errors in 'error' field"); + // 3. Confirm tests are found + assert.ok(actualData.result, 'Expected results to be present'); + }); + }); + test('pytest execution adapter large workspace', async () => { + resultResolver + .setup((x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns((data) => { + traceLog(`resolveExecution ${data}`); + // do the following asserts for each time resolveExecution is called, should be called once per test. + // 1. Check the status is "success" + assert.strictEqual(data.status, 'success', "Expected status to be 'success'"); + // 2. Confirm no errors + assert.strictEqual(data.error, null, "Expected no errors in 'error' field"); + // 3. 
Confirm tests are found + assert.ok(data.result, 'Expected results to be present'); + return Promise.resolve(); + }); + + // set workspace to test workspace folder + workspaceUri = Uri.parse(rootPathLargeWorkspace); + + // generate list of test_ids + const testIds: string[] = []; + for (let i = 0; i < 200; i = i + 1) { + const testId = `${rootPathLargeWorkspace}/test_parameterized_subtest.py::test_odd_even[${i}]`; + testIds.push(testId); + } + + // run pytest execution + const executionAdapter = new PytestTestExecutionAdapter( + pythonTestServer, + configService, + testOutputChannel, + resultResolver.object, + ); + const testRun = typeMoq.Mock.ofType(); + testRun + .setup((t) => t.token) + .returns( + () => + ({ + onCancellationRequested: () => undefined, + } as any), + ); + await executionAdapter.runTests(workspaceUri, testIds, false, testRun.object, pythonExecFactory).finally(() => { + // resolve execution should be called 200 times since there are 200 tests run. + resultResolver.verify( + (x) => x.resolveExecution(typeMoq.It.isAny(), typeMoq.It.isAny()), + typeMoq.Times.exactly(200), + ); + }); + }); +}); diff --git a/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts b/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts index 18212b2d1032e..8ba7dd9a6f004 100644 --- a/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts +++ b/src/test/testing/testController/pytest/pytestDiscoveryAdapter.unit.test.ts @@ -5,6 +5,7 @@ import * as assert from 'assert'; import { Uri } from 'vscode'; import * as typeMoq from 'typemoq'; import * as path from 'path'; +import { Observable } from 'rxjs/Observable'; import { IConfigurationService, ITestOutputChannel } from '../../../../client/common/types'; import { PytestTestDiscoveryAdapter } from '../../../../client/testing/testController/pytest/pytestDiscoveryAdapter'; import { ITestServer } from '../../../../client/testing/testController/common/types'; @@ -12,9 +13,11 @@ import { IPythonExecutionFactory, IPythonExecutionService, SpawnOptions, + Output, } from '../../../../client/common/process/types'; -import { createDeferred, Deferred } from '../../../../client/common/utils/async'; import { EXTENSION_ROOT_DIR } from '../../../../client/constants'; +import { MockChildProcess } from '../../../mocks/mockChildProcess'; +import { Deferred, createDeferred } from '../../../../client/common/utils/async'; suite('pytest test discovery adapter', () => { let testServer: typeMoq.IMock; @@ -29,6 +32,7 @@ suite('pytest test discovery adapter', () => { let expectedPath: string; let uri: Uri; let expectedExtraVariables: Record; + let mockProc: MockChildProcess; setup(() => { const mockExtensionRootDir = typeMoq.Mock.ofType(); @@ -66,32 +70,46 @@ suite('pytest test discovery adapter', () => { }), } as unknown) as IConfigurationService; - // set up exec factory - execFactory = typeMoq.Mock.ofType(); - execFactory - .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) - .returns(() => Promise.resolve(execService.object)); - - // set up exec service + // set up exec service with child process + mockProc = new MockChildProcess('', ['']); execService = typeMoq.Mock.ofType(); - deferred = createDeferred(); + const output = new Observable>(() => { + /* no op */ + }); execService - .setup((x) => x.exec(typeMoq.It.isAny(), typeMoq.It.isAny())) - .returns(() => { - deferred.resolve(); - return Promise.resolve({ stdout: '{}' }); - }); + .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) 
+ .returns(() => ({ + proc: mockProc, + out: output, + dispose: () => { + /* no-body */ + }, + })); execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); outputChannel = typeMoq.Mock.ofType(); }); test('Discovery should call exec with correct basic args', async () => { + // set up exec mock + deferred = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred.resolve(); + return Promise.resolve(execService.object); + }); + adapter = new PytestTestDiscoveryAdapter(testServer.object, configService, outputChannel.object); - await adapter.discoverTests(uri, execFactory.object); - const expectedArgs = ['-m', 'pytest', '-p', 'vscode_pytest', '--collect-only', '.']; + adapter.discoverTests(uri, execFactory.object); + // add in await and trigger + await deferred.promise; + mockProc.trigger('close'); + // verification + const expectedArgs = ['-m', 'pytest', '-p', 'vscode_pytest', '--collect-only', '.']; execService.verify( (x) => - x.exec( + x.execObservable( expectedArgs, typeMoq.It.is((options) => { assert.deepEqual(options.extraVariables, expectedExtraVariables); @@ -108,16 +126,34 @@ suite('pytest test discovery adapter', () => { const expectedPathNew = path.join('other', 'path'); const configServiceNew: IConfigurationService = ({ getSettings: () => ({ - testing: { pytestArgs: ['.', 'abc', 'xyz'], cwd: expectedPathNew }, + testing: { + pytestArgs: ['.', 'abc', 'xyz'], + cwd: expectedPathNew, + }, }), } as unknown) as IConfigurationService; + // set up exec mock + deferred = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred.resolve(); + return Promise.resolve(execService.object); + }); + adapter = new PytestTestDiscoveryAdapter(testServer.object, configServiceNew, outputChannel.object); - await adapter.discoverTests(uri, execFactory.object); + adapter.discoverTests(uri, execFactory.object); + // add in await and trigger + await deferred.promise; + mockProc.trigger('close'); + + // verification const expectedArgs = ['-m', 'pytest', '-p', 'vscode_pytest', '--collect-only', '.', 'abc', 'xyz']; execService.verify( (x) => - x.exec( + x.execObservable( expectedArgs, typeMoq.It.is((options) => { assert.deepEqual(options.extraVariables, expectedExtraVariables); diff --git a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts index 44116fd753b0c..43b763f56e6cf 100644 --- a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts +++ b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts @@ -6,11 +6,13 @@ import { TestRun, Uri } from 'vscode'; import * as typeMoq from 'typemoq'; import * as sinon from 'sinon'; import * as path from 'path'; +import { Observable } from 'rxjs/Observable'; import { IConfigurationService, ITestOutputChannel } from '../../../../client/common/types'; import { ITestServer } from '../../../../client/testing/testController/common/types'; import { IPythonExecutionFactory, IPythonExecutionService, + Output, SpawnOptions, } from '../../../../client/common/process/types'; import { createDeferred, Deferred } from '../../../../client/common/utils/async'; @@ -18,6 +20,7 @@ import { PytestTestExecutionAdapter } from '../../../../client/testing/testContr import { ITestDebugLauncher, LaunchOptions } from 
'../../../../client/testing/common/types'; import * as util from '../../../../client/testing/testController/common/utils'; import { EXTENSION_ROOT_DIR } from '../../../../client/constants'; +import { MockChildProcess } from '../../../mocks/mockChildProcess'; suite('pytest test execution adapter', () => { let testServer: typeMoq.IMock; @@ -29,8 +32,8 @@ suite('pytest test execution adapter', () => { let debugLauncher: typeMoq.IMock; (global as any).EXTENSION_ROOT_DIR = EXTENSION_ROOT_DIR; let myTestPath: string; - let startTestIdServerStub: sinon.SinonStub; - + let mockProc: MockChildProcess; + let utilsStub: sinon.SinonStub; setup(() => { testServer = typeMoq.Mock.ofType(); testServer.setup((t) => t.getPort()).returns(() => 12345); @@ -47,8 +50,24 @@ suite('pytest test execution adapter', () => { }), isTestExecution: () => false, } as unknown) as IConfigurationService; - execFactory = typeMoq.Mock.ofType(); + + // set up exec service with child process + mockProc = new MockChildProcess('', ['']); + const output = new Observable>(() => { + /* no op */ + }); execService = typeMoq.Mock.ofType(); + execService + .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(() => ({ + proc: mockProc, + out: output, + dispose: () => { + /* no-body */ + }, + })); + execFactory = typeMoq.Mock.ofType(); + utilsStub = sinon.stub(util, 'startTestIdServer'); debugLauncher = typeMoq.Mock.ofType(); execFactory .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) @@ -66,7 +85,6 @@ suite('pytest test execution adapter', () => { deferred.resolve(); return Promise.resolve(); }); - startTestIdServerStub = sinon.stub(util, 'startTestIdServer').returns(Promise.resolve(54321)); execFactory.setup((p) => ((p as unknown) as any).then).returns(() => undefined); execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined); @@ -77,10 +95,25 @@ suite('pytest test execution adapter', () => { sinon.restore(); }); test('startTestIdServer called with correct testIds', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve(54321); + }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); const uri = Uri.file(myTestPath); const uuid = 'uuid123'; testServer - .setup((t) => t.onDiscoveryDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) + .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) .returns(() => ({ dispose: () => { /* no-body */ @@ -88,19 +121,38 @@ suite('pytest test execution adapter', () => { })); testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); const outputChannel = typeMoq.Mock.ofType(); - const testRun = typeMoq.Mock.ofType(); adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object); - const testIds = ['test1id', 'test2id']; - await adapter.runTests(uri, testIds, false, testRun.object, execFactory.object); + adapter.runTests(uri, testIds, false, testRun.object, execFactory.object); - sinon.assert.calledWithExactly(startTestIdServerStub, testIds); + // add in await and trigger + await deferred2.promise; + await deferred3.promise; + mockProc.trigger('close'); 
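// NOTE (editor): the sequencing pattern used above, condensed; not part of
// this patch. Because the adapter now resolves on subprocess exit instead of
// an exec() promise, the test starts the run without awaiting it, waits on
// deferreds resolved from inside the mocks, then simulates the process
// closing before asserting:
//
//     adapter.runTests(uri, testIds, false, testRun.object, execFactory.object); // not awaited
//     await deferred2.promise;   // createActivatedEnvironment was called
//     await deferred3.promise;   // startTestIdServer stub was called
//     mockProc.trigger('close'); // simulate the child process ending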
+ + // assert + sinon.assert.calledWithExactly(utilsStub, testIds); }); test('pytest execution called with correct args', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve(54321); + }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); const uri = Uri.file(myTestPath); const uuid = 'uuid123'; testServer - .setup((t) => t.onDiscoveryDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) + .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) .returns(() => ({ dispose: () => { /* no-body */ @@ -108,9 +160,12 @@ suite('pytest test execution adapter', () => { })); testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); const outputChannel = typeMoq.Mock.ofType(); - const testRun = typeMoq.Mock.ofType(); adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object); - await adapter.runTests(uri, [], false, testRun.object, execFactory.object); + adapter.runTests(uri, [], false, testRun.object, execFactory.object); + + await deferred2.promise; + await deferred3.promise; + mockProc.trigger('close'); const pathToPythonFiles = path.join(EXTENSION_ROOT_DIR, 'pythonFiles'); const pathToPythonScript = path.join(pathToPythonFiles, 'vscode_pytest', 'run_pytest_script.py'); @@ -123,7 +178,7 @@ suite('pytest test execution adapter', () => { // execService.verify((x) => x.exec(expectedArgs, typeMoq.It.isAny()), typeMoq.Times.once()); execService.verify( (x) => - x.exec( + x.execObservable( expectedArgs, typeMoq.It.is((options) => { assert.equal(options.extraVariables?.PYTHONPATH, expectedExtraVariables.PYTHONPATH); @@ -139,6 +194,21 @@ suite('pytest test execution adapter', () => { ); }); test('pytest execution respects settings.testing.cwd when present', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve(54321); + }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); const newCwd = path.join('new', 'path'); configService = ({ getSettings: () => ({ @@ -149,7 +219,7 @@ suite('pytest test execution adapter', () => { const uri = Uri.file(myTestPath); const uuid = 'uuid123'; testServer - .setup((t) => t.onDiscoveryDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) + .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny())) .returns(() => ({ dispose: () => { /* no-body */ @@ -157,9 +227,12 @@ suite('pytest test execution adapter', () => { })); testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid); const outputChannel = typeMoq.Mock.ofType(); - const testRun = typeMoq.Mock.ofType(); adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object); - await adapter.runTests(uri, [], false, testRun.object, execFactory.object); + 
+        adapter.runTests(uri, [], false, testRun.object, execFactory.object);
+
+        await deferred2.promise;
+        await deferred3.promise;
+        mockProc.trigger('close');
 
         const pathToPythonFiles = path.join(EXTENSION_ROOT_DIR, 'pythonFiles');
         const pathToPythonScript = path.join(pathToPythonFiles, 'vscode_pytest', 'run_pytest_script.py');
@@ -172,7 +245,7 @@ suite('pytest test execution adapter', () => {
 
         execService.verify(
             (x) =>
-                x.exec(
+                x.execObservable(
                     expectedArgs,
                     typeMoq.It.is((options) => {
                         assert.equal(options.extraVariables?.PYTHONPATH, expectedExtraVariables.PYTHONPATH);
@@ -188,10 +261,17 @@ suite('pytest test execution adapter', () => {
         );
     });
     test('Debug launched correctly for pytest', async () => {
+        const deferred3 = createDeferred();
+        utilsStub.callsFake(() => {
+            deferred3.resolve();
+            return Promise.resolve(54321);
+        });
+        const testRun = typeMoq.Mock.ofType<TestRun>();
+        testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any));
         const uri = Uri.file(myTestPath);
         const uuid = 'uuid123';
         testServer
-            .setup((t) => t.onDiscoveryDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny()))
+            .setup((t) => t.onRunDataReceived(typeMoq.It.isAny(), typeMoq.It.isAny()))
             .returns(() => ({
                 dispose: () => {
                     /* no-body */
@@ -199,9 +279,9 @@ suite('pytest test execution adapter', () => {
             }));
         testServer.setup((t) => t.createUUID(typeMoq.It.isAny())).returns(() => uuid);
         const outputChannel = typeMoq.Mock.ofType<ITestOutputChannel>();
-        const testRun = typeMoq.Mock.ofType<TestRun>();
         adapter = new PytestTestExecutionAdapter(testServer.object, configService, outputChannel.object);
         await adapter.runTests(uri, [], true, testRun.object, execFactory.object, debugLauncher.object);
+        await deferred3.promise;
         debugLauncher.verify(
             (x) =>
                 x.launchDebugger(
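Both suites in this patch import MockChildProcess from src/test/mocks/mockChildProcess.ts, a file this PR adds but whose body is not shown in this excerpt. An EventEmitter-backed stand-in is enough to follow the tests; the sketch below is hypothetical, with only the trigger('close') behavior assumed from the usage above:

    import { EventEmitter } from 'events';

    // Hypothetical stand-in for the mock added by this patch; the real
    // implementation lives in src/test/mocks/mockChildProcess.ts.
    class MockChildProcessSketch extends EventEmitter {
        constructor(public spawnfile: string, public spawnargs: string[]) {
            super();
        }

        // Tests call trigger('close') to simulate the subprocess exiting,
        // which releases any code waiting on the 'close' event.
        trigger(event: 'close' | 'exit' | 'error'): void {
            this.emit(event);
        }
    }
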
diff --git a/src/test/testing/testController/server.unit.test.ts b/src/test/testing/testController/server.unit.test.ts
index 1131c26c64449..53c2b72e40f78 100644
--- a/src/test/testing/testController/server.unit.test.ts
+++ b/src/test/testing/testController/server.unit.test.ts
@@ -1,15 +1,24 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
+/* eslint-disable @typescript-eslint/no-explicit-any */
 import * as assert from 'assert';
 import * as net from 'net';
 import * as sinon from 'sinon';
 import * as crypto from 'crypto';
 import { OutputChannel, Uri } from 'vscode';
-import { IPythonExecutionFactory, IPythonExecutionService, SpawnOptions } from '../../../client/common/process/types';
+import { Observable } from 'rxjs';
+import * as typeMoq from 'typemoq';
+import {
+    IPythonExecutionFactory,
+    IPythonExecutionService,
+    Output,
+    SpawnOptions,
+} from '../../../client/common/process/types';
 import { PythonTestServer } from '../../../client/testing/testController/common/server';
 import { ITestDebugLauncher } from '../../../client/testing/common/types';
-import { createDeferred } from '../../../client/common/utils/async';
+import { Deferred, createDeferred } from '../../../client/common/utils/async';
+import { MockChildProcess } from '../../mocks/mockChildProcess';
 
 suite('Python Test Server', () => {
     const fakeUuid = 'fake-uuid';
@@ -18,10 +27,12 @@ suite('Python Test Server', () => {
     let stubExecutionService: IPythonExecutionService;
     let server: PythonTestServer;
     let sandbox: sinon.SinonSandbox;
-    let execArgs: string[];
-    let spawnOptions: SpawnOptions;
     let v4Stub: sinon.SinonStub;
     let debugLauncher: ITestDebugLauncher;
+    let mockProc: MockChildProcess;
+    let execService: typeMoq.IMock<IPythonExecutionService>;
+    let deferred: Deferred<void>;
+    let execFactory = typeMoq.Mock.ofType<IPythonExecutionFactory>();
 
     setup(() => {
         sandbox = sinon.createSandbox();
@@ -29,27 +40,42 @@ suite('Python Test Server', () => {
         v4Stub.returns(fakeUuid);
 
         stubExecutionService = ({
-            exec: (args: string[], spawnOptionsProvided: SpawnOptions) => {
-                execArgs = args;
-                spawnOptions = spawnOptionsProvided;
-                return Promise.resolve({ stdout: '', stderr: '' });
-            },
+            execObservable: () => Promise.resolve({ stdout: '', stderr: '' }),
         } as unknown) as IPythonExecutionService;
 
         stubExecutionFactory = ({
             createActivatedEnvironment: () => Promise.resolve(stubExecutionService),
         } as unknown) as IPythonExecutionFactory;
+
+        // set up exec service with child process
+        mockProc = new MockChildProcess('', ['']);
+        execService = typeMoq.Mock.ofType<IPythonExecutionService>();
+        const output = new Observable<Output<string>>(() => {
+            /* no op */
+        });
+        execService
+            .setup((x) => x.execObservable(typeMoq.It.isAny(), typeMoq.It.isAny()))
+            .returns(() => ({
+                proc: mockProc,
+                out: output,
+                dispose: () => {
+                    /* no-body */
+                },
+            }));
+        execService.setup((p) => ((p as unknown) as any).then).returns(() => undefined);
     });
 
     teardown(() => {
         sandbox.restore();
-        execArgs = [];
         server.dispose();
     });
 
     test('sendCommand should add the port to the command being sent and add the correct extra spawn variables', async () => {
         const options = {
-            command: { script: 'myscript', args: ['-foo', 'foo'] },
+            command: {
+                script: 'myscript',
+                args: ['-foo', 'foo'],
+            },
             workspaceFolder: Uri.file('/foo/bar'),
             cwd: '/foo/bar',
             uuid: fakeUuid,
@@ -59,17 +85,31 @@ suite('Python Test Server', () => {
             outputChannel: undefined,
             token: undefined,
             throwOnStdErr: true,
-            extraVariables: { PYTHONPATH: '/foo/bar', RUN_TEST_IDS_PORT: '56789' },
+            extraVariables: {
+                PYTHONPATH: '/foo/bar',
+                RUN_TEST_IDS_PORT: '56789',
+            },
         } as SpawnOptions;
+        const deferred2 = createDeferred();
+        execFactory = typeMoq.Mock.ofType<IPythonExecutionFactory>();
+        execFactory
+            .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny()))
+            .returns(() => {
+                deferred2.resolve();
+                return Promise.resolve(execService.object);
+            });
 
-        server = new PythonTestServer(stubExecutionFactory, debugLauncher);
+        server = new PythonTestServer(execFactory.object, debugLauncher);
         await server.serverReady();
-        await server.sendCommand(options, '56789');
-        const port = server.getPort();
+        server.sendCommand(options, '56789');
+        // add in await and trigger
+        await deferred2.promise;
+        mockProc.trigger('close');
 
-        assert.deepStrictEqual(execArgs, ['myscript', '--port', `${port}`, '--uuid', fakeUuid, '-foo', 'foo']);
-        assert.deepStrictEqual(spawnOptions, expectedSpawnOptions);
+        const port = server.getPort();
+        const expectedArgs = ['myscript', '--port', `${port}`, '--uuid', fakeUuid, '-foo', 'foo'];
+        execService.verify((x) => x.execObservable(expectedArgs, expectedSpawnOptions), typeMoq.Times.once());
     });
 
     test('sendCommand should write to an output channel if it is provided as an option', async () => {
@@ -80,17 +120,31 @@ suite('Python Test Server', () => {
             },
         } as OutputChannel;
         const options = {
-            command: { script: 'myscript', args: ['-foo', 'foo'] },
+            command: {
+                script: 'myscript',
+                args: ['-foo', 'foo'],
+            },
             workspaceFolder: Uri.file('/foo/bar'),
             cwd: '/foo/bar',
             uuid: fakeUuid,
             outChannel,
         };
+        deferred = createDeferred();
+        execFactory = typeMoq.Mock.ofType<IPythonExecutionFactory>();
+        execFactory
+            .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny()))
+            .returns(() => {
+                deferred.resolve();
+                return Promise.resolve(execService.object);
+            });
 
-        server = new PythonTestServer(stubExecutionFactory, debugLauncher);
+        server = new PythonTestServer(execFactory.object, debugLauncher);
         await server.serverReady();
-        await server.sendCommand(options);
+        server.sendCommand(options);
+        // add in await and trigger
+        await deferred.promise;
+        mockProc.trigger('close');
 
         const port = server.getPort();
         const expected = ['python', 'myscript', '--port', `${port}`, '--uuid', fakeUuid, '-foo', 'foo'].join(' ');
@@ -99,13 +153,12 @@ suite('Python Test Server', () => {
     });
 
     test('If script execution fails during sendCommand, an onDataReceived event should be fired with the "error" status', async () => {
-        let eventData: { status: string; errors: string[] };
+        let eventData: { status: string; errors: string[] } | undefined;
         stubExecutionService = ({
-            exec: () => {
+            execObservable: () => {
                 throw new Error('Failed to execute');
             },
         } as unknown) as IPythonExecutionService;
-
         const options = {
             command: { script: 'myscript', args: ['-foo', 'foo'] },
             workspaceFolder: Uri.file('/foo/bar'),
@@ -122,30 +175,43 @@ suite('Python Test Server', () => {
 
         await server.sendCommand(options);
 
-        assert.deepStrictEqual(eventData!.status, 'error');
-        assert.deepStrictEqual(eventData!.errors, ['Failed to execute']);
+        assert.notEqual(eventData, undefined);
+        assert.deepStrictEqual(eventData?.status, 'error');
+        assert.deepStrictEqual(eventData?.errors, ['Failed to execute']);
    });
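The deferred-inside-stub trick above is what keeps these tests deterministic: sendCommand no longer settles when exec returns, so awaiting it directly would hang until the subprocess closes. Resolving a deferred from inside the stubbed factory marks the exact moment the server reaches its spawn path. A condensed sketch of the arrangement, reusing names from this diff (the arrangement itself is illustrative, not part of the patch):

    // Resolve a deferred when the spawn path is reached, so the test can
    // await that moment instead of awaiting sendCommand itself.
    const spawned = createDeferred<void>();
    const factoryMock = typeMoq.Mock.ofType<IPythonExecutionFactory>();
    factoryMock
        .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny()))
        .returns(() => {
            spawned.resolve(); // signal: subprocess path reached
            return Promise.resolve(execService.object); // hand back the canned service
        });
    // later: server.sendCommand(options); await spawned.promise; mockProc.trigger('close');
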
     test('If the server receives malformed data, it should display a log message, and not fire an event', async () => {
         let eventData: string | undefined;
         const client = new net.Socket();
-        const deferred = createDeferred();
-
         const options = {
             command: { script: 'myscript', args: ['-foo', 'foo'] },
             workspaceFolder: Uri.file('/foo/bar'),
             cwd: '/foo/bar',
             uuid: fakeUuid,
         };
-
-        stubExecutionService = ({
-            exec: async () => {
+        mockProc = new MockChildProcess('', ['']);
+        const output = new Observable<Output<string>>(() => {
+            /* no op */
+        });
+        const stubExecutionService2 = ({
+            execObservable: () => {
                 client.connect(server.getPort());
-                return Promise.resolve({ stdout: '', stderr: '' });
+                return {
+                    proc: mockProc,
+                    out: output,
+                    dispose: () => {
+                        /* no-body */
+                    },
+                };
             },
         } as unknown) as IPythonExecutionService;
-        server = new PythonTestServer(stubExecutionFactory, debugLauncher);
+        const stubExecutionFactory2 = ({
+            createActivatedEnvironment: () => Promise.resolve(stubExecutionService2),
+        } as unknown) as IPythonExecutionFactory;
+
+        deferred = createDeferred();
+        server = new PythonTestServer(stubExecutionFactory2, debugLauncher);
         await server.serverReady();
         server.onDataReceived(({ data }) => {
             eventData = data;
@@ -161,16 +227,17 @@ suite('Python Test Server', () => {
             console.log('Socket connection error:', error);
         });
 
-        await server.sendCommand(options);
+        server.sendCommand(options);
+        // add in await and trigger
         await deferred.promise;
+        mockProc.trigger('close');
+
         assert.deepStrictEqual(eventData, '');
     });
 
     test('If the server doesnt recognize the UUID it should ignore it', async () => {
         let eventData: string | undefined;
         const client = new net.Socket();
-        const deferred = createDeferred();
-
         const options = {
             command: { script: 'myscript', args: ['-foo', 'foo'] },
             workspaceFolder: Uri.file('/foo/bar'),
             cwd: '/foo/bar',
             uuid: fakeUuid,
         };
 
-        stubExecutionService = ({
-            exec: async () => {
+        deferred = createDeferred();
+        mockProc = new MockChildProcess('', ['']);
+        const output = new Observable<Output<string>>(() => {
+            /* no op */
+        });
+        const stubExecutionService2 = ({
+            execObservable: () => {
                 client.connect(server.getPort());
-                return Promise.resolve({ stdout: '', stderr: '' });
+                return {
+                    proc: mockProc,
+                    out: output,
+                    dispose: () => {
+                        /* no-body */
+                    },
+                };
             },
         } as unknown) as IPythonExecutionService;
 
-        server = new PythonTestServer(stubExecutionFactory, debugLauncher);
+        const stubExecutionFactory2 = ({
+            createActivatedEnvironment: () => Promise.resolve(stubExecutionService2),
+        } as unknown) as IPythonExecutionFactory;
+        server = new PythonTestServer(stubExecutionFactory2, debugLauncher);
         await server.serverReady();
         server.onDataReceived(({ data }) => {
             eventData = data;
@@ -201,7 +282,7 @@ suite('Python Test Server', () => {
             console.log('Socket connection error:', error);
         });
 
-        await server.sendCommand(options);
+        server.sendCommand(options);
         await deferred.promise;
         assert.deepStrictEqual(eventData, '');
     });
@@ -212,23 +293,34 @@ suite('Python Test Server', () => {
     test('Error if payload does not have a content length header', async () => {
         let eventData: string | undefined;
         const client = new net.Socket();
-        const deferred = createDeferred();
-
         const options = {
             command: { script: 'myscript', args: ['-foo', 'foo'] },
             workspaceFolder: Uri.file('/foo/bar'),
             cwd: '/foo/bar',
             uuid: fakeUuid,
         };
-
-        stubExecutionService = ({
-            exec: async () => {
+        deferred = createDeferred();
+        mockProc = new MockChildProcess('', ['']);
+        const output = new Observable<Output<string>>(() => {
+            /* no op */
+        });
+        const stubExecutionService2 = ({
+            execObservable: () => {
                 client.connect(server.getPort());
-                return Promise.resolve({ stdout: '', stderr: '' });
+                return {
+                    proc: mockProc,
+                    out: output,
+                    dispose: () => {
+                        /* no-body */
+                    },
+                };
             },
         } as unknown) as IPythonExecutionService;
-        server = new PythonTestServer(stubExecutionFactory, debugLauncher);
+        const stubExecutionFactory2 = ({
+            createActivatedEnvironment: () => Promise.resolve(stubExecutionService2),
+        } as unknown) as IPythonExecutionFactory;
+        server = new PythonTestServer(stubExecutionFactory2, debugLauncher);
         await server.serverReady();
         server.onDataReceived(({ data }) => {
             eventData = data;
@@ -244,7 +336,7 @@ suite('Python Test Server', () => {
             console.log('Socket connection error:', error);
         });
 
-        await server.sendCommand(options);
+        server.sendCommand(options);
         await deferred.promise;
         assert.deepStrictEqual(eventData, '');
     });
@@ -267,7 +359,6 @@ Request-uuid: UUID_HERE
         // Your test logic here
         let eventData: string | undefined;
         const client = new net.Socket();
-        const deferred = createDeferred();
 
         const options = {
             command: { script: 'myscript', args: ['-foo', 'foo'] },
             workspaceFolder: Uri.file('/foo/bar'),
@@ -275,15 +366,28 @@ Request-uuid: UUID_HERE
             cwd: '/foo/bar',
             uuid: fakeUuid,
         };
-
-        stubExecutionService = ({
-            exec: async () => {
+        deferred = createDeferred();
+        mockProc = new MockChildProcess('', ['']);
+        const output = new Observable<Output<string>>(() => {
+            /* no op */
+        });
+        const stubExecutionService2 = ({
+            execObservable: () => {
                 client.connect(server.getPort());
-                return Promise.resolve({ stdout: '', stderr: '' });
+                return {
+                    proc: mockProc,
+                    out: output,
+                    dispose: () => {
+                        /* no-body */
+                    },
+                };
             },
         } as unknown) as IPythonExecutionService;
+        const stubExecutionFactory2 = ({
+            createActivatedEnvironment: () => Promise.resolve(stubExecutionService2),
+        } as unknown) as IPythonExecutionFactory;
 
-        server = new PythonTestServer(stubExecutionFactory, debugLauncher);
+        server = new PythonTestServer(stubExecutionFactory2, debugLauncher);
         await server.serverReady();
         const uuid = server.createUUID();
         payload = payload.replace('UUID_HERE', uuid);
@@ -301,7 +405,7 @@ Request-uuid: UUID_HERE
             console.log('Socket connection error:', error);
         });
 
-        await server.sendCommand(options);
+        server.sendCommand(options);
         await deferred.promise;
         assert.deepStrictEqual(eventData, expectedResult);
     });
@@ -310,8 +414,29 @@ Request-uuid: UUID_HERE
     test('Calls run resolver if the result header is in the payload', async () => {
         let eventData: string | undefined;
         const client = new net.Socket();
-        const deferred = createDeferred();
+        deferred = createDeferred();
+        mockProc = new MockChildProcess('', ['']);
+        const output = new Observable<Output<string>>(() => {
+            /* no op */
+        });
+        const stubExecutionService2 = ({
+            execObservable: () => {
+                client.connect(server.getPort());
+                return {
+                    proc: mockProc,
+                    out: output,
+                    dispose: () => {
+                        /* no-body */
+                    },
+                };
+            },
+        } as unknown) as IPythonExecutionService;
+
+        const stubExecutionFactory2 = ({
+            createActivatedEnvironment: () => Promise.resolve(stubExecutionService2),
+        } as unknown) as IPythonExecutionFactory;
+        server = new PythonTestServer(stubExecutionFactory2, debugLauncher);
         const options = {
             command: { script: 'myscript', args: ['-foo', 'foo'] },
             workspaceFolder: Uri.file('/foo/bar'),
@@ -319,14 +444,6 @@ Request-uuid: UUID_HERE
             uuid: fakeUuid,
         };
 
-        stubExecutionService = ({
-            exec: async () => {
-                client.connect(server.getPort());
-                return Promise.resolve({ stdout: '', stderr: '' });
-            },
-        } as unknown) as IPythonExecutionService;
-
-        server = new PythonTestServer(stubExecutionFactory, debugLauncher);
         await server.serverReady();
         const uuid = server.createUUID();
         server.onRunDataReceived(({ data }) => {
             eventData = data;
@@ -349,9 +466,8 @@ Request-uuid: ${uuid}
             console.log('Socket connection error:', error);
         });
 
-        await server.sendCommand(options);
+        server.sendCommand(options);
         await deferred.promise;
-        console.log('event data', eventData);
         const expectedResult =
             '{"cwd": "path", "status": "success", "result": "xyz", "not_found": null, "error": null}';
         assert.deepStrictEqual(eventData, expectedResult);
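For reference, the payloads these tests write to the socket follow an RPC-style framing: a header block, a blank separator line, then a JSON body, with Content-Length telling the server how much to read and Request-uuid routing the message to the right handler. A sketch of a well-formed frame as inferred from the payloads exercised above (the exact header set and line endings are whatever common/server.ts actually parses; this is illustrative only):

    // Build one framed message: headers, blank line, JSON body. The real
    // server may expect CRLF separators; '\n' is an assumption here.
    function buildFrame(uuid: string, body: string): string {
        return [
            `Content-Length: ${Buffer.byteLength(body)}`,
            'Content-Type: application/json',
            `Request-uuid: ${uuid}`,
            '',
            body,
        ].join('\n');
    }

    // e.g. client.write(buildFrame(uuid, '{"cwd": "path", "status": "success", "result": "xyz", "not_found": null, "error": null}'));
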
diff --git a/src/test/testing/testController/workspaceTestAdapter.unit.test.ts b/src/test/testing/testController/workspaceTestAdapter.unit.test.ts
index 5a2e48130746b..41cd1bbd7ef2f 100644
--- a/src/test/testing/testController/workspaceTestAdapter.unit.test.ts
+++ b/src/test/testing/testController/workspaceTestAdapter.unit.test.ts
@@ -164,8 +164,7 @@ suite('Workspace test adapter', () => {
         const buildErrorNodeOptionsStub = sinon.stub(util, 'buildErrorNodeOptions').returns(errorTestItemOptions);
         const testProvider = 'unittest';
 
-        const abc = await workspaceTestAdapter.discoverTests(testController);
-        console.log(abc);
+        await workspaceTestAdapter.discoverTests(testController);
 
         sinon.assert.calledWithMatch(createErrorTestItemStub, sinon.match.any, sinon.match.any);
         sinon.assert.calledWithMatch(buildErrorNodeOptionsStub, Uri.parse('foo'), sinon.match.any, testProvider);
diff --git a/src/testTestingRootWkspc/largeWorkspace/test_parameterized_subtest.py b/src/testTestingRootWkspc/largeWorkspace/test_parameterized_subtest.py
new file mode 100644
index 0000000000000..3e84df0a2d9f8
--- /dev/null
+++ b/src/testTestingRootWkspc/largeWorkspace/test_parameterized_subtest.py
@@ -0,0 +1,16 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import pytest
+import unittest
+
+
+@pytest.mark.parametrize("num", range(0, 200))
+def test_odd_even(num):
+    assert num % 2 == 0
+
+
+class NumbersTest(unittest.TestCase):
+    def test_even(self):
+        for i in range(0, 200):
+            with self.subTest(i=i):
+                self.assertEqual(i % 2, 0)
diff --git a/src/testTestingRootWkspc/smallWorkspace/test_simple.py b/src/testTestingRootWkspc/smallWorkspace/test_simple.py
new file mode 100644
index 0000000000000..6b4f7bd2f8a62
--- /dev/null
+++ b/src/testTestingRootWkspc/smallWorkspace/test_simple.py
@@ -0,0 +1,12 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import unittest
+
+
+def test_a():
+    assert 1 == 1
+
+
+class SimpleClass(unittest.TestCase):
+    def test_simple_unit(self):
+        assert True