
chore(deps-dev): bump flake8-bugbear from 24.4.26 to 24.10.31 #364

GitHub Actions / E2E Tests Python3.12 failed Nov 1, 2024 in 0s

5 fail, 150 pass in 47s

155 tests   150 ✅   0 💤   5 ❌   47s ⏱️
  1 suite     1 file

Results for commit 5975f1b.
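
For local debugging, the two failing spec modules can be re-run in isolation. A minimal sketch, assuming the repository's e2e specs run under plain pytest (the exact CI invocation is not shown in this log); the node IDs below are taken from the tracebacks that follow:

    # Minimal reproduction sketch (assumption: plain pytest drives these specs).
    # Running only the failing tests keeps the feedback loop short.
    import pytest

    pytest.main([
        "tests_e2e/__specs/execute_tool_with_args.py::test_only_optional_arguments",
        "tests_e2e/__specs/ux_create_tool_and_execute.py::test_creates_a_python_tool_and_executes_it",
        "-v",
    ])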

Annotations

Check warning on line 0 in tests_e2e.__specs.execute_tool_with_args

test_only_optional_arguments[args0] (tests_e2e.__specs.execute_tool_with_args) failed

junit/e2e-test-results-3.12.xml [took 0s]
Raw output
Exception: Failed to match hexagon output:
Expected: \x1b[92mname: John
Got: \x1b[93mThere where 1 error(s) in your input for tool only-optionals
args = ['--name=John', '--age', '31', '--country', 'Argentina', '--likes', ...]

    @pytest.mark.parametrize(
        "args",
        [
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["--likes", "sand", "beach", "--name=John", "--country", "Argentina", "--age", "31"],  # fmt: skip
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["-n=John", "-a", "31", "-c", "Argentina", "-l", "sand", "beach"],  # fmt: skip
        ],
    )
    def test_only_optional_arguments(args):
        (
            as_a_user(__file__)
            .run_hexagon(["only-optionals"] + args)
>           .then_output_should_be(
                [
                    "name: John",
                    "age: 31",
                    "country: Argentina",
                    "likes: ['sand', 'beach']",
                ]
            )
            .exit()
        )

tests_e2e/__specs/execute_tool_with_args.py:114: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

line = 'There where 1 error(s) in your input for tool only-optionals\n'
expected = 'name: John'

    def _assert_expected_text(line: str, expected: str or List[str]):
        if isinstance(expected, list):
            for assertion in expected:
                single_assert_line(line, assertion)
        else:
>           single_assert_line(line, expected)
E           Exception: Failed to match hexagon output:
E           Expected: \x1b[92mname: John
E           Got: \x1b[93mThere where 1 error(s) in your input for tool only-optionals

tests_e2e/__specs/utils/assertions.py:123: Exception
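
This failure and the three parametrized variants that follow either report an input error or collect only one of the two trailing --likes values, which suggests (the log alone cannot confirm it) that list-style option parsing changed behavior. As a point of comparison only, and not hexagon's actual parser, here is how argparse models the greedy list collection that the expected output likes: ['sand', 'beach'] relies on:

    # Comparison sketch using argparse, NOT hexagon's real parser: nargs="+"
    # lets a single --likes flag consume both trailing values, which is the
    # behavior the spec's expected output assumes.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--name")
    parser.add_argument("--age")
    parser.add_argument("--country")
    parser.add_argument("--likes", nargs="+")

    ns = parser.parse_args(
        ["--name=John", "--age", "31", "--country", "Argentina",
         "--likes", "sand", "beach"]
    )
    print(ns.likes)  # ['sand', 'beach'] -- what the failing assertions expect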

Check warning on line 0 in tests_e2e.__specs.execute_tool_with_args

test_only_optional_arguments[args1] (tests_e2e.__specs.execute_tool_with_args) failed

junit/e2e-test-results-3.12.xml [took 0s]
Raw output
Exception: Failed to match hexagon output:
Expected: \x1b[92mlikes: ['sand', 'beach']
Got: \x1b[93mlikes: ['beach']
args = ['--likes', 'sand', 'beach', '--name=John', '--country', 'Argentina', ...]

    @pytest.mark.parametrize(
        "args",
        [
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["--likes", "sand", "beach", "--name=John", "--country", "Argentina", "--age", "31"],  # fmt: skip
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["-n=John", "-a", "31", "-c", "Argentina", "-l", "sand", "beach"],  # fmt: skip
        ],
    )
    def test_only_optional_arguments(args):
        (
            as_a_user(__file__)
            .run_hexagon(["only-optionals"] + args)
>           .then_output_should_be(
                [
                    "name: John",
                    "age: 31",
                    "country: Argentina",
                    "likes: ['sand', 'beach']",
                ]
            )
            .exit()
        )

tests_e2e/__specs/execute_tool_with_args.py:114: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

line = "likes: ['beach']\n", expected = "likes: ['sand', 'beach']"

    def _assert_expected_text(line: str, expected: str or List[str]):
        if isinstance(expected, list):
            for assertion in expected:
                single_assert_line(line, assertion)
        else:
>           single_assert_line(line, expected)
E           Exception: Failed to match hexagon output:
E           Expected: \x1b[92mlikes: ['sand', 'beach']
E           Got: \x1b[93mlikes: ['beach']

tests_e2e/__specs/utils/assertions.py:123: Exception

Check warning on line 0 in tests_e2e.__specs.execute_tool_with_args

test_only_optional_arguments[args2] (tests_e2e.__specs.execute_tool_with_args) failed

junit/e2e-test-results-3.12.xml [took 0s]
Raw output
Exception: Failed to match hexagon output:
Expected: \x1b[92mname: John
Got: \x1b[93mThere where 1 error(s) in your input for tool only-optionals
args = ['--name=John', '--age', '31', '--country', 'Argentina', '--likes', ...]

    @pytest.mark.parametrize(
        "args",
        [
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["--likes", "sand", "beach", "--name=John", "--country", "Argentina", "--age", "31"],  # fmt: skip
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["-n=John", "-a", "31", "-c", "Argentina", "-l", "sand", "beach"],  # fmt: skip
        ],
    )
    def test_only_optional_arguments(args):
        (
            as_a_user(__file__)
            .run_hexagon(["only-optionals"] + args)
>           .then_output_should_be(
                [
                    "name: John",
                    "age: 31",
                    "country: Argentina",
                    "likes: ['sand', 'beach']",
                ]
            )
            .exit()
        )

tests_e2e/__specs/execute_tool_with_args.py:114: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

line = 'There where 1 error(s) in your input for tool only-optionals\n'
expected = 'name: John'

    def _assert_expected_text(line: str, expected: str or List[str]):
        if isinstance(expected, list):
            for assertion in expected:
                single_assert_line(line, assertion)
        else:
>           single_assert_line(line, expected)
E           Exception: Failed to match hexagon output:
E           Expected: \x1b[92mname: John
E           Got: \x1b[93mThere where 1 error(s) in your input for tool only-optionals

tests_e2e/__specs/utils/assertions.py:123: Exception

Check warning on line 0 in tests_e2e.__specs.execute_tool_with_args

test_only_optional_arguments[args3] (tests_e2e.__specs.execute_tool_with_args) failed

junit/e2e-test-results-3.12.xml [took 0s]
Raw output
Exception: Failed to match hexagon output:
Expected: \x1b[92mname: John
Got: \x1b[93mThere where 1 error(s) in your input for tool only-optionals
args = ['-n=John', '-a', '31', '-c', 'Argentina', '-l', ...]

    @pytest.mark.parametrize(
        "args",
        [
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["--likes", "sand", "beach", "--name=John", "--country", "Argentina", "--age", "31"],  # fmt: skip
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],  # fmt: skip
            ["-n=John", "-a", "31", "-c", "Argentina", "-l", "sand", "beach"],  # fmt: skip
        ],
    )
    def test_only_optional_arguments(args):
        (
            as_a_user(__file__)
            .run_hexagon(["only-optionals"] + args)
>           .then_output_should_be(
                [
                    "name: John",
                    "age: 31",
                    "country: Argentina",
                    "likes: ['sand', 'beach']",
                ]
            )
            .exit()
        )

tests_e2e/__specs/execute_tool_with_args.py:114: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

line = 'There where 1 error(s) in your input for tool only-optionals\n'
expected = 'name: John'

    def _assert_expected_text(line: str, expected: str or List[str]):
        if isinstance(expected, list):
            for assertion in expected:
                single_assert_line(line, assertion)
        else:
>           single_assert_line(line, expected)
E           Exception: Failed to match hexagon output:
E           Expected: \x1b[92mname: John
E           Got: \x1b[93mThere where 1 error(s) in your input for tool only-optionals

tests_e2e/__specs/utils/assertions.py:123: Exception
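
The args0 through args3 suffixes in the failing test names are pytest's auto-generated IDs for the four parametrize entries shown above. Purely as an illustration (the id strings below are hypothetical, not taken from the spec), explicit ids would make the CI report name each variant directly:

    # Hypothetical ids for readability; the real spec relies on pytest's
    # default auto-numbered ids (args0..args3).
    import pytest

    @pytest.mark.parametrize(
        "args",
        [
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],
            ["--likes", "sand", "beach", "--name=John", "--country", "Argentina", "--age", "31"],
            ["--name=John", "--age", "31", "--country", "Argentina", "--likes", "sand", "beach"],
            ["-n=John", "-a", "31", "-c", "Argentina", "-l", "sand", "beach"],
        ],
        ids=["long-flags", "likes-first", "long-flags-repeat", "short-flags"],
    )
    def test_only_optional_arguments(args):
        ...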

Check warning on line 0 in tests_e2e.__specs.ux_create_tool_and_execute

test_creates_a_python_tool_and_executes_it (tests_e2e.__specs.ux_create_tool_and_execute) failed

junit/e2e-test-results-3.12.xml [took 0s]
Raw output
Exception: Failed to match hexagon output:
Expected: \x1b[92m│ selected tool: a_new_action
Got: \x1b[93mThere where 1 error(s) in your input for tool a-new-action-command

    def test_creates_a_python_tool_and_executes_it():
        _clear_custom_tool()
        (
            as_a_user(__file__)
            .given_a_cli_yaml(config_file)
            .run_hexagon(["create-tool"])
            .arrow_down()
            .enter()
            .input("a_new_action")
            .then_output_should_be(
                [["What name would you like to give your new action?", "a_new_action"]],
                discard_until_first_match=True,
            )
            .carriage_return()
            .input("-command")
            .enter()
            .input(LONG_NAME)
            .input(DESCRIPTION)
            .exit()
        )
    
        (
            as_a_user(__file__)
            .run_hexagon(
                ["a-new-action-command", "--last-name", "my-last-name"],
                {"HEXAGON_THEME": "no_border"},
            )
>           .then_output_should_be(
                [
                    "╭╼ Test",
                    "│",
                    "│ selected tool: a_new_action",
                    "│ selected env: None",
                    "│ values in tool.envs[env.name]: None",
                    "│ extra cli arguments: show_help=False extra_args=None raw_extra_args=[] last_name=",
                    "├ your last name is: my-last-name",
                    "│",
                    "╰╼",
                ]
            )
            .exit()
        )

tests_e2e/__specs/ux_create_tool_and_execute.py:52: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

line = 'There where 1 error(s) in your input for tool a-new-action-command\n'
expected = '│ selected tool: a_new_action'

    def _assert_expected_text(line: str, expected: str or List[str]):
        if isinstance(expected, list):
            for assertion in expected:
                single_assert_line(line, assertion)
        else:
>           single_assert_line(line, expected)
E           Exception: Failed to match hexagon output:
E           Expected: \x1b[92m│ selected tool: a_new_action
E           Got: \x1b[93mThere where 1 error(s) in your input for tool a-new-action-command

tests_e2e/__specs/utils/assertions.py:123: Exception