diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 530534475d..74f4ecc6d4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -19,21 +19,22 @@ jobs: timeout-minutes: 20 steps: - name: Check out code from GitHub - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v4.2.0 + uses: actions/setup-python@v4.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} - name: Generate partial Python venv restore key id: generate-python-key run: >- - echo "::set-output name=key::base-venv-${{ env.CACHE_VERSION }}-${{ + echo "key=base-venv-${{ env.CACHE_VERSION }}-${{ hashFiles('setup.cfg', 'requirements_test.txt', 'requirements_test_min.txt', - 'requirements_test_brain.txt', 'requirements_test_pre_commit.txt') }}" + 'requirements_test_brain.txt', 'requirements_test_pre_commit.txt') }}" >> + $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.0.7 + uses: actions/cache@v3.0.11 with: path: venv key: >- @@ -52,11 +53,11 @@ jobs: - name: Generate pre-commit restore key id: generate-pre-commit-key run: >- - echo "::set-output name=key::pre-commit-${{ env.CACHE_VERSION }}-${{ - hashFiles('.pre-commit-config.yaml') }}" + echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{ + hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT - name: Restore pre-commit environment id: cache-precommit - uses: actions/cache@v3.0.7 + uses: actions/cache@v3.0.11 with: path: ${{ env.PRE_COMMIT_CACHE }} key: >- @@ -86,10 +87,10 @@ jobs: python-key: ${{ steps.generate-python-key.outputs.key }} steps: - name: Check out code from GitHub - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v4.2.0 + uses: actions/setup-python@v4.3.0 with: python-version: ${{ matrix.python-version }} - name: Install Qt @@ -99,12 +100,12 @@ jobs: - name: Generate partial Python venv restore key id: generate-python-key run: >- - echo "::set-output name=key::venv-${{ env.CACHE_VERSION }}-${{ + echo "key=venv-${{ env.CACHE_VERSION }}-${{ hashFiles('setup.cfg', 'requirements_test.txt', 'requirements_test_min.txt', - 'requirements_test_brain.txt') }}" + 'requirements_test_brain.txt') }}" >> $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.0.7 + uses: actions/cache@v3.0.11 with: path: venv key: >- @@ -142,15 +143,15 @@ jobs: COVERAGERC_FILE: .coveragerc steps: - name: Check out code from GitHub - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v4.2.0 + uses: actions/setup-python@v4.3.0 with: python-version: ${{ matrix.python-version }} - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.0.7 + uses: actions/cache@v3.0.11 with: path: venv key: @@ -190,21 +191,21 @@ jobs: # Workaround to set correct temp directory on Windows # https://github.com/actions/virtual-environments/issues/712 - name: Check out code from GitHub - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v4.2.0 + uses: actions/setup-python@v4.3.0 with: python-version: ${{ matrix.python-version }} - name: Generate partial Python venv restore key id: generate-python-key run: >- - echo "::set-output name=key::venv-${{ 
env.CACHE_VERSION }}-${{ + echo "key=venv-${{ env.CACHE_VERSION }}-${{ hashFiles('setup.cfg', 'requirements_test_min.txt', - 'requirements_test_brain.txt') }}" + 'requirements_test_brain.txt') }}" >> $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.0.7 + uses: actions/cache@v3.0.11 with: path: venv key: >- @@ -235,20 +236,20 @@ jobs: python-version: ["pypy3.7", "pypy3.8", "pypy3.9"] steps: - name: Check out code from GitHub - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v4.2.0 + uses: actions/setup-python@v4.3.0 with: python-version: ${{ matrix.python-version }} - name: Generate partial Python venv restore key id: generate-python-key run: >- - echo "::set-output name=key::venv-${{ env.CACHE_VERSION }}-${{ - hashFiles('setup.cfg', 'requirements_test_min.txt') }}" + echo "key=venv-${{ env.CACHE_VERSION }}-${{ + hashFiles('setup.cfg', 'requirements_test_min.txt') }}" >> $GITHUB_OUTPUT - name: Restore Python virtual environment id: cache-venv - uses: actions/cache@v3.0.7 + uses: actions/cache@v3.0.11 with: path: venv key: >- diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index da5a1c9466..7f5c8f1343 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -39,7 +39,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/release-tests.yml b/.github/workflows/release-tests.yml index 0937cd118a..2ea78ff27b 100644 --- a/.github/workflows/release-tests.yml +++ b/.github/workflows/release-tests.yml @@ -16,10 +16,10 @@ jobs: timeout-minutes: 5 steps: - name: Check out code from GitHub - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 - name: Set up Python id: python - uses: actions/setup-python@v4.2.0 + uses: actions/setup-python@v4.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} - name: Create Python virtual environment with virtualenv==15.1.0 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ab98005f28..61eb5ff6f4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,10 +17,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out code from Github - uses: actions/checkout@v3.0.2 + uses: actions/checkout@v3.1.0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v4.2.0 + uses: actions/setup-python@v4.3.0 with: python-version: ${{ env.DEFAULT_PYTHON }} - name: Install requirements diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5cc2a69970..b5f67fb354 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,8 +9,8 @@ repos: exclude: .github/|tests/testdata - id: end-of-file-fixer exclude: tests/testdata - - repo: https://github.com/myint/autoflake - rev: v1.4 + - repo: https://github.com/PyCQA/autoflake + rev: v1.7.6 hooks: - id: autoflake exclude: tests/testdata|astroid/__init__.py|astroid/scoped_nodes.py|astroid/node_classes.py @@ -28,7 +28,7 @@ repos: exclude: tests/testdata|setup.py types: [python] - repo: https://github.com/asottile/pyupgrade - rev: v2.37.3 + rev: v3.1.0 hooks: - id: pyupgrade exclude: tests/testdata @@ -44,7 +44,7 @@ repos: - id: black-disable-checker exclude: tests/unittest_nodes_lineno.py - repo: https://github.com/psf/black - rev: 
22.6.0 + rev: 22.10.0 hooks: - id: black args: [--safe, --quiet] @@ -71,7 +71,7 @@ repos: ] exclude: tests/testdata|conf.py - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.971 + rev: v0.982 hooks: - id: mypy name: mypy @@ -90,7 +90,7 @@ repos: ] exclude: tests/testdata| # exclude everything, we're not ready - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.0-alpha.0 + rev: v3.0.0-alpha.2 hooks: - id: prettier args: [--prose-wrap=always, --print-width=88] diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index ded27a708a..dd2d12e32b 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -23,6 +23,7 @@ Maintainers - Łukasz Rogalski - Florian Bruhin - Ashley Whetter +- Mark Byrne <31762852+mbyrnepr2@users.noreply.github.com> - Dimitri Prybysh - Areveny @@ -39,7 +40,6 @@ Contributors - David Gilman - Julien Jehannet - Calen Pennington -- Mark Byrne <31762852+mbyrnepr2@users.noreply.github.com> - Tim Martin - Phil Schaf - Hugo van Kemenade @@ -111,6 +111,7 @@ Contributors - Stanislav Levin - Simon Hewitt - Serhiy Storchaka +- Saugat Pachhai (सौगात) - Roy Wright - Robin Jarry - René Fritze <47802+renefritze@users.noreply.github.com> diff --git a/ChangeLog b/ChangeLog index 8bf6182ec1..6e60f2b3b2 100644 --- a/ChangeLog +++ b/ChangeLog @@ -16,11 +16,136 @@ Release date: TBA Refs PyCQA/pylint#7306 +* Create ``ContextManagerModel`` and let ``GeneratorModel`` inherit from it. + + Refs PyCQA/pylint#2567 + +* Add ``_value2member_map_`` member to the ``enum`` brain. + + Refs PyCQA/pylint#3941 + + +What's New in astroid 2.12.13? +============================== +Release date: TBA + + + +What's New in astroid 2.12.12? +============================== +Release date: 2022-10-19 + +* Add the ``length`` parameter to ``hash.digest`` & ``hash.hexdigest`` in the ``hashlib`` brain. + + Refs PyCQA/pylint#4039 + +* Prevent a crash when a module's ``__path__`` attribute is unexpectedly missing. + + Refs PyCQA/pylint#7592 + +* Fix inferring attributes with empty annotation assignments if parent + class contains valid assignment. + + Refs PyCQA/pylint#7631 + + +What's New in astroid 2.12.11? +============================== +Release date: 2022-10-10 + +* Improve detection of namespace packages for the modules with ``__spec__`` set to None. + + Closes PyCQA/pylint#7488. + +* Fixed a regression in the creation of the ``__init__`` of dataclasses with + multiple inheritance. + + Closes PyCQA/pylint#7434 + + +What's New in astroid 2.12.10? +============================== +Release date: 2022-09-17 + + +* Fixed a crash when introspecting modules compiled by `cffi`. + + Closes #1776 + Closes PyCQA/pylint#7399 + +* ``decorators.cached`` now gets its cache cleared by calling ``AstroidManager.clear_cache``. + + Refs #1780 + What's New in astroid 2.12.9? +============================= +Release date: 2022-09-07 + +* Fixed creation of the ``__init__`` of ``dataclasses`` with multiple inheritance. + + Closes PyCQA/pylint#7427 + +* Fixed a crash on ``namedtuples`` that use ``typename`` to specify their name. + + Closes PyCQA/pylint#7429 + + + +What's New in astroid 2.12.8? +============================= +Release date: 2022-09-06 + +* Fixed a crash in the ``dataclass`` brain for ``InitVars`` without subscript typing. + + Closes PyCQA/pylint#7422 + +* Fixed parsing of default values in ``dataclass`` attributes. + + Closes PyCQA/pylint#7425 + What's New in astroid 2.12.7? +============================= +Release date: 2022-09-06 + +* Fixed a crash in the ``dataclass`` brain for uninferable bases.
+ + Closes PyCQA/pylint#7418 + + +What's New in astroid 2.12.6? +============================= +Release date: 2022-09-05 + +* Fix a crash involving ``Uninferable`` arguments to ``namedtuple()``. + + Closes PyCQA/pylint#7375 + +* The ``dataclass`` brain now understands the ``kw_only`` keyword in dataclass decorators. + + Closes PyCQA/pylint#7290 + + +What's New in astroid 2.12.5? +============================= +Release date: 2022-08-29 + +* Prevent first-party imports from being resolved to `site-packages`. + + Refs PyCQA/pylint#7365 + +* Fix ``astroid.interpreter._import.util.is_namespace()`` incorrectly + returning ``True`` for frozen stdlib modules on PyPy. + + Closes #1755 + What's New in astroid 2.12.4? ============================= -Release date: TBA +Release date: 2022-08-25 + +* Fixed a crash involving non-standard type comments such as ``# type: # any comment``. + Refs PyCQA/pylint#7347 What's New in astroid 2.12.3? @@ -337,7 +462,7 @@ Release date: 2022-02-27 Closes PyCQA/pylint#5679 -* Inlcude names of keyword-only arguments in ``astroid.scoped_nodes.Lambda.argnames``. +* Include names of keyword-only arguments in ``astroid.scoped_nodes.Lambda.argnames``. Closes PyCQA/pylint#5771 diff --git a/README.rst b/README.rst index b7c3c232e5..cbba168868 100644 --- a/README.rst +++ b/README.rst @@ -86,4 +86,4 @@ Tests are in the 'test' subdirectory. To launch the whole tests suite, you can u either `tox` or `pytest`:: tox - pytest astroid + pytest diff --git a/astroid/_cache.py b/astroid/_cache.py new file mode 100644 index 0000000000..fc4ddc205b --- /dev/null +++ b/astroid/_cache.py @@ -0,0 +1,26 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE +# Copyright (c) https://github.com/PyCQA/astroid/blob/main/CONTRIBUTORS.txt + +from __future__ import annotations + +from typing import Any + + +class CacheManager: + """Manager of caches, to be used as a singleton.""" + + def __init__(self) -> None: + self.dict_caches: list[dict[Any, Any]] = [] + + def clear_all_caches(self) -> None: + """Clear all caches.""" + for dict_cache in self.dict_caches: + dict_cache.clear() + + def add_dict_cache(self, cache: dict[Any, Any]) -> None: + """Add a dictionary cache to the manager.""" + self.dict_caches.append(cache) + + +CACHE_MANAGER = CacheManager() diff --git a/astroid/arguments.py b/astroid/arguments.py index fdbe7aac91..4108c0ddf0 100644 --- a/astroid/arguments.py +++ b/astroid/arguments.py @@ -150,7 +150,7 @@ def _unpack_args(self, args, context=None): values.append(arg) return values - def infer_argument(self, funcnode, name, context): + def infer_argument(self, funcnode, name, context): # noqa: C901 """infer a function argument value according to the call context Arguments: diff --git a/astroid/bases.py b/astroid/bases.py index 1f5072a8e8..25a8393dde 100644 --- a/astroid/bases.py +++ b/astroid/bases.py @@ -485,7 +485,7 @@ def implicit_parameters(self) -> Literal[0, 1]: def is_bound(self): return True - def _infer_type_new_call(self, caller, context): + def _infer_type_new_call(self, caller, context): # noqa: C901 """Try to infer what type.__new__(mcs, name, bases, attrs) returns. 
In order for such call to be valid, the metaclass needs to be diff --git a/astroid/brain/brain_dataclasses.py b/astroid/brain/brain_dataclasses.py index b458d70f9f..5d3c346101 100644 --- a/astroid/brain/brain_dataclasses.py +++ b/astroid/brain/brain_dataclasses.py @@ -16,31 +16,16 @@ from __future__ import annotations import sys -from collections.abc import Generator +from collections.abc import Iterator from typing import Tuple, Union -from astroid import bases, context, helpers, inference_tip +from astroid import bases, context, helpers, nodes from astroid.builder import parse from astroid.const import PY39_PLUS, PY310_PLUS -from astroid.exceptions import ( - AstroidSyntaxError, - InferenceError, - MroError, - UseInferenceDefault, -) +from astroid.exceptions import AstroidSyntaxError, InferenceError, UseInferenceDefault +from astroid.inference_tip import inference_tip from astroid.manager import AstroidManager -from astroid.nodes.node_classes import ( - AnnAssign, - Assign, - AssignName, - Attribute, - Call, - Name, - NodeNG, - Subscript, - Unknown, -) -from astroid.nodes.scoped_nodes import ClassDef, FunctionDef +from astroid.typing import InferenceResult from astroid.util import Uninferable if sys.version_info >= (3, 8): @@ -49,7 +34,9 @@ from typing_extensions import Literal _FieldDefaultReturn = Union[ - None, Tuple[Literal["default"], NodeNG], Tuple[Literal["default_factory"], Call] + None, + Tuple[Literal["default"], nodes.NodeNG], + Tuple[Literal["default_factory"], nodes.Call], ] DATACLASSES_DECORATORS = frozenset(("dataclass",)) @@ -60,9 +47,11 @@ DEFAULT_FACTORY = "_HAS_DEFAULT_FACTORY" # based on typing.py -def is_decorated_with_dataclass(node, decorator_names=DATACLASSES_DECORATORS): +def is_decorated_with_dataclass( + node: nodes.ClassDef, decorator_names: frozenset[str] = DATACLASSES_DECORATORS +) -> bool: """Return True if a decorated node has a `dataclass` decorator applied.""" - if not isinstance(node, ClassDef) or not node.decorators: + if not isinstance(node, nodes.ClassDef) or not node.decorators: return False return any( @@ -71,14 +60,14 @@ def is_decorated_with_dataclass(node, decorator_names=DATACLASSES_DECORATORS): ) -def dataclass_transform(node: ClassDef) -> None: +def dataclass_transform(node: nodes.ClassDef) -> None: """Rewrite a dataclass to be easily understood by pylint""" node.is_dataclass = True for assign_node in _get_dataclass_attributes(node): name = assign_node.target.name - rhs_node = Unknown( + rhs_node = nodes.Unknown( lineno=assign_node.lineno, col_offset=assign_node.col_offset, parent=assign_node, @@ -89,21 +78,22 @@ def dataclass_transform(node: ClassDef) -> None: if not _check_generate_dataclass_init(node): return - try: - reversed_mro = list(reversed(node.mro())) - except MroError: - reversed_mro = [node] - - field_assigns = {} - field_order = [] - for klass in (k for k in reversed_mro if is_decorated_with_dataclass(k)): - for assign_node in _get_dataclass_attributes(klass, init=True): - name = assign_node.target.name - if name not in field_assigns: - field_order.append(name) - field_assigns[name] = assign_node - - init_str = _generate_dataclass_init([field_assigns[name] for name in field_order]) + kw_only_decorated = False + if PY310_PLUS and node.decorators.nodes: + for decorator in node.decorators.nodes: + if not isinstance(decorator, nodes.Call): + kw_only_decorated = False + break + for keyword in decorator.keywords: + if keyword.arg == "kw_only": + kw_only_decorated = keyword.value.bool_value() + + init_str = _generate_dataclass_init( + 
node, + list(_get_dataclass_attributes(node, init=True)), + kw_only_decorated, + ) + try: init_node = parse(init_str)["__init__"] except AstroidSyntaxError: @@ -120,15 +110,17 @@ def dataclass_transform(node: ClassDef) -> None: root.locals[DEFAULT_FACTORY] = [new_assign.targets[0]] -def _get_dataclass_attributes(node: ClassDef, init: bool = False) -> Generator: +def _get_dataclass_attributes( + node: nodes.ClassDef, init: bool = False +) -> Iterator[nodes.AnnAssign]: """Yield the AnnAssign nodes of dataclass attributes for the node. If init is True, also include InitVars, but exclude attributes from calls to field where init=False. """ for assign_node in node.body: - if not isinstance(assign_node, AnnAssign) or not isinstance( - assign_node.target, AssignName + if not isinstance(assign_node, nodes.AnnAssign) or not isinstance( + assign_node.target, nodes.AssignName ): continue @@ -141,11 +133,10 @@ def _get_dataclass_attributes(node: ClassDef, init: bool = False) -> Generator: if init: value = assign_node.value if ( - isinstance(value, Call) + isinstance(value, nodes.Call) and _looks_like_dataclass_field_call(value, check_scope=False) and any( - keyword.arg == "init" - and not keyword.value.bool_value() # type: ignore[union-attr] # value is never None + keyword.arg == "init" and not keyword.value.bool_value() for keyword in value.keywords ) ): @@ -156,7 +147,7 @@ def _get_dataclass_attributes(node: ClassDef, init: bool = False) -> Generator: yield assign_node -def _check_generate_dataclass_init(node: ClassDef) -> bool: +def _check_generate_dataclass_init(node: nodes.ClassDef) -> bool: """Return True if we should generate an __init__ method for node. This is True when: @@ -169,7 +160,7 @@ def _check_generate_dataclass_init(node: ClassDef) -> bool: found = None for decorator_attribute in node.decorators.nodes: - if not isinstance(decorator_attribute, Call): + if not isinstance(decorator_attribute, nodes.Call): continue if _looks_like_dataclass_decorator(decorator_attribute): @@ -179,26 +170,82 @@ def _check_generate_dataclass_init(node: ClassDef) -> bool: return True # Check for keyword arguments of the form init=False - return all( - keyword.arg != "init" - and keyword.value.bool_value() # type: ignore[union-attr] # value is never None + return not any( + keyword.arg == "init" + and not keyword.value.bool_value() # type: ignore[union-attr] # value is never None for keyword in found.keywords ) -def _generate_dataclass_init(assigns: list[AnnAssign]) -> str: +def _find_arguments_from_base_classes( + node: nodes.ClassDef, skippable_names: set[str] +) -> tuple[str, str]: + """Iterate through all bases and add them to the list of arguments to add to the init.""" + pos_only_store: dict[str, tuple[str | None, str | None]] = {} + kw_only_store: dict[str, tuple[str | None, str | None]] = {} + # See TODO down below + # all_have_defaults = True + + for base in reversed(node.mro()): + if not base.is_dataclass: + continue + try: + base_init: nodes.FunctionDef = base.locals["__init__"][0] + except KeyError: + continue + + pos_only, kw_only = base_init.args._get_arguments_data() + for posarg, data in pos_only.items(): + if posarg in skippable_names: + continue + # if data[1] is None: + # if all_have_defaults and pos_only_store: + # # TODO: This should return an Uninferable as this would raise + # # a TypeError at runtime. However, transforms can't return + # # Uninferables currently. 
+ pass + # all_have_defaults = False + pos_only_store[posarg] = data + + for kwarg, data in kw_only.items(): + if kwarg in skippable_names: + continue + kw_only_store[kwarg] = data + + pos_only, kw_only = "", "" + for pos_arg, data in pos_only_store.items(): + pos_only += pos_arg + if data[0]: + pos_only += ": " + data[0] + if data[1]: + pos_only += " = " + data[1] + pos_only += ", " + for kw_arg, data in kw_only_store.items(): + kw_only += kw_arg + if data[0]: + kw_only += ": " + data[0] + if data[1]: + kw_only += " = " + data[1] + kw_only += ", " + + return pos_only, kw_only + + +def _generate_dataclass_init( + node: nodes.ClassDef, assigns: list[nodes.AnnAssign], kw_only_decorated: bool +) -> str: """Return an init method for a dataclass given the targets.""" - target_names = [] - params = [] - assignments = [] + params: list[str] = [] + assignments: list[str] = [] + assign_names: list[str] = [] for assign in assigns: name, annotation, value = assign.target.name, assign.annotation, assign.value - target_names.append(name) + assign_names.append(name) if _is_init_var(annotation): # type: ignore[arg-type] # annotation is never None init_var = True - if isinstance(annotation, Subscript): + if isinstance(annotation, nodes.Subscript): annotation = annotation.slice else: # Cannot determine type annotation for parameter from InitVar @@ -208,13 +255,13 @@ def _generate_dataclass_init(assigns: list[AnnAssign]) -> str: init_var = False assignment_str = f"self.{name} = {name}" - if annotation: + if annotation is not None: param_str = f"{name}: {annotation.as_string()}" else: param_str = name if value: - if isinstance(value, Call) and _looks_like_dataclass_field_call( + if isinstance(value, nodes.Call) and _looks_like_dataclass_field_call( value, check_scope=False ): result = _get_field_default(value) @@ -235,14 +282,34 @@ def _generate_dataclass_init(assigns: list[AnnAssign]) -> str: if not init_var: assignments.append(assignment_str) - params_string = ", ".join(["self"] + params) + prev_pos_only, prev_kw_only = _find_arguments_from_base_classes( + node, set(assign_names + ["self"]) + ) + + # Construct the new init method parameter string + params_string = "self, " + if prev_pos_only: + params_string += prev_pos_only + if not kw_only_decorated: + params_string += ", ".join(params) + + if not params_string.endswith(", "): + params_string += ", " + + if prev_kw_only: + params_string += "*, " + prev_kw_only + if kw_only_decorated: + params_string += ", ".join(params) + ", " + elif kw_only_decorated: + params_string += "*, " + ", ".join(params) + ", " + + assignments_string = "\n ".join(assignments) if assignments else "pass" return f"def __init__({params_string}) -> None:\n {assignments_string}" def infer_dataclass_attribute( - node: Unknown, ctx: context.InferenceContext | None = None -) -> Generator: + node: nodes.Unknown, ctx: context.InferenceContext | None = None +) -> Iterator[InferenceResult]: """Inference tip for an Unknown node that was dynamically generated to represent a dataclass attribute. @@ -250,7 +317,7 @@ def infer_dataclass_attribute( Then, an Instance of the annotated class is yielded.
""" assign = node.parent - if not isinstance(assign, AnnAssign): + if not isinstance(assign, nodes.AnnAssign): yield Uninferable return @@ -264,10 +331,10 @@ def infer_dataclass_attribute( def infer_dataclass_field_call( - node: Call, ctx: context.InferenceContext | None = None -) -> Generator: + node: nodes.Call, ctx: context.InferenceContext | None = None +) -> Iterator[InferenceResult]: """Inference tip for dataclass field calls.""" - if not isinstance(node.parent, (AnnAssign, Assign)): + if not isinstance(node.parent, (nodes.AnnAssign, nodes.Assign)): raise UseInferenceDefault result = _get_field_default(node) if not result: @@ -283,14 +350,14 @@ def infer_dataclass_field_call( def _looks_like_dataclass_decorator( - node: NodeNG, decorator_names: frozenset[str] = DATACLASSES_DECORATORS + node: nodes.NodeNG, decorator_names: frozenset[str] = DATACLASSES_DECORATORS ) -> bool: """Return True if node looks like a dataclass decorator. Uses inference to lookup the value of the node, and if that fails, matches against specific names. """ - if isinstance(node, Call): # decorator with arguments + if isinstance(node, nodes.Call): # decorator with arguments node = node.func try: inferred = next(node.infer()) @@ -298,21 +365,21 @@ def _looks_like_dataclass_decorator( inferred = Uninferable if inferred is Uninferable: - if isinstance(node, Name): + if isinstance(node, nodes.Name): return node.name in decorator_names - if isinstance(node, Attribute): + if isinstance(node, nodes.Attribute): return node.attrname in decorator_names return False return ( - isinstance(inferred, FunctionDef) + isinstance(inferred, nodes.FunctionDef) and inferred.name in decorator_names and inferred.root().name in DATACLASS_MODULES ) -def _looks_like_dataclass_attribute(node: Unknown) -> bool: +def _looks_like_dataclass_attribute(node: nodes.Unknown) -> bool: """Return True if node was dynamically generated as the child of an AnnAssign statement. """ @@ -322,13 +389,15 @@ def _looks_like_dataclass_attribute(node: Unknown) -> bool: scope = parent.scope() return ( - isinstance(parent, AnnAssign) - and isinstance(scope, ClassDef) + isinstance(parent, nodes.AnnAssign) + and isinstance(scope, nodes.ClassDef) and is_decorated_with_dataclass(scope) ) -def _looks_like_dataclass_field_call(node: Call, check_scope: bool = True) -> bool: +def _looks_like_dataclass_field_call( + node: nodes.Call, check_scope: bool = True +) -> bool: """Return True if node is calling dataclasses field or Field from an AnnAssign statement directly in the body of a ClassDef. @@ -338,9 +407,9 @@ def _looks_like_dataclass_field_call(node: Call, check_scope: bool = True) -> bo stmt = node.statement(future=True) scope = stmt.scope() if not ( - isinstance(stmt, AnnAssign) + isinstance(stmt, nodes.AnnAssign) and stmt.value is not None - and isinstance(scope, ClassDef) + and isinstance(scope, nodes.ClassDef) and is_decorated_with_dataclass(scope) ): return False @@ -350,13 +419,13 @@ def _looks_like_dataclass_field_call(node: Call, check_scope: bool = True) -> bo except (InferenceError, StopIteration): return False - if not isinstance(inferred, FunctionDef): + if not isinstance(inferred, nodes.FunctionDef): return False return inferred.name == FIELD_NAME and inferred.root().name in DATACLASS_MODULES -def _get_field_default(field_call: Call) -> _FieldDefaultReturn: +def _get_field_default(field_call: nodes.Call) -> _FieldDefaultReturn: """Return a the default value of a field call, and the corresponding keyword argument name. field(default=...) 
results in the ... node @@ -376,7 +445,7 @@ def _get_field_default(field_call: Call) -> _FieldDefaultReturn: return "default", default if default is None and default_factory is not None: - new_call = Call( + new_call = nodes.Call( lineno=field_call.lineno, col_offset=field_call.col_offset, parent=field_call.parent, @@ -387,7 +456,7 @@ def _get_field_default(field_call: Call) -> _FieldDefaultReturn: return None -def _is_class_var(node: NodeNG) -> bool: +def _is_class_var(node: nodes.NodeNG) -> bool: """Return True if node is a ClassVar, with or without subscripting.""" if PY39_PLUS: try: @@ -399,15 +468,15 @@ def _is_class_var(node: NodeNG) -> bool: # Before Python 3.9, inference returns typing._SpecialForm instead of ClassVar. # Our backup is to inspect the node's structure. - return isinstance(node, Subscript) and ( - isinstance(node.value, Name) + return isinstance(node, nodes.Subscript) and ( + isinstance(node.value, nodes.Name) and node.value.name == "ClassVar" - or isinstance(node.value, Attribute) + or isinstance(node.value, nodes.Attribute) and node.value.attrname == "ClassVar" ) -def _is_keyword_only_sentinel(node: NodeNG) -> bool: +def _is_keyword_only_sentinel(node: nodes.NodeNG) -> bool: """Return True if node is the KW_ONLY sentinel.""" if not PY310_PLUS: return False @@ -418,7 +487,7 @@ def _is_keyword_only_sentinel(node: NodeNG) -> bool: ) -def _is_init_var(node: NodeNG) -> bool: +def _is_init_var(node: nodes.NodeNG) -> bool: """Return True if node is an InitVar, with or without subscripting.""" try: inferred = next(node.infer()) @@ -441,8 +510,8 @@ def _is_init_var(node: NodeNG) -> bool: def _infer_instance_from_annotation( - node: NodeNG, ctx: context.InferenceContext | None = None -) -> Generator: + node: nodes.NodeNG, ctx: context.InferenceContext | None = None +) -> Iterator[type[Uninferable] | bases.Instance]: """Infer an instance corresponding to the type annotation represented by node. Currently has limited support for the typing module. @@ -452,7 +521,7 @@ def _infer_instance_from_annotation( klass = next(node.infer(context=ctx)) except (InferenceError, StopIteration): yield Uninferable - if not isinstance(klass, ClassDef): + if not isinstance(klass, nodes.ClassDef): yield Uninferable elif klass.root().name in { "typing", @@ -468,17 +537,17 @@ def _infer_instance_from_annotation( AstroidManager().register_transform( - ClassDef, dataclass_transform, is_decorated_with_dataclass + nodes.ClassDef, dataclass_transform, is_decorated_with_dataclass ) AstroidManager().register_transform( - Call, + nodes.Call, inference_tip(infer_dataclass_field_call, raise_on_overwrite=True), _looks_like_dataclass_field_call, ) AstroidManager().register_transform( - Unknown, + nodes.Unknown, inference_tip(infer_dataclass_attribute, raise_on_overwrite=True), _looks_like_dataclass_attribute, ) diff --git a/astroid/brain/brain_gi.py b/astroid/brain/brain_gi.py index 53491d1400..248a60167b 100644 --- a/astroid/brain/brain_gi.py +++ b/astroid/brain/brain_gi.py @@ -54,7 +54,7 @@ ) -def _gi_build_stub(parent): +def _gi_build_stub(parent): # noqa: C901 """ Inspect the passed module recursively and build stubs for functions, classes, etc. 
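A note on the dataclass brain changes above: the reworked `_generate_dataclass_init` now also folds positional and keyword-only arguments inherited from dataclass base classes into the synthesized `__init__`. A minimal sketch of how the generated signature can be observed through inference; the `Point` class is invented for this example and assumes an astroid build containing this brain:

```python
# Illustrative sketch, not part of the diff: `Point` is a made-up class.
import astroid

klass = astroid.extract_node(
    '''
from dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int = 0
'''
)
# dataclass_transform() stores the synthesized FunctionDef in the class locals.
init = klass.locals["__init__"][0]
print(init.args.as_string())  # roughly: self, x: int, y: int = 0
```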
diff --git a/astroid/brain/brain_hashlib.py b/astroid/brain/brain_hashlib.py index b628361d8d..e321af6dc8 100644 --- a/astroid/brain/brain_hashlib.py +++ b/astroid/brain/brain_hashlib.py @@ -10,48 +10,86 @@ def _hashlib_transform(): maybe_usedforsecurity = ", usedforsecurity=True" if PY39_PLUS else "" - signature = f"value=''{maybe_usedforsecurity}" + init_signature = f"value=''{maybe_usedforsecurity}" + digest_signature = "self" + shake_digest_signature = "self, length" + template = """ - class %(name)s(object): - def __init__(self, %(signature)s): pass - def digest(self): - return %(digest)s - def copy(self): - return self - def update(self, value): pass - def hexdigest(self): - return '' - @property - def name(self): - return %(name)r - @property - def block_size(self): - return 1 - @property - def digest_size(self): - return 1 + class %(name)s: + def __init__(self, %(init_signature)s): pass + def digest(%(digest_signature)s): + return %(digest)s + def copy(self): + return self + def update(self, value): pass + def hexdigest(%(digest_signature)s): + return '' + @property + def name(self): + return %(name)r + @property + def block_size(self): + return 1 + @property + def digest_size(self): + return 1 """ + algorithms_with_signature = dict.fromkeys( - ["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature + [ + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "sha3_224", + "sha3_256", + "sha3_384", + "sha3_512", + ], + (init_signature, digest_signature), + ) + + blake2b_signature = ( + "data=b'', *, digest_size=64, key=b'', salt=b'', " + "person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, " + f"node_depth=0, inner_size=0, last_node=False{maybe_usedforsecurity}" + ) + + blake2s_signature = ( + "data=b'', *, digest_size=32, key=b'', salt=b'', " + "person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, " + f"node_depth=0, inner_size=0, last_node=False{maybe_usedforsecurity}" ) - blake2b_signature = f"data=b'', *, digest_size=64, key=b'', salt=b'', \ - person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \ - node_depth=0, inner_size=0, last_node=False{maybe_usedforsecurity}" - blake2s_signature = f"data=b'', *, digest_size=32, key=b'', salt=b'', \ - person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \ - node_depth=0, inner_size=0, last_node=False{maybe_usedforsecurity}" - new_algorithms = dict.fromkeys( - ["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"], - signature, + + shake_algorithms = dict.fromkeys( + ["shake_128", "shake_256"], + (init_signature, shake_digest_signature), ) - algorithms_with_signature.update(new_algorithms) + algorithms_with_signature.update(shake_algorithms) + algorithms_with_signature.update( - {"blake2b": blake2b_signature, "blake2s": blake2s_signature} + { + "blake2b": (blake2b_signature, digest_signature), + "blake2s": (blake2s_signature, digest_signature), + } ) + classes = "".join( - template % {"name": hashfunc, "digest": 'b""', "signature": signature} - for hashfunc, signature in algorithms_with_signature.items() + template + % { + "name": hashfunc, + "digest": 'b""', + "init_signature": init_signature, + "digest_signature": digest_signature, + } + for hashfunc, ( + init_signature, + digest_signature, + ) in algorithms_with_signature.items() ) + return parse(classes) diff --git a/astroid/brain/brain_mechanize.py b/astroid/brain/brain_mechanize.py index 4c86fd9ba3..6b08bc42f5 100644 --- a/astroid/brain/brain_mechanize.py +++ b/astroid/brain/brain_mechanize.py @@ -9,71 +9,111 @@ def 
mechanize_transform(): return AstroidBuilder(AstroidManager()).string_build( - """ - -class Browser(object): + """class Browser(object): def __getattr__(self, name): return None + def __getitem__(self, name): return None + def __setitem__(self, name, val): return None + def back(self, n=1): return None + def clear_history(self): return None + def click(self, *args, **kwds): return None + def click_link(self, link=None, **kwds): return None + def close(self): return None + def encoding(self): return None - def find_link(self, text=None, text_regex=None, name=None, name_regex=None, url=None, url_regex=None, tag=None, predicate=None, nr=0): + + def find_link( + self, + text=None, + text_regex=None, + name=None, + name_regex=None, + url=None, + url_regex=None, + tag=None, + predicate=None, + nr=0, + ): return None + def follow_link(self, link=None, **kwds): return None + def forms(self): return None + def geturl(self): return None + def global_form(self): return None + def links(self, **kwds): return None + def open_local_file(self, filename): return None + def open(self, url, data=None, timeout=None): return None + def open_novisit(self, url, data=None, timeout=None): return None + def open_local_file(self, filename): return None + def reload(self): return None + def response(self): return None + def select_form(self, name=None, predicate=None, nr=None, **attrs): return None + def set_cookie(self, cookie_string): return None + def set_handle_referer(self, handle): return None + def set_header(self, header, value=None): return None + def set_html(self, html, url="http://example.com/"): return None + def set_response(self, response): return None - def set_simple_cookie(self, name, value, domain, path='/'): + + def set_simple_cookie(self, name, value, domain, path="/"): return None + def submit(self, *args, **kwds): return None + def title(self): return None + def viewing_html(self): return None + def visit_response(self, response, request=None): return None """ diff --git a/astroid/brain/brain_namedtuple_enum.py b/astroid/brain/brain_namedtuple_enum.py index 736f9f9fc5..dfc9bf6833 100644 --- a/astroid/brain/brain_namedtuple_enum.py +++ b/astroid/brain/brain_namedtuple_enum.py @@ -425,6 +425,10 @@ def name(self): new_targets.append(fake.instantiate_class()) dunder_members[local] = fake node.locals[local] = new_targets + + # The undocumented `_value2member_map_` member: + node.locals["_value2member_map_"] = [nodes.Dict(parent=node)] + members = nodes.Dict(parent=node) members.postinit( [ @@ -538,7 +542,25 @@ def _get_namedtuple_fields(node: nodes.Call) -> str: extract a node from them later on. 
""" names = [] - for elt in next(node.args[1].infer()).elts: + container = None + try: + container = next(node.args[1].infer()) + except (InferenceError, StopIteration) as exc: + raise UseInferenceDefault from exc + # We pass on IndexError as we'll try to infer 'field_names' from the keywords + except IndexError: + pass + if not container: + for keyword_node in node.keywords: + if keyword_node.arg == "field_names": + try: + container = next(keyword_node.value.infer()) + except (InferenceError, StopIteration) as exc: + raise UseInferenceDefault from exc + break + if not isinstance(container, nodes.BaseContainer): + raise UseInferenceDefault + for elt in container.elts: if isinstance(elt, nodes.Const): names.append(elt.as_string()) continue diff --git a/astroid/brain/brain_numpy_core_multiarray.py b/astroid/brain/brain_numpy_core_multiarray.py index 487ec471d0..dbdb24ea47 100644 --- a/astroid/brain/brain_numpy_core_multiarray.py +++ b/astroid/brain/brain_numpy_core_multiarray.py @@ -47,10 +47,15 @@ def vdot(a, b): return numpy.ndarray([0, 0])""", "bincount": """def bincount(x, weights=None, minlength=0): return numpy.ndarray([0, 0])""", - "busday_count": """def busday_count(begindates, enddates, weekmask='1111100', holidays=[], busdaycal=None, out=None): - return numpy.ndarray([0, 0])""", - "busday_offset": """def busday_offset(dates, offsets, roll='raise', weekmask='1111100', holidays=None, busdaycal=None, out=None): - return numpy.ndarray([0, 0])""", + "busday_count": """def busday_count( + begindates, enddates, weekmask='1111100', holidays=[], busdaycal=None, out=None + ): + return numpy.ndarray([0, 0])""", + "busday_offset": """def busday_offset( + dates, offsets, roll='raise', weekmask='1111100', holidays=None, + busdaycal=None, out=None + ): + return numpy.ndarray([0, 0])""", "can_cast": """def can_cast(from_, to, casting='safe'): return True""", "copyto": """def copyto(dst, src, casting='same_kind', where=True): diff --git a/astroid/brain/brain_typing.py b/astroid/brain/brain_typing.py index 807ba96e6e..b34b8bec50 100644 --- a/astroid/brain/brain_typing.py +++ b/astroid/brain/brain_typing.py @@ -240,7 +240,7 @@ def _forbid_class_getitem_access(node: ClassDef) -> None: def full_raiser(origin_func, attr, *args, **kwargs): """ Raises an AttributeInferenceError in case of access to __class_getitem__ method. - Otherwise just call origin_func. + Otherwise, just call origin_func. """ if attr == "__class_getitem__": raise AttributeInferenceError("__class_getitem__ access is not allowed") @@ -248,8 +248,9 @@ def full_raiser(origin_func, attr, *args, **kwargs): try: node.getattr("__class_getitem__") - # If we are here, then we are sure to modify object that do have __class_getitem__ method (which origin is one the - # protocol defined in collections module) whereas the typing module consider it should not + # If we are here, then we are sure to modify an object that does have + # __class_getitem__ method (which origin is the protocol defined in + # collections module) whereas the typing module considers it should not. 
# We do not want __class_getitem__ to be found in the classdef partial_raiser = partial(full_raiser, node.getattr) node.getattr = partial_raiser diff --git a/astroid/builder.py b/astroid/builder.py index 24caa0c6e0..a3b87faafe 100644 --- a/astroid/builder.py +++ b/astroid/builder.py @@ -10,18 +10,25 @@ from __future__ import annotations +import ast import os import textwrap import types +from collections.abc import Iterator, Sequence +from io import TextIOWrapper from tokenize import detect_encoding +from typing import TYPE_CHECKING from astroid import bases, modutils, nodes, raw_building, rebuilder, util -from astroid._ast import get_parser_module +from astroid._ast import ParserModule, get_parser_module from astroid.exceptions import AstroidBuildingError, AstroidSyntaxError, InferenceError from astroid.manager import AstroidManager -from astroid.nodes.node_classes import NodeNG -objects = util.lazy_import("objects") +if TYPE_CHECKING: + from astroid import objects +else: + objects = util.lazy_import("objects") + # The name of the transient function that is used to # wrap expressions to be extracted when calling @@ -34,7 +41,7 @@ MISPLACED_TYPE_ANNOTATION_ERROR = "misplaced type annotation" -def open_source_file(filename): +def open_source_file(filename: str) -> tuple[TextIOWrapper, str, str]: # pylint: disable=consider-using-with with open(filename, "rb") as byte_stream: encoding = detect_encoding(byte_stream.readline)[0] @@ -43,7 +50,7 @@ def open_source_file(filename): return stream, encoding, data -def _can_assign_attr(node, attrname): +def _can_assign_attr(node: nodes.ClassDef, attrname: str | None) -> bool: try: slots = node.slots() except NotImplementedError: @@ -64,7 +71,9 @@ class AstroidBuilder(raw_building.InspectBuilder): by default being True. """ - def __init__(self, manager=None, apply_transforms=True): + def __init__( + self, manager: AstroidManager | None = None, apply_transforms: bool = True + ) -> None: super().__init__(manager) self._apply_transforms = apply_transforms @@ -94,9 +103,10 @@ def module_build( # We have to handle transformation by ourselves since the # rebuilder isn't called for builtin nodes node = self._manager.visit_transforms(node) + assert isinstance(node, nodes.Module) return node - def file_build(self, path, modname=None): + def file_build(self, path: str, modname: str | None = None) -> nodes.Module: """Build astroid from a source code file (i.e. 
from an ast) *path* is expected to be a python source file @@ -134,7 +144,9 @@ def file_build(self, path, modname=None): module, builder = self._data_build(data, modname, path) return self._post_build(module, builder, encoding) - def string_build(self, data, modname="", path=None): + def string_build( + self, data: str, modname: str = "", path: str | None = None + ) -> nodes.Module: """Build astroid from source code string.""" module, builder = self._data_build(data, modname, path) module.file_bytes = data.encode("utf-8") @@ -162,7 +174,7 @@ def _post_build( return module def _data_build( - self, data: str, modname, path + self, data: str, modname: str, path: str | None ) -> tuple[nodes.Module, rebuilder.TreeRebuilder]: """Build tree node from data and add some informations""" try: @@ -192,18 +204,19 @@ def _data_build( module = builder.visit_module(node, modname, node_file, package) return module, builder - def add_from_names_to_locals(self, node): + def add_from_names_to_locals(self, node: nodes.ImportFrom) -> None: """Store imported names to the locals Resort the locals if coming from a delayed node """ - def _key_func(node): - return node.fromlineno + def _key_func(node: nodes.NodeNG) -> int: + return node.fromlineno or 0 - def sort_locals(my_list): + def sort_locals(my_list: list[nodes.NodeNG]) -> None: my_list.sort(key=_key_func) + assert node.parent # It should always default to the module for (name, asname) in node.names: if name == "*": try: @@ -217,7 +230,7 @@ def sort_locals(my_list): node.parent.set_local(asname or name, node) sort_locals(node.parent.scope().locals[asname or name]) - def delayed_assattr(self, node): + def delayed_assattr(self, node: nodes.AssignAttr) -> None: """Visit a AssAttr node This adds name to locals and handle members definition. @@ -228,8 +241,12 @@ def delayed_assattr(self, node): if inferred is util.Uninferable: continue try: - cls = inferred.__class__ - if cls is bases.Instance or cls is objects.ExceptionInstance: + # pylint: disable=unidiomatic-typecheck # We want a narrow check on the + # parent type, not all of its subclasses + if ( + type(inferred) == bases.Instance + or type(inferred) == objects.ExceptionInstance + ): inferred = inferred._proxied iattrs = inferred.instance_attrs if not _can_assign_attr(inferred, node.attrname): @@ -238,6 +255,11 @@ def delayed_assattr(self, node): # Const, Tuple or other containers that inherit from # `Instance` continue + elif ( + isinstance(inferred, bases.Proxy) + or inferred is util.Uninferable + ): + continue elif inferred.is_function: iattrs = inferred.instance_attrs else: @@ -261,11 +283,17 @@ def delayed_assattr(self, node): pass -def build_namespace_package_module(name: str, path: list[str]) -> nodes.Module: - return nodes.Module(name, path=path, package=True) +def build_namespace_package_module(name: str, path: Sequence[str]) -> nodes.Module: + # TODO: Typing: Remove the cast to list and just update typing to accept Sequence + return nodes.Module(name, path=list(path), package=True) -def parse(code, module_name="", path=None, apply_transforms=True): +def parse( + code: str, + module_name: str = "", + path: str | None = None, + apply_transforms: bool = True, +) -> nodes.Module: """Parses a source string in order to obtain an astroid AST from it :param str code: The code for the module. 
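For reference, `parse()` (whose signature gains annotations above) is the public entry point returning a `nodes.Module`. A small usage sketch with the standard astroid API:

```python
# parse() builds a Module node and applies transforms by default.
import astroid

module = astroid.parse("x = 1 + 2", module_name="example")
assert module.name == "example"
print(next(module.body[0].value.infer()).value)  # the BinOp infers to 3
```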
@@ -282,7 +310,7 @@ def parse(code, module_name="", path=None, apply_transforms=True): return builder.string_build(code, modname=module_name, path=path) -def _extract_expressions(node): +def _extract_expressions(node: nodes.NodeNG) -> Iterator[nodes.NodeNG]: """Find expressions in a call to _TRANSIENT_FUNCTION and extract them. The function walks the AST recursively to search for expressions that @@ -301,6 +329,7 @@ def _extract_expressions(node): and node.func.name == _TRANSIENT_FUNCTION ): real_expr = node.args[0] + assert node.parent real_expr.parent = node.parent # Search for node in all _astng_fields (the fields checked when # get_children is called) of its parent. Some of those fields may @@ -309,7 +338,7 @@ def _extract_expressions(node): # like no call to _TRANSIENT_FUNCTION ever took place. for name in node.parent._astroid_fields: child = getattr(node.parent, name) - if isinstance(child, (list, tuple)): + if isinstance(child, list): for idx, compound_child in enumerate(child): if compound_child is node: child[idx] = real_expr @@ -321,7 +350,7 @@ def _extract_expressions(node): yield from _extract_expressions(child) -def _find_statement_by_line(node, line): +def _find_statement_by_line(node: nodes.NodeNG, line: int) -> nodes.NodeNG | None: """Extracts the statement on a specific line from an AST. If the line number of node matches line, it will be returned; @@ -356,7 +385,7 @@ def _find_statement_by_line(node, line): return None -def extract_node(code: str, module_name: str = "") -> NodeNG | list[NodeNG]: +def extract_node(code: str, module_name: str = "") -> nodes.NodeNG | list[nodes.NodeNG]: """Parses some Python code as a module and extracts a designated AST node. Statements: @@ -410,13 +439,13 @@ def extract_node(code: str, module_name: str = "") -> NodeNG | list[NodeNG]: :returns: The designated node from the parse tree, or a list of nodes. 
""" - def _extract(node): + def _extract(node: nodes.NodeNG | None) -> nodes.NodeNG | None: if isinstance(node, nodes.Expr): return node.value return node - requested_lines = [] + requested_lines: list[int] = [] for idx, line in enumerate(code.splitlines()): if line.strip().endswith(_STATEMENT_SELECTOR): requested_lines.append(idx + 1) @@ -425,7 +454,7 @@ def _extract(node): if not tree.body: raise ValueError("Empty tree, cannot extract from it") - extracted = [] + extracted: list[nodes.NodeNG | None] = [] if requested_lines: extracted = [_find_statement_by_line(tree, line) for line in requested_lines] @@ -436,12 +465,13 @@ def _extract(node): extracted.append(tree.body[-1]) extracted = [_extract(node) for node in extracted] - if len(extracted) == 1: - return extracted[0] - return extracted + extracted_without_none = [node for node in extracted if node is not None] + if len(extracted_without_none) == 1: + return extracted_without_none[0] + return extracted_without_none -def _extract_single_node(code: str, module_name: str = "") -> NodeNG: +def _extract_single_node(code: str, module_name: str = "") -> nodes.NodeNG: """Call extract_node while making sure that only one value is returned.""" ret = extract_node(code, module_name) if isinstance(ret, list): @@ -449,7 +479,9 @@ def _extract_single_node(code: str, module_name: str = "") -> NodeNG: return ret -def _parse_string(data, type_comments=True): +def _parse_string( + data: str, type_comments: bool = True +) -> tuple[ast.Module, ParserModule]: parser_module = get_parser_module(type_comments=type_comments) try: parsed = parser_module.parse(data + "\n", type_comments=type_comments) diff --git a/astroid/decorators.py b/astroid/decorators.py index c4f44dcd27..9def52cdc5 100644 --- a/astroid/decorators.py +++ b/astroid/decorators.py @@ -15,7 +15,7 @@ import wrapt -from astroid import util +from astroid import _cache, util from astroid.context import InferenceContext from astroid.exceptions import InferenceError @@ -34,6 +34,7 @@ def cached(func, instance, args, kwargs): cache = getattr(instance, "__cache", None) if cache is None: instance.__cache = cache = {} + _cache.CACHE_MANAGER.add_dict_cache(cache) try: return cache[func] except KeyError: @@ -156,7 +157,7 @@ def raise_if_nothing_inferred(func, instance, args, kwargs): # Expensive decorators only used to emit Deprecation warnings. # If no other than the default DeprecationWarning are enabled, # fall back to passthrough implementations. 
-if util.check_warnings_filter(): +if util.check_warnings_filter(): # noqa: C901 def deprecate_default_argument_values( astroid_version: str = "3.0", **arguments: str @@ -207,7 +208,8 @@ def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R: ): warnings.warn( f"'{arg}' will be a required argument for " - f"'{args[0].__class__.__qualname__}.{func.__name__}' in astroid {astroid_version} " + f"'{args[0].__class__.__qualname__}.{func.__name__}'" + f" in astroid {astroid_version} " f"('{arg}' should be of type: '{type_annotation}')", DeprecationWarning, ) diff --git a/astroid/exceptions.py b/astroid/exceptions.py index 0dac271dd7..412b0ac703 100644 --- a/astroid/exceptions.py +++ b/astroid/exceptions.py @@ -87,7 +87,7 @@ def __init__( error: Exception | None = None, source: str | None = None, path: str | None = None, - cls: None = None, + cls: type | None = None, class_repr: str | None = None, **kws: Any, ) -> None: @@ -131,7 +131,7 @@ class AstroidSyntaxError(AstroidBuildingError): def __init__( self, message: str, - modname: str, + modname: str | None, error: Exception, path: str | None, source: str | None = None, diff --git a/astroid/helpers.py b/astroid/helpers.py index 928aeed6be..82b719639b 100644 --- a/astroid/helpers.py +++ b/astroid/helpers.py @@ -8,6 +8,8 @@ from __future__ import annotations +from collections.abc import Generator + from astroid import bases, manager, nodes, raw_building, util from astroid.context import CallContext, InferenceContext from astroid.exceptions import ( @@ -18,7 +20,7 @@ _NonDeducibleTypeHierarchy, ) from astroid.nodes import scoped_nodes -from astroid.typing import InferenceResult +from astroid.typing import InferenceResult, SuccessfulInferenceResult def _build_proxy_class(cls_name: str, builtins: nodes.Module) -> nodes.ClassDef: @@ -42,7 +44,9 @@ def _function_type( return _build_proxy_class(cls_name, builtins) -def _object_type(node, context=None): +def _object_type( + node: SuccessfulInferenceResult, context: InferenceContext | None = None +) -> Generator[InferenceResult | None, None, None]: astroid_manager = manager.AstroidManager() builtins = astroid_manager.builtins_module context = context or InferenceContext() @@ -61,11 +65,17 @@ def _object_type(node, context=None): yield _build_proxy_class("module", builtins) elif isinstance(inferred, nodes.Unknown): raise InferenceError - else: + elif inferred is util.Uninferable: + yield inferred + elif isinstance(inferred, (bases.Proxy, nodes.Slice)): yield inferred._proxied + else: # pragma: no cover + raise AssertionError(f"We don't handle {type(inferred)} currently") -def object_type(node, context=None): +def object_type( + node: SuccessfulInferenceResult, context: InferenceContext | None = None +) -> InferenceResult | None: """Obtain the type of the given node This is used to implement the ``type`` builtin, which means that it's diff --git a/astroid/inference.py b/astroid/inference.py index 942988b21c..a71005540b 100644 --- a/astroid/inference.py +++ b/astroid/inference.py @@ -11,8 +11,9 @@ import functools import itertools import operator +import typing from collections.abc import Callable, Generator, Iterable, Iterator -from typing import TYPE_CHECKING, Any, TypeVar +from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union from astroid import bases, decorators, helpers, nodes, protocols, util from astroid.context import ( @@ -45,17 +46,26 @@ # Prevents circular imports objects = util.lazy_import("objects") - +_T = TypeVar("_T") +_BaseContainerT = TypeVar("_BaseContainerT", 
bound=nodes.BaseContainer) _FunctionDefT = TypeVar("_FunctionDefT", bound=nodes.FunctionDef) +GetFlowFactory = typing.Callable[ + [ + InferenceResult, + Optional[InferenceResult], + Union[nodes.AugAssign, nodes.BinOp], + InferenceResult, + Optional[InferenceResult], + InferenceContext, + InferenceContext, + ], + Any, +] # .infer method ############################################################### -_T = TypeVar("_T") -_BaseContainerT = TypeVar("_BaseContainerT", bound=nodes.BaseContainer) - - def infer_end( self: _T, context: InferenceContext | None = None, **kwargs: Any ) -> Iterator[_T]: @@ -652,7 +662,14 @@ def _infer_old_style_string_formatting( return (util.Uninferable,) -def _invoke_binop_inference(instance, opnode, op, other, context, method_name): +def _invoke_binop_inference( + instance: InferenceResult, + opnode: nodes.AugAssign | nodes.BinOp, + op: str, + other: InferenceResult, + context: InferenceContext, + method_name: str, +): """Invoke binary operation inference on the given instance.""" methods = dunder_lookup.lookup(instance, method_name) context = bind_context_to_node(context, instance) @@ -675,7 +692,14 @@ def _invoke_binop_inference(instance, opnode, op, other, context, method_name): return instance.infer_binary_op(opnode, op, other, context, inferred) -def _aug_op(instance, opnode, op, other, context, reverse=False): +def _aug_op( + instance: InferenceResult, + opnode: nodes.AugAssign, + op: str, + other: InferenceResult, + context: InferenceContext, + reverse: bool = False, +): """Get an inference callable for an augmented binary operation.""" method_name = protocols.AUGMENTED_OP_METHOD[op] return functools.partial( @@ -689,7 +713,14 @@ def _aug_op(instance, opnode, op, other, context, reverse=False): ) -def _bin_op(instance, opnode, op, other, context, reverse=False): +def _bin_op( + instance: InferenceResult, + opnode: nodes.AugAssign | nodes.BinOp, + op: str, + other: InferenceResult, + context: InferenceContext, + reverse: bool = False, +): """Get an inference callable for a normal binary operation. If *reverse* is True, then the reflected method will be used instead. @@ -731,7 +762,13 @@ def _same_type(type1, type2): def _get_binop_flow( - left, left_type, binary_opnode, right, right_type, context, reverse_context + left: InferenceResult, + left_type: InferenceResult | None, + binary_opnode: nodes.AugAssign | nodes.BinOp, + right: InferenceResult, + right_type: InferenceResult | None, + context: InferenceContext, + reverse_context: InferenceContext, ): """Get the flow for binary operations. @@ -766,7 +803,13 @@ def _get_binop_flow( def _get_aug_flow( - left, left_type, aug_opnode, right, right_type, context, reverse_context + left: InferenceResult, + left_type: InferenceResult | None, + aug_opnode: nodes.AugAssign, + right: InferenceResult, + right_type: InferenceResult | None, + context: InferenceContext, + reverse_context: InferenceContext, ): """Get the flow for augmented binary operations. 
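The `_get_binop_flow` and `_get_aug_flow` helpers annotated in these hunks drive inference of normal and augmented binary operations; the new type hints do not change behavior, which stays observable through the public API:

```python
# Sketch: binary-op inference resolves the __add__/__radd__ protocol.
import astroid

node = astroid.extract_node("1 + 2.5")
print(next(node.infer()).value)  # 3.5
```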
@@ -810,7 +853,13 @@ def _get_aug_flow( return methods -def _infer_binary_operation(left, right, binary_opnode, context, flow_factory): +def _infer_binary_operation( + left: InferenceResult, + right: InferenceResult, + binary_opnode: nodes.AugAssign | nodes.BinOp, + context: InferenceContext, + flow_factory: GetFlowFactory, +): """Infer a binary operation between a left operand and a right operand This is used by both normal binary operations and augmented binary diff --git a/astroid/inference_tip.py b/astroid/inference_tip.py index 341efd631e..e4c54822e0 100644 --- a/astroid/inference_tip.py +++ b/astroid/inference_tip.py @@ -23,7 +23,7 @@ _cache: dict[tuple[InferFn, NodeNG], list[InferOptions] | None] = {} -def clear_inference_tip_cache(): +def clear_inference_tip_cache() -> None: """Clear the inference tips cache.""" _cache.clear() diff --git a/astroid/interpreter/_import/spec.py b/astroid/interpreter/_import/spec.py index 5b186b104d..dc725d156b 100644 --- a/astroid/interpreter/_import/spec.py +++ b/astroid/interpreter/_import/spec.py @@ -129,7 +129,9 @@ def find_module( else: try: spec = importlib.util.find_spec(modname) - if spec and spec.loader is importlib.machinery.FrozenImporter: # type: ignore[comparison-overlap] + if ( + spec and spec.loader is importlib.machinery.FrozenImporter + ): # noqa: E501 # type: ignore[comparison-overlap] # No need for BuiltinImporter; builtins handled above return ModuleSpec( name=modname, diff --git a/astroid/interpreter/_import/util.py b/astroid/interpreter/_import/util.py index f082e9c4a7..6cc15b5d3c 100644 --- a/astroid/interpreter/_import/util.py +++ b/astroid/interpreter/_import/util.py @@ -10,9 +10,18 @@ from importlib._bootstrap_external import _NamespacePath from importlib.util import _find_spec_from_path # type: ignore[attr-defined] +from astroid.const import IS_PYPY + @lru_cache(maxsize=4096) def is_namespace(modname: str) -> bool: + from astroid.modutils import ( # pylint: disable=import-outside-toplevel + EXT_LIB_DIRS, + STD_LIB_DIRS, + ) + + STD_AND_EXT_LIB_DIRS = STD_LIB_DIRS.union(EXT_LIB_DIRS) + if modname in sys.builtin_module_names: return False @@ -33,12 +42,25 @@ def is_namespace(modname: str) -> bool: found_spec = _find_spec_from_path( working_modname, path=last_submodule_search_locations ) + except AttributeError: + return False except ValueError: if modname == "__main__": return False try: # .pth files will be on sys.modules - return sys.modules[modname].__spec__ is None + # __spec__ is set inconsistently on PyPy so we can't really on the heuristic here + # See: https://foss.heptapod.net/pypy/pypy/-/issues/3736 + # Check first fragment of modname, e.g. 
"astroid", not "astroid.interpreter" + # because of cffi's behavior + # See: https://github.com/PyCQA/astroid/issues/1776 + mod = sys.modules[processed_components[0]] + return ( + mod.__spec__ is None + and getattr(mod, "__file__", None) is None + and hasattr(mod, "__path__") + and not IS_PYPY + ) except KeyError: return False except AttributeError: @@ -68,8 +90,15 @@ def is_namespace(modname: str) -> bool: last_submodule_search_locations.append(str(assumed_location)) continue - # Update last_submodule_search_locations + # Update last_submodule_search_locations for next iteration if found_spec and found_spec.submodule_search_locations: + # But immediately return False if we can detect we are in stdlib + # or external lib (e.g site-packages) + if any( + any(location.startswith(lib_dir) for lib_dir in STD_AND_EXT_LIB_DIRS) + for location in found_spec.submodule_search_locations + ): + return False last_submodule_search_locations = found_spec.submodule_search_locations return ( diff --git a/astroid/interpreter/objectmodel.py b/astroid/interpreter/objectmodel.py index 0c613fb26b..1f41a11122 100644 --- a/astroid/interpreter/objectmodel.py +++ b/astroid/interpreter/objectmodel.py @@ -588,6 +588,48 @@ def attr___self__(self): attr_im_self = attr___self__ +class ContextManagerModel(ObjectModel): + """Model for context managers. + + Based on 3.3.9 of the Data Model documentation: + https://docs.python.org/3/reference/datamodel.html#with-statement-context-managers + """ + + @property + def attr___enter__(self) -> bases.BoundMethod: + """Representation of the base implementation of __enter__. + + As per Python documentation: + Enter the runtime context related to this object. The with statement + will bind this method's return value to the target(s) specified in the + as clause of the statement, if any. + """ + node: nodes.FunctionDef = builder.extract_node("""def __enter__(self): ...""") + # We set the parent as being the ClassDef of 'object' as that + # is where this method originally comes from + node.parent = AstroidManager().builtins_module["object"] + + return bases.BoundMethod(proxy=node, bound=_get_bound_node(self)) + + @property + def attr___exit__(self) -> bases.BoundMethod: + """Representation of the base implementation of __exit__. + + As per Python documentation: + Exit the runtime context related to this object. The parameters describe the + exception that caused the context to be exited. If the context was exited + without an exception, all three arguments will be None. + """ + node: nodes.FunctionDef = builder.extract_node( + """def __exit__(self, exc_type, exc_value, traceback): ...""" + ) + # We set the parent as being the ClassDef of 'object' as that + # is where this method originally comes from + node.parent = AstroidManager().builtins_module["object"] + + return bases.BoundMethod(proxy=node, bound=_get_bound_node(self)) + + class BoundMethodModel(FunctionModel): @property def attr___func__(self): @@ -598,7 +640,7 @@ def attr___self__(self): return self._instance.bound -class GeneratorModel(FunctionModel): +class GeneratorModel(FunctionModel, ContextManagerModel): def __new__(cls, *args, **kwargs): # Append the values from the GeneratorType unto this object. 
ret = super().__new__(cls, *args, **kwargs) @@ -795,7 +837,9 @@ class PropertyModel(ObjectModel): """Model for a builtin property""" def _init_function(self, name): - args = nodes.Arguments() + function = nodes.FunctionDef(name=name, parent=self._instance) + + args = nodes.Arguments(parent=function) args.postinit( args=[], defaults=[], @@ -807,8 +851,6 @@ def _init_function(self, name): kwonlyargs_annotations=[], ) - function = nodes.FunctionDef(name=name, parent=self._instance) - function.postinit(args=args, body=[]) return function diff --git a/astroid/manager.py b/astroid/manager.py index 77d22503cf..9f88c699fa 100644 --- a/astroid/manager.py +++ b/astroid/manager.py @@ -13,10 +13,14 @@ import os import types import zipimport +from collections.abc import Callable, Iterator, Sequence from importlib.util import find_spec, module_from_spec -from typing import TYPE_CHECKING, ClassVar +from typing import Any, ClassVar +from astroid import nodes +from astroid._cache import CACHE_MANAGER from astroid.const import BRAIN_MODULES_DIRECTORY +from astroid.context import InferenceContext from astroid.exceptions import AstroidBuildingError, AstroidImportError from astroid.interpreter._import import spec, util from astroid.modutils import ( @@ -31,15 +35,12 @@ modpath_from_file, ) from astroid.transforms import TransformVisitor -from astroid.typing import AstroidManagerBrain - -if TYPE_CHECKING: - from astroid import nodes +from astroid.typing import AstroidManagerBrain, InferenceResult ZIP_IMPORT_EXTS = (".zip", ".egg", ".whl", ".pyz", ".pyzw") -def safe_repr(obj): +def safe_repr(obj: Any) -> str: try: return repr(obj) except Exception: # pylint: disable=broad-except @@ -89,11 +90,17 @@ def unregister_transform(self): def builtins_module(self) -> nodes.Module: return self.astroid_cache["builtins"] - def visit_transforms(self, node): + def visit_transforms(self, node: nodes.NodeNG) -> InferenceResult: """Visit the transforms and apply them to the given *node*.""" return self._transform.visit(node) - def ast_from_file(self, filepath, modname=None, fallback=True, source=False): + def ast_from_file( + self, + filepath: str, + modname: str | None = None, + fallback: bool = True, + source: bool = False, + ) -> nodes.Module: """given a module name, return the astroid object""" try: filepath = get_source_file(filepath, include_no_ext=True) @@ -119,20 +126,24 @@ def ast_from_file(self, filepath, modname=None, fallback=True, source=False): return self.ast_from_module_name(modname) raise AstroidBuildingError("Unable to build an AST for {path}.", path=filepath) - def ast_from_string(self, data, modname="", filepath=None): + def ast_from_string( + self, data: str, modname: str = "", filepath: str | None = None + ) -> nodes.Module: """Given some source code as a string, return its corresponding astroid object""" # pylint: disable=import-outside-toplevel; circular import from astroid.builder import AstroidBuilder return AstroidBuilder(self).string_build(data, modname, filepath) - def _build_stub_module(self, modname): + def _build_stub_module(self, modname: str) -> nodes.Module: # pylint: disable=import-outside-toplevel; circular import from astroid.builder import AstroidBuilder return AstroidBuilder(self).string_build("", modname) - def _build_namespace_module(self, modname: str, path: list[str]) -> nodes.Module: + def _build_namespace_module( + self, modname: str, path: Sequence[str] + ) -> nodes.Module: # pylint: disable=import-outside-toplevel; circular import from astroid.builder import 
build_namespace_package_module @@ -147,13 +158,15 @@ def _can_load_extension(self, modname: str) -> bool: modname, self.extension_package_whitelist ) - def ast_from_module_name( + def ast_from_module_name( # noqa: C901 self, modname: str | None, context_file: str | None = None, use_cache: bool = True, ) -> nodes.Module: """Given a module name, return the astroid object.""" + if modname is None: + raise AstroidBuildingError("No module name given.") # Sometimes we don't want to use the cache. For example, when we're # importing a module with the same name as the file that is importing # we want to fallback on the import system to make sure we get the correct @@ -182,14 +195,14 @@ def ast_from_module_name( ): return self._build_stub_module(modname) try: - module = load_module_from_name(modname) + named_module = load_module_from_name(modname) except Exception as e: raise AstroidImportError( "Loading {modname} failed with:\n{error}", modname=modname, path=found_spec.location, ) from e - return self.ast_from_module(module, modname) + return self.ast_from_module(named_module, modname) elif found_spec.type == spec.ModuleType.PY_COMPILED: raise AstroidImportError( @@ -200,7 +213,7 @@ def ast_from_module_name( elif found_spec.type == spec.ModuleType.PY_NAMESPACE: return self._build_namespace_module( - modname, found_spec.submodule_search_locations + modname, found_spec.submodule_search_locations or [] ) elif found_spec.type == spec.ModuleType.PY_FROZEN: if found_spec.location is None: @@ -226,7 +239,7 @@ def ast_from_module_name( if context_file: os.chdir(old_cwd) - def zip_import_data(self, filepath): + def zip_import_data(self, filepath: str) -> nodes.Module | None: if zipimport is None: return None @@ -253,7 +266,9 @@ def zip_import_data(self, filepath): continue return None - def file_from_module_name(self, modname, contextfile): + def file_from_module_name( + self, modname: str, contextfile: str | None + ) -> spec.ModuleSpec: try: value = self._mod_file_cache[(modname, contextfile)] except KeyError: @@ -275,7 +290,9 @@ def file_from_module_name(self, modname, contextfile): raise value.with_traceback(None) # pylint: disable=no-member return value - def ast_from_module(self, module: types.ModuleType, modname: str | None = None): + def ast_from_module( + self, module: types.ModuleType, modname: str | None = None + ) -> nodes.Module: """given an imported module, return the astroid object""" modname = modname or module.__name__ if modname in self.astroid_cache: @@ -284,7 +301,8 @@ def ast_from_module(self, module: types.ModuleType, modname: str | None = None): # some builtin modules don't have __file__ attribute filepath = module.__file__ if is_python_source(filepath): - return self.ast_from_file(filepath, modname) + # Type is checked in is_python_source + return self.ast_from_file(filepath, modname) # type: ignore[arg-type] except AttributeError: pass @@ -293,7 +311,7 @@ def ast_from_module(self, module: types.ModuleType, modname: str | None = None): return AstroidBuilder(self).module_build(module, modname) - def ast_from_class(self, klass, modname=None): + def ast_from_class(self, klass: type, modname: str | None = None) -> nodes.ClassDef: """get astroid for the given class""" if modname is None: try: @@ -306,14 +324,24 @@ def ast_from_class(self, klass, modname=None): modname=modname, ) from exc modastroid = self.ast_from_module_name(modname) - return modastroid.getattr(klass.__name__)[0] # XXX + ret = modastroid.getattr(klass.__name__)[0] + assert isinstance(ret, nodes.ClassDef) + return ret - 
def infer_ast_from_something(self, obj, context=None): + def infer_ast_from_something( + self, obj: object, context: InferenceContext | None = None + ) -> Iterator[InferenceResult]: """infer astroid for the given class""" if hasattr(obj, "__class__") and not isinstance(obj, type): klass = obj.__class__ - else: + elif isinstance(obj, type): klass = obj + else: + raise AstroidBuildingError( # pragma: no cover + "Unable to get type for {class_repr}.", + cls=None, + class_repr=safe_repr(obj), + ) try: modname = klass.__module__ except AttributeError as exc: @@ -352,7 +380,7 @@ def infer_ast_from_something(self, obj, context=None): for inferred in modastroid.igetattr(name, context): yield inferred.instantiate_class() - def register_failed_import_hook(self, hook): + def register_failed_import_hook(self, hook: Callable[[str], nodes.Module]) -> None: """Registers a hook to resolve imports that cannot be found otherwise. `hook` must be a function that accepts a single argument `modname` which @@ -362,7 +390,7 @@ def register_failed_import_hook(self, hook): """ self._failed_import_hooks.append(hook) - def cache_module(self, module): + def cache_module(self, module: nodes.Module) -> None: """Cache a module if no module with the same name is known yet.""" self.astroid_cache.setdefault(module.name, module) @@ -391,6 +419,8 @@ def clear_cache(self) -> None: # NB: not a new TransformVisitor() AstroidManager.brain["_transform"].transforms = collections.defaultdict(list) + CACHE_MANAGER.clear_all_caches() + for lru_cache in ( LookupMixIn.lookup, _cache_normalize_path_, diff --git a/astroid/node_classes.py b/astroid/node_classes.py index 3711309bbf..59bb0109eb 100644 --- a/astroid/node_classes.py +++ b/astroid/node_classes.py @@ -92,6 +92,7 @@ # Please remove astroid/scoped_nodes.py|astroid/node_classes.py in autoflake # exclude when removing this file. warnings.warn( - "The 'astroid.node_classes' module is deprecated and will be replaced by 'astroid.nodes' in astroid 3.0.0", + "The 'astroid.node_classes' module is deprecated and will be replaced by " + "'astroid.nodes' in astroid 3.0.0", DeprecationWarning, ) diff --git a/astroid/nodes/node_classes.py b/astroid/nodes/node_classes.py index 0d23d209e0..2f515dbe90 100644 --- a/astroid/nodes/node_classes.py +++ b/astroid/nodes/node_classes.py @@ -784,7 +784,7 @@ def arguments(self): """Get all the arguments for this node, including positional only and positional and keyword""" return list(itertools.chain((self.posonlyargs or ()), self.args or ())) - def format_args(self): + def format_args(self, *, skippable_names: set[str] | None = None) -> str: """Get the arguments formatted as string. :returns: The formatted arguments. 
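The new keyword-only `skippable_names` parameter added to `format_args` (and threaded through `_format_args` below) lets callers omit chosen argument names, such as `self`, from the rendered signature. A hedged usage sketch, assuming this diff is applied; `extract_node` and `format_args` are existing astroid APIs, and the printed output is approximate:

from astroid import builder

func = builder.extract_node("def f(self, a: int = 1, *, b: str = 'x'): ...")
print(func.args.format_args())                          # self, a: int = 1, *, b: str = 'x'
print(func.args.format_args(skippable_names={"self"}))  # a: int = 1, *, b: str = 'x'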
@@ -804,6 +804,7 @@ def format_args(self): self.posonlyargs, positional_only_defaults, self.posonlyargs_annotations, + skippable_names=skippable_names, ) ) result.append("/") @@ -813,6 +814,7 @@ def format_args(self): self.args, positional_or_keyword_defaults, getattr(self, "annotations", None), + skippable_names=skippable_names, ) ) if self.vararg: @@ -822,13 +824,85 @@ def format_args(self): result.append("*") result.append( _format_args( - self.kwonlyargs, self.kw_defaults, self.kwonlyargs_annotations + self.kwonlyargs, + self.kw_defaults, + self.kwonlyargs_annotations, + skippable_names=skippable_names, ) ) if self.kwarg: result.append(f"**{self.kwarg}") return ", ".join(result) + def _get_arguments_data( + self, + ) -> tuple[ + dict[str, tuple[str | None, str | None]], + dict[str, tuple[str | None, str | None]], + ]: + """Get the arguments as dictionary with information about typing and defaults. + + The return tuple contains a dictionary for positional and keyword arguments with their typing + and their default value, if any. + The method follows a similar order as format_args but instead of formatting into a string it + returns the data that is used to do so. + """ + pos_only: dict[str, tuple[str | None, str | None]] = {} + kw_only: dict[str, tuple[str | None, str | None]] = {} + + # Setup and match defaults with arguments + positional_only_defaults = [] + positional_or_keyword_defaults = self.defaults + if self.defaults: + args = self.args or [] + positional_or_keyword_defaults = self.defaults[-len(args) :] + positional_only_defaults = self.defaults[: len(self.defaults) - len(args)] + + for index, posonly in enumerate(self.posonlyargs): + annotation, default = self.posonlyargs_annotations[index], None + if annotation is not None: + annotation = annotation.as_string() + if positional_only_defaults: + default = positional_only_defaults[index].as_string() + pos_only[posonly.name] = (annotation, default) + + for index, arg in enumerate(self.args): + annotation, default = self.annotations[index], None + if annotation is not None: + annotation = annotation.as_string() + if positional_or_keyword_defaults: + defaults_offset = len(self.args) - len(positional_or_keyword_defaults) + default_index = index - defaults_offset + if ( + default_index > -1 + and positional_or_keyword_defaults[default_index] is not None + ): + default = positional_or_keyword_defaults[default_index].as_string() + pos_only[arg.name] = (annotation, default) + + if self.vararg: + annotation = self.varargannotation + if annotation is not None: + annotation = annotation.as_string() + pos_only[self.vararg] = (annotation, None) + + for index, kwarg in enumerate(self.kwonlyargs): + annotation = self.kwonlyargs_annotations[index] + if annotation is not None: + annotation = annotation.as_string() + default = self.kw_defaults[index] + if default is not None: + default = default.as_string() + kw_only[kwarg.name] = (annotation, default) + + if self.kwarg: + annotation = self.kwargannotation + if annotation is not None: + annotation = annotation.as_string() + kw_only[self.kwarg] = (annotation, None) + + return pos_only, kw_only + def default_value(self, argname): """Get the default value for an argument. 
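For reference, the `_get_arguments_data` helper added above returns two dicts mapping argument names to `(annotation, default)` pairs, each rendered with `as_string()` or left as None. A sketch of the expected shape (this is a private helper, so treat the example as illustrative rather than stable API):

from astroid import builder

func = builder.extract_node("def f(a: int = 1, *, b=None): ...")
pos_or_kw, kw_only = func.args._get_arguments_data()
print(pos_or_kw)  # {'a': ('int', '1')}
print(kw_only)    # {'b': (None, 'None')}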
@@ -929,7 +1003,11 @@ def _find_arg(argname, args, rec=False): return None, None -def _format_args(args, defaults=None, annotations=None): +def _format_args( + args, defaults=None, annotations=None, skippable_names: set[str] | None = None +) -> str: + if skippable_names is None: + skippable_names = set() values = [] if args is None: return "" @@ -939,6 +1017,8 @@ def _format_args(args, defaults=None, annotations=None): default_offset = len(args) - len(defaults) packed = itertools.zip_longest(args, annotations) for i, (arg, annotation) in enumerate(packed): + if arg.name in skippable_names: + continue if isinstance(arg, Tuple): values.append(f"({_format_args(arg.elts)})") else: @@ -3727,7 +3807,7 @@ def _wrap_attribute(self, attr): return attr @cached_property - def _proxied(self): + def _proxied(self) -> nodes.ClassDef: builtins = AstroidManager().builtins_module return builtins.getattr("slice")[0] diff --git a/astroid/nodes/scoped_nodes/scoped_nodes.py b/astroid/nodes/scoped_nodes/scoped_nodes.py index 3fec274a9e..e3632d6d3e 100644 --- a/astroid/nodes/scoped_nodes/scoped_nodes.py +++ b/astroid/nodes/scoped_nodes/scoped_nodes.py @@ -46,6 +46,7 @@ from astroid.nodes.scoped_nodes.mixin import ComprehensionScope, LocalsDictNodeNG from astroid.nodes.scoped_nodes.utils import builtin_lookup from astroid.nodes.utils import Position +from astroid.typing import InferenceResult if sys.version_info >= (3, 8): from functools import cached_property @@ -1475,7 +1476,7 @@ def extra_decorators(self) -> list[node_classes.Call]: return decorators @cached_property - def type(self) -> str: # pylint: disable=too-many-return-statements + def type(self) -> str: # pylint: disable=too-many-return-statements # noqa: C901 """The function type for this node. Possible values are: method, function, staticmethod, classmethod. @@ -2525,7 +2526,12 @@ def instantiate_class(self) -> bases.Instance: pass return bases.Instance(self) - def getattr(self, name, context=None, class_context=True): + def getattr( + self, + name: str, + context: InferenceContext | None = None, + class_context: bool = True, + ) -> list[NodeNG]: """Get an attribute from this class, using Python's attribute semantic. This method doesn't look in the :attr:`instance_attrs` dictionary @@ -2541,13 +2547,10 @@ def getattr(self, name, context=None, class_context=True): metaclass will be done. :param name: The attribute to look for. - :type name: str :param class_context: Whether the attribute can be accessed statically. - :type class_context: bool :returns: The attribute. - :rtype: list(NodeNG) :raises AttributeInferenceError: If the attribute cannot be inferred. """ @@ -2570,17 +2573,16 @@ def getattr(self, name, context=None, class_context=True): if class_context: values += self._metaclass_lookup_attribute(name, context) - if not values: - raise AttributeInferenceError(target=self, attribute=name, context=context) - - # Look for AnnAssigns, which are not attributes in the purest sense. - for value in values: + # Remove AnnAssigns without value, which are not attributes in the purest sense. 
+ for value in values.copy(): if isinstance(value, node_classes.AssignName): stmt = value.statement(future=True) if isinstance(stmt, node_classes.AnnAssign) and stmt.value is None: - raise AttributeInferenceError( - target=self, attribute=name, context=context - ) + values.pop(values.index(value)) + + if not values: + raise AttributeInferenceError(target=self, attribute=name, context=context) + return values def _metaclass_lookup_attribute(self, name, context): @@ -2622,14 +2624,17 @@ def _get_attribute_from_metaclass(self, cls, name, context): else: yield bases.BoundMethod(attr, self) - def igetattr(self, name, context=None, class_context=True): + def igetattr( + self, + name: str, + context: InferenceContext | None = None, + class_context: bool = True, + ) -> Iterator[InferenceResult]: """Infer the possible values of the given variable. :param name: The name of the variable to infer. - :type name: str :returns: The inferred possible values. - :rtype: iterable(NodeNG or Uninferable) """ # set lookup name since this is necessary to infer on import nodes for # instance @@ -2811,7 +2816,9 @@ def implicit_metaclass(self): return builtin_lookup("type")[1][0] return None - def declared_metaclass(self, context=None): + def declared_metaclass( + self, context: InferenceContext | None = None + ) -> NodeNG | None: """Return the explicit declared metaclass for the current class. An explicit declared metaclass is defined @@ -2822,7 +2829,6 @@ def declared_metaclass(self, context=None): :returns: The metaclass of this class, or None if one could not be found. - :rtype: NodeNG or None """ for base in self.bases: try: @@ -2847,7 +2853,9 @@ def declared_metaclass(self, context=None): return None - def _find_metaclass(self, seen=None, context=None): + def _find_metaclass( + self, seen: set[ClassDef] | None = None, context: InferenceContext | None = None + ) -> NodeNG | None: if seen is None: seen = set() seen.add(self) @@ -2861,7 +2869,7 @@ def _find_metaclass(self, seen=None, context=None): break return klass - def metaclass(self, context=None): + def metaclass(self, context: InferenceContext | None = None) -> NodeNG | None: """Get the metaclass of this class. If this class does not define explicitly a metaclass, @@ -2869,7 +2877,6 @@ def metaclass(self, context=None): instead. :returns: The metaclass of this class. 
- :rtype: NodeNG or None """ return self._find_metaclass(context=context) diff --git a/astroid/objects.py b/astroid/objects.py index 1649674f8e..a1d886bb1f 100644 --- a/astroid/objects.py +++ b/astroid/objects.py @@ -138,7 +138,9 @@ def name(self): def qname(self) -> Literal["super"]: return "super" - def igetattr(self, name: str, context: InferenceContext | None = None): + def igetattr( # noqa: C901 + self, name: str, context: InferenceContext | None = None + ): """Retrieve the inferred values of the given attribute name.""" # '__class__' is a special attribute that should be taken directly # from the special attributes dict diff --git a/astroid/protocols.py b/astroid/protocols.py index 0d90d90bc3..1b2bf73de0 100644 --- a/astroid/protocols.py +++ b/astroid/protocols.py @@ -672,7 +672,7 @@ def named_expr_assigned_stmts( @decorators.yes_if_nothing_inferred -def starred_assigned_stmts( +def starred_assigned_stmts( # noqa: C901 self: nodes.Starred, node: node_classes.AssignedStmtsPossibleNode = None, context: InferenceContext | None = None, @@ -823,7 +823,6 @@ def _determine_starred_iteration_lookups( last_lookup = lookup_slice for element in itered: - # We probably want to infer the potential values *for each* element in an # iterable, but we can't infer a list of all values, when only a list of # step values are expected: diff --git a/astroid/raw_building.py b/astroid/raw_building.py index 8cff41d33d..212939c226 100644 --- a/astroid/raw_building.py +++ b/astroid/raw_building.py @@ -338,7 +338,7 @@ class InspectBuilder: FunctionDef and ClassDef nodes and some others as guessed. """ - def __init__(self, manager_instance=None): + def __init__(self, manager_instance: AstroidManager | None = None) -> None: self._manager = manager_instance or AstroidManager() self._done: dict[types.ModuleType | type, nodes.Module | nodes.ClassDef] = {} self._module: types.ModuleType @@ -486,7 +486,7 @@ def _set_proxied(const): return _CONST_PROXY[const.value.__class__] -def _astroid_bootstrapping(): +def _astroid_bootstrapping() -> None: """astroid bootstrapping the builtins module""" # this boot strapping is necessary since we need the Const nodes to # inspect_build builtins, and then we can proxy Const diff --git a/astroid/rebuilder.py b/astroid/rebuilder.py index a658971352..2c868fd076 100644 --- a/astroid/rebuilder.py +++ b/astroid/rebuilder.py @@ -259,7 +259,7 @@ def visit_module( self._reset_end_lineno(newnode) return newnode - if TYPE_CHECKING: + if TYPE_CHECKING: # noqa: C901 @overload def visit(self, node: ast.arg, parent: NodeNG) -> nodes.AssignName: @@ -734,6 +734,11 @@ def check_type_comment( # Invalid type comment, just skip it. return None + # For '# type: # any comment' ast.parse returns a Module node, + # without any nodes in the body. + if not type_comment_ast.body: + return None + type_object = self.visit(type_comment_ast.body[0], parent=parent) if not isinstance(type_object, nodes.Expr): return None @@ -1327,7 +1332,8 @@ def visit_attribute( ) # Prohibit a local save if we are in an ExceptHandler. 
if not isinstance(parent, nodes.ExceptHandler): - # mypy doesn't recognize that newnode has to be AssignAttr because it doesn't support ParamSpec + # mypy doesn't recognize that newnode has to be AssignAttr because it + # doesn't support ParamSpec # See https://github.com/python/mypy/issues/8645 self._delayed_assattr.append(newnode) # type: ignore[arg-type] else: diff --git a/astroid/scoped_nodes.py b/astroid/scoped_nodes.py index 677f892578..1e3fbf31e1 100644 --- a/astroid/scoped_nodes.py +++ b/astroid/scoped_nodes.py @@ -28,6 +28,7 @@ # Please remove astroid/scoped_nodes.py|astroid/node_classes.py in autoflake # exclude when removing this file. warnings.warn( - "The 'astroid.scoped_nodes' module is deprecated and will be replaced by 'astroid.nodes' in astroid 3.0.0", + "The 'astroid.scoped_nodes' module is deprecated and will be replaced by " + "'astroid.nodes' in astroid 3.0.0", DeprecationWarning, ) diff --git a/astroid/typing.py b/astroid/typing.py index 6fd553ab04..e42cd716b1 100644 --- a/astroid/typing.py +++ b/astroid/typing.py @@ -8,8 +8,9 @@ from typing import TYPE_CHECKING, Any, Callable, Union if TYPE_CHECKING: - from astroid import bases, nodes, transforms, util + from astroid import bases, exceptions, nodes, transforms, util from astroid.context import InferenceContext + from astroid.interpreter._import import spec if sys.version_info >= (3, 8): from typing import TypedDict @@ -33,11 +34,13 @@ class AstroidManagerBrain(TypedDict): """Dictionary to store relevant information for a AstroidManager class.""" astroid_cache: dict[str, nodes.Module] - _mod_file_cache: dict - _failed_import_hooks: list + _mod_file_cache: dict[ + tuple[str, str | None], spec.ModuleSpec | exceptions.AstroidImportError + ] + _failed_import_hooks: list[Callable[[str], nodes.Module]] always_load_extensions: bool optimize_ast: bool - extension_package_whitelist: set + extension_package_whitelist: set[str] _transform: transforms.TransformVisitor diff --git a/doc/requirements.txt b/doc/requirements.txt index 90795c2603..3033b17ba7 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,2 +1,2 @@ -e . 
-sphinx~=5.1 +sphinx~=5.3 diff --git a/pylintrc b/pylintrc index ace5fb5728..0cc82e7a3d 100644 --- a/pylintrc +++ b/pylintrc @@ -22,7 +22,6 @@ load-plugins= pylint.extensions.code_style, pylint.extensions.overlapping_exceptions, pylint.extensions.typing, - pylint.extensions.code_style, pylint.extensions.set_membership, pylint.extensions.redefined_variable_type, pylint.extensions.for_any_all, diff --git a/pyproject.toml b/pyproject.toml index cc724ed419..66a52f5c7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,3 +54,41 @@ include = ["astroid*"] [tool.setuptools.dynamic] version = {attr = "astroid.__pkginfo__.__version__"} + +[tool.aliases] +test = "pytest" + +[tool.pytest.ini_options] +addopts = '-m "not acceptance"' +python_files = ["*test_*.py"] +testpaths = ["tests"] + +[tool.isort] +include_trailing_comma = true +known_first_party = ["astroid"] +known_third_party = ["attr", "nose", "numpy", "pytest", "six", "sphinx"] +line_length = 88 +multi_line_output = 3 +skip_glob = ["tests/testdata"] + +[tool.mypy] +enable_error_code = "ignore-without-code" +no_implicit_optional = true +scripts_are_modules = true +show_error_codes = true +warn_redundant_casts = true + +[[tool.mypy.overrides]] +# Importlib typeshed stubs do not include the private functions we use +module = [ + "_io.*", + "gi.*", + "importlib.*", + "lazy_object_proxy.*", + "nose.*", + "numpy.*", + "pytest", + "setuptools", + "wrapt.*", +] +ignore_missing_imports = true diff --git a/requirements_test.txt b/requirements_test.txt index 4cd6433e0e..ec539a07ec 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -2,9 +2,9 @@ -r requirements_test_pre_commit.txt contributors-txt>=0.7.4 coveralls~=3.3 -coverage~=6.4 +coverage~=6.5 pre-commit~=2.20 -pytest-cov~=3.0 +pytest-cov~=4.0 tbump~=6.9.0 types-typed-ast; implementation_name=="cpython" and python_version<"3.8" types-pkg_resources==0.1.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 8aef8ec803..333de33115 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,6 +1,6 @@ -black==22.6.0 -pylint==2.14.5 +black==22.10.0 +pylint==2.15.4 isort==5.10.1 flake8==5.0.4 flake8-typing-imports==1.13.0 -mypy==0.971 +mypy==0.982 diff --git a/script/.contributors_aliases.json b/script/.contributors_aliases.json index 7076260223..061e77e5de 100644 --- a/script/.contributors_aliases.json +++ b/script/.contributors_aliases.json @@ -13,6 +13,11 @@ "name": "Marc Mueller", "team": "Maintainers" }, + "31762852+mbyrnepr2@users.noreply.github.com": { + "mails": ["31762852+mbyrnepr2@users.noreply.github.com", "mbyrnepr2@gmail.com"], + "name": "Mark Byrne", + "team": "Maintainers" + }, "adam.grant.hendry@gmail.com": { "mails": ["adam.grant.hendry@gmail.com"], "name": "Adam Hendry" diff --git a/setup.cfg b/setup.cfg index f5dca3637a..c6be9f8f4e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,69 +8,14 @@ license_files = LICENSE CONTRIBUTORS.txt -[aliases] -test = pytest - -[tool:pytest] -testpaths = tests -python_files = *test_*.py -addopts = -m "not acceptance" - -[isort] -multi_line_output = 3 -line_length = 88 -known_third_party = sphinx, pytest, six, nose, numpy, attr -known_first_party = astroid -include_trailing_comma = True -skip_glob = tests/testdata - [flake8] extend-ignore = - C901, # Function complexity checker F401, # Unused imports E203, # Incompatible with black see https://github.com/psf/black/issues/315 W503, # Incompatible with black - E501, # Line too long - B950, # Line too long B901 # Combine yield 
and return statements in one function -max-line-length=88 -max-complexity = 20 +max-line-length = 110 select = B,C,E,F,W,T4,B9 # Required for flake8-typing-imports (v1.12.0) # The plugin doesn't yet read the value from pyproject.toml min_python_version = 3.7.2 - -[mypy] -scripts_are_modules = True -no_implicit_optional = True -warn_redundant_casts = True -show_error_codes = True -enable_error_code = ignore-without-code - -[mypy-setuptools] -ignore_missing_imports = True - -[mypy-pytest] -ignore_missing_imports = True - -[mypy-nose.*] -ignore_missing_imports = True - -[mypy-numpy.*] -ignore_missing_imports = True - -[mypy-_io.*] -ignore_missing_imports = True - -[mypy-wrapt.*] -ignore_missing_imports = True - -[mypy-lazy_object_proxy.*] -ignore_missing_imports = True - -[mypy-gi.*] -ignore_missing_imports = True - -[mypy-importlib.*] -# Importlib typeshed stubs do not include the private functions we use -ignore_missing_imports = True diff --git a/tests/unittest_brain.py b/tests/unittest_brain.py index 149100eb50..114751c8d9 100644 --- a/tests/unittest_brain.py +++ b/tests/unittest_brain.py @@ -18,6 +18,7 @@ import astroid from astroid import MANAGER, bases, builder, nodes, objects, test_utils, util from astroid.bases import Instance +from astroid.brain.brain_namedtuple_enum import _get_namedtuple_fields from astroid.const import PY39_PLUS from astroid.exceptions import ( AttributeInferenceError, @@ -82,24 +83,44 @@ def _assert_hashlib_class(self, class_obj: ClassDef) -> None: len(class_obj["__init__"].args.defaults), 2 if PY39_PLUS else 1 ) self.assertEqual(len(class_obj["update"].args.args), 2) - self.assertEqual(len(class_obj["digest"].args.args), 1) - self.assertEqual(len(class_obj["hexdigest"].args.args), 1) def test_hashlib(self) -> None: """Tests that brain extensions for hashlib work.""" hashlib_module = MANAGER.ast_from_module_name("hashlib") - for class_name in ("md5", "sha1"): + for class_name in ( + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "sha3_224", + "sha3_256", + "sha3_384", + "sha3_512", + ): class_obj = hashlib_module[class_name] self._assert_hashlib_class(class_obj) + self.assertEqual(len(class_obj["digest"].args.args), 1) + self.assertEqual(len(class_obj["hexdigest"].args.args), 1) - def test_hashlib_py36(self) -> None: + def test_shake(self) -> None: + """Tests that the brain extensions for the hashlib shake algorithms work.""" hashlib_module = MANAGER.ast_from_module_name("hashlib") - for class_name in ("sha3_224", "sha3_512", "shake_128"): + for class_name in ("shake_128", "shake_256"): class_obj = hashlib_module[class_name] self._assert_hashlib_class(class_obj) + self.assertEqual(len(class_obj["digest"].args.args), 2) + self.assertEqual(len(class_obj["hexdigest"].args.args), 2) + + def test_blake2(self) -> None: + """Tests that the brain extensions for the hashlib blake2 hash functions work.""" + hashlib_module = MANAGER.ast_from_module_name("hashlib") for class_name in ("blake2b", "blake2s"): class_obj = hashlib_module[class_name] self.assertEqual(len(class_obj["__init__"].args.args), 2) + self.assertEqual(len(class_obj["digest"].args.args), 1) + self.assertEqual(len(class_obj["hexdigest"].args.args), 1) class CollectionsDequeTests(unittest.TestCase): @@ -433,6 +454,23 @@ def __str__(self): inferred = next(node.infer()) self.assertIs(util.Uninferable, inferred) + def test_name_as_typename(self) -> None: + """Reported in https://github.com/PyCQA/pylint/issues/7429 as a crash.""" + good_node, good_node_two, bad_node = builder.extract_node( + 
""" + import collections + collections.namedtuple(typename="MyTuple", field_names=["birth_date", "city"]) #@ + collections.namedtuple("MyTuple", field_names=["birth_date", "city"]) #@ + collections.namedtuple(["birth_date", "city"], typename="MyTuple") #@ + """ + ) + good_inferred = next(good_node.infer()) + assert isinstance(good_inferred, nodes.ClassDef) + good_node_two_inferred = next(good_node_two.infer()) + assert isinstance(good_node_two_inferred, nodes.ClassDef) + bad_node_inferred = next(bad_node.infer()) + assert bad_node_inferred == util.Uninferable + class DefaultDictTest(unittest.TestCase): def test_1(self) -> None: @@ -1455,7 +1493,8 @@ def test_collections_object_not_yet_subscriptable_2(self): @test_utils.require_version(minver="3.9") def test_collections_object_subscriptable_3(self): - """With python39 ByteString class of the colletions module is subscritable (but not the same class from typing module)""" + """With Python 3.9 the ByteString class of the collections module is subscritable + (but not the same class from typing module)""" right_node = builder.extract_node( """ import collections.abc @@ -1644,6 +1683,25 @@ def NamedTuple(): ) next(node.infer()) + def test_namedtuple_uninferable_member(self) -> None: + call = builder.extract_node( + """ + from typing import namedtuple + namedtuple('uninf', {x: x for x in range(0)}) #@""" + ) + with pytest.raises(UseInferenceDefault): + _get_namedtuple_fields(call) + + call = builder.extract_node( + """ + from typing import namedtuple + uninferable = {x: x for x in range(0)} + namedtuple('uninferable', uninferable) #@ + """ + ) + with pytest.raises(UseInferenceDefault): + _get_namedtuple_fields(call) + def test_typing_types(self) -> None: ast_nodes = builder.extract_node( """ @@ -1947,7 +2005,8 @@ class Derived(typing.Hashable, typing.Iterator[int]): ) def test_typing_object_notsubscriptable_3(self): - """Until python39 ByteString class of the typing module is not subscritable (whereas it is in the collections module)""" + """Until python39 ByteString class of the typing module is not + subscriptable (whereas it is in the collections' module)""" right_node = builder.extract_node( """ import typing diff --git a/tests/unittest_brain_ctypes.py b/tests/unittest_brain_ctypes.py index cae95409f5..dbcf54d9b1 100644 --- a/tests/unittest_brain_ctypes.py +++ b/tests/unittest_brain_ctypes.py @@ -10,7 +10,8 @@ pytestmark = pytest.mark.skipif( hasattr(sys, "pypy_version_info"), - reason="pypy has its own implementation of _ctypes module which is different from the one of cpython", + reason="pypy has its own implementation of _ctypes module which is different " + "from the one of cpython", ) @@ -83,8 +84,9 @@ def test_ctypes_redefined_types_members(c_type, builtin_type, type_code): def test_cdata_member_access() -> None: """ - Test that the base members are still accessible. Each redefined ctypes type inherits from _SimpleCData which itself - inherits from _CData. Checks that _CData members are accessibles + Test that the base members are still accessible. Each redefined ctypes type + inherits from _SimpleCData which itself inherits from _CData. Checks that + _CData members are accessible. 
""" src = """ import ctypes diff --git a/tests/unittest_brain_dataclasses.py b/tests/unittest_brain_dataclasses.py index 406c755775..a65a8dec0e 100644 --- a/tests/unittest_brain_dataclasses.py +++ b/tests/unittest_brain_dataclasses.py @@ -625,12 +625,12 @@ class B(A): """ ) init = next(node.infer()) - assert [a.name for a in init.args.args] == ["self", "arg0", "arg2", "arg1"] + assert [a.name for a in init.args.args] == ["self", "arg0", "arg1", "arg2"] assert [a.as_string() if a else None for a in init.args.annotations] == [ None, "float", - "list", # not str "int", + "list", # not str ] @@ -747,3 +747,370 @@ class B: init = next(node_two.infer()) assert [a.name for a in init.args.args] == expected + + +def test_kw_only_decorator() -> None: + """Test that we update the signature correctly based on the keyword. + + kw_only was introduced in PY310. + """ + foodef, bardef, cee, dee = astroid.extract_node( + """ + from dataclasses import dataclass + + @dataclass(kw_only=True) + class Foo: + a: int + e: str + + + @dataclass(kw_only=False) + class Bar(Foo): + c: int + + + @dataclass(kw_only=False) + class Cee(Bar): + d: int + + + @dataclass(kw_only=True) + class Dee(Cee): + ee: int + + + Foo.__init__ #@ + Bar.__init__ #@ + Cee.__init__ #@ + Dee.__init__ #@ + """ + ) + + foo_init: bases.UnboundMethod = next(foodef.infer()) + if PY310_PLUS: + assert [a.name for a in foo_init.args.args] == ["self"] + assert [a.name for a in foo_init.args.kwonlyargs] == ["a", "e"] + else: + assert [a.name for a in foo_init.args.args] == ["self", "a", "e"] + assert [a.name for a in foo_init.args.kwonlyargs] == [] + + bar_init: bases.UnboundMethod = next(bardef.infer()) + if PY310_PLUS: + assert [a.name for a in bar_init.args.args] == ["self", "c"] + assert [a.name for a in bar_init.args.kwonlyargs] == ["a", "e"] + else: + assert [a.name for a in bar_init.args.args] == ["self", "a", "e", "c"] + assert [a.name for a in bar_init.args.kwonlyargs] == [] + + cee_init: bases.UnboundMethod = next(cee.infer()) + if PY310_PLUS: + assert [a.name for a in cee_init.args.args] == ["self", "c", "d"] + assert [a.name for a in cee_init.args.kwonlyargs] == ["a", "e"] + else: + assert [a.name for a in cee_init.args.args] == ["self", "a", "e", "c", "d"] + assert [a.name for a in cee_init.args.kwonlyargs] == [] + + dee_init: bases.UnboundMethod = next(dee.infer()) + if PY310_PLUS: + assert [a.name for a in dee_init.args.args] == ["self", "c", "d"] + assert [a.name for a in dee_init.args.kwonlyargs] == ["a", "e", "ee"] + else: + assert [a.name for a in dee_init.args.args] == [ + "self", + "a", + "e", + "c", + "d", + "ee", + ] + assert [a.name for a in dee_init.args.kwonlyargs] == [] + + +def test_dataclass_with_unknown_base() -> None: + """Regression test for dataclasses with unknown base classes. + + Reported in https://github.com/PyCQA/pylint/issues/7418 + """ + node = astroid.extract_node( + """ + import dataclasses + + from unknown import Unknown + + + @dataclasses.dataclass + class MyDataclass(Unknown): + pass + + MyDataclass() + """ + ) + + assert next(node.infer()) + + +def test_dataclass_with_unknown_typing() -> None: + """Regression test for dataclasses with unknown base classes. 
+ + Reported in https://github.com/PyCQA/pylint/issues/7422 + """ + node = astroid.extract_node( + """ + from dataclasses import dataclass, InitVar + + + @dataclass + class TestClass: + '''Test Class''' + + config: InitVar = None + + TestClass.__init__ #@ + """ + ) + + init_def: bases.UnboundMethod = next(node.infer()) + assert [a.name for a in init_def.args.args] == ["self", "config"] + + +def test_dataclass_with_default_factory() -> None: + """Regression test for dataclasses with default values. + + Reported in https://github.com/PyCQA/pylint/issues/7425 + """ + bad_node, good_node = astroid.extract_node( + """ + from dataclasses import dataclass + from typing import Union + + @dataclass + class BadExampleParentClass: + xyz: Union[str, int] + + @dataclass + class BadExampleClass(BadExampleParentClass): + xyz: str = "" + + BadExampleClass.__init__ #@ + + @dataclass + class GoodExampleParentClass: + xyz: str + + @dataclass + class GoodExampleClass(GoodExampleParentClass): + xyz: str = "" + + GoodExampleClass.__init__ #@ + """ + ) + + bad_init: bases.UnboundMethod = next(bad_node.infer()) + assert bad_init.args.defaults + assert [a.name for a in bad_init.args.args] == ["self", "xyz"] + + good_init: bases.UnboundMethod = next(good_node.infer()) + assert good_init.args.defaults + assert [a.name for a in good_init.args.args] == ["self", "xyz"] + + +def test_dataclass_with_multiple_inheritance() -> None: + """Regression test for dataclasses with multiple inheritance. + + Reported in https://github.com/PyCQA/pylint/issues/7427 + Reported in https://github.com/PyCQA/pylint/issues/7434 + """ + first, second, overwritten, overwriting, mixed = astroid.extract_node( + """ + from dataclasses import dataclass + + @dataclass + class BaseParent: + _abc: int = 1 + + @dataclass + class AnotherParent: + ef: int = 2 + + @dataclass + class FirstChild(BaseParent, AnotherParent): + ghi: int = 3 + + @dataclass + class ConvolutedParent(AnotherParent): + '''Convoluted Parent''' + + @dataclass + class SecondChild(BaseParent, ConvolutedParent): + jkl: int = 4 + + @dataclass + class OverwritingParent: + ef: str = "2" + + @dataclass + class OverwrittenChild(OverwritingParent, AnotherParent): + '''Overwritten Child''' + + @dataclass + class OverwritingChild(BaseParent, AnotherParent): + _abc: float = 1.0 + ef: float = 2.0 + + class NotADataclassParent: + ef: int = 2 + + @dataclass + class ChildWithMixedParents(BaseParent, NotADataclassParent): + ghi: int = 3 + + FirstChild.__init__ #@ + SecondChild.__init__ #@ + OverwrittenChild.__init__ #@ + OverwritingChild.__init__ #@ + ChildWithMixedParents.__init__ #@ + """ + ) + + first_init: bases.UnboundMethod = next(first.infer()) + assert [a.name for a in first_init.args.args] == ["self", "ef", "_abc", "ghi"] + assert [a.value for a in first_init.args.defaults] == [2, 1, 3] + + second_init: bases.UnboundMethod = next(second.infer()) + assert [a.name for a in second_init.args.args] == ["self", "ef", "_abc", "jkl"] + assert [a.value for a in second_init.args.defaults] == [2, 1, 4] + + overwritten_init: bases.UnboundMethod = next(overwritten.infer()) + assert [a.name for a in overwritten_init.args.args] == ["self", "ef"] + assert [a.value for a in overwritten_init.args.defaults] == ["2"] + + overwriting_init: bases.UnboundMethod = next(overwriting.infer()) + assert [a.name for a in overwriting_init.args.args] == ["self", "_abc", "ef"] + assert [a.value for a in overwriting_init.args.defaults] == [1.0, 2.0] + + mixed_init: bases.UnboundMethod = next(mixed.infer()) + assert [a.name
for a in mixed_init.args.args] == ["self", "_abc", "ghi"] + assert [a.value for a in mixed_init.args.defaults] == [1, 3] + + first = astroid.extract_node( + """ + from dataclasses import dataclass + + @dataclass + class BaseParent: + required: bool + + @dataclass + class FirstChild(BaseParent): + ... + + @dataclass + class SecondChild(BaseParent): + optional: bool = False + + @dataclass + class GrandChild(FirstChild, SecondChild): + ... + + GrandChild.__init__ #@ + """ + ) + + first_init: bases.UnboundMethod = next(first.infer()) + assert [a.name for a in first_init.args.args] == ["self", "required", "optional"] + assert [a.value for a in first_init.args.defaults] == [False] + + +@pytest.mark.xfail(reason="Transforms returning Uninferable isn't supported.") +def test_dataclass_non_default_argument_after_default() -> None: + """Test that a non-default argument after a default argument is not allowed. + + This should succeed, but the dataclass brain is a transform + which currently can't return an Uninferable correctly. Therefore, we can't + set the dataclass ClassDef node to be Uninferable currently. + Eventually it can be merged into test_dataclass_with_multiple_inheritance. + """ + + impossible = astroid.extract_node( + """ + from dataclasses import dataclass + + @dataclass + class BaseParent: + required: bool + + @dataclass + class FirstChild(BaseParent): + ... + + @dataclass + class SecondChild(BaseParent): + optional: bool = False + + @dataclass + class ThirdChild: + other: bool = False + + @dataclass + class ImpossibleGrandChild(FirstChild, SecondChild, ThirdChild): + ... + + ImpossibleGrandChild() #@ + """ + ) + + assert next(impossible.infer()) is Uninferable + + +def test_dataclass_inits_of_non_dataclasses() -> None: + """Regression test for __init__ mangling for non dataclasses. 
+ + Regression test against changes tested in test_dataclass_with_multiple_inheritance + """ + first, second, third = astroid.extract_node( + """ + from dataclasses import dataclass + + @dataclass + class DataclassParent: + _abc: int = 1 + + + class NotADataclassParent: + ef: int = 2 + + + class FirstChild(DataclassParent, NotADataclassParent): + ghi: int = 3 + + + class SecondChild(DataclassParent, NotADataclassParent): + ghi: int = 3 + + def __init__(self, ef: int = 3): + self.ef = ef + + + class ThirdChild(NotADataclassParent, DataclassParent): + ghi: int = 3 + + def __init__(self, ef: int = 3): + self.ef = ef + + FirstChild.__init__ #@ + SecondChild.__init__ #@ + ThirdChild.__init__ #@ + """ + ) + + first_init: bases.UnboundMethod = next(first.infer()) + assert [a.name for a in first_init.args.args] == ["self", "_abc"] + assert [a.value for a in first_init.args.defaults] == [1] + + second_init: bases.UnboundMethod = next(second.infer()) + assert [a.name for a in second_init.args.args] == ["self", "ef"] + assert [a.value for a in second_init.args.defaults] == [3] + + third_init: bases.UnboundMethod = next(third.infer()) + assert [a.name for a in third_init.args.args] == ["self", "ef"] + assert [a.value for a in third_init.args.defaults] == [3] diff --git a/tests/unittest_builder.py b/tests/unittest_builder.py index 61ef1bba3a..cc1cb28bfd 100644 --- a/tests/unittest_builder.py +++ b/tests/unittest_builder.py @@ -737,6 +737,14 @@ def test_not_implemented(self) -> None: self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, NotImplemented) + def test_type_comments_without_content(self) -> None: + node = builder.parse( + """ + a = 1 # type: # any comment + """ + ) + assert node + class FileBuildTest(unittest.TestCase): def setUp(self) -> None: diff --git a/tests/unittest_inference.py b/tests/unittest_inference.py index 767d2190e8..8ca10ca60f 100644 --- a/tests/unittest_inference.py +++ b/tests/unittest_inference.py @@ -3374,7 +3374,6 @@ def __radd__(self, other): return NotImplemented self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, "B") - @pytest.mark.xfail(reason="String interpolation is incorrect for modulo formatting") def test_string_interpolation(self): ast_nodes = extract_node( """ diff --git a/tests/unittest_manager.py b/tests/unittest_manager.py index 5bce29041e..2266e32ab0 100644 --- a/tests/unittest_manager.py +++ b/tests/unittest_manager.py @@ -9,13 +9,16 @@ import unittest from collections.abc import Iterator from contextlib import contextmanager +from unittest import mock + +import pytest import astroid from astroid import manager, test_utils -from astroid.const import IS_JYTHON +from astroid.const import IS_JYTHON, IS_PYPY from astroid.exceptions import AstroidBuildingError, AstroidImportError from astroid.interpreter._import import util -from astroid.modutils import is_standard_module +from astroid.modutils import EXT_LIB_DIRS, is_standard_module from astroid.nodes import Const from astroid.nodes.scoped_nodes import ClassDef @@ -128,6 +131,10 @@ def test_submodule_homonym_with_non_module(self) -> None: def test_module_is_not_namespace(self) -> None: self.assertFalse(util.is_namespace("tests.testdata.python3.data.all")) self.assertFalse(util.is_namespace("__main__")) + self.assertFalse( + util.is_namespace(list(EXT_LIB_DIRS)[0].rsplit("/", maxsplit=1)[-1]), + ) + self.assertFalse(util.is_namespace("importlib._bootstrap")) def test_module_unexpectedly_missing_spec(self) -> None: astroid_module = sys.modules["astroid"] @@ -138,6 
+145,23 @@ def test_module_unexpectedly_missing_spec(self) -> None: finally: astroid_module.__spec__ = original_spec + @mock.patch( + "astroid.interpreter._import.util._find_spec_from_path", + side_effect=AttributeError, + ) + def test_module_unexpectedly_missing_path(self, mocked) -> None: + """https://github.com/PyCQA/pylint/issues/7592""" + self.assertFalse(util.is_namespace("astroid")) + + def test_module_unexpectedly_spec_is_none(self) -> None: + astroid_module = sys.modules["astroid"] + original_spec = astroid_module.__spec__ + astroid_module.__spec__ = None + try: + self.assertFalse(util.is_namespace("astroid")) + finally: + astroid_module.__spec__ = original_spec + def test_implicit_namespace_package(self) -> None: data_dir = os.path.dirname(resources.find("data/namespace_pep_420")) contribute = os.path.join(data_dir, "contribute_to_namespace") @@ -155,6 +179,10 @@ def test_implicit_namespace_package(self) -> None: for _ in range(2): sys.path.pop(0) + @pytest.mark.skipif( + IS_PYPY, + reason="PyPy provides no way to tell apart frozen stdlib from old-style namespace packages", + ) def test_namespace_package_pth_support(self) -> None: pth = "foogle_fax-0.12.5-py2.7-nspkg.pth" site.addpackage(resources.RESOURCE_PATH, pth, []) @@ -169,6 +197,10 @@ def test_namespace_package_pth_support(self) -> None: finally: sys.modules.pop("foogle") + @pytest.mark.skipif( + IS_PYPY, + reason="PyPy provides no way to tell apart frozen stdlib from old-style namespace packages", + ) def test_nested_namespace_import(self) -> None: pth = "foogle_fax-0.12.5-py2.7-nspkg.pth" site.addpackage(resources.RESOURCE_PATH, pth, []) @@ -342,6 +374,10 @@ def test_same_name_import_module(self) -> None: stdlib_math = next(module.body[1].value.args[0].infer()) assert self.manager.astroid_cache["math"] != stdlib_math + def test_raises_exception_for_empty_modname(self) -> None: + with pytest.raises(AstroidBuildingError): + self.manager.ast_from_module_name(None) + class BorgAstroidManagerTC(unittest.TestCase): def test_borg(self) -> None: diff --git a/tests/unittest_modutils.py b/tests/unittest_modutils.py index 4687925c69..69403a72c6 100644 --- a/tests/unittest_modutils.py +++ b/tests/unittest_modutils.py @@ -411,7 +411,6 @@ def test_load_module_set_attribute(self) -> None: class ExtensionPackageWhitelistTest(unittest.TestCase): def test_is_module_name_part_of_extension_package_whitelist_true(self) -> None: - """Test that the is_module_name_part_of_extension_package_whitelist function returns True when needed""" self.assertTrue( modutils.is_module_name_part_of_extension_package_whitelist( "numpy", {"numpy"} @@ -429,7 +428,6 @@ def test_is_module_name_part_of_extension_package_whitelist_true(self) -> None: ) def test_is_module_name_part_of_extension_package_whitelist_success(self) -> None: - """Test that the is_module_name_part_of_extension_package_whitelist function returns False when needed""" self.assertFalse( modutils.is_module_name_part_of_extension_package_whitelist( "numpy", {"numpy.core"} diff --git a/tests/unittest_nodes_lineno.py b/tests/unittest_nodes_lineno.py index c1c089ac07..2cc8094d94 100644 --- a/tests/unittest_nodes_lineno.py +++ b/tests/unittest_nodes_lineno.py @@ -661,9 +661,10 @@ async def func(): #@ assert isinstance(f1.args.kwonlyargs[0], nodes.AssignName) assert (f1.args.kwonlyargs[0].lineno, f1.args.kwonlyargs[0].col_offset) == (4, 4) assert (f1.args.kwonlyargs[0].end_lineno, f1.args.kwonlyargs[0].end_col_offset) == (4, 16) - assert isinstance(f1.args.kwonlyargs_annotations[0], nodes.Name) - 
assert (f1.args.kwonlyargs_annotations[0].lineno, f1.args.kwonlyargs_annotations[0].col_offset) == (4, 13) - assert (f1.args.kwonlyargs_annotations[0].end_lineno, f1.args.kwonlyargs_annotations[0].end_col_offset) == (4, 16) + annotations = f1.args.kwonlyargs_annotations + assert isinstance(annotations[0], nodes.Name) + assert (annotations[0].lineno, annotations[0].col_offset) == (4, 13) + assert (annotations[0].end_lineno, annotations[0].end_col_offset) == (4, 16) assert isinstance(f1.args.kw_defaults[0], nodes.Const) assert (f1.args.kw_defaults[0].lineno, f1.args.kw_defaults[0].col_offset) == (4, 19) assert (f1.args.kw_defaults[0].end_lineno, f1.args.kw_defaults[0].end_col_offset) == (4, 20) diff --git a/tests/unittest_object_model.py b/tests/unittest_object_model.py index 3dbe5026b9..9d412b7865 100644 --- a/tests/unittest_object_model.py +++ b/tests/unittest_object_model.py @@ -571,6 +571,45 @@ def test(a: 1, b: 2, /, c: 3): pass self.assertEqual(annotations.getitem(astroid.Const("c")).value, 3) +class TestContextManagerModel: + def test_model(self) -> None: + """We use a generator to test this model.""" + ast_nodes = builder.extract_node( + """ + def test(): + "a" + yield + + gen = test() + gen.__enter__ #@ + gen.__exit__ #@ + """ + ) + assert isinstance(ast_nodes, list) + + enter = next(ast_nodes[0].infer()) + assert isinstance(enter, astroid.BoundMethod) + # Test that the method is correctly bound + assert isinstance(enter.bound, bases.Generator) + assert enter.bound._proxied.qname() == "builtins.generator" + # Test that the FunctionDef accepts no arguments except self + # NOTE: This probably shouldn't be double proxied, but this is a + # quirk of the current model implementations. + assert isinstance(enter._proxied._proxied, nodes.FunctionDef) + assert len(enter._proxied._proxied.args.args) == 1 + assert enter._proxied._proxied.args.args[0].name == "self" + + exit_node = next(ast_nodes[1].infer()) + assert isinstance(exit_node, astroid.BoundMethod) + # Test that the FunctionDef accepts the arguments as defined in the ObjectModel + assert isinstance(exit_node._proxied._proxied, nodes.FunctionDef) + assert len(exit_node._proxied._proxied.args.args) == 4 + assert exit_node._proxied._proxied.args.args[0].name == "self" + assert exit_node._proxied._proxied.args.args[1].name == "exc_type" + assert exit_node._proxied._proxied.args.args[2].name == "exc_value" + assert exit_node._proxied._proxied.args.args[3].name == "traceback" + + class GeneratorModelTest(unittest.TestCase): def test_model(self) -> None: ast_nodes = builder.extract_node( @@ -585,6 +624,8 @@ def test(): gen.gi_code #@ gen.gi_frame #@ gen.send #@ + gen.__enter__ #@ + gen.__exit__ #@ """ ) assert isinstance(ast_nodes, list) @@ -605,6 +646,12 @@ def test(): send = next(ast_nodes[4].infer()) self.assertIsInstance(send, astroid.BoundMethod) + enter = next(ast_nodes[5].infer()) + assert isinstance(enter, astroid.BoundMethod) + + exit_node = next(ast_nodes[6].infer()) + assert isinstance(exit_node, astroid.BoundMethod) + class ExceptionModelTest(unittest.TestCase): @staticmethod diff --git a/tests/unittest_scoped_nodes.py b/tests/unittest_scoped_nodes.py index a11a3b9630..2a37a8ad7e 100644 --- a/tests/unittest_scoped_nodes.py +++ b/tests/unittest_scoped_nodes.py @@ -1268,6 +1268,22 @@ class Past(Present): self.assertIsInstance(attr1, nodes.AssignName) self.assertEqual(attr1.name, "attr") + @staticmethod + def test_getattr_with_empty_annassign() -> None: + code = """ + class Parent: + attr: int = 2 + + class Child(Parent): #@
attr: int + """ + child = extract_node(code) + attr = child.getattr("attr") + assert len(attr) == 1 + assert isinstance(attr[0], nodes.AssignName) + assert attr[0].name == "attr" + assert attr[0].lineno == 3 + def test_function_with_decorator_lineno(self) -> None: data = """ @f(a=2, @@ -1415,6 +1431,20 @@ class Invalid(object): inferred = next(klass.infer()) self.assertIsNone(inferred.metaclass()) + @staticmethod + def test_with_invalid_metaclass(): + klass = extract_node( + """ + class InvalidAsMetaclass: ... + + class Invalid(metaclass=InvalidAsMetaclass()): #@ + pass + """ + ) + inferred = next(klass.infer()) + metaclass = inferred.metaclass() + assert isinstance(metaclass, Instance) + def test_nonregr_infer_callresult(self) -> None: astroid = builder.parse( """ @@ -2534,6 +2564,21 @@ class Veg(Enum): assert inferred_member_value.value is None +def test_enums_value2member_map_() -> None: + """Check the `_value2member_map_` member is present in an Enum class""" + node = builder.extract_node( + """ + from enum import Enum + class Veg(Enum): + TOMATO: 1 + + Veg + """ + ) + inferred_class = node.inferred()[0] + assert "_value2member_map_" in inferred_class.locals + + @pytest.mark.parametrize("annotation, value", [("int", 42), ("bytes", b"")]) def test_enums_type_annotation_non_str_member(annotation, value) -> None: """A type-annotated member of an Enum class where: